import zeam.form.ztk.compat
from zeam.form.base.markers import Marker, NO_VALUE
from zeam.form.base.widgets import FieldWidget
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18nmessageid import MessageFactory
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
class TextLineField(Field):
"""A text line field.
"""
def __init__(self, title,
minLength=0,
maxLength=None,
**options):
super(TextLineField, self).__init__(title, **options)
self.minLength = minLength
self.maxLength = maxLength
def isEmpty(self, value):
return value is NO_VALUE or not len(value)
def validate(self, value, form):
error = super(TextLineField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker) and len(value):
assert isinstance(value, zeam.form.ztk.compat.string_types)
if self.minLength and len(value) < self.minLength:
return _(u"This text is too short.")
if self.maxLength and len(value) > self.maxLength:
return _(u"This text is too long.")
return None
# BBB
TextLineSchemaField = TextLineField
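# Illustrative sketch (not part of the original module): length validation
# on a standalone field. Passing None as `form` and `required=False` as an
# option are assumptions; the base Field class is expected to accept them.
#
#   field = TextLineField(u"Name", minLength=2, maxLength=5, required=False)
#   field.validate(u"ok", None)    # -> None (valid)
#   field.validate(u"x", None)     # -> u"This text is too short."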
class TextLineWidget(FieldWidget):
grok.adapts(TextLineField, None, None)
defaultHtmlClass = ['field', 'field-textline', 'form-control']
defaultHtmlAttributes = set(['readonly', 'required', 'autocomplete',
'maxlength', 'pattern', 'placeholder',
'size', 'style', 'disabled'])
def TextLineSchemaFactory(schema):
field = TextLineField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
minLength=schema.min_length,
maxLength=schema.max_length,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(TextLineSchemaFactory, schema_interfaces.ITextLine)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/textline.py
import datetime
from zeam.form.base.markers import NO_VALUE, Marker
from zeam.form.base.widgets import FieldWidget, DisplayFieldWidget
from zeam.form.base.widgets import WidgetExtractor
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18n.format import DateTimeParseError
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
class TimeField(Field):
"""A time field.
"""
valueLength = 'short'
def __init__(self, title,
min=None,
max=None,
**options):
super(TimeField, self).__init__(title, **options)
self.min = min
self.max = max
def getFormatter(self, form):
return form.request.locale.dates.getFormatter('time', self.valueLength)
def validate(self, value, form):
error = super(TimeField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker):
assert isinstance(value, datetime.time)
if self.min is not None and value < self.min:
formatter = self.getFormatter(form)
return _(u"This time is before ${not_before}.",
dict(not_before=formatter.format(self.min)))
if self.max is not None and value > self.max:
formatter = self.getFormatter(form)
return _(u"This time is after ${not_after}.",
dict(not_after=formatter.format(self.max)))
return None
# BBB
TimeSchemaField = TimeField
class TimeFieldWidget(FieldWidget):
grok.adapts(TimeField, Interface, Interface)
defaultHtmlClass = ['field', 'field-time']
def valueToUnicode(self, value):
formatter = self.component.getFormatter(self.form)
return formatter.format(value)
class TimeWidgetExtractor(WidgetExtractor):
grok.adapts(TimeField, Interface, Interface)
    def extract(self):
        value, error = super(TimeWidgetExtractor, self).extract()
        if value is not NO_VALUE:
            # As in the date extractor, treat an empty string as "no value"
            # rather than handing it to the locale parser.
            if not len(value):
                return NO_VALUE, None
            formatter = self.component.getFormatter(self.form)
            try:
                value = formatter.parse(value)
            except (ValueError, DateTimeParseError) as error:
                return None, str(error)
        return value, error
class TimeFieldDisplayWidget(DisplayFieldWidget):
grok.adapts(TimeField, Interface, Interface)
def valueToUnicode(self, value):
formatter = self.component.getFormatter(self.form)
return formatter.format(value)
def TimeSchemaFactory(schema):
field = TimeField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
min=schema.min,
max=schema.max,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(TimeSchemaFactory, schema_interfaces.ITime)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/time.py
from zeam.form.base.interfaces import IFieldExtractionValueSetting
from zeam.form.base.markers import Marker, NO_VALUE
from zeam.form.base.widgets import FieldWidget, FieldWidgetExtractor
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
class IntegerField(Field):
"""A integer field.
"""
def __init__(self, title,
min=None,
max=None,
**options):
# We pass min and max to Field to have them in htmlAttributes as well
super(IntegerField, self).__init__(title, min=min, max=max, **options)
self.min = min
self.max = max
def validate(self, value, form):
error = super(IntegerField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker):
assert isinstance(value, (int, float))
if self.min is not None and value < self.min:
return _(u"This number is too small.")
if self.max is not None and value > self.max:
return _(u"This number is too big.")
return None
# BBB
IntSchemaField = IntegerField
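# Illustrative sketch (not part of the original module): bound checks on a
# plain integer; as elsewhere, passing None as `form` is an assumption.
#
#   field = IntegerField(u"Age", min=0, max=150, required=False)
#   field.validate(42, None)    # -> None (valid)
#   field.validate(-1, None)    # -> u"This number is too small."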
class IntegerFieldWidgetExtractor(FieldWidgetExtractor):
grok.adapts(IntegerField, IFieldExtractionValueSetting, Interface)
valueType = int
failedMessage = _(u"This number is not a valid whole number.")
def extract(self):
value, error = super(IntegerFieldWidgetExtractor, self).extract()
if error:
return (value, error)
if value is not NO_VALUE and len(value):
try:
value = self.valueType(value)
except (ValueError, TypeError):
return (NO_VALUE, self.failedMessage)
return (value, None)
return (NO_VALUE, None)
class FloatField(IntegerField):
"""A float field.
"""
# BBB
FloatSchemaField = FloatField
class CurrencyField(FloatField):
""" A currency field.
"""
def __init__(self, min=None,
max=None,
symbol=u'€',
thousandsSeparator=u',',
decimalSeparator=u'.',
symbolPrecedes=False,
symbolSpaceSepartor=True,
fracDigits=2,
positiveSign=u'',
negativeSign=u'-',
signPrecedes=True,
**options):
super(CurrencyField, self).__init__(max=max, min=min, **options)
self.symbol = symbol
self.thousandsSeparator = thousandsSeparator
self.decimalSeparator = decimalSeparator
self.symbolPrecedes = symbolPrecedes
self.symbolSpaceSepartor = symbolSpaceSepartor
self.fracDigits = fracDigits
self.positiveSign = positiveSign
self.negativeSign = negativeSign
self.signPrecedes = signPrecedes
class FloatFieldWidgetExtractor(IntegerFieldWidgetExtractor):
grok.adapts(FloatField, IFieldExtractionValueSetting, Interface)
valueType = float
failedMessage = _(u"This number is not a valid decimal number.")
class NumberWidget(FieldWidget):
grok.adapts(IntegerField, Interface, Interface)
defaultHtmlClass = ['field', 'field-number']
defaultHtmlAttributes = set(['readonly', 'required', 'autocomplete',
'max', 'min', 'setup', 'placeholder',
'style'])
class CurrencyDisplayWidget(FieldWidget):
grok.adapts(CurrencyField, Interface, Interface)
grok.name('display')
defaultHtmlClass = ['field', 'field-currency']
def valueToUnicode(self, value):
return self.formatHtmlCurrency(value)
def formatHtmlCurrency(self, value):
string_value = ("%%.0%df" % self.component.fracDigits) % abs(value)
integer_part, decimal_part = string_value.split('.')
digits = list(integer_part)
chars = []
count = 0
while digits:
digit = digits.pop()
chars.append(digit)
count += 1
if count % 3 == 0 and len(digits):
chars.append(self.component.thousandsSeparator)
integer_part = "".join(reversed(chars))
buf = u''
if self.component.symbolPrecedes:
buf += self.component.symbol
if self.component.symbolSpaceSepartor:
buf += ' '
if value >= 0:
buf += self.component.positiveSign
else:
buf += self.component.negativeSign
buf += integer_part + self.component.decimalSeparator + decimal_part
if not self.component.symbolPrecedes:
if self.component.symbolSpaceSepartor:
buf += ' '
buf += self.component.symbol
return buf
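# Worked example (not part of the original module) for the method above,
# using the defaults defined on CurrencyField (fracDigits=2, grouping by
# thousands, trailing symbol with a space):
#   formatHtmlCurrency(1234.5)  -> u'1,234.50 €'
#   formatHtmlCurrency(-1234.5) -> u'-1,234.50 €' (sign, then grouped digits)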
def IntegerSchemaFactory(schema):
field = IntegerField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
min=schema.min,
max=schema.max,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def FloatSchemaFactory(schema):
field = FloatField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
min=schema.min,
max=schema.max,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
registerSchemaField(FloatSchemaFactory, schema_interfaces.IFloat)
    registerSchemaField(IntegerSchemaFactory, schema_interfaces.IInt)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/number.py
import zeam.form.ztk.compat
from zeam.form.base.markers import Marker, NO_VALUE
from zeam.form.base.widgets import FieldWidget
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18nmessageid import MessageFactory
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
class PasswordField(Field):
"""A password field.
"""
def __init__(self, title,
minLength=0,
maxLength=None,
**options):
super(PasswordField, self).__init__(title, **options)
self.minLength = minLength
self.maxLength = maxLength
def isEmpty(self, value):
return value is NO_VALUE or not len(value)
def validate(self, value, form):
error = super(PasswordField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker) and len(value):
assert isinstance(value, zeam.form.ztk.compat.string_types)
if self.minLength and len(value) < self.minLength:
return _(u"This password is too short.")
if self.maxLength and len(value) > self.maxLength:
return _(u"This password is too long.")
return None
class PasswordWidget(FieldWidget):
grok.adapts(PasswordField, None, None)
defaultHtmlClass = ['field', 'field-password']
defaultHtmlAttributes = ['readonly', 'required', 'autocomplete',
'maxlength', 'pattern', 'placeholder',
'size', 'style']
def PasswordSchemaFactory(schema):
field = PasswordField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
minLength=schema.min_length,
maxLength=schema.max_length,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(PasswordSchemaFactory, schema_interfaces.IPassword)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/password.py
from zeam.form.base.markers import NO_VALUE, Marker
from zeam.form.base.widgets import FieldWidget
from zeam.form.base.widgets import WidgetExtractor
from zeam.form.ztk.fields import Field, registerSchemaField
from zeam.form.ztk.interfaces import IFormSourceBinder
from grokcore import component as grok
from zope import component
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface
from zope.schema import interfaces as schema_interfaces
from zope.schema.interfaces import IContextSourceBinder
from zope.schema.interfaces import IVocabularyTokenized, IVocabularyFactory
_ = MessageFactory("zeam.form.base")
class ChoiceField(Field):
"""A choice field.
"""
_source = None
_vocabularyFactory = None
_vocabularyName = None
def __init__(self, title,
source=None,
vocabularyName=None,
**options):
super(ChoiceField, self).__init__(title, **options)
if source is not None:
self.source = source
elif vocabularyName is not None:
self.vocabularyFactory = vocabularyName
@property
def vocabularyFactory(self):
if self._vocabularyFactory is None:
if self._vocabularyName is not None:
self._vocabularyFactory = component.getUtility(
schema_interfaces.IVocabularyFactory,
name=self._vocabularyName)
return self._vocabularyFactory
@vocabularyFactory.setter
def vocabularyFactory(self, factory):
if isinstance(factory, str):
self._vocabularyName = factory
self._vocabularyFactory = None
else:
self._vocabularyName = None
self._vocabularyFactory = factory
self._source = None
@property
def source(self):
return self._source
@source.setter
def source(self, source):
# Verify if this is a source or a vocabulary
if IVocabularyTokenized.providedBy(source):
self._source = source
else:
# Be sure to reset the source
self._source = None
self._vocabularyFactory = source
def getChoices(self, form):
source = self.source
if source is None:
factory = self.vocabularyFactory
assert factory is not None, \
"No vocabulary source available."
if (IContextSourceBinder.providedBy(factory) or
IVocabularyFactory.providedBy(factory)):
source = factory(form.context)
elif IFormSourceBinder.providedBy(factory):
source = factory(form)
assert IVocabularyTokenized.providedBy(source), \
"No valid vocabulary available, %s is not valid for %s" % (
source, self)
return source
def validate(self, value, form):
error = super(ChoiceField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker):
choices = self.getChoices(form)
if value not in choices:
return _(u"The selected value is not among the possible choices.")
return None
# BBB
ChoiceSchemaField = ChoiceField
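# Illustrative sketch (not part of the original module): a tokenized
# vocabulary can be handed straight to `source`. SimpleVocabulary (from
# zope.schema.vocabulary) provides IVocabularyTokenized, so the setter
# stores it directly and getChoices() returns it unchanged.
#
#   from zope.schema.vocabulary import SimpleVocabulary
#   field = ChoiceField(u"Color",
#                       source=SimpleVocabulary.fromValues([u'red', u'blue']))
#   field.validate(u'red', form)    # -> None, given some form object
#   field.validate(u'pink', form)   # -> "The selected value is not among..."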
class ChoiceFieldWidget(FieldWidget):
grok.adapts(ChoiceField, Interface, Interface)
defaultHtmlClass = ['field', 'field-choice']
defaultHtmlAttributes = set(['required', 'size', 'style', 'disabled'])
_choices = None
def __init__(self, field, form, request):
super(ChoiceFieldWidget, self).__init__(field, form, request)
self.source = field
def lookupTerm(self, value):
choices = self.choices()
try:
return choices.getTerm(value)
except LookupError:
            # The stored value is invalid; fall back to the default one.
default = self.component.getDefaultValue(self.form)
if default is not NO_VALUE:
return choices.getTerm(default)
return None
def valueToUnicode(self, value):
term = self.lookupTerm(value)
if term is not None:
return term.token
return u''
def choices(self):
if self._choices is not None:
return self._choices
# self.source is used instead of self.component in order to be
# able to override it in subclasses.
self._choices = self.source.getChoices(self.form)
return self._choices
class ChoiceDisplayWidget(ChoiceFieldWidget):
grok.name('display')
def valueToUnicode(self, value):
term = self.lookupTerm(value)
if term is not None:
return term.title
return u''
class ChoiceWidgetExtractor(WidgetExtractor):
grok.adapts(ChoiceField, Interface, Interface)
def extract(self):
value, error = super(ChoiceWidgetExtractor, self).extract()
if value is not NO_VALUE:
choices = self.component.getChoices(self.form)
try:
value = choices.getTermByToken(value).value
except LookupError:
                return (None, _(u'Invalid value'))
return (value, error)
# Radio Widget
class RadioFieldWidget(ChoiceFieldWidget):
grok.adapts(ChoiceField, Interface, Interface)
grok.name('radio')
def renderableChoices(self):
current = self.inputValue()
base_id = self.htmlId()
for i, choice in enumerate(self.choices()):
yield {'token': choice.token,
'title': choice.title or choice.token,
'checked': choice.token == current and 'checked' or None,
'id': base_id + '-' + str(i)}
def ChoiceSchemaFactory(schema):
field = ChoiceField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
source=schema.vocabulary,
vocabularyName=schema.vocabularyName,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(ChoiceSchemaFactory, schema_interfaces.IChoice)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/choice.py
from zeam.form.base.datamanager import ObjectDataManager
from zeam.form.base.errors import Errors
from zeam.form.base.fields import Fields
from zeam.form.base.form import cloneFormData
from zeam.form.base.markers import NO_VALUE, Marker, DEFAULT
from zeam.form.base.widgets import WidgetExtractor
from zeam.form.base.widgets import Widgets, FieldWidget
from zeam.form.ztk.fields import Field, registerSchemaField
from zeam.form.ztk.interfaces import IObjectField
from grokcore import component as grok
from zope.component import getUtility
from zope.component.interfaces import IFactory
from zope.interface import Interface, implementer
from zope.schema import interfaces as schema_interfaces
@implementer(IObjectField)
class ObjectField(Field):
"""A collection field.
"""
objectFactory = DEFAULT
dataManager = ObjectDataManager
def __init__(self, title, schema=None, **options):
super(ObjectField, self).__init__(title, **options)
self._schema = schema
self._fields = Fields()
if schema is not None:
self._fields.extend(schema)
@property
def objectSchema(self):
return self._schema
@property
def objectFields(self):
return self._fields
def getObjectFactory(self):
if self.objectFactory is not DEFAULT:
return self.objectFactory
schema = self.objectSchema
return getUtility(IFactory, name=schema.__identifier__)
# BBB
ObjectSchemaField = ObjectField
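# Note on factory resolution (IAddress and Address are hypothetical names):
# with ObjectField(u"Address", schema=IAddress), getObjectFactory() looks up
# the IFactory utility registered under IAddress.__identifier__, unless an
# explicit factory is set on the field:
#
#   field = ObjectField(u"Address", schema=IAddress)
#   field.objectFactory = Address    # bypasses the IFactory utility lookup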
class ObjectFieldWidget(FieldWidget):
grok.adapts(ObjectField, Interface, Interface)
def prepareContentValue(self, value):
if value is NO_VALUE:
return {self.identifier: []}
return {self.identifier: value}
def update(self):
super(ObjectFieldWidget, self).update()
value = self.component.dataManager(self.inputValue())
form = cloneFormData(self.form, value, self.identifier)
self.objectWidgets = Widgets(form=form, request=self.request)
self.objectWidgets.extend(self.component.objectFields)
self.objectWidgets.update()
class ObjectDisplayWidget(ObjectFieldWidget):
grok.name('display')
class ObjectFieldExtractor(WidgetExtractor):
grok.adapts(ObjectField, Interface, Interface)
def extract(self):
is_present = self.request.form.get(self.identifier, NO_VALUE)
if is_present is NO_VALUE:
return (NO_VALUE, None)
value = None
form = cloneFormData(self.form, None, self.identifier)
data, errors = form.extractData(self.component.objectFields)
if not errors:
factory = self.component.getObjectFactory()
# Create an object with values
value = factory(**{
k:v for k, v in data.items() if not isinstance(v, Marker)})
return (value, None)
return (value, Errors(*errors, identifier=self.identifier))
def ObjectSchemaFactory(schema):
field = ObjectField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
schema=schema.schema,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(ObjectSchemaFactory, schema_interfaces.IObject)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/object.py
from zeam.form.base.markers import Marker, NO_VALUE
from zeam.form.base.widgets import FieldWidget, DisplayFieldWidget
from zeam.form.base.widgets import WidgetExtractor
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18n.format import DateTimeParseError
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
class DateField(Field):
"""A date field.
"""
valueLength = 'short'
@property
def valueType(self): # Read-only
return 'date'
def __init__(self, title,
min=None,
max=None,
**options):
super(DateField, self).__init__(title, **options)
self.min = min
self.max = max
def getFormatter(self, form):
return form.request.locale.dates.getFormatter(
self.valueType, self.valueLength)
def validate(self, value, form):
error = super(DateField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker):
if self.min is not None and value < self.min:
formatter = self.getFormatter(form)
return _(u"This date is before the ${not_before}.",
dict(not_before=formatter.format(self.min)))
if self.max is not None and value > self.max:
formatter = self.getFormatter(form)
return _(u"This date is after the ${not_after}.",
dict(not_after=formatter.format(self.max)))
return None
# BBB
DateSchemaField = DateField
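# Illustrative sketch (not part of the original module): the locale
# formatter is only needed to build an error message, so a value inside the
# bounds validates without touching the form's request; None as `form` is
# an assumption, as elsewhere.
#
#   import datetime
#   field = DateField(u"Due", min=datetime.date(2020, 1, 1), required=False)
#   field.validate(datetime.date(2020, 6, 1), None)    # -> None (valid)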
class DatetimeField(DateField):
"""A datetime field.
"""
valueLength = 'short'
@property
def valueType(self): # Read-only
return 'dateTime'
# BBB
DatetimeSchemaField = DatetimeField
class DateFieldWidget(FieldWidget):
grok.adapts(DateField, Interface, Interface)
defaultHtmlClass = ['field', 'field-date']
def valueToUnicode(self, value):
formatter = self.component.getFormatter(self.form)
return formatter.format(value)
class DateWidgetExtractor(WidgetExtractor):
grok.adapts(DateField, Interface, Interface)
def extract(self):
value, error = super(DateWidgetExtractor, self).extract()
if value is not NO_VALUE:
if not len(value):
return NO_VALUE, None
formatter = self.component.getFormatter(self.form)
try:
value = formatter.parse(value)
except (ValueError, DateTimeParseError) as error:
return None, str(error)
return value, error
class DateFieldDisplayWidget(DisplayFieldWidget):
grok.adapts(DateField, Interface, Interface)
def valueToUnicode(self, value):
formatter = self.component.getFormatter(self.form)
return formatter.format(value)
def DateSchemaFactory(schema):
field = DateField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
min=schema.min,
max=schema.max,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def DatetimeSchemaFactory(schema):
field = DatetimeField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
min=schema.min,
max=schema.max,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
registerSchemaField(DatetimeSchemaFactory, schema_interfaces.IDatetime)
    registerSchemaField(DateSchemaFactory, schema_interfaces.IDate)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/date.py
import zeam.form.ztk.compat
from zeam.form.base.markers import Marker, NO_VALUE
from zeam.form.base.widgets import FieldWidget
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
class TextField(Field):
"""A text field.
"""
def __init__(self, title,
minLength=0,
maxLength=None,
**options):
if 'cols' not in options:
options['cols'] = '80'
if 'rows' not in options:
options['rows'] = '5'
super(TextField, self).__init__(title, **options)
self.minLength = minLength
self.maxLength = maxLength
def isEmpty(self, value):
return value is NO_VALUE or not len(value)
def validate(self, value, form):
error = super(TextField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker) and len(value):
assert isinstance(value, zeam.form.ztk.compat.string_types)
if self.minLength and len(value) < self.minLength:
return _(u"Not enough text was entered.")
if self.maxLength and len(value) > self.maxLength:
return _(u"Too much text was entered.")
return None
# BBB
TextSchemaField = TextField
class TextareaWidget(FieldWidget):
grok.adapts(TextField, Interface, Interface)
defaultHtmlClass = ['field', 'field-text']
    defaultHtmlAttributes = set(['maxlength', 'placeholder', 'required',
                                 'rows', 'wrap', 'readonly', 'cols',
                                 'style'])
def TextSchemaFactory(schema):
field = TextField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
minLength=schema.min_length,
maxLength=schema.max_length,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(TextSchemaFactory, schema_interfaces.IText)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/text.py
import re
import zeam.form.ztk.compat
from zeam.form.base.markers import Marker, NO_VALUE
from zeam.form.base.widgets import FieldWidget
from zeam.form.ztk.fields import Field, registerSchemaField
from grokcore import component as grok
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface
from zope.schema import interfaces as schema_interfaces
_ = MessageFactory("zeam.form.base")
isURI = re.compile(
    # scheme (the range must be A-Z, not A-z: the latter also matches the
    # ASCII punctuation that sits between 'Z' and 'a')
    r"[a-zA-Z0-9+.-]+:"
    # non space (should be pickier)
    r"\S*$").match
class URIField(Field):
"""A text line field.
"""
target = '_self'
def __init__(self, title,
minLength=0,
maxLength=None,
**options):
super(URIField, self).__init__(title, **options)
self.minLength = minLength
self.maxLength = maxLength
def isEmpty(self, value):
return value is NO_VALUE or not len(value)
def validate(self, value, form):
error = super(URIField, self).validate(value, form)
if error is not None:
return error
if not isinstance(value, Marker) and len(value):
assert isinstance(value, zeam.form.ztk.compat.string_types)
if not isURI(value):
return _(u"The URI is malformed.")
if self.minLength and len(value) < self.minLength:
return _(u"The URI is too short.")
if self.maxLength and len(value) > self.maxLength:
return _(u"The URI is too long.")
return None
# BBB
URISchemaField = URIField
class URIWidget(FieldWidget):
grok.adapts(URIField, Interface, Interface)
defaultHtmlClass = ['field', 'field-uri']
defaultHtmlAttributes = set(['readonly', 'required', 'autocomplete',
'maxlength', 'pattern', 'placeholder',
'size', 'style'])
class URIDisplayWidget(FieldWidget):
grok.adapts(URIField, Interface, Interface)
grok.name('display')
@property
def target(self):
return self.component.target
def URISchemaFactory(schema):
field = URIField(
schema.title or None,
identifier=schema.__name__,
description=schema.description,
required=schema.required,
readonly=schema.readonly,
minLength=schema.min_length,
maxLength=schema.max_length,
interface=schema.interface,
constrainValue=schema.constraint,
defaultFactory=schema.defaultFactory,
defaultValue=schema.__dict__['default'] or NO_VALUE)
return field
def register():
    registerSchemaField(URISchemaFactory, schema_interfaces.IURI)

# Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/uri.py
(function ($, jsontemplate){
var FIELD_NAME_REGEXP = /(.*)\.field\.(\d+)$/;
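    // e.g. "lines.field.2" splits into the base name "lines" and the line
    // count "2"; update_line_names() below rebuilds such names whenever
    // lines are added, moved or removed.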
var increment = function(value) {
return (parseInt(value) + 1).toString();
};
var starts_with = function(string) {
var starter = '^';
for (var i= 1; i < arguments.length; i++) {
starter += arguments[i];
};
return string.match(starter);
};
var create_template = function($node) {
if (!$node.length) {
// allowAdding is false;
return undefined;
};
var identifier = $node.attr('rel'),
template = new jsontemplate.Template(
$.trim($node.get(0).innerHTML), {
// For the moment this use an hijack version of
// json-template.
undefined_callable: function(name){
return '{' + name + '}';
}
});
// Remove the template from the DOM.
$node.remove();
// Return an object that let you render the template
return {
identifier: identifier,
template: template,
render: function(identifier) {
var parameters = {};
parameters[this.identifier] = identifier;
var data = $(this.template.expand(parameters));
if (!data.is('.field-collection-line')) {
data = data.find('.field-collection-line:first');
};
            // The line might contain other field-collection
data.find('div.field-collection').each(function (){
create_field($(this));
});
return data;
}
};
};
var update_line_names = function($line, base_name, count) {
var selector_name = base_name + '.checked.',
present_name = base_name + '.present.',
field_name = base_name + '.field.';
var rewriter = function () {
var $input = $(this),
input_name,
template_name = $input.attr('name');
if (starts_with(template_name, selector_name)) {
$input.attr('name', selector_name + count);
} else if (starts_with(template_name, present_name)) {
$input.attr('name', present_name + count);
} else if (starts_with(template_name, field_name)) {
input_name = field_name + count;
var i = field_name.length;
// Consume the old count
for (; i < template_name.length && template_name[i] != '.'; i++);
// Copy the end of the old name to the new one
for (; i < template_name.length; i++) {
input_name += template_name[i];
};
$input.attr('name', input_name);
};
};
// Rewrite name for input, textarea and select tags.
$line.find('input').each(rewriter);
$line.find('textarea').each(rewriter);
$line.find('select').each(rewriter);
        // Update the rel attribute on the line.
$line.attr('rel', field_name + count);
};
var update_move_buttons = function($line_top, $line_bottom) {
        // Show or hide the move up/down buttons depending on whether the
        // line is the first or the last one. This code exists because IE 7
        // doesn't support :last-child in CSS.
if ($line_top.is(':first-child')) {
$line_bottom.children('.ordering-actions').children(
'.field-collection-move-up').show();
$line_top.children('.ordering-actions').children(
'.field-collection-move-up').hide();
};
if ($line_bottom.is(':last-child')) {
$line_top.children('.ordering-actions').children(
'.field-collection-move-down').show();
$line_bottom.children('.ordering-actions').children(
'.field-collection-move-down').hide();
};
};
var create_field = function($field) {
if (!$field.is('.field-collection')) {
return;
};
var $container = $field.find('.field-collection-lines:first'),
template = create_template($field.children('.field-collection-template'));
var move_line_down = function () {
var $line = $(this).closest('.field-collection-line'),
$next_line = $line.next();
if ($next_line.is('.field-collection-line')) {
var name_info = FIELD_NAME_REGEXP.exec($line.attr('rel')),
base_name = name_info[1],
count = name_info[2];
var next_name_info = FIELD_NAME_REGEXP.exec($next_line.attr('rel')),
next_count = next_name_info[2];
$line.remove();
$line.insertAfter($next_line);
update_line_names($line, base_name, next_count);
update_line_names($next_line, base_name, count);
update_move_buttons($next_line, $line);
};
return false;
};
var move_line_up = function () {
var $line = $(this).closest('.field-collection-line'),
$previous_line = $line.prev();
if ($previous_line.is('.field-collection-line')) {
var name_info = FIELD_NAME_REGEXP.exec($line.attr('rel')),
base_name = name_info[1],
count = name_info[2];
var previous_name_info = FIELD_NAME_REGEXP.exec(
$previous_line.attr('rel')),
previous_count = previous_name_info[2];
$line.remove();
$line.insertBefore($previous_line);
update_line_names($line, base_name, previous_count);
update_line_names($previous_line, base_name, count);
update_move_buttons($line, $previous_line);
};
return false;
};
var remove_line = function() {
var $selected = $container.children(
'.field-collection-line').children(
'.line-actions').children(
'input.field-collection-line-selector:checked');
$selected.each(function (){
var $line = $(this).closest('.field-collection-line'),
$previous_line = $line.prev('.field-collection-line'),
$next_line = $line.next('.field-collection-line');
$line.remove();
update_move_buttons($next_line, $previous_line);
var $lines = $container.find('.field-collection-line');
if (!$lines.length) {
var $empty_message = $field.find('.field-collection-empty'),
$header_message = $field.children('.field-collection-header'),
$actions = $field.children('.multi-actions'),
$remove_button = $actions.children('.field-collection-remove-line');
$empty_message.slideDown();
$header_message.slideUp();
$remove_button.fadeOut();
};
});
return false;
};
var add_line = function() {
// Clear the empty message
var $header_message = $field.children('.field-collection-header'),
$empty_message = $field.children('.field-collection-empty');
if ($empty_message.is(':visible')) {
$header_message.slideDown();
$empty_message.slideUp();
};
// Display the remove button
var $actions = $field.children('.multi-actions'),
$remove_button = $actions.children('.field-collection-remove-line');
if (!$remove_button.is(':visible')) {
$remove_button.fadeIn();
};
// Add a line
var $counter = $field.children('input.field-collection-counter'),
identifier = $counter.val(),
$new_line = template.render(identifier),
$container = $field.find('.field-collection-lines:first'),
$previous_line = $container.children('.field-collection-line:last');
if (!$previous_line.length) {
$previous_line = $new_line;
};
$new_line.appendTo($container);
update_move_buttons($previous_line, $new_line);
$counter.val(increment(identifier));
$new_line.trigger('addline-zeamform');
return false;
};
// Bind events
$field.on('click', 'input.field-collection-add-line', add_line);
$field.on('click', 'input.field-collection-remove-line', remove_line);
$field.on('click', 'button.field-collection-move-up', move_line_up);
$field.on('click', 'button.field-collection-move-down', move_line_down);
// Clear style on any existing buttons.
update_move_buttons(
$container.children('.field-collection-line:first'),
$container.children('.field-collection-line:last'));
};
$.extend($.fn, {
ZeamCollectionWidget: function () {
return $(this).each(function() { create_field($(this));});
}
});
$(document).ready(function (){
// Load existing fields.
$('form div.field-collection').ZeamCollectionWidget();
});
})(jQuery, jsontemplate);

// Source file: zeam.form.ztk-1.4.0, src/zeam/form/ztk/widgets/static/collection.js
zeam.jsontemplate
*****************
Introduction
============
This library packages a patched version of JSON Template for
`fanstatic`_. It behaves exactly like the official version, except
that it adds an ``undefined_callable`` option: a function that is
called whenever a value is undefined, and returns the value to use
instead.
.. _`fanstatic`: http://fanstatic.org
This requires integration between your web framework and ``fanstatic``,
and making sure that the original resources (shipped in the ``resources``
directory in ``zeam.jsontemplate``) are published to some URL.
.. Source file: zeam.jsontemplate-0.1, README.txt
// $Id$
//
// JavaScript implementation of json-template.
//
// This is predefined in tests, shouldn't be defined anywhere else. TODO: Do
// something nicer.
var log = log || function() {};
var repr = repr || function() {};
// The "module" exported by this script is called "jsontemplate":
var jsontemplate = function() {
// Regex escaping for metacharacters
function EscapeMeta(meta) {
return meta.replace(/([\{\}\(\)\[\]\|\^\$\-\+\?])/g, '\\$1');
}
var token_re_cache = {};
function _MakeTokenRegex(meta_left, meta_right) {
var key = meta_left + meta_right;
var regex = token_re_cache[key];
if (regex === undefined) {
var str = '(' + EscapeMeta(meta_left) + '.*?' + EscapeMeta(meta_right) +
'\n?)';
regex = new RegExp(str, 'g');
}
return regex;
}
//
// Formatters
//
function HtmlEscape(s) {
return s.replace(/&/g,'&').
replace(/>/g,'>').
replace(/</g,'<');
}
function HtmlTagEscape(s) {
return s.replace(/&/g,'&').
replace(/>/g,'>').
replace(/</g,'<').
replace(/"/g,'"');
}
// Default ToString can be changed
function ToString(s) {
if (s === null) {
return 'null';
}
return s.toString();
}
// Formatter to pluralize words
function _Pluralize(value, unused_context, args) {
var s, p;
switch (args.length) {
case 0:
s = ''; p = 's';
break;
case 1:
s = ''; p = args[0];
break;
case 2:
s = args[0]; p = args[1];
break;
default:
// Should have been checked at compile time
throw {
name: 'EvaluationError', message: 'pluralize got too many args'
};
}
return (value > 1) ? p : s;
}
function _Cycle(value, unused_context, args) {
// Cycle between various values on consecutive integers.
// @index starts from 1, so use 1-based indexing.
return args[(value - 1) % args.length];
}
var DEFAULT_FORMATTERS = {
'html': HtmlEscape,
'htmltag': HtmlTagEscape,
'html-attr-value': HtmlTagEscape,
'str': ToString,
'raw': function(x) { return x; },
'AbsUrl': function(value, context) {
// TODO: Normalize leading/trailing slashes
return context.get('base-url') + '/' + value;
}
};
var DEFAULT_PREDICATES = {
'singular?': function(x) { return x == 1; },
'plural?': function(x) { return x > 1; },
'Debug?': function(unused, context) {
try {
return context.get('debug');
} catch(err) {
if (err.name == 'UndefinedVariable') {
return false;
} else {
throw err;
}
}
}
};
var FunctionRegistry = function() {
return {
lookup: function(user_str) {
return [null, null];
}
};
};
var SimpleRegistry = function(obj) {
return {
lookup: function(user_str) {
var func = obj[user_str] || null;
return [func, null];
}
};
};
var CallableRegistry = function(callable) {
return {
lookup: function(user_str) {
var func = callable(user_str);
return [func, null];
}
};
};
// Default formatters which can't be expressed in DEFAULT_FORMATTERS
var PrefixRegistry = function(functions) {
return {
lookup: function(user_str) {
for (var i = 0; i < functions.length; i++) {
var name = functions[i].name, func = functions[i].func;
if (user_str.slice(0, name.length) == name) {
// Delimiter is usually a space, but could be something else
var args;
var splitchar = user_str.charAt(name.length);
if (splitchar === '') {
args = []; // No arguments
} else {
args = user_str.split(splitchar).slice(1);
}
return [func, args];
}
}
return [null, null]; // No formatter
}
};
};
var ChainedRegistry = function(registries) {
return {
lookup: function(user_str) {
for (var i=0; i<registries.length; i++) {
var result = registries[i].lookup(user_str);
if (result[0]) {
return result;
}
}
return [null, null]; // Nothing found
}
};
};
//
// Template implementation
//
function _ScopedContext(context, options) {
// The stack contains:
// The current context (an object).
// An iteration index. -1 means we're NOT iterating.
var stack = [{context: context, index: -1}];
return {
PushSection: function(name) {
if (name === undefined || name === null) {
return null;
}
var new_context;
if (name == '@') {
new_context = stack[stack.length-1].context;
} else {
new_context = stack[stack.length-1].context[name] || null;
}
stack.push({context: new_context, index: -1});
return new_context;
},
Pop: function() {
stack.pop();
},
next: function() {
var stacktop = stack[stack.length-1];
// Now we're iterating -- push a new mutable object onto the stack
if (stacktop.index == -1) {
stacktop = {context: null, index: 0};
stack.push(stacktop);
}
// The thing we're iterating over
var context_array = stack[stack.length-2].context;
// We're already done
if (stacktop.index == context_array.length) {
stack.pop();
return undefined; // sentinel to say that we're done
}
stacktop.context = context_array[stacktop.index++];
return true; // OK, we mutated the stack
},
_Undefined: function(name) {
if (options.undefined_str !== undefined) {
return options.undefined_str;
};
if (options.undefined_callable !== undefined) {
return options.undefined_callable(name);
};
throw {
name: 'UndefinedVariable', message: name + ' is not defined'
};
},
_LookUpStack: function(name) {
var i = stack.length - 1;
while (true) {
var frame = stack[i];
if (name == '@index') {
if (frame.index != -1) { // -1 is undefined
return frame.index;
}
} else {
var context = frame.context;
if (typeof context === 'object') {
var value = context[name];
if (value !== undefined) {
return value;
}
}
}
i--;
if (i <= -1) {
return this._Undefined(name);
}
}
},
get: function(name) {
if (name == '@') {
return stack[stack.length-1].context;
}
var parts = name.split('.');
var value = this._LookUpStack(parts[0]);
if (parts.length > 1) {
for (var i=1; i<parts.length; i++) {
value = value[parts[i]];
if (value === undefined) {
return this._Undefined(parts[i]);
}
}
}
return value;
}
};
}
// Crockford's "functional inheritance" pattern
var _AbstractSection = function(spec) {
var that = {};
that.current_clause = [];
that.Append = function(statement) {
that.current_clause.push(statement);
};
that.AlternatesWith = function() {
throw {
name: 'TemplateSyntaxError',
message:
          '{.alternates with} can only appear within {.repeated section ...}'
};
};
that.NewOrClause = function(pred) {
throw { name: 'NotImplemented' }; // "Abstract"
};
return that;
};
var _Section = function(spec) {
var that = _AbstractSection(spec);
that.statements = {'default': that.current_clause};
that.section_name = spec.section_name;
that.Statements = function(clause) {
clause = clause || 'default';
return that.statements[clause] || [];
};
that.NewOrClause = function(pred) {
if (pred) {
throw {
name: 'TemplateSyntaxError',
message: '{.or} clause only takes a predicate inside predicate blocks'
};
}
that.current_clause = [];
that.statements['or'] = that.current_clause;
};
return that;
};
// Repeated section is like section, but it supports {.alternates with}
var _RepeatedSection = function(spec) {
var that = _Section(spec);
that.AlternatesWith = function() {
that.current_clause = [];
that.statements['alternate'] = that.current_clause;
};
return that;
};
// Represents a sequence of predicate clauses.
var _PredicateSection = function(spec) {
var that = _AbstractSection(spec);
// Array of func, statements
that.clauses = [];
that.NewOrClause = function(pred) {
// {.or} always executes if reached, so use identity func with no args
pred = pred || [function(x) { return true; }, null];
that.current_clause = [];
that.clauses.push([pred, that.current_clause]);
};
return that;
};
function _Execute(statements, context, callback) {
for (var i=0; i<statements.length; i++) {
var statement = statements[i];
if (typeof(statement) == 'string') {
callback(statement);
} else {
var func = statement[0];
var args = statement[1];
func(args, context, callback);
}
}
}
function _DoSubstitute(statement, context, callback) {
var value;
value = context.get(statement.name);
// Format values
for (var i=0; i<statement.formatters.length; i++) {
var pair = statement.formatters[i];
var formatter = pair[0];
var args = pair[1];
value = formatter(value, context, args);
}
callback(value);
}
// for [section foo]
function _DoSection(args, context, callback) {
var block = args;
var value = context.PushSection(block.section_name);
var do_section = false;
// "truthy" values should have their sections executed.
if (value) {
do_section = true;
}
// Except: if the value is a zero-length array (which is "truthy")
if (value && value.length === 0) {
do_section = false;
}
if (do_section) {
_Execute(block.Statements(), context, callback);
context.Pop();
} else { // Empty list, None, False, etc.
context.Pop();
_Execute(block.Statements('or'), context, callback);
}
}
// {.pred1?} A {.or pred2?} B ... {.or} Z {.end}
function _DoPredicates(args, context, callback) {
// Here we execute the first clause that evaluates to true, and then stop.
var block = args;
var value = context.get('@');
for (var i=0; i<block.clauses.length; i++) {
var clause = block.clauses[i];
var predicate = clause[0][0];
var pred_args = clause[0][1];
var statements = clause[1];
var do_clause = predicate(value, context, pred_args);
if (do_clause) {
_Execute(statements, context, callback);
break;
}
}
}
function _DoRepeatedSection(args, context, callback) {
var block = args;
var items = context.PushSection(block.section_name);
var pushed = true;
if (items && items.length > 0) {
// TODO: check that items is an array; apparently this is hard in JavaScript
//if type(items) is not list:
// raise EvaluationError('Expected a list; got %s' % type(items))
// Execute the statements in the block for every item in the list.
// Execute the alternate block on every iteration except the last. Each
// item could be an atom (string, integer, etc.) or a dictionary.
var last_index = items.length - 1;
var statements = block.Statements();
var alt_statements = block.Statements('alternate');
for (var i=0; context.next() !== undefined; i++) {
_Execute(statements, context, callback);
if (i != last_index) {
_Execute(alt_statements, context, callback);
}
}
} else {
_Execute(block.Statements('or'), context, callback);
}
context.Pop();
}
var _SECTION_RE = /(repeated)?\s*(section)\s+(\S+)?/;
var _OR_RE = /or(?:\s+(.+))?/;
var _IF_RE = /if(?:\s+(.+))?/;
// Turn a object literal, function, or Registry into a Registry
function MakeRegistry(obj) {
if (!obj) {
// if null/undefined, use a totally empty FunctionRegistry
return new FunctionRegistry();
} else if (typeof obj === 'function') {
return new CallableRegistry(obj);
} else if (obj.lookup !== undefined) {
// TODO: Is this a good pattern? There is a namespace conflict where get
// could be either a formatter or a method on a FunctionRegistry.
// instanceof might be more robust.
return obj;
} else if (typeof obj === 'object') {
return new SimpleRegistry(obj);
}
}
// TODO: The compile function could be in a different module, in case we want to
// compile on the server side.
function _Compile(template_str, options) {
var more_formatters = MakeRegistry(options.more_formatters);
// default formatters with arguments
var default_formatters = PrefixRegistry([
{name: 'pluralize', func: _Pluralize},
{name: 'cycle', func: _Cycle}
]);
var all_formatters = new ChainedRegistry([
more_formatters,
SimpleRegistry(DEFAULT_FORMATTERS),
default_formatters
]);
var more_predicates = MakeRegistry(options.more_predicates);
// TODO: Add defaults
var all_predicates = new ChainedRegistry([
more_predicates, SimpleRegistry(DEFAULT_PREDICATES)
]);
// We want to allow an explicit null value for default_formatter, which means
// that an error is raised if no formatter is specified.
var default_formatter;
if (options.default_formatter === undefined) {
default_formatter = 'str';
} else {
default_formatter = options.default_formatter;
}
function GetFormatter(format_str) {
var pair = all_formatters.lookup(format_str);
if (!pair[0]) {
throw {
name: 'BadFormatter',
message: format_str + ' is not a valid formatter'
};
}
return pair;
}
function GetPredicate(pred_str) {
var pair = all_predicates.lookup(pred_str);
if (!pair[0]) {
throw {
name: 'BadPredicate',
message: pred_str + ' is not a valid predicate'
};
}
return pair;
}
var format_char = options.format_char || '|';
if (format_char != ':' && format_char != '|') {
throw {
name: 'ConfigurationError',
message: 'Only format characters : and | are accepted'
};
}
var meta = options.meta || '{}';
var n = meta.length;
if (n % 2 == 1) {
throw {
name: 'ConfigurationError',
message: meta + ' has an odd number of metacharacters'
};
}
var meta_left = meta.substring(0, n/2);
var meta_right = meta.substring(n/2, n);
var token_re = _MakeTokenRegex(meta_left, meta_right);
var current_block = _Section({});
var stack = [current_block];
var strip_num = meta_left.length; // assume they're the same length
var token_match;
var last_index = 0;
while (true) {
token_match = token_re.exec(template_str);
if (token_match === null) {
break;
} else {
var token = token_match[0];
}
// Add the previous literal to the program
if (token_match.index > last_index) {
var tok = template_str.slice(last_index, token_match.index);
current_block.Append(tok);
}
last_index = token_re.lastIndex;
var had_newline = false;
if (token.slice(-1) == '\n') {
token = token.slice(null, -1);
had_newline = true;
}
token = token.slice(strip_num, -strip_num);
if (token.charAt(0) == '#') {
continue; // comment
}
if (token.charAt(0) == '.') { // Keyword
token = token.substring(1, token.length);
var literal = {
'meta-left': meta_left,
'meta-right': meta_right,
'space': ' ',
'tab': '\t',
'newline': '\n'
}[token];
if (literal !== undefined) {
current_block.Append(literal);
continue;
}
var new_block, func;
var section_match = token.match(_SECTION_RE);
if (section_match) {
var repeated = section_match[1];
var section_name = section_match[3];
if (repeated) {
func = _DoRepeatedSection;
new_block = _RepeatedSection({section_name: section_name});
} else {
func = _DoSection;
new_block = _Section({section_name: section_name});
}
current_block.Append([func, new_block]);
stack.push(new_block);
current_block = new_block;
continue;
}
var pred_str, pred;
// Check {.or pred?} before {.pred?}
var or_match = token.match(_OR_RE);
if (or_match) {
pred_str = or_match[1];
pred = pred_str ? GetPredicate(pred_str) : null;
current_block.NewOrClause(pred);
continue;
}
// Match either {.pred?} or {.if pred?}
var matched = false;
var if_match = token.match(_IF_RE);
if (if_match) {
pred_str = if_match[1];
matched = true;
} else if (token.charAt(token.length-1) == '?') {
pred_str = token;
matched = true;
}
if (matched) {
pred = pred_str ? GetPredicate(pred_str) : null;
new_block = _PredicateSection();
new_block.NewOrClause(pred);
current_block.Append([_DoPredicates, new_block]);
stack.push(new_block);
current_block = new_block;
continue;
}
if (token == 'alternates with') {
current_block.AlternatesWith();
continue;
}
if (token == 'end') {
// End the block
stack.pop();
if (stack.length > 0) {
current_block = stack[stack.length-1];
} else {
throw {
name: 'TemplateSyntaxError',
message: 'Got too many {end} statements'
};
}
continue;
}
}
// A variable substitution
var parts = token.split(format_char);
var formatters;
var name;
if (parts.length == 1) {
if (default_formatter === null) {
throw {
name: 'MissingFormatter',
message: 'This template requires explicit formatters.'
};
}
// If no formatter is specified, use the default.
formatters = [GetFormatter(default_formatter)];
name = token;
} else {
formatters = [];
for (var j=1; j<parts.length; j++) {
formatters.push(GetFormatter(parts[j]));
}
name = parts[0];
}
current_block.Append([_DoSubstitute, {name: name, formatters: formatters}]);
if (had_newline) {
current_block.Append('\n');
}
}
// Add the trailing literal
current_block.Append(template_str.slice(last_index));
if (stack.length !== 1) {
throw {
name: 'TemplateSyntaxError',
message: 'Got too few {end} statements'
};
}
return current_block;
}
// The Template class is defined in the traditional style so that users can add
// methods by mutating the prototype attribute. TODO: Need a good idiom for
// inheritance without mutating globals.
function Template(template_str, options) {
// Add 'new' if we were not called with 'new', so prototyping works.
if(!(this instanceof Template)) {
return new Template(template_str, options);
}
this._options = options || {};
this._program = _Compile(template_str, this._options);
};
Template.prototype.render = function(data_dict, callback) {
var context = _ScopedContext(data_dict, this._options);
_Execute(this._program.Statements(), context, callback);
};
Template.prototype.expand = function(data_dict) {
var tokens = [];
this.render(data_dict, function(x) { tokens.push(x); });
return tokens.join('');
};
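  // Illustrative use (this mirrors how collection.js instantiates it):
  //   var t = jsontemplate.Template('Hello {name}', {});
  //   t.expand({name: 'world'});   // -> 'Hello world'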
// fromString is a construction method that allows metadata to be written at the
// beginning of the template string. See Python's FromFile for a detailed
// description of the format.
//
// The argument 'options' takes precedence over the options in the template, and
// can be used for non-serializable options like template formatters.
var OPTION_RE = /^([a-zA-Z\-]+):\s*(.*)/;
var OPTION_NAMES = [
'meta', 'format-char', 'default-formatter', 'undefined-str', 'undefined-callable'];
// Use this "linear search" instead of Array.indexOf, which is nonstandard
var OPTION_NAMES_RE = new RegExp(OPTION_NAMES.join('|'));
function fromString(s, options) {
var parsed = {};
var begin = 0, end = 0;
while (true) {
var parsedOption = false;
end = s.indexOf('\n', begin);
if (end == -1) {
break;
}
var line = s.slice(begin, end);
begin = end+1;
var match = line.match(OPTION_RE);
if (match !== null) {
var name = match[1].toLowerCase(), value = match[2];
if (name.match(OPTION_NAMES_RE)) {
name = name.replace('-', '_');
value = value.replace(/^\s+/, '').replace(/\s+$/, '');
if (name == 'default_formatter' && value.toLowerCase() == 'none') {
value = null;
}
parsed[name] = value;
parsedOption = true;
}
}
if (!parsedOption) {
break;
}
}
// TODO: This doesn't enforce the blank line between options and template, but
// that might be more trouble than it's worth
  // Note: (parsed !== {}) is always true in JavaScript, since two distinct
  // objects never compare equal; test whether any option was parsed instead.
  var body;
  if (Object.keys(parsed).length !== 0) {
    body = s.slice(begin);
  } else {
    body = s;
  }
for (var o in options) {
parsed[o] = options[o];
}
return Template(body, parsed);
}
// We just export one name for now, the Template "class".
// We need HtmlEscape in the browser tests, so might as well export it.
return {
Template: Template, HtmlEscape: HtmlEscape,
FunctionRegistry: FunctionRegistry, SimpleRegistry: SimpleRegistry,
CallableRegistry: CallableRegistry, ChainedRegistry: ChainedRegistry,
fromString: fromString,
// Private but exposed for testing
_Section: _Section
};
}();

// Source file: zeam.jsontemplate-0.1, src/zeam/jsontemplate/resources/json-template.js
from zope.interface import Interface, Attribute
class IBatchBehavior(Interface):
"""A basic batch behavior.
"""
    start = Attribute(
        u"Starting index of the batch")
    count = Attribute(
        u"Number of elements in a batch")
data = Attribute(
u"Data to be batched (source)")
name = Attribute(
u"Name of the batch")
    factory = Attribute(
        u"Factory used to create elements returned from the batch")
previous = Attribute(
u"Previous index or None")
next = Attribute(
u"Next index or None")
    def all():
        """Returns an iterator of (starting index, page number) pairs
        for the batch.
        """

    def batch_length():
        """Returns the number of available pages in the batch.
        """
def __getitem__(index):
"""Return item at index in the current batch view.
"""
def __iter__():
"""Returns an iterator on content in the current batch view.
"""
    def __len__():
        """Returns the number of items in the current batch view.
        """
class IBatch(IBatchBehavior):
"""A batch object.
"""
first = Attribute(u"First element")
last = Attribute(u"Last element")
class IActiveBatch(IBatchBehavior):
"""An active batch call a callback in order to get items for the
current selection, instead of iterating over an existing sequence.
"""
class IDateBatch(IActiveBatch):
"""Batch element by date.
"""
min = Attribute(u"Minimal date where to stop the batch")
max = Attribute(u"Maximal date where to stop the batch")
class IAlphabeticalBatch(IActiveBatch, IBatch):
"""Batch element by letter.
"""
letters = Attribute(u"List of letters the batch will iterate through")
class IBatching(Interface):
"""Used to render a batch.
"""
keep_query_string = Attribute(u"Should query string be kept")
batch = Attribute(u"Iterate through each batch navigation entry.")
batch_previous = Attribute(u"Previous batch/navigation entry")
# Cannot call batch_next next because of a Chameleon issue
    batch_next = Attribute(u"Next batch/navigation entry")

# Source file: zeam.utils.batch-1.1, src/zeam/utils/batch/interfaces.py
from zeam.utils.batch.interfaces import IBatch, IActiveBatch
from zope.interface import implements
class BatchBaseIterator(object):
"""An iterator on Batch object.
"""
def __init__(self, context):
self.context = context
self.start = 0
def __iter__(self):
return self
class BatchItemIterator(BatchBaseIterator):
"""Return the next object in the Batch iterator.
"""
def __init__(self, context, factory=None):
super(BatchItemIterator, self).__init__(context)
self.factory = factory
def next(self):
try:
element = self.context[self.start]
except IndexError:
raise StopIteration
self.start += 1
return element
class BatchIndiceIterator(BatchBaseIterator):
    """Return the next index in the Batch iterator.
    """
def next(self):
last = self.context.count * self.context.batch_length()
if not last:
raise StopIteration
if self.start < last:
value = self.start
self.start += self.context.count
return (value, value / self.context.count + 1)
raise StopIteration
class Batch(object):
"""A simple batch object.
"""
implements(IBatch)
def __init__(
self, collection,
start=0, count=10, name='', request=None, factory=None):
if request is not None:
key = 'bstart'
if name:
key += '_' + name
try:
start = int(request.form.get(key, 0))
except (ValueError, TypeError):
pass
self.start = start
self.count = count
self.data = collection
self.name = name
self.factory = factory
def _setData(self, data):
self._data = data
self._end = len(self._data)
if not self.count or self.count > self._end:
# self._count is the effective count to use.
self._count = self._end
else:
self._count = self.count
def _getData(self):
return self._data
data = property(_getData, _setData)
    def __getitem__(self, index):
        if index < 0 or index >= self._count:
            raise IndexError("invalid index")
element = self.data[self.start + index]
if self.factory is not None:
return self.factory(element)
return element
def batch_length(self):
if not self.count:
return 0
last = self._end % self.count
if last:
last = 1
return (self._end / self.count) + last
def __iter__(self):
return BatchItemIterator(self, factory=self.factory)
def __len__(self):
return max(min(self._end - self.start, self._count), 0)
def all(self):
return BatchIndiceIterator(self)
@property
def first(self):
return 0
@property
def previous(self):
previous = self.start - self.count
if previous < 0:
return None
return previous
    @property
    def last(self):
        length = self.batch_length()
        if not length:
            return 0
        return (length - 1) * self.count
@property
def next(self):
next = self.start + self.count
if next >= (self.count * self.batch_length()):
return None
return next
class ActiveBatch(object):
implements(IActiveBatch)
def __init__(
self, collection,
start=None, count=None, name='', request=None, factory=None):
self.start = start
self.name = name
self.factory = factory
self.count = count
self._setData(collection)
def _setData(self, collection):
self._data = list(collection(self.start))
self._count = len(self._data)
def _getData(self):
return self._data
data = property(_getData, _setData)
def __getitem__(self, index):
if index < 0 or index >= self._count:
raise IndexError("invalid index")
element = self._data[index]
if self.factory is not None:
return self.factory(element)
return element
def __iter__(self):
return BatchItemIterator(self, factory=self.factory)
def __len__(self):
return self._count | zeam.utils.batch | /zeam.utils.batch-1.1.tar.gz/zeam.utils.batch-1.1/src/zeam/utils/batch/batch.py | batch.py |
================
zeam.utils.batch
================
This package provides batch functionality for Zope 2, Zope 3 and Grok.
.. contents::
Example
=======
A very straightforward example. We need to define a context to work
on::
>>> import grokcore.view as grok
>>> from persistent import Persistent
>>> from zope.component import queryMultiAdapter
>>> from zeam.utils.batch import Batch
>>> from zeam.utils.batch.interfaces import IBatching
>>> class Content(Persistent):
... pass
And now, you can define a view which uses a batch, and render it::
>>> class MyViewClass(grok.View):
... grok.context(Content)
...
... def update(self):
... fulllist = [1, 2, 3, 4, 5, 6, 7, 8, 9]
... self.myitems = Batch(
... fulllist , count=3, name='nbs', request=self.request,
... factory=lambda x: str(x))
...
... self.batch = queryMultiAdapter(
... (self, self.myitems, self.request),
... IBatching)()
...
... template = grok.PageTemplate('''
... <tal:navigation tal:replace="structure view/batch" />
... <span tal:content="item" tal:repeat="item view/myitems" />
... <tal:navigation tal:replace="structure view/batch" />
... ''')
And this works::
>>> from grokcore.component import testing
>>> testing.grok_component("view", MyViewClass)
True
>>> from zope.publisher.browser import TestRequest
>>> request = TestRequest()
>>> root = getRootFolder()
>>> root['myObject'] = Content()
>>> myobj = root['myObject']
>>> view = queryMultiAdapter((myobj, request), name="myviewclass")
>>> "batchNav" in view()
True
API
===
``Batch``
This object implements the batch.
The batch object is instantiated with the following arguments:
- a list of the objects to batch
- the request
- the number of items per page (as count, defaults to 10)
- a name (optional)
- a factory that will be passed each item before each iteration (optional)
The batch is an iterable object behaving like a list.
It only gives access to the set of objects for the current page.
It provides the number of pages generated and the current position.
Please refer to the interface for more information.
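
For instance, a minimal sketch (reusing the ``request`` defined in the
example above)::

  >>> items = Batch(range(100), request=request, count=10, name='items')
  >>> len(items)
  10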
A multi adapter providing ``IBatching`` can render the batch.
It adapts the context, the batch object and the request. The __call__
method of this component will return a snippet of HTML containing
basic controls for your batch: a next and previous link and a direct
access to the other pages.
``DateBatch``
This object implements a batch over a date range. It follows the same
API as the regular batch, except that:

- the list of objects is replaced by a callable that takes a
  datetime value as parameter and returns the list of objects for the
  given period
- the count option is changed to use either the ``BATCH_DAY`` or
``BATCH_MONTH`` marker object.
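
A minimal usage sketch (assuming the entries are fetched by a callable;
here it simply returns an empty list for every period)::

  >>> from zeam.utils.batch.date.batch import DateBatch, BATCH_MONTH
  >>> def get_entries(date):
  ...     return []
  >>> date_batch = DateBatch(get_entries, count=BATCH_MONTH)
  >>> date_batch.batch_length()
  12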
| zeam.utils.batch | /zeam.utils.batch-1.1.tar.gz/zeam.utils.batch-1.1/src/zeam/utils/batch/README.txt | README.txt |
from urllib import urlencode
import megrok.pagetemplate
import grokcore.component as grok
from zope.cachedescriptors.property import CachedProperty
from zope.interface import Interface
from zope.publisher.interfaces.http import IHTTPRequest
from zope.traversing.browser import absoluteURL
from zope.traversing.interfaces import ITraversable
from zeam.utils.batch.interfaces import IBatch, IBatching
class BasicBatching(grok.MultiAdapter):
grok.baseclass()
grok.implements(IBatching)
grok.provides(IBatching)
keep_query_string = True
def __init__(self, context, batch, request):
self.context = context
self.request = request
self._batch = batch
def __call__(self):
template = megrok.pagetemplate.getPageTemplate(self, self.request)
if template is None:
return u""
return template()
@CachedProperty
def url(self):
return absoluteURL(self.context, self.request)
@CachedProperty
def query_string(self):
params = self.request.form.copy()
for key in params.keys():
if key.startswith('bstart'):
del params[key]
return urlencode(params)
def default_namespace(self):
namespace = {}
namespace['context'] = self.context
namespace['request'] = self.request
namespace['batch'] = self
return namespace
def namespace(self):
namespace = {}
return namespace
def _create_link(self, position):
def append_qs(url):
if not self.keep_query_string:
return url
if self.query_string:
return "%s?%s" % (url, self.query_string,)
return url
if not position:
return append_qs(self.url)
if self._batch.name:
return append_qs("%s/++batch++%s+%s" % (
self.url, self._batch.name, position))
return append_qs("%s/++batch++%s" % (self.url, position))
@property
def batch_length(self):
return self._batch.batch_length()
@property
def batch(self):
raise NotImplementedError
@property
def batch_next(self):
raise NotImplementedError
@property
def batch_previous(self):
raise NotImplementedError
class Batching(BasicBatching):
"""View object on batched elements.
"""
grok.adapts(Interface, IBatch, IHTTPRequest)
@property
def batch(self):
end = self._batch.batch_length()
if end > 1:
count = 0
wanted = self._batch.start / self._batch.count
ldots = False
for pos, item in self._batch.all():
if (((count > 2) and (count < (wanted - 3))) or
((count < (end - 3)) and (count > (wanted + 3)))):
if not ldots:
ldots = True
yield dict(name=None, url=None, style=None)
else:
ldots = False
url_item = self._create_link(pos)
current_item = (pos == self._batch.start)
style = current_item and 'current' or None
yield dict(name=item, url=url_item, style=style)
count += 1
@property
def batch_previous(self):
previous = self._batch.previous
avail = not (previous is None)
return avail and dict(url=self._create_link(previous)) or None
@property
def batch_next(self):
next = self._batch.next
avail = not (next is None)
return avail and dict(url=self._create_link(next)) or None
@property
def batch_first(self):
return dict(url=self._create_link(self._batch.first))
@property
def batch_last(self):
return dict(url=self._create_link(self._batch.last))
class BatchPages(megrok.pagetemplate.PageTemplate):
megrok.pagetemplate.view(Batching)
class Namespace(grok.MultiAdapter):
"""Make batch works with namespace.
"""
grok.name('batch')
grok.provides(ITraversable)
grok.adapts(Interface, IHTTPRequest)
def __init__(self, context, request):
self.context = context
self.request = request
def traverse(self, name, ignored):
if '+' in name:
key, value = name.split('+')
key = 'bstart_' + key
else:
key = 'bstart'
value = name
self.request.form[key] = value
return self.context | zeam.utils.batch | /zeam.utils.batch-1.1.tar.gz/zeam.utils.batch-1.1/src/zeam/utils/batch/views.py | views.py |
from datetime import datetime
from zeam.utils.batch.interfaces import IDateBatch
from zeam.utils.batch.batch import ActiveBatch
from zope.interface import implements
# Batch granularity markers: one batch entry per day or per month.
BATCH_DAY = object()
BATCH_MONTH = object()
class DateBatch(ActiveBatch):
implements(IDateBatch)
def __init__(
self, collection,
start=None, count=BATCH_MONTH, name='', request=None, factory=None,
min=None, max=None):
if request is not None:
key = 'bstart'
if name:
key += '_' + name
if key in request.form:
try:
start = datetime.strptime(request.form[key], '%Y-%m')
except (ValueError, TypeError):
pass
if start is None:
start = datetime.now()
self.min = min
self.max = max
super(DateBatch, self).__init__(
collection,
start=start, count=count, name=name,
request=request, factory=factory)
    def all(self):
        start = 1
        if self.min is not None:
            if self.min.year == self.start.year:
                # The minimal date falls within the displayed year.
                start = self.min.month
            elif self.min.year > self.start.year:
                # The displayed year lies entirely before the minimal
                # date: there are no months to display.
                start = 13
        end = 13
        if self.max is not None:
            if self.max.year == self.start.year:
                # The maximal date falls within the displayed year.
                end = self.max.month + 1
            elif self.max.year < self.start.year:
                # The displayed year lies entirely after the maximal
                # date: there are no months to display.
                end = 1
        for month in range(start, end):
            yield datetime(self.start.year, month, 1)
def batch_length(self):
return 12
    @property
    def previous(self):
        if self.min is not None and self.min.year >= self.start.year:
            # The previous year would fall before the minimal date.
            return None
        if self.max is not None and self.max.year < self.start.year:
            # The displayed year is already past the maximal date.
            return None
        return datetime(self.start.year - 1, 12, 1)
    @property
    def next(self):
        if self.max is not None and self.max.year <= self.start.year:
            # The next year would fall after the maximal date.
            return None
        if self.min is not None and self.min.year > self.start.year:
            # The displayed year is still before the minimal date.
            return None
        return datetime(self.start.year + 1, 1, 1)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
""" Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
"""
def __init__(self, mu=0, sigma=1):
Distribution.__init__(self, mu, sigma)
def calculate_mean(self):
"""Function to calculate the mean of the data set.
Args:
None
Returns:
float: mean of the data set
"""
avg = 1.0 * sum(self.data) / len(self.data)
self.mean = avg
return self.mean
def calculate_stdev(self, sample=True):
"""Function to calculate the standard deviation of the data set.
Args:
sample (bool): whether the data represents a sample or population
Returns:
float: standard deviation of the data set
"""
if sample:
n = len(self.data) - 1
else:
n = len(self.data)
mean = self.calculate_mean()
sigma = 0
for d in self.data:
sigma += (d - mean) ** 2
sigma = math.sqrt(sigma / n)
self.stdev = sigma
return self.stdev
def plot_histogram(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.hist(self.data)
plt.title('Histogram of Data')
plt.xlabel('data')
plt.ylabel('count')
def pdf(self, x):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)
def plot_histogram_pdf(self, n_spaces = 50):
"""Function to plot the normalized histogram of the data and a plot of the
probability density function along the same range
Args:
n_spaces (int): number of data points
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
mu = self.mean
sigma = self.stdev
min_range = min(self.data)
max_range = max(self.data)
# calculates the interval between x values
interval = 1.0 * (max_range - min_range) / n_spaces
x = []
y = []
# calculate the x values to visualize
for i in range(n_spaces):
tmp = min_range + interval*i
x.append(tmp)
y.append(self.pdf(tmp))
# make the plots
fig, axes = plt.subplots(2,sharex=True)
fig.subplots_adjust(hspace=.5)
axes[0].hist(self.data, density=True)
axes[0].set_title('Normed Histogram of Data')
axes[0].set_ylabel('Density')
axes[1].plot(x, y)
axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        axes[1].set_ylabel('Density')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Gaussian distributions
Args:
other (Gaussian): Gaussian instance
Returns:
Gaussian: Gaussian distribution
"""
result = Gaussian()
result.mean = self.mean + other.mean
result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
return result
def __repr__(self):
"""Function to output the characteristics of the Gaussian instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}".format(self.mean, self.stdev) | zeanr-distributions | /zeanr_distributions-0.1.tar.gz/zeanr_distributions-0.1/zeanr_distributions/Gaussiandistribution.py | Gaussiandistribution.py |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
""" Binomial distribution class for calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
        p (float) representing the probability of an event occurring
        n (int) number of trials
"""
def __init__(self, prob=.5, size=20):
self.n = size
self.p = prob
Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())
def calculate_mean(self):
"""Function to calculate the mean from p and n
Args:
None
Returns:
float: mean of the data set
"""
self.mean = self.p * self.n
return self.mean
def calculate_stdev(self):
"""Function to calculate the standard deviation from p and n.
Args:
None
Returns:
float: standard deviation of the data set
"""
self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
return self.stdev
def replace_stats_with_data(self):
"""Function to calculate p and n from the data set
Args:
None
Returns:
float: the p value
float: the n value
"""
self.n = len(self.data)
self.p = 1.0 * sum(self.data) / len(self.data)
self.mean = self.calculate_mean()
self.stdev = self.calculate_stdev()
return self.p, self.n
def plot_bar(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
plt.title('Bar Chart of Data')
plt.xlabel('outcome')
plt.ylabel('count')
    def pdf(self, k):
        """Probability density function calculator for the binomial distribution.
        Args:
            k (int): number of successes for which to calculate the probability
        Returns:
            float: probability density function output
        """
a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
b = (self.p ** k) * (1 - self.p) ** (self.n - k)
return a * b
def plot_bar_pdf(self):
"""Function to plot the pdf of the binomial distribution
Args:
None
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
x = []
y = []
# calculate the x values to visualize
for i in range(self.n + 1):
x.append(i)
y.append(self.pdf(i))
# make the plots
plt.bar(x, y)
plt.title('Distribution of Outcomes')
plt.ylabel('Probability')
plt.xlabel('Outcome')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Binomial distributions with equal p
Args:
other (Binomial): Binomial instance
Returns:
Binomial: Binomial distribution
"""
        assert self.p == other.p, 'p values are not equal'
result = Binomial()
result.n = self.n + other.n
result.p = self.p
result.calculate_mean()
result.calculate_stdev()
return result
def __repr__(self):
"""Function to output the characteristics of the Binomial instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}, p {}, n {}".\
format(self.mean, self.stdev, self.p, self.n) | zeanr-distributions | /zeanr_distributions-0.1.tar.gz/zeanr_distributions-0.1/zeanr_distributions/Binomialdistribution.py | Binomialdistribution.py |
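# A minimal usage sketch (illustrative only).
if __name__ == '__main__':
    binomial = Binomial(prob=0.4, size=20)
    print(binomial.calculate_mean())   # 8.0
    print(binomial.calculate_stdev())  # sqrt(20 * 0.4 * 0.6) ~= 2.19
    print(binomial.pdf(5))             # probability of exactly 5 successes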
# zebe-data-service
#### Project Introduction
zebe-data-service

#### Software Architecture
Software architecture description

#### Installation
1. xxxx
2. xxxx
3. xxxx

#### Usage
1. xxxx
2. xxxx
3. xxxx

#### Contributing
1. Fork this repository
2. Create a Feat_xxx branch
3. Commit your code
4. Open a Pull Request

#### Gitee Features
1. Use Readme\_XXX.md to support different languages, such as Readme\_en.md and Readme\_zh.md
2. The official Gitee blog: [blog.gitee.com](https://blog.gitee.com)
3. Visit [https://gitee.com/explore](https://gitee.com/explore) to discover excellent open source projects on Gitee
4. [GVP](https://gitee.com/gvp) stands for Gitee Most Valuable Project: excellent open source projects selected by Gitee
5. The official Gitee user manual: [http://git.mydoc.io/](http://git.mydoc.io/)
6. Gitee Cover People is a column showcasing Gitee members: [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/)
from datetime import datetime
from sqlalchemy import Column, String, Integer, SmallInteger, Text, Float, DateTime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from zebe.config.db import PROJECT_DB
engine = create_engine('sqlite:///' + PROJECT_DB, echo=True)
ProjectModelBase = declarative_base()
# Project
class Project(ProjectModelBase):
    __tablename__ = 'project'
    id = Column(Integer, primary_key=True, autoincrement=True, index=True)  # ID
    name = Column(String(40), default="", nullable=False)  # Name
    type = Column(String(10), default="", nullable=False, index=True)  # Type
    is_main = Column(SmallInteger, default=0, nullable=False, index=True)  # Main project or not (0=no, 1=yes)
    description = Column(Text, default="", nullable=False)  # Description
    year = Column(SmallInteger, default=0, nullable=False, index=True)  # Year
    relative_url = Column(String(300), default="", nullable=False)  # Related URL
    cover = Column(String(300), default="", nullable=False)  # Cover image
    total_task = Column(Integer, default=0, nullable=False)  # Total number of tasks
    finished_task = Column(Integer, default=0, nullable=False)  # Number of finished tasks
    estimated_time = Column(Integer, default=-1, nullable=False)  # Estimated duration (minutes)
    progress = Column(Float, default=0.0, nullable=False)  # Progress
    create_time = Column(DateTime, default=datetime.now, nullable=False)  # Creation time (callable, evaluated on insert)
    finish_time = Column(DateTime)  # Finish time
    def __repr__(self):
        return "<%s(name='%s')>" % (self.__class__.__name__, self.name)
# Task
class Task(ProjectModelBase):
    __tablename__ = 'task'
    id = Column(Integer, primary_key=True, autoincrement=True, index=True)  # ID
    title = Column(String(50), default="", nullable=False)  # Title
    project_id = Column(Integer, default=0, nullable=False, index=True)  # ID of the project it belongs to
    estimated_time = Column(Integer, default=-1, nullable=False)  # Estimated duration (minutes)
    status = Column(SmallInteger, default=0, nullable=False, index=True)  # Status (0=unfinished, 1=finished)
    idx_order = Column(Integer, default=0, nullable=False)  # Sort order
    is_assigned = Column(Integer, default=0, nullable=False)  # Whether already assigned
    year = Column(SmallInteger, default=0, nullable=False, index=True)  # Year
    create_time = Column(DateTime, default=datetime.now, nullable=False)  # Creation time (callable, evaluated on insert)
    start_time = Column(DateTime)  # Start time
    end_time = Column(DateTime)  # End time
    finish_time = Column(DateTime)  # Finish time
    def __repr__(self):
        return "<%s(title='%s')>" % (self.__class__.__name__, self.title)
# Automatic daily task
class AutoDailyTask(ProjectModelBase):
    __tablename__ = 'auto_daily_task'
    id = Column(Integer, primary_key=True, autoincrement=True, index=True)  # ID
    title = Column(String(50), default="", nullable=False)  # Title
    is_main = Column(SmallInteger, default=0, nullable=False, index=True)  # Main task or not (0=no, 1=yes)
    task_id = Column(Integer)  # ID of the task it belongs to
    project_id = Column(Integer)  # ID of the project it belongs to
    estimated_time = Column(Integer, default=-1, nullable=False)  # Estimated duration (minutes)
    status = Column(SmallInteger, default=0, nullable=False, index=True)  # Status (0=unfinished, 1=finished)
    idx_order = Column(Integer, default=0, nullable=False)  # Sort order
    year = Column(SmallInteger, default=0, nullable=False, index=True)  # Year
    date = Column(String(10), default="", nullable=False, index=True)  # Date
    create_time = Column(DateTime, default=datetime.now, nullable=False)  # Creation time (callable, evaluated on insert)
    finish_time = Column(DateTime)  # Finish time
    delay_count = Column(SmallInteger, default=0, nullable=False)  # Number of postponements
    def __repr__(self):
        return "<%s(title='%s')>" % (self.__class__.__name__, self.title)
ProjectModelBase.metadata.create_all(engine)
ProjectModelSession = sessionmaker(bind=engine) | zebe-data-service | /zebe-data-service-1.7.0.tar.gz/zebe-data-service-1.7.0/zebe/modle/entity/project.py | project.py |
from datetime import datetime
from sqlalchemy import Column, String, DateTime, Integer, Text
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from zebe.config.db import WECHAT_DB
engine = create_engine('sqlite:///' + WECHAT_DB, echo=True)
WeChatModelBase = declarative_base()
# WeChat user of the MiaoJi (秒记先生) official account
class WeChatMiaoJiUser(WeChatModelBase):
    __tablename__ = 'wechat_miaoji_user'
    id = Column(String(28), primary_key=True)  # ID (same as the openId)
    realname = Column(String(20), nullable=False, default='')  # Real name
    nickname = Column(String(32), nullable=False, default='')  # Nickname
    wxid = Column(String(20), nullable=False, default='')  # WeChat ID
    follow_time = Column(DateTime, nullable=False, default=datetime.now)  # Follow time
    unfollow_time = Column(DateTime)  # Unfollow time
    def __repr__(self):
        return "<%s(id='%s')>" % (self.__class__.__name__, self.id)
# Message sent by a MiaoJi WeChat user
class WeChatMiaoJiUserMessage(WeChatModelBase):
    __tablename__ = 'wechat_miaoji_user_msg'
    id = Column(Integer, primary_key=True)  # ID
    type = Column(String(10), default="", nullable=False, index=True)  # Message type
    content = Column(Text, default="", nullable=False)  # Text content
    voice = Column(Text, default="", nullable=False)  # Voice content
    link = Column(String(300), default="", nullable=False)  # Link
    source = Column(String(40), default="", nullable=False)  # Message source
    reply = Column(Text, default="", nullable=False)  # Reply content
    create_time = Column(DateTime, default=datetime.now, nullable=False)  # Send time
    def __repr__(self):
        return "<%s(content='%s')>" % (self.__class__.__name__, self.content)
# Zebe's personal WeChat user
class WeChatZebeUser(WeChatModelBase):
    __tablename__ = 'wechat_zebe_user'
    id = Column(String(28), primary_key=True)  # ID (same as the openId)
    realname = Column(String(20), nullable=False, default='')  # Real name
    nickname = Column(String(32), nullable=False, default='')  # Nickname
    wxid = Column(String(20), nullable=False, default='')  # WeChat ID
    follow_time = Column(DateTime, nullable=False, default=datetime.now)  # Follow time
    unfollow_time = Column(DateTime)  # Unfollow time
    def __repr__(self):
        return "<%s(id='%s')>" % (self.__class__.__name__, self.id)
# Message sent by a Zebe personal WeChat user
class WeChatZebeUserMessage(WeChatModelBase):
    __tablename__ = 'wechat_zebe_user_msg'
    id = Column(Integer, primary_key=True)  # ID
    type = Column(String(10), default="", nullable=False, index=True)  # Message type
    content = Column(Text, default="", nullable=False)  # Text content
    voice = Column(Text, default="", nullable=False)  # Voice content
    link = Column(String(300), default="", nullable=False)  # Link
    source = Column(String(40), default="", nullable=False)  # Message source
    reply = Column(Text, default="", nullable=False)  # Reply content
    create_time = Column(DateTime, default=datetime.now, nullable=False)  # Send time
    def __repr__(self):
        return "<%s(content='%s')>" % (self.__class__.__name__, self.content)
WeChatModelBase.metadata.create_all(engine)
WechatModelSession = sessionmaker(bind=engine) | zebe-data-service | /zebe-data-service-1.7.0.tar.gz/zebe-data-service-1.7.0/zebe/modle/entity/wechat.py | wechat.py |
import logging
from sqlalchemy import func
class ModelBaseService:
    # Initialize with the entity class and a session factory
    def __init__(self, entity, session):
        self.entity = entity
        self.session = session()
    # Check whether the given object is a valid instance of this entity type
    def __is_valid_entity(self, entity):
        return entity is not None and isinstance(entity, self.entity)
    # Get the public fields of an entity instance
    @staticmethod
    def __get_public_fields_of_entity(entity):
        public_fields = []
        if hasattr(entity, '_sa_instance_state'):
            keys = entity._sa_instance_state.attrs._data.keys()
            for field in keys:
                public_fields.append(field)
        logging.info('Got the public fields of the entity: ' + str(public_fields))
        return public_fields
    # Add one record
    def add(self, entity):
        if self.__is_valid_entity(entity):
            self.session.add(entity)
            self.session.commit()
            self.session.close()
            logging.info('Saved successfully')
        else:
            logging.warning('Not an instance of this entity type, cannot add')
    # Add several records at once
    def multi_add(self, entity_list):
        if entity_list is not None and isinstance(entity_list, list):
            for entity in entity_list:
                if self.__is_valid_entity(entity):
                    self.session.add(entity)
            self.session.commit()
            self.session.close()
        else:
            logging.warning('Not a list, cannot add')
    # Update one record
    def update(self, entity):
        if self.__is_valid_entity(entity):
            session_entity = self.session.query(self.entity).filter(self.entity.id == entity.id).one_or_none()
            if session_entity is not None:
                entity_fields = self.__get_public_fields_of_entity(entity)
                for field in entity_fields:
                    # Skip the primary key (ID)
                    if field != 'id':
                        if hasattr(session_entity, field):
                            value = getattr(entity, field)
                            setattr(session_entity, field, value)
                            logging.debug('Set attribute [' + field + ']=' + str(value) + ' on the session entity')
                    else:
                        logging.debug('Skipped the primary key (ID)')
                self.session.commit()
                self.session.close()
                logging.info('Updated successfully')
            else:
                logging.error('Record with ID ' + str(entity.id) + ' does not exist, cannot update')
        else:
            logging.warning('Not an instance of this entity type, cannot update')
    # Count all records
    def count_all(self):
        total = self.session.query(self.entity).count()
        self.session.close()
        return total
    # Find one record by ID
    def find_one(self, data_id):
        result = None
        if data_id is not None:
            result = self.session.query(self.entity).filter(self.entity.id == data_id).one_or_none()
            self.session.close()
            logging.info('Find one record -> success')
        else:
            logging.warning('Find one record -> failed because the ID is empty')
        return result
    # Find one random record
    def find_one_random(self):
        result = self.session.query(self.entity).order_by(func.random()).limit(1).one_or_none()
        self.session.close()
        return result
    # Find several random records, excluding the given ID
    def find_many_random(self, amount, exclude_id):
        total = self.count_all()
        max_query = amount if amount <= total else total
        result_id_set = set()
        result_entity_list = []
        while True:
            if len(result_entity_list) == max_query:
                self.session.close()
                print('------------------ Found ' + str(max_query) + ' random records.')
                break
            else:
                result = self.session.query(self.entity).filter(self.entity.id != exclude_id).order_by(
                    func.random()).limit(1).one_or_none()
                if not (result.id in result_id_set):
                    result_entity_list.append(result)
                    result_id_set.add(result.id)
                    print('Found a random record: ' + str(result.id))
                else:
                    print('Found a random record: ' + str(result.id) + ', but its ID is a duplicate, searching again...')
        return result_entity_list
    # Find all records
    def find_all(self):
        result = self.session.query(self.entity).all()
        self.session.close()
        logging.info('Find all records -> success')
        return result
    # Find records by page
    def find_by_page(self, page, per_page):
        offset = (page - 1) * per_page
        result = self.session.query(self.entity).order_by(self.entity.id.asc()).offset(offset).limit(per_page).all()
        self.session.close()
        logging.info('Find records by page -> success')
        return result
    # Delete one record by instance
    def delete_one(self, entity):
        if self.__is_valid_entity(entity):
            self.session.query(self.entity).filter(self.entity.id == entity.id).delete()
            self.session.commit()
            self.session.close()
            logging.info('Delete one record by instance -> success')
        else:
            logging.warning('Not an instance of this entity type, cannot delete')
    # Delete one record by ID
    def delete_one_by_id(self, data_id):
        if data_id is not None:
            self.session.query(self.entity).filter(self.entity.id == data_id).delete()
            self.session.commit()
            self.session.close()
            logging.info('Delete one record by ID -> success')
        else:
            logging.warning('Delete one record by ID -> failed because the ID is empty')
    # Delete several records by a list of IDs
    def delete_many_by_ids(self, data_ids):
        if data_ids is not None:
            if isinstance(data_ids, list):
                data_list = self.session.query(self.entity).filter(self.entity.id.in_(data_ids))
                for data in data_list:
                    self.session.delete(data)
                self.session.commit()
                self.session.close()
                logging.info('Delete several records by IDs -> success')
            else:
                logging.warning('Delete several records by IDs -> failed because the argument is not a list')
        else:
            logging.warning('Delete several records by IDs -> failed because the IDs are empty')
    # Delete all records
    def delete_all(self):
        self.session.query(self.entity).delete(synchronize_session=False)
        self.session.commit()
        self.session.close()
        logging.info('Delete all records -> success')
from datetime import datetime
from zebe.modle.entity.diary import Diary, DiaryModelSession
from zebe.service.base import ModelBaseService
# Diary service
class DiaryService(ModelBaseService):
    def __init__(self):
        super().__init__(Diary, DiaryModelSession)
    def find_one_by_title(self, title):
        """
        Find one diary entry by title
        :param title: title
        :return:
        """
        result = None
        if title is not None:
            result = self.session.query(self.entity).filter(self.entity.title == title).one_or_none()
            self.session.close()
        return result
    def find_all_by_date(self, year, month, day):
        """
        Find all entries for a given date
        :param year: year
        :param month: month
        :param day: day
        :return:
        """
        result = self.session.query(self.entity).filter(self.entity.year == year, self.entity.month == month,
                                                        self.entity.day == day).all()
        self.session.close()
        return result
    def find_all_by_year(self, year):
        """
        Find all entries for a given year
        :param year: year
        :return:
        """
        result = self.session.query(self.entity).filter(self.entity.year == year).all()
        self.session.close()
        return result
    def find_all_by_month(self, year, month):
        """
        Find all entries for a given month
        :param year: year
        :param month: month
        :return:
        """
        result = self.session.query(self.entity).filter(self.entity.year == year, self.entity.month == month).all()
        self.session.close()
        return result
    def find_all_by_today(self):
        """
        Find all of today's entries
        :return:
        """
        now = datetime.now()
        year = now.year
        month = now.month
        day = now.day
        return self.find_all_by_date(year, month, day)
    def find_all_by_this_month(self):
        """
        Find all entries for the current month
        :return:
        """
        now = datetime.now()
        year = now.year
        month = now.month
        return self.find_all_by_month(year, month)
    def find_all_by_this_year(self):
        """
        Find all entries for the current year
        :return:
        """
        now = datetime.now()
        year = now.year
        return self.find_all_by_year(year)
    def find_all_order_by_date(self):
        """
        Find all entries, ordered by creation time (newest first)
        :return:
        """
        result = self.session.query(self.entity).order_by(self.entity.create_time.desc()).all()
        self.session.close()
        return result
    @staticmethod
    def get_total_learn_count(diary_list):
        """
        Compute the total number of learnings over a list of diary entries
        :param diary_list: list of diary entries
        :return:
        """
        total = 0
        if diary_list is not None:
            for diary in diary_list:
                total += diary.learned
        return total
from datetime import datetime
from zebe.utils.datetime_util import get_total_day_of_year, get_which_day_of_year, get_today_str, get_tomorrow_str
from zebe.modle.entity.project import Project, ProjectModelSession, AutoDailyTask, Task
from zebe.service.base import ModelBaseService
# Project service
class ProjectService(ModelBaseService):
    def __init__(self):
        super().__init__(Project, ProjectModelSession)
    # Find all projects by year and type
    def find_by_year_and_type(self, year, project_type):
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year), entity.type == project_type).all()
        self.session.close()
        return result
    # Find all projects by year
    def find_all_by_year(self, year):
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year)).all()
        self.session.close()
        return result
    # Find main projects by year
    def find_main_by_year(self, year):
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year), entity.is_main == 1).all()
        self.session.close()
        return result
    # Find secondary projects by year
    def find_secondary_by_year(self, year):
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year), entity.is_main == 0).all()
        self.session.close()
        return result
    # Count projects by year and type
    def count_by_year_and_type(self, year, project_type):
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year), entity.type == project_type).count()
        self.session.close()
        return result
    # Update the progress of a single project
    def update_progress(self, data_id, progress):
        entity = self.session.query(self.entity).filter(self.entity.id == data_id).one_or_none()
        if entity is not None:
            entity.progress = progress
            self.session.commit()
        self.session.close()
    # Find a project by name
    def find_one_by_name(self, name):
        result = self.session.query(self.entity).filter(self.entity.name == name).one_or_none()
        self.session.close()
        return result
    # Get the overall progress of a list of projects
    def get_progress_of_projects(self, project_list):
        progress = 0
        if project_list is not None and len(project_list) > 0:
            total_project = len(project_list)
            total_progress = 0
            for project in project_list:
                total_progress += project.progress
            progress = round(total_progress / total_project, 2)
        return progress
    # Get the total number of tasks of a list of projects
    def get_total_task_of_projects(self, project_list):
        total_task = 0
        if project_list is not None and len(project_list) > 0:
            for project in project_list:
                total_task += project.total_task
        return total_task
    # Get the total number of finished tasks of a list of projects
    def get_finished_task_of_projects(self, project_list):
        total_finished_task = 0
        if project_list is not None and len(project_list) > 0:
            for project in project_list:
                total_finished_task += project.finished_task
        return total_finished_task
# Task service
class TaskService(ModelBaseService):
    def __init__(self):
        super().__init__(Task, ProjectModelSession)
    # Count all tasks by year
    def count_all_by_year(self, year):
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year)).count()
        self.session.close()
        return result
    # Find all tasks by project
    def find_all_by_project(self, project_id):
        entity = self.entity
        result = self.session.query(entity).filter(entity.project_id == int(project_id)).all()
        self.session.close()
        return result
    # Count finished tasks by year
    def count_finished_by_year(self, year):
        # TODO: 1 is a magic number, replace it with a constant
        entity = self.entity
        result = self.session.query(entity).filter(entity.year == int(year), entity.status == 1).count()
        self.session.close()
        return result
    # Get the number of tasks that should be finished by a given day
    # (the yearly workload spread evenly over the days of the year)
    def get_task_should_finished_by_day(self, year, month, day):
        total_days_of_year = get_total_day_of_year(year)
        which_day_in_year = get_which_day_of_year(year, month, day)
        total_task_of_year = self.count_all_by_year(year)
        today_should_finished_task = int((total_task_of_year / total_days_of_year) * which_day_in_year)
        return today_should_finished_task
# Automatic daily task service
class AutoDailyTaskService(ModelBaseService):
    def __init__(self):
        super().__init__(AutoDailyTask, ProjectModelSession)
    def get_today_task_all(self):
        """
        Find all of today's tasks
        :return:
        """
        today = datetime.now().strftime("%Y-%m-%d")
        return self.find_all_by_date(today)
    def get_today_task_unfinished(self):
        """
        Find today's unfinished tasks
        :return:
        """
        today = datetime.now().strftime("%Y-%m-%d")
        return self.find_unfinished_by_date(today)
    def find_one_by_title_and_date(self, title, date):
        """
        Find one task by title and date
        :param title:
        :param date:
        :return:
        """
        entity = self.entity
        result = self.session.query(entity).filter(entity.title == title, entity.date == str(date)).one_or_none()
        self.session.close()
        return result
    def find_all_by_date(self, date):
        """
        Find all tasks for a given date
        :param date:
        :return:
        """
        entity = self.entity
        result = self.session.query(entity).filter(entity.date == str(date)).order_by(self.entity.status.asc(),
                                                                                      self.entity.is_main.desc(),
                                                                                      self.entity.idx_order.asc()).all()
        self.session.close()
        return result
    def find_all_unfinished(self):
        """
        Find all unfinished tasks
        :return:
        """
        entity = self.entity
        result = self.session.query(entity).filter(entity.status == 0).all()
        self.session.close()
        return result
    # Find unfinished tasks for a given date
    def find_unfinished_by_date(self, date):
        entity = self.entity
        result = self.session.query(entity).filter(entity.date == str(date), entity.status == 0).order_by(self.entity.status.asc(),
                                                                                                          self.entity.is_main.desc(),
                                                                                                          self.entity.idx_order.asc()).all()
        self.session.close()
        return result
    # Find one task by task ID
    def find_one_by_task(self, task_id):
        result = self.session.query(self.entity).filter(self.entity.task_id == task_id).one_or_none()
        self.session.close()
        return result
    # Delete all records for a given date
    def delete_all_by_date(self, date):
        self.session.query(self.entity).filter(self.entity.date == str(date)).delete(synchronize_session=False)
        self.session.commit()
        self.session.close()
    # Mark a task as finished
    def mark_finish(self, data_id):
        self.__mark_finish_or_unfinish(data_id, True)
    # Mark a task as unfinished
    def mark_unfinish(self, data_id):
        self.__mark_finish_or_unfinish(data_id, False)
    # Mark a task as finished or unfinished
    def __mark_finish_or_unfinish(self, data_id, finished):
        entity = self.find_one(data_id)
        if entity is not None:
            task_id = entity.task_id
            project_id = entity.project_id
            entity.status = 1 if finished else 0  # TODO: 1 and 0 are magic numbers, replace them with constants
            entity.finish_time = datetime.now() if finished else None
            self.update(entity)
            # If the daily task is tied to a project task, mark that task accordingly too
            if task_id is not None:
                task_service = TaskService()
                task = task_service.find_one(task_id)
                if task is not None:
                    task.status = 1 if finished else 0
                    task_service.update(task)
            # If the task belongs to a project, update the project's progress automatically
            if project_id is not None:
                project_service = ProjectService()
                project = project_service.find_one(project_id)
                if project is not None:
                    project.finished_task = project.finished_task + 1
                    project.progress = round((project.finished_task / project.total_task) * 100, 2)
                    project_service.update(project)
                    print("Task belongs to a project, project progress updated automatically")
            else:
                print("Task does not belong to any project, no progress update needed")
    def move_all_unfinished_task_to_today(self):
        """
        Move all unfinished tasks to today; each moved task's delay count is increased by 1
        :return:
        """
        task_list = self.find_all_unfinished()
        if len(task_list) > 0:
            today = get_today_str()
            for task in task_list:
                exist_task = self.find_one_by_title_and_date(task.title, today)
                if exist_task is None:
                    task.date = today
                    task.delay_count = task.delay_count + 1
                    print("Overdue task [" + task.title + "] has been moved to today's task list")
                    self.update(task)
    def move_today_unfinished_task_to_tomorrow(self):
        """
        Move today's unfinished tasks to tomorrow; each moved task's delay count is increased by 1
        :return:
        """
        task_list = self.get_today_task_unfinished()
        if len(task_list) > 0:
            tomorrow = get_tomorrow_str()
            for task in task_list:
                exist_task = self.find_one_by_title_and_date(task.title, tomorrow)
                if exist_task is None:
                    task.date = tomorrow
                    task.delay_count = task.delay_count + 1
                    print("Overdue task [" + task.title + "] has been moved to tomorrow's task list")
                    self.update(task)
                else:
                    print("Overdue task [" + task.title + "] already exists in tomorrow's task list")
# zebe-python-utils
#### Project Introduction
A collection of Zebe's personal Python utility classes. They are not guaranteed to work for everyone, and the utilities may be updated, optimized, or extended with new classes at any time, so please take note.

#### Software Architecture
Software architecture description

#### Installation
1. xxxx
2. xxxx
3. xxxx

#### Usage
1. xxxx
2. xxxx
3. xxxx

#### Contributing
1. Fork this repository
2. Create a Feat_xxx branch
3. Commit your code
4. Open a Pull Request

#### Gitee Features
1. Use Readme\_XXX.md to support different languages, such as Readme\_en.md and Readme\_zh.md
2. The official Gitee blog: [blog.gitee.com](https://blog.gitee.com)
3. Visit [https://gitee.com/explore](https://gitee.com/explore) to discover excellent open source projects on Gitee
4. [GVP](https://gitee.com/gvp) stands for Gitee Most Valuable Project: excellent open source projects selected by Gitee
5. The official Gitee user manual: [http://git.mydoc.io/](http://git.mydoc.io/)
6. Gitee Cover People is a column showcasing Gitee members: [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/)
# zebedee-py
A python library that makes building with the ZBD API easy and fast. To sign-up for the ZBD API, use https://dashboard.zebedee.io .
To get started, you'll need to create a project using using the Developer Dashboard. Once you have a project, grab the API key from the API key tab. We will now assume that you have an API Key.
## features
The Project class has all available endpoints for the [ZBD API](https://docs.zebedee.io/api/intro).
## example usage
Install the package using:
`pip install zebedee`
Now you're ready to get started!
Here's example code.
```
from zebedee import *
# create a new ZBD object with callback URL to get updates after there is a status update.
project_a = zebedee.Project("your_api_key", "https://your-website.com/zbd/webhook/")
# Call the get_wallet_details function to retrieve your wallet details
wallet_details = project_a.get_wallet_details()
# Create a new charge for 1 hour with an amount of 5000 msats and a description
charge_details = project_a.create_charge(amount_of_seconds_to_expire_after=3600, amount_msats=5000, description='Test Charge')
# Get the details of a charge with the specified ZBD ID
charge_id = charge_details["id"]
charge_details = project_a.get_charge_details(charge_id)
# Create a new static charge with the specified parameters
static_charge_details = project_a.create_static_charge(allowed_slots=10, min_amount_msats=1000, max_amount_msats=10000, description='Test Static Charge', internal_id='123', success_message='Payment successful')
# Update the details of an existing static charge
static_charge_id = static_charge_details["id"]
updated_static_charge_details = project_a.update_static_charge_details(id=static_charge_id,allowed_slots=None, min_amount_msats=2000, max_amount_msats=20000, description='Updated Static Charge', success_message='Payment successful')
# Get the details of a static charge with the specified ZBD ID
static_charge_id = static_charge_details["id"]
static_charge_details = project_a.get_static_charge_details(static_charge_id)
# Create a new withdrawal request with the specified parameters
withdrawal_request_details = project_a.create_withdrawal_request(amount_of_seconds_to_expire_after=3600, amount_msats=10000, description='Test Withdrawal Request', internal_id='123')
# Get the details of a withdrawal request with the specified ZBD ID
withdrawal_request_id = withdrawal_request_details["id"]
withdrawal_request_details = project_a.get_withdrawal_request_details(withdrawal_request_id)
# Send a keysend payment to the specified public key with the specified amount and metadata
public_key = 'your_public_key_here'
amount_msats = 1000
metadata = {'key': 'value'}
payment_details = project_a.send_keysend_payment(public_key, amount_msats, metadata=metadata)
print(payment_details)
# Pay an invoice with the specified parameters
invoice = 'your_invoice_here'
description = 'Test Invoice Payment'
internal_id = '123'
payment_details = project_a.pay_invoice(invoice, description, internal_id)
# get payment details for a Zebedee payment with zbd_id=123
get_payment_details = project_a.get_payment_details(zbd_id=payment_details["id"])
# validate a lightning address
validate_lightning_address = project_a.validate_lightning_address(lightning_address="[email protected]")
# send a payment to a lightning address
payment_response = project_a.send_payment_to_lightning_address(
lightning_address="[email protected]", amount_msats=10000, comment="payment comment", internal_id="test:1"
)
# fetch a charge for a lightning address
charge_response = project_a.fetch_charge_from_lightning_address(
lightning_address="[email protected]", amount_msats=10000, description="charge description"
)
# send a payment to a gamertag
gamertag_payment_details = project_a.send_payment_to_gamertag(gamertag="santos", amount_msats=1000, description="payment description")
# get details for a gamertag transaction with zbd_id=123
transaction_details = project_a.get_gamertag_transaction_details(zbd_id=gamertag_payment_details["transactionId"])
# get the user ID associated with a gamertag
user_id = project_a.get_user_id_from_gamertag(gamertag="santos")
# get the gamertag associated with a user ID
gamertag = project_a.get_gamertag_from_user_id(user_id=user_id)
# fetch a charge for a gamertag
charge_response = project_a.fetch_charge_from_gamertag(
gamertag="santos", amount_msats=1000, description="charge description", internal_id="internal_id"
)
# check if an IP is in a supported region
supported = project_a.is_supported_region(ip="123.45.67.89")
# get the IP address of the ZBD production server
prod_server_ip = project_a.get_zbd_prod_server_ip()
# get the current BTC-USD exchange rate
btc_usd_price = project_a.get_btc_usd_quote_price()
# convert sats to msats -> returns 1000
amount_msats = project_a.convert_sats_to_msats(amount_sats=1)
# convert msats to sats -> returns 1
amount_sats = project_a.convert_msats_to_sats(amount_msats=1000)
# transfer funds between ZBD wallets
transfer_response = project_a.transfer_zbd_funds(amount_msats=1000, receiver_wallet_id="receiver_wallet_id")
```
## best practices
- use an environment variable for each API key instead of hardcoding it, before going live with this code.
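
For example, a minimal sketch that reads the key from an environment variable (the variable name `ZBD_API_KEY` below is just an illustration):

```
import os
from zebedee import *

# read the API key from the environment instead of hardcoding it in source
project_a = zebedee.Project(os.environ["ZBD_API_KEY"], "https://your-website.com/zbd/webhook/")
```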
# Zeblok Python SDK
Python SDK will help in integrating outside systems with Zeblok platform via code
## Create Python Package
Run the following command from the project root directory to create the python wheel file
```python
python setup.py sdist bdist_wheel
```
## Usage
### Microservice
#### Authentication
```python
from zeblok.auth import Auth
auth = Auth(username='<your-username>', password='<your-password>', base_url='<your-zbl-url>')
auth_token = auth.get_token()
```
#### Deploy Image
```python
from zeblok.microservice import ModelMicroService
microservice_model = ModelMicroService(
base_url='<your-zbl-url>', token=auth.get_token(), bucket_name='<bucket-name>',
username='<your-username>', storage_url='<storage-url>'
)
microservice_model.deploy_microservice(
image_name='<image-name>', microservice_id='<microservice-id>', plan_id="<plan-id>",
service_name=f'<unique-service-name>', namespace_id='<namespace-id>',
datacenter_id='<datacenter-id>', envs=['example-env1', 'example-env2', 'example-env3']
)
```
#### Get Plans
```python
from zeblok.microservice import ModelMicroService
microservice_model = ModelMicroService(
base_url='<your-zbl-url>', token=auth.get_token(), bucket_name='<bucket-name>',
username='<your-username>', storage_url='<storage-url>'
)
microservice_model.get_plans()
```
#### Get Datacenters
```python
from zeblok.microservice import ModelMicroService
microservice_model = ModelMicroService(
base_url='<your-zbl-url>', token=auth.get_token(), bucket_name='<bucket-name>',
username='<your-username>', storage_url='<storage-url>'
)
microservice_model.get_datacenters()
```
#### Get Namespaces
```python
from zeblok.microservice import ModelMicroService
microservice_model = ModelMicroService(
base_url='<your-zbl-url>', token=auth.get_token(), bucket_name='<bucket-name>',
username='<your-username>', storage_url='<storage-url>'
)
microservice_model.get_namespaces()
```
### Pipeline
#### Deploy Pipeline
```python
from zeblok.pipeline import Pipeline
pipeline = Pipeline(
base_url="<your-zbl-url>",
cross_cloud_service_url='<zbl-cross-cloud-url>', storage_url='<storage-url>',
api_access_key='<api_access_key>', api_access_secret='<api_access_secret>',
storage_username='<storage_username>', storage_access_secret='<storage_access_secret>'
)
pipeline.deploy_pipeline(
model_folder_path='<model-folder-path>', entrypoint='<entrypoint>', bucket_name='<bucket-name>',
    pipeline_name='<unique-pipeline-name>', plan_id=['<plan-id1>', '<plan-id2>'],
namespace_id='<namespace-id>', datacenter_id='<datacenter-id>', autodeploy=True
)
```
#### Get Plans
```python
from zeblok.pipeline import Pipeline
pipeline = Pipeline(
base_url="<your-zbl-url>", cross_cloud_service_url='<zbl-cross-cloud-url>', storage_url='<storage-url>',
api_access_key='<api_access_key>', api_access_secret='<api_access_secret>',
storage_username='<storage_username>', storage_access_secret='<storage_access_secret>'
)
pipeline.get_plans()
```
#### Get Datacenters
```python
from zeblok.pipeline import Pipeline
pipeline = Pipeline(
base_url="<your-zbl-url>", cross_cloud_service_url='<zbl-cross-cloud-url>', storage_url='<storage-url>',
api_access_key='<api_access_key>', api_access_secret='<api_access_secret>',
storage_username='<storage_username>', storage_access_secret='<storage_access_secret>'
)
pipeline.get_datacenters()
```
#### Get Namespaces
```python
from zeblok.pipeline import Pipeline
pipeline = Pipeline(
base_url="<your-zbl-url>", cross_cloud_service_url='<zbl-cross-cloud-url>', storage_url='<storage-url>',
api_access_key='<api_access_key>', api_access_secret='<api_access_secret>',
storage_username='<storage_username>', storage_access_secret='<storage_access_secret>'
)
pipeline.get_namespaces()
```
| zeblok-py | /zeblok-py-0.0.8.tar.gz/zeblok-py-0.0.8/README.md | README.md |
from typing import Union
from pathlib import Path
import minio
import os
from datetime import timedelta
from .errors import FileUploadError, InvalidCredentialsError, BucketDoesNotExistsError
from zipfile import ZipFile
from .utils import api_error, Progress, get_all_file_paths, validate_base_url, validate_platform_id, \
validate_namespace_id, validate_deployment_name, validate_username
import requests
import json
STORAGE_URL = "datalake.zbl-aws.zeblok.com:9000"
CROSS_CLOUD_SERVICE_URL = "http://cross-cloud-services-673735588.us-east-2.elb.amazonaws.com/upload"
class DeployModel:
__slots__ = ['__CROSS_CLOUD_SERVICE_URL', '__storage_url', '__bucket_name', '__base_url', '__token', '__username']
def __init__(self, base_url: str, token: str, bucket_name: str, username: str, storage_url: str):
validate_base_url(base_url=base_url)
if type(token) is not str:
raise InvalidCredentialsError('token can only be of type String')
elif token == '':
            raise InvalidCredentialsError('token cannot be empty')
elif token.split()[0] != 'Bearer':
raise InvalidCredentialsError('Please pass a valid Bearer token')
validate_username(username=username)
self.__bucket_name = bucket_name
self.__username = username
self.__storage_url = storage_url
self.__CROSS_CLOUD_SERVICE_URL = CROSS_CLOUD_SERVICE_URL
self.__base_url = 'https://' + base_url
self.__token = token
@property
def token(self):
return self.__token
@property
def base_url(self):
return self.__base_url
@property
def cross_cloud_service_url(self):
return self.__CROSS_CLOUD_SERVICE_URL
def get_datacenters(self) -> [dict]:
response = requests.get(
f"{self.__base_url}/api/v1/dataCenter/public/", data=json.dumps({"isActive": True}),
headers={'Authorization': self.__token, 'Content-Type': 'application/json', 'request-from': 'Ai-MicroCloud'}
)
if response.status_code == 200 and response.json()['success']:
datacenters = []
for data in response.json()['data']:
datacenters.append({'_id': data['_id'], 'name': data['name']})
return datacenters
else:
api_error(status_code=response.status_code, message=response.text)
def get_namespaces(self) -> [dict]:
response = requests.get(
f"{self.__base_url}/api/v1/namespaces/org/", data=json.dumps({"state": "ready"}),
headers={'Authorization': self.__token, 'Content-Type': 'application/json', 'request-from': 'Ai-MicroCloud'}
)
if response.status_code == 200 and response.json()['success']:
return response.json()['data']
else:
api_error(status_code=response.status_code, message=response.text)
@staticmethod
def prepare_model_zip(model_folder_path: Path) -> Path:
print("\nPreparing model zip")
model_zipfile_path = model_folder_path.parent.joinpath(f'{model_folder_path.name.lower()}.zip')
with ZipFile(model_zipfile_path, 'w') as zip:
file_paths = get_all_file_paths(directory=model_folder_path)
for file in file_paths:
zip.write(filename=file.as_posix(), arcname=file.relative_to(model_folder_path))
print("Model zip prepared")
return model_zipfile_path
def upload_file(self, file_name: Path, secret_key: str) -> str:
try:
client = minio.Minio(
endpoint=self.__storage_url, access_key=self.__username, secret_key=secret_key, secure=False
)
if not client.bucket_exists(self.__bucket_name):
raise BucketDoesNotExistsError(f'{self.__bucket_name} does not exists')
client.fput_object(
bucket_name=self.__bucket_name, object_name=f"{file_name.name}",
file_path=file_name.as_posix(), content_type="application/zip",
progress=Progress(), part_size=5 * 1024 * 1024
)
url = client.presigned_get_object(
bucket_name=self.__bucket_name, object_name=f"{file_name.name}",
expires=timedelta(hours=3)
)
finally:
os.remove(file_name)
return url
def call_cross_cloud_service(
self, cross_cloud_service_url: str, presigned_get_url: str, image_name: str, file_name: str,
image_id: Union[str, None], deployment_type: str, autodeploy: bool = False,
namespace_id: Union[None, str] = None, platform_id: Union[None, str] = None,
deployment_name: Union[None, str] = None
):
if autodeploy:
validate_platform_id(platform_id=platform_id)
validate_namespace_id(namespace_id=namespace_id)
validate_deployment_name(deployment_name=deployment_name)
response = requests.post(
cross_cloud_service_url,
headers={'Authorization': self.__token, 'Content-Type': 'application/json',
'request-from': 'Ai-MicroCloud'},
data=json.dumps({
"imageName": image_name, "url": presigned_get_url, "filename": file_name,
"portalUrl": f"{self.__base_url}", "imageId": image_id,
"autoDeploy": autodeploy, "namespaceId": None if not autodeploy else namespace_id,
"platform": None if not autodeploy else platform_id,
"deploymentName": None if not autodeploy else deployment_name,
"deploymentType": deployment_type
})
)
if response.status_code == 200:
if response.json()['success']:
return
else:
raise FileUploadError(response.json()['job'])
else:
api_error(status_code=response.status_code, message=response.text) | zeblok-py | /zeblok-py-0.0.8.tar.gz/zeblok-py-0.0.8/zeblok/deploy.py | deploy.py |
from typing import Union
from pathlib import Path
import minio
import os
from datetime import timedelta
from .errors import FileUploadError, InvalidCredentialsError, BucketDoesNotExistsError, PipelineCreationError
from zipfile import ZipFile
from .utils import api_error, Progress, get_all_file_paths, validate_url, validate_platform_id, \
validate_namespace_id, validate_secret_key, validate_envs_args, \
validate_model_folder
import requests
import json
import time
STORAGE_URL = "datalake.zbl-aws.zeblok.com:9000"
CROSS_CLOUD_SERVICE_URL = "http://cross-cloud-services-673735588.us-east-2.elb.amazonaws.com/upload"
class Pipeline:
__slots__ = [
'__base_url', '__cross_cloud_service_url', '__storage_url', '__api_access_key', '__api_access_secret',
'__storage_username', '__storage_access_secret'
]
def __init__(
self, base_url: str, cross_cloud_service_url: str, storage_url: str, api_access_key: str,
api_access_secret: str, storage_username: str, storage_access_secret: str
):
validate_url(url_name='base_url', url=base_url)
validate_url(url_name='cross_cloud_service_url', url=cross_cloud_service_url)
validate_url(url_name='storage_url', url=storage_url)
validate_secret_key(key_name='api_access_key', secret_key=api_access_key)
validate_secret_key(key_name='api_access_secret', secret_key=api_access_secret)
validate_secret_key(key_name='storage_username', secret_key=storage_username)
validate_secret_key(key_name='storage_access_secret', secret_key=storage_access_secret)
self.__base_url = 'https://' + base_url
self.__cross_cloud_service_url = 'https://' + cross_cloud_service_url
self.__storage_url = storage_url
self.__api_access_key = api_access_key
self.__api_access_secret = api_access_secret
self.__storage_username = storage_username
self.__storage_access_secret = storage_access_secret
@property
def base_url(self):
return self.__base_url
@property
def cross_cloud_service_url(self):
return self.__cross_cloud_service_url
def get_datacenters(self) -> None:
response = requests.get(
f"{self.__base_url}/api/v1/dataCenter/public/", data=json.dumps({"isActive": True}),
auth=(self.__api_access_key, self.__api_access_secret),
headers={'Content-Type': 'application/json'}
)
if response.status_code == 200 and response.json()['success']:
for data in response.json()['data']:
print(f"Datacenter Name: {data['name']} | Datacenter ID: {data['_id']}")
else:
api_error(status_code=response.status_code, message=response.text)
def get_namespaces(self) -> [dict]:
response = requests.get(
f"{self.__base_url}/api/v1/namespaces/org/", data=json.dumps({"state": "ready"}),
auth=(self.__api_access_key, self.__api_access_secret),
headers={'Content-Type': 'application/json'}
)
if response.status_code == 200 and response.json()['success']:
for data in response.json()['data']:
print(f"Namespace Name: {data['name']} | Namespace ID: {data['_id']}")
else:
api_error(status_code=response.status_code, message=response.text)
def get_plans(self) -> None:
response = requests.get(
f"{self.base_url}/api/v1/plans/",
auth=(self.__api_access_key, self.__api_access_secret),
headers={'Content-Type': 'application/json'}
)
if response.status_code == 200 and response.json()['success']:
for plan in response.json()['data']:
print(f"Plan Name: {plan['planName']} | Id: {plan['_id']}")
print("\tResources")
print(f"\t\tvCPU: {plan['resources']['CPU']}")
print(f"\t\tGPU: {plan['resources']['GPU']}")
print(f"\t\tmemory [GB]: {plan['resources']['memory']}")
print(f"\t\tstorage [GB]: {plan['resources']['storage']}")
print(f"\t\tworkers: {plan['resources']['workers']}")
print(f"\tDatacenter Name: {plan['dataCenterId']['name']} | Datacenter ID: {plan['dataCenterId']['_id']}")
else:
api_error(status_code=response.status_code, message=response.text)
def __get_dockerfile(self, envs: list[str], entrypoint: list[str]) -> str:
response = requests.get(
f"{self.__base_url}/api/v1/system/dockerfile", auth=(self.__api_access_key, self.__api_access_secret),
headers={'Content-Type': 'application/json'}, data=json.dumps({
"from": {"baseImage": "miniconda3"}, "copy": {"./": "./"},
"entrypoint": entrypoint, "env": dict([kv.split("=") for kv in envs])
})
)
if response.status_code == 200:
if response.json()['success']:
return response.json()['data']
else:
api_error(status_code=response.status_code, message=response.json()['error']['message'])
else:
api_error(status_code=response.status_code, message=response.text)
def __put_dockerfile(self, model_folder_path: Path, envs: list[str], entrypoint: list[str]) -> bool:
validate_model_folder(model_folder_path=model_folder_path)
with open(model_folder_path.joinpath('Dockerfile'), 'w') as fp:
fp.write(self.__get_dockerfile(envs=envs, entrypoint=entrypoint))
return True
@staticmethod
def __prepare_model_zip(model_folder_path: Path) -> Path:
print("\nPreparing model zip .....")
model_zipfile_path = model_folder_path.parent.joinpath(f'{model_folder_path.name.lower()}.zip')
        with ZipFile(model_zipfile_path, 'w') as zipf:
            file_paths = get_all_file_paths(directory=model_folder_path)
            for file in file_paths:
                zipf.write(filename=file.as_posix(), arcname=file.relative_to(model_folder_path))
print("Model zip prepared")
return model_zipfile_path
def __upload_file(self, file_name: Path, bucket_name: str) -> str:
try:
client = minio.Minio(
endpoint=self.__storage_url, access_key=self.__storage_username,
secret_key=self.__storage_access_secret, secure=False
)
if not client.bucket_exists(bucket_name):
raise BucketDoesNotExistsError(f'{bucket_name} does not exists')
client.fput_object(
bucket_name=bucket_name, object_name=f"{file_name.name}",
file_path=file_name.as_posix(), content_type="application/zip",
progress=Progress(), part_size=5 * 1024 * 1024
)
url = client.presigned_get_object(
bucket_name=bucket_name, object_name=f"{file_name.name}",
expires=timedelta(hours=3)
)
finally:
os.remove(file_name)
return url
def __call_cross_cloud_service(
self, cross_cloud_service_url: str, presigned_get_url: str, image_name: str, file_name: str,
image_id: Union[str, None], plan_id: list[str], deployment_type: str, autodeploy: bool = False,
namespace_id: Union[None, str] = None, platform_id: Union[None, list[str]] = None
):
if autodeploy:
# validate_platform_id(platform_id=platform_id)
validate_namespace_id(namespace_id=namespace_id)
response = requests.post(
cross_cloud_service_url, auth=(self.__api_access_key, self.__api_access_secret),
headers={'Content-Type': 'application/json'},
data=json.dumps({
"imageName": image_name,
"url": presigned_get_url,
"imageId": image_id,
"filename": file_name,
"portalUrl": f"{self.__base_url}",
"autoDeploy": autodeploy,
"namespaceId": None if not autodeploy else namespace_id,
"platform": None if not autodeploy else platform_id,
"deploymentType": deployment_type,
"planId": plan_id
})
)
if response.status_code == 200:
if response.json()['success']:
return
else:
raise FileUploadError(response.json()['job'])
else:
api_error(status_code=response.status_code, message=response.text)
def __create_pipeline(
self, pipeline_name: str, plan_id: list[str], namespace_id: str, datacenter_id: list[str], envs: list[str],
args: list[str], docker_image_tag: str
):
response = requests.post(
f"{self.base_url}/api/v1/pipeline/", auth=(self.__api_access_key, self.__api_access_secret),
headers={'Content-Type': 'application/json'},
data=json.dumps({
"pipelineName": pipeline_name,
"planId": plan_id,
"namespaceId": namespace_id,
"yamlString": "from",
"platform": datacenter_id,
"env": envs,
"args": args,
"dockerImageTag": docker_image_tag
})
)
print(response.request.body)
if response.status_code == 200:
if response.json()['success']:
return response.json()['data']['_id']
else:
raise PipelineCreationError(response.json())
else:
api_error(status_code=response.status_code, message=response.text)
def deploy_pipeline(
self, model_folder_path: str, bucket_name: str, pipeline_name: str, plan_id: list[str], namespace_id: str,
datacenter_id: list[str], envs: Union[str, None] = None, args: Union[str, None] = None,
entrypoint: Union[str, None] = None, autodeploy: bool = False
):
model_folder_path = Path(model_folder_path)
envs = [] if envs is None else list(map(str.strip, envs.split(',')))
if len(envs):
validate_envs_args(name='envs', val=envs)
args = [] if args is None else list(map(str.strip, args.split(',')))
if len(args):
validate_envs_args(name='args', val=args)
entrypoint = [] if entrypoint is None else list(map(str.strip, entrypoint.split()))
# self.__put_dockerfile(model_folder_path=model_folder_path, envs=envs, entrypoint=entrypoint)
docker_image_tag = f"dockerhub.zeblok.io/zeblok/{pipeline_name.lower()}:{int(time.time())}"
pipeline_id = self.__create_pipeline(
pipeline_name=pipeline_name, plan_id=plan_id, namespace_id=namespace_id, datacenter_id=datacenter_id,
envs=envs, args=args, docker_image_tag=docker_image_tag
)
print("Completed create_pipeline")
model_zipfile_path = self.__prepare_model_zip(model_folder_path)
print(model_zipfile_path)
print("Completed model Zipping")
presigned_url = self.__upload_file(file_name=model_zipfile_path, bucket_name=bucket_name)
print("Completed zip file upload")
self.__call_cross_cloud_service(
cross_cloud_service_url=self.cross_cloud_service_url, presigned_get_url=presigned_url,
image_name=docker_image_tag, file_name=model_zipfile_path.name, image_id=pipeline_id, autodeploy=autodeploy,
namespace_id=namespace_id, platform_id=datacenter_id,
deployment_type='pipeline', plan_id=plan_id
)
print("Completed cross-cloud service call") | zeblok-py | /zeblok-py-0.0.8.tar.gz/zeblok-py-0.0.8/zeblok/pipeline.py | pipeline.py |
from .deploy import DeployModel
from pathlib import Path
from .errors import InvalidModelFolder, FileUploadError, InvalidCredentialsError, ModelDeploymentError
from .utils import api_error, get_all_file_paths
import requests
import time
import json
from .utils import validate_model_folder
class ModelMicroService(DeployModel):
def __init__(self, base_url: str, token: str, bucket_name: str, username: str, storage_url: str):
super(ModelMicroService, self).__init__(
base_url=base_url, token=token, bucket_name=bucket_name, username=username, storage_url=storage_url
)
def __put_dockerfile(self, model_folder_path: Path) -> bool:
validate_model_folder(model_folder_path=model_folder_path)
with open(model_folder_path.joinpath('Dockerfile'), 'w') as fp:
fp.write(self.__get_dockerfile(self.base_url, self.token))
return True
@staticmethod
def __get_dockerfile(base_url: str, token: str) -> str:
response = requests.get(
f"{base_url}/api/v1/system/dockerfile",
headers={'Authorization': token, 'Content-Type': 'application/json', 'request-from': 'Ai-MicroCloud'}
)
if response.status_code == 200:
if response.json()['success']:
return response.json()['data']
else:
api_error(status_code=response.status_code, message=response.json()['error']['message'])
else:
api_error(status_code=response.status_code, message=response.text)
def get_plans(self) -> None:
response = requests.get(
f"{self.base_url}/api/v1/plans/",
headers={'Authorization': self.token, 'Content-Type': 'application/json', 'request-from': 'Ai-MicroCloud'}
)
if response.status_code == 200 and response.json()['success']:
for plan in response.json()['data']:
print(f"Plan Name: {plan['planName']}")
print(f"Id: {plan['_id']}")
print(f"Datacenters:")
print(f"\tName: {plan['dataCenterId']['name']}")
print(f"\tId: {plan['dataCenterId']['_id']}")
else:
api_error(status_code=response.status_code, message=response.text)
def __create_microservice(
self, microservice_name: str, microservice_description: str, image_name: str, plan_id: str
) -> str:
response = requests.post(
f"{self.base_url}/api/v1/microservices/",
data=json.dumps({
"name": microservice_name,
"description": microservice_description,
"imageTag": [{"displayName": image_name, "dockerImage": image_name}],
"defaultPlan": plan_id,
"plans": [plan_id],
"parameters": {
"ports": "4200"
},
"s3ImageLink": "https://zeblokcomputationalpublicimages.s3.ap-south-1.amazonaws.com/logo_6238a5205888964d61cdd42a.jpg",
"isPublic": "true"
}),
headers={'Authorization': self.token, 'Content-Type': 'application/json', 'request-from': 'Ai-MicroCloud'}
)
# print(response.request.body)
if response.status_code == 201:
if response.json()['success']:
print([{"displayName": image_name, "dockerImage": image_name}])
return response.json()['data']['_id']
else:
raise ModelDeploymentError(response.json()['message'])
else:
api_error(status_code=response.status_code, message=response.text)
def upload_model(
self, model_folder_path: str, storage_secret_key: str, microservice_name: str,
microservice_description: str, plan_id: str
) -> str:
folder_path = Path(model_folder_path)
# self.__put_dockerfile(Path(folder_path))
model_zipfile_path = self.prepare_model_zip(model_folder_path=folder_path)
presigned_url = self.upload_file(file_name=model_zipfile_path, secret_key=storage_secret_key)
version = int(time.time())
image_name = f"zeblok/{model_zipfile_path.stem}:{version}".lower()
self.call_cross_cloud_service(
cross_cloud_service_url=self.cross_cloud_service_url, presigned_get_url=presigned_url,
image_name=image_name, file_name=model_zipfile_path.name, image_id=None, autodeploy=False,
namespace_id='', deployment_name='', platform_id='', deployment_type='microservices'
)
return self.__create_microservice(
microservice_name=f"{microservice_name}: {version}", microservice_description=microservice_description,
image_name=image_name, plan_id=plan_id
)
def deploy_microservice(
self, image_name: str, microservice_id: str, plan_id: str, service_name: str,
namespace_id: str, datacenter_id: str, envs: list[str]
):
response = requests.post(
f"{self.base_url}/api/v1/spawnedservices/",
data=json.dumps({
"microserviceId": microservice_id, "dataCenterId": datacenter_id,
"planId": plan_id,
"namespaceId": namespace_id,
"parameters": {
"ports": [5000],
"envs": envs,
"args": None,
"command": None
},
"name": service_name,
"imageTag": {"displayName": image_name, "dockerImage": image_name}
}),
headers={'Authorization': self.token, 'Content-Type': 'application/json', 'request-from': 'Ai-MicroCloud'}
)
if response.status_code == 201:
if response.json()['success']:
return response.json()['message']
else:
raise ModelDeploymentError(response.json()['message'])
else:
api_error(status_code=response.status_code, message=response.text) | zeblok-py | /zeblok-py-0.0.8.tar.gz/zeblok-py-0.0.8/zeblok/microservice.py | microservice.py |
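# A minimal usage sketch for ModelMicroService; all values are hypothetical
# placeholders (the constructor arguments mirror DeployModel's):
#
#     ms = ModelMicroService(
#         base_url="https://app.example-portal.com", token="<api-token>",
#         bucket_name="my-bucket", username="<user>",
#         storage_url="datalake.example-portal.com:9000",
#     )
#     microservice_id = ms.upload_model(
#         model_folder_path="./my-model", storage_secret_key="<storage-secret>",
#         microservice_name="my-service", microservice_description="demo service",
#         plan_id="<plan-id>",
#     )
#     ms.deploy_microservice(
#         image_name="zeblok/my-model:1690000000", microservice_id=microservice_id,
#         plan_id="<plan-id>", service_name="my-service",
#         namespace_id="<namespace-id>", datacenter_id="<datacenter-id>", envs=[],
#     )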
import sys
import time
from queue import Empty, Queue
from threading import Thread
from pathlib import Path
from os import walk
import os
from .errors import AuthenticationError, ServerError, InvalidCredentialsError, InvalidBaseURL, InvalidStorageURL
_BAR_SIZE = 20
_KILOBYTE = 1024
_FINISHED_BAR = '#'
_REMAINING_BAR = '-'
_UNKNOWN_SIZE = '?'
_STR_MEGABYTE = ' MB'
_HOURS_OF_ELAPSED = '%d:%02d:%02d'
_MINUTES_OF_ELAPSED = '%02d:%02d'
_RATE_FORMAT = '%5.2f'
_PERCENTAGE_FORMAT = '%3d%%'
_HUMANIZED_FORMAT = '%0.2f'
_DISPLAY_FORMAT = '|%s| %s/%s %s [elapsed: %s left: %s, %s MB/sec]'
_REFRESH_CHAR = '\r'
def get_all_file_paths(directory):
file_paths = []
for root, directories, files in walk(directory):
for filename in files:
file_paths.append(Path(root).joinpath(filename))
return file_paths
def seconds_to_time(seconds):
"""
Consistent time format to be displayed on the elapsed time in screen.
:param seconds: seconds
"""
minutes, seconds = divmod(int(seconds), 60)
hours, m = divmod(minutes, 60)
if hours:
return _HOURS_OF_ELAPSED % (hours, m, seconds)
else:
return _MINUTES_OF_ELAPSED % (m, seconds)
def format_string(current_size, total_length, elapsed_time):
"""
Consistent format to be displayed on the screen.
:param current_size: Number of finished object size
:param total_length: Total object size
:param elapsed_time: number of seconds passed since start
"""
n_to_mb = current_size / _KILOBYTE / _KILOBYTE
elapsed_str = seconds_to_time(elapsed_time)
rate = _RATE_FORMAT % (
n_to_mb / elapsed_time) if elapsed_time else _UNKNOWN_SIZE
frac = float(current_size) / total_length
bar_length = int(frac * _BAR_SIZE)
bar = (_FINISHED_BAR * bar_length +
_REMAINING_BAR * (_BAR_SIZE - bar_length))
percentage = _PERCENTAGE_FORMAT % (frac * 100)
left_str = (
seconds_to_time(
elapsed_time / current_size * (total_length - current_size))
if current_size else _UNKNOWN_SIZE)
    humanized_total = _HUMANIZED_FORMAT % (
        total_length / _KILOBYTE / _KILOBYTE) + _STR_MEGABYTE
    humanized_n = _HUMANIZED_FORMAT % n_to_mb + _STR_MEGABYTE
return _DISPLAY_FORMAT % (bar, humanized_n, humanized_total, percentage,
elapsed_str, left_str, rate)
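# For example, format_string(512 * 1024, 1024 * 1024, 2.0) renders roughly as:
# |##########----------| 0.50 MB/1.00 MB  50% [elapsed: 00:02 left: 00:02,  0.25 MB/sec]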
def api_error(status_code: int, message: str):
    if status_code == 401:
        raise AuthenticationError("User not authenticated. Please check your api_access_key and api_access_secret")
    else:
        raise ServerError(f"Status code = {status_code}\n{message}")
class Progress(Thread):
"""
Constructs a :class:`Progress` object.
:param interval: Sets the time interval to be displayed on the screen.
:param stdout: Sets the standard output
:return: :class:`Progress` object
"""
def __init__(self, interval=1, stdout=sys.stdout):
Thread.__init__(self)
self.daemon = True
self.total_length = 0
self.interval = interval
self.object_name = None
self.last_printed_len = 0
self.current_size = 0
self.display_queue = Queue()
self.initial_time = time.time()
self.stdout = stdout
self.start()
self.prefix = None
def set_meta(self, total_length, object_name):
"""
Metadata settings for the object. This method called before uploading
object
:param total_length: Total length of object.
:param object_name: Object name to be showed.
"""
self.total_length = total_length
self.object_name = object_name
self.prefix = self.object_name + ': ' if self.object_name else ''
def run(self):
displayed_time = 0
while True:
try:
# display every interval secs
task = self.display_queue.get(timeout=self.interval)
except Empty:
elapsed_time = time.time() - self.initial_time
if elapsed_time > displayed_time:
displayed_time = elapsed_time
self.print_status(current_size=self.current_size,
total_length=self.total_length,
displayed_time=displayed_time,
prefix=self.prefix)
continue
current_size, total_length = task
displayed_time = time.time() - self.initial_time
self.print_status(current_size=current_size,
total_length=total_length,
displayed_time=displayed_time,
prefix=self.prefix)
self.display_queue.task_done()
if current_size == total_length:
# once we have done uploading everything return
self.done_progress()
return
def update(self, size):
"""
Update object size to be showed. This method called while uploading
:param size: Object size to be showed. The object size should be in
bytes.
"""
if not isinstance(size, int):
raise ValueError('{} type can not be displayed. '
'Please change it to Int.'.format(type(size)))
self.current_size += size
self.display_queue.put((self.current_size, self.total_length))
def done_progress(self):
self.total_length = 0
self.object_name = None
self.last_printed_len = 0
self.current_size = 0
def print_status(self, current_size, total_length, displayed_time, prefix):
formatted_str = prefix + format_string(
current_size, total_length, displayed_time)
self.stdout.write(_REFRESH_CHAR + formatted_str + ' ' *
max(self.last_printed_len - len(formatted_str), 0))
self.stdout.flush()
self.last_printed_len = len(formatted_str)
def validate_secret_key(key_name: str, secret_key: str):
    if type(secret_key) is not str:
        raise InvalidCredentialsError(f'{key_name} can only be of type String')
    if secret_key == '':
        raise InvalidCredentialsError(f'{key_name} cannot be empty')
def validate_username(username: str):
    if type(username) is not str:
        raise InvalidCredentialsError('username can only be of type String')
    if username == '':
        raise InvalidCredentialsError('username cannot be empty')
def validate_url(url_name: str, url: str):
    if type(url) is not str:
        raise InvalidBaseURL(f'{url_name} can only be of type String')
    if url == '':
        raise InvalidBaseURL(f'{url_name} cannot be empty')
def validate_base_url(base_url: str):
    if type(base_url) is not str:
        raise InvalidBaseURL('base_url can only be of type String')
    if base_url == '':
        raise InvalidBaseURL('base_url cannot be empty')
def validate_storage_url(storage_url: str):
    if type(storage_url) is not str:
        raise InvalidBaseURL('storage_url can only be of type String')
    if storage_url == '':
        raise InvalidBaseURL('storage_url cannot be empty')
def validate_deployment_name(deployment_name: str):
    if type(deployment_name) is not str:
        raise ValueError('deployment_name can only be of type String')
    if deployment_name == '':
        raise ValueError('deployment_name cannot be empty')
def validate_model_version(model_version: str):
    if type(model_version) is not str:
        raise ValueError('model_version can only be of type String')
    if model_version == '':
        raise ValueError('model_version cannot be empty')
def validate_namespace_id(namespace_id: str):
    if type(namespace_id) is not str:
        raise ValueError('namespace_id can only be of type String')
    if namespace_id == '':
        raise ValueError('namespace_id cannot be empty')
def validate_platform_id(platform_id: str):
    if type(platform_id) is not str:
        raise ValueError('platform_id can only be of type String')
    if platform_id == '':
        raise ValueError('platform_id cannot be empty')
def validate_model_pipeline(model_pipeline: str):
    if type(model_pipeline) is not str:
        raise ValueError('model_pipeline can only be of type String')
    if model_pipeline == '':
        raise ValueError('model_pipeline cannot be empty')
def validate_model_folder(model_folder_path: Path):
    if model_folder_path == Path(''):
        raise ValueError("Model folder path is empty")
if not model_folder_path.exists():
raise FileNotFoundError(f"{model_folder_path} does not exist")
if not model_folder_path.is_dir():
raise NotADirectoryError(f"{model_folder_path} is not a folder")
if not os.access(model_folder_path, os.W_OK):
raise PermissionError(f"Folder doesn't have write permission: {model_folder_path}")
def validate_envs_args(name: str, val: list[str]):
assert all([len(kv.split("=")) == 2 for kv in val]), f"{name} should be a string with comma-separated key value pairs. For e.g. 'k1=v1, k2=v2, k3=v3'" | zeblok-py | /zeblok-py-0.0.8.tar.gz/zeblok-py-0.0.8/zeblok/utils.py | utils.py |
from typing import Union
from .deploy import DeployModel
from pathlib import Path
import os
import time
from .errors import InvalidModelFolder, FileUploadError, ModelDeploymentError
from .utils import api_error, get_all_file_paths, validate_model_pipeline, validate_username, validate_platform_id, \
validate_namespace_id, validate_deployment_name, validate_model_folder
import requests
import json
class ModelAPI(DeployModel):
def __init__(self, base_url: str, token: str, bucket_name: str, username: str, storage_url: str):
super(ModelAPI, self).__init__(
base_url=base_url, token=token, bucket_name=bucket_name, username=username, storage_url=storage_url
)
    def get_model_pipelines(self) -> list[dict]:
response = requests.get(
f"{self.base_url}/api/v1/aimodel/", data=json.dumps({"state": "ready"}),
headers={'Authorization': self.token, 'Content-Type': 'application/json'}
)
if response.status_code == 200 and response.json()['success']:
image_names = []
for data in response.json()['data']:
image_names.append({'_id': data['_id'], 'imageName': data['imageName']})
return image_names
else:
api_error(status_code=response.status_code, message=response.text)
@staticmethod
def __validate_folder_format(model_folder_path: Path) -> None:
validate_model_folder(model_folder_path=model_folder_path)
for filepath in get_all_file_paths(directory=model_folder_path):
if filepath.name == 'Dockerfile' and filepath.parent.name == model_folder_path.name:
return
raise InvalidModelFolder(f"Invalid BentoML folder format: {model_folder_path}")
def __register_image_name(self, image_name: str):
response = requests.post(
f"{self.base_url}/api/v1/aimodel/", data=json.dumps({"imageName": image_name, "type": "bentoml"}),
headers={'Authorization': self.token, 'Content-Type': 'application/json'}
)
print(response.request.body)
if response.status_code == 201:
if response.json()['success']:
return response.json()['data']['_id']
else:
raise FileUploadError(response.json()['message'])
else:
api_error(status_code=response.status_code, message=response.text)
def upload_model(
self, model_folder_path: str, username: str, storage_secret_key: str,
autodeploy: bool = False, namespace_id: Union[None, str] = None, platform_id: Union[None, str] = None,
deployment_name: Union[None, str] = None,
) -> str:
validate_username(username=username)
folder_path = Path(model_folder_path)
self.__validate_folder_format(model_folder_path=folder_path)
model_zipfile_path = self.prepare_model_zip(model_folder_path=folder_path)
presigned_url = self.upload_file(file_name=model_zipfile_path, secret_key=storage_secret_key)
image_name = f"zeblok/{model_zipfile_path.stem}:{int(time.time())}".lower()
image_id = self.__register_image_name(image_name=image_name)
self.call_cross_cloud_service(
cross_cloud_service_url=self.cross_cloud_service_url, presigned_get_url=presigned_url,
image_name=image_name, file_name=model_zipfile_path.name, image_id=image_id, autodeploy=autodeploy,
namespace_id=namespace_id, deployment_name=deployment_name, platform_id=platform_id,
deployment_type='aimodel'
)
print(
f"\nSuccessfully uploaded the Model folder | Filename: {model_zipfile_path.name}, Image Name: {image_name}"
)
return image_name
def deploy_model(self, deployment_name: str, namespace_id: str, platform_id: str, model_pipeline: str):
validate_deployment_name(deployment_name=deployment_name)
validate_namespace_id(namespace_id=namespace_id)
validate_platform_id(platform_id=platform_id)
validate_model_pipeline(model_pipeline=model_pipeline)
if model_pipeline not in [img_nm['imageName'] for img_nm in self.get_model_pipelines()]:
raise ValueError(f'Image Name: {model_pipeline} not found in the database')
response = requests.post(
f"{self.base_url}/api/v1/k8deployments/",
headers={'Authorization': self.token, 'Content-Type': 'application/json'},
data=json.dumps({
"imageName": model_pipeline, "nodePreference": "NO PREFERENCE", "kioskId": None,
"platform": platform_id,
"namespaceId": namespace_id, "deploymentName": deployment_name
})
)
if response.status_code == 201:
if response.json()['success']:
print(
f"\n{response.json()['message']}"
)
return response.json()['success']
else:
raise ModelDeploymentError(response.json()['message'])
else:
api_error(status_code=response.status_code, message=response.text) | zeblok-py | /zeblok-py-0.0.8.tar.gz/zeblok-py-0.0.8/zeblok/api.py | api.py |
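# A minimal usage sketch for ModelAPI; all values are hypothetical placeholders:
#
#     model_api = ModelAPI(
#         base_url="https://app.example-portal.com", token="<api-token>",
#         bucket_name="my-bucket", username="<user>",
#         storage_url="datalake.example-portal.com:9000",
#     )
#     image_name = model_api.upload_model(
#         model_folder_path="./my-bentoml-model", username="<user>",
#         storage_secret_key="<storage-secret>",
#     )
#     model_api.deploy_model(
#         deployment_name="my-deployment", namespace_id="<namespace-id>",
#         platform_id="<platform-id>", model_pipeline=image_name,
#     )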
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
""" Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
"""
def __init__(self, mu=0, sigma=1):
Distribution.__init__(self, mu, sigma)
def calculate_mean(self):
"""Function to calculate the mean of the data set.
Args:
None
Returns:
float: mean of the data set
"""
avg = 1.0 * sum(self.data) / len(self.data)
self.mean = avg
return self.mean
def calculate_stdev(self, sample=True):
"""Function to calculate the standard deviation of the data set.
Args:
sample (bool): whether the data represents a sample or population
Returns:
float: standard deviation of the data set
"""
if sample:
n = len(self.data) - 1
else:
n = len(self.data)
mean = self.calculate_mean()
sigma = 0
for d in self.data:
sigma += (d - mean) ** 2
sigma = math.sqrt(sigma / n)
self.stdev = sigma
return self.stdev
def plot_histogram(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.hist(self.data)
plt.title('Histogram of Data')
plt.xlabel('data')
plt.ylabel('count')
def pdf(self, x):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)
def plot_histogram_pdf(self, n_spaces = 50):
"""Function to plot the normalized histogram of the data and a plot of the
probability density function along the same range
Args:
n_spaces (int): number of data points
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
mu = self.mean
sigma = self.stdev
min_range = min(self.data)
max_range = max(self.data)
# calculates the interval between x values
interval = 1.0 * (max_range - min_range) / n_spaces
x = []
y = []
# calculate the x values to visualize
for i in range(n_spaces):
tmp = min_range + interval*i
x.append(tmp)
y.append(self.pdf(tmp))
# make the plots
fig, axes = plt.subplots(2,sharex=True)
fig.subplots_adjust(hspace=.5)
axes[0].hist(self.data, density=True)
axes[0].set_title('Normed Histogram of Data')
axes[0].set_ylabel('Density')
axes[1].plot(x, y)
axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        axes[1].set_ylabel('Density')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Gaussian distributions
Args:
other (Gaussian): Gaussian instance
Returns:
Gaussian: Gaussian distribution
"""
result = Gaussian()
result.mean = self.mean + other.mean
result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
return result
def __repr__(self):
"""Function to output the characteristics of the Gaussian instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}".format(self.mean, self.stdev) | zeblonk-probability | /zeblonk_probability-0.2.tar.gz/zeblonk_probability-0.2/zeblonk_probability/Gaussiandistribution.py | Gaussiandistribution.py |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
""" Binomial distribution class for calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats to be extracted from the data file
p (float) representing the probability of an event occurring
n (int) number of trials
"""
def __init__(self, prob=.5, size=20):
self.n = size
self.p = prob
Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())
def calculate_mean(self):
"""Function to calculate the mean from p and n
Args:
None
Returns:
float: mean of the data set
"""
self.mean = self.p * self.n
return self.mean
def calculate_stdev(self):
"""Function to calculate the standard deviation from p and n.
Args:
None
Returns:
float: standard deviation of the data set
"""
self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
return self.stdev
def replace_stats_with_data(self):
"""Function to calculate p and n from the data set
Args:
None
Returns:
float: the p value
float: the n value
"""
self.n = len(self.data)
self.p = 1.0 * sum(self.data) / len(self.data)
self.mean = self.calculate_mean()
self.stdev = self.calculate_stdev()
def plot_bar(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
plt.title('Bar Chart of Data')
plt.xlabel('outcome')
plt.ylabel('count')
def pdf(self, k):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
b = (self.p ** k) * (1 - self.p) ** (self.n - k)
return a * b
def plot_bar_pdf(self):
"""Function to plot the pdf of the binomial distribution
Args:
None
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
x = []
y = []
# calculate the x values to visualize
for i in range(self.n + 1):
x.append(i)
y.append(self.pdf(i))
# make the plots
plt.bar(x, y)
plt.title('Distribution of Outcomes')
plt.ylabel('Probability')
plt.xlabel('Outcome')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Binomial distributions with equal p
Args:
other (Binomial): Binomial instance
Returns:
Binomial: Binomial distribution
"""
        assert self.p == other.p, 'p values are not equal'
result = Binomial()
result.n = self.n + other.n
result.p = self.p
result.calculate_mean()
result.calculate_stdev()
return result
def __repr__(self):
"""Function to output the characteristics of the Binomial instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}, p {}, n {}".\
format(self.mean, self.stdev, self.p, self.n) | zeblonk-probability | /zeblonk_probability-0.2.tar.gz/zeblonk_probability-0.2/zeblonk_probability/Binomialdistribution.py | Binomialdistribution.py |
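# Example usage sketch; with p=0.4 and n=20, mean = 0.4 * 20 = 8.0 and
# stdev = sqrt(20 * 0.4 * 0.6) ~= 2.19; addition requires equal p values:
#
#     binomial_one = Binomial(0.4, 20)
#     binomial_two = Binomial(0.4, 60)
#     print(binomial_one + binomial_two)   # p 0.4, n 80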
import argparse
import enum
import string
from typing import Optional, List
import requests
import requests_unixsocket
import yaml
import zebr0
KEY_DEFAULT = "lxd-stack"
URL_DEFAULT = "http+unix://%2Fvar%2Fsnap%2Flxd%2Fcommon%2Flxd%2Funix.socket"
class ResourceType(enum.Enum):
"""
Enumerates the various LXD resource types managed by the library.
"""
STORAGE_POOLS = "storage-pools", "/1.0/storage-pools"
VOLUMES = "volumes", "/1.0/storage-pools/${parent}/volumes/custom"
NETWORKS = "networks", "/1.0/networks"
PROFILES = "profiles", "/1.0/profiles"
INSTANCES = "instances", "/1.0/instances"
def name(self) -> str:
return self.value[0]
def path(self, config) -> str:
"""
:param config: the resource's configuration
:return: the corresponding path relative to the LXD API base URL
"""
return string.Template(self.value[1]).substitute(config)
class Client:
"""
A simple wrapper around the LXD REST API to manage resources either directly or via "stacks".
This Client connects to the LXD API through the Unix socket (for now).
Apart from how asynchronous operations are handled, it's mainly a convenient, idempotent passthrough.
Therefore, the official documentation is where you'll find all the configuration details you'll need to create LXD resources:
* storage-pools and volumes: https://linuxcontainers.org/lxd/docs/master/api/#/storage and https://linuxcontainers.org/lxd/docs/master/storage
* networks: https://linuxcontainers.org/lxd/docs/master/api/#/networks and https://linuxcontainers.org/lxd/docs/master/networks
* profiles: https://linuxcontainers.org/lxd/docs/master/api/#/profiles and https://linuxcontainers.org/lxd/docs/master/profiles
* instances: https://linuxcontainers.org/lxd/docs/master/api/#/instances and https://linuxcontainers.org/lxd/docs/master/instances
A "stack" is very a convenient way to manage a group of resources linked together.
Heavily inspired by the LXD "preseed" format (see https://linuxcontainers.org/lxd/docs/master/preseed), the structure is almost identical, except:
* "storage_pools" has been renamed "storage-pools" to match the API
* the root "config" element is ignored (use a real preseed file if you want to configure LXD that way)
* instances and volumes are managed through new root elements, "instances" and "volumes"
A typical stack example can be found in tests/test_cli.py.
Check the various functions to see what you can do with stacks and resources.
:param url: URL of the LXD API (scheme is "http+unix", socket path is percent-encoded into the host field), defaults to "http+unix://%2Fvar%2Fsnap%2Flxd%2Fcommon%2Flxd%2Funix.socket"
"""
def __init__(self, url: str = URL_DEFAULT):
self.url = url
self.session = requests_unixsocket.Session()
# this "hook" will be executed after each request (see http://docs.python-requests.org/en/master/user/advanced/#event-hooks)
def hook(response, **_):
response_json = response.json()
if not response.ok:
raise requests.HTTPError(response_json.get("error"))
# some lxd operations are asynchronous, we have to wait for them to finish before continuing
# see https://linuxcontainers.org/lxd/docs/master/rest-api/#background-operation
if response_json.get("type") == "async":
operation = self.session.get(self.url + response_json.get("operation") + "/wait").json().get("metadata")
if operation.get("status_code") != 200:
raise requests.HTTPError(operation.get("err"))
self.session.hooks["response"].append(hook)
def exists(self, config: dict, resource_type: ResourceType) -> bool:
"""
:param config: the resource's configuration
:param resource_type: the resource's type
:return: whether the resource exists or not
"""
resource_path = resource_type.path(config) + "/" + config.get("name")
print("checking existence", resource_path)
try:
self.session.get(self.url + resource_path)
return True
except requests.HTTPError:
return False
def create(self, config: dict, resource_type: ResourceType) -> None:
"""
Creates a resource if it doesn't exist.
The required configuration depends on the resource's type (see zebr0_lxd.Client).
:param config: the resource's desired configuration
:param resource_type: the resource's type
"""
type_path = resource_type.path(config)
resource_path = type_path + "/" + config.get("name")
if not self.exists(config, resource_type):
print("creating", resource_path)
self.session.post(self.url + type_path, json=config)
def delete(self, config: dict, resource_type: ResourceType) -> None:
"""
Deletes a resource if it exists.
:param config: the resource's configuration
:param resource_type: the resource's type
"""
resource_path = resource_type.path(config) + "/" + config.get("name")
if self.exists(config, resource_type):
print(f"deleting", resource_path)
self.session.delete(self.url + resource_path)
def is_running(self, config: dict, resource_type: ResourceType = ResourceType.INSTANCES) -> bool:
"""
:param config: the resource's configuration
:param resource_type: the resource's type, defaults to INSTANCES
:return: whether the resource is running or not
"""
resource_path = resource_type.path(config) + "/" + config.get("name")
print("checking status", resource_path)
return self.session.get(self.url + resource_path).json().get("metadata").get("status") == "Running"
def start(self, config: dict, resource_type: ResourceType) -> None:
"""
Starts a resource if it's not running.
:param config: the resource's configuration
:param resource_type: the resource's type
"""
resource_path = resource_type.path(config) + "/" + config.get("name")
if not self.is_running(config, resource_type):
print("starting", resource_path)
self.session.put(self.url + resource_path + "/state", json={"action": "start"})
def stop(self, config: dict, resource_type: ResourceType) -> None:
"""
Stops a resource if it's running.
:param config: the resource's configuration
:param resource_type: the resource's type
"""
resource_path = resource_type.path(config) + "/" + config.get("name")
if self.exists(config, resource_type) and self.is_running(config, resource_type):
print("stopping", resource_path)
self.session.put(self.url + resource_path + "/state", json={"action": "stop"})
def create_stack(self, stack: dict) -> None:
"""
Creates the resources in the given stack if they don't exist.
The required configurations depend on the resource's type (see zebr0_lxd.Client).
:param stack: the stack as a dictionary
"""
for resource_type in ResourceType:
for config in stack.get(resource_type.name()) or []:
self.create(config, resource_type)
def delete_stack(self, stack: dict) -> None:
"""
Deletes the resources in the given stack if they exist.
:param stack: the stack as a dictionary
"""
for resource_type in reversed(ResourceType):
for config in stack.get(resource_type.name()) or []:
self.delete(config, resource_type)
def start_stack(self, stack: dict) -> None:
"""
Starts the resources in the given stack if they're not running.
:param stack: the stack as a dictionary
"""
for resource_type in [ResourceType.INSTANCES]:
for config in stack.get(resource_type.name()) or []:
self.start(config, resource_type)
def stop_stack(self, stack: dict) -> None:
"""
Stops the resources in the given stack if they're running.
:param stack: the stack as a dictionary
"""
for resource_type in [ResourceType.INSTANCES]:
for config in stack.get(resource_type.name()) or []:
self.stop(config, resource_type)
def main(args: Optional[List[str]] = None) -> None:
"""
usage: zebr0-lxd [-h] [-u <url>] [-l [<level> [<level> ...]]] [-c <duration>] [-f <path>] [--lxd-url <url>] {create,delete,start,stop} [key]
LXD provisioning based on zebr0 key-value system.
Fetches a stack from the key-value server and manages it on LXD.
positional arguments:
{create,delete,start,stop}
operation to execute on the stack
key the stack's key, defaults to 'lxd-stack'
optional arguments:
-h, --help show this help message and exit
-u <url>, --url <url>
URL of the key-value server, defaults to https://hub.zebr0.io
-l [<level> [<level> ...]], --levels [<level> [<level> ...]]
levels of specialization (e.g. "mattermost production" for a <project>/<environment>/<key> structure), defaults to ""
-c <duration>, --cache <duration>
in seconds, the duration of the cache of http responses, defaults to 300 seconds
-f <path>, --configuration-file <path>
path to the configuration file, defaults to /etc/zebr0.conf for a system-wide configuration
--lxd-url <url> URL of the LXD API (scheme is "http+unix", socket path is percent-encoded into the host field), defaults to "http+unix://%2Fvar%2Fsnap%2Flxd%2Fcommon%2Flxd%2Funix.socket"
"""
argparser = zebr0.build_argument_parser(description="LXD provisioning based on zebr0 key-value system.\nFetches a stack from the key-value server and manages it on LXD.", formatter_class=argparse.RawDescriptionHelpFormatter)
argparser.add_argument("command", choices=["create", "delete", "start", "stop"], help="operation to execute on the stack")
argparser.add_argument("key", nargs="?", default=KEY_DEFAULT, help="the stack's key, defaults to 'lxd-stack'")
argparser.add_argument("--lxd-url", default=URL_DEFAULT, help='URL of the LXD API (scheme is "http+unix", socket path is percent-encoded into the host field), defaults to "http+unix://%%2Fvar%%2Fsnap%%2Flxd%%2Fcommon%%2Flxd%%2Funix.socket"', metavar="<url>")
args = argparser.parse_args(args)
value = zebr0.Client(**vars(args)).get(args.key)
if not value:
print(f"key '{args.key}' not found on server {args.url}")
exit(1)
stack = yaml.load(value, Loader=yaml.BaseLoader)
if not isinstance(stack, dict):
print(f"key '{args.key}' on server {args.url} is not a proper yaml or json dictionary")
exit(1)
# creates a Client and calls the function corresponding to the given command
getattr(Client(args.lxd_url), args.command + "_stack")(stack) | zebr0-lxd | /zebr0-lxd-0.9.0.tar.gz/zebr0-lxd-0.9.0/zebr0_lxd/__init__.py | __init__.py |
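# A minimal stack sketch in the format described in the Client docstring; the
# resource configurations are illustrative assumptions (real ones need the
# options documented in the linked LXD references, e.g. an image source for
# instances):
#
#     storage-pools:
#       - name: my-pool
#         driver: dir
#     networks:
#       - name: my-network
#     profiles:
#       - name: my-profile
#     instances:
#       - name: my-instance
#         profiles: [my-profile]
#
# Stored under the key 'lxd-stack', such a stack would be created and started
# with 'zebr0-lxd create' followed by 'zebr0-lxd start'.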
import enum
import hashlib
import json
import subprocess
from dataclasses import dataclass, asdict, field
from functools import cached_property
from pathlib import Path
from typing import Tuple, List
import time
import zebr0
from zebr0_script.const import ATTEMPTS_DEFAULT, PAUSE_DEFAULT, VARIANT_DEFAULT
class Status(str, enum.Enum):
PENDING = "pending"
SUCCESS = "success"
FAILURE = "failure"
IGNORED = "ignored"
@dataclass
class _Entry: # abstract
@cached_property
def json(self) -> str:
return json.dumps(asdict(self))
@cached_property
def md5(self) -> str:
return hashlib.md5(self.json.encode(zebr0.ENCODING)).hexdigest()
def execute(self, **_) -> Tuple[Status, List[str]]:
raise NotImplementedError()
@dataclass
class Command(_Entry):
command: str
variant: str = VARIANT_DEFAULT
def execute(self, attempts: int = ATTEMPTS_DEFAULT, pause: float = PAUSE_DEFAULT, **_) -> Tuple[Status, List[str]]:
while True:
sp = subprocess.Popen(["/bin/bash", "-l", "-c", self.command], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding=zebr0.ENCODING)
attempts -= 1
output = []
for line in sp.stdout:
print(".", end="") # progress bar: each stdout line prints a dot
output.append(line.rstrip())
if output:
print() # if at least one dot has been printed, we need a new line at the end
if sp.wait() == 0: # wait() returns the exit code
return Status.SUCCESS, output
elif attempts > 0:
print(f"error, will try again in {pause} seconds ({attempts} attempts remaining)")
time.sleep(pause)
else:
return Status.FAILURE, output
@dataclass
class KeyToTarget(_Entry):
key: str
target: str
def execute(self, client: zebr0.Client, **_) -> Tuple[Status, List[str]]:
if not (value := client.get(self.key, strip=False)):
return Status.FAILURE, [f"key '{self.key}' not found on server {client.url}"]
try:
target_path = Path(self.target)
target_path.parent.mkdir(parents=True, exist_ok=True)
target_path.write_text(value, encoding=zebr0.ENCODING)
return Status.SUCCESS, []
except OSError as error:
return Status.FAILURE, str(error).splitlines()
@dataclass
class ContentToTarget(_Entry):
content: str
target: str
def execute(self, **_) -> Tuple[Status, List[str]]:
try:
target_path = Path(self.target)
target_path.parent.mkdir(parents=True, exist_ok=True)
target_path.write_text(self.content, encoding=zebr0.ENCODING)
return Status.SUCCESS, []
except OSError as error:
return Status.FAILURE, str(error).splitlines()
@dataclass
class Task:
entry: _Entry
status: Status
output: List[str] = field(init=False)
def execute(self, **kwargs) -> None:
self.status, self.output = self.entry.execute(**kwargs)
def write_report(self, reports_path: Path) -> None:
reports_path.joinpath(self.entry.md5).write_text(json.dumps(asdict(self), indent=2), encoding=zebr0.ENCODING) | zebr0-script | /zebr0-script-0.11.0.tar.gz/zebr0-script-0.11.0/zebr0_script/classes.py | classes.py |
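# A minimal usage sketch (assumes a POSIX shell and an existing 'reports' directory):
#
#     task = Task(Command("echo hello"), Status.PENDING)
#     task.execute()                      # runs the command, sets status and output
#     task.write_report(Path("reports"))  # writes a json report named after the entry's md5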
import datetime
import json
from pathlib import Path
from typing import Optional, List
import yaml
import zebr0
from zebr0_script.classes import Status, Command, KeyToTarget, ContentToTarget, Task
from zebr0_script.const import REPORTS_PATH_DEFAULT, KEY_DEFAULT, ATTEMPTS_DEFAULT, PAUSE_DEFAULT, VARIANT_DEFAULT, INCLUDE, COMMAND, VARIANT, KEY, TARGET, CONTENT, STATUS
def run(url: str,
levels: Optional[List[str]],
cache: int,
configuration_file: Path,
reports_path: Path = REPORTS_PATH_DEFAULT,
key: str = KEY_DEFAULT,
attempts: int = ATTEMPTS_DEFAULT,
pause: float = PAUSE_DEFAULT,
dry: bool = False,
**_) -> None:
client = zebr0.Client(url, levels, cache, configuration_file)
reports_path.mkdir(parents=True, exist_ok=True)
print(f"fetching script '{key}' from server {client.url} with levels {client.levels}")
# the dynamic of the loop and the 'tasks' list below is this:
# - includes are replaced by the corresponding script items, which are in turn replaced by Task objects (if they're not includes themselves)
# - the index is only incremented after that final Task transformation
tasks = [{INCLUDE: key}]
i = 0
while i < len(tasks):
if isinstance(tasks[i], str): # normalizing a command string into its dictionary counterpart
tasks[i] = {COMMAND: tasks[i], VARIANT: VARIANT_DEFAULT}
if not isinstance(tasks[i], dict):
raise ValueError(f"malformed task, must be a string or a dictionary: {json.dumps(tasks[i])}")
if (keys := tasks[i].keys()) == {INCLUDE}:
if not (value := client.get(key := tasks[i].get(INCLUDE))):
raise LookupError(f"key '{key}' not found")
if not isinstance(script := yaml.load(value, Loader=yaml.BaseLoader), list):
raise ValueError(f"key '{key}' is not a proper yaml or json list")
tasks[i:i + 1] = script # replaces the include by the script's items (see https://docs.python.org/3/glossary.html#term-slice)
continue
# creating an Entry object first
if keys == {COMMAND, VARIANT}:
entry = Command(tasks[i].get(COMMAND), tasks[i].get(VARIANT))
elif keys == {KEY, TARGET}:
entry = KeyToTarget(tasks[i].get(KEY), tasks[i].get(TARGET))
elif keys == {CONTENT, TARGET}:
entry = ContentToTarget(tasks[i].get(CONTENT), tasks[i].get(TARGET))
else:
raise ValueError(f"malformed task, unknown keys: {json.dumps(tasks[i])}")
# computing status
if any(task.entry == entry for task in tasks[:i]):
status = Status.IGNORED
elif (report := reports_path.joinpath(entry.md5)).exists():
status = json.loads(report.read_text(encoding=zebr0.ENCODING)).get(STATUS)
else:
status = Status.PENDING
# creating the Task and moving to the next item
print(f"{status}: {entry.json}")
tasks[i] = Task(entry, status)
i += 1
if dry: # a dry run stops here
return
print()
# filtering the tasks that are going to be executed
if not (tasks := [task for task in tasks if task.status in [Status.PENDING, Status.FAILURE]]):
print("no pending or failed task to run")
return
for task in tasks:
print("executing:", task.entry.json)
task.execute(attempts=attempts, pause=pause, client=client)
task.write_report(reports_path)
if task.status == Status.SUCCESS:
print("success!")
else:
print("failed! output:", json.dumps(task.output, indent=2))
raise ChildProcessError()
def log(reports_path: Path = REPORTS_PATH_DEFAULT, md5: str = None, **_) -> None:
if md5:
print(reports_path.joinpath(md5).read_text(encoding=zebr0.ENCODING))
return
if not reports_path.exists() or not (files := [path for path in reports_path.iterdir() if path.is_file()]):
print("no report found")
return
for file in sorted(files, key=(get_mtime := lambda path: path.stat().st_mtime)):
report = json.loads(file.read_text(encoding=zebr0.ENCODING))
print(
file.name,
datetime.datetime.fromtimestamp(get_mtime(file)).strftime("%c"),
report.get("status"),
json.dumps(report.get("entry"))
)
def main(args: Optional[List[str]] = None) -> None:
argparser = zebr0.build_argument_parser(description="Minimalist local deployment based on zebr0 key-value system.")
argparser.add_argument("-r", "--reports-path", type=Path, default=REPORTS_PATH_DEFAULT, help=f"path to the reports' directory, defaults to {str(REPORTS_PATH_DEFAULT)}", metavar="<path>")
subparsers = argparser.add_subparsers()
run_parser = subparsers.add_parser("run", description="", help="")
run_parser.add_argument("key", nargs="?", default=KEY_DEFAULT, help=f"the script's key, defaults to '{KEY_DEFAULT}'")
run_parser.add_argument("--attempts", type=int, default=ATTEMPTS_DEFAULT, help=f"maximum number of attempts before reporting a failure, defaults to {ATTEMPTS_DEFAULT}", metavar="<value>")
run_parser.add_argument("--pause", type=float, default=PAUSE_DEFAULT, help=f"delay in seconds between two attempts, defaults to {PAUSE_DEFAULT}", metavar="<value>")
run_parser.add_argument("--dry", action="store_true", help="performs a dry run, ie. will stop before executing the tasks")
run_parser.set_defaults(command=run)
log_parser = subparsers.add_parser("log", description="", help="")
log_parser.add_argument("md5", nargs="?", help="")
log_parser.set_defaults(command=log)
args = argparser.parse_args(args)
args.command(**vars(args)) | zebr0-script | /zebr0-script-0.11.0.tar.gz/zebr0-script-0.11.0/zebr0_script/__init__.py | __init__.py |
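# An example of the script format understood by run() above; entries are plain
# command strings or single-purpose dictionaries. The key spellings assume the
# constants in zebr0_script.const map to their lowercase names:
#
#     - "apt update"
#     - command: "systemctl restart nginx"
#       variant: "0"
#     - key: "nginx-conf"
#       target: "/etc/nginx/nginx.conf"
#     - content: "hello"
#       target: "/tmp/hello.txt"
#     - include: "another-script"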
import serial
import json
class Printer:
    def __init__(self, port="com1", baudrate=9600, bytesize=8, stopbits=1, parity='N'):
        self.port = port
        self.baudrate = baudrate
        self.bytesize = bytesize
        self.stopbits = stopbits
        self.parity = parity
    def initTemplate(self, template):
        ser = serial.Serial(port=self.port, baudrate=self.baudrate,
                            bytesize=self.bytesize, parity=self.parity,
                            stopbits=self.stopbits)
        ser.write(template.encode())
        print(template)
        ser.close()
    def printLableWithTemplate(self, template_name="", fn_fields="", params=None):
        params = params or {}
        zpl = '^XA\n^XFR:' + template_name + ".GRF\n"
        field_map = json.loads(fn_fields)
        for (k, v) in field_map.items():
            print(k + ' = ' + v)
            if v == 'QR Code':
                zpl += '^' + k + '^FD ' + json.dumps(params) + '^FS\n'
            else:
                zpl += '^' + k + '^FD' + params[v] + '^FS\n'
        zpl += '^XZ'
        print("----------------------\n")
        print(zpl)
        print("----------------------\n")
        ser = serial.Serial(port=self.port, baudrate=self.baudrate,
                            bytesize=self.bytesize, parity=self.parity,
                            stopbits=self.stopbits)
        ser.write(zpl.encode())
        ser.close()
def printLable(self,zpl_content=""):
ser = serial.Serial(port=self.port, baudrate=self.baudrate,
bytesize=self.bytesize, parity=self.parity,
stopbits=self.stopbits)
ser.write(zpl_content.encode())
ser.close()
if __name__ == '__main__':
    fn_fields = '{"FN1":"sn","FN2":"mac","FN3":"QR Code"}'
    params = {}
    params["sn"] = "A001"
    params["mac"] = "18:05:03:11:22:33"
    template_name = "IR915L"
    template = '^XA\n^DFR:' + template_name + '.GRF^FS\n' \
               '^FO50,50^ADN36,10^FDSN: ^FS\n' \
               '^FO50,100^ADN36,10^FDMAC:^FS\n' \
               '^FO100,50^ADN36,10^FN1^FS\n' \
               '^FO100,100^ADN36,10^FN2^FS\n' \
               '^FO50,150^AD72,20^FN3^FS\n' \
               '^FO50,180^BQN2,3^FN3^FS\n^XZ\n'
    # step 1: open the serial port
    p = Printer(port="com1")
    # step 2: init the template, send it to the printer
    p.initTemplate(template)
    # step 3: print the label, recalling the stored template by name
    p.printLableWithTemplate(template_name=template_name, fn_fields=fn_fields, params=params)
"""
^XA
^DFR:ECOER915.GRF^FS
^FO60,35
^GB1028,686,6,,2^FS
^FO60,160
^GB1028,480,6^FS
^FO280,60
^ADN,90, 24^FDEcoer Smart IoT Gateway^FS
^FO600,220
^ADN,18,10^FDContains FCCID:^FS
^FO800,220
^ADN,18,10^FN0^FS
^FO100,240
^ADN,36,20^FDModle:^FS
^FO270,240
^ADN,36,20^FN1^FS
^FO100,300
^ADN,36,20^FDP/N:^FS
^FO270,300
^ADN,36,20^FN2^FS
^FO100,360
^ADN,36,20^FDS/N:^FS
^FO270,360
^ADN,36,20^FN3^FS
^FO100,420
^ADN,36,20^FDIMEI:^FS
^FO270,420
^ADN,36,20^FN4^FS
^FO100,480
^ADN,36,20^FDICCID:^FS
^FO270,480
^ADN,36,20^FN5^FS
^FO100,650
^ADN,30,20^FDDate:^FS
^FO270,650
^ADN,30,20^FN10^FS
^FO780,280
^BQN,2,10^FN20^FS
^XZ
^XA
^XFR:ECOER915.GRF
^FN20^FD {"mac": "18:05:03:11:22:33", "sn": "A001"}^FS
^FN10^FD07.2017^FS
^FN0^FDQIPELS61-US^FS
^FN1^FDEG910L^FS
^FN2^FDFS71-S-ESIM^FS
^FN3^FDEG9101721012345^FS
^FN4^FD89011703278101364837^FS
^FN5^FD3278101364837^FS
^XZ
""" | zebra-printer | /zebra_printer-1.0.0.tar.gz/zebra_printer-1.0.0/zebra_print.py | zebra_print.py |
****************
Supported boards
****************
Check https://www.zebra.com/us/en/support-downloads/software/developer-tools/scanner-sdk-for-linux.html
******************
Known issues
******************
MS4717: CMD_DEVICE_SWITCH_HOST_MODE does not work
**********
Installing
**********
It's working on Ubuntu 18.04 with Zebra SDK 4.4, which you can download from https://www.zebra.com/de/de/support-downloads/software/developer-tools/scanner-sdk-for-linux.html
You can easily install zebra_scanner with pip:
.. code-block:: sh
sudo apt-get install libboost-dev libboost-python-dev libpugixml-dev
sudo pip3 install pybind11
sudo pip3 install . # in zebra-scanner directory
*****************
A minimal example
*****************
.. code-block:: python
import pprint
import time
from zebra_scanner import CoreScanner
pp = pprint.PrettyPrinter(indent=4)
scanners = []
cs = CoreScanner()
@cs.on_scanner_added
def on_scanner_added(scanner):
print("New scanner found:")
pp.pprint(scanner.__dict__)
scanners.append(scanner)
scanner.pull_trigger()
scanner.fetch_attributes()
for id, attribute in scanner.attributes.items():
if id<10:
pp.pprint({
"id": id,
"datatype": attribute.datatype,
"value": attribute.value,
"permission": attribute.permission
})
@scanner.on_barcode
def on_barcode(barcode):
print("Scanned:")
print(barcode.code, barcode.type)
@cs.on_scanner_removed
def on_scanner_removed(scanner):
print("Scanner removed:")
scanner.release_trigger()
scanners.remove(scanner)
pp.pprint(scanner.__dict__)
while True:
time.sleep(0.1)
        # do nothing while the scanner is reading in continuous mode
*******************
Running the example
*******************
.. code-block:: sh
~/Development/zebra-scanner/examples$ python test.py
New scanner found:
{ 'DoM': '10Mar18',
'GUID': 'AFF531D4821A3E4BB2127A380DA81FB0',
'PID': '1900',
'VID': '05e0',
'firwmare': 'PAABLS00-005-R05',
'modelnumber': 'PL-3307-B100R',
'scannerID': '1',
'serialnumber': '00000000K10U532B',
'type': 'SNAPI'}
{ 'datatype': 'F', 'id': 0, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 1, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 2, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 3, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 4, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 5, 'permission': 7, 'value': False}
{ 'datatype': 'F', 'id': 6, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 7, 'permission': 7, 'value': False}
{ 'datatype': 'F', 'id': 8, 'permission': 7, 'value': True}
{ 'datatype': 'F', 'id': 9, 'permission': 7, 'value': False}
Scanned:
('Hello World', '3')
Scanned:
('00140092390052832143', '15')
Scanned:
('31039999993000000072\x1d', '15')
Scanned:
('01540092393881021000017500861331', '15')
Scanned:
('00140092390052832143', '15')
^CScanner removed:
{ 'DoM': '10Mar18',
'GUID': 'AFF531D4821A3E4BB2127A380DA81FB0',
'PID': '1900',
'VID': '05e0',
'firwmare': 'PAABLS00-005-R05',
'modelnumber': 'PL-3307-B100R',
'scannerID': '1',
'serialnumber': '00000000K10U532B',
'type': 'SNAPI'}
| zebra-scanner | /zebra-scanner-0.2.5.tar.gz/zebra-scanner-0.2.5/README.rst | README.rst |
# Copyright (c) 2011-2020 Ben Croston
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os.path
import sys
if sys.platform.lower().startswith('win'):
IS_WINDOWS = True
import win32print
else:
IS_WINDOWS = False
import subprocess
class Zebra:
"""A class to communicate with (Zebra) label printers"""
def __init__(self, queue=None):
"""queue - name of the printer queue (optional)"""
self.queue = queue
def _output_unix(self, commands):
if self.queue == 'zebra_python_unittest':
p = subprocess.Popen(['cat','-'], stdin=subprocess.PIPE)
else:
p = subprocess.Popen(['lpr','-P{}'.format(self.queue),'-oraw'], stdin=subprocess.PIPE)
p.communicate(commands)
p.stdin.close()
def _output_win(self, commands):
if self.queue == 'zebra_python_unittest':
print(commands)
return
hPrinter = win32print.OpenPrinter(self.queue)
try:
hJob = win32print.StartDocPrinter(hPrinter, 1, ('Label',None,'RAW'))
try:
win32print.StartPagePrinter(hPrinter)
win32print.WritePrinter(hPrinter, commands)
win32print.EndPagePrinter(hPrinter)
finally:
win32print.EndDocPrinter(hPrinter)
finally:
win32print.ClosePrinter(hPrinter)
def output(self, commands, encoding='cp437'):
"""Send raw commands to the label printer
commands - commands to send to the printer. Converted to a byte string if necessary.
encoding - Encoding used if 'commands' is not a byte string
"""
assert self.queue is not None
if type(commands) != bytes:
commands = str(commands).encode(encoding=encoding)
if IS_WINDOWS:
self._output_win(commands)
else:
self._output_unix(commands)
def print_config_label(self):
"""
Send an EPL2 command to print label(s) with current config settings
"""
self.output('\nU\n')
def _getqueues_unix(self):
queues = []
try:
output = subprocess.check_output(['lpstat','-p'], universal_newlines=True)
except subprocess.CalledProcessError:
return []
for line in output.split('\n'):
if line.startswith('printer'):
queues.append(line.split(' ')[1])
return queues
def _getqueues_win(self):
printers = []
for (a,b,name,d) in win32print.EnumPrinters(win32print.PRINTER_ENUM_LOCAL):
printers.append(name)
return printers
def getqueues(self):
"""Returns a list of printer queues on local machine"""
if IS_WINDOWS:
return self._getqueues_win()
else:
return self._getqueues_unix()
def setqueue(self, queue):
"""Set the printer queue"""
self.queue = queue
def setup(self, direct_thermal=None, label_height=None, label_width=None):
"""Set up the label printer using EPL2. Parameters are not set if they are None.
Not necessary if using AutoSense (hold feed button while powering on)
direct_thermal - True if using direct thermal labels
label_height - tuple (label height, label gap) in dots
label_width - in dots
"""
commands = '\n'
if direct_thermal:
commands += 'OD\n'
if label_height:
commands += 'Q%s,%s\n'%(label_height[0],label_height[1])
if label_width:
commands += 'q%s\n'%label_width
self.output(commands)
def reset_default(self):
"""Reset the printer to factory settings using EPL2"""
self.output('\n^default\n')
def reset(self):
"""Resets the printer using EPL2 - equivalent to switching off/on"""
self.output('\n^@\n')
def autosense(self):
"""Run AutoSense by sending an EPL2 command
Get the printer to detect label and gap length and set the sensor levels
"""
self.output('\nxa\n')
def store_graphic(self, name, filename):
"""Store a 1 bit PCX file on the label printer, using EPL2.
name - name to be used on printer
filename - local filename
"""
assert filename.lower().endswith('.pcx')
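        # GK deletes any graphic already stored under this name (EPL2);
        # GM then announces the new graphic's name and size before the raw data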
commands = '\nGK"%s"\n'%name
commands += 'GK"%s"\n'%name
size = os.path.getsize(filename)
commands += 'GM"%s"%s\n'%(name,size)
self.output(commands)
        with open(filename, 'rb') as f:
            self.output(f.read())
def print_graphic(self, x, y, width, length, data, qty):
"""Print a label from 1 bit data, using EPL2
x,y - top left coordinates of the image, in dots
width - width of image, in dots. Must be a multiple of 8.
length - length of image, in dots
data - raw graphical data, in bytes
qty - number of labels to print
"""
assert type(data) == bytes
assert width % 8 == 0 # make sure width is a multiple of 8
assert (width//8) * length == len(data)
commands = b"\nN\nGW%d,%d,%d,%d,%s\nP%d\n"%(x, y, width//8, length, data, qty)
self.output(commands)
if __name__ == '__main__':
z = Zebra()
print('Printer queues found:',z.getqueues())
z.setqueue('zebra_python_unittest')
z.setup(direct_thermal=True, label_height=(406,32), label_width=609) # 3" x 2" direct thermal label
z.store_graphic('logo','logo.pcx')
label = """
N
GG419,40,"logo"
A40,80,0,4,1,1,N,"Tangerine Duck 4.4%"
A40,198,0,3,1,1,N,"Duty paid on 39.9l"
A40,240,0,3,1,1,N,"Gyle: 127 Best Before: 16/09/2011"
A40,320,0,4,1,1,N,"Pump & Truncheon"
P1
"""
z.output(label) | zebra | /zebra-0.1.0-py3-none-any.whl/zebra.py | zebra.py |
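    # print_graphic usage sketch (illustrative pattern, not tied to the label
    # above): an 8x8-dot solid black square at the label origin, one copy.
    # data must be (width // 8) * length bytes, hence 8 bytes of 0xff here.
    z.print_graphic(0, 0, 8, 8, b'\xff' * 8, 1)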
import requests
import logging.handlers
from typing import Literal
WIDGET_MODES = Literal["live_chart", "boolean", "knob", "area_chart"]
WIDGET_LABELS = Literal["hourly", "daily", "instant"]
DEVICE_STATUS = Literal["active", "inactive"]
ELEMENT_TYPE = Literal["device", "sensor", "widget"]
class ZebraClient:
def __init__(self, sensor_id: str, server_address: str, schema: str = "http"):
self.sens_id = sensor_id
self.server = server_address
self.schema = schema
self.sens_url = f"{schema}://{server_address}/log/set_data/"
    def send(self, value):
        try:
            requests.post(
                url=self.sens_url,
                data={
                    "value": value,
                    "sens_id": self.sens_id
                }
            )
        except requests.RequestException:
            # telemetry is best-effort: ignore network errors, let other errors surface
            pass
def get_handler(self, name=None, level=4):
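        # level is a numeric logging level; the default of 4 sits below
        # logging.DEBUG (10), so every record is forwarded unless overridden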
logger = logging.getLogger(name=name)
http_handler = logging.handlers.HTTPHandler(
self.server,
'/log/set/',
method='POST'
)
logger.setLevel(level)
logger.addHandler(http_handler)
return logger
    def create_widget(self, mode: WIDGET_MODES, labels: WIDGET_LABELS, dataset_label: str):
        data = {
            "mode": mode,
            "labels": labels,
            "dataset_label": dataset_label
        }
        return requests.post(f"{self.schema}://{self.server}/api/create_widget/",
                             data=data).json()["id"]
    def create_sensor(self, name: str, chart_id: str, widget_id):
        data = {
            "name": name,
            "chart_id": chart_id,
            "widget_id": widget_id
        }
        return requests.post(f"{self.schema}://{self.server}/api/create_sensor/",
                             data=data).json()["id"]
    def create_device(self, name: str, address: str, status: DEVICE_STATUS, sensors: list):
        data = {
            "name": name,
            "address": address,
            "status": status,
            "sensors[]": sensors
        }
        return requests.post(f"{self.schema}://{self.server}/api/create_device/",
                             data=data).json()["id"]
    def delete(self, element_type: ELEMENT_TYPE, element_id: int):
        data = {
            "id": element_id
        }
        if element_type == "device":
            url = f"{self.schema}://{self.server}/api/delete_device/"
        elif element_type == "sensor":
            url = f"{self.schema}://{self.server}/api/delete_sensor/"
        else:
            url = f"{self.schema}://{self.server}/api/delete_widget/"
        return requests.post(url, data=data).json()
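if __name__ == "__main__":
    # Minimal usage sketch. The host name, widget parameters and sensor
    # names below are illustrative placeholders, not part of the package.
    client = ZebraClient(sensor_id="1", server_address="monitor.example.com")
    widget_id = client.create_widget(mode="live_chart", labels="instant",
                                     dataset_label="temperature")
    sensor_id = client.create_sensor(name="temp-probe", chart_id="chart-1",
                                     widget_id=widget_id)
    client.send(21.5)  # best-effort POST to /log/set_data/
    log = client.get_handler(name="zebracat-demo")
    log.info("sensor online")  # forwarded to /log/set/ via HTTPHandler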
#!/bin/sh
# Unicorn Engine
# By Nguyen Anh Quynh <[email protected]>, 2015
usage() {
cat 1>&2 <<EOF
make.sh - The build script for unicorn engine
USAGE:
$ ./make.sh [OPTIONS]
OPTIONS:
Build the project
asan Build for ASan
install Install the project
uninstall Uninstall the project
macos-universal Build universal binaries on macOS
macos-universal-no Build non-universal binaries that includes only 64-bit code on macOS
cross-win32 Cross-compile Windows 32-bit binary with MinGW
cross-win64 Cross-compile Windows 64-bit binary with MinGW
cross-android_arm Cross-compile for Android Arm
cross-android_arm64 Cross-compile for Android Arm64
linux32 Cross-compile Unicorn on 64-bit Linux to target 32-bit binary
msvc_update_genfiles Generate files for MSVC projects
EOF
}
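# Example invocations:
#   ./make.sh                 # default build
#   ./make.sh cross-win64     # cross-compile 64-bit Windows binaries with MinGW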
MAKE_JOBS=$((MAKE_JOBS+0))
[ ${MAKE_JOBS} -lt 1 ] && \
MAKE_JOBS=4
# build for ASAN
asan() {
env UNICORN_DEBUG=yes UNICORN_ASAN=yes "${MAKE}" V=1
}
build_cross() {
[ "$UNAME" = Darwin ] && LIBARCHS="i386 x86_64"
CROSS=$1
CC=$CROSS-gcc \
AR=$CROSS-gcc-ar \
RANLIB=$CROSS-gcc-ranlib \
${MAKE}
}
build_linux32() {
PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig" \
CFLAGS=-m32 \
LDFLAGS=-m32 \
LDFLAGS_STATIC=-m32 \
LIBRARY_PATH="/usr/lib/i386-linux-gnu" \
UNICORN_QEMU_FLAGS="--cpu=i386 ${UNICORN_QEMU_FLAGS}" \
${MAKE}
}
install() {
# Mac OSX needs to find the right directory for pkgconfig
if [ "$UNAME" = Darwin ]; then
# we are going to install into /usr/local, so remove old installs under /usr
rm -rf /usr/lib/libunicorn*
rm -rf /usr/include/unicorn
# install into /usr/local
PREFIX=${PREFIX:-/usr/local}
${MAKE} install
else # not OSX
test -d /usr/lib64 && LIBDIRARCH=lib64
${MAKE} install
fi
}
uninstall() {
# Mac OSX needs to find the right directory for pkgconfig
if [ "$UNAME" = "Darwin" ]; then
# find the directory automatically, so we can support both Macport & Brew
PKGCFGDIR="$(pkg-config --variable pc_path pkg-config | cut -d ':' -f 1)"
PREFIX=${PREFIX:-/usr/local}
${MAKE} uninstall
else # not OSX
test -d /usr/lib64 && LIBDIRARCH=lib64
${MAKE} uninstall
fi
}
msvc_update_genfiles() {
${MAKE}
cp qemu/qapi-types.h msvc/unicorn/qapi-types.h
cp qemu/qapi-visit.h msvc/unicorn/qapi-visit.h
cp qemu/qapi-types.c msvc/unicorn/qapi-types.c
cp qemu/qapi-visit.c msvc/unicorn/qapi-visit.c
cp qemu/config-host.h msvc/unicorn/config-host.h
cp qemu/aarch64-softmmu/config-target.h msvc/unicorn/aarch64-softmmu/config-target.h
cp qemu/aarch64eb-softmmu/config-target.h msvc/unicorn/aarch64eb-softmmu/config-target.h
cp qemu/arm-softmmu/config-target.h msvc/unicorn/arm-softmmu/config-target.h
cp qemu/armeb-softmmu/config-target.h msvc/unicorn/armeb-softmmu/config-target.h
cp qemu/m68k-softmmu/config-target.h msvc/unicorn/m68k-softmmu/config-target.h
cp qemu/mips64el-softmmu/config-target.h msvc/unicorn/mips64el-softmmu/config-target.h
cp qemu/mips64-softmmu/config-target.h msvc/unicorn/mips64-softmmu/config-target.h
cp qemu/mipsel-softmmu/config-target.h msvc/unicorn/mipsel-softmmu/config-target.h
cp qemu/mips-softmmu/config-target.h msvc/unicorn/mips-softmmu/config-target.h
cp qemu/sparc64-softmmu/config-target.h msvc/unicorn/sparc64-softmmu/config-target.h
cp qemu/sparc-softmmu/config-target.h msvc/unicorn/sparc-softmmu/config-target.h
cp qemu/x86_64-softmmu/config-target.h msvc/unicorn/x86_64-softmmu/config-target.h
}
UNAME=${UNAME:-$(uname)}
MAKE=${MAKE:-make}
#[ -n "${MAKE_JOBS}" ] && MAKE="$MAKE -j${MAKE_JOBS}"
if [ "$UNAME" = SunOS ]; then
MAKE=gmake
INSTALL_BIN=ginstall
CC=gcc
fi
if echo "$UNAME" | grep -q BSD; then
MAKE=gmake
PREFIX=${PREFIX:-/usr/local}
fi
export CC INSTALL_BIN PREFIX PKGCFGDIR LIBDIRARCH LIBARCHS CFLAGS LDFLAGS
case "$1" in
"" ) ${MAKE};;
"asan" ) asan;;
"install" ) install;;
"uninstall" ) uninstall;;
"macos-universal" ) MACOS_UNIVERSAL=yes ${MAKE};;
"macos-universal-no" ) MACOS_UNIVERSAL=no ${MAKE};;
"cross-win32" ) build_cross i686-w64-mingw32;;
"cross-win64" ) build_cross x86_64-w64-mingw32;;
"cross-android_arm" ) CROSS=arm-linux-androideabi ${MAKE};;
"cross-android_arm64" ) CROSS=aarch64-linux-android ${MAKE};;
"linux32" ) build_linux32;;
"msvc_update_genfiles" ) msvc_update_genfiles;;
* )
usage;
exit 1;;
esac | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/make.sh | make.sh |
Zebracorn
==============
Zebracorn is a lightweight, multi-platform, multi-architecture CPU emulator framework
based on a fork of [Unicorn](http://www.unicorn-engine.org).
The API is identical to Unicorn, but Zebracorn offers a few additional features:
- Hooks:
- RDTSC instruction
- Block Translation
- Callback after N blocks executed
- Execution Info:
- Block count executed
- Instruction count executed
- Instruction Meta:
- Tiny Code Generator (TCG) representation of each instruction
- Unicorn AFL integration
These APIs are only supported through the Python bindings.
The Zebracorn engine is primarily made available to support functionality in the [zelos binary emulator](https://github.com/zeropointdynamics/zelos).
Installation
------------
Zebracorn is distributed as a Python `pip` package. Other bindings are not supported. To install the Python package:
```bash
$ pip install zebracorn
```
Python packages are available for Windows, Linux and MacOS.
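A minimal emulation sketch using the Python bindings. The API matches Unicorn's, so the standard `unicorn` module layout is assumed here; adjust the imports if the fork installs under a different module name.
```python
from unicorn import Uc, UC_ARCH_X86, UC_MODE_32
from unicorn.x86_const import UC_X86_REG_ECX

CODE = b"\x41"  # INC ecx

mu = Uc(UC_ARCH_X86, UC_MODE_32)
mu.mem_map(0x1000, 0x1000)                # map 4 KiB for the code
mu.mem_write(0x1000, CODE)
mu.reg_write(UC_X86_REG_ECX, 1)
mu.emu_start(0x1000, 0x1000 + len(CODE))  # run the single instruction
print(mu.reg_read(UC_X86_REG_ECX))        # prints 2
```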
License
-------
Unicorn and Qemu are released under the [GPL license](COPYING).
| zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/README.md | README.md |
Unicorn-Engine MSVC Native Port Notes
Zak Escano - January 2017
These notes are to help myself and others with the upkeep of the MSVC native port
of unicorn-engine.
:: Command line build instructions
msbuild -m -p:Configuration=Release -p:Platform=Win32
msbuild -m -p:Configuration=Release -p:Platform=x64
Then bundle and release the folder "distro".
:: Build settings
Visual Studio Version: Visual Studio 2017 v15.9.15
Platform Toolset: Default. Known to work with the 8.1 SDK
Character Set: Use Multi-Byte Character Set
Runtime Library Debug: Multi-threaded Debug (/MTd)
Runtime Library Release: Multi-threaded (/MT)
Precompiled Header: Not Using Precompiled Headers
Additional Options: /wd4018 /wd4244 /wd4267
:: Build advice
- If you see warnings about spectre-mitigated libraries and then some strange
errors, the errors may be related to the spectre libraries. Install them.
(via the visual studio installation manager)
- The "platform default" target SDK may not actually be installed for you. Try
installing the Windows 8.1 SDK via the visual studio installation manager.
:: Changes porting unicorn from GNU/GCC to MSVC.
There were many many many changes to make this also build in MSVC
while still retaining the ability to build in GNU/GCC.
Most were due to either GCC-specific features or MSVC's lack of decent
standard C support, especially in VS2012. Some were also for
POSIX/platform-specific things that are not present in Windows.
Some of the more common changes were:
* Compatibility for GCC style __attribute__'s.
* Change GCC switch case ranges to specify every case individually, ie:
"case 1 ... 3:" changes to "case 1: case 2: case 3:"
* Change GCC struct member initialisation to the more generic
initialisation of all members in order, ie:
{ .value = 1, .stuff = 2 } to { 1, 2 }
* Remove GCC style macro return values which MSVC does not support, ie:
#define RETURN_ONE(x) ({ some stuff; (void)1; })
* Compatibility for posix headers that are missing in windows, ie:
stdbool.h, stdint.h, sys/time.h, unistd.h
:: CPU specific libraries
The gnu/gcc way of building the qemu portion of unicorn-engine involves makefile magic
that builds the same set of sourcecode files multiple times. They are built once for each
supported CPU type and force "#include" a CPU specific header file to re-"#define"
function and variable names that would otherwise be the same for each build.
These multiple builds of the same files are then all linked together to form
the unicorn library.
As an example, when building for the "x86_64" CPU type the generated header file
"x86_64.h" is force-included, and it contains a bunch of defines such as:
#define phys_mem_clean phys_mem_clean_x86_64
So you can see that it appends the CPU type to each name in order
to keep the names unique across the multiple builds.
The way I handle this in MSVC is to build a separate CPU-specific library, containing
this set of repeatedly used source code files, for each supported CPU type.
These CPU-specific libraries are then linked together to build the unicorn library.
For each supported CPU type:
* Each CPU-specific lib has a "forced include" file specified at:
Configuration Properties -> C/C++ -> Advanced -> Forced Include File
so for x86-64 this is the generated file "x86_64.h".
:: Other things
* The Qemu code for GNU/GCC seems to rely on __i386__ or __x86_64__ defined if
the host is 32bit or 64bit respectively.
So when building 32bit libs in msvc we define __i386__.
And when building 64bit libs in msvc we define __x86_64__.
* There is a tcg-target.c for each target that is included into tcg.c.
This is done using "#include tcg-target.c";
it is NOT built separately as part of the project's *.c files.
:: Info from makefiles
This info is compiled here together to help with deciding on the build settings to use.
It may or may not be of use to anyone in the future once this all builds ok :)
QEMU_INCLUDES=-I$(SRC_PATH)/tcg -I$(SRC_PATH)/tcg/$(ARCH) -I. -I$(SRC_PATH) -I$(SRC_PATH)/include
QEMU_CFLAGS=-m32 -D__USE_MINGW_ANSI_STDIO=1 -DWIN32_LEAN_AND_MEAN -DWINVER=0x501 -D_GNU_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -Wstrict-prototypes -Wredundant-decls -Wall -Wundef -Wwrite-strings -Wmissing-prototypes -fno-strict-aliasing -fno-common -DUNICORN_HAS_X86 -DUNICORN_HAS_ARM -DUNICORN_HAS_M68K -DUNICORN_HAS_ARM64 -DUNICORN_HAS_MIPS -DUNICORN_HAS_MIPSEL -DUNICORN_HAS_MIPS64 -DUNICORN_HAS_MIPS64EL -DUNICORN_HAS_SPARC -fPIC
QEMU_CFLAGS += -I.. -I$(SRC_PATH)/target-$(TARGET_BASE_ARCH) -DNEED_CPU_H
QEMU_CFLAGS+=-I$(SRC_PATH)/include
QEMU_CFLAGS+=-include x86_64.h
includes
-I$(SRC_PATH)/tcg
-I$(SRC_PATH)/tcg/$(ARCH)
-I.
-I$(SRC_PATH)
-I$(SRC_PATH)/include
-I..
-I$(SRC_PATH)/target-$(TARGET_BASE_ARCH)
-I$(SRC_PATH)/include
-include x86_64.h
defines
-D__USE_MINGW_ANSI_STDIO=1
-DWIN32_LEAN_AND_MEAN
-DWINVER=0x501
-D_GNU_SOURCE
-D_FILE_OFFSET_BITS=64
-D_LARGEFILE_SOURCE
-DNEED_CPU_H
-DUNICORN_HAS_X86
-DUNICORN_HAS_ARM
-DUNICORN_HAS_M68K
-DUNICORN_HAS_ARM64
-DUNICORN_HAS_MIPS
-DUNICORN_HAS_MIPSEL
-DUNICORN_HAS_MIPS64
-DUNICORN_HAS_MIPS64EL
-DUNICORN_HAS_SPARC
qemu/config-host.mak
extra_cflags=-m32 -DUNICORN_HAS_X86 -DUNICORN_HAS_ARM -DUNICORN_HAS_M68K -DUNICORN_HAS_ARM64 -DUNICORN_HAS_MIPS -DUNICORN_HAS_MIPSEL -DUNICORN_HAS_MIPS64 -DUNICORN_HAS_MIPS64EL -DUNICORN_HAS_SPARC -fPIC
extra_ldflags=
libs_softmmu=
ARCH=i386
CONFIG_WIN32=y
CONFIG_FILEVERSION=2,2,1,0
CONFIG_PRODUCTVERSION=2,2,1,0
VERSION=2.2.1
PKGVERSION=
SRC_PATH=/f/GitHub/unicorn/qemu
TARGET_DIRS=x86_64-softmmu arm-softmmu m68k-softmmu aarch64-softmmu mips-softmmu mipsel-softmmu mips64-softmmu mips64el-softmmu sparc-softmmu sparc64-softmmu
GLIB_CFLAGS=-pthread -mms-bitfields -IC:/msys64/mingw32/include/glib-2.0 -IC:/msys64/mingw32/lib/glib-2.0/include
CONFIG_ZERO_MALLOC=y
CONFIG_CPUID_H=y
CONFIG_THREAD_SETNAME_BYTHREAD=y
CONFIG_PTHREAD_SETNAME_NP=y
CFLAGS=-pthread -mms-bitfields -IC:/msys64/mingw32/include/glib-2.0 -IC:/msys64/mingw32/lib/glib-2.0/include -g
QEMU_CFLAGS=-m32 -D__USE_MINGW_ANSI_STDIO=1 -DWIN32_LEAN_AND_MEAN -DWINVER=0x501 -D_GNU_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -Wstrict-prototypes -Wredundant-decls -Wall -Wundef -Wwrite-strings -Wmissing-prototypes -fno-strict-aliasing -fno-common -DUNICORN_HAS_X86 -DUNICORN_HAS_ARM -DUNICORN_HAS_M68K -DUNICORN_HAS_ARM64 -DUNICORN_HAS_MIPS -DUNICORN_HAS_MIPSEL -DUNICORN_HAS_MIPS64 -DUNICORN_HAS_MIPS64EL -DUNICORN_HAS_SPARC -fPIC
QEMU_INCLUDES=-I$(SRC_PATH)/tcg -I$(SRC_PATH)/tcg/$(ARCH) -I. -I$(SRC_PATH) -I$(SRC_PATH)/include
LDFLAGS=-Wl,--nxcompat -Wl,--no-seh -Wl,--dynamicbase -Wl,--warn-common -m32 -g
LIBS+=-LC:/msys64/mingw32/lib -lgthread-2.0 -pthread -lglib-2.0 -lintl -lwinmm -lws2_32 -liphlpapi -lz
qemu/x86_64-softmmu/Makefile
QEMU_CFLAGS += -I.. -I$(SRC_PATH)/target-$(TARGET_BASE_ARCH) -DNEED_CPU_H
QEMU_CFLAGS+=-I$(SRC_PATH)/include
qemu/x86_64-softmmu/config-target.mak
TARGET_X86_64=y
TARGET_NAME=x86_64
TARGET_BASE_ARCH=i386
TARGET_ABI_DIR=x86_64
CONFIG_SOFTMMU=y
LDFLAGS+=
QEMU_CFLAGS+=
QEMU_CFLAGS+=-include x86_64.h
qemu/x86_64-softmmu/config-devices.mak
CONFIG_VGA=y
CONFIG_QXL=$(CONFIG_SPICE)
CONFIG_VGA_PCI=y
CONFIG_VGA_ISA=y
CONFIG_VGA_CIRRUS=y
CONFIG_VMWARE_VGA=y
CONFIG_VMMOUSE=y
CONFIG_SERIAL=y
CONFIG_PARALLEL=y
CONFIG_I8254=y
CONFIG_PCSPK=y
CONFIG_PCKBD=y
CONFIG_FDC=y
CONFIG_ACPI=y
CONFIG_APM=y
CONFIG_I8257=y
CONFIG_IDE_ISA=y
CONFIG_IDE_PIIX=y
CONFIG_NE2000_ISA=y
CONFIG_PIIX_PCI=y
CONFIG_HPET=y
CONFIG_APPLESMC=y
CONFIG_I8259=y
CONFIG_PFLASH_CFI01=y
CONFIG_TPM_TIS=$(CONFIG_TPM)
CONFIG_PCI_HOTPLUG_OLD=y
CONFIG_MC146818RTC=y
CONFIG_PAM=y
CONFIG_PCI_PIIX=y
CONFIG_WDT_IB700=y
CONFIG_XEN_I386=$(CONFIG_XEN)
CONFIG_ISA_DEBUG=y
CONFIG_ISA_TESTDEV=y
CONFIG_VMPORT=y
CONFIG_SGA=y
CONFIG_LPC_ICH9=y
CONFIG_PCI_Q35=y
CONFIG_APIC=y
CONFIG_IOAPIC=y
CONFIG_ICC_BUS=y
CONFIG_PVPANIC=y
CONFIG_MEM_HOTPLUG=y
CONFIG_PCI=y
CONFIG_VIRTIO_PCI=y
CONFIG_VIRTIO=y
CONFIG_USB_UHCI=y
CONFIG_USB_OHCI=y
CONFIG_USB_EHCI=y
CONFIG_USB_XHCI=y
CONFIG_NE2000_PCI=y
CONFIG_EEPRO100_PCI=y
CONFIG_PCNET_PCI=y
CONFIG_PCNET_COMMON=y
CONFIG_AC97=y
CONFIG_HDA=y
CONFIG_ES1370=y
CONFIG_LSI_SCSI_PCI=y
CONFIG_VMW_PVSCSI_SCSI_PCI=y
CONFIG_MEGASAS_SCSI_PCI=y
CONFIG_RTL8139_PCI=y
CONFIG_E1000_PCI=y
CONFIG_VMXNET3_PCI=y
CONFIG_IDE_CORE=y
CONFIG_IDE_QDEV=y
CONFIG_IDE_PCI=y
CONFIG_AHCI=y
CONFIG_ESP=y
CONFIG_ESP_PCI=y
CONFIG_SERIAL=y
CONFIG_SERIAL_PCI=y
CONFIG_IPACK=y
CONFIG_WDT_IB6300ESB=y
CONFIG_PCI_TESTDEV=y
CONFIG_NVME_PCI=y
CONFIG_SB16=y
CONFIG_ADLIB=y
CONFIG_GUS=y
CONFIG_CS4231A=y
CONFIG_USB_TABLET_WACOM=y
CONFIG_USB_STORAGE_BOT=y
CONFIG_USB_STORAGE_UAS=y
CONFIG_USB_STORAGE_MTP=y
CONFIG_USB_SMARTCARD=y
CONFIG_USB_AUDIO=y
CONFIG_USB_SERIAL=y
CONFIG_USB_NETWORK=y
CONFIG_USB_BLUETOOTH=y | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/msvc/README.TXT | README.TXT |
# syntax: ./header_gen.py <arm|aarch64|x86|name>
import sys
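# Each symbol listed below gets a renaming macro in the generated header,
#     #define <symbol> <symbol>_<arch>
# e.g. "./header_gen.py x86_64" emits "#define phys_mem_clean phys_mem_clean_x86_64",
# so the same QEMU sources can be compiled once per CPU type without the
# symbol names clashing when the per-CPU libraries are linked together.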
symbols = (
'arm_release',
'aarch64_tb_set_jmp_target',
'ppc_tb_set_jmp_target',
'use_idiv_instructions_rt',
'tcg_target_deposit_valid',
'helper_power_down',
'check_exit_request',
'address_space_unregister',
'tb_invalidate_phys_page_fast',
'phys_mem_clean',
'tb_cleanup',
'memory_map',
'memory_map_ptr',
'memory_unmap',
'memory_free',
'free_code_gen_buffer',
'helper_raise_exception',
'tcg_enabled',
'tcg_exec_init',
'memory_register_types',
'cpu_exec_init_all',
'vm_start',
'resume_all_vcpus',
'a15_l2ctlr_read',
'a64_translate_init',
'aa32_generate_debug_exceptions',
'aa64_cacheop_access',
'aa64_daif_access',
'aa64_daif_write',
'aa64_dczid_read',
'aa64_fpcr_read',
'aa64_fpcr_write',
'aa64_fpsr_read',
'aa64_fpsr_write',
'aa64_generate_debug_exceptions',
'aa64_zva_access',
'aarch64_banked_spsr_index',
'aarch64_restore_sp',
'aarch64_save_sp',
'accel_find',
'accel_init_machine',
'accel_type',
'access_with_adjusted_size',
'add128',
'add16_sat',
'add16_usat',
'add192',
'add8_sat',
'add8_usat',
'add_cpreg_to_hashtable',
'add_cpreg_to_list',
'addFloat128Sigs',
'addFloat32Sigs',
'addFloat64Sigs',
'addFloatx80Sigs',
'add_qemu_ldst_label',
'address_space_access_valid',
'address_space_destroy',
'address_space_destroy_dispatch',
'address_space_get_flatview',
'address_space_init',
'address_space_init_dispatch',
'address_space_lookup_region',
'address_space_map',
'address_space_read',
'address_space_rw',
'address_space_translate',
'address_space_translate_for_iotlb',
'address_space_translate_internal',
'address_space_unmap',
'address_space_update_topology',
'address_space_update_topology_pass',
'address_space_write',
'addrrange_contains',
'addrrange_end',
'addrrange_equal',
'addrrange_intersection',
'addrrange_intersects',
'addrrange_make',
'adjust_endianness',
'all_helpers',
'alloc_code_gen_buffer',
'alloc_entry',
'always_true',
'arm1026_initfn',
'arm1136_initfn',
'arm1136_r2_initfn',
'arm1176_initfn',
'arm11mpcore_initfn',
'arm926_initfn',
'arm946_initfn',
'arm_ccnt_enabled',
'arm_cp_read_zero',
'arm_cp_reset_ignore',
'arm_cpu_do_interrupt',
'arm_cpu_exec_interrupt',
'arm_cpu_finalizefn',
'arm_cpu_get_phys_page_debug',
'arm_cpu_handle_mmu_fault',
'arm_cpu_initfn',
'arm_cpu_list',
'cpu_loop_exit',
'arm_cpu_post_init',
'arm_cpu_realizefn',
'arm_cpu_register_gdb_regs_for_features',
'arm_cpu_register_types',
'cpu_resume_from_signal',
'arm_cpus',
'arm_cpu_set_pc',
'arm_cp_write_ignore',
'arm_current_el',
'arm_dc_feature',
'arm_debug_excp_handler',
'arm_debug_target_el',
'arm_el_is_aa64',
'arm_env_get_cpu',
'arm_excp_target_el',
'arm_excp_unmasked',
'arm_feature',
'arm_generate_debug_exceptions',
'gen_intermediate_code',
'gen_intermediate_code_pc',
'arm_gen_test_cc',
'arm_gt_ptimer_cb',
'arm_gt_vtimer_cb',
'arm_handle_psci_call',
'arm_is_psci_call',
'arm_is_secure',
'arm_is_secure_below_el3',
'arm_ldl_code',
'arm_lduw_code',
'arm_log_exception',
'arm_reg_read',
'arm_reg_reset',
'arm_reg_write',
'restore_state_to_opc',
'arm_rmode_to_sf',
'arm_singlestep_active',
'tlb_fill',
'tlb_flush',
'tlb_flush_page',
'tlb_set_page',
'arm_translate_init',
'arm_v7m_class_init',
'arm_v7m_cpu_do_interrupt',
'ats_access',
'ats_write',
'bad_mode_switch',
'bank_number',
'bitmap_zero_extend',
'bp_wp_matches',
'breakpoint_invalidate',
'build_page_bitmap',
'bus_add_child',
'bus_class_init',
'bus_info',
'bus_unparent',
'cache_block_ops_cp_reginfo',
'cache_dirty_status_cp_reginfo',
'cache_test_clean_cp_reginfo',
'call_recip_estimate',
'can_merge',
'capacity_increase',
'ccsidr_read',
'check_ap',
'check_breakpoints',
'check_watchpoints',
'cho',
'clear_bit',
'clz32',
'clz64',
'cmp_flatrange_addr',
'code_gen_alloc',
'commonNaNToFloat128',
'commonNaNToFloat16',
'commonNaNToFloat32',
'commonNaNToFloat64',
'commonNaNToFloatx80',
'compute_abs_deadline',
'cond_name',
'configure_accelerator',
'container_get',
'container_info',
'container_register_types',
'contextidr_write',
'core_log_global_start',
'core_log_global_stop',
'core_memory_listener',
'cortexa15_cp_reginfo',
'cortex_a15_initfn',
'cortexa8_cp_reginfo',
'cortex_a8_initfn',
'cortexa9_cp_reginfo',
'cortex_a9_initfn',
'cortex_m3_initfn',
'count_cpreg',
'countLeadingZeros32',
'countLeadingZeros64',
'cp_access_ok',
'cpacr_write',
'cpreg_field_is_64bit',
'cp_reginfo',
'cpreg_key_compare',
'cpreg_make_keylist',
'cp_reg_reset',
'cpreg_to_kvm_id',
'cpsr_read',
'cpsr_write',
'cptype_valid',
'cpu_abort',
'cpu_arm_exec',
'cpu_arm_gen_code',
'cpu_arm_init',
'cpu_breakpoint_insert',
'cpu_breakpoint_remove',
'cpu_breakpoint_remove_all',
'cpu_breakpoint_remove_by_ref',
'cpu_can_do_io',
'cpu_can_run',
'cpu_class_init',
'cpu_common_class_by_name',
'cpu_common_exec_interrupt',
'cpu_common_get_arch_id',
'cpu_common_get_memory_mapping',
'cpu_common_get_paging_enabled',
'cpu_common_has_work',
'cpu_common_initfn',
'cpu_common_noop',
'cpu_common_parse_features',
'cpu_common_realizefn',
'cpu_common_reset',
'cpu_dump_statistics',
'cpu_exec_init',
'cpu_flush_icache_range',
'cpu_gen_init',
'cpu_get_clock',
'cpu_get_real_ticks',
'cpu_get_tb_cpu_state',
'cpu_handle_debug_exception',
'cpu_handle_guest_debug',
'cpu_inb',
'cpu_inl',
'cpu_interrupt',
'cpu_interrupt_handler',
'cpu_inw',
'cpu_io_recompile',
'cpu_is_stopped',
'cpu_ldl_code',
'cpu_ldub_code',
'cpu_lduw_code',
'cpu_memory_rw_debug',
'cpu_mmu_index',
'cpu_outb',
'cpu_outl',
'cpu_outw',
'cpu_physical_memory_clear_dirty_range',
'cpu_physical_memory_get_clean',
'cpu_physical_memory_get_dirty',
'cpu_physical_memory_get_dirty_flag',
'cpu_physical_memory_is_clean',
'cpu_physical_memory_is_io',
'cpu_physical_memory_map',
'cpu_physical_memory_range_includes_clean',
'cpu_physical_memory_reset_dirty',
'cpu_physical_memory_rw',
'cpu_physical_memory_set_dirty_flag',
'cpu_physical_memory_set_dirty_range',
'cpu_physical_memory_unmap',
'cpu_physical_memory_write_rom',
'cpu_physical_memory_write_rom_internal',
'cpu_register',
'cpu_register_types',
'cpu_restore_state',
'cpu_restore_state_from_tb',
'cpu_single_step',
'cpu_tb_exec',
'cpu_tlb_reset_dirty_all',
'cpu_to_be64',
'cpu_to_le32',
'cpu_to_le64',
'cpu_type_info',
'cpu_unassigned_access',
'cpu_watchpoint_address_matches',
'cpu_watchpoint_insert',
'cpu_watchpoint_remove',
'cpu_watchpoint_remove_all',
'cpu_watchpoint_remove_by_ref',
'crc32c_table',
'create_new_memory_mapping',
'csselr_write',
'cto32',
'ctr_el0_access',
'ctz32',
'ctz64',
'dacr_write',
'dbgbcr_write',
'dbgbvr_write',
'dbgwcr_write',
'dbgwvr_write',
'debug_cp_reginfo',
'debug_frame',
'debug_lpae_cp_reginfo',
'define_arm_cp_regs',
'define_arm_cp_regs_with_opaque',
'define_debug_regs',
'define_one_arm_cp_reg',
'define_one_arm_cp_reg_with_opaque',
'deposit32',
'deposit64',
'deregister_tm_clones',
'device_class_base_init',
'device_class_init',
'device_finalize',
'device_get_realized',
'device_initfn',
'device_post_init',
'device_reset',
'device_set_realized',
'device_type_info',
'disas_arm_insn',
'disas_coproc_insn',
'disas_dsp_insn',
'disas_iwmmxt_insn',
'disas_neon_data_insn',
'disas_neon_ls_insn',
'disas_thumb2_insn',
'disas_thumb_insn',
'disas_vfp_insn',
'disas_vfp_v8_insn',
'do_arm_semihosting',
'do_clz16',
'do_clz8',
'do_constant_folding',
'do_constant_folding_2',
'do_constant_folding_cond',
'do_constant_folding_cond2',
'do_constant_folding_cond_32',
'do_constant_folding_cond_64',
'do_constant_folding_cond_eq',
'do_fcvt_f16_to_f32',
'do_fcvt_f32_to_f16',
'do_ssat',
'do_usad',
'do_usat',
'do_v7m_exception_exit',
'dummy_c15_cp_reginfo',
'dummy_func',
'dummy_section',
'_DYNAMIC',
'_edata',
'_end',
'end_list',
'eq128',
'ErrorClass_lookup',
'error_copy',
'error_exit',
'error_get_class',
'error_get_pretty',
'error_setg_file_open',
'estimateDiv128To64',
'estimateSqrt32',
'excnames',
'excp_is_internal',
'extended_addresses_enabled',
'extended_mpu_ap_bits',
'extract32',
'extract64',
'extractFloat128Exp',
'extractFloat128Frac0',
'extractFloat128Frac1',
'extractFloat128Sign',
'extractFloat16Exp',
'extractFloat16Frac',
'extractFloat16Sign',
'extractFloat32Exp',
'extractFloat32Frac',
'extractFloat32Sign',
'extractFloat64Exp',
'extractFloat64Frac',
'extractFloat64Sign',
'extractFloatx80Exp',
'extractFloatx80Frac',
'extractFloatx80Sign',
'fcse_write',
'find_better_copy',
'find_default_machine',
'find_desc_by_name',
'find_first_bit',
'find_paging_enabled_cpu',
'find_ram_block',
'find_ram_offset',
'find_string',
'find_type',
'_fini',
'flatrange_equal',
'flatview_destroy',
'flatview_init',
'flatview_insert',
'flatview_lookup',
'flatview_ref',
'flatview_simplify',
'flatview_unref',
'float128_add',
'float128_compare',
'float128_compare_internal',
'float128_compare_quiet',
'float128_default_nan',
'float128_div',
'float128_eq',
'float128_eq_quiet',
'float128_is_quiet_nan',
'float128_is_signaling_nan',
'float128_le',
'float128_le_quiet',
'float128_lt',
'float128_lt_quiet',
'float128_maybe_silence_nan',
'float128_mul',
'float128_rem',
'float128_round_to_int',
'float128_scalbn',
'float128_sqrt',
'float128_sub',
'float128ToCommonNaN',
'float128_to_float32',
'float128_to_float64',
'float128_to_floatx80',
'float128_to_int32',
'float128_to_int32_round_to_zero',
'float128_to_int64',
'float128_to_int64_round_to_zero',
'float128_unordered',
'float128_unordered_quiet',
'float16_default_nan',
'float16_is_quiet_nan',
'float16_is_signaling_nan',
'float16_maybe_silence_nan',
'float16ToCommonNaN',
'float16_to_float32',
'float16_to_float64',
'float32_abs',
'float32_add',
'float32_chs',
'float32_compare',
'float32_compare_internal',
'float32_compare_quiet',
'float32_default_nan',
'float32_div',
'float32_eq',
'float32_eq_quiet',
'float32_exp2',
'float32_exp2_coefficients',
'float32_is_any_nan',
'float32_is_infinity',
'float32_is_neg',
'float32_is_quiet_nan',
'float32_is_signaling_nan',
'float32_is_zero',
'float32_is_zero_or_denormal',
'float32_le',
'float32_le_quiet',
'float32_log2',
'float32_lt',
'float32_lt_quiet',
'float32_max',
'float32_maxnum',
'float32_maxnummag',
'float32_maybe_silence_nan',
'float32_min',
'float32_minmax',
'float32_minnum',
'float32_minnummag',
'float32_mul',
'float32_muladd',
'float32_rem',
'float32_round_to_int',
'float32_scalbn',
'float32_set_sign',
'float32_sqrt',
'float32_squash_input_denormal',
'float32_sub',
'float32ToCommonNaN',
'float32_to_float128',
'float32_to_float16',
'float32_to_float64',
'float32_to_floatx80',
'float32_to_int16',
'float32_to_int16_round_to_zero',
'float32_to_int32',
'float32_to_int32_round_to_zero',
'float32_to_int64',
'float32_to_int64_round_to_zero',
'float32_to_uint16',
'float32_to_uint16_round_to_zero',
'float32_to_uint32',
'float32_to_uint32_round_to_zero',
'float32_to_uint64',
'float32_to_uint64_round_to_zero',
'float32_unordered',
'float32_unordered_quiet',
'float64_abs',
'float64_add',
'float64_chs',
'float64_compare',
'float64_compare_internal',
'float64_compare_quiet',
'float64_default_nan',
'float64_div',
'float64_eq',
'float64_eq_quiet',
'float64_is_any_nan',
'float64_is_infinity',
'float64_is_neg',
'float64_is_quiet_nan',
'float64_is_signaling_nan',
'float64_is_zero',
'float64_le',
'float64_le_quiet',
'float64_log2',
'float64_lt',
'float64_lt_quiet',
'float64_max',
'float64_maxnum',
'float64_maxnummag',
'float64_maybe_silence_nan',
'float64_min',
'float64_minmax',
'float64_minnum',
'float64_minnummag',
'float64_mul',
'float64_muladd',
'float64_rem',
'float64_round_to_int',
'float64_scalbn',
'float64_set_sign',
'float64_sqrt',
'float64_squash_input_denormal',
'float64_sub',
'float64ToCommonNaN',
'float64_to_float128',
'float64_to_float16',
'float64_to_float32',
'float64_to_floatx80',
'float64_to_int16',
'float64_to_int16_round_to_zero',
'float64_to_int32',
'float64_to_int32_round_to_zero',
'float64_to_int64',
'float64_to_int64_round_to_zero',
'float64_to_uint16',
'float64_to_uint16_round_to_zero',
'float64_to_uint32',
'float64_to_uint32_round_to_zero',
'float64_to_uint64',
'float64_to_uint64_round_to_zero',
'float64_trunc_to_int',
'float64_unordered',
'float64_unordered_quiet',
'float_raise',
'floatx80_add',
'floatx80_compare',
'floatx80_compare_internal',
'floatx80_compare_quiet',
'floatx80_default_nan',
'floatx80_div',
'floatx80_eq',
'floatx80_eq_quiet',
'floatx80_is_quiet_nan',
'floatx80_is_signaling_nan',
'floatx80_le',
'floatx80_le_quiet',
'floatx80_lt',
'floatx80_lt_quiet',
'floatx80_maybe_silence_nan',
'floatx80_mul',
'floatx80_rem',
'floatx80_round_to_int',
'floatx80_scalbn',
'floatx80_sqrt',
'floatx80_sub',
'floatx80ToCommonNaN',
'floatx80_to_float128',
'floatx80_to_float32',
'floatx80_to_float64',
'floatx80_to_int32',
'floatx80_to_int32_round_to_zero',
'floatx80_to_int64',
'floatx80_to_int64_round_to_zero',
'floatx80_unordered',
'floatx80_unordered_quiet',
'flush_icache_range',
'format_string',
'fp_decode_rm',
'frame_dummy',
'free_range',
'fstat64',
'futex_wait',
'futex_wake',
'gen_aa32_ld16s',
'gen_aa32_ld16u',
'gen_aa32_ld32u',
'gen_aa32_ld64',
'gen_aa32_ld8s',
'gen_aa32_ld8u',
'gen_aa32_st16',
'gen_aa32_st32',
'gen_aa32_st64',
'gen_aa32_st8',
'gen_adc',
'gen_adc_CC',
'gen_add16',
'gen_add_carry',
'gen_add_CC',
'gen_add_datah_offset',
'gen_add_data_offset',
'gen_addq',
'gen_addq_lo',
'gen_addq_msw',
'gen_arm_parallel_addsub',
'gen_arm_shift_im',
'gen_arm_shift_reg',
'gen_bx',
'gen_bx_im',
'gen_clrex',
'generate_memory_topology',
'generic_timer_cp_reginfo',
'gen_exception',
'gen_exception_insn',
'gen_exception_internal',
'gen_exception_internal_insn',
'gen_exception_return',
'gen_goto_tb',
'gen_helper_access_check_cp_reg',
'gen_helper_add_saturate',
'gen_helper_add_setq',
'gen_helper_clear_pstate_ss',
'gen_helper_clz32',
'gen_helper_clz64',
'gen_helper_clz_arm',
'gen_helper_cpsr_read',
'gen_helper_cpsr_write',
'gen_helper_crc32_arm',
'gen_helper_crc32c',
'gen_helper_crypto_aese',
'gen_helper_crypto_aesmc',
'gen_helper_crypto_sha1_3reg',
'gen_helper_crypto_sha1h',
'gen_helper_crypto_sha1su1',
'gen_helper_crypto_sha256h',
'gen_helper_crypto_sha256h2',
'gen_helper_crypto_sha256su0',
'gen_helper_crypto_sha256su1',
'gen_helper_double_saturate',
'gen_helper_exception_internal',
'gen_helper_exception_with_syndrome',
'gen_helper_get_cp_reg',
'gen_helper_get_cp_reg64',
'gen_helper_get_r13_banked',
'gen_helper_get_user_reg',
'gen_helper_iwmmxt_addcb',
'gen_helper_iwmmxt_addcl',
'gen_helper_iwmmxt_addcw',
'gen_helper_iwmmxt_addnb',
'gen_helper_iwmmxt_addnl',
'gen_helper_iwmmxt_addnw',
'gen_helper_iwmmxt_addsb',
'gen_helper_iwmmxt_addsl',
'gen_helper_iwmmxt_addsw',
'gen_helper_iwmmxt_addub',
'gen_helper_iwmmxt_addul',
'gen_helper_iwmmxt_adduw',
'gen_helper_iwmmxt_align',
'gen_helper_iwmmxt_avgb0',
'gen_helper_iwmmxt_avgb1',
'gen_helper_iwmmxt_avgw0',
'gen_helper_iwmmxt_avgw1',
'gen_helper_iwmmxt_bcstb',
'gen_helper_iwmmxt_bcstl',
'gen_helper_iwmmxt_bcstw',
'gen_helper_iwmmxt_cmpeqb',
'gen_helper_iwmmxt_cmpeql',
'gen_helper_iwmmxt_cmpeqw',
'gen_helper_iwmmxt_cmpgtsb',
'gen_helper_iwmmxt_cmpgtsl',
'gen_helper_iwmmxt_cmpgtsw',
'gen_helper_iwmmxt_cmpgtub',
'gen_helper_iwmmxt_cmpgtul',
'gen_helper_iwmmxt_cmpgtuw',
'gen_helper_iwmmxt_insr',
'gen_helper_iwmmxt_macsw',
'gen_helper_iwmmxt_macuw',
'gen_helper_iwmmxt_maddsq',
'gen_helper_iwmmxt_madduq',
'gen_helper_iwmmxt_maxsb',
'gen_helper_iwmmxt_maxsl',
'gen_helper_iwmmxt_maxsw',
'gen_helper_iwmmxt_maxub',
'gen_helper_iwmmxt_maxul',
'gen_helper_iwmmxt_maxuw',
'gen_helper_iwmmxt_minsb',
'gen_helper_iwmmxt_minsl',
'gen_helper_iwmmxt_minsw',
'gen_helper_iwmmxt_minub',
'gen_helper_iwmmxt_minul',
'gen_helper_iwmmxt_minuw',
'gen_helper_iwmmxt_msbb',
'gen_helper_iwmmxt_msbl',
'gen_helper_iwmmxt_msbw',
'gen_helper_iwmmxt_muladdsl',
'gen_helper_iwmmxt_muladdsw',
'gen_helper_iwmmxt_muladdswl',
'gen_helper_iwmmxt_mulshw',
'gen_helper_iwmmxt_mulslw',
'gen_helper_iwmmxt_muluhw',
'gen_helper_iwmmxt_mululw',
'gen_helper_iwmmxt_packsl',
'gen_helper_iwmmxt_packsq',
'gen_helper_iwmmxt_packsw',
'gen_helper_iwmmxt_packul',
'gen_helper_iwmmxt_packuq',
'gen_helper_iwmmxt_packuw',
'gen_helper_iwmmxt_rorl',
'gen_helper_iwmmxt_rorq',
'gen_helper_iwmmxt_rorw',
'gen_helper_iwmmxt_sadb',
'gen_helper_iwmmxt_sadw',
'gen_helper_iwmmxt_setpsr_nz',
'gen_helper_iwmmxt_shufh',
'gen_helper_iwmmxt_slll',
'gen_helper_iwmmxt_sllq',
'gen_helper_iwmmxt_sllw',
'gen_helper_iwmmxt_sral',
'gen_helper_iwmmxt_sraq',
'gen_helper_iwmmxt_sraw',
'gen_helper_iwmmxt_srll',
'gen_helper_iwmmxt_srlq',
'gen_helper_iwmmxt_srlw',
'gen_helper_iwmmxt_subnb',
'gen_helper_iwmmxt_subnl',
'gen_helper_iwmmxt_subnw',
'gen_helper_iwmmxt_subsb',
'gen_helper_iwmmxt_subsl',
'gen_helper_iwmmxt_subsw',
'gen_helper_iwmmxt_subub',
'gen_helper_iwmmxt_subul',
'gen_helper_iwmmxt_subuw',
'gen_helper_iwmmxt_unpackhb',
'gen_helper_iwmmxt_unpackhl',
'gen_helper_iwmmxt_unpackhsb',
'gen_helper_iwmmxt_unpackhsl',
'gen_helper_iwmmxt_unpackhsw',
'gen_helper_iwmmxt_unpackhub',
'gen_helper_iwmmxt_unpackhul',
'gen_helper_iwmmxt_unpackhuw',
'gen_helper_iwmmxt_unpackhw',
'gen_helper_iwmmxt_unpacklb',
'gen_helper_iwmmxt_unpackll',
'gen_helper_iwmmxt_unpacklsb',
'gen_helper_iwmmxt_unpacklsl',
'gen_helper_iwmmxt_unpacklsw',
'gen_helper_iwmmxt_unpacklub',
'gen_helper_iwmmxt_unpacklul',
'gen_helper_iwmmxt_unpackluw',
'gen_helper_iwmmxt_unpacklw',
'gen_helper_neon_abd_f32',
'gen_helper_neon_abdl_s16',
'gen_helper_neon_abdl_s32',
'gen_helper_neon_abdl_s64',
'gen_helper_neon_abdl_u16',
'gen_helper_neon_abdl_u32',
'gen_helper_neon_abdl_u64',
'gen_helper_neon_abd_s16',
'gen_helper_neon_abd_s32',
'gen_helper_neon_abd_s8',
'gen_helper_neon_abd_u16',
'gen_helper_neon_abd_u32',
'gen_helper_neon_abd_u8',
'gen_helper_neon_abs_s16',
'gen_helper_neon_abs_s8',
'gen_helper_neon_acge_f32',
'gen_helper_neon_acgt_f32',
'gen_helper_neon_addl_saturate_s32',
'gen_helper_neon_addl_saturate_s64',
'gen_helper_neon_addl_u16',
'gen_helper_neon_addl_u32',
'gen_helper_neon_add_u16',
'gen_helper_neon_add_u8',
'gen_helper_neon_ceq_f32',
'gen_helper_neon_ceq_u16',
'gen_helper_neon_ceq_u32',
'gen_helper_neon_ceq_u8',
'gen_helper_neon_cge_f32',
'gen_helper_neon_cge_s16',
'gen_helper_neon_cge_s32',
'gen_helper_neon_cge_s8',
'gen_helper_neon_cge_u16',
'gen_helper_neon_cge_u32',
'gen_helper_neon_cge_u8',
'gen_helper_neon_cgt_f32',
'gen_helper_neon_cgt_s16',
'gen_helper_neon_cgt_s32',
'gen_helper_neon_cgt_s8',
'gen_helper_neon_cgt_u16',
'gen_helper_neon_cgt_u32',
'gen_helper_neon_cgt_u8',
'gen_helper_neon_cls_s16',
'gen_helper_neon_cls_s32',
'gen_helper_neon_cls_s8',
'gen_helper_neon_clz_u16',
'gen_helper_neon_clz_u8',
'gen_helper_neon_cnt_u8',
'gen_helper_neon_fcvt_f16_to_f32',
'gen_helper_neon_fcvt_f32_to_f16',
'gen_helper_neon_hadd_s16',
'gen_helper_neon_hadd_s32',
'gen_helper_neon_hadd_s8',
'gen_helper_neon_hadd_u16',
'gen_helper_neon_hadd_u32',
'gen_helper_neon_hadd_u8',
'gen_helper_neon_hsub_s16',
'gen_helper_neon_hsub_s32',
'gen_helper_neon_hsub_s8',
'gen_helper_neon_hsub_u16',
'gen_helper_neon_hsub_u32',
'gen_helper_neon_hsub_u8',
'gen_helper_neon_max_s16',
'gen_helper_neon_max_s32',
'gen_helper_neon_max_s8',
'gen_helper_neon_max_u16',
'gen_helper_neon_max_u32',
'gen_helper_neon_max_u8',
'gen_helper_neon_min_s16',
'gen_helper_neon_min_s32',
'gen_helper_neon_min_s8',
'gen_helper_neon_min_u16',
'gen_helper_neon_min_u32',
'gen_helper_neon_min_u8',
'gen_helper_neon_mull_p8',
'gen_helper_neon_mull_s16',
'gen_helper_neon_mull_s8',
'gen_helper_neon_mull_u16',
'gen_helper_neon_mull_u8',
'gen_helper_neon_mul_p8',
'gen_helper_neon_mul_u16',
'gen_helper_neon_mul_u8',
'gen_helper_neon_narrow_high_u16',
'gen_helper_neon_narrow_high_u8',
'gen_helper_neon_narrow_round_high_u16',
'gen_helper_neon_narrow_round_high_u8',
'gen_helper_neon_narrow_sat_s16',
'gen_helper_neon_narrow_sat_s32',
'gen_helper_neon_narrow_sat_s8',
'gen_helper_neon_narrow_sat_u16',
'gen_helper_neon_narrow_sat_u32',
'gen_helper_neon_narrow_sat_u8',
'gen_helper_neon_narrow_u16',
'gen_helper_neon_narrow_u8',
'gen_helper_neon_negl_u16',
'gen_helper_neon_negl_u32',
'gen_helper_neon_paddl_u16',
'gen_helper_neon_paddl_u32',
'gen_helper_neon_padd_u16',
'gen_helper_neon_padd_u8',
'gen_helper_neon_pmax_s16',
'gen_helper_neon_pmax_s8',
'gen_helper_neon_pmax_u16',
'gen_helper_neon_pmax_u8',
'gen_helper_neon_pmin_s16',
'gen_helper_neon_pmin_s8',
'gen_helper_neon_pmin_u16',
'gen_helper_neon_pmin_u8',
'gen_helper_neon_pmull_64_hi',
'gen_helper_neon_pmull_64_lo',
'gen_helper_neon_qabs_s16',
'gen_helper_neon_qabs_s32',
'gen_helper_neon_qabs_s8',
'gen_helper_neon_qadd_s16',
'gen_helper_neon_qadd_s32',
'gen_helper_neon_qadd_s64',
'gen_helper_neon_qadd_s8',
'gen_helper_neon_qadd_u16',
'gen_helper_neon_qadd_u32',
'gen_helper_neon_qadd_u64',
'gen_helper_neon_qadd_u8',
'gen_helper_neon_qdmulh_s16',
'gen_helper_neon_qdmulh_s32',
'gen_helper_neon_qneg_s16',
'gen_helper_neon_qneg_s32',
'gen_helper_neon_qneg_s8',
'gen_helper_neon_qrdmulh_s16',
'gen_helper_neon_qrdmulh_s32',
'gen_helper_neon_qrshl_s16',
'gen_helper_neon_qrshl_s32',
'gen_helper_neon_qrshl_s64',
'gen_helper_neon_qrshl_s8',
'gen_helper_neon_qrshl_u16',
'gen_helper_neon_qrshl_u32',
'gen_helper_neon_qrshl_u64',
'gen_helper_neon_qrshl_u8',
'gen_helper_neon_qshl_s16',
'gen_helper_neon_qshl_s32',
'gen_helper_neon_qshl_s64',
'gen_helper_neon_qshl_s8',
'gen_helper_neon_qshl_u16',
'gen_helper_neon_qshl_u32',
'gen_helper_neon_qshl_u64',
'gen_helper_neon_qshl_u8',
'gen_helper_neon_qshlu_s16',
'gen_helper_neon_qshlu_s32',
'gen_helper_neon_qshlu_s64',
'gen_helper_neon_qshlu_s8',
'gen_helper_neon_qsub_s16',
'gen_helper_neon_qsub_s32',
'gen_helper_neon_qsub_s64',
'gen_helper_neon_qsub_s8',
'gen_helper_neon_qsub_u16',
'gen_helper_neon_qsub_u32',
'gen_helper_neon_qsub_u64',
'gen_helper_neon_qsub_u8',
'gen_helper_neon_qunzip16',
'gen_helper_neon_qunzip32',
'gen_helper_neon_qunzip8',
'gen_helper_neon_qzip16',
'gen_helper_neon_qzip32',
'gen_helper_neon_qzip8',
'gen_helper_neon_rhadd_s16',
'gen_helper_neon_rhadd_s32',
'gen_helper_neon_rhadd_s8',
'gen_helper_neon_rhadd_u16',
'gen_helper_neon_rhadd_u32',
'gen_helper_neon_rhadd_u8',
'gen_helper_neon_rshl_s16',
'gen_helper_neon_rshl_s32',
'gen_helper_neon_rshl_s64',
'gen_helper_neon_rshl_s8',
'gen_helper_neon_rshl_u16',
'gen_helper_neon_rshl_u32',
'gen_helper_neon_rshl_u64',
'gen_helper_neon_rshl_u8',
'gen_helper_neon_shl_s16',
'gen_helper_neon_shl_s32',
'gen_helper_neon_shl_s64',
'gen_helper_neon_shl_s8',
'gen_helper_neon_shl_u16',
'gen_helper_neon_shl_u32',
'gen_helper_neon_shl_u64',
'gen_helper_neon_shl_u8',
'gen_helper_neon_subl_u16',
'gen_helper_neon_subl_u32',
'gen_helper_neon_sub_u16',
'gen_helper_neon_sub_u8',
'gen_helper_neon_tbl',
'gen_helper_neon_tst_u16',
'gen_helper_neon_tst_u32',
'gen_helper_neon_tst_u8',
'gen_helper_neon_unarrow_sat16',
'gen_helper_neon_unarrow_sat32',
'gen_helper_neon_unarrow_sat8',
'gen_helper_neon_unzip16',
'gen_helper_neon_unzip8',
'gen_helper_neon_widen_s16',
'gen_helper_neon_widen_s8',
'gen_helper_neon_widen_u16',
'gen_helper_neon_widen_u8',
'gen_helper_neon_zip16',
'gen_helper_neon_zip8',
'gen_helper_pre_hvc',
'gen_helper_pre_smc',
'gen_helper_qadd16',
'gen_helper_qadd8',
'gen_helper_qaddsubx',
'gen_helper_qsub16',
'gen_helper_qsub8',
'gen_helper_qsubaddx',
'gen_helper_rbit',
'gen_helper_recpe_f32',
'gen_helper_recpe_u32',
'gen_helper_recps_f32',
'gen_helper_rintd',
'gen_helper_rintd_exact',
'gen_helper_rints',
'gen_helper_rints_exact',
'gen_helper_ror_cc',
'gen_helper_rsqrte_f32',
'gen_helper_rsqrte_u32',
'gen_helper_rsqrts_f32',
'gen_helper_sadd16',
'gen_helper_sadd8',
'gen_helper_saddsubx',
'gen_helper_sar_cc',
'gen_helper_sdiv',
'gen_helper_sel_flags',
'gen_helper_set_cp_reg',
'gen_helper_set_cp_reg64',
'gen_helper_set_neon_rmode',
'gen_helper_set_r13_banked',
'gen_helper_set_rmode',
'gen_helper_set_user_reg',
'gen_helper_shadd16',
'gen_helper_shadd8',
'gen_helper_shaddsubx',
'gen_helper_shl_cc',
'gen_helper_shr_cc',
'gen_helper_shsub16',
'gen_helper_shsub8',
'gen_helper_shsubaddx',
'gen_helper_ssat',
'gen_helper_ssat16',
'gen_helper_ssub16',
'gen_helper_ssub8',
'gen_helper_ssubaddx',
'gen_helper_sub_saturate',
'gen_helper_sxtb16',
'gen_helper_uadd16',
'gen_helper_uadd8',
'gen_helper_uaddsubx',
'gen_helper_udiv',
'gen_helper_uhadd16',
'gen_helper_uhadd8',
'gen_helper_uhaddsubx',
'gen_helper_uhsub16',
'gen_helper_uhsub8',
'gen_helper_uhsubaddx',
'gen_helper_uqadd16',
'gen_helper_uqadd8',
'gen_helper_uqaddsubx',
'gen_helper_uqsub16',
'gen_helper_uqsub8',
'gen_helper_uqsubaddx',
'gen_helper_usad8',
'gen_helper_usat',
'gen_helper_usat16',
'gen_helper_usub16',
'gen_helper_usub8',
'gen_helper_usubaddx',
'gen_helper_uxtb16',
'gen_helper_v7m_mrs',
'gen_helper_v7m_msr',
'gen_helper_vfp_absd',
'gen_helper_vfp_abss',
'gen_helper_vfp_addd',
'gen_helper_vfp_adds',
'gen_helper_vfp_cmpd',
'gen_helper_vfp_cmped',
'gen_helper_vfp_cmpes',
'gen_helper_vfp_cmps',
'gen_helper_vfp_divd',
'gen_helper_vfp_divs',
'gen_helper_vfp_fcvtds',
'gen_helper_vfp_fcvt_f16_to_f32',
'gen_helper_vfp_fcvt_f16_to_f64',
'gen_helper_vfp_fcvt_f32_to_f16',
'gen_helper_vfp_fcvt_f64_to_f16',
'gen_helper_vfp_fcvtsd',
'gen_helper_vfp_get_fpscr',
'gen_helper_vfp_maxnumd',
'gen_helper_vfp_maxnums',
'gen_helper_vfp_maxs',
'gen_helper_vfp_minnumd',
'gen_helper_vfp_minnums',
'gen_helper_vfp_mins',
'gen_helper_vfp_muladdd',
'gen_helper_vfp_muladds',
'gen_helper_vfp_muld',
'gen_helper_vfp_muls',
'gen_helper_vfp_negd',
'gen_helper_vfp_negs',
'gen_helper_vfp_set_fpscr',
'gen_helper_vfp_shtod',
'gen_helper_vfp_shtos',
'gen_helper_vfp_sitod',
'gen_helper_vfp_sitos',
'gen_helper_vfp_sltod',
'gen_helper_vfp_sltos',
'gen_helper_vfp_sqrtd',
'gen_helper_vfp_sqrts',
'gen_helper_vfp_subd',
'gen_helper_vfp_subs',
'gen_helper_vfp_toshd_round_to_zero',
'gen_helper_vfp_toshs_round_to_zero',
'gen_helper_vfp_tosid',
'gen_helper_vfp_tosis',
'gen_helper_vfp_tosizd',
'gen_helper_vfp_tosizs',
'gen_helper_vfp_tosld',
'gen_helper_vfp_tosld_round_to_zero',
'gen_helper_vfp_tosls',
'gen_helper_vfp_tosls_round_to_zero',
'gen_helper_vfp_touhd_round_to_zero',
'gen_helper_vfp_touhs_round_to_zero',
'gen_helper_vfp_touid',
'gen_helper_vfp_touis',
'gen_helper_vfp_touizd',
'gen_helper_vfp_touizs',
'gen_helper_vfp_tould',
'gen_helper_vfp_tould_round_to_zero',
'gen_helper_vfp_touls',
'gen_helper_vfp_touls_round_to_zero',
'gen_helper_vfp_uhtod',
'gen_helper_vfp_uhtos',
'gen_helper_vfp_uitod',
'gen_helper_vfp_uitos',
'gen_helper_vfp_ultod',
'gen_helper_vfp_ultos',
'gen_helper_wfe',
'gen_helper_wfi',
'gen_hvc',
'gen_intermediate_code_internal',
'gen_intermediate_code_internal_a64',
'gen_iwmmxt_address',
'gen_iwmmxt_shift',
'gen_jmp',
'gen_load_and_replicate',
'gen_load_exclusive',
'gen_logic_CC',
'gen_logicq_cc',
'gen_lookup_tb',
'gen_mov_F0_vreg',
'gen_mov_F1_vreg',
'gen_mov_vreg_F0',
'gen_muls_i64_i32',
'gen_mulu_i64_i32',
'gen_mulxy',
'gen_neon_add',
'gen_neon_addl',
'gen_neon_addl_saturate',
'gen_neon_bsl',
'gen_neon_dup_high16',
'gen_neon_dup_low16',
'gen_neon_dup_u8',
'gen_neon_mull',
'gen_neon_narrow',
'gen_neon_narrow_op',
'gen_neon_narrow_sats',
'gen_neon_narrow_satu',
'gen_neon_negl',
'gen_neon_rsb',
'gen_neon_shift_narrow',
'gen_neon_subl',
'gen_neon_trn_u16',
'gen_neon_trn_u8',
'gen_neon_unarrow_sats',
'gen_neon_unzip',
'gen_neon_widen',
'gen_neon_zip',
'gen_new_label',
'gen_nop_hint',
'gen_op_iwmmxt_addl_M0_wRn',
'gen_op_iwmmxt_addnb_M0_wRn',
'gen_op_iwmmxt_addnl_M0_wRn',
'gen_op_iwmmxt_addnw_M0_wRn',
'gen_op_iwmmxt_addsb_M0_wRn',
'gen_op_iwmmxt_addsl_M0_wRn',
'gen_op_iwmmxt_addsw_M0_wRn',
'gen_op_iwmmxt_addub_M0_wRn',
'gen_op_iwmmxt_addul_M0_wRn',
'gen_op_iwmmxt_adduw_M0_wRn',
'gen_op_iwmmxt_andq_M0_wRn',
'gen_op_iwmmxt_avgb0_M0_wRn',
'gen_op_iwmmxt_avgb1_M0_wRn',
'gen_op_iwmmxt_avgw0_M0_wRn',
'gen_op_iwmmxt_avgw1_M0_wRn',
'gen_op_iwmmxt_cmpeqb_M0_wRn',
'gen_op_iwmmxt_cmpeql_M0_wRn',
'gen_op_iwmmxt_cmpeqw_M0_wRn',
'gen_op_iwmmxt_cmpgtsb_M0_wRn',
'gen_op_iwmmxt_cmpgtsl_M0_wRn',
'gen_op_iwmmxt_cmpgtsw_M0_wRn',
'gen_op_iwmmxt_cmpgtub_M0_wRn',
'gen_op_iwmmxt_cmpgtul_M0_wRn',
'gen_op_iwmmxt_cmpgtuw_M0_wRn',
'gen_op_iwmmxt_macsw_M0_wRn',
'gen_op_iwmmxt_macuw_M0_wRn',
'gen_op_iwmmxt_maddsq_M0_wRn',
'gen_op_iwmmxt_madduq_M0_wRn',
'gen_op_iwmmxt_maxsb_M0_wRn',
'gen_op_iwmmxt_maxsl_M0_wRn',
'gen_op_iwmmxt_maxsw_M0_wRn',
'gen_op_iwmmxt_maxub_M0_wRn',
'gen_op_iwmmxt_maxul_M0_wRn',
'gen_op_iwmmxt_maxuw_M0_wRn',
'gen_op_iwmmxt_minsb_M0_wRn',
'gen_op_iwmmxt_minsl_M0_wRn',
'gen_op_iwmmxt_minsw_M0_wRn',
'gen_op_iwmmxt_minub_M0_wRn',
'gen_op_iwmmxt_minul_M0_wRn',
'gen_op_iwmmxt_minuw_M0_wRn',
'gen_op_iwmmxt_movq_M0_wRn',
'gen_op_iwmmxt_movq_wRn_M0',
'gen_op_iwmmxt_mulshw_M0_wRn',
'gen_op_iwmmxt_mulslw_M0_wRn',
'gen_op_iwmmxt_muluhw_M0_wRn',
'gen_op_iwmmxt_mululw_M0_wRn',
'gen_op_iwmmxt_orq_M0_wRn',
'gen_op_iwmmxt_packsl_M0_wRn',
'gen_op_iwmmxt_packsq_M0_wRn',
'gen_op_iwmmxt_packsw_M0_wRn',
'gen_op_iwmmxt_packul_M0_wRn',
'gen_op_iwmmxt_packuq_M0_wRn',
'gen_op_iwmmxt_packuw_M0_wRn',
'gen_op_iwmmxt_sadb_M0_wRn',
'gen_op_iwmmxt_sadw_M0_wRn',
'gen_op_iwmmxt_set_cup',
'gen_op_iwmmxt_set_mup',
'gen_op_iwmmxt_setpsr_nz',
'gen_op_iwmmxt_subnb_M0_wRn',
'gen_op_iwmmxt_subnl_M0_wRn',
'gen_op_iwmmxt_subnw_M0_wRn',
'gen_op_iwmmxt_subsb_M0_wRn',
'gen_op_iwmmxt_subsl_M0_wRn',
'gen_op_iwmmxt_subsw_M0_wRn',
'gen_op_iwmmxt_subub_M0_wRn',
'gen_op_iwmmxt_subul_M0_wRn',
'gen_op_iwmmxt_subuw_M0_wRn',
'gen_op_iwmmxt_unpackhb_M0_wRn',
'gen_op_iwmmxt_unpackhl_M0_wRn',
'gen_op_iwmmxt_unpackhsb_M0',
'gen_op_iwmmxt_unpackhsl_M0',
'gen_op_iwmmxt_unpackhsw_M0',
'gen_op_iwmmxt_unpackhub_M0',
'gen_op_iwmmxt_unpackhul_M0',
'gen_op_iwmmxt_unpackhuw_M0',
'gen_op_iwmmxt_unpackhw_M0_wRn',
'gen_op_iwmmxt_unpacklb_M0_wRn',
'gen_op_iwmmxt_unpackll_M0_wRn',
'gen_op_iwmmxt_unpacklsb_M0',
'gen_op_iwmmxt_unpacklsl_M0',
'gen_op_iwmmxt_unpacklsw_M0',
'gen_op_iwmmxt_unpacklub_M0',
'gen_op_iwmmxt_unpacklul_M0',
'gen_op_iwmmxt_unpackluw_M0',
'gen_op_iwmmxt_unpacklw_M0_wRn',
'gen_op_iwmmxt_xorq_M0_wRn',
'gen_rev16',
'gen_revsh',
'gen_rfe',
'gen_sar',
'gen_sbc_CC',
'gen_sbfx',
'gen_set_CF_bit31',
'gen_set_condexec',
'gen_set_cpsr',
'gen_set_label',
'gen_set_pc_im',
'gen_set_psr',
'gen_set_psr_im',
'gen_shl',
'gen_shr',
'gen_smc',
'gen_smul_dual',
'gen_srs',
'gen_ss_advance',
'gen_step_complete_exception',
'gen_store_exclusive',
'gen_storeq_reg',
'gen_sub_carry',
'gen_sub_CC',
'gen_subq_msw',
'gen_swap_half',
'gen_thumb2_data_op',
'gen_thumb2_parallel_addsub',
'gen_ubfx',
'gen_vfp_abs',
'gen_vfp_add',
'gen_vfp_cmp',
'gen_vfp_cmpe',
'gen_vfp_div',
'gen_vfp_F1_ld0',
'gen_vfp_F1_mul',
'gen_vfp_F1_neg',
'gen_vfp_ld',
'gen_vfp_mrs',
'gen_vfp_msr',
'gen_vfp_mul',
'gen_vfp_neg',
'gen_vfp_shto',
'gen_vfp_sito',
'gen_vfp_slto',
'gen_vfp_sqrt',
'gen_vfp_st',
'gen_vfp_sub',
'gen_vfp_tosh',
'gen_vfp_tosi',
'gen_vfp_tosiz',
'gen_vfp_tosl',
'gen_vfp_touh',
'gen_vfp_toui',
'gen_vfp_touiz',
'gen_vfp_toul',
'gen_vfp_uhto',
'gen_vfp_uito',
'gen_vfp_ulto',
'get_arm_cp_reginfo',
'get_clock',
'get_clock_realtime',
'get_constraint_priority',
'get_float_exception_flags',
'get_float_rounding_mode',
'get_fpstatus_ptr',
'get_level1_table_address',
'get_mem_index',
'get_next_param_value',
'get_opt_name',
'get_opt_value',
'get_page_addr_code',
'get_param_value',
'get_phys_addr',
'get_phys_addr_lpae',
'get_phys_addr_mpu',
'get_phys_addr_v5',
'get_phys_addr_v6',
'get_system_memory',
'get_ticks_per_sec',
'g_list_insert_sorted_merged',
'_GLOBAL_OFFSET_TABLE_',
'gt_cntfrq_access',
'gt_cnt_read',
'gt_cnt_reset',
'gt_counter_access',
'gt_ctl_write',
'gt_cval_write',
'gt_get_countervalue',
'gt_pct_access',
'gt_ptimer_access',
'gt_recalc_timer',
'gt_timer_access',
'gt_tval_read',
'gt_tval_write',
'gt_vct_access',
'gt_vtimer_access',
'guest_phys_blocks_free',
'guest_phys_blocks_init',
'handle_vcvt',
'handle_vminmaxnm',
'handle_vrint',
'handle_vsel',
'has_help_option',
'have_bmi1',
'have_bmi2',
'hcr_write',
'helper_access_check_cp_reg',
'helper_add_saturate',
'helper_add_setq',
'helper_add_usaturate',
'helper_be_ldl_cmmu',
'helper_be_ldq_cmmu',
'helper_be_ldq_mmu',
'helper_be_ldsl_mmu',
'helper_be_ldsw_mmu',
'helper_be_ldul_mmu',
'helper_be_lduw_mmu',
'helper_be_ldw_cmmu',
'helper_be_stl_mmu',
'helper_be_stq_mmu',
'helper_be_stw_mmu',
'helper_clear_pstate_ss',
'helper_clz_arm',
'helper_cpsr_read',
'helper_cpsr_write',
'helper_crc32_arm',
'helper_crc32c',
'helper_crypto_aese',
'helper_crypto_aesmc',
'helper_crypto_sha1_3reg',
'helper_crypto_sha1h',
'helper_crypto_sha1su1',
'helper_crypto_sha256h',
'helper_crypto_sha256h2',
'helper_crypto_sha256su0',
'helper_crypto_sha256su1',
'helper_dc_zva',
'helper_double_saturate',
'helper_exception_internal',
'helper_exception_return',
'helper_exception_with_syndrome',
'helper_get_cp_reg',
'helper_get_cp_reg64',
'helper_get_r13_banked',
'helper_get_user_reg',
'helper_iwmmxt_addcb',
'helper_iwmmxt_addcl',
'helper_iwmmxt_addcw',
'helper_iwmmxt_addnb',
'helper_iwmmxt_addnl',
'helper_iwmmxt_addnw',
'helper_iwmmxt_addsb',
'helper_iwmmxt_addsl',
'helper_iwmmxt_addsw',
'helper_iwmmxt_addub',
'helper_iwmmxt_addul',
'helper_iwmmxt_adduw',
'helper_iwmmxt_align',
'helper_iwmmxt_avgb0',
'helper_iwmmxt_avgb1',
'helper_iwmmxt_avgw0',
'helper_iwmmxt_avgw1',
'helper_iwmmxt_bcstb',
'helper_iwmmxt_bcstl',
'helper_iwmmxt_bcstw',
'helper_iwmmxt_cmpeqb',
'helper_iwmmxt_cmpeql',
'helper_iwmmxt_cmpeqw',
'helper_iwmmxt_cmpgtsb',
'helper_iwmmxt_cmpgtsl',
'helper_iwmmxt_cmpgtsw',
'helper_iwmmxt_cmpgtub',
'helper_iwmmxt_cmpgtul',
'helper_iwmmxt_cmpgtuw',
'helper_iwmmxt_insr',
'helper_iwmmxt_macsw',
'helper_iwmmxt_macuw',
'helper_iwmmxt_maddsq',
'helper_iwmmxt_madduq',
'helper_iwmmxt_maxsb',
'helper_iwmmxt_maxsl',
'helper_iwmmxt_maxsw',
'helper_iwmmxt_maxub',
'helper_iwmmxt_maxul',
'helper_iwmmxt_maxuw',
'helper_iwmmxt_minsb',
'helper_iwmmxt_minsl',
'helper_iwmmxt_minsw',
'helper_iwmmxt_minub',
'helper_iwmmxt_minul',
'helper_iwmmxt_minuw',
'helper_iwmmxt_msbb',
'helper_iwmmxt_msbl',
'helper_iwmmxt_msbw',
'helper_iwmmxt_muladdsl',
'helper_iwmmxt_muladdsw',
'helper_iwmmxt_muladdswl',
'helper_iwmmxt_mulshw',
'helper_iwmmxt_mulslw',
'helper_iwmmxt_muluhw',
'helper_iwmmxt_mululw',
'helper_iwmmxt_packsl',
'helper_iwmmxt_packsq',
'helper_iwmmxt_packsw',
'helper_iwmmxt_packul',
'helper_iwmmxt_packuq',
'helper_iwmmxt_packuw',
'helper_iwmmxt_rorl',
'helper_iwmmxt_rorq',
'helper_iwmmxt_rorw',
'helper_iwmmxt_sadb',
'helper_iwmmxt_sadw',
'helper_iwmmxt_setpsr_nz',
'helper_iwmmxt_shufh',
'helper_iwmmxt_slll',
'helper_iwmmxt_sllq',
'helper_iwmmxt_sllw',
'helper_iwmmxt_sral',
'helper_iwmmxt_sraq',
'helper_iwmmxt_sraw',
'helper_iwmmxt_srll',
'helper_iwmmxt_srlq',
'helper_iwmmxt_srlw',
'helper_iwmmxt_subnb',
'helper_iwmmxt_subnl',
'helper_iwmmxt_subnw',
'helper_iwmmxt_subsb',
'helper_iwmmxt_subsl',
'helper_iwmmxt_subsw',
'helper_iwmmxt_subub',
'helper_iwmmxt_subul',
'helper_iwmmxt_subuw',
'helper_iwmmxt_unpackhb',
'helper_iwmmxt_unpackhl',
'helper_iwmmxt_unpackhsb',
'helper_iwmmxt_unpackhsl',
'helper_iwmmxt_unpackhsw',
'helper_iwmmxt_unpackhub',
'helper_iwmmxt_unpackhul',
'helper_iwmmxt_unpackhuw',
'helper_iwmmxt_unpackhw',
'helper_iwmmxt_unpacklb',
'helper_iwmmxt_unpackll',
'helper_iwmmxt_unpacklsb',
'helper_iwmmxt_unpacklsl',
'helper_iwmmxt_unpacklsw',
'helper_iwmmxt_unpacklub',
'helper_iwmmxt_unpacklul',
'helper_iwmmxt_unpackluw',
'helper_iwmmxt_unpacklw',
'helper_ldb_cmmu',
'helper_ldb_mmu',
'helper_ldl_cmmu',
'helper_ldl_mmu',
'helper_ldq_cmmu',
'helper_ldq_mmu',
'helper_ldw_cmmu',
'helper_ldw_mmu',
'helper_le_ldl_cmmu',
'helper_le_ldq_cmmu',
'helper_le_ldq_mmu',
'helper_le_ldsl_mmu',
'helper_le_ldsw_mmu',
'helper_le_ldul_mmu',
'helper_le_lduw_mmu',
'helper_le_ldw_cmmu',
'helper_le_stl_mmu',
'helper_le_stq_mmu',
'helper_le_stw_mmu',
'helper_msr_i_pstate',
'helper_neon_abd_f32',
'helper_neon_abdl_s16',
'helper_neon_abdl_s32',
'helper_neon_abdl_s64',
'helper_neon_abdl_u16',
'helper_neon_abdl_u32',
'helper_neon_abdl_u64',
'helper_neon_abd_s16',
'helper_neon_abd_s32',
'helper_neon_abd_s8',
'helper_neon_abd_u16',
'helper_neon_abd_u32',
'helper_neon_abd_u8',
'helper_neon_abs_s16',
'helper_neon_abs_s8',
'helper_neon_acge_f32',
'helper_neon_acge_f64',
'helper_neon_acgt_f32',
'helper_neon_acgt_f64',
'helper_neon_addl_saturate_s32',
'helper_neon_addl_saturate_s64',
'helper_neon_addl_u16',
'helper_neon_addl_u32',
'helper_neon_add_u16',
'helper_neon_add_u8',
'helper_neon_ceq_f32',
'helper_neon_ceq_u16',
'helper_neon_ceq_u32',
'helper_neon_ceq_u8',
'helper_neon_cge_f32',
'helper_neon_cge_s16',
'helper_neon_cge_s32',
'helper_neon_cge_s8',
'helper_neon_cge_u16',
'helper_neon_cge_u32',
'helper_neon_cge_u8',
'helper_neon_cgt_f32',
'helper_neon_cgt_s16',
'helper_neon_cgt_s32',
'helper_neon_cgt_s8',
'helper_neon_cgt_u16',
'helper_neon_cgt_u32',
'helper_neon_cgt_u8',
'helper_neon_cls_s16',
'helper_neon_cls_s32',
'helper_neon_cls_s8',
'helper_neon_clz_u16',
'helper_neon_clz_u8',
'helper_neon_cnt_u8',
'helper_neon_fcvt_f16_to_f32',
'helper_neon_fcvt_f32_to_f16',
'helper_neon_hadd_s16',
'helper_neon_hadd_s32',
'helper_neon_hadd_s8',
'helper_neon_hadd_u16',
'helper_neon_hadd_u32',
'helper_neon_hadd_u8',
'helper_neon_hsub_s16',
'helper_neon_hsub_s32',
'helper_neon_hsub_s8',
'helper_neon_hsub_u16',
'helper_neon_hsub_u32',
'helper_neon_hsub_u8',
'helper_neon_max_s16',
'helper_neon_max_s32',
'helper_neon_max_s8',
'helper_neon_max_u16',
'helper_neon_max_u32',
'helper_neon_max_u8',
'helper_neon_min_s16',
'helper_neon_min_s32',
'helper_neon_min_s8',
'helper_neon_min_u16',
'helper_neon_min_u32',
'helper_neon_min_u8',
'helper_neon_mull_p8',
'helper_neon_mull_s16',
'helper_neon_mull_s8',
'helper_neon_mull_u16',
'helper_neon_mull_u8',
'helper_neon_mul_p8',
'helper_neon_mul_u16',
'helper_neon_mul_u8',
'helper_neon_narrow_high_u16',
'helper_neon_narrow_high_u8',
'helper_neon_narrow_round_high_u16',
'helper_neon_narrow_round_high_u8',
'helper_neon_narrow_sat_s16',
'helper_neon_narrow_sat_s32',
'helper_neon_narrow_sat_s8',
'helper_neon_narrow_sat_u16',
'helper_neon_narrow_sat_u32',
'helper_neon_narrow_sat_u8',
'helper_neon_narrow_u16',
'helper_neon_narrow_u8',
'helper_neon_negl_u16',
'helper_neon_negl_u32',
'helper_neon_paddl_u16',
'helper_neon_paddl_u32',
'helper_neon_padd_u16',
'helper_neon_padd_u8',
'helper_neon_pmax_s16',
'helper_neon_pmax_s8',
'helper_neon_pmax_u16',
'helper_neon_pmax_u8',
'helper_neon_pmin_s16',
'helper_neon_pmin_s8',
'helper_neon_pmin_u16',
'helper_neon_pmin_u8',
'helper_neon_pmull_64_hi',
'helper_neon_pmull_64_lo',
'helper_neon_qabs_s16',
'helper_neon_qabs_s32',
'helper_neon_qabs_s64',
'helper_neon_qabs_s8',
'helper_neon_qadd_s16',
'helper_neon_qadd_s32',
'helper_neon_qadd_s64',
'helper_neon_qadd_s8',
'helper_neon_qadd_u16',
'helper_neon_qadd_u32',
'helper_neon_qadd_u64',
'helper_neon_qadd_u8',
'helper_neon_qdmulh_s16',
'helper_neon_qdmulh_s32',
'helper_neon_qneg_s16',
'helper_neon_qneg_s32',
'helper_neon_qneg_s64',
'helper_neon_qneg_s8',
'helper_neon_qrdmulh_s16',
'helper_neon_qrdmulh_s32',
'helper_neon_qrshl_s16',
'helper_neon_qrshl_s32',
'helper_neon_qrshl_s64',
'helper_neon_qrshl_s8',
'helper_neon_qrshl_u16',
'helper_neon_qrshl_u32',
'helper_neon_qrshl_u64',
'helper_neon_qrshl_u8',
'helper_neon_qshl_s16',
'helper_neon_qshl_s32',
'helper_neon_qshl_s64',
'helper_neon_qshl_s8',
'helper_neon_qshl_u16',
'helper_neon_qshl_u32',
'helper_neon_qshl_u64',
'helper_neon_qshl_u8',
'helper_neon_qshlu_s16',
'helper_neon_qshlu_s32',
'helper_neon_qshlu_s64',
'helper_neon_qshlu_s8',
'helper_neon_qsub_s16',
'helper_neon_qsub_s32',
'helper_neon_qsub_s64',
'helper_neon_qsub_s8',
'helper_neon_qsub_u16',
'helper_neon_qsub_u32',
'helper_neon_qsub_u64',
'helper_neon_qsub_u8',
'helper_neon_qunzip16',
'helper_neon_qunzip32',
'helper_neon_qunzip8',
'helper_neon_qzip16',
'helper_neon_qzip32',
'helper_neon_qzip8',
'helper_neon_rbit_u8',
'helper_neon_rhadd_s16',
'helper_neon_rhadd_s32',
'helper_neon_rhadd_s8',
'helper_neon_rhadd_u16',
'helper_neon_rhadd_u32',
'helper_neon_rhadd_u8',
'helper_neon_rshl_s16',
'helper_neon_rshl_s32',
'helper_neon_rshl_s64',
'helper_neon_rshl_s8',
'helper_neon_rshl_u16',
'helper_neon_rshl_u32',
'helper_neon_rshl_u64',
'helper_neon_rshl_u8',
'helper_neon_shl_s16',
'helper_neon_shl_s32',
'helper_neon_shl_s64',
'helper_neon_shl_s8',
'helper_neon_shl_u16',
'helper_neon_shl_u32',
'helper_neon_shl_u64',
'helper_neon_shl_u8',
'helper_neon_sqadd_u16',
'helper_neon_sqadd_u32',
'helper_neon_sqadd_u64',
'helper_neon_sqadd_u8',
'helper_neon_subl_u16',
'helper_neon_subl_u32',
'helper_neon_sub_u16',
'helper_neon_sub_u8',
'helper_neon_tbl',
'helper_neon_tst_u16',
'helper_neon_tst_u32',
'helper_neon_tst_u8',
'helper_neon_unarrow_sat16',
'helper_neon_unarrow_sat32',
'helper_neon_unarrow_sat8',
'helper_neon_unzip16',
'helper_neon_unzip8',
'helper_neon_uqadd_s16',
'helper_neon_uqadd_s32',
'helper_neon_uqadd_s64',
'helper_neon_uqadd_s8',
'helper_neon_widen_s16',
'helper_neon_widen_s8',
'helper_neon_widen_u16',
'helper_neon_widen_u8',
'helper_neon_zip16',
'helper_neon_zip8',
'helper_pre_hvc',
'helper_pre_smc',
'helper_qadd16',
'helper_qadd8',
'helper_qaddsubx',
'helper_qsub16',
'helper_qsub8',
'helper_qsubaddx',
'helper_rbit',
'helper_recpe_f32',
'helper_recpe_f64',
'helper_recpe_u32',
'helper_recps_f32',
'helper_ret_ldb_cmmu',
'helper_ret_ldsb_mmu',
'helper_ret_ldub_mmu',
'helper_ret_stb_mmu',
'helper_rintd',
'helper_rintd_exact',
'helper_rints',
'helper_rints_exact',
'helper_ror_cc',
'helper_rsqrte_f32',
'helper_rsqrte_f64',
'helper_rsqrte_u32',
'helper_rsqrts_f32',
'helper_sadd16',
'helper_sadd8',
'helper_saddsubx',
'helper_sar_cc',
'helper_sdiv',
'helper_sel_flags',
'helper_set_cp_reg',
'helper_set_cp_reg64',
'helper_set_neon_rmode',
'helper_set_r13_banked',
'helper_set_rmode',
'helper_set_user_reg',
'helper_shadd16',
'helper_shadd8',
'helper_shaddsubx',
'helper_shl_cc',
'helper_shr_cc',
'helper_shsub16',
'helper_shsub8',
'helper_shsubaddx',
'helper_ssat',
'helper_ssat16',
'helper_ssub16',
'helper_ssub8',
'helper_ssubaddx',
'helper_stb_mmu',
'helper_stl_mmu',
'helper_stq_mmu',
'helper_stw_mmu',
'helper_sub_saturate',
'helper_sub_usaturate',
'helper_sxtb16',
'helper_uadd16',
'helper_uadd8',
'helper_uaddsubx',
'helper_udiv',
'helper_uhadd16',
'helper_uhadd8',
'helper_uhaddsubx',
'helper_uhsub16',
'helper_uhsub8',
'helper_uhsubaddx',
'helper_uqadd16',
'helper_uqadd8',
'helper_uqaddsubx',
'helper_uqsub16',
'helper_uqsub8',
'helper_uqsubaddx',
'helper_usad8',
'helper_usat',
'helper_usat16',
'helper_usub16',
'helper_usub8',
'helper_usubaddx',
'helper_uxtb16',
'helper_v7m_mrs',
'helper_v7m_msr',
'helper_vfp_absd',
'helper_vfp_abss',
'helper_vfp_addd',
'helper_vfp_adds',
'helper_vfp_cmpd',
'helper_vfp_cmped',
'helper_vfp_cmpes',
'helper_vfp_cmps',
'helper_vfp_divd',
'helper_vfp_divs',
'helper_vfp_fcvtds',
'helper_vfp_fcvt_f16_to_f32',
'helper_vfp_fcvt_f16_to_f64',
'helper_vfp_fcvt_f32_to_f16',
'helper_vfp_fcvt_f64_to_f16',
'helper_vfp_fcvtsd',
'helper_vfp_get_fpscr',
'helper_vfp_maxd',
'helper_vfp_maxnumd',
'helper_vfp_maxnums',
'helper_vfp_maxs',
'helper_vfp_mind',
'helper_vfp_minnumd',
'helper_vfp_minnums',
'helper_vfp_mins',
'helper_vfp_muladdd',
'helper_vfp_muladds',
'helper_vfp_muld',
'helper_vfp_muls',
'helper_vfp_negd',
'helper_vfp_negs',
'helper_vfp_set_fpscr',
'helper_vfp_shtod',
'helper_vfp_shtos',
'helper_vfp_sitod',
'helper_vfp_sitos',
'helper_vfp_sltod',
'helper_vfp_sltos',
'helper_vfp_sqrtd',
'helper_vfp_sqrts',
'helper_vfp_sqtod',
'helper_vfp_sqtos',
'helper_vfp_subd',
'helper_vfp_subs',
'helper_vfp_toshd',
'helper_vfp_toshd_round_to_zero',
'helper_vfp_toshs',
'helper_vfp_toshs_round_to_zero',
'helper_vfp_tosid',
'helper_vfp_tosis',
'helper_vfp_tosizd',
'helper_vfp_tosizs',
'helper_vfp_tosld',
'helper_vfp_tosld_round_to_zero',
'helper_vfp_tosls',
'helper_vfp_tosls_round_to_zero',
'helper_vfp_tosqd',
'helper_vfp_tosqs',
'helper_vfp_touhd',
'helper_vfp_touhd_round_to_zero',
'helper_vfp_touhs',
'helper_vfp_touhs_round_to_zero',
'helper_vfp_touid',
'helper_vfp_touis',
'helper_vfp_touizd',
'helper_vfp_touizs',
'helper_vfp_tould',
'helper_vfp_tould_round_to_zero',
'helper_vfp_touls',
'helper_vfp_touls_round_to_zero',
'helper_vfp_touqd',
'helper_vfp_touqs',
'helper_vfp_uhtod',
'helper_vfp_uhtos',
'helper_vfp_uitod',
'helper_vfp_uitos',
'helper_vfp_ultod',
'helper_vfp_ultos',
'helper_vfp_uqtod',
'helper_vfp_uqtos',
'helper_wfe',
'helper_wfi',
'hex2decimal',
'hw_breakpoint_update',
'hw_breakpoint_update_all',
'hw_watchpoint_update',
'hw_watchpoint_update_all',
'_init',
'init_cpreg_list',
'init_lists',
'input_type_enum',
'int128_2_64',
'int128_add',
'int128_addto',
'int128_and',
'int128_eq',
'int128_ge',
'int128_get64',
'int128_gt',
'int128_le',
'int128_lt',
'int128_make64',
'int128_max',
'int128_min',
'int128_ne',
'int128_neg',
'int128_nz',
'int128_rshift',
'int128_sub',
'int128_subfrom',
'int128_zero',
'int16_to_float32',
'int16_to_float64',
'int32_to_float128',
'int32_to_float32',
'int32_to_float64',
'int32_to_floatx80',
'int64_to_float128',
'int64_to_float32',
'int64_to_float64',
'int64_to_floatx80',
'invalidate_and_set_dirty',
'invalidate_page_bitmap',
'io_mem_read',
'io_mem_write',
'io_readb',
'io_readl',
'io_readq',
'io_readw',
'iotlb_to_region',
'io_writeb',
'io_writel',
'io_writeq',
'io_writew',
'is_a64',
'is_help_option',
'isr_read',
'is_valid_option_list',
'iwmmxt_load_creg',
'iwmmxt_load_reg',
'iwmmxt_store_creg',
'iwmmxt_store_reg',
'__jit_debug_descriptor',
'__jit_debug_register_code',
'kvm_to_cpreg_id',
'last_ram_offset',
'ldl_be_p',
'ldl_be_phys',
'ldl_he_p',
'ldl_le_p',
'ldl_le_phys',
'ldl_phys',
'ldl_phys_internal',
'ldq_be_p',
'ldq_be_phys',
'ldq_he_p',
'ldq_le_p',
'ldq_le_phys',
'ldq_phys',
'ldq_phys_internal',
'ldst_name',
'ldub_p',
'ldub_phys',
'lduw_be_p',
'lduw_be_phys',
'lduw_he_p',
'lduw_le_p',
'lduw_le_phys',
'lduw_phys',
'lduw_phys_internal',
'le128',
'linked_bp_matches',
'listener_add_address_space',
'load_cpu_offset',
'load_reg',
'load_reg_var',
'log_cpu_state',
'lpae_cp_reginfo',
'lt128',
'machine_class_init',
'machine_finalize',
'machine_info',
'machine_initfn',
'machine_register_types',
'machvirt_init',
'machvirt_machine_init',
'maj',
'mapping_conflict',
'mapping_contiguous',
'mapping_have_same_region',
'mapping_merge',
'mem_add',
'mem_begin',
'mem_commit',
'memory_access_is_direct',
'memory_access_size',
'memory_init',
'memory_listener_match',
'memory_listener_register',
'memory_listener_unregister',
'memory_map_init',
'memory_mapping_filter',
'memory_mapping_list_add_mapping_sorted',
'memory_mapping_list_add_merge_sorted',
'memory_mapping_list_free',
'memory_mapping_list_init',
'memory_region_access_valid',
'memory_region_add_subregion',
'memory_region_add_subregion_common',
'memory_region_add_subregion_overlap',
'memory_region_big_endian',
'memory_region_clear_pending',
'memory_region_del_subregion',
'memory_region_destructor_alias',
'memory_region_destructor_none',
'memory_region_destructor_ram',
'memory_region_destructor_ram_from_ptr',
'memory_region_dispatch_read',
'memory_region_dispatch_read1',
'memory_region_dispatch_write',
'memory_region_escape_name',
'memory_region_finalize',
'memory_region_find',
'memory_region_get_addr',
'memory_region_get_alignment',
'memory_region_get_container',
'memory_region_get_fd',
'memory_region_get_may_overlap',
'memory_region_get_priority',
'memory_region_get_ram_addr',
'memory_region_get_ram_ptr',
'memory_region_get_size',
'memory_region_info',
'memory_region_init',
'memory_region_init_alias',
'memory_region_initfn',
'memory_region_init_io',
'memory_region_init_ram',
'memory_region_init_ram_ptr',
'memory_region_init_reservation',
'memory_region_is_iommu',
'memory_region_is_logging',
'memory_region_is_mapped',
'memory_region_is_ram',
'memory_region_is_rom',
'memory_region_is_romd',
'memory_region_is_skip_dump',
'memory_region_is_unassigned',
'memory_region_name',
'memory_region_need_escape',
'memory_region_oldmmio_read_accessor',
'memory_region_oldmmio_write_accessor',
'memory_region_present',
'memory_region_read_accessor',
'memory_region_readd_subregion',
'memory_region_ref',
'memory_region_resolve_container',
'memory_region_rom_device_set_romd',
'memory_region_section_get_iotlb',
'memory_region_set_address',
'memory_region_set_alias_offset',
'memory_region_set_enabled',
'memory_region_set_readonly',
'memory_region_set_skip_dump',
'memory_region_size',
'memory_region_to_address_space',
'memory_region_transaction_begin',
'memory_region_transaction_commit',
'memory_region_unref',
'memory_region_update_container_subregions',
'memory_region_write_accessor',
'memory_region_wrong_endianness',
'memory_try_enable_merging',
'module_call_init',
'module_load',
'mpidr_cp_reginfo',
'mpidr_read',
'msr_mask',
'mul128By64To192',
'mul128To256',
'mul64To128',
'muldiv64',
'neon_2rm_is_float_op',
'neon_2rm_sizes',
'neon_3r_sizes',
'neon_get_scalar',
'neon_load_reg',
'neon_load_reg64',
'neon_load_scratch',
'neon_ls_element_type',
'neon_reg_offset',
'neon_store_reg',
'neon_store_reg64',
'neon_store_scratch',
'new_ldst_label',
'next_list',
'normalizeFloat128Subnormal',
'normalizeFloat16Subnormal',
'normalizeFloat32Subnormal',
'normalizeFloat64Subnormal',
'normalizeFloatx80Subnormal',
'normalizeRoundAndPackFloat128',
'normalizeRoundAndPackFloat32',
'normalizeRoundAndPackFloat64',
'normalizeRoundAndPackFloatx80',
'not_v6_cp_reginfo',
'not_v7_cp_reginfo',
'not_v8_cp_reginfo',
'object_child_foreach',
'object_class_foreach',
'object_class_foreach_tramp',
'object_class_get_list',
'object_class_get_list_tramp',
'object_class_get_parent',
'object_deinit',
'object_dynamic_cast',
'object_finalize',
'object_finalize_child_property',
'object_get_child_property',
'object_get_link_property',
'object_get_root',
'object_initialize_with_type',
'object_init_with_type',
'object_instance_init',
'object_new_with_type',
'object_post_init_with_type',
'object_property_add_alias',
'object_property_add_link',
'object_property_add_uint16_ptr',
'object_property_add_uint32_ptr',
'object_property_add_uint64_ptr',
'object_property_add_uint8_ptr',
'object_property_allow_set_link',
'object_property_del',
'object_property_del_all',
'object_property_find',
'object_property_get',
'object_property_get_bool',
'object_property_get_int',
'object_property_get_link',
'object_property_get_qobject',
'object_property_get_str',
'object_property_get_type',
'object_property_is_child',
'object_property_set',
'object_property_set_description',
'object_property_set_link',
'object_property_set_qobject',
'object_release_link_property',
'object_resolve_abs_path',
'object_resolve_child_property',
'object_resolve_link',
'object_resolve_link_property',
'object_resolve_partial_path',
'object_resolve_path',
'object_resolve_path_component',
'object_resolve_path_type',
'object_set_link_property',
'object_unparent',
'omap_cachemaint_write',
'omap_cp_reginfo',
'omap_threadid_write',
'omap_ticonfig_write',
'omap_wfi_write',
'op_bits',
'open_modeflags',
'op_to_mov',
'op_to_movi',
'output_type_enum',
'packFloat128',
'packFloat16',
'packFloat32',
'packFloat64',
'packFloatx80',
'page_find',
'page_find_alloc',
'page_flush_tb',
'page_flush_tb_1',
'page_init',
'page_size_init',
'par',
'parse_array',
'parse_error',
'parse_escape',
'parse_keyword',
'parse_literal',
'parse_object',
'parse_optional',
'parse_option_bool',
'parse_option_number',
'parse_option_size',
'parse_pair',
'parser_context_free',
'parser_context_new',
'parser_context_peek_token',
'parser_context_pop_token',
'parser_context_restore',
'parser_context_save',
'parse_str',
'parse_type_bool',
'parse_type_int',
'parse_type_number',
'parse_type_size',
'parse_type_str',
'parse_value',
'par_write',
'patch_reloc',
'phys_map_node_alloc',
'phys_map_node_reserve',
'phys_mem_alloc',
'phys_mem_set_alloc',
'phys_page_compact',
'phys_page_compact_all',
'phys_page_find',
'phys_page_set',
'phys_page_set_level',
'phys_section_add',
'phys_section_destroy',
'phys_sections_free',
'pickNaN',
'pickNaNMulAdd',
'pmccfiltr_write',
'pmccntr_read',
'pmccntr_sync',
'pmccntr_write',
'pmccntr_write32',
'pmcntenclr_write',
'pmcntenset_write',
'pmcr_write',
'pmintenclr_write',
'pmintenset_write',
'pmovsr_write',
'pmreg_access',
'pmsav5_cp_reginfo',
'pmsav5_data_ap_read',
'pmsav5_data_ap_write',
'pmsav5_insn_ap_read',
'pmsav5_insn_ap_write',
'pmuserenr_write',
'pmxevtyper_write',
'print_type_bool',
'print_type_int',
'print_type_number',
'print_type_size',
'print_type_str',
'propagateFloat128NaN',
'propagateFloat32MulAddNaN',
'propagateFloat32NaN',
'propagateFloat64MulAddNaN',
'propagateFloat64NaN',
'propagateFloatx80NaN',
'property_get_alias',
'property_get_bool',
'property_get_str',
'property_get_uint16_ptr',
'property_get_uint32_ptr',
'property_get_uint64_ptr',
'property_get_uint8_ptr',
'property_release_alias',
'property_release_bool',
'property_release_str',
'property_resolve_alias',
'property_set_alias',
'property_set_bool',
'property_set_str',
'pstate_read',
'pstate_write',
'pxa250_initfn',
'pxa255_initfn',
'pxa260_initfn',
'pxa261_initfn',
'pxa262_initfn',
'pxa270a0_initfn',
'pxa270a1_initfn',
'pxa270b0_initfn',
'pxa270b1_initfn',
'pxa270c0_initfn',
'pxa270c5_initfn',
'qapi_dealloc_end_implicit_struct',
'qapi_dealloc_end_list',
'qapi_dealloc_end_struct',
'qapi_dealloc_get_visitor',
'qapi_dealloc_next_list',
'qapi_dealloc_pop',
'qapi_dealloc_push',
'qapi_dealloc_start_implicit_struct',
'qapi_dealloc_start_list',
'qapi_dealloc_start_struct',
'qapi_dealloc_start_union',
'qapi_dealloc_type_bool',
'qapi_dealloc_type_enum',
'qapi_dealloc_type_int',
'qapi_dealloc_type_number',
'qapi_dealloc_type_size',
'qapi_dealloc_type_str',
'qapi_dealloc_visitor_cleanup',
'qapi_dealloc_visitor_new',
'qapi_free_boolList',
'qapi_free_ErrorClassList',
'qapi_free_int16List',
'qapi_free_int32List',
'qapi_free_int64List',
'qapi_free_int8List',
'qapi_free_intList',
'qapi_free_numberList',
'qapi_free_strList',
'qapi_free_uint16List',
'qapi_free_uint32List',
'qapi_free_uint64List',
'qapi_free_uint8List',
'qapi_free_X86CPUFeatureWordInfo',
'qapi_free_X86CPUFeatureWordInfoList',
'qapi_free_X86CPURegister32List',
'qbool_destroy_obj',
'qbool_from_int',
'qbool_get_int',
'qbool_type',
'qbus_create',
'qbus_create_inplace',
'qbus_finalize',
'qbus_initfn',
'qbus_realize',
'qdev_create',
'qdev_get_type',
'qdev_register_types',
'qdev_set_parent_bus',
'qdev_try_create',
'qdict_add_key',
'qdict_array_split',
'qdict_clone_shallow',
'qdict_del',
'qdict_destroy_obj',
'qdict_entry_key',
'qdict_entry_value',
'qdict_extract_subqdict',
'qdict_find',
'qdict_first',
'qdict_flatten',
'qdict_flatten_qdict',
'qdict_flatten_qlist',
'qdict_get',
'qdict_get_bool',
'qdict_get_double',
'qdict_get_int',
'qdict_get_obj',
'qdict_get_qdict',
'qdict_get_qlist',
'qdict_get_str',
'qdict_get_try_bool',
'qdict_get_try_int',
'qdict_get_try_str',
'qdict_haskey',
'qdict_has_prefixed_entries',
'qdict_iter',
'qdict_join',
'qdict_new',
'qdict_next',
'qdict_next_entry',
'qdict_put_obj',
'qdict_size',
'qdict_type',
'qemu_clock_get_us',
'qemu_clock_ptr',
'qemu_clocks',
'qemu_get_cpu',
'qemu_get_guest_memory_mapping',
'qemu_get_guest_simple_memory_mapping',
'qemu_get_ram_block',
'qemu_get_ram_block_host_ptr',
'qemu_get_ram_fd',
'qemu_get_ram_ptr',
'qemu_host_page_mask',
'qemu_host_page_size',
'qemu_init_vcpu',
'qemu_ld_helpers',
'qemu_log_close',
'qemu_log_enabled',
'qemu_log_flush',
'qemu_loglevel_mask',
'qemu_log_vprintf',
'qemu_oom_check',
'qemu_parse_fd',
'qemu_ram_addr_from_host',
'qemu_ram_addr_from_host_nofail',
'qemu_ram_alloc',
'qemu_ram_alloc_from_ptr',
'qemu_ram_foreach_block',
'qemu_ram_free',
'qemu_ram_free_from_ptr',
'qemu_ram_ptr_length',
'qemu_ram_remap',
'qemu_ram_setup_dump',
'qemu_ram_unset_idstr',
'qemu_real_host_page_size',
'qemu_st_helpers',
'qemu_tcg_init_vcpu',
'qemu_try_memalign',
'qentry_destroy',
'qerror_human',
'qerror_report',
'qerror_report_err',
'qfloat_destroy_obj',
'qfloat_from_double',
'qfloat_get_double',
'qfloat_type',
'qint_destroy_obj',
'qint_from_int',
'qint_get_int',
'qint_type',
'qlist_append_obj',
'qlist_copy',
'qlist_copy_elem',
'qlist_destroy_obj',
'qlist_empty',
'qlist_entry_obj',
'qlist_first',
'qlist_iter',
'qlist_new',
'qlist_next',
'qlist_peek',
'qlist_pop',
'qlist_size',
'qlist_size_iter',
'qlist_type',
'qmp_input_end_implicit_struct',
'qmp_input_end_list',
'qmp_input_end_struct',
'qmp_input_get_next_type',
'qmp_input_get_object',
'qmp_input_get_visitor',
'qmp_input_next_list',
'qmp_input_optional',
'qmp_input_pop',
'qmp_input_push',
'qmp_input_start_implicit_struct',
'qmp_input_start_list',
'qmp_input_start_struct',
'qmp_input_type_bool',
'qmp_input_type_int',
'qmp_input_type_number',
'qmp_input_type_str',
'qmp_input_visitor_cleanup',
'qmp_input_visitor_new',
'qmp_input_visitor_new_strict',
'qmp_output_add_obj',
'qmp_output_end_list',
'qmp_output_end_struct',
'qmp_output_first',
'qmp_output_get_qobject',
'qmp_output_get_visitor',
'qmp_output_last',
'qmp_output_next_list',
'qmp_output_pop',
'qmp_output_push_obj',
'qmp_output_start_list',
'qmp_output_start_struct',
'qmp_output_type_bool',
'qmp_output_type_int',
'qmp_output_type_number',
'qmp_output_type_str',
'qmp_output_visitor_cleanup',
'qmp_output_visitor_new',
'qobject_decref',
'qobject_to_qbool',
'qobject_to_qdict',
'qobject_to_qfloat',
'qobject_to_qint',
'qobject_to_qlist',
'qobject_to_qstring',
'qobject_type',
'qstring_append',
'qstring_append_chr',
'qstring_append_int',
'qstring_destroy_obj',
'qstring_from_escaped_str',
'qstring_from_str',
'qstring_from_substr',
'qstring_get_length',
'qstring_get_str',
'qstring_new',
'qstring_type',
'ram_block_add',
'ram_size',
'range_compare',
'range_covers_byte',
'range_get_last',
'range_merge',
'ranges_can_merge',
'raw_read',
'raw_write',
'rcon',
'read_raw_cp_reg',
'recip_estimate',
'recip_sqrt_estimate',
'register_cp_regs_for_features',
'register_multipage',
'register_subpage',
'register_tm_clones',
'register_types_object',
'regnames',
'render_memory_region',
'reset_all_temps',
'reset_temp',
'rol32',
'rol64',
'ror32',
'ror64',
'roundAndPackFloat128',
'roundAndPackFloat16',
'roundAndPackFloat32',
'roundAndPackFloat64',
'roundAndPackFloatx80',
'roundAndPackInt32',
'roundAndPackInt64',
'roundAndPackUint64',
'round_to_inf',
'run_on_cpu',
's0',
'S0',
's1',
'S1',
'sa1100_initfn',
'sa1110_initfn',
'save_globals',
'scr_write',
'sctlr_write',
'set_bit',
'set_bits',
'set_default_nan_mode',
'set_feature',
'set_float_detect_tininess',
'set_float_exception_flags',
'set_float_rounding_mode',
'set_flush_inputs_to_zero',
'set_flush_to_zero',
'set_swi_errno',
'sextract32',
'sextract64',
'shift128ExtraRightJamming',
'shift128Right',
'shift128RightJamming',
'shift32RightJamming',
'shift64ExtraRightJamming',
'shift64RightJamming',
'shifter_out_im',
'shortShift128Left',
'shortShift192Left',
'simple_mpu_ap_bits',
'size_code_gen_buffer',
'softmmu_lock_user',
'softmmu_lock_user_string',
'softmmu_tget32',
'softmmu_tget8',
'softmmu_tput32',
'softmmu_unlock_user',
'sort_constraints',
'sp_el0_access',
'spsel_read',
'spsel_write',
'start_list',
'stb_p',
'stb_phys',
'stl_be_p',
'stl_be_phys',
'stl_he_p',
'stl_le_p',
'stl_le_phys',
'stl_phys',
'stl_phys_internal',
'stl_phys_notdirty',
'store_cpu_offset',
'store_reg',
'store_reg_bx',
'store_reg_from_load',
'stq_be_p',
'stq_be_phys',
'stq_he_p',
'stq_le_p',
'stq_le_phys',
'stq_phys',
'string_input_get_visitor',
'string_input_visitor_cleanup',
'string_input_visitor_new',
'strongarm_cp_reginfo',
'strstart',
'strtosz',
'strtosz_suffix',
'stw_be_p',
'stw_be_phys',
'stw_he_p',
'stw_le_p',
'stw_le_phys',
'stw_phys',
'stw_phys_internal',
'sub128',
'sub16_sat',
'sub16_usat',
'sub192',
'sub8_sat',
'sub8_usat',
'subFloat128Sigs',
'subFloat32Sigs',
'subFloat64Sigs',
'subFloatx80Sigs',
'subpage_accepts',
'subpage_init',
'subpage_ops',
'subpage_read',
'subpage_register',
'subpage_write',
'suffix_mul',
'swap_commutative',
'swap_commutative2',
'switch_mode',
'switch_v7m_sp',
'syn_aa32_bkpt',
'syn_aa32_hvc',
'syn_aa32_smc',
'syn_aa32_svc',
'syn_breakpoint',
'sync_globals',
'syn_cp14_rrt_trap',
'syn_cp14_rt_trap',
'syn_cp15_rrt_trap',
'syn_cp15_rt_trap',
'syn_data_abort',
'syn_fp_access_trap',
'syn_insn_abort',
'syn_swstep',
'syn_uncategorized',
'syn_watchpoint',
'syscall_err',
'system_bus_class_init',
'system_bus_info',
't2ee_cp_reginfo',
'table_logic_cc',
'target_parse_constraint',
'target_words_bigendian',
'tb_add_jump',
'tb_alloc',
'tb_alloc_page',
'tb_check_watchpoint',
'tb_find_fast',
'tb_find_pc',
'tb_find_slow',
'tb_flush',
'tb_flush_jmp_cache',
'tb_free',
'tb_gen_code',
'tb_hash_remove',
'tb_invalidate_phys_addr',
'tb_invalidate_phys_page_range',
'tb_invalidate_phys_range',
'tb_jmp_cache_hash_func',
'tb_jmp_cache_hash_page',
'tb_jmp_remove',
'tb_link_page',
'tb_page_remove',
'tb_phys_hash_func',
'tb_phys_invalidate',
'tb_reset_jump',
'tb_set_jmp_target',
'tcg_accel_class_init',
'tcg_accel_type',
'tcg_add_param_i32',
'tcg_add_param_i64',
'tcg_add_target_add_op_defs',
'tcg_allowed',
'tcg_canonicalize_memop',
'tcg_commit',
'tcg_cond_to_jcc',
'tcg_constant_folding',
'tcg_const_i32',
'tcg_const_i64',
'tcg_const_local_i32',
'tcg_const_local_i64',
'tcg_context_init',
'tcg_cpu_address_space_init',
'tcg_cpu_exec',
'tcg_current_code_size',
'tcg_dump_info',
'tcg_dump_ops',
'tcg_exec_all',
'tcg_find_helper',
'tcg_func_start',
'tcg_gen_abs_i32',
'tcg_gen_add2_i32',
'tcg_gen_add_i32',
'tcg_gen_add_i64',
'tcg_gen_addi_i32',
'tcg_gen_addi_i64',
'tcg_gen_andc_i32',
'tcg_gen_and_i32',
'tcg_gen_and_i64',
'tcg_gen_andi_i32',
'tcg_gen_andi_i64',
'tcg_gen_br',
'tcg_gen_brcond_i32',
'tcg_gen_brcond_i64',
'tcg_gen_brcondi_i32',
'tcg_gen_bswap16_i32',
'tcg_gen_bswap32_i32',
'tcg_gen_callN',
'tcg_gen_code',
'tcg_gen_code_common',
'tcg_gen_code_search_pc',
'tcg_gen_concat_i32_i64',
'tcg_gen_debug_insn_start',
'tcg_gen_deposit_i32',
'tcg_gen_exit_tb',
'tcg_gen_ext16s_i32',
'tcg_gen_ext16u_i32',
'tcg_gen_ext32s_i64',
'tcg_gen_ext32u_i64',
'tcg_gen_ext8s_i32',
'tcg_gen_ext8u_i32',
'tcg_gen_ext_i32_i64',
'tcg_gen_extu_i32_i64',
'tcg_gen_goto_tb',
'tcg_gen_ld_i32',
'tcg_gen_ld_i64',
'tcg_gen_ldst_op_i32',
'tcg_gen_ldst_op_i64',
'tcg_gen_movcond_i32',
'tcg_gen_movcond_i64',
'tcg_gen_mov_i32',
'tcg_gen_mov_i64',
'tcg_gen_movi_i32',
'tcg_gen_movi_i64',
'tcg_gen_mul_i32',
'tcg_gen_muls2_i32',
'tcg_gen_mulu2_i32',
'tcg_gen_neg_i32',
'tcg_gen_neg_i64',
'tcg_gen_not_i32',
'tcg_gen_op0',
'tcg_gen_op1i',
'tcg_gen_op2_i32',
'tcg_gen_op2_i64',
'tcg_gen_op2i_i32',
'tcg_gen_op2i_i64',
'tcg_gen_op3_i32',
'tcg_gen_op3_i64',
'tcg_gen_op4_i32',
'tcg_gen_op4i_i32',
'tcg_gen_op4ii_i32',
'tcg_gen_op4ii_i64',
'tcg_gen_op5ii_i32',
'tcg_gen_op6_i32',
'tcg_gen_op6i_i32',
'tcg_gen_op6i_i64',
'tcg_gen_orc_i32',
'tcg_gen_or_i32',
'tcg_gen_or_i64',
'tcg_gen_ori_i32',
'tcg_gen_qemu_ld_i32',
'tcg_gen_qemu_ld_i64',
'tcg_gen_qemu_st_i32',
'tcg_gen_qemu_st_i64',
'tcg_gen_rotl_i32',
'tcg_gen_rotli_i32',
'tcg_gen_rotr_i32',
'tcg_gen_rotri_i32',
'tcg_gen_sar_i32',
'tcg_gen_sari_i32',
'tcg_gen_setcond_i32',
'tcg_gen_shl_i32',
'tcg_gen_shl_i64',
'tcg_gen_shli_i32',
'tcg_gen_shli_i64',
'tcg_gen_shr_i32',
'tcg_gen_shifti_i64',
'tcg_gen_shr_i64',
'tcg_gen_shri_i32',
'tcg_gen_shri_i64',
'tcg_gen_st_i32',
'tcg_gen_st_i64',
'tcg_gen_sub_i32',
'tcg_gen_sub_i64',
'tcg_gen_subi_i32',
'tcg_gen_trunc_i64_i32',
'tcg_gen_trunc_shr_i64_i32',
'tcg_gen_xor_i32',
'tcg_gen_xor_i64',
'tcg_gen_xori_i32',
'tcg_get_arg_str_i32',
'tcg_get_arg_str_i64',
'tcg_get_arg_str_idx',
'tcg_global_mem_new_i32',
'tcg_global_mem_new_i64',
'tcg_global_mem_new_internal',
'tcg_global_reg_new_i32',
'tcg_global_reg_new_i64',
'tcg_global_reg_new_internal',
'tcg_handle_interrupt',
'tcg_init',
'tcg_invert_cond',
'tcg_la_bb_end',
'tcg_la_br_end',
'tcg_la_func_end',
'tcg_liveness_analysis',
'tcg_malloc',
'tcg_malloc_internal',
'tcg_op_defs_org',
'tcg_opt_gen_mov',
'tcg_opt_gen_movi',
'tcg_optimize',
'tcg_out16',
'tcg_out32',
'tcg_out64',
'tcg_out8',
'tcg_out_addi',
'tcg_out_branch',
'tcg_out_brcond32',
'tcg_out_brcond64',
'tcg_out_bswap32',
'tcg_out_bswap64',
'tcg_out_call',
'tcg_out_cmp',
'tcg_out_ext16s',
'tcg_out_ext16u',
'tcg_out_ext32s',
'tcg_out_ext32u',
'tcg_out_ext8s',
'tcg_out_ext8u',
'tcg_out_jmp',
'tcg_out_jxx',
'tcg_out_label',
'tcg_out_ld',
'tcg_out_modrm',
'tcg_out_modrm_offset',
'tcg_out_modrm_sib_offset',
'tcg_out_mov',
'tcg_out_movcond32',
'tcg_out_movcond64',
'tcg_out_movi',
'tcg_out_op',
'tcg_out_pop',
'tcg_out_push',
'tcg_out_qemu_ld',
'tcg_out_qemu_ld_direct',
'tcg_out_qemu_ld_slow_path',
'tcg_out_qemu_st',
'tcg_out_qemu_st_direct',
'tcg_out_qemu_st_slow_path',
'tcg_out_reloc',
'tcg_out_rolw_8',
'tcg_out_setcond32',
'tcg_out_setcond64',
'tcg_out_shifti',
'tcg_out_st',
'tcg_out_tb_finalize',
'tcg_out_tb_init',
'tcg_out_tlb_load',
'tcg_out_vex_modrm',
'tcg_patch32',
'tcg_patch8',
'tcg_pcrel_diff',
'tcg_pool_reset',
'tcg_prologue_init',
'tcg_ptr_byte_diff',
'tcg_reg_alloc',
'tcg_reg_alloc_bb_end',
'tcg_reg_alloc_call',
'tcg_reg_alloc_mov',
'tcg_reg_alloc_movi',
'tcg_reg_alloc_op',
'tcg_reg_alloc_start',
'tcg_reg_free',
'tcg_reg_sync',
'tcg_set_frame',
'tcg_set_nop',
'tcg_swap_cond',
'tcg_target_callee_save_regs',
'tcg_target_call_iarg_regs',
'tcg_target_call_oarg_regs',
'tcg_target_const_match',
'tcg_target_init',
'tcg_target_qemu_prologue',
'tcg_target_reg_alloc_order',
'tcg_temp_alloc',
'tcg_temp_free_i32',
'tcg_temp_free_i64',
'tcg_temp_free_internal',
'tcg_temp_local_new_i32',
'tcg_temp_local_new_i64',
'tcg_temp_new_i32',
'tcg_temp_new_i64',
'tcg_temp_new_internal',
'tcg_temp_new_internal_i32',
'tcg_temp_new_internal_i64',
'tdb_hash',
'teecr_write',
'teehbr_access',
'temp_allocate_frame',
'temp_dead',
'temps_are_copies',
'temp_save',
'temp_sync',
'tgen_arithi',
'tgen_arithr',
'thumb2_logic_op',
'ti925t_initfn',
'tlb_add_large_page',
'tlb_flush_entry',
'tlbi_aa64_asid_is_write',
'tlbi_aa64_asid_write',
'tlbi_aa64_vaa_is_write',
'tlbi_aa64_vaa_write',
'tlbi_aa64_va_is_write',
'tlbi_aa64_va_write',
'tlbiall_is_write',
'tlbiall_write',
'tlbiasid_is_write',
'tlbiasid_write',
'tlbimvaa_is_write',
'tlbimvaa_write',
'tlbimva_is_write',
'tlbimva_write',
'tlb_is_dirty_ram',
'tlb_protect_code',
'tlb_reset_dirty_range',
'tlb_reset_dirty_range_all',
'tlb_set_dirty',
'tlb_set_dirty1',
'tlb_unprotect_code_phys',
'tlb_vaddr_to_host',
'token_get_type',
'token_get_value',
'token_is_escape',
'token_is_keyword',
'token_is_operator',
'tokens_append_from_iter',
'to_qiv',
'to_qov',
'tosa_init',
'tosa_machine_init',
'tswap32',
'tswap64',
'type_class_get_size',
'type_get_by_name',
'type_get_parent',
'type_has_parent',
'type_initialize',
'type_initialize_interface',
'type_is_ancestor',
'type_new',
'type_object_get_size',
'type_register_internal',
'type_table_add',
'type_table_get',
'type_table_lookup',
'uint16_to_float32',
'uint16_to_float64',
'uint32_to_float32',
'uint32_to_float64',
'uint64_to_float128',
'uint64_to_float32',
'uint64_to_float64',
'unassigned_io_ops',
'unassigned_io_read',
'unassigned_io_write',
'unassigned_mem_accepts',
'unassigned_mem_ops',
'unassigned_mem_read',
'unassigned_mem_write',
'update_spsel',
'v6_cp_reginfo',
'v6k_cp_reginfo',
'v7_cp_reginfo',
'v7mp_cp_reginfo',
'v7m_pop',
'v7m_push',
'v8_cp_reginfo',
'v8_el2_cp_reginfo',
'v8_el3_cp_reginfo',
'v8_el3_no_el2_cp_reginfo',
'vapa_cp_reginfo',
'vbar_write',
'vfp_exceptbits_from_host',
'vfp_exceptbits_to_host',
'vfp_get_fpcr',
'vfp_get_fpscr',
'vfp_get_fpsr',
'vfp_reg_offset',
'vfp_set_fpcr',
'vfp_set_fpscr',
'vfp_set_fpsr',
'visit_end_implicit_struct',
'visit_end_list',
'visit_end_struct',
'visit_end_union',
'visit_get_next_type',
'visit_next_list',
'visit_optional',
'visit_start_implicit_struct',
'visit_start_list',
'visit_start_struct',
'visit_start_union',
'vmsa_cp_reginfo',
'vmsa_tcr_el1_write',
'vmsa_ttbcr_raw_write',
'vmsa_ttbcr_reset',
'vmsa_ttbcr_write',
'vmsa_ttbr_write',
'write_cpustate_to_list',
'write_list_to_cpustate',
'write_raw_cp_reg',
'X86CPURegister32_lookup',
'x86_op_defs',
'xpsr_read',
'xpsr_write',
'xscale_cpar_write',
'xscale_cp_reginfo'
)
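
# Per-target symbol groups. These are appended to the common list above when
# header_gen.py runs for a matching architecture (see the __main__ block
# below).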
arm_symbols = (
'ARM_REGS_STORAGE_SIZE',
)
aarch64_symbols = (
'ARM64_REGS_STORAGE_SIZE',
'arm64_release',
'arm64_reg_reset',
'arm64_reg_read',
'arm64_reg_write',
'gen_a64_set_pc_im',
'aarch64_cpu_register_types',
'helper_udiv64',
'helper_sdiv64',
'helper_cls64',
'helper_cls32',
'helper_rbit64',
'helper_vfp_cmps_a64',
'helper_vfp_cmpes_a64',
'helper_vfp_cmpd_a64',
'helper_vfp_cmped_a64',
'helper_vfp_mulxs',
'helper_vfp_mulxd',
'helper_simd_tbl',
'helper_neon_ceq_f64',
'helper_neon_cge_f64',
'helper_neon_cgt_f64',
'helper_recpsf_f32',
'helper_recpsf_f64',
'helper_rsqrtsf_f32',
'helper_rsqrtsf_f64',
'helper_neon_addlp_s8',
'helper_neon_addlp_u8',
'helper_neon_addlp_s16',
'helper_neon_addlp_u16',
'helper_frecpx_f32',
'helper_frecpx_f64',
'helper_fcvtx_f64_to_f32',
'helper_crc32_64',
'helper_crc32c_64',
'aarch64_cpu_do_interrupt',
)
mips_symbols = (
'cpu_mips_exec',
'cpu_mips_get_random',
'cpu_mips_get_count',
'cpu_mips_store_count',
'cpu_mips_store_compare',
'cpu_mips_start_count',
'cpu_mips_stop_count',
'mips_machine_init',
'cpu_mips_kseg0_to_phys',
'cpu_mips_phys_to_kseg0',
'cpu_mips_kvm_um_phys_to_kseg0',
'mips_cpu_register_types',
'cpu_mips_init',
'cpu_state_reset',
'helper_msa_andi_b',
'helper_msa_ori_b',
'helper_msa_nori_b',
'helper_msa_xori_b',
'helper_msa_bmnzi_b',
'helper_msa_bmzi_b',
'helper_msa_bseli_b',
'helper_msa_shf_df',
'helper_msa_and_v',
'helper_msa_or_v',
'helper_msa_nor_v',
'helper_msa_xor_v',
'helper_msa_bmnz_v',
'helper_msa_bmz_v',
'helper_msa_bsel_v',
'helper_msa_addvi_df',
'helper_msa_subvi_df',
'helper_msa_ceqi_df',
'helper_msa_clei_s_df',
'helper_msa_clei_u_df',
'helper_msa_clti_s_df',
'helper_msa_clti_u_df',
'helper_msa_maxi_s_df',
'helper_msa_maxi_u_df',
'helper_msa_mini_s_df',
'helper_msa_mini_u_df',
'helper_msa_ldi_df',
'helper_msa_slli_df',
'helper_msa_srai_df',
'helper_msa_srli_df',
'helper_msa_bclri_df',
'helper_msa_bseti_df',
'helper_msa_bnegi_df',
'helper_msa_sat_s_df',
'helper_msa_sat_u_df',
'helper_msa_srari_df',
'helper_msa_srlri_df',
'helper_msa_binsli_df',
'helper_msa_binsri_df',
'helper_msa_sll_df',
'helper_msa_sra_df',
'helper_msa_srl_df',
'helper_msa_bclr_df',
'helper_msa_bset_df',
'helper_msa_bneg_df',
'helper_msa_addv_df',
'helper_msa_subv_df',
'helper_msa_max_s_df',
'helper_msa_max_u_df',
'helper_msa_min_s_df',
'helper_msa_min_u_df',
'helper_msa_max_a_df',
'helper_msa_min_a_df',
'helper_msa_ceq_df',
'helper_msa_clt_s_df',
'helper_msa_clt_u_df',
'helper_msa_cle_s_df',
'helper_msa_cle_u_df',
'helper_msa_add_a_df',
'helper_msa_adds_a_df',
'helper_msa_adds_s_df',
'helper_msa_adds_u_df',
'helper_msa_ave_s_df',
'helper_msa_ave_u_df',
'helper_msa_aver_s_df',
'helper_msa_aver_u_df',
'helper_msa_subs_s_df',
'helper_msa_subs_u_df',
'helper_msa_subsus_u_df',
'helper_msa_subsuu_s_df',
'helper_msa_asub_s_df',
'helper_msa_asub_u_df',
'helper_msa_mulv_df',
'helper_msa_div_s_df',
'helper_msa_div_u_df',
'helper_msa_mod_s_df',
'helper_msa_mod_u_df',
'helper_msa_dotp_s_df',
'helper_msa_dotp_u_df',
'helper_msa_srar_df',
'helper_msa_srlr_df',
'helper_msa_hadd_s_df',
'helper_msa_hadd_u_df',
'helper_msa_hsub_s_df',
'helper_msa_hsub_u_df',
'helper_msa_mul_q_df',
'helper_msa_mulr_q_df',
'helper_msa_sld_df',
'helper_msa_maddv_df',
'helper_msa_msubv_df',
'helper_msa_dpadd_s_df',
'helper_msa_dpadd_u_df',
'helper_msa_dpsub_s_df',
'helper_msa_dpsub_u_df',
'helper_msa_binsl_df',
'helper_msa_binsr_df',
'helper_msa_madd_q_df',
'helper_msa_msub_q_df',
'helper_msa_maddr_q_df',
'helper_msa_msubr_q_df',
'helper_msa_splat_df',
'helper_msa_pckev_df',
'helper_msa_pckod_df',
'helper_msa_ilvl_df',
'helper_msa_ilvr_df',
'helper_msa_ilvev_df',
'helper_msa_ilvod_df',
'helper_msa_vshf_df',
'helper_msa_sldi_df',
'helper_msa_splati_df',
'helper_msa_copy_s_df',
'helper_msa_copy_u_df',
'helper_msa_insert_df',
'helper_msa_insve_df',
'helper_msa_ctcmsa',
'helper_msa_cfcmsa',
'helper_msa_move_v',
'helper_msa_fill_df',
'helper_msa_nlzc_df',
'helper_msa_nloc_df',
'helper_msa_pcnt_df',
'helper_msa_fcaf_df',
'helper_msa_fcun_df',
'helper_msa_fceq_df',
'helper_msa_fcueq_df',
'helper_msa_fclt_df',
'helper_msa_fcult_df',
'helper_msa_fcle_df',
'helper_msa_fcule_df',
'helper_msa_fsaf_df',
'helper_msa_fsun_df',
'helper_msa_fseq_df',
'helper_msa_fsueq_df',
'helper_msa_fslt_df',
'helper_msa_fsult_df',
'helper_msa_fsle_df',
'helper_msa_fsule_df',
'helper_msa_fcor_df',
'helper_msa_fcune_df',
'helper_msa_fcne_df',
'helper_msa_fsor_df',
'helper_msa_fsune_df',
'helper_msa_fsne_df',
'helper_msa_fadd_df',
'helper_msa_fsub_df',
'helper_msa_fmul_df',
'helper_msa_fdiv_df',
'helper_msa_fmadd_df',
'helper_msa_fmsub_df',
'helper_msa_fexp2_df',
'helper_msa_fexdo_df',
'helper_msa_ftq_df',
'helper_msa_fmin_df',
'helper_msa_fmin_a_df',
'helper_msa_fmax_df',
'helper_msa_fmax_a_df',
'helper_msa_fclass_df',
'helper_msa_ftrunc_s_df',
'helper_msa_ftrunc_u_df',
'helper_msa_fsqrt_df',
'helper_msa_frsqrt_df',
'helper_msa_frcp_df',
'helper_msa_frint_df',
'helper_msa_flog2_df',
'helper_msa_fexupl_df',
'helper_msa_fexupr_df',
'helper_msa_ffql_df',
'helper_msa_ffqr_df',
'helper_msa_ftint_s_df',
'helper_msa_ftint_u_df',
'helper_msa_ffint_s_df',
'helper_msa_ffint_u_df',
'helper_paddsb',
'helper_paddusb',
'helper_paddsh',
'helper_paddush',
'helper_paddb',
'helper_paddh',
'helper_paddw',
'helper_psubsb',
'helper_psubusb',
'helper_psubsh',
'helper_psubush',
'helper_psubb',
'helper_psubh',
'helper_psubw',
'helper_pshufh',
'helper_packsswh',
'helper_packsshb',
'helper_packushb',
'helper_punpcklwd',
'helper_punpckhwd',
'helper_punpcklhw',
'helper_punpckhhw',
'helper_punpcklbh',
'helper_punpckhbh',
'helper_pavgh',
'helper_pavgb',
'helper_pmaxsh',
'helper_pminsh',
'helper_pmaxub',
'helper_pminub',
'helper_pcmpeqw',
'helper_pcmpgtw',
'helper_pcmpeqh',
'helper_pcmpgth',
'helper_pcmpeqb',
'helper_pcmpgtb',
'helper_psllw',
'helper_psrlw',
'helper_psraw',
'helper_psllh',
'helper_psrlh',
'helper_psrah',
'helper_pmullh',
'helper_pmulhh',
'helper_pmulhuh',
'helper_pmaddhw',
'helper_pasubub',
'helper_biadd',
'helper_pmovmskb',
'helper_absq_s_ph',
'helper_absq_s_qb',
'helper_absq_s_w',
'helper_addqh_ph',
'helper_addqh_r_ph',
'helper_addqh_r_w',
'helper_addqh_w',
'helper_adduh_qb',
'helper_adduh_r_qb',
'helper_subqh_ph',
'helper_subqh_r_ph',
'helper_subqh_r_w',
'helper_subqh_w',
'helper_addq_ph',
'helper_addq_s_ph',
'helper_addq_s_w',
'helper_addu_ph',
'helper_addu_qb',
'helper_addu_s_ph',
'helper_addu_s_qb',
'helper_subq_ph',
'helper_subq_s_ph',
'helper_subq_s_w',
'helper_subu_ph',
'helper_subu_qb',
'helper_subu_s_ph',
'helper_subu_s_qb',
'helper_subuh_qb',
'helper_subuh_r_qb',
'helper_addsc',
'helper_addwc',
'helper_modsub',
'helper_raddu_w_qb',
'helper_precr_qb_ph',
'helper_precrq_qb_ph',
'helper_precr_sra_ph_w',
'helper_precr_sra_r_ph_w',
'helper_precrq_ph_w',
'helper_precrq_rs_ph_w',
'helper_precrqu_s_qb_ph',
'helper_precequ_ph_qbl',
'helper_precequ_ph_qbr',
'helper_precequ_ph_qbla',
'helper_precequ_ph_qbra',
'helper_preceu_ph_qbl',
'helper_preceu_ph_qbr',
'helper_preceu_ph_qbla',
'helper_preceu_ph_qbra',
'helper_shll_qb',
'helper_shrl_qb',
'helper_shra_qb',
'helper_shra_r_qb',
'helper_shll_ph',
'helper_shll_s_ph',
'helper_shll_s_w',
'helper_shra_r_w',
'helper_shrl_ph',
'helper_shra_ph',
'helper_shra_r_ph',
'helper_muleu_s_ph_qbl',
'helper_muleu_s_ph_qbr',
'helper_mulq_rs_ph',
'helper_mul_ph',
'helper_mul_s_ph',
'helper_mulq_s_ph',
'helper_muleq_s_w_phl',
'helper_muleq_s_w_phr',
'helper_mulsaq_s_w_ph',
'helper_mulsa_w_ph',
'helper_dpau_h_qbl',
'helper_dpau_h_qbr',
'helper_dpsu_h_qbl',
'helper_dpsu_h_qbr',
'helper_dpa_w_ph',
'helper_dpax_w_ph',
'helper_dps_w_ph',
'helper_dpsx_w_ph',
'helper_dpaq_s_w_ph',
'helper_dpaqx_s_w_ph',
'helper_dpsq_s_w_ph',
'helper_dpsqx_s_w_ph',
'helper_dpaqx_sa_w_ph',
'helper_dpsqx_sa_w_ph',
'helper_dpaq_sa_l_w',
'helper_dpsq_sa_l_w',
'helper_maq_s_w_phl',
'helper_maq_s_w_phr',
'helper_maq_sa_w_phl',
'helper_maq_sa_w_phr',
'helper_mulq_s_w',
'helper_mulq_rs_w',
'helper_bitrev',
'helper_insv',
'helper_cmpgu_eq_qb',
'helper_cmpgu_lt_qb',
'helper_cmpgu_le_qb',
'helper_cmpu_eq_qb',
'helper_cmpu_lt_qb',
'helper_cmpu_le_qb',
'helper_cmp_eq_ph',
'helper_cmp_lt_ph',
'helper_cmp_le_ph',
'helper_pick_qb',
'helper_pick_ph',
'helper_packrl_ph',
'helper_extr_w',
'helper_extr_r_w',
'helper_extr_rs_w',
'helper_extr_s_h',
'helper_extp',
'helper_extpdp',
'helper_shilo',
'helper_mthlip',
'cpu_wrdsp',
'helper_wrdsp',
'cpu_rddsp',
'helper_rddsp',
'helper_raise_exception_err',
'helper_clo',
'helper_clz',
'helper_muls',
'helper_mulsu',
'helper_macc',
'helper_macchi',
'helper_maccu',
'helper_macchiu',
'helper_msac',
'helper_msachi',
'helper_msacu',
'helper_msachiu',
'helper_mulhi',
'helper_mulhiu',
'helper_mulshi',
'helper_mulshiu',
'helper_bitswap',
'helper_ll',
'helper_sc',
'helper_swl',
'helper_swr',
'helper_lwm',
'helper_swm',
'helper_mfc0_mvpcontrol',
'helper_mfc0_mvpconf0',
'helper_mfc0_mvpconf1',
'helper_mfc0_random',
'helper_mfc0_tcstatus',
'helper_mftc0_tcstatus',
'helper_mfc0_tcbind',
'helper_mftc0_tcbind',
'helper_mfc0_tcrestart',
'helper_mftc0_tcrestart',
'helper_mfc0_tchalt',
'helper_mftc0_tchalt',
'helper_mfc0_tccontext',
'helper_mftc0_tccontext',
'helper_mfc0_tcschedule',
'helper_mftc0_tcschedule',
'helper_mfc0_tcschefback',
'helper_mftc0_tcschefback',
'helper_mfc0_count',
'helper_mftc0_entryhi',
'helper_mftc0_cause',
'helper_mftc0_status',
'helper_mfc0_lladdr',
'helper_mfc0_watchlo',
'helper_mfc0_watchhi',
'helper_mfc0_debug',
'helper_mftc0_debug',
'helper_mtc0_index',
'helper_mtc0_mvpcontrol',
'helper_mtc0_vpecontrol',
'helper_mttc0_vpecontrol',
'helper_mftc0_vpecontrol',
'helper_mftc0_vpeconf0',
'helper_mtc0_vpeconf0',
'helper_mttc0_vpeconf0',
'helper_mtc0_vpeconf1',
'helper_mtc0_yqmask',
'helper_mtc0_vpeopt',
'helper_mtc0_entrylo0',
'helper_mtc0_tcstatus',
'helper_mttc0_tcstatus',
'helper_mtc0_tcbind',
'helper_mttc0_tcbind',
'helper_mtc0_tcrestart',
'helper_mttc0_tcrestart',
'helper_mtc0_tchalt',
'helper_mttc0_tchalt',
'helper_mtc0_tccontext',
'helper_mttc0_tccontext',
'helper_mtc0_tcschedule',
'helper_mttc0_tcschedule',
'helper_mtc0_tcschefback',
'helper_mttc0_tcschefback',
'helper_mtc0_entrylo1',
'helper_mtc0_context',
'helper_mtc0_pagemask',
'helper_mtc0_pagegrain',
'helper_mtc0_wired',
'helper_mtc0_srsconf0',
'helper_mtc0_srsconf1',
'helper_mtc0_srsconf2',
'helper_mtc0_srsconf3',
'helper_mtc0_srsconf4',
'helper_mtc0_hwrena',
'helper_mtc0_count',
'helper_mtc0_entryhi',
'helper_mttc0_entryhi',
'helper_mtc0_compare',
'helper_mtc0_status',
'helper_mttc0_status',
'helper_mtc0_intctl',
'helper_mtc0_srsctl',
'helper_mtc0_cause',
'helper_mttc0_cause',
'helper_mftc0_epc',
'helper_mftc0_ebase',
'helper_mtc0_ebase',
'helper_mttc0_ebase',
'helper_mftc0_configx',
'helper_mtc0_config0',
'helper_mtc0_config2',
'helper_mtc0_config4',
'helper_mtc0_config5',
'helper_mtc0_lladdr',
'helper_mtc0_watchlo',
'helper_mtc0_watchhi',
'helper_mtc0_xcontext',
'helper_mtc0_framemask',
'helper_mtc0_debug',
'helper_mttc0_debug',
'helper_mtc0_performance0',
'helper_mtc0_taglo',
'helper_mtc0_datalo',
'helper_mtc0_taghi',
'helper_mtc0_datahi',
'helper_mftgpr',
'helper_mftlo',
'helper_mfthi',
'helper_mftacx',
'helper_mftdsp',
'helper_mttgpr',
'helper_mttlo',
'helper_mtthi',
'helper_mttacx',
'helper_mttdsp',
'helper_dmt',
'helper_emt',
'helper_dvpe',
'helper_evpe',
'helper_fork',
'helper_yield',
'r4k_helper_tlbinv',
'r4k_helper_tlbinvf',
'r4k_helper_tlbwi',
'r4k_helper_tlbwr',
'r4k_helper_tlbp',
'r4k_helper_tlbr',
'helper_tlbwi',
'helper_tlbwr',
'helper_tlbp',
'helper_tlbr',
'helper_tlbinv',
'helper_tlbinvf',
'helper_di',
'helper_ei',
'helper_eret',
'helper_deret',
'helper_rdhwr_cpunum',
'helper_rdhwr_synci_step',
'helper_rdhwr_cc',
'helper_rdhwr_ccres',
'helper_pmon',
'helper_wait',
'mips_cpu_do_unaligned_access',
'mips_cpu_unassigned_access',
'ieee_rm',
'helper_cfc1',
'helper_ctc1',
'ieee_ex_to_mips',
'helper_float_sqrt_d',
'helper_float_sqrt_s',
'helper_float_cvtd_s',
'helper_float_cvtd_w',
'helper_float_cvtd_l',
'helper_float_cvtl_d',
'helper_float_cvtl_s',
'helper_float_cvtps_pw',
'helper_float_cvtpw_ps',
'helper_float_cvts_d',
'helper_float_cvts_w',
'helper_float_cvts_l',
'helper_float_cvts_pl',
'helper_float_cvts_pu',
'helper_float_cvtw_s',
'helper_float_cvtw_d',
'helper_float_roundl_d',
'helper_float_roundl_s',
'helper_float_roundw_d',
'helper_float_roundw_s',
'helper_float_truncl_d',
'helper_float_truncl_s',
'helper_float_truncw_d',
'helper_float_truncw_s',
'helper_float_ceill_d',
'helper_float_ceill_s',
'helper_float_ceilw_d',
'helper_float_ceilw_s',
'helper_float_floorl_d',
'helper_float_floorl_s',
'helper_float_floorw_d',
'helper_float_floorw_s',
'helper_float_abs_d',
'helper_float_abs_s',
'helper_float_abs_ps',
'helper_float_chs_d',
'helper_float_chs_s',
'helper_float_chs_ps',
'helper_float_maddf_s',
'helper_float_maddf_d',
'helper_float_msubf_s',
'helper_float_msubf_d',
'helper_float_max_s',
'helper_float_max_d',
'helper_float_maxa_s',
'helper_float_maxa_d',
'helper_float_min_s',
'helper_float_min_d',
'helper_float_mina_s',
'helper_float_mina_d',
'helper_float_rint_s',
'helper_float_rint_d',
'helper_float_class_s',
'helper_float_class_d',
'helper_float_recip_d',
'helper_float_recip_s',
'helper_float_rsqrt_d',
'helper_float_rsqrt_s',
'helper_float_recip1_d',
'helper_float_recip1_s',
'helper_float_recip1_ps',
'helper_float_rsqrt1_d',
'helper_float_rsqrt1_s',
'helper_float_rsqrt1_ps',
'helper_float_add_d',
'helper_float_add_s',
'helper_float_add_ps',
'helper_float_sub_d',
'helper_float_sub_s',
'helper_float_sub_ps',
'helper_float_mul_d',
'helper_float_mul_s',
'helper_float_mul_ps',
'helper_float_div_d',
'helper_float_div_s',
'helper_float_div_ps',
'helper_float_madd_d',
'helper_float_madd_s',
'helper_float_madd_ps',
'helper_float_msub_d',
'helper_float_msub_s',
'helper_float_msub_ps',
'helper_float_nmadd_d',
'helper_float_nmadd_s',
'helper_float_nmadd_ps',
'helper_float_nmsub_d',
'helper_float_nmsub_s',
'helper_float_nmsub_ps',
'helper_float_recip2_d',
'helper_float_recip2_s',
'helper_float_recip2_ps',
'helper_float_rsqrt2_d',
'helper_float_rsqrt2_s',
'helper_float_rsqrt2_ps',
'helper_float_addr_ps',
'helper_float_mulr_ps',
'helper_cmp_d_f',
'helper_cmpabs_d_f',
'helper_cmp_d_un',
'helper_cmpabs_d_un',
'helper_cmp_d_eq',
'helper_cmpabs_d_eq',
'helper_cmp_d_ueq',
'helper_cmpabs_d_ueq',
'helper_cmp_d_olt',
'helper_cmpabs_d_olt',
'helper_cmp_d_ult',
'helper_cmpabs_d_ult',
'helper_cmp_d_ole',
'helper_cmpabs_d_ole',
'helper_cmp_d_ule',
'helper_cmpabs_d_ule',
'helper_cmp_d_sf',
'helper_cmpabs_d_sf',
'helper_cmp_d_ngle',
'helper_cmpabs_d_ngle',
'helper_cmp_d_seq',
'helper_cmpabs_d_seq',
'helper_cmp_d_ngl',
'helper_cmpabs_d_ngl',
'helper_cmp_d_lt',
'helper_cmpabs_d_lt',
'helper_cmp_d_nge',
'helper_cmpabs_d_nge',
'helper_cmp_d_le',
'helper_cmpabs_d_le',
'helper_cmp_d_ngt',
'helper_cmpabs_d_ngt',
'helper_cmp_s_f',
'helper_cmpabs_s_f',
'helper_cmp_s_un',
'helper_cmpabs_s_un',
'helper_cmp_s_eq',
'helper_cmpabs_s_eq',
'helper_cmp_s_ueq',
'helper_cmpabs_s_ueq',
'helper_cmp_s_olt',
'helper_cmpabs_s_olt',
'helper_cmp_s_ult',
'helper_cmpabs_s_ult',
'helper_cmp_s_ole',
'helper_cmpabs_s_ole',
'helper_cmp_s_ule',
'helper_cmpabs_s_ule',
'helper_cmp_s_sf',
'helper_cmpabs_s_sf',
'helper_cmp_s_ngle',
'helper_cmpabs_s_ngle',
'helper_cmp_s_seq',
'helper_cmpabs_s_seq',
'helper_cmp_s_ngl',
'helper_cmpabs_s_ngl',
'helper_cmp_s_lt',
'helper_cmpabs_s_lt',
'helper_cmp_s_nge',
'helper_cmpabs_s_nge',
'helper_cmp_s_le',
'helper_cmpabs_s_le',
'helper_cmp_s_ngt',
'helper_cmpabs_s_ngt',
'helper_cmp_ps_f',
'helper_cmpabs_ps_f',
'helper_cmp_ps_un',
'helper_cmpabs_ps_un',
'helper_cmp_ps_eq',
'helper_cmpabs_ps_eq',
'helper_cmp_ps_ueq',
'helper_cmpabs_ps_ueq',
'helper_cmp_ps_olt',
'helper_cmpabs_ps_olt',
'helper_cmp_ps_ult',
'helper_cmpabs_ps_ult',
'helper_cmp_ps_ole',
'helper_cmpabs_ps_ole',
'helper_cmp_ps_ule',
'helper_cmpabs_ps_ule',
'helper_cmp_ps_sf',
'helper_cmpabs_ps_sf',
'helper_cmp_ps_ngle',
'helper_cmpabs_ps_ngle',
'helper_cmp_ps_seq',
'helper_cmpabs_ps_seq',
'helper_cmp_ps_ngl',
'helper_cmpabs_ps_ngl',
'helper_cmp_ps_lt',
'helper_cmpabs_ps_lt',
'helper_cmp_ps_nge',
'helper_cmpabs_ps_nge',
'helper_cmp_ps_le',
'helper_cmpabs_ps_le',
'helper_cmp_ps_ngt',
'helper_cmpabs_ps_ngt',
'helper_r6_cmp_d_af',
'helper_r6_cmp_d_un',
'helper_r6_cmp_d_eq',
'helper_r6_cmp_d_ueq',
'helper_r6_cmp_d_lt',
'helper_r6_cmp_d_ult',
'helper_r6_cmp_d_le',
'helper_r6_cmp_d_ule',
'helper_r6_cmp_d_saf',
'helper_r6_cmp_d_sun',
'helper_r6_cmp_d_seq',
'helper_r6_cmp_d_sueq',
'helper_r6_cmp_d_slt',
'helper_r6_cmp_d_sult',
'helper_r6_cmp_d_sle',
'helper_r6_cmp_d_sule',
'helper_r6_cmp_d_or',
'helper_r6_cmp_d_une',
'helper_r6_cmp_d_ne',
'helper_r6_cmp_d_sor',
'helper_r6_cmp_d_sune',
'helper_r6_cmp_d_sne',
'helper_r6_cmp_s_af',
'helper_r6_cmp_s_un',
'helper_r6_cmp_s_eq',
'helper_r6_cmp_s_ueq',
'helper_r6_cmp_s_lt',
'helper_r6_cmp_s_ult',
'helper_r6_cmp_s_le',
'helper_r6_cmp_s_ule',
'helper_r6_cmp_s_saf',
'helper_r6_cmp_s_sun',
'helper_r6_cmp_s_seq',
'helper_r6_cmp_s_sueq',
'helper_r6_cmp_s_slt',
'helper_r6_cmp_s_sult',
'helper_r6_cmp_s_sle',
'helper_r6_cmp_s_sule',
'helper_r6_cmp_s_or',
'helper_r6_cmp_s_une',
'helper_r6_cmp_s_ne',
'helper_r6_cmp_s_sor',
'helper_r6_cmp_s_sune',
'helper_r6_cmp_s_sne',
'helper_msa_ld_df',
'helper_msa_st_df',
'no_mmu_map_address',
'fixed_mmu_map_address',
'r4k_map_address',
'mips_cpu_get_phys_page_debug',
'mips_cpu_handle_mmu_fault',
'cpu_mips_translate_address',
'exception_resume_pc',
'mips_cpu_do_interrupt',
'mips_cpu_exec_interrupt',
'r4k_invalidate_tlb',
'helper_absq_s_ob',
'helper_absq_s_qh',
'helper_absq_s_pw',
'helper_adduh_ob',
'helper_adduh_r_ob',
'helper_subuh_ob',
'helper_subuh_r_ob',
'helper_addq_pw',
'helper_addq_qh',
'helper_addq_s_pw',
'helper_addq_s_qh',
'helper_addu_ob',
'helper_addu_qh',
'helper_addu_s_ob',
'helper_addu_s_qh',
'helper_subq_pw',
'helper_subq_qh',
'helper_subq_s_pw',
'helper_subq_s_qh',
'helper_subu_ob',
'helper_subu_qh',
'helper_subu_s_ob',
'helper_subu_s_qh',
'helper_raddu_l_ob',
'helper_precr_ob_qh',
'helper_precr_sra_qh_pw',
'helper_precr_sra_r_qh_pw',
'helper_precrq_ob_qh',
'helper_precrq_qh_pw',
'helper_precrq_rs_qh_pw',
'helper_precrq_pw_l',
'helper_precrqu_s_ob_qh',
'helper_preceq_pw_qhl',
'helper_preceq_pw_qhr',
'helper_preceq_pw_qhla',
'helper_preceq_pw_qhra',
'helper_precequ_qh_obl',
'helper_precequ_qh_obr',
'helper_precequ_qh_obla',
'helper_precequ_qh_obra',
'helper_preceu_qh_obl',
'helper_preceu_qh_obr',
'helper_preceu_qh_obla',
'helper_preceu_qh_obra',
'helper_shll_ob',
'helper_shrl_ob',
'helper_shra_ob',
'helper_shra_r_ob',
'helper_shll_qh',
'helper_shll_s_qh',
'helper_shrl_qh',
'helper_shra_qh',
'helper_shra_r_qh',
'helper_shll_pw',
'helper_shll_s_pw',
'helper_shra_pw',
'helper_shra_r_pw',
'helper_muleu_s_qh_obl',
'helper_muleu_s_qh_obr',
'helper_mulq_rs_qh',
'helper_muleq_s_pw_qhl',
'helper_muleq_s_pw_qhr',
'helper_mulsaq_s_w_qh',
'helper_dpau_h_obl',
'helper_dpau_h_obr',
'helper_dpsu_h_obl',
'helper_dpsu_h_obr',
'helper_dpa_w_qh',
'helper_dpaq_s_w_qh',
'helper_dps_w_qh',
'helper_dpsq_s_w_qh',
'helper_dpaq_sa_l_pw',
'helper_dpsq_sa_l_pw',
'helper_mulsaq_s_l_pw',
'helper_maq_s_w_qhll',
'helper_maq_s_w_qhlr',
'helper_maq_s_w_qhrl',
'helper_maq_s_w_qhrr',
'helper_maq_sa_w_qhll',
'helper_maq_sa_w_qhlr',
'helper_maq_sa_w_qhrl',
'helper_maq_sa_w_qhrr',
'helper_maq_s_l_pwl',
'helper_maq_s_l_pwr',
'helper_dmadd',
'helper_dmaddu',
'helper_dmsub',
'helper_dmsubu',
'helper_dinsv',
'helper_cmpgu_eq_ob',
'helper_cmpgu_lt_ob',
'helper_cmpgu_le_ob',
'helper_cmpu_eq_ob',
'helper_cmpu_lt_ob',
'helper_cmpu_le_ob',
'helper_cmp_eq_qh',
'helper_cmp_lt_qh',
'helper_cmp_le_qh',
'helper_cmp_eq_pw',
'helper_cmp_lt_pw',
'helper_cmp_le_pw',
'helper_cmpgdu_eq_ob',
'helper_cmpgdu_lt_ob',
'helper_cmpgdu_le_ob',
'helper_pick_ob',
'helper_pick_qh',
'helper_pick_pw',
'helper_packrl_pw',
'helper_dextr_w',
'helper_dextr_r_w',
'helper_dextr_rs_w',
'helper_dextr_l',
'helper_dextr_r_l',
'helper_dextr_rs_l',
'helper_dextr_s_h',
'helper_dextp',
'helper_dextpdp',
'helper_dshilo',
'helper_dmthlip',
'helper_dclo',
'helper_dclz',
'helper_dbitswap',
'helper_lld',
'helper_scd',
'helper_sdl',
'helper_sdr',
'helper_ldm',
'helper_sdm',
'helper_dmfc0_tcrestart',
'helper_dmfc0_tchalt',
'helper_dmfc0_tccontext',
'helper_dmfc0_tcschedule',
'helper_dmfc0_tcschefback',
'helper_dmfc0_lladdr',
'helper_dmfc0_watchlo',
'helper_dmtc0_entrylo0',
'helper_dmtc0_entrylo1',
'mips_reg_reset',
'mips_reg_read',
'mips_reg_write',
'mips_tcg_init',
'mips_cpu_list',
'mips_release',
'MIPS64_REGS_STORAGE_SIZE',
'MIPS_REGS_STORAGE_SIZE'
)
sparc_symbols = (
'cpu_sparc_exec',
'helper_compute_psr',
'helper_compute_C_icc',
'cpu_sparc_init',
'cpu_sparc_set_id',
'sparc_cpu_register_types',
'helper_fadds',
'helper_faddd',
'helper_faddq',
'helper_fsubs',
'helper_fsubd',
'helper_fsubq',
'helper_fmuls',
'helper_fmuld',
'helper_fmulq',
'helper_fdivs',
'helper_fdivd',
'helper_fdivq',
'helper_fsmuld',
'helper_fdmulq',
'helper_fnegs',
'helper_fitos',
'helper_fitod',
'helper_fitoq',
'helper_fdtos',
'helper_fstod',
'helper_fqtos',
'helper_fstoq',
'helper_fqtod',
'helper_fdtoq',
'helper_fstoi',
'helper_fdtoi',
'helper_fqtoi',
'helper_fabss',
'helper_fsqrts',
'helper_fsqrtd',
'helper_fsqrtq',
'helper_fcmps',
'helper_fcmpd',
'helper_fcmpes',
'helper_fcmped',
'helper_fcmpq',
'helper_fcmpeq',
'helper_ldfsr',
'helper_debug',
'helper_udiv_cc',
'helper_sdiv_cc',
'helper_taddcctv',
'helper_tsubcctv',
'sparc_cpu_do_interrupt',
'helper_check_align',
'helper_ld_asi',
'helper_st_asi',
'helper_cas_asi',
'helper_ldqf',
'helper_stqf',
'sparc_cpu_unassigned_access',
'sparc_cpu_do_unaligned_access',
'sparc_cpu_handle_mmu_fault',
'dump_mmu',
'sparc_cpu_get_phys_page_debug',
'sparc_reg_reset',
'sparc_reg_read',
'sparc_reg_write',
'gen_intermediate_code_init',
'cpu_set_cwp',
'cpu_get_psr',
'cpu_put_psr',
'cpu_cwp_inc',
'cpu_cwp_dec',
'helper_save',
'helper_restore')
if __name__ == '__main__':
    arch = sys.argv[1]
    print("/* Autogen header for Unicorn Engine - DO NOT MODIFY */")
    print("#ifndef UNICORN_AUTOGEN_%s_H" % arch.upper())
    print("#define UNICORN_AUTOGEN_%s_H" % arch.upper())
    for s in symbols:
        print("#define %s %s_%s" % (s, s, arch))
    if 'arm' in arch:
        for s in arm_symbols:
            print("#define %s %s_%s" % (s, s, arch))
    if 'aarch64' in arch:
        for s in aarch64_symbols:
            print("#define %s %s_%s" % (s, s, arch))
    if 'mips' in arch:
        for s in mips_symbols:
            print("#define %s %s_%s" % (s, s, arch))
    if 'sparc' in arch:
        for s in sparc_symbols:
            print("#define %s %s_%s" % (s, s, arch))
    print("#endif")
from ordereddict import OrderedDict
from qapi import *
import re
import sys
import os
import getopt
import errno
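
# The generators below emit C source for QAPI visitor functions: for each
# schema type they produce a visit_type_<Name>() routine that walks a value
# through an abstract Visitor (input, output or dealloc), following the
# mcgen() templates inlined in each function.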
implicit_structs = []
def generate_visit_implicit_struct(type):
global implicit_structs
if type in implicit_structs:
return ''
implicit_structs.append(type)
return mcgen('''
static void visit_type_implicit_%(c_type)s(Visitor *m, %(c_type)s **obj, Error **errp)
{
Error *err = NULL;
visit_start_implicit_struct(m, (void **)obj, sizeof(%(c_type)s), &err);
if (!err) {
visit_type_%(c_type)s_fields(m, obj, errp);
visit_end_implicit_struct(m, &err);
}
error_propagate(errp, err);
}
''',
c_type=type_name(type))
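
# Emit the static visit_type_<name>_fields() helper. Nested (structured)
# members recurse with a "<fn_prefix>_<member>" suffix, optional members are
# guarded by visit_optional(), and an inherited base struct is visited first
# through its visit_type_implicit_<base>() wrapper.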
def generate_visit_struct_fields(name, field_prefix, fn_prefix, members, base=None):
substructs = []
ret = ''
if not fn_prefix:
full_name = name
else:
full_name = "%s_%s" % (name, fn_prefix)
for argname, argentry, optional, structured in parse_args(members):
if structured:
if not fn_prefix:
nested_fn_prefix = argname
else:
nested_fn_prefix = "%s_%s" % (fn_prefix, argname)
nested_field_prefix = "%s%s." % (field_prefix, argname)
ret += generate_visit_struct_fields(name, nested_field_prefix,
nested_fn_prefix, argentry)
ret += mcgen('''
static void visit_type_%(full_name)s_field_%(c_name)s(Visitor *m, %(name)s **obj, Error **errp)
{
''',
name=name, full_name=full_name, c_name=c_var(argname))
ret += generate_visit_struct_body(full_name, argname, argentry)
ret += mcgen('''
}
''')
if base:
ret += generate_visit_implicit_struct(base)
ret += mcgen('''
static void visit_type_%(full_name)s_fields(Visitor *m, %(name)s **obj, Error **errp)
{
Error *err = NULL;
''',
name=name, full_name=full_name)
push_indent()
if base:
ret += mcgen('''
visit_type_implicit_%(type)s(m, &(*obj)->%(c_prefix)s%(c_name)s, &err);
if (err) {
goto out;
}
''',
c_prefix=c_var(field_prefix),
type=type_name(base), c_name=c_var('base'))
for argname, argentry, optional, structured in parse_args(members):
if optional:
ret += mcgen('''
visit_optional(m, &(*obj)->%(c_prefix)shas_%(c_name)s, "%(name)s", &err);
if (!err && (*obj)->%(prefix)shas_%(c_name)s) {
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
c_name=c_var(argname), name=argname)
push_indent()
if structured:
ret += mcgen('''
visit_type_%(full_name)s_field_%(c_name)s(m, obj, &err);
''',
full_name=full_name, c_name=c_var(argname))
else:
ret += mcgen('''
visit_type_%(type)s(m, &(*obj)->%(c_prefix)s%(c_name)s, "%(name)s", &err);
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
type=type_name(argentry), c_name=c_var(argname),
name=argname)
if optional:
pop_indent()
ret += mcgen('''
}
''')
ret += mcgen('''
if (err) {
goto out;
}
''')
pop_indent()
    if re.search(r'^ *goto out;', ret, re.MULTILINE):
ret += mcgen('''
out:
''')
ret += mcgen('''
error_propagate(errp, err);
}
''')
return ret
def generate_visit_struct_body(field_prefix, name, members):
ret = mcgen('''
Error *err = NULL;
''')
if not field_prefix:
full_name = name
else:
full_name = "%s_%s" % (field_prefix, name)
if len(field_prefix):
ret += mcgen('''
visit_start_struct(m, NULL, "", "%(name)s", 0, &err);
''',
name=name)
else:
ret += mcgen('''
visit_start_struct(m, (void **)obj, "%(name)s", name, sizeof(%(name)s), &err);
''',
name=name)
ret += mcgen('''
if (!err) {
if (*obj) {
visit_type_%(name)s_fields(m, obj, errp);
}
visit_end_struct(m, &err);
}
error_propagate(errp, err);
''',
name=full_name)
return ret
def generate_visit_struct(expr):
name = expr['type']
members = expr['data']
base = expr.get('base')
ret = generate_visit_struct_fields(name, "", "", members, base)
ret += mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s **obj, const char *name, Error **errp)
{
''',
name=name)
ret += generate_visit_struct_body("", name, members)
ret += mcgen('''
}
''')
return ret
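
# Emit the list visitor. The generated C walks the singly linked <Name>List
# through the GenericList protocol: visit_next_list() yields each link and
# visit_type_<Name>() is applied to its ->value field.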
def generate_visit_list(name, members):
return mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList **obj, const char *name, Error **errp)
{
Error *err = NULL;
GenericList *i, **prev;
visit_start_list(m, name, &err);
if (err) {
goto out;
}
for (prev = (GenericList **)obj;
!err && (i = visit_next_list(m, prev, &err)) != NULL;
prev = &i) {
%(name)sList *native_i = (%(name)sList *)i;
visit_type_%(name)s(m, &native_i->value, NULL, &err);
}
error_propagate(errp, err);
err = NULL;
visit_end_list(m, &err);
out:
error_propagate(errp, err);
}
''',
name=name)
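
# Enum visitors are one-liners: they delegate to visit_type_enum() with the
# generated <Name>_lookup string table.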
def generate_visit_enum(name, members):
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s *obj, const char *name, Error **errp)
{
visit_type_enum(m, (int *)obj, %(name)s_lookup, "%(name)s", name, errp);
}
''',
name=name)
def generate_visit_anon_union(name, members):
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s **obj, const char *name, Error **errp)
{
Error *err = NULL;
visit_start_implicit_struct(m, (void**) obj, sizeof(%(name)s), &err);
if (err) {
goto out;
}
visit_get_next_type(m, (int*) &(*obj)->kind, %(name)s_qtypes, name, &err);
if (err) {
goto out_end;
}
switch ((*obj)->kind) {
''',
name=name)
# For anon union, always use the default enum type automatically generated
# as "'%sKind' % (name)"
disc_type = '%sKind' % (name)
for key in members:
assert (members[key] in builtin_types
or find_struct(members[key])
or find_union(members[key])
or find_enum(members[key])), "Invalid anonymous union member"
enum_full_value = generate_enum_full_value(disc_type, key)
ret += mcgen('''
case %(enum_full_value)s:
visit_type_%(c_type)s(m, &(*obj)->%(c_name)s, name, &err);
break;
''',
enum_full_value = enum_full_value,
c_type = type_name(members[key]),
c_name = c_fun(key))
ret += mcgen('''
default:
abort();
}
out_end:
error_propagate(errp, err);
err = NULL;
visit_end_implicit_struct(m, &err);
out:
error_propagate(errp, err);
}
''')
return ret
def generate_visit_union(expr):
name = expr['union']
members = expr['data']
base = expr.get('base')
discriminator = expr.get('discriminator')
if discriminator == {}:
assert not base
return generate_visit_anon_union(name, members)
enum_define = discriminator_find_enum_define(expr)
if enum_define:
# Use the enum type as discriminator
ret = ""
disc_type = enum_define['enum_name']
else:
# There will always be a discriminator in the C switch code, by default it
# is an enum type generated silently as "'%sKind' % (name)"
ret = generate_visit_enum('%sKind' % name, members.keys())
disc_type = '%sKind' % (name)
if base:
base_fields = find_struct(base)['data']
if discriminator:
base_fields = base_fields.copy()
del base_fields[discriminator]
ret += generate_visit_struct_fields(name, "", "", base_fields)
if discriminator:
for key in members:
ret += generate_visit_implicit_struct(members[key])
ret += mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s **obj, const char *name, Error **errp)
{
Error *err = NULL;
visit_start_struct(m, (void **)obj, "%(name)s", name, sizeof(%(name)s), &err);
if (err) {
goto out;
}
if (*obj) {
''',
name=name)
if base:
ret += mcgen('''
visit_type_%(name)s_fields(m, obj, &err);
if (err) {
goto out_obj;
}
''',
name=name)
if not discriminator:
disc_key = "type"
else:
disc_key = discriminator
ret += mcgen('''
visit_type_%(disc_type)s(m, &(*obj)->kind, "%(disc_key)s", &err);
if (err) {
goto out_obj;
}
if (!visit_start_union(m, !!(*obj)->data, &err) || err) {
goto out_obj;
}
switch ((*obj)->kind) {
''',
disc_type = disc_type,
disc_key = disc_key)
for key in members:
if not discriminator:
fmt = 'visit_type_%(c_type)s(m, &(*obj)->%(c_name)s, "data", &err);'
else:
fmt = 'visit_type_implicit_%(c_type)s(m, &(*obj)->%(c_name)s, &err);'
enum_full_value = generate_enum_full_value(disc_type, key)
ret += mcgen('''
case %(enum_full_value)s:
''' + fmt + '''
break;
''',
enum_full_value = enum_full_value,
c_type=type_name(members[key]),
c_name=c_fun(key))
ret += mcgen('''
default:
abort();
}
out_obj:
error_propagate(errp, err);
err = NULL;
visit_end_union(m, !!(*obj)->data, &err);
error_propagate(errp, err);
err = NULL;
}
visit_end_struct(m, &err);
out:
error_propagate(errp, err);
}
''')
return ret
def generate_declaration(name, members, genlist=True, builtin_type=False):
ret = ""
if not builtin_type:
ret += mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s **obj, const char *name, Error **errp);
''',
name=name)
if genlist:
ret += mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList **obj, const char *name, Error **errp);
''',
name=name)
return ret
def generate_enum_declaration(name, members, genlist=True):
ret = ""
if genlist:
ret += mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList **obj, const char *name, Error **errp);
''',
name=name)
return ret
def generate_decl_enum(name, members, genlist=True):
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s *obj, const char *name, Error **errp);
''',
name=name)
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "chbp:i:o:",
["source", "header", "builtins", "prefix=",
"input-file=", "output-dir="])
except getopt.GetoptError as err:
print(str(err))
sys.exit(1)
input_file = ""
output_dir = ""
prefix = ""
c_file = 'qapi-visit.c'
h_file = 'qapi-visit.h'
do_c = False
do_h = False
do_builtins = False
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-i", "--input-file"):
input_file = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
elif o in ("-c", "--source"):
do_c = True
elif o in ("-h", "--header"):
do_h = True
elif o in ("-b", "--builtins"):
do_builtins = True
if not do_c and not do_h:
do_c = True
do_h = True
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
try:
os.makedirs(output_dir)
except os.error as e:
if e.errno != errno.EEXIST:
raise
def maybe_open(really, name, opt):
if really:
return open(name, opt)
else:
try:
import StringIO
return StringIO.StringIO()
except ImportError:
from io import StringIO
return StringIO()
fdef = maybe_open(do_c, c_file, 'w')
fdecl = maybe_open(do_h, h_file, 'w')
fdef.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor functions
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "qemu-common.h"
#include "%(header)s"
''',
header=basename(h_file)))
fdecl.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor functions
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "qapi/visitor.h"
#include "%(prefix)sqapi-types.h"
''',
prefix=prefix, guard=guardname(h_file)))
exprs = parse_schema(input_file)
# to avoid header dependency hell, we always generate declarations
# for built-in types in our header files and simply guard them
fdecl.write(guardstart("QAPI_VISIT_BUILTIN_VISITOR_DECL"))
for typename in builtin_types:
fdecl.write(generate_declaration(typename, None, genlist=True,
builtin_type=True))
fdecl.write(guardend("QAPI_VISIT_BUILTIN_VISITOR_DECL"))
# ...this doesn't work for cases where we link in multiple objects that
# have the functions defined, so we use -b option to provide control
# over these cases
if do_builtins:
for typename in builtin_types:
fdef.write(generate_visit_list(typename, None))
for expr in exprs:
if 'type' in expr:
ret = generate_visit_struct(expr)
ret += generate_visit_list(expr['type'], expr['data'])
fdef.write(ret)
ret = generate_declaration(expr['type'], expr['data'])
fdecl.write(ret)
elif 'union' in expr:
ret = generate_visit_union(expr)
ret += generate_visit_list(expr['union'], expr['data'])
fdef.write(ret)
enum_define = discriminator_find_enum_define(expr)
ret = ""
if not enum_define:
ret = generate_decl_enum('%sKind' % expr['union'],
expr['data'].keys())
ret += generate_declaration(expr['union'], expr['data'])
fdecl.write(ret)
elif 'enum' in expr:
ret = generate_visit_list(expr['enum'], expr['data'])
ret += generate_visit_enum(expr['enum'], expr['data'])
fdef.write(ret)
ret = generate_decl_enum(expr['enum'], expr['data'])
ret += generate_enum_declaration(expr['enum'], expr['data'])
fdecl.write(ret)
fdecl.write('''
#endif
''')
fdecl.flush()
fdecl.close()
fdef.flush()
fdef.close() | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/qemu/scripts/qapi-visit.py | qapi-visit.py |
try:
from UserDict import UserDict
from UserDict import DictMixin
except ImportError:
from collections import UserDict
try:
from collections import MutableMapping as DictMixin
except ImportError:
from collections.abc import MutableMapping as DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/qemu/scripts/ordereddict.py | ordereddict.py |
from ordereddict import OrderedDict
from qapi import *
import sys
import os
import getopt
import errno
def generate_fwd_struct(name, members, builtin_type=False):
if builtin_type:
return mcgen('''
typedef struct %(name)sList
{
union {
%(type)s value;
uint64_t padding;
};
struct %(name)sList *next;
} %(name)sList;
''',
type=c_type(name),
name=name)
return mcgen('''
typedef struct %(name)s %(name)s;
typedef struct %(name)sList
{
union {
%(name)s *value;
uint64_t padding;
};
struct %(name)sList *next;
} %(name)sList;
''',
name=name)
def generate_fwd_enum_struct(name, members):
return mcgen('''
typedef struct %(name)sList
{
union {
%(name)s value;
uint64_t padding;
};
struct %(name)sList *next;
} %(name)sList;
''',
name=name)
def generate_struct_fields(members):
ret = ''
for argname, argentry, optional, structured in parse_args(members):
if optional:
ret += mcgen('''
bool has_%(c_name)s;
''',
c_name=c_var(argname))
if structured:
push_indent()
ret += generate_struct({ "field": argname, "data": argentry})
pop_indent()
else:
ret += mcgen('''
%(c_type)s %(c_name)s;
''',
c_type=c_type(argentry), c_name=c_var(argname))
return ret
def generate_struct(expr):
structname = expr.get('type', "")
fieldname = expr.get('field', "")
members = expr['data']
base = expr.get('base')
ret = mcgen('''
struct %(name)s
{
''',
name=structname)
if base:
ret += generate_struct_fields({'base': base})
ret += generate_struct_fields(members)
if len(fieldname):
fieldname = " " + fieldname
ret += mcgen('''
}%(field)s;
''',
field=fieldname)
return ret
def generate_enum_lookup(name, values):
ret = mcgen('''
const char *%(name)s_lookup[] = {
''',
name=name)
i = 0
for value in values:
ret += mcgen('''
"%(value)s",
''',
value=value)
ret += mcgen('''
NULL,
};
''')
return ret
def generate_enum(name, values):
lookup_decl = mcgen('''
extern const char *%(name)s_lookup[];
''',
name=name)
enum_decl = mcgen('''
typedef enum %(name)s
{
''',
name=name)
# append automatically generated _MAX value
enum_values = values + [ 'MAX' ]
i = 0
for value in enum_values:
enum_full_value = generate_enum_full_value(name, value)
enum_decl += mcgen('''
%(enum_full_value)s = %(i)d,
''',
enum_full_value = enum_full_value,
i=i)
i += 1
enum_decl += mcgen('''
} %(name)s;
''',
name=name)
return lookup_decl + enum_decl
def generate_anon_union_qtypes(expr):
name = expr['union']
members = expr['data']
ret = mcgen('''
const int %(name)s_qtypes[QTYPE_MAX] = {
''',
name=name)
for key in members:
qapi_type = members[key]
if qapi_type in builtin_type_qtypes:
qtype = builtin_type_qtypes[qapi_type]
elif find_struct(qapi_type):
qtype = "QTYPE_QDICT"
elif find_union(qapi_type):
qtype = "QTYPE_QDICT"
elif find_enum(qapi_type):
qtype = "QTYPE_QSTRING"
else:
assert False, "Invalid anonymous union member"
ret += mcgen('''
[ %(qtype)s ] = %(abbrev)s_KIND_%(enum)s,
''',
qtype = qtype,
abbrev = de_camel_case(name).upper(),
enum = c_fun(de_camel_case(key),False).upper())
ret += mcgen('''
};
''')
return ret
def generate_union(expr):
name = expr['union']
typeinfo = expr['data']
base = expr.get('base')
discriminator = expr.get('discriminator')
enum_define = discriminator_find_enum_define(expr)
if enum_define:
discriminator_type_name = enum_define['enum_name']
else:
discriminator_type_name = '%sKind' % (name)
ret = mcgen('''
struct %(name)s
{
%(discriminator_type_name)s kind;
union {
void *data;
''',
name=name,
discriminator_type_name=discriminator_type_name)
for key in typeinfo:
ret += mcgen('''
%(c_type)s %(c_name)s;
''',
c_type=c_type(typeinfo[key]),
c_name=c_fun(key))
ret += mcgen('''
};
''')
if base:
base_fields = find_struct(base)['data']
if discriminator:
base_fields = base_fields.copy()
del base_fields[discriminator]
ret += generate_struct_fields(base_fields)
else:
assert not discriminator
ret += mcgen('''
};
''')
if discriminator == {}:
ret += mcgen('''
extern const int %(name)s_qtypes[];
''',
name=name)
return ret
def generate_type_cleanup_decl(name):
ret = mcgen('''
void qapi_free_%(type)s(%(c_type)s obj);
''',
c_type=c_type(name),type=name)
return ret
def generate_type_cleanup(name):
ret = mcgen('''
void qapi_free_%(type)s(%(c_type)s obj)
{
QapiDeallocVisitor *md;
Visitor *v;
if (!obj) {
return;
}
md = qapi_dealloc_visitor_new();
v = qapi_dealloc_get_visitor(md);
visit_type_%(type)s(v, &obj, NULL, NULL);
qapi_dealloc_visitor_cleanup(md);
}
''',
c_type=c_type(name),type=name)
return ret
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "chbp:i:o:",
["source", "header", "builtins",
"prefix=", "input-file=", "output-dir="])
except getopt.GetoptError as err:
print(str(err))
sys.exit(1)
output_dir = ""
input_file = ""
prefix = ""
c_file = 'qapi-types.c'
h_file = 'qapi-types.h'
do_c = False
do_h = False
do_builtins = False
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-i", "--input-file"):
input_file = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
elif o in ("-c", "--source"):
do_c = True
elif o in ("-h", "--header"):
do_h = True
elif o in ("-b", "--builtins"):
do_builtins = True
if not do_c and not do_h:
do_c = True
do_h = True
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
try:
os.makedirs(output_dir)
except os.error as e:
if e.errno != errno.EEXIST:
raise
def maybe_open(really, name, opt):
if really:
return open(name, opt)
else:
try:
import StringIO
return StringIO.StringIO()
except ImportError:
from io import StringIO
return StringIO()
fdef = maybe_open(do_c, c_file, 'w')
fdecl = maybe_open(do_h, h_file, 'w')
fdef.write(mcgen('''
/* AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* deallocation functions for schema-defined QAPI types
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
* Michael Roth <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "qapi/dealloc-visitor.h"
#include "%(prefix)sqapi-types.h"
#include "%(prefix)sqapi-visit.h"
''', prefix=prefix))
fdecl.write(mcgen('''
/* AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI types
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "unicorn/platform.h"
''',
guard=guardname(h_file)))
exprs = parse_schema(input_file)
exprs = filter(lambda expr: 'gen' not in expr, exprs)
exprs = list(exprs)
fdecl.write(guardstart("QAPI_TYPES_BUILTIN_STRUCT_DECL"))
for typename in builtin_types:
fdecl.write(generate_fwd_struct(typename, None, builtin_type=True))
fdecl.write(guardend("QAPI_TYPES_BUILTIN_STRUCT_DECL"))
for expr in exprs:
ret = "\n"
if 'type' in expr:
ret += generate_fwd_struct(expr['type'], expr['data'])
elif 'enum' in expr:
ret += generate_enum(expr['enum'], expr['data']) + "\n"
ret += generate_fwd_enum_struct(expr['enum'], expr['data'])
fdef.write(generate_enum_lookup(expr['enum'], expr['data']))
elif 'union' in expr:
ret += generate_fwd_struct(expr['union'], expr['data']) + "\n"
enum_define = discriminator_find_enum_define(expr)
if not enum_define:
ret += generate_enum('%sKind' % expr['union'], expr['data'].keys())
fdef.write(generate_enum_lookup('%sKind' % expr['union'],
expr['data'].keys()))
if expr.get('discriminator') == {}:
fdef.write(generate_anon_union_qtypes(expr))
else:
continue
fdecl.write(ret)
# to avoid header dependency hell, we always generate declarations
# for built-in types in our header files and simply guard them
fdecl.write(guardstart("QAPI_TYPES_BUILTIN_CLEANUP_DECL"))
for typename in builtin_types:
fdecl.write(generate_type_cleanup_decl(typename + "List"))
fdecl.write(guardend("QAPI_TYPES_BUILTIN_CLEANUP_DECL"))
# ...this doesn't work for cases where we link in multiple objects that
# have the functions defined, so we use -b option to provide control
# over these cases
if do_builtins:
fdef.write(guardstart("QAPI_TYPES_BUILTIN_CLEANUP_DEF"))
for typename in builtin_types:
fdef.write(generate_type_cleanup(typename + "List"))
fdef.write(guardend("QAPI_TYPES_BUILTIN_CLEANUP_DEF"))
for expr in exprs:
ret = "\n"
if 'type' in expr:
ret += generate_struct(expr) + "\n"
ret += generate_type_cleanup_decl(expr['type'] + "List")
fdef.write(generate_type_cleanup(expr['type'] + "List") + "\n")
ret += generate_type_cleanup_decl(expr['type'])
fdef.write(generate_type_cleanup(expr['type']) + "\n")
elif 'union' in expr:
ret += generate_union(expr)
ret += generate_type_cleanup_decl(expr['union'] + "List")
fdef.write(generate_type_cleanup(expr['union'] + "List") + "\n")
ret += generate_type_cleanup_decl(expr['union'])
fdef.write(generate_type_cleanup(expr['union']) + "\n")
elif 'enum' in expr:
ret += generate_type_cleanup_decl(expr['enum'] + "List")
fdef.write(generate_type_cleanup(expr['enum'] + "List") + "\n")
else:
continue
fdecl.write(ret)
fdecl.write('''
#endif
''')
fdecl.flush()
fdecl.close()
fdef.flush()
fdef.close() | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/qemu/scripts/qapi-types.py | qapi-types.py |
import re
from ordereddict import OrderedDict
import os
import sys
try:
basestring
except NameError:
basestring = str
builtin_types = [
'str', 'int', 'number', 'bool',
'int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64'
]
builtin_type_qtypes = {
'str': 'QTYPE_QSTRING',
'int': 'QTYPE_QINT',
'number': 'QTYPE_QFLOAT',
'bool': 'QTYPE_QBOOL',
'int8': 'QTYPE_QINT',
'int16': 'QTYPE_QINT',
'int32': 'QTYPE_QINT',
'int64': 'QTYPE_QINT',
'uint8': 'QTYPE_QINT',
'uint16': 'QTYPE_QINT',
'uint32': 'QTYPE_QINT',
'uint64': 'QTYPE_QINT',
}
def error_path(parent):
res = ""
while parent:
res = ("In file included from %s:%d:\n" % (parent['file'],
parent['line'])) + res
parent = parent['parent']
return res
class QAPISchemaError(Exception):
def __init__(self, schema, msg):
self.input_file = schema.input_file
self.msg = msg
self.col = 1
self.line = schema.line
for ch in schema.src[schema.line_pos:schema.pos]:
if ch == '\t':
self.col = (self.col + 7) % 8 + 1
else:
self.col += 1
self.info = schema.parent_info
def __str__(self):
return error_path(self.info) + \
"%s:%d:%d: %s" % (self.input_file, self.line, self.col, self.msg)
class QAPIExprError(Exception):
def __init__(self, expr_info, msg):
self.info = expr_info
self.msg = msg
def __str__(self):
return error_path(self.info['parent']) + \
"%s:%d: %s" % (self.info['file'], self.info['line'], self.msg)
class QAPISchema:
def __init__(self, fp, input_relname=None, include_hist=[],
previously_included=[], parent_info=None):
""" include_hist is a stack used to detect inclusion cycles
previously_included is a global state used to avoid multiple
inclusions of the same file"""
input_fname = os.path.abspath(fp.name)
if input_relname is None:
input_relname = fp.name
self.input_dir = os.path.dirname(input_fname)
self.input_file = input_relname
self.include_hist = include_hist + [(input_relname, input_fname)]
previously_included.append(input_fname)
self.parent_info = parent_info
self.src = fp.read()
if self.src == '' or self.src[-1] != '\n':
self.src += '\n'
self.cursor = 0
self.line = 1
self.line_pos = 0
self.exprs = []
self.accept()
while self.tok != None:
expr_info = {'file': input_relname, 'line': self.line, 'parent': self.parent_info}
expr = self.get_expr(False)
if isinstance(expr, dict) and "include" in expr:
if len(expr) != 1:
raise QAPIExprError(expr_info, "Invalid 'include' directive")
include = expr["include"]
if not isinstance(include, str):
raise QAPIExprError(expr_info,
'Expected a file name (string), got: %s'
% include)
include_path = os.path.join(self.input_dir, include)
for elem in self.include_hist:
if include_path == elem[1]:
raise QAPIExprError(expr_info, "Inclusion loop for %s"
% include)
# skip multiple include of the same file
if include_path in previously_included:
continue
try:
fobj = open(include_path, 'r')
except IOError as e:
raise QAPIExprError(expr_info,
'%s: %s' % (e.strerror, include))
exprs_include = QAPISchema(fobj, include, self.include_hist,
previously_included, expr_info)
self.exprs.extend(exprs_include.exprs)
else:
expr_elem = {'expr': expr,
'info': expr_info}
self.exprs.append(expr_elem)
def accept(self):
while True:
self.tok = self.src[self.cursor]
self.pos = self.cursor
self.cursor += 1
self.val = None
if self.tok == '#':
self.cursor = self.src.find('\n', self.cursor)
elif self.tok in ['{', '}', ':', ',', '[', ']']:
return
elif self.tok == "'":
string = ''
esc = False
while True:
ch = self.src[self.cursor]
self.cursor += 1
if ch == '\n':
raise QAPISchemaError(self,
'Missing terminating "\'"')
if esc:
string += ch
esc = False
elif ch == "\\":
esc = True
elif ch == "'":
self.val = string
return
else:
string += ch
elif self.tok == '\n':
if self.cursor == len(self.src):
self.tok = None
return
self.line += 1
self.line_pos = self.cursor
elif not self.tok.isspace():
raise QAPISchemaError(self, 'Stray "%s"' % self.tok)
def get_members(self):
expr = OrderedDict()
if self.tok == '}':
self.accept()
return expr
if self.tok != "'":
raise QAPISchemaError(self, 'Expected string or "}"')
while True:
key = self.val
self.accept()
if self.tok != ':':
raise QAPISchemaError(self, 'Expected ":"')
self.accept()
if key in expr:
raise QAPISchemaError(self, 'Duplicate key "%s"' % key)
expr[key] = self.get_expr(True)
if self.tok == '}':
self.accept()
return expr
if self.tok != ',':
raise QAPISchemaError(self, 'Expected "," or "}"')
self.accept()
if self.tok != "'":
raise QAPISchemaError(self, 'Expected string')
def get_values(self):
expr = []
if self.tok == ']':
self.accept()
return expr
if not self.tok in [ '{', '[', "'" ]:
raise QAPISchemaError(self, 'Expected "{", "[", "]" or string')
while True:
expr.append(self.get_expr(True))
if self.tok == ']':
self.accept()
return expr
if self.tok != ',':
raise QAPISchemaError(self, 'Expected "," or "]"')
self.accept()
def get_expr(self, nested):
if self.tok != '{' and not nested:
raise QAPISchemaError(self, 'Expected "{"')
if self.tok == '{':
self.accept()
expr = self.get_members()
elif self.tok == '[':
self.accept()
expr = self.get_values()
elif self.tok == "'":
expr = self.val
self.accept()
else:
raise QAPISchemaError(self, 'Expected "{", "[" or string')
return expr
def find_base_fields(base):
base_struct_define = find_struct(base)
if not base_struct_define:
return None
return base_struct_define['data']
# Return the discriminator enum define if discriminator is specified as an
# enum type, otherwise return None.
def discriminator_find_enum_define(expr):
base = expr.get('base')
discriminator = expr.get('discriminator')
if not (discriminator and base):
return None
base_fields = find_base_fields(base)
if not base_fields:
return None
discriminator_type = base_fields.get(discriminator)
if not discriminator_type:
return None
return find_enum(discriminator_type)
def check_event(expr, expr_info):
params = expr.get('data')
if params:
for argname, argentry, optional, structured in parse_args(params):
if structured:
raise QAPIExprError(expr_info,
"Nested structure define in event is not "
"supported, event '%s', argname '%s'"
% (expr['event'], argname))
def check_union(expr, expr_info):
name = expr['union']
base = expr.get('base')
discriminator = expr.get('discriminator')
members = expr['data']
# If the object has a member 'base', its value must name a complex type.
if base:
base_fields = find_base_fields(base)
if not base_fields:
raise QAPIExprError(expr_info,
"Base '%s' is not a valid type"
% base)
# If the union object has no member 'discriminator', it's an
# ordinary union.
if not discriminator:
enum_define = None
# Else if the value of member 'discriminator' is {}, it's an
# anonymous union.
elif discriminator == {}:
enum_define = None
# Else, it's a flat union.
else:
# The object must have a member 'base'.
if not base:
raise QAPIExprError(expr_info,
"Flat union '%s' must have a base field"
% name)
# The value of member 'discriminator' must name a member of the
# base type.
discriminator_type = base_fields.get(discriminator)
if not discriminator_type:
raise QAPIExprError(expr_info,
"Discriminator '%s' is not a member of base "
"type '%s'"
% (discriminator, base))
enum_define = find_enum(discriminator_type)
# Do not allow string discriminator
if not enum_define:
raise QAPIExprError(expr_info,
"Discriminator '%s' must be of enumeration "
"type" % discriminator)
# Check every branch
for (key, value) in members.items():
# If this named member's value names an enum type, then all members
# of 'data' must also be members of the enum type.
if enum_define and not key in enum_define['enum_values']:
raise QAPIExprError(expr_info,
"Discriminator value '%s' is not found in "
"enum '%s'" %
(key, enum_define["enum_name"]))
# Todo: add checking for values. Key is checked as above, value can be
# also checked here, but we need more functions to handle array case.
def check_exprs(schema):
for expr_elem in schema.exprs:
expr = expr_elem['expr']
if 'union' in expr:
check_union(expr, expr_elem['info'])
if 'event' in expr:
check_event(expr, expr_elem['info'])
def parse_schema(input_file):
try:
schema = QAPISchema(open(input_file, "r"))
except (QAPISchemaError, QAPIExprError) as e:
print >>sys.stderr, e
exit(1)
exprs = []
for expr_elem in schema.exprs:
expr = expr_elem['expr']
if 'enum' in expr:
add_enum(expr['enum'], expr['data'])
elif 'union' in expr:
add_union(expr)
elif 'type' in expr:
add_struct(expr)
exprs.append(expr)
# Try again for hidden UnionKind enum
for expr_elem in schema.exprs:
expr = expr_elem['expr']
if 'union' in expr:
if not discriminator_find_enum_define(expr):
add_enum('%sKind' % expr['union'])
try:
check_exprs(schema)
except QAPIExprError as e:
print >>sys.stderr, e
exit(1)
return exprs
def parse_args(typeinfo):
if isinstance(typeinfo, basestring):
struct = find_struct(typeinfo)
assert struct != None
typeinfo = struct['data']
for member in typeinfo:
argname = member
argentry = typeinfo[member]
optional = False
structured = False
if member.startswith('*'):
argname = member[1:]
optional = True
if isinstance(argentry, OrderedDict):
structured = True
yield (argname, argentry, optional, structured)
def de_camel_case(name):
new_name = ''
for ch in name:
if ch.isupper() and new_name:
new_name += '_'
if ch == '-':
new_name += '_'
else:
new_name += ch.lower()
return new_name
def camel_case(name):
new_name = ''
first = True
for ch in name:
if ch in ['_', '-']:
first = True
elif first:
new_name += ch.upper()
first = False
else:
new_name += ch.lower()
return new_name
def c_var(name, protect=True):
# ANSI X3J11/88-090, 3.1.1
c89_words = set(['auto', 'break', 'case', 'char', 'const', 'continue',
'default', 'do', 'double', 'else', 'enum', 'extern', 'float',
'for', 'goto', 'if', 'int', 'long', 'register', 'return',
'short', 'signed', 'sizeof', 'static', 'struct', 'switch',
'typedef', 'union', 'unsigned', 'void', 'volatile', 'while'])
# ISO/IEC 9899:1999, 6.4.1
c99_words = set(['inline', 'restrict', '_Bool', '_Complex', '_Imaginary'])
# ISO/IEC 9899:2011, 6.4.1
c11_words = set(['_Alignas', '_Alignof', '_Atomic', '_Generic', '_Noreturn',
'_Static_assert', '_Thread_local'])
# GCC http://gcc.gnu.org/onlinedocs/gcc-4.7.1/gcc/C-Extensions.html
# excluding _.*
gcc_words = set(['asm', 'typeof'])
# C++ ISO/IEC 14882:2003 2.11
cpp_words = set(['bool', 'catch', 'class', 'const_cast', 'delete',
'dynamic_cast', 'explicit', 'false', 'friend', 'mutable',
'namespace', 'new', 'operator', 'private', 'protected',
'public', 'reinterpret_cast', 'static_cast', 'template',
'this', 'throw', 'true', 'try', 'typeid', 'typename',
'using', 'virtual', 'wchar_t',
# alternative representations
'and', 'and_eq', 'bitand', 'bitor', 'compl', 'not',
'not_eq', 'or', 'or_eq', 'xor', 'xor_eq'])
# namespace pollution:
polluted_words = set(['unix', 'errno'])
if protect and (name in c89_words | c99_words | c11_words | gcc_words | cpp_words | polluted_words):
return "q_" + name
return name.replace('-', '_').lstrip("*")
def c_fun(name, protect=True):
return c_var(name, protect).replace('.', '_')
def c_list_type(name):
return '%sList' % name
def type_name(name):
if type(name) == list:
return c_list_type(name[0])
return name
enum_types = []
struct_types = []
union_types = []
def add_struct(definition):
global struct_types
struct_types.append(definition)
def find_struct(name):
global struct_types
for struct in struct_types:
if struct['type'] == name:
return struct
return None
def add_union(definition):
global union_types
union_types.append(definition)
def find_union(name):
global union_types
for union in union_types:
if union['union'] == name:
return union
return None
def add_enum(name, enum_values = None):
global enum_types
enum_types.append({"enum_name": name, "enum_values": enum_values})
def find_enum(name):
global enum_types
for enum in enum_types:
if enum['enum_name'] == name:
return enum
return None
def is_enum(name):
return find_enum(name) != None
eatspace = '\033EATSPACE.'
# A special suffix is added in c_type() for pointer types, and it's
# stripped in mcgen(). So please notice this when you check the return
# value of c_type() outside mcgen().
def c_type(name, is_param=False):
if name == 'str':
if is_param:
return 'const char *' + eatspace
return 'char *' + eatspace
elif name == 'int':
return 'int64_t'
elif (name == 'int8' or name == 'int16' or name == 'int32' or
name == 'int64' or name == 'uint8' or name == 'uint16' or
name == 'uint32' or name == 'uint64'):
return name + '_t'
elif name == 'size':
return 'uint64_t'
elif name == 'bool':
return 'bool'
elif name == 'number':
return 'double'
elif type(name) == list:
return '%s *%s' % (c_list_type(name[0]), eatspace)
elif is_enum(name):
return name
elif name == None or len(name) == 0:
return 'void'
elif name == name.upper():
return '%sEvent *%s' % (camel_case(name), eatspace)
else:
return '%s *%s' % (name, eatspace)
def is_c_ptr(name):
suffix = "*" + eatspace
return c_type(name).endswith(suffix)
def genindent(count):
ret = ""
for i in range(count):
ret += " "
return ret
indent_level = 0
def push_indent(indent_amount=4):
global indent_level
indent_level += indent_amount
def pop_indent(indent_amount=4):
global indent_level
indent_level -= indent_amount
def cgen(code, **kwds):
indent = genindent(indent_level)
lines = code.split('\n')
lines = map(lambda x: indent + x, lines)
return '\n'.join(lines) % kwds + '\n'
def mcgen(code, **kwds):
raw = cgen('\n'.join(code.split('\n')[1:-1]), **kwds)
return re.sub(re.escape(eatspace) + ' *', '', raw)
def basename(filename):
return filename.split("/")[-1]
def guardname(filename):
guard = basename(filename).rsplit(".", 1)[0]
for substr in [".", " ", "-"]:
guard = guard.replace(substr, "_")
return guard.upper() + '_H'
def guardstart(name):
return mcgen('''
#ifndef %(name)s
#define %(name)s
''',
name=guardname(name))
def guardend(name):
return mcgen('''
#endif /* %(name)s */
''',
name=guardname(name))
# ENUMName -> ENUM_NAME, EnumName1 -> ENUM_NAME1
# ENUM_NAME -> ENUM_NAME, ENUM_NAME1 -> ENUM_NAME1, ENUM_Name2 -> ENUM_NAME2
# ENUM24_Name -> ENUM24_NAME
def _generate_enum_string(value):
c_fun_str = c_fun(value, False)
if value.isupper():
return c_fun_str
new_name = ''
l = len(c_fun_str)
for i in range(l):
c = c_fun_str[i]
# When c is upper and no "_" appears before, do more checks
if c.isupper() and (i > 0) and c_fun_str[i - 1] != "_":
# Case 1: next string is lower
# Case 2: previous string is digit
if (i < (l - 1) and c_fun_str[i + 1].islower()) or \
c_fun_str[i - 1].isdigit():
new_name += '_'
new_name += c
return new_name.lstrip('_').upper()
def generate_enum_full_value(enum_name, enum_value):
abbrev_string = _generate_enum_string(enum_name)
value_string = _generate_enum_string(enum_value)
return "%s_%s" % (abbrev_string, value_string) | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/qemu/scripts/qapi.py | qapi.py |
Tiny Code Generator - Fabrice Bellard.
1) Introduction
TCG (Tiny Code Generator) began as a generic backend for a C
compiler. It was simplified to be used in QEMU. It also has its roots
in the QOP code generator written by Paul Brook.
2) Definitions
The TCG "target" is the architecture for which we generate the
code. It is of course not the same as the "target" of QEMU which is
the emulated architecture. As TCG started as a generic C backend used
for cross compiling, it is assumed that the TCG target is different
from the host, although it is never the case for QEMU.
In this document, we use "guest" to specify what architecture we are
emulating; "target" always means the TCG target, the machine on which
we are running QEMU.
A TCG "function" corresponds to a QEMU Translated Block (TB).
A TCG "temporary" is a variable only live in a basic
block. Temporaries are allocated explicitly in each function.
A TCG "local temporary" is a variable only live in a function. Local
temporaries are allocated explicitly in each function.
A TCG "global" is a variable which is live in all the functions
(equivalent of a C global variable). They are defined before the
functions defined. A TCG global can be a memory location (e.g. a QEMU
CPU register), a fixed host register (e.g. the QEMU CPU state pointer)
or a memory location which is stored in a register outside QEMU TBs
(not implemented yet).
A TCG "basic block" corresponds to a list of instructions terminated
by a branch instruction.
An operation with "undefined behavior" may result in a crash.
An operation with "unspecified behavior" shall not crash. However,
the result may be one of several possibilities so may be considered
an "undefined result".
3) Intermediate representation
3.1) Introduction
TCG instructions operate on variables which are temporaries, local
temporaries or globals. TCG instructions and variables are strongly
typed. Two types are supported: 32 bit integers and 64 bit
integers. Pointers are defined as an alias to 32 bit or 64 bit
integers depending on the TCG target word size.
Each instruction has a fixed number of output variable operands, input
variable operands and always constant operands.
The notable exception is the call instruction which has a variable
number of outputs and inputs.
In the textual form, output operands usually come first, followed by
input operands, followed by constant operands. The output type is
included in the instruction name. Constants are prefixed with a '$'.
add_i32 t0, t1, t2 (t0 <- t1 + t2)
3.2) Assumptions
* Basic blocks
- Basic blocks end after branches (e.g. brcond_i32 instruction),
goto_tb and exit_tb instructions.
- Basic blocks start after the end of a previous basic block, or at a
set_label instruction.
After the end of a basic block, the content of temporaries is
destroyed, but local temporaries and globals are preserved.
* Floating point types are not supported yet
* Pointers: depending on the TCG target, pointer size is 32 bit or 64
bit. The type TCG_TYPE_PTR is an alias to TCG_TYPE_I32 or
TCG_TYPE_I64.
* Helpers:
Using the tcg_gen_helper_x_y it is possible to call any function
taking i32, i64 or pointer types. By default, before calling a helper,
all globals are stored at their canonical location and it is assumed
that the function can modify them. By default, the helper is allowed to
modify the CPU state or raise an exception.
This can be overridden using the following function modifiers:
- TCG_CALL_NO_READ_GLOBALS means that the helper does not read globals,
either directly or via an exception. They will not be saved to their
canonical locations before calling the helper.
- TCG_CALL_NO_WRITE_GLOBALS means that the helper does not modify any globals.
They will only be saved to their canonical location before calling helpers,
but they won't be reloaded afterwise.
- TCG_CALL_NO_SIDE_EFFECTS means that the call to the function is removed if
the return value is not used.
Note that TCG_CALL_NO_READ_GLOBALS implies TCG_CALL_NO_WRITE_GLOBALS.
On some TCG targets (e.g. x86), several calling conventions are
supported.
* Branches:
Use the instruction 'br' to jump to a label.
3.3) Code Optimizations
When generating instructions, you can count on at least the following
optimizations:
- Single instructions are simplified, e.g.
and_i32 t0, t0, $0xffffffff
is suppressed.
- A liveness analysis is done at the basic block level. The
information is used to suppress moves from a dead variable to
another one. It is also used to remove instructions which compute
dead results. The later is especially useful for condition code
optimization in QEMU.
In the following example:
add_i32 t0, t1, t2
add_i32 t0, t0, $1
mov_i32 t0, $1
only the last instruction is kept.
3.4) Instruction Reference
********* Function call
* call <ret> <params> ptr
call function 'ptr' (pointer type)
<ret> optional 32 bit or 64 bit return value
<params> optional 32 bit or 64 bit parameters
********* Jumps/Labels
* set_label $label
Define label 'label' at the current program point.
* br $label
Jump to label.
* brcond_i32/i64 t0, t1, cond, label
Conditional jump if t0 cond t1 is true. cond can be:
TCG_COND_EQ
TCG_COND_NE
TCG_COND_LT /* signed */
TCG_COND_GE /* signed */
TCG_COND_LE /* signed */
TCG_COND_GT /* signed */
TCG_COND_LTU /* unsigned */
TCG_COND_GEU /* unsigned */
TCG_COND_LEU /* unsigned */
TCG_COND_GTU /* unsigned */
********* Arithmetic
* add_i32/i64 t0, t1, t2
t0=t1+t2
* sub_i32/i64 t0, t1, t2
t0=t1-t2
* neg_i32/i64 t0, t1
t0=-t1 (two's complement)
* mul_i32/i64 t0, t1, t2
t0=t1*t2
* div_i32/i64 t0, t1, t2
t0=t1/t2 (signed). Undefined behavior if division by zero or overflow.
* divu_i32/i64 t0, t1, t2
t0=t1/t2 (unsigned). Undefined behavior if division by zero.
* rem_i32/i64 t0, t1, t2
t0=t1%t2 (signed). Undefined behavior if division by zero or overflow.
* remu_i32/i64 t0, t1, t2
t0=t1%t2 (unsigned). Undefined behavior if division by zero.
********* Logical
* and_i32/i64 t0, t1, t2
t0=t1&t2
* or_i32/i64 t0, t1, t2
t0=t1|t2
* xor_i32/i64 t0, t1, t2
t0=t1^t2
* not_i32/i64 t0, t1
t0=~t1
* andc_i32/i64 t0, t1, t2
t0=t1&~t2
* eqv_i32/i64 t0, t1, t2
t0=~(t1^t2), or equivalently, t0=t1^~t2
* nand_i32/i64 t0, t1, t2
t0=~(t1&t2)
* nor_i32/i64 t0, t1, t2
t0=~(t1|t2)
* orc_i32/i64 t0, t1, t2
t0=t1|~t2
********* Shifts/Rotates
* shl_i32/i64 t0, t1, t2
t0=t1 << t2. Unspecified behavior if t2 < 0 or t2 >= 32 (resp 64)
* shr_i32/i64 t0, t1, t2
t0=t1 >> t2 (unsigned). Unspecified behavior if t2 < 0 or t2 >= 32 (resp 64)
* sar_i32/i64 t0, t1, t2
t0=t1 >> t2 (signed). Unspecified behavior if t2 < 0 or t2 >= 32 (resp 64)
* rotl_i32/i64 t0, t1, t2
Rotation of t2 bits to the left.
Unspecified behavior if t2 < 0 or t2 >= 32 (resp 64)
* rotr_i32/i64 t0, t1, t2
Rotation of t2 bits to the right.
Unspecified behavior if t2 < 0 or t2 >= 32 (resp 64)
********* Misc
* mov_i32/i64 t0, t1
t0 = t1
Move t1 to t0 (both operands must have the same type).
* ext8s_i32/i64 t0, t1
ext8u_i32/i64 t0, t1
ext16s_i32/i64 t0, t1
ext16u_i32/i64 t0, t1
ext32s_i64 t0, t1
ext32u_i64 t0, t1
8, 16 or 32 bit sign/zero extension (both operands must have the same type)
* bswap16_i32/i64 t0, t1
16 bit byte swap on a 32/64 bit value. It assumes that the two/six high order
bytes are set to zero.
* bswap32_i32/i64 t0, t1
32 bit byte swap on a 32/64 bit value. With a 64 bit value, it assumes that
the four high order bytes are set to zero.
* bswap64_i64 t0, t1
64 bit byte swap
* discard_i32/i64 t0
Indicate that the value of t0 won't be used later. It is useful to
force dead code elimination.
* deposit_i32/i64 dest, t1, t2, pos, len
Deposit T2 as a bitfield into T1, placing the result in DEST.
The bitfield is described by POS/LEN, which are immediate values:
LEN - the length of the bitfield
POS - the position of the first bit, counting from the LSB
For example, pos=8, len=4 indicates a 4-bit field at bit 8.
This operation would be equivalent to
dest = (t1 & ~0x0f00) | ((t2 << 8) & 0x0f00)
* trunc_shr_i32 t0, t1, pos
For 64-bit hosts only, right shift the 64-bit input T1 by POS and
truncate to 32-bit output T0. Depending on the host, this may be
a simple mov/shift, or may require additional canonicalization.
********* Conditional moves
* setcond_i32/i64 dest, t1, t2, cond
dest = (t1 cond t2)
Set DEST to 1 if (T1 cond T2) is true, otherwise set to 0.
* movcond_i32/i64 dest, c1, c2, v1, v2, cond
dest = (c1 cond c2 ? v1 : v2)
Set DEST to V1 if (C1 cond C2) is true, otherwise set to V2.
********* Type conversions
* ext_i32_i64 t0, t1
Convert t1 (32 bit) to t0 (64 bit) and does sign extension
* extu_i32_i64 t0, t1
Convert t1 (32 bit) to t0 (64 bit) and does zero extension
* trunc_i64_i32 t0, t1
Truncate t1 (64 bit) to t0 (32 bit)
* concat_i32_i64 t0, t1, t2
Construct t0 (64-bit) taking the low half from t1 (32 bit) and the high half
from t2 (32 bit).
* concat32_i64 t0, t1, t2
Construct t0 (64-bit) taking the low half from t1 (64 bit) and the high half
from t2 (64 bit).
********* Load/Store
* ld_i32/i64 t0, t1, offset
ld8s_i32/i64 t0, t1, offset
ld8u_i32/i64 t0, t1, offset
ld16s_i32/i64 t0, t1, offset
ld16u_i32/i64 t0, t1, offset
ld32s_i64 t0, t1, offset
ld32u_i64 t0, t1, offset
t0 = read(t1 + offset)
Load 8, 16, 32 or 64 bits with or without sign extension from host memory.
offset must be a constant.
* st_i32/i64 t0, t1, offset
st8_i32/i64 t0, t1, offset
st16_i32/i64 t0, t1, offset
st32_i64 t0, t1, offset
write(t0, t1 + offset)
Write 8, 16, 32 or 64 bits to host memory.
All this opcodes assume that the pointed host memory doesn't correspond
to a global. In the latter case the behaviour is unpredictable.
********* Multiword arithmetic support
* add2_i32/i64 t0_low, t0_high, t1_low, t1_high, t2_low, t2_high
* sub2_i32/i64 t0_low, t0_high, t1_low, t1_high, t2_low, t2_high
Similar to add/sub, except that the double-word inputs T1 and T2 are
formed from two single-word arguments, and the double-word output T0
is returned in two single-word outputs.
* mulu2_i32/i64 t0_low, t0_high, t1, t2
Similar to mul, except two unsigned inputs T1 and T2 yielding the full
double-word product T0. The later is returned in two single-word outputs.
* muls2_i32/i64 t0_low, t0_high, t1, t2
Similar to mulu2, except the two inputs T1 and T2 are signed.
********* 64-bit guest on 32-bit host support
The following opcodes are internal to TCG. Thus they are to be implemented by
32-bit host code generators, but are not to be emitted by guest translators.
They are emitted as needed by inline functions within "tcg-op.h".
* brcond2_i32 t0_low, t0_high, t1_low, t1_high, cond, label
Similar to brcond, except that the 64-bit values T0 and T1
are formed from two 32-bit arguments.
* setcond2_i32 dest, t1_low, t1_high, t2_low, t2_high, cond
Similar to setcond, except that the 64-bit values T1 and T2 are
formed from two 32-bit arguments. The result is a 32-bit value.
********* QEMU specific operations
* exit_tb t0
Exit the current TB and return the value t0 (word type).
* goto_tb index
Exit the current TB and jump to the TB index 'index' (constant) if the
current TB was linked to this TB. Otherwise execute the next
instructions. Only indices 0 and 1 are valid and tcg_gen_goto_tb may be issued
at most once with each slot index per TB.
* qemu_ld_i32/i64 t0, t1, flags, memidx
* qemu_st_i32/i64 t0, t1, flags, memidx
Load data at the guest address t1 into t0, or store data in t0 at guest
address t1. The _i32/_i64 size applies to the size of the input/output
register t0 only. The address t1 is always sized according to the guest,
and the width of the memory operation is controlled by flags.
Both t0 and t1 may be split into little-endian ordered pairs of registers
if dealing with 64-bit quantities on a 32-bit host.
The memidx selects the qemu tlb index to use (e.g. user or kernel access).
The flags are the TCGMemOp bits, selecting the sign, width, and endianness
of the memory access.
For a 32-bit host, qemu_ld/st_i64 is guaranteed to only be used with a
64-bit memory access specified in flags.
*********
Note 1: Some shortcuts are defined when the last operand is known to be
a constant (e.g. addi for add, movi for mov).
Note 2: When using TCG, the opcodes must never be generated directly
as some of them may not be available as "real" opcodes. Always use the
function tcg_gen_xxx(args).
4) Backend
tcg-target.h contains the target specific definitions. tcg-target.c
contains the target specific code.
4.1) Assumptions
The target word size (TCG_TARGET_REG_BITS) is expected to be 32 bit or
64 bit. It is expected that the pointer has the same size as the word.
On a 32 bit target, all 64 bit operations are converted to 32 bits. A
few specific operations must be implemented to allow it (see add2_i32,
sub2_i32, brcond2_i32).
Floating point operations are not supported in this version. A
previous incarnation of the code generator had full support of them,
but it is better to concentrate on integer operations first.
On a 64 bit target, no assumption is made in TCG about the storage of
the 32 bit values in 64 bit registers.
4.2) Constraints
GCC like constraints are used to define the constraints of every
instruction. Memory constraints are not supported in this
version. Aliases are specified in the input operands as for GCC.
The same register may be used for both an input and an output, even when
they are not explicitly aliased. If an op expands to multiple target
instructions then care must be taken to avoid clobbering input values.
GCC style "early clobber" outputs are not currently supported.
A target can define specific register or constant constraints. If an
operation uses a constant input constraint which does not allow all
constants, it must also accept registers in order to have a fallback.
The movi_i32 and movi_i64 operations must accept any constants.
The mov_i32 and mov_i64 operations must accept any registers of the
same type.
The ld/st instructions must accept signed 32 bit constant offsets. It
can be implemented by reserving a specific register to compute the
address if the offset is too big.
The ld/st instructions must accept any destination (ld) or source (st)
register.
4.3) Function call assumptions
- The only supported types for parameters and return value are: 32 and
64 bit integers and pointer.
- The stack grows downwards.
- The first N parameters are passed in registers.
- The next parameters are passed on the stack by storing them as words.
- Some registers are clobbered during the call.
- The function can return 0 or 1 value in registers. On a 32 bit
target, functions must be able to return 2 values in registers for
64 bit return type.
5) Recommended coding rules for best performance
- Use globals to represent the parts of the QEMU CPU state which are
often modified, e.g. the integer registers and the condition
codes. TCG will be able to use host registers to store them.
- Avoid globals stored in fixed registers. They must be used only to
store the pointer to the CPU state and possibly to store a pointer
to a register window.
- Use temporaries. Use local temporaries only when really needed,
e.g. when you need to use a value after a jump. Local temporaries
introduce a performance hit in the current TCG implementation: their
content is saved to memory at end of each basic block.
- Free temporaries and local temporaries when they are no longer used
(tcg_temp_free). Since tcg_const_x() also creates a temporary, you
should free it after it is used. Freeing temporaries does not yield
a better generated code, but it reduces the memory usage of TCG and
the speed of the translation.
- Don't hesitate to use helpers for complicated or seldom used guest
instructions. There is little performance advantage in using TCG to
implement guest instructions taking more than about twenty TCG
instructions. Note that this rule of thumb is more applicable to
helpers doing complex logic or arithmetic, where the C compiler has
scope to do a good job of optimisation; it is less relevant where
the instruction is mostly doing loads and stores, and in those cases
inline TCG may still be faster for longer sequences.
- The hard limit on the number of TCG instructions you can generate
per guest instruction is set by MAX_OP_PER_INSTR in exec-all.h --
you cannot exceed this without risking a buffer overrun.
- Use the 'discard' instruction if you know that TCG won't be able to
prove that a given global is "dead" at a given program point. The
x86 guest uses it to improve the condition codes optimisation. | zebracorn | /zebracorn-0.0.1.tar.gz/zebracorn-0.0.1/src/qemu/tcg/README | README |
<H1 CLASS="western" style="text-align:center;">ZebraZoom</H1>
<p align="center">
<img src="https://zebrazoom.org/videos/gif/output1.gif" height="250">
<img src="https://zebrazoom.org/videos/gif/output2.gif" height="250">
<img src="https://zebrazoom.org/videos/gif/output3.gif" height="250">
<img src="https://zebrazoom.org/videos/gif/output4.gif" height="250">
<img src="https://zebrazoom.org/videos/gif/ER.gif" height="250">
<img src="https://zebrazoom.org/videos/gif/mouse.gif" height="250">
</p>
<p>
ZebraZoom can be used to track the heads and tails of freely swimming and of head-embedded larval and adult zebrafish. It can also be used to track the center of mass of other animal species, such as mice or drosophila. The graphical user interface of ZebraZoom offers options to compare populations based on kinematic parameters and/or on based on unsupervised clustering of bouts of movements. The software operates through an intuitive graphical user interface, making it very simple to use for people with no programming background.
For more information view the <a href="https://zebrazoom.org/documentation/docs/intro/" target="_blank">online documentation</a>, visit <a href="https://zebrazoom.org/" target="_blank">zebrazoom.org</a> or email us [email protected]<br/>
</p>
| zebrazoom | /zebrazoom-1.34.8.tar.gz/zebrazoom-1.34.8/README.md | README.md |
ZEBU /ˈzeɪbuː/ - Bos primigenius indicus or Bos indicus or Bos taurus indicus
sometimes known as indicine cattle or humped cattle, is a species or subspecies of domestic cattle originating
in South Asia. Zebu are characterised by a fatty hump on their shoulders, a large dewlap, and sometimes drooping
ears. They are well adapted to withstanding high temperatures, and are farmed throughout the tropical countries,
both as pure zebu and as hybrids with taurine cattle, the other main type of domestic cattle. Zebu are used as
draught oxen, dairy cattle, and beef cattle, as well as for byproducts such as hides and dung for fuel and manure.
In 1999, researchers at Texas A&M University successfully cloned a zebu.
https://en.wikipedia.org/wiki/Zebu
Minibus was taken, since I am French and ZEBU relies on 0MQ I thought thebus pronounced zebus could be a cool name.
Of course, I could not resist the temptation to name this module after a cow.
Ze service
==========
ZEBU is a uber minimalistic bus which I implemented for fun and turned out to be useful (to me at least).
To run ZEBU you need to set 2 environment variables.
ZEBU_PUBLISH is the endpoint the publishers connect to.
ZEBU_SUBSCRIBE the endpoint the subscribers connect to.
Endpoints have the format transport://address where transport would more than likely be ipc or tcp.
$ ZEBU_PUBLISH=ipc://publish ZEBU_SUBSCRIBE=ipc://subscribe python -m zebu
Ze module
=========
ZEBU is also a module you can import to use carefully crafted helpers to define ...
Subscribers
-----------
>>> from zebu import subscribe
>>> messages = subscribes('a/topic', 'another/topic')
>>> for message in messages:
... # do something with message
Publishers
----------
>>> from zebu import publisher
>>> publish = publisher()
>>> publish('a/topic', 'a message on that topic') | zebu | /zebu-2017.5.0.tar.gz/zebu-2017.5.0/README.rst | README.rst |
# Python Zebull API
[zebull on PyPI](https://pypi.python.org/pypi/zebull)
Zebull is a REST-like API platform in which all input and output parameters are JSON-based. The Zebull REST API provides an easy way to place orders and view the order book.
## Documentation
- [Zebull API Documentation](https://zebull.in/#zebullDoc/intro)
## Installing the client
You can install the pre-release via pip:
```
pip install --upgrade zebull
```
It's recommended to update `setuptools` to the latest version if you face any issues while installing:
```
pip install -U pip setuptools
```
For more details check [official Python documentation](https://wiki.python.org/moin/WindowsCompilers).
# API usage (Sample method calls)
```python
import requests
import json
import hashlib
from zebullconnect.zebullapi import Zebullapi
sas_api = Zebullapi(user_id='Your_user_id',
api_key='Your_api_key')
# # This method invokes the get-encryption-key and get-user-session-ID calls:
# # log in with the user ID and API key, then receive the session ID.
# # Once obtained, the session ID is used for the calls below,
# # as follows.
response = sas_api.getEncryptionKey()
# # Market Watch Scrips
# # Search Scrips
scrip_response = sas_api.get_scrips(symbol='search_symbol_name', exchange=['exchange_name'])
# ====> Sample input parameters : symbol='TCS', exchange=[sas_api.EXCHANGE_NSE]
# Other available exchanges are as below
#
# EXCHANGE_NSE --- For NSE Cash
# EXCHANGE_NFO --- For NSE DERIVATIVES
# EXCHANGE_CDS --- For NSE Currency Derivatives
# EXCHANGE_BSE --- For BSE Cash
# EXCHANGE_BFO --- For BSE Derivatives
# EXCHANGE_BCD --- For BSE Currency Derivatives
# (the BFO/BCD constant names are assumed by analogy with the NSE
#  segments above; check the library's constants before use)
# EXCHANGE_MCX --- For MCX Contracts
# If the search has to be global (for example, across both NSE and BSE),
# send the exchange parameter as below:
# exchange=[sas_api.EXCHANGE_NSE, sas_api.EXCHANGE_BSE]
# SAMPLE RESPONSE
# {.....'exch': 'NSE', 'exchange': None, 'exchange_segment': 'nse_cm', 'symbol': 'TCS-EQ', 'token': '11536', 'instrument_name': 'TATA CONSULTANCY SERV LT'....}
# There will be more parameters in the response, but you can ignore them.
# SYMBOL and TOKEN are the important ones to call further APIs
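# --- A minimal sketch (not from the official docs): keep the symbol and
# --- token from the search response for reuse in the calls below.
# --- Assumption: scrip_response is a dict shaped like the sample above;
# --- adapt this if your API version returns a list of matches.
searched_symbol = scrip_response.get('symbol')  # e.g. 'TCS-EQ'
searched_token = scrip_response.get('token')    # e.g. '11536'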
# Market Watch List
marketwatchrespdata = sas_api.getmarketwatch_list()
#
# =====> Fetch Market Watch List
#
# SAMPLE RESPONSE
# {'stat': 'Ok', 'values': ['mwGrpRM', 'mwGrpLN', 'mwGrpFk', 'mwGrpFF'], 'MaxMWCount': 200, 'logindefaultmw': 'mwGrpLN'}
# This call takes no input parameters.
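# --- Sketch: list the watch group names from the response above; the
# --- 'stat', 'values' and 'logindefaultmw' keys come from the sample.
if marketwatchrespdata.get('stat') == 'Ok':
    for mw_name in marketwatchrespdata.get('values', []):
        print(mw_name)
    print('default watch:', marketwatchrespdata.get('logindefaultmw'))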
# # Market Watch Scrips
marketwatchresp = sas_api.marketwatch_scripsdata(mwname='Enter_your_market_watch_name')
#
# ====> Sample input parameters : mwname='mwGrpFk'
#
# SAMPLE RESPONSE
# {'stat': 'Ok', 'values': [{'tcksize': '5', 'openinterest': '0', 'optiontype': 'XX', 'BestSellPrice': '132.20', 'ExchSeg': 'nse_cm',......}
#
# 'values' is a JSON array holding the scrips of the requested watch group.
# # Add Scrips
addscripsresp = sas_api.addscrips(mwname='Enter_your_market_watch_name', exchange='exchange_name', token='Enter_your_tokenno')
#
# ====> Sample input parameters : mwname='mwGrpFk', exchange='NSE', token='1235'
#
# SAMPLE RESPONSE
# {'emsg': 'Scrip is present in mwGrpFk', 'stat': 'Ok'}
#
# With valid input parameters the status comes back as 'Ok' and the scrip is added to the watch group.
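# --- Sketch: confirm the scrip was added; 'stat' and 'emsg' are the keys
# --- shown in the sample response above.
if addscripsresp.get('stat') == 'Ok':
    print(addscripsresp.get('emsg'))  # e.g. 'Scrip is present in mwGrpFk'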
# # Delete Scrips
deletescripsresp = sas_api.deletescrips(mwname='Enter_your_market_watch_name', exchange='exchange_name',
token='Enter_your_tokenno')
#
# Sample input parameters : mwname='mwGrpLn', exchange='NSE', token='245'
#
# SAMPLE RESPONSE
# {'emsg': 'Scrip is delete in mwGrpLn', 'stat': 'Ok'}
#
# Delete the parameters value and status as ok then result as success
# # Scrip Details
scripsdetailresp = sas_api.scrips_details(exchange='exchange_name', token='Enter_your_tokenno')
# ====> Sample input parameters : exchange=[sas_api.EXCHANGE_NSE],token='777'
#
#
#
# SAMPLE RESPONSE
# {'vwapAveragePrice': 'NA', 'LTQ': 'NA', 'DecimalPrecision': 2, 'openPrice': 'NA', 'LTP': 'NA', 'Ltp': 'NA', 'BRate': 'NA', 'defmktproval': '3', 'symbolname': 'UTIRGR28P2',...... }
#
# There given exchange and token numbers are given the bunch of response will be displayed on output format
#
# Order Management
# # Position Book
positionbookresp = sas_api.positionbook(ret='retention_type')
# ====> Sample input parameters : ret=[sas_api.RETENTION_DAY]
#
# SAMPLE RESPONSE
# {"emsg": "Success","stat": "Ok"}
#
# The Retention type of input parameters DAY/NET will be given after response are bunch of data's are displayed like that stat,exchange,pcode,symbol,token....
# # Square of position
squareoffresp = sas_api.squareoff_positions(exchange='exchange_name', symbol='Enter_your_symbol',
qty='Enter_your_Qty',
pCode='Entey_your_productcode',
tokenno='Enter_your_tokenno')
# ====> Sample input parameters : ret=[sas_api.RETENTION_DAY], symbol='ASHOKLEY',qty='0',pCode=[sas_api.PRODUCT_INTRADAY], tokenno='12356'
#
# SAMPLE RESPONSE
# {"stat": "Ok","nestOrderNumber:"200626000052824"}
# The input parameters are given and response are ok, nestordernumbers are displayed
# # Place Order
placeorderresp = sas_api.place_order(complexty='Enter_your_ordertype', discqty='Enter_your_discqty',
exch='exchange_name',
pCode='Enter_your_productcode', price='Enter_your_Price',
qty='Enter_your_Quantity',
prctyp='Enter_your_pricetype', ret='Enter_your_retention_type',
symbol_id='Enter_your_symbol_id',
trading_symbol='Enter_your_trading_symbol',
transtype='Enter_your_transaction_type',
trigPrice='Enter_your_trigPrice')
# ====> Sample input parameters : ret=[sas_api.RETENTION_DAY], complexity=[sas_api.REGULAR_ORDER], exchange=[sas_api.EXCHANGE_NSE],pCode='MIS',
# price='1',Qty='1',prctype=[sas_api.LIMIT_ORDER],ret=[sas_api.RETENTION_DAY],trading_symbol='49234',transtype=[sas_api.BUY_ORDER],
# trigprice='1'
# SAMPLE RESPONSE
# {['stat': 'Ok', 'nestOrderNumber': '191015000018737']}
# Same as the process of Square off positions given parameters and response are ok,nestordernumbers are displayed
# Bracket Order
bracketorderresp = sas_api.bracket_order(complexty='Enter_your_ordertype', discqty='Enter_your_discqty',
exch='exchange_name',
pCode='Enter_your_productcode', price='Enter_your_price', qty='Enter_your_qty',
prctyp='Enter_your_pricetype', stopLoss='Enter_your_stopLoss',
ret='Enter_your_retention_type',
symbol_id='Enter_your_symbol_id',
trading_symbol='Enter_your_trading_symbol',
trailing_stop_loss='Enter_your_trailing_stop_loss_value',
target='Enter_your_target_value',
transtype='Enter_your_transaction_type',
trigPrice='Enter_your_trigPrice')
# ====> Sample input parameters : ret=[sas_api.RETENTION_DAY], complexity=[sas_api.REGULAR_ORDER], exchange=[sas_api.EXCHANGE_NSE],pCode='MIS',
# price='1',qty='1',prctype=[sas_api.LIMIT_ORDER],ret=[sas_api.RETENTION_DAY],trading_symbol='49234',transtype=[sas_api.BUY_ORDER],
# trigprice='1',discqty='0',symbol_id='13611',trailimg_stop_loss='3.4',target='28.0',transtype=[sas_api.BUY_ORDER],stoploss='28.0'
# SAMPLE RESPONSE
# [{"stat": "Ok", "nestOrderNumber": "210218000070901"}
# Same process of previous one input parameters like retention type,complexity,exchange....... and response are ok,nestordernumbers displayed on screen
# Fetch Order Book
orderresp = sas_api.order_data()
# ===>No Parameters are passed to get response
# SAMPLE RESPONSE
# [{....... "Prc": "1454.90", "RequestID": "1", "Cancelqty": 0, "discQtyPerc": "10", "Qty": 8, "Prctype": "SL", "Status": "rejected","Exchange": "NSE" ,"Avgprc": "00.00", "Trgprc": "1450.90",.....}]
# This one get method and no input parameters and output will be bunch of data's are shown on response
# Fetch Trade Book
tradebookresp = sas_api.tradebook()
# ===>No Parameters are passed to get response
# SAMPLE RESPONSE
# This one also get method and no input parameters and data's will be shown
# # Exit Bracket Order
exitboorderresp = sas_api.exitboorder(nestOrderNumber='Enter_your_nestOrderNumber',
symbolOrderId="Enter_your_symbolOrderId", status='Enter_your_status')
# ====> Sample input parameters : nestOrderNumber='200626000052824', symbolOrderId='', status='OPEN',
#
# SAMPLE RESPONSE
# {"stat":"Ok}
# The input parameters are nestordernumber,symbolid,and status will be given and response like ok.
# # Modify Order
modifyorderresp = sas_api.modifyorder(discqty='Your_Quantity_No', qty='Enter_your_Quantity', exch='exchange_name',
filledQuantity='Enter_your_Filledquantity',
nestOrderNumber='Enter_your_nestordernumber', prctyp='Enter_your_pricetype',
price='Enter_your_Price',
trading_symbol='Your_Trading_Symbol', trigPrice='Enter_your_trigger_Price',
transtype='Enter_your_transaction_type', pCode='Enter_your_productcode'),
# ====> Sample input parameters : discqty='0', qty='1', exchange=[sas_api.EXCHANGE_NSE],filledQuantity='0',
# nestOrderNumber='191015000018737',prctype=[sas_api.LIMIT_ORDER], price='1'
# ,trading_symbol='ASHOKLEY-EQ',trigPrice='00.OO',transtype=[sas_api.BUY_ORDER],
# pCode=[sas_api.MARKET_ORDER]
#
# SAMPLE RESPONSE
# [{"stat": "Ok", "nestOrderNumber": "210218000070901"}
# The Input parameters are given and results are displayed on Sample response
# # Market Order
marketorderresp = sas_api.marketorder(complexty='Enter_your_ordertype', discqty='Enter_your_discqty',
exch='exchange_name',
pCode='Enter_your_productcode',
prctyp='Enter_your_pricetype', price="", qty='Enter_your_qty',
ret='Enter_your_retention_type',
symbol_id='Enter_your_symbol_id', trading_symbol='Enter_your_trading_symbol',
transtype='Enter_your_transaction_type',
trigPrice="")
# ====> Sample input parameters : complexity=[sas_api.REGULAR_ORDER], discqty='0', qty='1', exchange=[sas_api.EXCHANGE_NSE],filledQuantity='0',nestOrderNumber='191015000018737',prctype=[sas_api.LIMIT_ORDER],price='1',trading_symbol='ASHOKLEY-EQ'
# trigPrice='00.OO',transtype=[sas_api.BUY_ORDER],pCode=[sas_api.MARKET_ORDER]
# SAMPLE RESPONSE
# {"stat": "Ok", "nestOrderNumber": "210218000070991"}
# The market order data's can be input and output's are displayed
# Cancel Order
cancelresp = sas_api.cancel_order(exchange=sas_api.EXCHANGE_NSE, nestordernmbr='Enter_your_nestordernmbr',
tradingsymbol='Enter_your_tradingsymbol')
# ====> Sample input parameters : exchange=[sas_api.EXCHANGE_NSE], nestordernumbrr='191015000018737',tradingsymbol='ASHOKLEY-EQ',
# SAMPLE RESPONSE
# {"stat": "Ok", "nestOrderNumber": "210218000070991"}
# The Input parameters exchange,nestordernumber and trading symbol are given after output's are status ok, and nestordernumber are displayed
# Order History
orderhistoryresp = sas_api.order_history(nextorder='Enter_your_nextorder')
# =====> Sample input parameter: nestOrderNumber": "200628000000004"
# SAMPLE RESPONSE
# {'stat':'Ok' ...}
# The nestordernumber's are displayed
# Fetch Holdingsdata
holdingresp = sas_api.holdingsdata()
# =====> No parameter are passed to get holdings
# SAMPLE RESPONSE
# {'stat':'Ok' ...}
# This are get method no input parameters and output parameters will be displayed
# Funds
# Get Limit
fundsresp = sas_api.fundsdata()
# =====> Fetch Market Watch List
# SAMPLE RESPONSE
# {'emsg': None, 'stat': 'Ok'}
# No parameters to send get fundsdetails
| zebull | /zebull-2.2.tar.gz/zebull-2.2/README.md | README.md |
import requests
import json
import hashlib
def encrypt_string(hashing):
sha = \
hashlib.sha256(hashing.encode()).hexdigest()
return sha
class Zebullapi(object):
# BASE_URL
base_url = "https://zebull.in/rest/MobullService/api/"
api_name = "SAS API Connect - Python Lib "
version = "2.1"
# Products
PRODUCT_INTRADAY = "MIS"
PRODUCT_COVER_ODRER = "CO"
PRODUCT_CNC = "CNC"
PRODUCT_BRACKET_ORDER = "BO"
PRODUCT_NRML = "NRML"
# Order Type
REGULAR_ORDER = "REGULAR"
LIMIT_ORDER = "L"
STOPLOSS_ORDER = "SL"
MARKET_ORDER = "MKT"
# Transaction type
BUY_ORDER = "BUY"
SELL_ORDER = "SELL"
# Positions
RETENTION_DAY = "DAY" or "NET"
# Exchanges
EXCHANGE_NSE = "NSE"
EXCHANGE_NFO = "NFO"
EXCHANGE_CDS = "CDS"
EXCHANGE_BSE = "BSE"
EXCHANGE_BFO = "BFO"
EXCHANGE_BCD = "BCD"
EXCHANGE_MCX = "MCX"
# Status constants
STATUS_COMPLETE = "COMPLETE"
STATUS_REJECTED = "REJECTED"
STATUS_CANCELLED = "CANCELLED"
# response = requests.get(base_url);
# Getscrip URI
_sub_urls = {
# Authorization
"encryption_key": "customer/getAPIEncpkey",
"getsessiondata": "customer/getUserSID",
# Market Watch
"marketwatch_scrips": "marketWatch/fetchMWScrips",
"addscrips": "marketWatch/addScripToMW",
"getmarketwatch_list": "marketWatch/fetchMWList",
"scripdetails": "ScripDetails/getScripQuoteDetails",
"getdelete_scrips": "marketWatch/deleteMWScrip",
# OrderManagement
"squareoffposition": "positionAndHoldings/sqrOofPosition",
"position_conversion": "positionAndHoldings/positionConvertion",
"placeorder": "placeOrder/executePlaceOrder",
"modifyorder": "placeOrder/modifyOrder",
"marketorder": "placeOrder/executePlaceOrder",
"exitboorder": "placeOrder/exitBracketOrder",
"bracketorder": "placeOrder/executePlaceOrder",
"positiondata": "positionAndHoldings/positionBook",
"orderbook": "placeOrder/fetchOrderBook",
"tradebook": "placeOrder/fetchTradeBook",
"holding": "positionAndHoldings/holdings",
"orderhistory": "placeOrder/orderHistory",
"cancelorder": "placeOrder/cancelOrder",
# Funds
"fundsrecord": "limits/getRmsLimits",
}
# Common Method
def __init__(self,
user_id,
api_key,
base=None,
session_id=None,
disable_ssl=False):
self.user_id = user_id
self.api_key = api_key
self.disable_ssl = disable_ssl
self.session_id = session_id
self.base = base or self.base_url
def _get(self, sub_url, data=None):
"""Get method declaration"""
url = self.base + self._sub_urls[sub_url]
return self._request(url, "GET", data=data)
def _post(self, sub_url, data=None):
"""Post method declaration"""
url = self.base + self._sub_urls[sub_url]
return self._request(url, "POST", data=data)
def _dummypost(self, url, data=None):
"""Post method declaration"""
return self._request(url, "POST", data=data)
def _user_agent(self):
return self.api_name + self.version
"""Authorization get to call all requests"""
def _user_authorization(self):
if self.session_id:
return "Bearer " + self.user_id + " " + self.session_id
else:
return ""
"""Common request to call POST and GET method"""
def _request(self, method, req_type, data=None):
"""
Headers with authorization. For some requests authorization
is not required. It will be send as empty String
"""
_headers = {
"X-SAS-Version": "2.0",
"User-Agent": self._user_agent(),
"Authorization": self._user_authorization(),
}
if req_type == "POST":
response = requests.post(method, json=data, headers=_headers, )
return json.loads(response.text)
elif req_type == "GET":
response = requests.get(method, json=data, headers=_headers)
return json.loads(response.text)
# Methods to call HTTP Request
"""Userlogin method with userid and userapi_key"""
def getEncryptionKey(self, data=None):
data = {'userId': self.user_id}
response = self._post("encryption_key", data)
if response['encKey'] is None:
return response['emsg']
else:
data = encrypt_string(self.user_id + self.api_key + response['encKey'])
data = {'userId': self.user_id, 'userData': data}
res = self._post("getsessiondata", data)
if res['stat'] == 'Ok':
self.session_id = res['sessionID']
return res
"""GET Market watchlist"""
def getmarketwatch_list(self):
marketwatchrespdata = self._get("getmarketwatch_list")
return marketwatchrespdata
"""GET Tradebook Records"""
def tradebook(self):
tradebookresp = self._get("tradebook")
return tradebookresp
"""GET Holdings Records"""
def holdingsdata(self):
holdingresp = self._get("holding")
return holdingresp
"""GET Orderbook Records"""
def order_data(self):
orderresp = self._get("orderbook")
return orderresp
def order_history(self, nextorder):
data = {'nestOrderNumber': nextorder}
orderhistoryresp = self._post("orderhistory", data)
return orderhistoryresp
"""Method to call Cancel Orders"""
def cancel_order(self, exchange,
nestordernmbr,
tradingsymbol):
data = {'exch': exchange,
'nestOrderNumber': nestordernmbr,
'trading_symbol': tradingsymbol}
cancelresp = self._post("cancelorder", data)
return cancelresp
def marketwatch_scripsdata(self, mwname, ):
data = {'mwName': mwname, }
marketwatchresp = self._post("marketwatch_scrips", data)
return marketwatchresp
"""Method to call Add Scrips"""
def addscrips(self,
mwname,
exchange,
token):
data = {'mwName': mwname,
'exch': exchange,
'symbol': token, }
addscripsresp = self._post("addscrips", data)
return addscripsresp
"""Method to call Delete Scrips"""
def deletescrips(self,
mwname,
exchange,
token):
data = {'mwName': mwname,
'exch': exchange,
'symbol': token, }
deletescripsresp = self._post("getdelete_scrips", data)
return deletescripsresp
"""Method to call Scrip Details"""
def scrips_details(self,
exchange,
token):
data = {'exch': exchange,
'symbol': token}
scripsdetailresp = self._post("scripdetails", data)
return scripsdetailresp
"""Method to call Squareoff Positions"""
def squareoff_positions(self,
exchange,
pCode,
qty,
tokenno,
symbol):
data = {'exchSeg': exchange,
'pCode': pCode,
'netQty': qty,
'tockenNo': tokenno,
'symbol': symbol}
squareoffresp = self._post("squareoffposition", data)
return squareoffresp
"""Method to call Place Order"""
def place_order(self,
complexty,
discqty,
exch,
pCode,
price,
prctyp,
qty,
ret,
symbol_id,
trading_symbol,
transtype,
trigPrice):
data = [{'complexty': complexty,
'discqty': discqty,
'exch': exch,
'pCode': pCode,
'price': price,
'prctyp': prctyp,
'qty': qty,
'ret': ret,
'symbol_id': symbol_id,
'trading_symbol': trading_symbol,
'transtype': transtype,
'trigPrice': trigPrice}]
placeorderresp = self._post("placeorder", data)
return placeorderresp
"""Method to call Bracket Order"""
def bracket_order(self,
complexty,
discqty,
exch,
pCode,
price,
prctyp,
qty,
ret,
stopLoss,
symbol_id,
target,
trailing_stop_loss,
trading_symbol,
transtype,
trigPrice):
data = [{'complexty': complexty,
'discqty': discqty,
'exch': exch,
'pCode': pCode,
'price': price,
'prctyp': prctyp,
'qty': qty,
'ret': ret,
'target': target,
'stopLoss': stopLoss,
'trailing_stop_loss': trailing_stop_loss,
'symbol_id': symbol_id,
'trading_symbol': trading_symbol,
'transtype': transtype,
'trigPrice': trigPrice}]
bracketorderresp = self._post("bracketorder", data)
return bracketorderresp
"""Method to get Funds Data"""
def fundsdata(self):
fundsresp = self._get("fundsrecord")
return fundsresp
"""Method to call Modify Order"""
def modifyorder(self,
discqty,
exch,
filledQuantity,
nestOrderNumber,
prctyp,
price,
qty,
trading_symbol,
trigPrice,
transtype,
pCode):
data = {'discqty': discqty,
'exch': exch,
'filledQuantity': filledQuantity,
'nestOrderNumber': nestOrderNumber,
'prctyp': prctyp,
'price': price,
'qty': qty,
'trading_symbol': trading_symbol,
'trigPrice': trigPrice,
'transtype': transtype,
'pCode': pCode}
modifyorderresp = self._post("modifyorder", data)
return modifyorderresp
"""Method to call Market Order"""
def marketorder(self,
complexty,
discqty,
exch,
pCode,
prctyp,
price,
qty,
ret,
symbol_id,
trading_symbol,
transtype,
trigPrice):
data = [{'complexty': complexty,
'discqty': discqty,
'exch': exch,
'pCode': pCode,
'prctyp': prctyp,
'price': price,
'qty': qty,
'ret': ret,
'symbol_id': symbol_id,
'trading_symbol': trading_symbol,
'transtype': transtype,
'trigPrice': trigPrice}]
marketorderresp = self._post("marketorder", data)
return marketorderresp
"""Method to call Exitbook Order"""
def exitboorder(self,
nestOrderNumber,
symbolOrderId,
status, ):
data = {'nestOrderNumber': nestOrderNumber,
'symbolOrderId': symbolOrderId,
'status': status, }
exitboorderresp = self._post("exitboorder", data)
return exitboorderresp
"""Method to get Position Book"""
def positionbook(self,
ret, ):
data = {'ret': ret, }
positionbookresp = self._post("positiondata", data)
return positionbookresp
"""Method to get Scripsforsearch"""
def get_scrips(self, symbol, exchange):
scrip_Url = "https://zebull.in/rest/MobullService/exchange/getScripForSearch"
data = {'symbol': symbol, 'exchange': exchange}
scrip_response = self._dummypost(scrip_Url, data)
return scrip_response | zebull | /zebull-2.2.tar.gz/zebull-2.2/zebullconnect/zebullapi.py | zebullapi.py |
# zec2
[](https://badge.fury.io/py/zec2)
[](https://travis-ci.com/arrrlo/zec2)






Easily manage your AWS EC2 instances
## INSTALL
```bash
pip install zec2
```
## CONFIGURE AWS CREDENTIALS
You should have this two files on your computer:
`~/.aws/config`:
```ini
[default]
region=your_aws_region
output=json
```
`~/.aws/credentials`:
```ini
[default]
aws_access_key_id=your_access_key_id
aws_secret_access_key=your_secret_access_key
```
To learn more about AWS credentials and how to install them on your computer, please read this:
[https://docs.aws.amazon.com/rekognition/latest/dg/setting-up.html](https://docs.aws.amazon.com/rekognition/latest/dg/setting-up.html)
## DIFFERENT AWS PROFILES
You can put as many profiles in your aws credentials file and call them with zec2:
```bash
# use default aws profile
> zec2 ls
# use different aws profile
> zec2 -p my_profile ls
```
Use this option with every command.
## CLI COMMANDS
```bash
# list all EC2 instances
> zec2 ls
# list all EC2 instances using custom aws profile (applies to all commands)
> zec2 -p work ls
# live list all EC2 instances
> zec2 ls -f
# ssh to 1st instance from the list
> $(zec2 ssh 1)
# ssh using different user (the default is ec2-user)
> $(zec2 ssh 1 -u ubuntu)
# ssh using different pem key path (the default is ~/.ssh/__instance_key_pair__.pem)
> $(zec2 ssh 1 -i ~/path/to/key.pem)
# stop 1st EC2 instance from the list
> zec2 stop 1
# start 1st EC2 instance from the list
> zec2 start 1
# restart 1st EC2 instance from the list
> zec2 restart 1
# terminate 1st EC2 instance from the list
> zec2 terminate 1
```
| zec2 | /zec2-0.5.0.tar.gz/zec2-0.5.0/README.md | README.md |
# ZE Python Client
The ZE Python Client provides a unified module - ZemaClient for retrieving data from ZEMA web services.
## Prerequisites
Python 3.5 or above and the following packages
* requests
* lxml
* pandas
* zeep
* dicttoxml
The packages will be automatically installed during the installation.
## Installation
```
pip install zeclient
```
## Usage
```python
from zeclient.zema_client import ZemaClient
# ZEMA Data Direct server URL
datadirect_url = "http://host.company.com/datadirect";
# Create ZEMA client instance and start a session
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client')
# Get profile data
result = client.get_profile('user.name', 'profile group', 'profile name')
```
## Support
Please report bugs to [email protected]
| zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/README.md | README.md |
# ZE Python REST Client User's Guide
## RestClient class
To utilize ZE Python REST Client, an instance of RestClient class needs to be created. The constructor of the class takes four parameters
1. The URL of ZEMA Data Direct server
2. The name of the user who has ZEMA Data Direct license
3. The password
4. The client name
```python
from zeclient.rest_client import RestClient
datadirect_url = "http://host.company.com/datadirect";
client = RestClient(datadirect_url, 'user.name', 'password', 'Client')
```
## Method Return
All methods return a pandas' DataFrame.
```
date type price
0 2016-08-24 CLOSE 123.4
1 2016-08-25 CLOSE 123.5
2 2016-08-26 CLOSE 123.6
```
## Methods
* get_report(self, data_source, report, start_date, end_date, select=None, filters=None)
The method retrieves data for a specified report in a date range.
Get data for a report on a specific date
```python
result = client.get_report('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', date(2019, 5, 1), date(2019, 5, 1))
```
Get data for a report in a date range by selecting columns and using filters. The filters can include report columns, 'changedSince' and 'maxRecords'.
```python
select = ['TICKER', 'CONTRACT_YEAR', 'CONTRACT_MONTH', 'PRICE']
filters = {'TICKER': ['HA','KR'],
'CONTRACT_YEAR': 2020,
'CONTRACT_MONTH': [1, 2, 3],
'changedSince': datetime(2010, 5, 14, 14, 20, 30),
'maxRecords': 10}
result = client.get_report('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', date(2019, 5, 1), date(2019, 5, 10), select=select, filters=filters)
```
Note that a 'date' column is always included in the result.
* get_result_set(data_source, report, filters=None)
The method retrieves available result sets for a report and the return columns are report columns, 'minDate' and 'maxDate'.
Get all result sets for a report
```python
result = client.get_result_set('NYMEX', 'NYMEX_FUTURES_SETTLEMENT')
```
Get filtered result sets. The filters can include report columns, 'changedSince', 'newResultsetOnly' and 'maxRecords'.
```python
filters = {'TICKER': ['HA','KR'],
'changedSince': datetime(2010, 5, 14, 14, 20, 30),
'newResultsetOnly': True,
'maxRecords': 10}
result = client.get_result_set('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', filters=filters)
```
* find_data_sources()
The method returns all the available data sources for the user. The columns returned are name, displayName, description and url.
* find_reports(data_source)
The method retrieves all data reports under the specified data source. The columns returned are name, displayName, commodityType, marketType, granularity and gmtOffset.
* find_report_observations(data_source, report)
The method retrieves all observations for a report. The columns returned are name, displayName, numerator, denominator, dataType and marketType.
* find_report_attributes(data_source, report)
The method retrieves all attributes for a report. The columns returned are name, displayName, and dataType.
* find_profile_users()
The method returns profile users
* find_profile_groups(profile_user)
The method finds out all profile groups for a specific user
* find_profiles(profile_user, profile_group)
The method finds out all profiles under a group for a specific user
* find_linked_profiles(profile_user, profile_group, template_name)
The method finds out all linked profiles (configs) for a template under a group for a specific user
* get_profile(user_name, group_name, profile_name, config_name=None, eff_date=None, start_date=None, end_date=None)
The method executes a profile and returns the result. Effective date (eff_date), start date and end date are optional.
* find_curve_groups()
The method returns all curve groups
* find_curves(group_name)
The method finds out all curves under a group
* get_curve_validation(curve_group, curve_name, start_date, end_date)
The method returns curve validation status for a curve under a group within a date range
* get_curve_group_validation(curve_group, start_date, end_date)
The method returns curve validation status for all curves under a group within a date range
* get_batch_status(start_date, end_date, batch_name)
The method gets the batch status for a batch within a date range
* get_batch_statuses(start_date, end_date, batch_type=None, batch_status=None)
The method gets the batch statuses for all batche instances that have a specific batch type within a date range
* get_curve(group_name, curve_name, start_date, end_date)
The method retrieves curve data for a curve under a specific group within a date range
* get_curves_in_group(group_name, start_date, end_date)
The method retrieves curve data for curves under a specific group within a date range
* get_forward_curve(start_date, end_date, filters=None, include_properties=None)
The method retrieves futures curve data for filtered curves within a date range
The filters can include curve groups, batches and curve properties. The result can include curve properties specified in include_properties argument.
```python
filters = {'groups': ['group 1','group 2'],
'batches': ['batch 1', 'batch 2'],
'properties': [{'name': 'property name 1', 'values':['value 11', 'value 12']},
{'name': 'property name 2', 'values':['value 21', 'value 22']}]}
include_properties = ['property name 1']
result = client.get_forward_curve(date(2020,2, 1), date(2020, 2, 1), filters, include_properties)
```
* get_timeseries_curve(start_date, end_date, filters=None, include_properties=None)
The method retrieves time series curve data for filtered curves within a date range
* get_options_curve(start_date, end_date, filters=None, include_properties=None)
The method retrieves options curve data for filtered curves within a date range
* upload_curve_data(self, group_name, curve_name, payload, partial_update=None)
The method uploads curve data based on effective date. The payload requires two properties,
'effective_date', and 'data', where data is a list of curve data objects. If partial_update parameter
is passed as True, this will create a PATCH request to only update available 'data' records of the effective date.
If partial_update parameter is not passed, this will call a PUT request to delete and replace all records
on the given effective_date
```python
payload = {
"effectiveDate": datetime.date(2023, 1, 15),
"data": [
{
"date": datetime.date(2023, 1, 15),
"granularity": "value",
"type": "value",
"value": "value",
"comment": "comment"
},
{
"date": datetime.date(2023, 1, 16),
"granularity": "value",
"type": "value",
"value": "value",
"comment": "comment"
}
]
}
response = client.upload_curve_data(curve_group, curve_name, payload, partial_update=True)
```
* close()
Logging out and terminating the session.
* enable_debug()
Enabling debug mode.
* disable_debug()
Disabling debug mode.
| zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/docs/rest_user_guide.md | rest_user_guide.md |
# ZE Python ZEMA Client User's Guide for SSO (OAuth2)
For OAuth2 authentication, the following four fields for the "auth" parameter are required:
1. idp_type - the type of IDP (only support 3 IdPs for now: ADFS, Okta and Ping Federate). Available options:
* adfs
* okta
* ping
2. idp_url - the IDP URL
3. oauth2_flow - OAuth2 flow type (either OAuth2Flow.ResourceOwnerPasswordCredentialsGrantType or OAuth2Flow.AuthorizationCodeGrantType). Note: ResourceOwnerPasswordCredentialsGrantType is recommended.
4. domain - the domain name (optional for Ping Federate)
**_Note_**: in case of using Okta or Ping Federate, **Client ID** and **Client Secret** might be needed for Resource Owner Password Credential flow.
## ADFS
### Resource Owner Password Credential flow
```python
auth = {
'idp_type': 'adfs',
'idp_url': 'https://adfs.company.com',
'oauth2_flow': OAuth2Flow.ResourceOwnerPasswordCredentialsGrantType,
'domain': 'company.com'
}
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client', auth = auth)
```
### Authorization Code flow
```python
auth = {
'idp_type': 'adfs',
'idp_url': 'https://adfs.company.com',
'oauth2_flow': OAuth2Flow.AuthorizationCodeGrantType,
'domain': 'company.com'
}
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client', auth = auth)
```
## Okta
**Note**: the _**idp_url**_ should be the Authorization Server url configured in Okta.
### Resource Owner Password Credential flow
With the current implementation of Okta (Feb 2021), Client ID and Client Secret are required for ROPC flow.
```python
auth = {
'idp_type': 'okta',
'idp_url': 'https://domain.okta.com/oauth2/default',
'oauth2_flow': OAuth2Flow.ResourceOwnerPasswordCredentialsGrantType,
'idp_client_id': 'the_oauth2_client_id',
'idp_client_secret': 'secret',
'domain': 'company.com'
}
```
### Authorization Code flow
```python
auth = {
'idp_type': 'okta',
'idp_url': 'https://domain.okta.com/oauth2/default',
'oauth2_flow': OAuth2Flow.AuthorizationCodeGrantType,
'domain': 'company.com'
}
```
## Ping Federate
### Resource Owner Password Credential flow
Depend on how this OAuth2 client is configured in Ping Federate, ipd_client_id and idp_client_secret will be needed if "CLIENT AUTHENTICATION" is set to "CLIENT SECRET". Please contact your Ping Federate administrator.
```python
auth = {
'idp_type': 'ping',
'idp_url': 'https://ping.company.com:9031',
'oauth2_flow': OAuth2Flow.ResourceOwnerPasswordCredentialsGrantType,
'idp_client_id': 'the_oauth2_client_id',
'idp_client_secret': 'secret'
}
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client', auth = auth)
```
### Authorization Code flow
```python
auth = {
'idp_type': 'ping',
'idp_url': 'https://ping.company.com:9031',
'oauth2_flow': OAuth2Flow.AuthorizationCodeGrantType
}
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client', auth = auth)
``` | zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/docs/sso_user_guide.md | sso_user_guide.md |
# ZE Python ZEMA Client User's Guide
## ZemaClient class
To utilize ZE Python ZEMA Client, an instance of ZemaClient class needs to be created. The constructor of the class takes four mandatory parameters
1. The URL of ZEMA Data Direct server
2. The name of the user who has ZEMA OData license
3. The password
4. The client name
and three optional parameters
5. enable_debug - to enable/disable debug (default is False)
6. proxies - to specify HTTP proxies (default is None)
7. auth - to specify authentication parameters for OAuth2 or disable certificate verification (default is None)
Sample code for simple user name / password authentication
```python
from zeclient.zema_client import ZemaClient
datadirect_url = "https://host.company.com/datadirect";
auth = {'verify_ssl': False}
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client', auth = auth)
```
For OAuth2 authentication, currently only resource owner password flow is supported. The following six fields for the "auth" parameter are required
1. idp_token_url - the token URL
2. idp_client_id - the client id
3. idp_client_secret - the client secret
4. idp_scope - the scope. it should be "openid" in most situations
5. oauth2_flow - OAuth2 flow type (OAuth2Flow.ResourceOwnerPasswordCredentialsGrantType)
6. domain - the domain name
```python
auth = {
'idp_token_url': 'https://adfs-uat.zepower.com/adfs/oauth2/token',
'idp_client_id': 'client_id_string',
'idp_client_secret': 'client_secret_string',
'idp_scope': 'openid',
'oauth2_flow': OAuth2Flow.ResourceOwnerPasswordCredentialsGrantType,
'domain': 'company.com'
}
client = ZemaClient(datadirect_url, 'user.name', 'password', 'Client', auth = auth)
```
## Common Method Parameters
There are five parameters that are available for OData based methods.
* select
The "select" parameter specifies a list of columns to select.
```python
select = ['name', 'opr_date', 'type', 'value']
```
* filters
The "filters" parameter is used to filter data based on the specified column values. The parameter is an JSON object. A basic filter contains three elements.
```
{'column_name': {'op': 'operator', 'value': ['column value 1', 'column value 2']}}
```
The operator can be 'eq', 'ge', 'gt', 'le', 'lt', 'contains', 'startswith', 'endswith'. Note that the last three are only for string values.
Here is a sample filter
```python
filters = {'name': {'op': 'contains', 'value': 'NYMEX'},
'ticker': {'op': 'eq', 'value': ['OB', 'OD']},
'opr_date': [{'op': 'ge', 'value': date(2019-05-01)}, {'op': 'ge', 'value': date(2019-05-02)}],
'contract_year': {'op': 'ge', 'value': 2020}}
```
* top
The "top" parameter specified the number of records to return
```python
top = 10
```
* skip
The "skip" parameter specified the number of records to skip
```python
skip = 100
```
* order_by
The "order_by" parameter specified a column to be ordered by
```python
order_by = 'opr_date desc'
```
## Method Return
All methods return a pandas' DataFrame.
```
name opr_date type value
0 daily curve name 2016-08-24 CLOSE 123.4
1 daily curve name 2016-08-25 CLOSE 123.5
2 daily curve name 2016-08-26 CLOSE 123.6
```
## Methods
* get_profile(username, group, name, config=None, select=None, filters=None, top=None, order_by=None, skip=None)
The method retrieves data for a specified profile and returns the following columns
* opr_date [,opr_hour, opr_minute], a, b, c, ...
Get data for a profile.
```python
result = client.get_profile('user.name', 'profile group', 'profile name')
```
Get data for a linked profile.
```python
result = client.get_profile('user.name', 'template group', 'template name', 'config name')
```
Add some filters
```python
filters = {'opr_hour': {'op': 'ge', 'value': 20},
'b' : {'op': 'ge', 'value': 10}}
result = client.get_profile('user.name', 'profile group', 'profile name', filters = filters, top=100)
```
* get_report(datasource, report, select=None, filters=None, top=None, order_by=None, skip=None)
This method retrieves data for a specified report and returns the following columns
* The date, attributes and observations of the requested report
Get data for a report in a date range.
```python
filters = {'opr_date': [{'op': 'ge', 'value': date(2019, 5, 1)}, {'op': 'le', 'value': date(2019, 5, 31)}]}
result = client.get_report('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', filters = filters)
```
Select columns and add more filters.
```python
select = ['opr_date', 'ticker', 'contract_month', 'contract_year', 'settle']
filters = {'opr_date': [{'op': 'ge', 'value': date(2019, 5, 1)}, {'op': 'le', 'value': date(2019, 5, 31)}],
'ticker': {'op': 'eq', 'value': ['OB', 'OD']}
result = client.get_report('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', select = select, filters = filters)
```
* get_forward_curve(name=None, select=None, filters=None, top=None, order_by=None, skip=None)
The method retrieves data for forward curves and returns the following columns
* name, opr_date, contract_start, contract_end, contract_code, contract_year, type, value, date_modified
Get data for a curve in a date range.
```python
filters = {'opr_date': {'op': 'eq', 'value': date(2019, 5, 1)}}
name = 'my curve'
result = client.get_forward_curve(name, filters = filters)
```
Select columns and add more filters.
```python
select = ['name', 'opr_date', 'type', 'contract_year', 'contract_code', 'value']
filters = {'opr_date': {'op': 'eq', 'value': date(2019, 5, 1)}}
name = ['my curve 1', 'my curve 2']
result = client.get_forward_curve(name, select = select, filters = filters, top=100)
```
* get_forward_data_and_property(start_date, end_date, curve_filters, include_properties=None, top=None)
The method retrieves data and properties for forward curves filtered by properties, groups and batches and returns the following columns and additional property columns if include_properties argument is set to 'All' or a list of propoery names. Note that this method is based on ZE REST Web Services so some common parameters are not supported.
* name, group, opr_date, contract_start, contract_end, contract_code, contract_year, contract_granularity, type, value, date_modified
Get data for curves with specific properties and values.
```python
curve_filters = {
'properties':[
{'name': 'prop name1', 'values': ['prop value1']},
{'name': 'prop name2', 'values': ['prop value2']}
]
}
result = client.get_forward_data_and_property(date(2019, 5, 1), date(2019, 5, 1), curve_filters, include_properties='All')
```
* get_timeseries_curve(name=None, select=None, filters=None, top=None, order_by=None, skip=None)
The method retrieves data for time series curves and returns the following columns
* name, opr_date, opr_hour, opr_minute, type, value, date_modified
Get data for a curve in a date range.
```python
select = ['name', 'opr_date', 'opr_hour', 'type', 'value']
filters = {'opr_date': {'op': 'eq', 'value': date(2019, 5, 1)}}
name = ['my curve 1', 'my curve 2']
result = client.get_time_series_curve(name, select = select, filters = filters, top=100)
```
* get_timeseries_data_and_property(start_date, end_date, curve_filters, include_properties=None, top=None)
The method retrieves data and properties for time series curves filtered by properties, groups and batches and returns the following columns and additional property columns if include_properties argument is set to 'All' or a list of propoery names. Note that this method is based on ZE REST Web Services so some common parameters are not supported.
* name, group, opr_date, opr_hour, opr_minute, type, value, date_modified
Get data and properties for curves filtered by properties and groups.
```python
curve_filters = {
'groups': ['group1'],
'properties':[
{'name': 'prop name1', 'values': ['prop value1']},
{'name': 'prop name2', 'values': ['prop value2']}
]
}
result = client.get_timeseries_data_and_property(date(2019, 5, 1), date(2019, 5, 1), curve_filters, include_properties=['prop name1', 'prop name2'])
```
* get_options_curve(name=None, select=None, filters=None, top=None, order_by=None, skip=None)
The method retrieves data for options curves and returns the following columns
* name, opr_date, contract_start, contract_end, contract_code, contract_year, type, put_call, level_type, level_value, strip_unit, spread_length, contract_month_2, contract_year_2, contract_start_2, contract_end_2, value, date_modified
* get_options_data_and_property(start_date, end_date, curve_filters, include_properties=None, top=None)
The method retrieves data and properties for options curves filtered by properties, groups and batches and returns the following columns and additional property columns if include_properties argument is set to 'All' or a list of propoery names. Note that this method is based on ZE REST Web Services so some common parameters are not supported.
* name, group, opr_date, contract_start, contract_end, contract_code, contract_year, type, contract_granularity, put_call, level_type, level_value, strip_unit, spread_length, contract_month_2, contract_year_2, contract_start_2, contract_end_2, value, date_modified
* find_profile_users(select=None, filters=None, top=None)
The method retrieves a list of profile owners and returns the following columns
* name, first_name, last_name, group, email
Get all users.
```python
result = client.find_profile_users()
```
Get filtered users.
```python
filters= {'name': {'op': 'startswith', 'value': 'prefix'}}
result = client.find_profile_users(filters = filters)
```
* find_profile_groups(select=None, filters=None, top=None)
The method retrieves a list of profile groups and returns the following columns
* user, name
Get all profile groups.
```python
result = client.find_profile_groups()
```
Get filtered groups.
```python
filters= {'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_profile_groups(filters = filters)
```
* find_profiles(select=None, filters=None, top=None)
The method retrieves a list of profiles and returns the following columns
* user, group, name, version, is_template, data_entity_id
Get all profiles.
```python
result = client.find_profileps()
```
Get filtered profiles for a list of users.
```python
select = ['user', 'group', 'name', 'data_entity_id']
filters= {'user': {'op': 'eq', 'value': ['user1', 'user2']},
'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_profiless(select = select, filters = filters)
```
* find_linked_profiles(select=None, filters=None, top=None)
The method retrieves a list of linked profiles and returns the following columns
* user, template_group, template, group, name, version, data_entity_id
Get all linked profiles.
```python
result = client.find_linked_profileps()
```
Get filtered linked profiles for a list of users.
```python
filters= {'user': {'op': 'eq', 'value': ['user1', 'user2']},
'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_linked_profiless(filters = filters)
```
* find_data_sources(select=None, filters=None, top=None)
The method retrieves a list of data sources and returns the following columns
* name, display_name, description, url
Get all data sources.
```python
result = client.find_data_sources()
```
Get filtered data sources.
```python
filters= {'name': {'op': 'contains', 'value': 'NYMEX'}}
result = client.find_data_sources(filters = filters)
```
* find_reports(select=None, filters=None, top=None)
The method retrieves a list of data reports and returns the following columns
* source, name, display_name, granularity, gmt_offset, commodity, data_entity_id
Get all data reports.
```python
result = client.find_reports()
```
Get filtered data reports from a list of specified data sources
```python
filters= {'source': {'op': 'eq', 'value':['NYMEX', 'ICE']},
'name': {'op': 'contains', 'value': 'NYMEX'}}
result = client.find_data_sources(filters = filters)
```
* find_curve_groups(select=None, filters=None, top=None)
The method retrieves a list of curve groups and returns the following column
* name
Get all curve groups.
```python
result = client.find_curve_groups()
```
Get filtered curve groups.
```python
filters= {'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_curve_groups(filters = filters)
```
* find_curves(select=None, filters=None, top=None)
The method retrieves a list of curves and returns the following columns
* user, group, name
Get all curves.
```python
result = client.find_curves()
```
Get filtered curves from a list of specified curve groups.
```python
filters= {'group': {'op': 'eq', 'value': ['group 1', 'group 2']},
'name': {'op': 'contains', 'value': 'keyword'}}
result = client.find_curves(filters = filters)
```
* find_holiday_groups(select=None, filters=None, top=None)
The method retrieves a list of holiday groups and returns the following column
* group_name, group_display_name
Get all holiday groups.
```python
result = client.find_holiday_groups()
```
Get filtered holiday groups.
```python
filters= {'group_name': {'op': 'eq', 'value': 'us_holidays'}}
result = client.find_holiday_groups(filters = filters)
```
* get_holidays(select=None, filters=None, top=None, order_by=None, skip=None)
The method retrieves a list of holidays and returns the following column
* group_name, group_display_name, holiday_date, description
Get holidays for a specific group in a date range.
```python
filters = {'group_name': {'op': 'eq', 'value': ['us_holidays']},
'holiday_date': [{'op': 'ge', 'value': date(2022, 1, 1)},
{'op': 'le', 'value': date(2022, 12, 31)}]}
result = client.get_holidays(filters = filters)
```
* upload_curve_data(self, group_name, curve_name, payload, partial_update=None)
The method uploads curve data based on effective date. The payload requires two properties,
'effective_date', and 'data', where data is a list of curve data objects. If partial_update parameter
is passed as True, this will create a PATCH request to only update available 'data' records of the effective date.
If partial_update parameter is not passed, this will call a PUT request to delete and replace all records
on the given effective_date
```python
payload = {
"effectiveDate": datetime.date(2023, 1, 15),
"data": [
{
"date": datetime.date(2023, 1, 15),
"granularity": "value",
"type": "value",
"value": "value",
"comment": "comment"
},
{
"date": datetime.date(2023, 1, 16),
"granularity": "value",
"type": "value",
"value": "value",
"comment": "comment"
}
]
}
response = client.upload_curve_data(curve_group, curve_name, payload, partial_update=True)
```
* close()
Terminating the session.
* enable_debug()
Enabling debug mode.
* disable_debug()
Disabling debug mode.
| zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/docs/zema_user_guide.md | zema_user_guide.md |
# ZE Python OData Client User's Guide
## ODataClient class
To utilize ZE Python OData Client, an instance of ODataClient class needs to be created. The constructor of the class takes four parameters
1. The URL of ZEMA Data Direct server
2. The name of the user who has ZEMA OData license
3. The password
4. The client name
```python
from zeclient.odata_client import ODataClient
datadirect_url = "http://host.company.com/datadirect";
client = ODataClient(datadirect_url, 'user.name', 'password', 'Client')
```
## Entities and Columns
Here is a list of entities and columns that are accessible by the client methods. All columns can be used in the *filters* parameter of the methods mentioned below.
|Entity|Columns|Description|
| ---- | ----- | --------- |
|data_sources|name, display_name, description, url|a list of data sources|
|reports|source, name, display_name, granularity, gmt_offset, commodity, data_entity_id|a list of data reports|
|report_data____nnnnn|unknown|a specific report data entity and different report data entities have different columns|
|profile_users|name, first_name, last_name, group, email|a list of profile owners|
|profile_groups|user, name|a list of profile groups|
|profiles|user, group, name, version, is_template, data_entity_id|a list of profiles|
|linked_profiles|user, template_group, template, group, name, version, data_entity_id| a list of linked profiles|
|profile_data____nnnnn|unknown|a specific profile data entity and different profile data entities have different columns|
|curve_groups|name|a list of curve groups|
|curves|user, group, name|a list of curves|
|futures_curve_data|name,opr_date, contract_start, contract_end, contract_code, contract_year, type, value, date_modified|forward curve data|
|time_series_curve_data|name,opr_date, opr_hour, opr_minute, type, value, date_modified|time series curve data|
## Common Method Parameters
There are three common parameters used by all the methods.
* select
The "select" parameter specifies a list of columns to select.
```python
select = ['name', 'opr_date', 'type', 'value']
```
* filters
The "filters" parameter is used to filter data based on the specified column values. The parameter is an JSON object. A basic filter contains three elements.
```
{'column_name': {'op': 'operator', 'value': ['column value 1', 'column value 2']}}
```
The operator can be 'eq', 'ge', 'gt', 'le', 'lt', 'contains', 'startswith', 'endswith'. Note that the last three are only for string values.
Here is a sample filter
```python
filters = {'name': {'op': 'contains', 'value': 'NYMEX'},
'ticker': {'op': 'eq', 'value': ['OB', 'OD']},
'opr_date': [{'op': 'ge', 'value': date(2019-05-01)}, {'op': 'ge', 'value': date(2019-05-02)}],
'contract_year': {'op': 'ge', 'value': 2020}}
```
* top
The "top" parameter specified the number of records to return
```python
top = 10
```
## Method Return
All methods return a pandas' DataFrame.
```
name opr_date type value
0 daily curve name 2016-08-24 CLOSE 123.4
1 daily curve name 2016-08-25 CLOSE 123.5
2 daily curve name 2016-08-26 CLOSE 123.6
```
## Methods
* get_profile(username, group, name, config=None, select=None, filters=None, top=None)
The method retrieves data for a specified profile.
Get data for a profile.
```python
result = client.get_profile('user.name', 'profile group', 'profile name')
```
Get data for a linked profile.
```python
result = client.get_profile('user.name', 'template group', 'template name', 'config name')
```
Add some filters
```python
filters = {'opr_hour': {'op': 'ge', 'value': 20},
'b' : {'op': 'ge', 'value': 10}}
result = client.get_profile('user.name', 'profile group', 'profile name', filters = filters, top=100)
```
* get_report(datasource, report, select=None, filters=None, top=None, order_by=None)
This method retrieves data for a specified report.
Get data for a report in a date range.
```python
filters = {'opr_date': [{'op': 'ge', 'value': date(2019, 5, 1)}, {'op': 'le', 'value': date(2019, 5, 31)}]}
result = client.get_report('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', filters = filters)
```
Select columns and add more filters.
```python
select = ['opr_date', 'ticker', 'contract_month', 'contract_year', 'settle']
filters = {'opr_date': [{'op': 'ge', 'value': date(2019, 5, 1)}, {'op': 'le', 'value': date(2019, 5, 31)}],
'ticker': {'op': 'eq', 'value': ['OB', 'OD']}
result = client.get_report('NYMEX', 'NYMEX_FUTURES_SETTLEMENT', select = select, filters = filters)
```
* get_forward_curve(name=None, select=None, filters=None, top=None)
The method retrieves data for forward curves.
Get data for a curve in a date range.
```python
filters = {'opr_date': {'op': 'eq', 'value': date(2019, 5, 1)}}
name = 'my curve'
result = client.get_forward_curve(name, filters = filters)
```
Select columns and add more filters.
```python
select = ['name', 'opr_date', 'type', 'contract_year', 'contract_code', 'value']
filters = {'opr_date': {'op': 'eq', 'value': date(2019, 5, 1)}}
name = ['my curve 1', 'my curve 2']
result = client.get_forward_curve(name, select = select, filters = filters, top=100)
```
* get_timeseries_curve(name=None, select=None, filters=None, top=None)
The method retrieves data for time series curves.
Get data for a curve in a date range.
```python
select = ['name', 'opr_date', 'opr_hour', 'type', 'value']
filters = {'opr_date': {'op': 'eq', 'value': date(2019, 5, 1)}}
name = ['my curve 1', 'my curve 2']
result = client.get_time_series_curve(name, select = select, filters = filters, top=100)
```
* get_options_curve(name=None, select=None, filters=None, top=None)
The method retrieves data for options curves.
* find_profile_users(select=None, filters=None, top=None)
The method retrieves a list of profile owners.
Get all users.
```python
result = client.find_profile_users()
```
Get filtered users.
```python
filters= {'name': {'op': 'startswith', 'value': 'prefix'}}
result = client.find_profile_users(filters = filters)
```
* find_profile_groups(select=None, filters=None, top=None)
The method retrieves a list of profile groups.
Get all profile groups.
```python
result = client.find_profile_groups()
```
Get filtered groups.
```python
filters= {'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_profile_groups(filters = filters)
```
* find_profiles(select=None, filters=None, top=None)
The method retrieves a list of profiles.
Get all profiles.
```python
result = client.find_profileps()
```
Get filtered profiles for a list of users.
```python
select = ['user', 'group', 'name', 'data_entity_id']
filters= {'user': {'op': 'eq', 'value': ['user1', 'user2']},
'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_profiless(select = select, filters = filters)
```
* find_linked_profiles(select=None, filters=None, top=None)
The method retrieves a list of linked profiles.
Get all linked profiles.
```python
result = client.find_linked_profileps()
```
Get filtered linked profiles for a list of users.
```python
filters= {'user': {'op': 'eq', 'value': ['user1', 'user2']},
'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_linked_profiless(filters = filters)
```
* find_data_sources(select=None, filters=None, top=None)
The method retrieves a list of data sources.
Get all data sources.
```python
result = client.find_data_sources()
```
Get filtered data sources.
```python
filters= {'name': {'op': 'contains', 'value': 'NYMEX'}}
result = client.find_data_sources(filters = filters)
```
* find_reports(select=None, filters=None, top=None)
The method retrieves a list of data reports.
Get all data reports.
```python
result = client.find_reports()
```
Get filtered data reports from a list of specified data sources
```python
filters= {'source': {'op': 'eq', 'value':['NYMEX', 'ICE']},
'name': {'op': 'contains', 'value': 'NYMEX'}}
result = client.find_data_sources(filters = filters)
```
* find_curve_groups(select=None, filters=None, top=None)
The method retrieves a list of curve groups.
Get all curve groups.
```python
result = client.find_curve_groups()
```
Get filtered curve groups.
```python
filters= {'name': {'op': 'contains', 'value': 'nymex'}}
result = client.find_curve_groups(filters = filters)
```
* find_curves(select=None, filters=None, top=None)
The method retrieves a list of curves.
Get all curves.
```python
result = client.find_curves()
```
Get filtered curves from a list of specified curve groups.
```python
filters= {'group': {'op': 'eq', 'value': ['group 1', 'group 2']},
'name': {'op': 'contains', 'value': 'keyword'}}
result = client.find_curves(filters = filters)
```
* close()
Terminating the session.
* enable_debug()
Enabling debug mode.
* disable_debug()
Disabling debug mode.
| zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/docs/odata_user_guide.md | odata_user_guide.md |
# ZE Python SOAP Proxy User's Guide
## SoapClient class
SoapProxy utilizes *zeep* web services python module to convert method calls to SOAP requests. The parameter of each method is a JSON object and each element in the object maps to the xml element in the SOAP requests. The SoapClient class wraps all the methods for the common use cases. The constructor of the class takes four parameters
1. The WSDL URL
2. The name of the user
3. The password
4. The client name
```python
from zeclient.soap_client import SoapClient
import zeclient.soap_util as util
wsdl = "http://host.company.com/datadirect/services/dd?wsdl";
proxy = SoapProxy(wsdl, 'user.name', 'password', 'Client')
```
## Methods
* execute_profile()
The method executes a profile and returns the profile data.
```python
result = proxy.execute_profile({'profileOwner': 'user.name',
'profileGroup': 'profile group',
'profileName': 'profile name'
})
util.print_profile_result(result)
```
* execute_report_query()
The method retrieves data for a specific report.
For Analytic result
```python
result = proxy.execute_report_query('Time Series', {'useDisplayName': 'false',
'dataRequest':
{'options': {'dataOptions': {'precision': '5'},
'timelineOptions': {'startDate': '06/11/2018',
'endDate': '06/15/2018',
'interval': 'Daily'}
}
,'dataSeries': [{'dataSource': 'Foreign Exchange',
'dataReport': 'BANK_CANADA_EXCH_RATES_ALL',
'observation': 'FACTOR',
'attributes': [{'columnName': 'SRC_UNIT',
'values': ['USD']},
{'columnName': 'TGT_UNIT',
'values': ['CAD']}]
}]
}})
util.print_profile_result(result)
```
For Data Sheet result
```python
result = proxy.execute_report_query('Data Sheet', {'useDisplayName': 'false',
'dataRequest':
{'options': {'dataOptions': {'precision': '5'},
'timelineOptions': {'startDate': '06/11/2018',
'endDate': '06/15/2018',
'interval': 'Daily'}
}
,'dataSeries': [{'dataSource': 'Foreign Exchange',
'dataReport': 'BANK_CANADA_EXCH_RATES_ALL',
'observation': 'FACTOR',
'attributes': [{'columnName': 'SRC_UNIT',
'values': ['USD']},
{'columnName': 'TGT_UNIT',
'values': ['CAD', 'JPY']}]
}]
}})
util.print_profile_result(result)
```
* find_curves()
The method retrieves a list of curves according to the search criteria.
Find curves by properties
```python
result = proxy.find_curves({'properties': [{'label': 'Commodity', 'value': 'Gas'},
{'label': 'Hub', 'value': 'NBPG'}]})
util.print_curve_bean(result)
```
Find curves by names
```python
result = proxy.find_curves({'names': ['name1', 'name2'],
'namesStringMatchType': 'CONTAINS'})
util.print_curve_bean(result)
```
* get_forward_curve_data()
The method retrieves forward curve data for specified curves in a date range.
```python
result = proxy.get_forward_curve_data({'curveNames': ['test web service curve 123z'],
'startDate': '2016-09-02',
'endDate': '2016-09-02',
'includePropertiesInResponse': 'false',
'updatedOnly': 'false'})
util.print_forward_curve_data(result)
```
* get_time_series_curve_data()
The method retrieves time series curve data for specified curves in a date range.
```python
result = proxy.get_time_series_curve_data({'curveNames': ['tm - hourly'],
'startDate': '2019-01-03',
'endDate': '2019-01-03',
'groupBy': 'Curve Name', # or 'Effective Date'
'includePropertiesInResponse': 'false',
'updatedOnly': 'false'})
util.print_time_series_curve_data(result)
```
| zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/docs/soap_proxy_user_guide.md | soap_proxy_user_guide.md |
# ZE Python SOAP Client User's Guide
## SoapClient class
SoapClient utilizes SoapProxy module which is built on *zeep* web services python module to convert method calls to SOAP requests. The SoapClient class wraps all the methods for the common use cases. The constructor of the class takes four parameters
1. The Data Direct server URL
2. The name of the user
3. The password
4. The client name
```python
from zeclient.soap_client import SoapClient
datadirect_url = "http://host.company.com/datadirect";
client = SoapClient(datadirect_url, 'user.name', 'password', 'Client')
```
## Method Return
All methods return a pandas' DataFrame.
## Methods
* get_profile(username, group, name, config=None, effective_date=None)
The method executes a profile (analytic or linked analytic) and returns the profile data.
```python
result = client.get_profile('user.name', 'profile group', 'profile name')
```
Get data for a linked analytic
```python
result = client.get_profile('user.name', 'profile group', 'template name', config='linked anlytic name')
```
Specify an effective date
```python
result = client.get_profile('user.name', 'profile group', 'profile name', effective_date = date(2019, 5, 1))
```
Sample data
```
Date Hour Minute a b
0 2019-05-01 1 0 1.141 2.141
1 2019-05-01 2 0 1.141 3.141
2 2019-05-01 3 0 1.141 4.141
```
* get_report(datasource, report, start_date, end_date, observations=None, filters=None)
The method retrieves data for a specific report.
Get all observations for a report
```python
result = client.get_report('CME', 'CME_NYMEX_OPTIONS', date(2019, 5, 1), date(2019, 5, 2))
```
Get all selected observations for a report and apply attribute filters
```python
result = client.get_report('CME', 'CME_NYMEX_OPTIONS', date(2019, 5, 1), date(2019, 5, 2), observations = ['PX', 'SZ'], filters = {'TICKER': 'BA', 'CONTRACT_YEAR': [2020, 2021]})
```
Sample data
```
Date Commodity Type Ticker Contract Month Contract Year Volume Price
0 2019-05-01 Crude Oil Markets BA 1 2020 789230 14.00000
1 2019-05-01 Crude Oil Markets BA 2 2020 892032 14.00000
2 2019-05-01 Crude Oil Markets BA 3 2020 289934 13.99000
```
* find_curves(filters)
The method retrieves a list of curves according to the filters applied. There are three filters - 'names', 'groups' and 'properties'. The 'names' and 'groups' filters use four matching types - 'startswith', 'endswith', 'contains' and 'equals'.
Find curves by names and properties
```python
result = client.find_curves({'properties': {'Commodity': 'Gas', 'Hub': 'NBPG'},
                             'names': {'contains': ['cme', 'nymex']}})
```
Find curves by groups
```python
result = client.find_curves({'groups': {'equals': 'abc'}})
```
Sample data
```
user group name id class
0 1560 group name curve name 1 39683 Futures
1 1560 group name curve name 2 39684 Futures
2 1560 group name curve name 3 39685 Time Series
```
* get_forward_curve(start_date, end_date, name = None, property_filters = None)
The method retrieves forward curve data for specified curves in a date range. The 'property_filters' argument filters curves by their properties.
Get data by name
```python
result = client.get_forward_curve(date(2019, 5, 1), date(2019, 5, 1), 'curve name')
```
Get data by property filters
```python
result = client.get_forward_curve(date(2019, 5, 1), date(2019, 5, 1), property_filters={'Commodity': 'Gas', 'Hub': 'NBPG'})
```
Sample data
```
name opr_date contract_start contract_end contract_code contract_year type value date_modified
0 name1 2019-05-01 2019-09-01 2019-09-30 9 2019 SETTLE 1.23 2019-09-30 9:27:30.062
```
* get_timeseries_curve(start_date, end_date, name = None, property_filters = None)
The method retrieves time series curve data for specified curves in a date range.
```python
client.get_timeseries_curve(date(2019, 5, 1), date(2019, 5, 31), name = ['name 1', 'name 2'])
```
Sample data
```
name opr_date opr_hour opr_minute type value date_modified
0 name1 2019-05-01 3 0 CLOSE 1.23 2019-09-30 9:27:30.062
```
* get_options_curve(start_date, end_date, name = None, property_filters = None)
The method retrieves options curve data for specified curves in a date range.
```python
client.get_options_curve(date(2019, 5, 1), date(2019, 5, 31), name = ['name 1', 'name 2'])
```
Sample data
```
name opr_date contract_start contract_end contract_code contract_year type put_call level_type level_value strip_unit spread_length contract_start_2 contract_end_2 contract_month_2 contract_year_2 value date_modified
0 name1 2019-05-01 2019-09-01 2019-09-30 9 2019 SETTLE Call Strike Price 3.9 N/A 0 1010-01-01 1010-01-01 -1 -1 1.23 2019-09-30 9:27:30.062
```
| zeclient | /zeclient-1.1.3.tar.gz/zeclient-1.1.3/docs/soap_user_guide.md | soap_user_guide.md |
# Zecwallet-Python
###### A wrapper around Zecwallet Command Line LightClient, written in Python
------------
# Table of Contents
- [About](#about "About")
- [Installation](#installation "Installation")
- [Usage](#usage "Usage")
- [Examples](#examples "Examples")
# About
Zecwallet-Python is a simple wrapper around the Zecwallet Command Line LightClient written in Python, allowing Python programs to easily interact with a fully-capable, lightweight Zcash wallet. Using this package with Zecwallet, one can easily send and receive (shielded) transactions, encrypt and decrypt messages, fetch the current market value of Zcash, and so much more. This package makes all of the Zecwallet functionality easily available in Python, and uses no dependencies outside of Zecwallet and the Python Standard Library. Common use cases for this package include cryptocurrency trading bots, online payment processing for Zcash (including support for shielded transactions), and encrypted communication systems.
###### Please note that this project is independent from [Zecwallet](https://www.zecwallet.co/), and has not been audited for security and reliability. Use at your own risk.
# Installation
To use Zecwallet-Python, you will need to install [Zecwallet Command Line LightClient](https://github.com/adityapk00/zecwallet-light-cli/releases) first. You can do this by downloading their latest release, unzipping it, and then making note of the filepath to the `zecwallet-cli` executable on your system.
###### Note: The latest version of Zecwallet tested for full compatibility with Zecwallet-Python is v1.7.7
Example installation for most Linux distributions:
```bash
wget https://github.com/adityapk00/zecwallet-light-cli/releases/download/v1.7.7/linux-zecwallet-cli-v1.7.7.zip -O /tmp/zecwallet.zip
unzip /tmp/zecwallet.zip -d /home/ubuntu/ZcashWallet
```
Next, you will need to install Zecwallet-Python, which can be done using [pip](https://pypi.org/project/pip/):
```bash
pip3 install zecwallet
```
Alternatively, you may copy the `wallet.py` file from [our GitHub repository](https://github.com/P5vc/Zecwallet-Python/blob/main/zecwallet/wallet.py), and import that locally into your project.
# Usage
To interact with your Zcash wallet in Python, you must first import the Wallet class, then initialize it, providing the full filepath to the `zecwallet-cli` executable and your wallet decryption key. It is neither required nor recommended to provide the wallet decryption key unless you need to take advantage of functionality that requires the key.
```python3
from zecwallet.wallet import Wallet
myWallet = Wallet('/path/to/zecwallet-cli' , 'MyDecryptionKey')
```
Once you've instantiated your wallet, you'll have access to all of the following functions. These functions accept (sometimes optional) arguments as indicated below, and return the same datatypes returned by the Zecwallet CLI (usually a dictionary or a list).
###### Note that, as a wrapper, the descriptions, functionality, and returned results are nearly identical to those provided by Zecwallet.
```
| addresses()
| List current addresses in the wallet
|
| balance()
| Show the current ZEC balance in the wallet
|
| Transparent and Shielded balances, along with the addresses they belong to are displayed
|
| clear()
| Clear the wallet state, rolling back the wallet to an empty state.
|
| This command will clear all notes, utxos and transactions from the wallet, setting up the wallet to be synced from scratch.
|
| communicate(command)
| Send a custom command directly to zecwallet
|
| decrypt()
| Completely remove wallet encryption, storing the wallet in plaintext on disk
| Note 1: This will decrypt the seed and the sapling and transparent private keys and store them on disk.
| Note 2: If you've forgotten the password, the only way to recover the wallet is to restore
| from the seed phrase.
|
| decryptMessage(encryptedMessageBase64)
| Attempt to decrypt a message with all the view keys in the wallet.
|
| defaultFee(blockHeight='')
| Returns the default fee in zats for outgoing transactions
|
| encrypt(WALLET_ENCRYPTION_KEY)
| Encrypt the wallet with a password
| Note 1: This will encrypt the seed and the sapling and transparent private keys.
 |      Use 'decrypt' to permanently remove the encryption
| Note 2: If you forget the password, the only way to recover the wallet is to restore
| from the seed phrase.
|
| encryptMessage(address, memo)
| Encrypt a memo to be sent to a z-address offline
|
| NOTE: This command only returns the encrypted payload. It does not broadcast it. You are expected to send the encrypted payload to the recipient offline
|
| encryptionStatus()
| Check if the wallet is encrypted and if it is locked
|
| export()
 |      Export the private key for an individual wallet address.
 |      Note: To back up the whole wallet, use the 'seed' command instead
|
| getOption(optionName)
| Get a wallet option
|
| height()
| Get the latest block height that the wallet is at.
|
| importKey(spendingOrViewingKey, birthday, noRescan=False)
| Import an external spending or viewing key into the wallet
|
| Birthday is the earliest block number that has transactions belonging to the imported key. Rescanning will start from this block. If not sure, you can specify '0', which will start rescanning from the first sapling block.
| Note that you can import only the full spending (private) key or the full viewing key.
|
| info()
| Get info about the lightwalletd we're connected to
|
| lastTXID()
| Show the latest TxId in the wallet
|
| list(allMemos=False)
| List all incoming and outgoing transactions from this wallet
|
| If you include the 'allmemos' argument, all memos are returned in their raw hex format
|
| newShieldedAddress()
| Create a new shielded address in this wallet
|
| newTransparentAddress()
| Create a new transparent address in this wallet
|
| notes(all=False)
| Show all sapling notes and utxos in this wallet
|
| If you supply the "all = True" argument, all previously spent sapling notes and spent utxos are also included
|
| quit()
| Save the wallet to disk and quit
|
| Destroys the wallet instance
|
| rescan()
| Rescan the wallet, rescanning all blocks for new transactions
|
 |      This command will download all blocks since the initial block again from the light client server
| and attempt to scan each block for transactions belonging to the wallet.
|
| save()
| Save the wallet to disk
|
| The wallet is saved to disk. The wallet is periodically saved to disk (and also saved upon exit)
| but you can use this command to explicitly save it to disk
|
| seed()
| Show the wallet's seed phrase
|
| Your wallet is entirely recoverable from the seed phrase. Please save it carefully and don't share it with anyone
|
| send(destinationAddress, amountInZatoshis, memo='')
| Send ZEC to a given address(es)
|
| NOTE: The fee required to send this transaction is additionally deducted from your balance.
|
| sendProgress()
| Get the progress of any send transactions that are currently computing
|
| setOption(optionName, optionValue)
| Set a wallet option
|
| List of available options:
| download_memos : none | wallet | all
|
| shield(optionalAddress='')
| Shield all your transparent funds
|
| NOTE: The fee required to send this transaction is additionally deducted from your balance.
|
| sync()
| Sync the light client with the server
|
| syncStatus()
| Get the sync status of the wallet
|
| zecPrice()
| Get the latest ZEC price in the wallet's currency (USD)
```
# Examples
```python3
>>> from zecwallet.wallet import Wallet
>>> myWallet = Wallet('/home/ubuntu/ZcashWallet/zecwallet-cli' , 'decryptionKey')
>>> myWallet.zecPrice()
{'zec_price': Decimal('93.12'), 'fetched_at': 1654321098, 'currency': 'USD'}
>>> myWallet.newShieldedAddress()
['zs1tnk62y6sn4mwrwyxrhjxjth6lzlsaggmnkEXAMPLEwsftk760yxrsme44kp997eps0w6z4g7vd9']
>>> myWallet.save()
{'result': 'success'}
>>> myWallet.encryptMessage('zs1d0fx24crh2kuyqs7yp0jf4wswyuEXAMPLE8mgejmf7qev2jnhjhwevhvzgjczcjzptl9xsace80' , 'Hello World!')
{'encrypted_base64': 'WmNhc2hPZmZsaW5lTWVtSHORTENEDEXAMPLEUi0JRXAleZ4ep2yg=='}
>>> myWallet.send('zs1d0fx24crh2kuyqs7yp0jf4wswyuEXAMPLE8mgejmf7qev2jnhjhwevhvzgjczcjzptl9xsace80' , 123456 , 'Paying you back for coffee. Thanks again!')
{'result': 'success'}
```
| zecwallet | /zecwallet-1.2.0.tar.gz/zecwallet-1.2.0/README.md | README.md |
# Zeddo
> News CLI for lazy people
## Installation
```
pip install zeddo
```
## Set up
Create a [News API account](https://newsapi.org/register) and remember the API
key. (You can choose the free plan.)
Then run `zeddo` and enter the API key when prompted.
## Usage
```
$ zeddo
[1] Public Protector finds procurement of 'scooter ambulances' was improper (News24)
[2] Businessinsider.co.za | Salaries for Ramaphosa, ministers set to remain unchanged – for the second year in a row (News24)
[3] JUST IN | SCA rules 2018 ANC Free State election 'unlawful and unconstitutional' (News24)
[4] Specialized's Turbo Como SL Is a Comfy, Lightweight Cruiser (Wired)
[5] 24 Times Teen Dramas Tried To Go Outside Their Genre And It Was Just So Weird (Buzzfeed)
Please enter an article number to open:
```
## Configuration
*Advanced usage:*
```
$ zeddo -h
Usage: zeddo [OPTIONS]
Options:
-k, --api-key TEXT API key for News API
-l, --language TEXT Filter articles by language
-t, --category TEXT Filter by category
-s, --search TEXT Search by key phrase
-n, --max-count INTEGER Limit number of articles
-v, --version Show the version and exit.
-h, --help Show this message and exit.
-c, --config FILE Read configuration from FILE.
```
*Example config file:*
```toml
api_key = "<News API key>"
language = "en"
```
The location of the config file depends on the operating system:
- Mac OS X (not POSIX): `~/Library/Application Support/zeddo/config`
- Unix (not POSIX): `~/.config/zeddo/config`
- Mac OS X and Unix (POSIX): `~/.zeddo/config`
- Windows
  - Roaming: `C:\Users\<user>\AppData\Roaming\zeddo\config`
  - Not roaming: `C:\Users\<user>\AppData\Local\zeddo\config`
## License
Licensed under the GNU Public License v3.0
| zeddo | /zeddo-0.2.0.tar.gz/zeddo-0.2.0/README.md | README.md |
# 🎯 zedmath | Math
__Simple library for mathematical expressions__
VERSION: __1.0.0__
## INSTALLATION
- __USING PIP__
```bash
pip install zedmath
```
- __CLONE REPO__
```bash
git clone https://github.com/azizbekQozoqov/MATH.git
```
## USAGE
| Method names | Description | Arguments |
| --------------| ------------------------------| ----------------------------------|
| pow | Returns the power of a number (the example below passes a base and an exponent) | __x__ - Required - float or integer |
| abs | Returns the absolute value of the argument | __a__ - Required - float or integer |
| round | Rounds a number to a given precision in decimal digits | __a__ - Required - float or integer |
| sum | Returns the sum of the given numbers | __args__ - Required - integer, float, string, list[integer, float, string] |
| is_odd | Returns whether the given number is odd | __a__ - Required - integer |
| is_even | Returns whether the given number is even | __a__ - Required - integer |
| ceil | Rounds a number up to its nearest integer | __a__ - Required - integer, float |
| floor | Rounds a number down to its nearest integer | __a__ - Required - integer, float |
| sign | Returns 1, -1 or 0 for a positive, negative or zero number | __a__ - Required - integer, float |
| min | Returns the minimum number | __a__ - Required - integer, float, string, list[integer, float, string] |
| max | Returns the maximum number | __a__ - Required - integer, float, string, list[integer, float, string] |
| babylonian | Undocumented; presumably the Babylonian square-root method (see the usage sketch at the end of this document) | __S__ - Required - integer, float |
| digits | Returns all digits of the given number | __n__ - Required - integer, float |
- ### EXAMPLES
<code>zedmath.pow(x, y)</code> - Returns x raised to the power y (parameter names are illustrative)
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create a variable called "result" and assign it the result of zm.pow
result = zm.pow(4, 4)
# Print the result variable
print(result)
```
__output :__
```bash
256
```
<!-- -->
<code>zedmath.abs(a)</code> - Returns the absolute value of the argument
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create a variable called "result" and assign it the result of zm.abs
result = zm.abs(-16)
# Print the result variable
print(result)
```
__output :__
```bash
16
```
<!-- -->
<code>zedmath.round(a)</code> - Rounds a number to a given precision in decimal digits.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create variables "result" and "result2" and assign them the results of zm.round
result = zm.round(31.2)
result2 = zm.round(31.6)
# Print variables
print(result)
print(result2)
```
__output :__
```bash
31
32
```
<!-- -->
<code>zedmath.sum(*args)</code> - Returns the sum of the given numbers.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create a variable called "result" and assign it the result of zm.sum
numbers = [1, 3, 25, 32.5, [21.6, "66.6", ["4", "6"]]]
result = zm.sum(numbers)
# Print the result variables
print(result)
```
__output :__
```bash
159.7
```
<!-- -->
<code>zedmath.is_odd(a)</code> - Returns whether the given number is odd.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create variables "result" and "result2" and assign them the results of zm.is_odd
result = zm.is_odd(17)
result2 = zm.is_odd(16)
# Print the result variables
print(result)
print(result2)
```
__output :__
```bash
True
False
```
<!-- -->
<code>zedmath.is_even(a)</code> - Returns whether the given number is even.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create variables "result" and "result2" and assign them the results of zm.is_even
result = zm.is_even(17)
result2 = zm.is_even(16)
# Print the result variables
print(result)
print(result2)
```
__output :__
```bash
False
True
```
<!-- -->
<code>zedmath.ceil(a)</code> - Rounds a number up to its nearest integer.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create variables "result" and "result2" and assign them the results of zm.ceil
result = zm.ceil(11.7)
result2 = zm.ceil(11.3)
# Print the result variables
print(result)
print(result2)
```
__output :__
```bash
12
12
```
<!-- -->
<code>zedmath.floor(a)</code> - Returns the value of a rounded down to the nearest integer.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create variables "result" and "result2" and assign them the results of zm.floor
result = zm.floor(11.7)
result2 = zm.floor(11.3)
# Print the result variables
print(result)
print(result2)
```
__output :__
```bash
11
11
```
<!-- -->
<code>zedmath.sign(a)</code> - Returns 1, -1 or 0 depending on whether the given number is positive, negative or zero.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create variables "result", "result2" and "result3" and assign them the results of zm.sign
result = zm.sign(17)
result2 = zm.sign(-17)
result3 = zm.sign(0)
# Print the result variables
print(result)
print(result2)
print(result3)
```
__output :__
```bash
1
-1
0
```
<!-- -->
<code>zedmath.min(*args)</code> - Returns the minimum number.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create a variable called "result" and assign it the result of zm.min
numbers = [1, 3, 25, 32.5, [21.6, "66.6", ["4", "6"]]]
result = zm.min(numbers)
# Print the result variable
print(result)
```
__output :__
```bash
1.0
```
<!-- -->
<code>zedmath.max(*args)</code> - Returns the maximum number.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create a variable called "result" and assign it the result of zm.max
numbers = [1, 3, 25, 32.5, [21.6, "66.6", ["4", "6"]]]
result = zm.max(numbers)
# Print the result variable
print(result)
```
__output :__
```bash
66.6
```
<!-- -->
<code>zedmath.digits(n)</code> - Returns all digits of the given number.
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Create a variable called "result" and assign it the result of zm.digits
numbers = 178
result = zm.digits(numbers)
# Print the result variable
print(result)
```
__output :__
```bash
(1, 7, 8)
```
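<!-- -->
<code>zedmath.babylonian(S)</code> - Undocumented in the table above; judging by its name, it presumably computes the square root of S with the Babylonian (Heron's) method. A hypothetical usage sketch (the exact precision of the output may differ):
<!-- -->
__code :__
```python
# Import zedmath library
import zedmath as zm
# Compute the square root of 25 using the (presumed) Babylonian method
result = zm.babylonian(25)
# Print the result variable
print(result)
```
__output :__
```bash
5.0
```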
<!-- --> | zedmath | /zedmath-1.0.2.tar.gz/zedmath-1.0.2/README.md | README.md |
===============
zedstat
===============
.. image:: https://zed.uchicago.edu/logo/logo_zedstat.png
:height: 150px
:align: center
.. image:: https://zenodo.org/badge/529991779.svg
:target: https://zenodo.org/badge/latestdoi/529991779
.. class:: no-web no-pdf
:Author: ZeD@UChicago <zed.uchicago.edu>
:Description: Tools for ML statistics
:Documentation: https://zeroknowledgediscovery.github.io/zedstat/
:Example: https://github.com/zeroknowledgediscovery/zedstat/blob/master/examples/example1.ipynb
**Usage:**
.. code-block:: python
    import pandas as pd

    from zedstat import zedstat
zt=zedstat.processRoc(df=pd.read_csv('roc.csv'),
order=3,
total_samples=100000,
positive_samples=100,
alpha=0.01,
prevalence=.002)
zt.smooth(STEP=0.001)
zt.allmeasures(interpolate=True)
zt.usample(precision=3)
zt.getBounds()
print(zt.auc())
# find the high precision and high sensitivity operating points
zt.operating_zone(LRminus=.65)
rf0,txt0=zt.interpret(fpr=zt._operating_zone.fpr.values[0],number_of_positives=10)
rf1,txt1=zt.interpret(fpr=zt._operating_zone.fpr.values[1],number_of_positives=10)
display(zt._operating_zone)
print('high precision operation:\n','\n '.join(txt0))
print('high recall operation:\n','\n '.join(txt1))
| zedstat | /zedstat-0.0.117.tar.gz/zedstat-0.0.117/README.rst | README.rst |
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from zeebe_grpc import gateway_pb2 as zeebe__grpc_dot_gateway__pb2
class GatewayStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ActivateJobs = channel.unary_stream(
'/gateway_protocol.Gateway/ActivateJobs',
request_serializer=zeebe__grpc_dot_gateway__pb2.ActivateJobsRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.ActivateJobsResponse.FromString,
)
self.CancelProcessInstance = channel.unary_unary(
'/gateway_protocol.Gateway/CancelProcessInstance',
request_serializer=zeebe__grpc_dot_gateway__pb2.CancelProcessInstanceRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.CancelProcessInstanceResponse.FromString,
)
self.CompleteJob = channel.unary_unary(
'/gateway_protocol.Gateway/CompleteJob',
request_serializer=zeebe__grpc_dot_gateway__pb2.CompleteJobRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.CompleteJobResponse.FromString,
)
self.CreateProcessInstance = channel.unary_unary(
'/gateway_protocol.Gateway/CreateProcessInstance',
request_serializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceResponse.FromString,
)
self.CreateProcessInstanceWithResult = channel.unary_unary(
'/gateway_protocol.Gateway/CreateProcessInstanceWithResult',
request_serializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.FromString,
)
self.DeployProcess = channel.unary_unary(
'/gateway_protocol.Gateway/DeployProcess',
request_serializer=zeebe__grpc_dot_gateway__pb2.DeployProcessRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.DeployProcessResponse.FromString,
)
self.DeployResource = channel.unary_unary(
'/gateway_protocol.Gateway/DeployResource',
request_serializer=zeebe__grpc_dot_gateway__pb2.DeployResourceRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.DeployResourceResponse.FromString,
)
self.FailJob = channel.unary_unary(
'/gateway_protocol.Gateway/FailJob',
request_serializer=zeebe__grpc_dot_gateway__pb2.FailJobRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.FailJobResponse.FromString,
)
self.ThrowError = channel.unary_unary(
'/gateway_protocol.Gateway/ThrowError',
request_serializer=zeebe__grpc_dot_gateway__pb2.ThrowErrorRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.ThrowErrorResponse.FromString,
)
self.PublishMessage = channel.unary_unary(
'/gateway_protocol.Gateway/PublishMessage',
request_serializer=zeebe__grpc_dot_gateway__pb2.PublishMessageRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.PublishMessageResponse.FromString,
)
self.ResolveIncident = channel.unary_unary(
'/gateway_protocol.Gateway/ResolveIncident',
request_serializer=zeebe__grpc_dot_gateway__pb2.ResolveIncidentRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.ResolveIncidentResponse.FromString,
)
self.SetVariables = channel.unary_unary(
'/gateway_protocol.Gateway/SetVariables',
request_serializer=zeebe__grpc_dot_gateway__pb2.SetVariablesRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.SetVariablesResponse.FromString,
)
self.Topology = channel.unary_unary(
'/gateway_protocol.Gateway/Topology',
request_serializer=zeebe__grpc_dot_gateway__pb2.TopologyRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.TopologyResponse.FromString,
)
self.UpdateJobRetries = channel.unary_unary(
'/gateway_protocol.Gateway/UpdateJobRetries',
request_serializer=zeebe__grpc_dot_gateway__pb2.UpdateJobRetriesRequest.SerializeToString,
response_deserializer=zeebe__grpc_dot_gateway__pb2.UpdateJobRetriesResponse.FromString,
)
class GatewayServicer(object):
"""Missing associated documentation comment in .proto file."""
def ActivateJobs(self, request, context):
"""
Iterates through all known partitions round-robin and activates up to the requested
maximum and streams them back to the client as they are activated.
Errors:
INVALID_ARGUMENT:
- type is blank (empty string, null)
- worker is blank (empty string, null)
- timeout less than 1
- maxJobsToActivate is less than 1
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CancelProcessInstance(self, request, context):
"""
Cancels a running process instance
Errors:
NOT_FOUND:
- no process instance exists with the given key
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CompleteJob(self, request, context):
"""
Completes a job with the given variables, which allows completing the associated service task.
Errors:
NOT_FOUND:
- no job exists with the given job key. Note that since jobs are removed once completed,
it could be that this job did exist at some point.
FAILED_PRECONDITION:
- the job was marked as failed. In that case, the related incident must be resolved before
the job can be activated again and completed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateProcessInstance(self, request, context):
"""
Creates and starts an instance of the specified process. The process definition to use to
create the instance can be specified either using its unique key (as returned by
DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the
latest deployed version. Note that only processes with none start events can be started through
this command.
Errors:
NOT_FOUND:
- no process with the given key exists (if processDefinitionKey was given)
- no process with the given process ID exists (if bpmnProcessId was given but version was -1)
- no process with the given process ID and version exists (if both bpmnProcessId and version were given)
FAILED_PRECONDITION:
- the process definition does not contain a none start event; only processes with none
start event can be started manually.
INVALID_ARGUMENT:
- the given variables argument is not a valid JSON document; it is expected to be a valid
JSON document where the root node is an object.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateProcessInstanceWithResult(self, request, context):
"""
Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeployProcess(self, request, context):
"""
Deploys one or more processes to Zeebe. Note that this is an atomic call,
i.e. either all processes are deployed, or none of them are.
Errors:
INVALID_ARGUMENT:
- no resources given.
- if at least one resource is invalid. A resource is considered invalid if:
- the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML)
- the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeployResource(self, request, context):
"""
Deploys one or more resources (e.g. processes or decision models) to Zeebe.
Note that this is an atomic call, i.e. either all resources are deployed, or none of them are.
Errors:
INVALID_ARGUMENT:
- no resources given.
- if at least one resource is invalid. A resource is considered invalid if:
- the content is not deserializable (e.g. detected as BPMN, but it's broken XML)
- the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def FailJob(self, request, context):
"""
Marks the job as failed; if the retries argument is positive, then the job will be immediately
activatable again, and a worker could try again to process it. If it is zero or negative however,
an incident will be raised, tagged with the given errorMessage, and the job will not be
activatable until the incident is resolved.
Errors:
NOT_FOUND:
- no job was found with the given key
FAILED_PRECONDITION:
- the job was not activated
- the job is already in a failed state, i.e. ran out of retries
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ThrowError(self, request, context):
"""
Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead.
Errors:
NOT_FOUND:
- no job was found with the given key
FAILED_PRECONDITION:
- the job is not in an activated state
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PublishMessage(self, request, context):
"""
Publishes a single message. Messages are published to specific partitions computed from their
correlation keys.
Errors:
ALREADY_EXISTS:
- a message with the same ID was previously published (and is still alive)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ResolveIncident(self, request, context):
"""
Resolves a given incident. This simply marks the incident as resolved; most likely a call to
UpdateJobRetries or SetVariables will be necessary to actually resolve the
problem, following by this call.
Errors:
NOT_FOUND:
- no incident with the given key exists
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetVariables(self, request, context):
"""
Updates all the variables of a particular scope (e.g. process instance, flow element instance)
from the given JSON document.
Errors:
NOT_FOUND:
- no element with the given elementInstanceKey exists
INVALID_ARGUMENT:
- the given variables document is not a valid JSON document; valid documents are expected to
be JSON documents where the root node is an object.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Topology(self, request, context):
"""
Obtains the current topology of the cluster the gateway is part of.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateJobRetries(self, request, context):
"""
Updates the number of retries a job has left. This is mostly useful for jobs that have run out of
retries, should the underlying problem be solved.
Errors:
NOT_FOUND:
- no job exists with the given key
INVALID_ARGUMENT:
- retries is not greater than 0
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_GatewayServicer_to_server(servicer, server):
rpc_method_handlers = {
'ActivateJobs': grpc.unary_stream_rpc_method_handler(
servicer.ActivateJobs,
request_deserializer=zeebe__grpc_dot_gateway__pb2.ActivateJobsRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.ActivateJobsResponse.SerializeToString,
),
'CancelProcessInstance': grpc.unary_unary_rpc_method_handler(
servicer.CancelProcessInstance,
request_deserializer=zeebe__grpc_dot_gateway__pb2.CancelProcessInstanceRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.CancelProcessInstanceResponse.SerializeToString,
),
'CompleteJob': grpc.unary_unary_rpc_method_handler(
servicer.CompleteJob,
request_deserializer=zeebe__grpc_dot_gateway__pb2.CompleteJobRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.CompleteJobResponse.SerializeToString,
),
'CreateProcessInstance': grpc.unary_unary_rpc_method_handler(
servicer.CreateProcessInstance,
request_deserializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceResponse.SerializeToString,
),
'CreateProcessInstanceWithResult': grpc.unary_unary_rpc_method_handler(
servicer.CreateProcessInstanceWithResult,
request_deserializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.SerializeToString,
),
'DeployProcess': grpc.unary_unary_rpc_method_handler(
servicer.DeployProcess,
request_deserializer=zeebe__grpc_dot_gateway__pb2.DeployProcessRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.DeployProcessResponse.SerializeToString,
),
'DeployResource': grpc.unary_unary_rpc_method_handler(
servicer.DeployResource,
request_deserializer=zeebe__grpc_dot_gateway__pb2.DeployResourceRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.DeployResourceResponse.SerializeToString,
),
'FailJob': grpc.unary_unary_rpc_method_handler(
servicer.FailJob,
request_deserializer=zeebe__grpc_dot_gateway__pb2.FailJobRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.FailJobResponse.SerializeToString,
),
'ThrowError': grpc.unary_unary_rpc_method_handler(
servicer.ThrowError,
request_deserializer=zeebe__grpc_dot_gateway__pb2.ThrowErrorRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.ThrowErrorResponse.SerializeToString,
),
'PublishMessage': grpc.unary_unary_rpc_method_handler(
servicer.PublishMessage,
request_deserializer=zeebe__grpc_dot_gateway__pb2.PublishMessageRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.PublishMessageResponse.SerializeToString,
),
'ResolveIncident': grpc.unary_unary_rpc_method_handler(
servicer.ResolveIncident,
request_deserializer=zeebe__grpc_dot_gateway__pb2.ResolveIncidentRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.ResolveIncidentResponse.SerializeToString,
),
'SetVariables': grpc.unary_unary_rpc_method_handler(
servicer.SetVariables,
request_deserializer=zeebe__grpc_dot_gateway__pb2.SetVariablesRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.SetVariablesResponse.SerializeToString,
),
'Topology': grpc.unary_unary_rpc_method_handler(
servicer.Topology,
request_deserializer=zeebe__grpc_dot_gateway__pb2.TopologyRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.TopologyResponse.SerializeToString,
),
'UpdateJobRetries': grpc.unary_unary_rpc_method_handler(
servicer.UpdateJobRetries,
request_deserializer=zeebe__grpc_dot_gateway__pb2.UpdateJobRetriesRequest.FromString,
response_serializer=zeebe__grpc_dot_gateway__pb2.UpdateJobRetriesResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'gateway_protocol.Gateway', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Gateway(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def ActivateJobs(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/gateway_protocol.Gateway/ActivateJobs',
zeebe__grpc_dot_gateway__pb2.ActivateJobsRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.ActivateJobsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CancelProcessInstance(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/CancelProcessInstance',
zeebe__grpc_dot_gateway__pb2.CancelProcessInstanceRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.CancelProcessInstanceResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CompleteJob(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/CompleteJob',
zeebe__grpc_dot_gateway__pb2.CompleteJobRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.CompleteJobResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateProcessInstance(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/CreateProcessInstance',
zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateProcessInstanceWithResult(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/CreateProcessInstanceWithResult',
zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeployProcess(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/DeployProcess',
zeebe__grpc_dot_gateway__pb2.DeployProcessRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.DeployProcessResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeployResource(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/DeployResource',
zeebe__grpc_dot_gateway__pb2.DeployResourceRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.DeployResourceResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def FailJob(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/FailJob',
zeebe__grpc_dot_gateway__pb2.FailJobRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.FailJobResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ThrowError(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/ThrowError',
zeebe__grpc_dot_gateway__pb2.ThrowErrorRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.ThrowErrorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def PublishMessage(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/PublishMessage',
zeebe__grpc_dot_gateway__pb2.PublishMessageRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.PublishMessageResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ResolveIncident(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/ResolveIncident',
zeebe__grpc_dot_gateway__pb2.ResolveIncidentRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.ResolveIncidentResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetVariables(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/SetVariables',
zeebe__grpc_dot_gateway__pb2.SetVariablesRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.SetVariablesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Topology(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/Topology',
zeebe__grpc_dot_gateway__pb2.TopologyRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.TopologyResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateJobRetries(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/gateway_protocol.Gateway/UpdateJobRetries',
zeebe__grpc_dot_gateway__pb2.UpdateJobRetriesRequest.SerializeToString,
zeebe__grpc_dot_gateway__pb2.UpdateJobRetriesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | zeebe-grpc | /zeebe_grpc-8.0.1-py3-none-any.whl/zeebe_grpc/gateway_pb2_grpc.py | gateway_pb2_grpc.py |
# Zeebe Worker
A simple Zeebe worker wrapper to let developers focus on what matters.
## Install
`pip install zeebe-worker` or use your preferred package manager.
See https://pypi.org/project/zeebe-worker/#history for available versions.
## Usage
```python
import json

from zeebe_worker import ZeebeWorker
from extensions import zeebe_stub
from config import worker_name
class MyWorker(ZeebeWorker):
def my_task_type_handler(self, job):
"""Handling my_task_type
"""
variables = json.loads(job.variables)
if something_fails:
# This will trigger a FailJobRequest with the exception
raise Exception
return variables
def another_task_type_handler(self, job):
"""Handles another task
"""
# This will always succeed as an exception will never be raised
pass
# Create your own class instance with your own configuration
my_worker = MyWorker(zeebe_stub, worker_name)
# Subscribe to a task type (uses threading.Thread for concurrency)
my_worker.subscribe('my_task_type', 'my_task_type_handler')
my_worker.subscribe('my-task-typo', 'my_task_type_handler')
my_worker.subscribe('another_task_type', 'another_task_type_handler')
```
## API
### `ZeebeWorker.__init__`
Initiates the worker class with the set defaults.
| arg | desc | default |
| --- | ---- | ------- |
| stub | The grpc stub to connect to Zeebe with | - |
| worker_name | The worker_name to send along to Zeebe (mainly for debugging purposes) | - |
| timeout | Number of milliseconds for a job to timeout | 5\*60\*1000 (5 minutes) |
| request_timeout | Long polling: number of milliseconds for an ActivateJobs request to timeout | 1\*60\*1000 (1 minute) |
| max_jobs_to_activate | Maximum amount of jobs to activate in one request | 1 |
| backoff_interval | Number of milliseconds to backoff when unable to reach Zeebe | 5\*1000 (5 seconds) |
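For example, the stub can be built with the `zeebe_grpc` package (a minimal sketch; `localhost:26500` is Zeebe's default gateway address and an assumption here):

```python
import grpc
from zeebe_grpc import gateway_pb2_grpc

# Open a channel to the Zeebe gateway and create the gRPC stub
channel = grpc.insecure_channel('localhost:26500')
stub = gateway_pb2_grpc.GatewayStub(channel)
worker = MyWorker(stub, 'my-worker')
```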
### `ZeebeWorker.subscribe`
Subscribes the target to the task type concurrently.
| arg | desc | default |
| --- | ---- | ------- |
| task_type | The task or job type to subscribe to | - |
| target | The function to execute. When using a string, it will convert that to the method within the current class | - |
| timeout | Number of milliseconds for the jobs which are activated to timeout | set at class instantiation |
| request_timeout | Number of milliseconds for the ActivateJobs request to timeout | set at class instantiation |
| max_jobs_to_activate | Maximum amount of jobs to activate in one request | set at class instantiation |
| autocomplete | Complete jobs when the handler returns a non-error | False |
| backoff_interval | Number of milliseconds to backoff when unable to reach Zeebe | set at class instantiation |
## Target functions
Your own target function must accept one argument, preferably called `job`. This will be provided
as Zeebe's `ActivatedJob` ([ref](https://docs.zeebe.io/reference/grpc.html#output-activatejobsresponse)).
Extract the variables using `variables = json.loads(job.variables)`.
### Fail a job
Raising **any exception** in the function will send a FailJobRequest to Zeebe with the raised exception.
### Complete a job
A CompleteJobRequest will be sent for the job if the function executes without raising an exception (provided the subscription was created with `autocomplete=True`).
#### Setting variables
When the function returns a dict, it will send this dict as variables with the CompleteJobRequest.
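For example (a minimal sketch; `amount` and `total` are hypothetical variable names):

```python
def my_task_type_handler(self, job):
    variables = json.loads(job.variables)
    # The returned dict is sent as variables with the CompleteJobRequest
    return {'total': variables.get('amount', 0) * 2}
```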
## Compatibility
| Zeebe Worker | Zeebe |
| --- | --- |
| 0.2.x | >= 0.23 |
| 0.1.0 | 0.22.1 |
| zeebe-worker | /zeebe_worker-0.2.4.tar.gz/zeebe_worker-0.2.4/README.md | README.md |
from threading import Thread
import json
import time
import logging
import traceback
import grpc
from zeebe_grpc import gateway_pb2 as zeebe
logger = logging.getLogger(__name__)
class ZeebeWorker:
def __init__(self, stub, worker_name, timeout=5 * 60 * 1000, request_timeout=1 * 60 * 1000,
max_jobs_to_activate=1, backoff_interval=5 * 1000):
"""Initiate a worker class with a stub, worker_name and some defaults
"""
self.stub = stub
self.worker_name = worker_name
self.timeout = timeout
self.request_timeout = request_timeout
self.max_jobs_to_activate = max_jobs_to_activate
self.backoff_interval = backoff_interval
self.threads = {}
def subscribe(self, task_type, target, timeout=None, request_timeout=None,
max_jobs_to_activate=None, autocomplete=False, backoff_interval=None):
"""Subscribe to a task_type in a separate thread.
Sets defaults based on class instance defaults.
Runs _subscribe in a thread using the callable target or converts the target string to a
method of self.
"""
timeout = timeout or self.timeout
request_timeout = request_timeout or self.request_timeout
max_jobs_to_activate = max_jobs_to_activate or self.max_jobs_to_activate
backoff_interval = backoff_interval or self.backoff_interval
if not callable(target):
target = getattr(self, target)
thread = Thread(target=self._subscribe,
args=[task_type, target, autocomplete],
kwargs={
'timeout': timeout,
'request_timeout': request_timeout,
'max_jobs_to_activate': max_jobs_to_activate,
'backoff_interval': backoff_interval})
thread.start()
self.threads[task_type] = thread
def _subscribe(self, task_type, target, autocomplete, timeout, request_timeout,
max_jobs_to_activate, backoff_interval):
"""Handle communication with Zeebe
It sends an ActivateJobsRequest with given params.
When a job is received, it handles the job with target func.
It catches any unhandled exception (through BaseException) and sends a FailJobRequest with
the exception when this happens.
"""
while True:
logger.debug(f'Polling for {task_type}')
try:
req = zeebe.ActivateJobsRequest(
type=task_type,
worker=self.worker_name,
timeout=timeout,
requestTimeout=request_timeout,
maxJobsToActivate=max_jobs_to_activate)
# ActivateJobsResponse returns as a stream, therefore a loop is used
for resp in self.stub.ActivateJobs(req):
for job in resp.jobs:
                        logger.info(f'Handling job: {job.key} in instance: {job.workflowInstanceKey}')
try:
resp_variables = target(job)
if not isinstance(resp_variables, dict):
resp_variables = {}
if autocomplete:
complete_job_req = zeebe.CompleteJobRequest(
jobKey=job.key, variables=json.dumps(resp_variables))
self.stub.CompleteJob(complete_job_req)
                            logger.info(f'Job handled: {job.key} in instance: {job.workflowInstanceKey}')
# Catches every exception (https://docs.python.org/3.6/library/exceptions.
# html#exception-hierarchy)
except BaseException as e:
logger.exception(e)
fail_job_req = zeebe.FailJobRequest(
jobKey=job.key, errorMessage=traceback.format_exc())
self.stub.FailJob(fail_job_req)
                            logger.info(f'Job failed: {job.key} in instance: {job.workflowInstanceKey}')
except grpc.RpcError as e:
# All gRPC errors are caught, some need a backoff, some don't.
# gRPC Statuscode dcos: https://github.com/grpc/grpc/blob/master/doc/statuscodes.md
# Zeebe error docs: https://docs.zeebe.io/reference/grpc.html#error-handling
# Handling errors with a backoff
if e.code() == grpc.StatusCode.UNAVAILABLE:
logger.error('Cannot connect to Zeebe, '
f'retrying in {backoff_interval} ms')
time.sleep(backoff_interval / 1000)
elif e.code() == grpc.StatusCode.RESOURCE_EXHAUSTED:
                logger.warning('Zeebe cannot handle this amount of requests, '
f'retrying in {backoff_interval} ms')
time.sleep(backoff_interval / 1000)
elif e.code() == grpc.StatusCode.INTERNAL:
                logger.warning('Zeebe had an internal error between the gateway and the broker '
f'retrying in {backoff_interval} ms')
time.sleep(backoff_interval / 1000)
# Silently handle all other errors and reconnect immediately
elif e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
logger.debug(f'No jobs for {task_type} found')
elif e.code() == grpc.StatusCode.CANCELLED:
logger.info(f'Request for {task_type} cancelled')
logger.debug(e, exc_info=True) | zeebe-worker | /zeebe_worker-0.2.4.tar.gz/zeebe_worker-0.2.4/zeebe_worker/main.py | main.py |
import torch
import torch.nn as nn
from einops import rearrange
from util import STFT
class Residual(nn.Module):
def __init__(self, in_channels, out_channels):
super(Residual, self).__init__()
if in_channels != out_channels:
stride = 2
self.residual = nn.Sequential(
nn.Conv2d(
in_channels, out_channels, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(out_channels),
nn.ReLU())
else:
stride = 1
self.residual = nn.Sequential()
        # both branches were identical, so a single Conv2d definition suffices;
        # the stride chosen above already handles the channel-change case
        self.conv1 = nn.Conv2d(
            in_channels, out_channels, kernel_size=(1, 9), stride=stride, padding=(0, 4), bias=False)
self.bn1 = nn.BatchNorm2d(out_channels)
self.conv2 = nn.Conv2d(
out_channels, out_channels, kernel_size=(1, 9), stride=1, padding=(0, 4), bias=False)
self.bn2 = nn.BatchNorm2d(out_channels)
self.relu = nn.ReLU()
def forward(self, inputs):
out = self.conv1(inputs)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
res = self.residual(inputs)
out = self.relu(out + res)
return out
class TCResNet(nn.Module):
def __init__(self, bins, n_channels, n_class):
super(TCResNet, self).__init__()
"""
Args:
bin: frequency bin or feature bin
"""
self.conv = nn.Conv2d(bins, n_channels[0], kernel_size=(1, 3), padding=(0, 1), bias=False)
layers = []
for in_channels, out_channels in zip(n_channels[0:-1], n_channels[1:]):
layers.append(Residual(in_channels, out_channels))
self.layers = nn.Sequential(*layers)
self.pool = nn.AdaptiveAvgPool2d(1)
self.linear = nn.Linear(n_channels[-1], n_class)
def forward(self, inputs):
"""
Args:
input
[B, 1, H, W] ~ [B, 1, freq, time]
reshape -> [B, freq, 1, time]
"""
B, C, H, W = inputs.shape
inputs = rearrange(inputs, "b c f t -> b f c t", c=C, f=H)
out = self.conv(inputs)
out = self.layers(out)
out = self.pool(out)
out = out.view(out.shape[0], -1)
out = self.linear(out)
return out
class STFT_TCResnet(nn.Module):
def __init__(self, filter_length, hop_length, bins, channels, channel_scale, num_classes):
super(STFT_TCResnet, self).__init__()
sampling_rate = 16000
self.filter_length = filter_length
self.hop_length = hop_length
self.bins = bins
self.channels = channels
self.channel_scale = channel_scale
self.num_classes = num_classes
self.stft_layer = STFT(self.filter_length, self.hop_length)
self.tc_resnet = TCResNet(self.bins, [int(cha * self.channel_scale) for cha in self.channels], self.num_classes)
def __spectrogram__(self, real, imag):
spectrogram = torch.sqrt(real ** 2 + imag ** 2)
return spectrogram
def forward(self, waveform):
real, imag = self.stft_layer(waveform)
spectrogram = self.__spectrogram__(real, imag)
logits = self.tc_resnet(spectrogram)
return logits | zeef | /zeef-0.1.3-py3-none-any.whl/examples/speech_command/model.py | model.py |
import random
import numpy as np
import torch.optim as optim
import torch.nn.functional as F
from util.data_loader import get_dataloader_keyword, readlines
from model import STFT_TCResnet
from zeef.data import Pool
from zeef.learner.torch import Learner
from zeef.strategy import MarginConfidence
SEED = 1234
# parameters
TOTAL_NUM = 3500
NUM_INIT_LB = 1000
NUM_QUERY = 250
NUM_ROUND = 10
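# TOTAL_NUM caps the candidate pool, NUM_INIT_LB seeds the initial labeled set,
# and each of the NUM_ROUND rounds queries NUM_QUERY additional labels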
if __name__ == '__main__':
random.seed(SEED)
class_list = ["yes", "no", "unknown", "silence"]
# load dataset
train_filename = readlines(f"./dataset/splits/train.txt")
valid_filename = readlines(f"./dataset/splits/valid.txt")
class_encoding = {category: index for index, category in enumerate(class_list)}
train_loader, test_loader = get_dataloader_keyword('./dataset', class_list, class_encoding, 128)
# prepare the data
X_train = []
Y_train = []
X_test = []
Y_test = []
    for batch_idx, (waveform, labels) in enumerate(train_loader):
        for i in range(len(labels)):  # the last batch may hold fewer than 128 samples
            X_train.append(waveform[i])
            Y_train.append(labels[i])
    for batch_idx, (waveform, labels) in enumerate(test_loader):
        for i in range(len(labels)):  # the last batch may hold fewer than 128 samples
            X_test.append(waveform[i])
            Y_test.append(labels[i])
X_train = X_train[:TOTAL_NUM]
Y_train = Y_train[:TOTAL_NUM]
# builds the model
model = STFT_TCResnet(
filter_length=256, hop_length=129, bins=129,
channels=[16, 24, 24, 32, 32, 48, 48], channel_scale=3, num_classes=len(class_list))
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)
torch_learner = Learner(model, criterion=F.cross_entropy, optimizer=optimizer)
# generate the data pool and the sampling strategy
data_pool = Pool(X_train)
# strategy = RandomSampling(data_pool, learner=torch_learner)
# strategy = EntropySampling(data_pool, learner=torch_learner)
strategy = MarginConfidence(data_pool, learner=torch_learner)
# strategy = LeastConfidence(data_pool, learner=torch_learner)
# strategy = RatioConfidence(data_pool, learner=torch_learner)
# strategy = KMeansSampling(data_pool, learner=torch_learner)
print("labeled data: ", len(data_pool.labeled_ids))
# init the labels
data_pool.label_by_ids(range(NUM_INIT_LB), Y_train[:NUM_INIT_LB])
print("labeled data: ", len(data_pool.labeled_ids))
# round 0: pretraining
strategy.learn(batch_size=128)
predictions = strategy.infer(X_test, batch_size=128)
acc = np.zeros(NUM_ROUND + 1)
acc[0] = sum(1 for x, y in zip(Y_test, predictions) if x == y) / len(Y_test)
print('Round 0\ntesting accuracy {}'.format(acc[0]))
# start the active learning process.
for r in range(1, NUM_ROUND + 1):
print('Round {}'.format(r))
# query by given strategy.
query_ids = strategy.query(NUM_QUERY)
data_pool.label_by_ids(query_ids,
[Y_train[i] for i in query_ids]) # update the data pool with newly labeled data.
print(f"labeled: {len(data_pool.labeled_ids)}, unlabeled: {len(data_pool.get_unlabeled_ids())}")
strategy.learn(batch_size=128) # update the model.
predictions = strategy.infer(X_test, batch_size=128) # round accuracy
acc[r] = sum(1 for x, y in zip(Y_test, predictions) if x == y) / len(Y_test)
print('testing accuracy {}'.format(acc[r]))
# print results
    print(acc)

# ---- end of file: examples/speech_command/torch_al.py (package: zeef) ----
import numpy as np
from scipy.signal import get_window
import torch
import torch.nn as nn
import torch.nn.functional as F
import librosa.util as librosa_util
from librosa.util import pad_center
from librosa.util import tiny
def window_sumsquare(window, n_frames, hop_length=200, win_length=800,
n_fft=800, dtype=np.float32, norm=None):
"""
# from librosa 0.6
Compute the sum-square envelope of a window function at a given hop length.
This is used to estimate modulation effects induced by windowing
observations in short-time fourier transforms.
Parameters
----------
window : string, tuple, number, callable, or list-like
Window specification, as in `get_window`
n_frames : int > 0
The number of analysis frames
hop_length : int > 0
The number of samples to advance between frames
win_length : [optional]
The length of the window function. By default, this matches `n_fft`.
n_fft : int > 0
The length of each analysis frame.
dtype : np.dtype
The data type of the output
Returns
-------
wss : np.ndarray, shape=`(n_fft + hop_length * (n_frames - 1))`
The sum-squared envelope of the window function
"""
if win_length is None:
win_length = n_fft
n = n_fft + hop_length * (n_frames - 1)
x = np.zeros(n, dtype=dtype)
# Compute the squared window at the desired length
win_sq = get_window(window, win_length, fftbins=True)
win_sq = librosa_util.normalize(win_sq, norm=norm) ** 2
win_sq = librosa_util.pad_center(win_sq, n_fft)
# Fill the envelope
for i in range(n_frames):
sample = i * hop_length
x[sample:min(n, sample + n_fft)] += win_sq[:max(0, min(n_fft, n - sample))]
return x
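# A small, hedged usage sketch for window_sumsquare() above (the values are
# arbitrary; not part of the original file):
def _demo_window_sumsquare():
    env = window_sumsquare('hann', n_frames=10, hop_length=200,
                           win_length=800, n_fft=800)
    # One squared-window envelope value per output sample:
    assert env.shape == (800 + 200 * (10 - 1),)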
class STFT(torch.nn.Module):
def __init__(self, filter_length=1024, hop_length=512, win_length=None, window='hann'):
"""
This module implements an STFT using 1D convolution and 1D transpose convolutions.
This is a bit tricky so there are some cases that probably won't work as working
out the same sizes before and after in all overlap add setups is tough. Right now,
this code should work with hop lengths that are half the filter length (50% overlap
between frames).
Keyword Arguments:
filter_length {int} -- Length of filters used (default: {1024})
hop_length {int} -- Hop length of STFT (restrict to 50% overlap between frames) (default: {512})
win_length {[type]} -- Length of the window function applied to each frame (if not specified, it
equals the filter length). (default: {None})
window {str} -- Type of window to use (options are bartlett, hann, hamming, blackman, blackmanharris)
(default: {'hann'})
"""
super(STFT, self).__init__()
self.filter_length = filter_length
self.hop_length = hop_length
self.win_length = win_length if win_length else filter_length
self.window = window
self.forward_transform = None
self.pad_amount = int(self.filter_length / 2)
scale = self.filter_length / self.hop_length
fourier_basis = np.fft.fft(np.eye(self.filter_length))
cutoff = int((self.filter_length / 2 + 1))
fourier_basis = np.vstack([
np.real(fourier_basis[:cutoff, :]),
np.imag(fourier_basis[:cutoff, :])])
forward_basis = torch.FloatTensor(fourier_basis[:, None, :])
assert (filter_length >= self.win_length)
# get window and zero center pad it to filter_length
fft_window = get_window(window, self.win_length, fftbins=True)
fft_window = pad_center(fft_window, filter_length)
fft_window = torch.from_numpy(fft_window).float()
forward_basis *= fft_window # window the bases
self.register_buffer('forward_basis', forward_basis.float())
def transform(self, input_data):
"""
Take input data (audio) to STFT domain.
Arguments:
input_data {tensor}
Tensor of floats, with shape (num_batch, num_samples)
Returns:
magnitude {tensor}
Magnitude of STFT with shape (num_batch,
num_frequencies, num_frames)
phase {tensor}
Phase of STFT with shape (num_batch,
num_frequencies, num_frames)
"""
num_batches = input_data.shape[0]
num_samples = input_data.shape[-1]
self.num_samples = num_samples
# similar to librosa, reflect-pad the input
input_data = input_data.view(num_batches, 1, num_samples)
input_data = F.pad(
input_data.unsqueeze(1), (self.pad_amount, self.pad_amount, 0, 0), mode='reflect')
input_data = input_data.squeeze(1)
forward_transform = F.conv1d(input_data, self.forward_basis, stride=self.hop_length, padding=0)
cutoff = int((self.filter_length / 2) + 1)
real_part = forward_transform[:, :cutoff, :]
imag_part = forward_transform[:, cutoff:, :]
# magnitude = torch.sqrt(real_part**2 + imag_part**2)
# phase = torch.atan2(imag_part.data, real_part.data)
return real_part, imag_part
def forward(self, input_data):
"""
Take input data (audio) to STFT domain and then back to audio.
Arguments:
input_data {tensor}
Tensor of floats, with shape (num_batch, num_samples)
Returns:
reconstruction {tensor}
Reconstructed audio given magnitude and phase. Of
shape (num_batch, num_samples)
"""
# self.magnitude, self.phase = self.transform(input_data)
real_part, imag_part = self.transform(input_data)
real_part = torch.unsqueeze(real_part, dim=1)
imag_part = torch.unsqueeze(imag_part, dim=1)
return real_part, imag_part
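# A hedged usage sketch for the STFT module above (batch and length values
# are arbitrary assumptions; not part of the original file):
def _demo_stft_shapes():
    stft = STFT(filter_length=256, hop_length=128)
    waveform = torch.randn(4, 16000)  # [num_batch, num_samples]
    real, imag = stft(waveform)
    # forward() unsqueezes a channel dim: [B, 1, filter_length // 2 + 1, frames]
    assert real.shape == imag.shape
    assert real.shape[:3] == (4, 1, 129)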
class InverseSTFT(nn.Module):
def __init__(self, filter_length=1024, hop_length=512, win_length=None, window='hann'):
super(InverseSTFT, self).__init__()
self.filter_length = filter_length
self.hop_length = hop_length
self.win_length = win_length if win_length else filter_length
self.window = window
self.forward_transform = None
self.pad_amount = int(self.filter_length / 2)
scale = self.filter_length / self.hop_length
fourier_basis = np.fft.fft(np.eye(self.filter_length))
cutoff = int((self.filter_length / 2 + 1))
fourier_basis = np.vstack([np.real(fourier_basis[:cutoff, :]), np.imag(fourier_basis[:cutoff, :])])
inverse_basis = torch.FloatTensor(np.linalg.pinv(scale * fourier_basis).T[:, None, :])
assert (filter_length >= self.win_length)
# get window and zero center pad it to filter_length
fft_window = get_window(window, self.win_length, fftbins=True)
fft_window = pad_center(fft_window, filter_length)
fft_window = torch.from_numpy(fft_window).float()
inverse_basis *= fft_window # inverse basis
self.register_buffer('inverse_basis', inverse_basis.float())
def inverse(self, real_part, imag_part):
"""
Call the inverse STFT (iSTFT), given magnitude and phase tensors (or real and imag tensors)
Arguments:
magnitude {tensor}
Magnitude of STFT with shape (num_batch, num_frequencies, num_frames)
phase {tensor}
Phase of STFT with shape (num_batch, num_frequencies, num_frames)
Returns:
inverse_transform {tensor}
Reconstructed audio given magnitude and phase. Of shape (num_batch, num_samples)
"""
magnitude = torch.sqrt(real_part ** 2 + imag_part ** 2)
phase = torch.atan2(imag_part.data, real_part.data)
recombine_magnitude_phase = torch.cat([magnitude * torch.cos(phase), magnitude * torch.sin(phase)], dim=1)
inverse_transform = F.conv_transpose1d(recombine_magnitude_phase, self.inverse_basis, stride=self.hop_length,
padding=0)
if self.window is not None:
window_sum = window_sumsquare(
self.window, magnitude.size(-1), hop_length=self.hop_length,
win_length=self.win_length, n_fft=self.filter_length,
dtype=np.float32)
# remove modulation effects
approx_nonzero_indices = torch.from_numpy(
np.where(window_sum > tiny(window_sum))[0])
window_sum = torch.from_numpy(window_sum).to(inverse_transform.device)
inverse_transform[:, :, approx_nonzero_indices] /= window_sum[approx_nonzero_indices]
# scale by hop ratio
inverse_transform *= float(self.filter_length) / self.hop_length
        inverse_transform = inverse_transform[..., self.pad_amount:]
        # Trim the trailing reflection pad along the time axis. The original
        # used len(inverse_transform), which is the batch size rather than
        # the number of samples, so the trim point was wrong.
        inverse_transform = inverse_transform[..., :-self.pad_amount]
inverse_transform = inverse_transform.squeeze(1)
return inverse_transform
def forward(self, real_part, imag_part):
inverse_transform = self.inverse(real_part, imag_part)
        return inverse_transform

# ---- end of file: examples/speech_command/util/stft.py (package: zeef) ----
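# A hedged round-trip sketch for the STFT/InverseSTFT pair defined in stft.py
# above (illustrative only; reconstruction is approximate at the padded edges):
def _demo_istft_roundtrip():
    stft = STFT(filter_length=1024, hop_length=512)
    istft = InverseSTFT(filter_length=1024, hop_length=512)
    waveform = torch.randn(1, 16000)
    # transform() keeps the [B, freq, frames] layout that inverse() expects:
    real, imag = stft.transform(waveform)
    recon = istft(real, imag)
    return recon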
import os
import torch
import torchaudio
import torch.nn.functional as F
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
def readlines(datapath):
with open(datapath, 'r') as f:
lines = f.read().splitlines()
return lines
class SpeechCommandDataset(Dataset):
def __init__(self, datapath, filename, is_training, class_list, class_encoding):
super(SpeechCommandDataset, self).__init__()
"""
Args:
datapath: "./datapath"
filename: train_filename or valid_filename
is_training: True or False
"""
self.classes = class_list
self.sampling_rate = 16000
self.sample_length = 16000
self.datapath = datapath
self.filename = filename
self.is_training = is_training
self.class_encoding = class_encoding
self.speech_dataset = self.combined_path()
def combined_path(self):
dataset_list = []
for path in self.filename:
category, wave_name = path.split("/")
if category in self.classes and category == "_silence_":
dataset_list.append(["silence", "silence"])
elif category in self.classes:
path = os.path.join(self.datapath, category, wave_name)
dataset_list.append([path, category])
return dataset_list
def load_audio(self, speech_path):
waveform, sr = torchaudio.load(speech_path)
        if waveform.shape[1] < self.sample_length:
            # pad if the audio is shorter than the sampling length.
            waveform = F.pad(waveform, [0, self.sample_length - waveform.shape[1]])
        if self.is_training:
            # random time-shift augmentation: pad 10% on each side, then
            # crop a sample_length window at a random offset below.
            pad_length = int(waveform.shape[1] * 0.1)
            waveform = F.pad(waveform, [pad_length, pad_length])
offset = torch.randint(0, waveform.shape[1] - self.sample_length + 1, size=(1,)).item()
waveform = waveform.narrow(1, offset, self.sample_length)
return waveform
    def one_hot(self, speech_category):
        # Despite its name, this returns the integer class index (what
        # cross-entropy training expects), not a one-hot vector.
        encoding = self.class_encoding[speech_category]
        return encoding
def __len__(self):
return len(self.speech_dataset)
def __getitem__(self, index):
speech_path = self.speech_dataset[index][0]
speech_category = self.speech_dataset[index][1]
label = self.one_hot(speech_category)
if speech_path == "silence":
waveform = torch.zeros(1, self.sampling_rate)
else:
waveform = self.load_audio(speech_path)
return waveform, label
def get_dataloader_keyword(data_path, class_list, class_encoding, batch_size=1):
"""
CL task protocol: keyword split.
To get the GSC data and build the data loader from a list of keywords.
"""
if len(class_list) != 0:
train_filename = readlines(f"{data_path}/splits/train.txt")
valid_filename = readlines(f"{data_path}/splits/valid.txt")
train_dataset = SpeechCommandDataset(f"{data_path}/data", train_filename, True, class_list, class_encoding)
valid_dataset = SpeechCommandDataset(f"{data_path}/data", valid_filename, False, class_list, class_encoding)
train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, drop_last=True)
valid_dataloader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=True, drop_last=True)
return train_dataloader, valid_dataloader
else:
raise ValueError("the class list is empty!") | zeef | /zeef-0.1.3-py3-none-any.whl/examples/speech_command/util/data_loader.py | data_loader.py |
# The Zeek Cluster Management Client
[](https://github.com/zeek/zeek-client/actions/workflows/test.yml)
This is the recommended command-line client for interacting with Zeek's
[Management framework](https://docs.zeek.org/en/master/frameworks/management.html).
Built in Python and using Broker's [WebSocket pub/sub interface](https://docs.zeek.org/projects/broker/en/v2.3.0/web-socket.html), it
connects to a cluster controller to execute management tasks. Here's what it looks like:
```console
$ zeek-client --help
usage: zeek-client [-h] [-c FILE] [--controller HOST:PORT] [--set SECTION.KEY=VAL] [--quiet | --verbose]
[--version]
{deploy,deploy-config,get-config,get-id-value,get-instances,get-nodes,monitor,restart,stage-config,show-settings,test-timeout}
...
A Zeek management client
options:
-h, --help show this help message and exit
-c FILE, --configfile FILE
Path to zeek-client config file. (Default: /home/christian/inst/opt/zeek/etc/zeek-
client.cfg)
--controller HOST:PORT
Address and port of the controller, either of which may be omitted (default:
127.0.0.1:2150)
--set SECTION.KEY=VAL
Adjust a configuration setting. Can use repeatedly. See show-settings.
--quiet, -q Suppress informational output to stderr.
--verbose, -v Increase informational output to stderr. Repeat for more output (e.g. -vvv).
--version Show version number and exit.
commands:
{deploy,deploy-config,get-config,get-id-value,get-instances,get-nodes,monitor,restart,stage-config,show-settings,test-timeout}
See `zeek-client <command> -h` for per-command usage info.
deploy Deploy a staged cluster configuration.
deploy-config Upload a cluster configuration and deploy it.
get-config Retrieve staged or deployed cluster configuration.
get-id-value Show the value of a given identifier in Zeek cluster nodes.
get-instances Show instances connected to the controller.
get-nodes Show active Zeek nodes at each instance.
monitor For troubleshooting: do nothing, just report events.
restart Restart cluster nodes.
stage-config Upload a cluster configuration for later deployment.
show-settings Show zeek-client's own configuration.
test-timeout Send timeout test event.
environment variables:
ZEEK_CLIENT_CONFIG_FILE: Same as `--configfile` argument, but lower precedence.
ZEEK_CLIENT_CONFIG_SETTINGS: Same as a space-separated series of `--set` arguments, but lower precedence.
```
## Installation
The recommended way to run the client is to install it with Zeek, since the
client is part of the distribution. You may also run it directly from the
official Zeek [Docker image](https://hub.docker.com/r/zeekurity/zeek).
The WebSocket-powered `zeek-client` currently requires Zeek built from
the master branch, or via our [development Docker image](https://hub.docker.com/r/zeekurity/zeek-dev).
`zeek-client` will officially become available as a standalone package,
installable via `pip`, with Zeek 5.2.
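
Once the package is published on PyPI, a plain `pip` install should also work
(shown here for illustration):

```console
$ pip3 install zeek-client
```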
## Quickstart
Run the following (as root) to launch an all-in-one management instance on your
system:
```console
# zeek -C -j policy/frameworks/management/controller policy/frameworks/management/agent
```
The above stays in the foreground. In a new shell, save the following content
to a file ``cluster.cfg``, adapting the workers' sniffing interfaces to your
system:
```ini
[manager]
role = manager
[logger]
role = logger
[worker-01]
role = worker
interface = lo
[worker-02]
role = worker
interface = eth0
```
Run the following command (as any user) to deploy the configuration:
```console
$ zeek-client deploy-config cluster.cfg
{
"errors": [],
"results": {
"id": "9befc56c-f7e8-11ec-8626-7c10c94416bb",
"nodes": {
"logger": {
"instance": "agent-testbox",
"success": true
},
"manager": {
"instance": "agent-testbox",
"success": true
},
"worker-01": {
"instance": "agent-testbox",
"success": true
},
"worker-02": {
"instance": "agent-testbox",
"success": true
}
}
}
}
```
You are now running a Zeek cluster on your system. Try ``zeek-client get-nodes``
to see more details about the cluster's current status. (In the above, "testbox"
is the system's hostname.)
## Documentation
The [Zeek documentation](https://docs.zeek.org/en/master/frameworks/management.html)
covers both the Management framework and the client's commands.
<!-- ---- end of file: README.md (package: zeek-client) ---- -->
import configparser
import enum
import shlex
import socket
from . import brokertypes as bt
from .utils import make_uuid
from .logs import LOG
class ConfigParserMixin():
"""Methods to create and render the object via ConfigParser instances."""
@classmethod
def from_config_parser(cls, cfp, section=None): # pylint: disable=unused-argument
"""Instantiates an object of this class based on the given
ConfigParser, and optional section name in it, as applicable.
Raises ValueError if the provided configuration is invalid for the class
to instantiate.
"""
return None # pragma: no cover
def to_config_parser(self, cfp=None): # pylint: disable=unused-argument,no-self-use
"""Returns this object in a ConfigParser instance. When the optional cfp
argument is not None, the caller requests the implementation to add to
the given parser, not create a new one.
"""
return None # pragma: no cover
@staticmethod
def _get(cfp, typ, section, *keys):
"""Typed config key/val retrieval, with support for key name aliases."""
for key in keys:
val = cfp.get(section, key, fallback=None)
if val is not None:
try:
return typ(val)
except ValueError as err:
raise ValueError('cannot convert "{}.{}" value "{}" to {}'
.format(section, key, val, typ.__name__)) from err
return None
class SerializableZeekType:
"""An interface that supports serializing to and from Broker's data model.
Objects of any class implementing this interface can be rendered to the
Python-level Broker data model in the brokertypes module, and instantiated
from it.
"""
# We are not using abc.abstractmethod and friends here because the metaclass
# magic they introduces clashes with multiple inheritance from other types,
# affecting e.g. Enums below.
def to_brokertype(self): # pylint: disable=no-self-use
"""Returns a brokertype instance representing this object."""
return None # pragma: no cover
@classmethod
def from_brokertype(cls, data): # pylint: disable=unused-argument
"""Returns an instance of this class for the given brokertype data.
data: a brokertype instance
Raises TypeError when the given data doesn't match the expected type.
"""
return None # pragma: no cover
class JsonableZeekType:
"""An interface for objects that can render themselves to JSON.
This is not to be confused with the Broker-internal JSON representation for
WebSockets. Instead, it refers to the JSON-formatted outputs zeek-client
reports to the user.
"""
def to_json_data(self):
"""Returns JSON-suitable datastructure representing the object."""
return self.__dict__ # pragma: no cover
class ZeekType(SerializableZeekType, JsonableZeekType):
"""A does-it-all Zeek type."""
class Enum(ZeekType, enum.Enum):
"""A base class for Zeek's enums, with Python's enum features.
This distinguishes the "flat" Python enums ("FOO") from the fully qualified
way they're rendered via Zeek ("Some::Module::FOO"). To enable a Python enum
to present the full qualification when sending into Broker, derivations
reimplement the module_scope() class method to prefix with a scope string.
"""
def __lt__(self, other):
if type(self) != type(other):
return NotImplemented
return self.qualified_name() < other.qualified_name()
def __eq__(self, other):
return (type(self) == type(other) and
self.qualified_name() == other.qualified_name())
def __hash__(self):
return hash((self.qualified_name(), self.value))
def to_brokertype(self):
scope = self.module_scope()
scope = scope + '::' if scope else ''
return bt.Enum(scope + self.name)
def to_json_data(self):
# A similar concern as above applies here, but the exact enum type will
# often be clear from context and so the un-scoped name alone may
# suffice.
return self.name
def qualified_name(self):
scope = self.module_scope()
scope = scope + '::' if scope else ''
return scope + self.name
@classmethod
def lookup(cls, name):
"""Robust name-based lookup of an enum value.
This removes any Zeek-land or Python-land qualifications, and
automatically upper-cases the looked-up name.
Raises KeyError if the requested enum value isn't defined.
"""
name = name.split('::')[-1]
name = name.split('.')[-1]
return cls[name.upper()]
@classmethod
def module_scope(cls): # pragma: no cover
# Reimplement this in derived classes to convey the Zeek-level enum
# scope. For example, for a Foo.BAR (or Foo::BAR, in Zeek) enum value,
# this should return the string "Foo".
assert False, 'reimplement module_scope() in your Enum derivative'
return ''
@classmethod
def from_brokertype(cls, data):
# The argument should be a brokertype.Enum a scoped value such as
# "Foo::VALUE".
try:
module, name = data.to_py().split('::', 1)
if module != cls.module_scope():
raise ValueError('module scope mismatch for {}: {} != {}.'
.format(cls.__name__, module, cls.module_scope()))
return cls.lookup(data.to_py())
except (ValueError, KeyError) as err:
raise TypeError('unexpected enum value for {}: {}'.format(
cls.__name__, repr(data))) from err
class ClusterRole(Enum):
"""Equivalent of Supervisor::ClusterRole enum in Zeek"""
NONE = 0
LOGGER = 1
MANAGER = 2
PROXY = 3
WORKER = 4
@classmethod
def module_scope(cls):
return 'Supervisor'
class ManagementRole(Enum):
"""Equivalent of Management::Role enum in Zeek"""
NONE = 0
AGENT = 1
CONTROLLER = 2
NODE = 3
@classmethod
def module_scope(cls):
return 'Management'
class State(Enum):
"""Equivalent of Management::State enum in Zeek"""
PENDING = 0
RUNNING = 1
STOPPED = 2
FAILED = 3
CRASHED = 4
UNKNOWN = 5
@classmethod
def module_scope(cls):
return 'Management'
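# A brief, hedged illustration of the round trip provided by the Enum base
# class above (illustrative only; not part of the original file):
def _demo_enum_roundtrip():
    role = ClusterRole.lookup('Supervisor::WORKER')  # scoping is stripped
    assert role is ClusterRole.WORKER
    data = role.to_brokertype()  # becomes bt.Enum('Supervisor::WORKER')
    assert ClusterRole.from_brokertype(data) == role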
class Option(ZeekType):
"""Equivalent of Management::Option."""
def __init__(self, name, value):
self.name = name
self.value = value
def __eq__(self, other):
return (type(self) == type(other) and
self.name == other.name and
self.value == other.value)
def __hash__(self):
return hash((self.name, self.value))
def to_brokertype(self):
return bt.Vector([
bt.String(self.name),
bt.String(self.value)
])
@classmethod
def from_brokertype(cls, data):
return Option(*data.to_py())
class Instance(ZeekType):
"""Equivalent of Management::Instance."""
def __init__(self, name, addr=None, port=None):
self.name = name
# This is a workaround until we've resolved addresses in instances
self.host = '0.0.0.0' # XXX needs proper optionality
if addr is not None:
self.host = str(addr)
self.port = port # None or integer value; we always mean TCP
def __lt__(self, other):
return self.name < other.name
def __eq__(self, other):
return (type(self) == type(other) and
self.name == other.name and
self.host == other.host and
self.port == other.port)
def __hash__(self):
return hash((self.name, self.host, self.port))
def to_brokertype(self):
return bt.Vector([
bt.String(self.name),
bt.Address(self.host),
bt.from_py(self.port, typ=bt.Port),
])
def to_json_data(self):
if self.port is not None:
return self.__dict__
# Here too, work around 0.0.0.0 until resolved
if str(self.host) != '0.0.0.0':
return { 'name': self.name, 'host': self.host }
return { 'name': self.name }
@classmethod
def from_brokertype(cls, data):
try:
name, addr, port = data.to_py()
return Instance(name, addr, None if port is None else port.number)
except ValueError as err:
raise TypeError('unexpected Broker data for Instance object ({})'
.format(data)) from err
class Node(ZeekType, ConfigParserMixin):
"""Equivalent of Management::Node."""
def __init__(self, name, instance, role, state=State.RUNNING, port=None,
scripts=None, options=None, interface=None, cpu_affinity=None,
env=None):
self.name = name
self.instance = instance
self.role = role
self.state = state
self.port = port
self.scripts = scripts
self.options = options # We use a list, Zeek record uses set
self.interface = interface
self.cpu_affinity = cpu_affinity
self.env = env or {}
def __lt__(self, other):
return self.name < other.name
def __eq__(self, other):
return (type(self) == type(other) and
self.name == other.name and
self.instance == other.instance and
self.role == other.role and
self.state == other.state and
self.port == other.port and
self.scripts == other.scripts and
self.options == other.options and
self.interface == other.interface and
self.cpu_affinity == other.cpu_affinity and
self.env == other.env)
def __hash__(self):
scripts = tuple(self.scripts) if self.scripts else None
options = tuple(self.options) if self.options else None
        env = None
        if self.env:
            # Must be a tuple: the original generator expression hashed by
            # object identity, so equal Nodes could hash differently.
            env = tuple((key, self.env[key]) for key in sorted(self.env))
        return hash((self.name, self.instance, self.role, self.state, self.port,
                     scripts, options, self.interface, self.cpu_affinity, env))
def to_brokertype(self):
options = bt.NoneType()
if self.options:
options = bt.Set({opt.to_brokertype() for opt in self.options})
return bt.Vector([
bt.String(self.name),
bt.String(self.instance),
self.role.to_brokertype(),
self.state.to_brokertype(),
bt.from_py(self.port, typ=bt.Port),
bt.from_py(self.scripts),
options,
bt.from_py(self.interface),
bt.from_py(self.cpu_affinity, typ=bt.Count),
bt.from_py(self.env),
])
def to_json_data(self):
return {
'name': self.name,
'instance': self.instance,
'role': self.role.to_json_data(),
# We currently omit the state field since it has no effect on
# cluster node operation.
# 'state': self.state.to_json_data(),
'port': self.port,
'scripts': self.scripts,
'options': self.options,
'interface': self.interface,
'cpu_affinity': self.cpu_affinity,
'env': self.env,
}
@classmethod
def from_brokertype(cls, data):
try:
options = None
if isinstance(data[6], bt.Set):
options = [Option.from_brokertype(opt_data) for opt_data in data[6]]
port = None
if isinstance(data[4], bt.Port):
port = data[4].number
return Node(
data[0].to_py(), # name
data[1].to_py(), # instance
ClusterRole.from_brokertype(data[2]),
State.from_brokertype(data[3]),
port,
data[5].to_py(), # scripts
options,
data[7].to_py(), # interface
data[8].to_py(), # cpu_affinity
data[9].to_py(), # env
)
except (IndexError, TypeError, ValueError) as err:
raise TypeError('unexpected Broker data for Node object ({})'.format(
data)) from err
@classmethod
def from_config_parser(cls, cfp, section=None):
def get(typ, *keys):
return cls._get(cfp, typ, section, *keys)
name = section
instance = get(str, 'instance')
role = get(str, 'role', 'type')
# We currently ignore the node state, if provided. The Node class
# defaults to 'RUNNING'.
state = State.RUNNING
if get(str, 'state'):
LOG.warning('ignoring node "%s" state "%s" in configuration',
name, get(str, 'state'))
port = get(int, 'port')
scripts = None
# The Node record type on the Zeek side features a set[Options] that we
# don't use (yet).
interface = get(str, 'interface')
cpu_affinity = get(int, 'cpu_affinity')
env = None
# Validate the specified values
if not instance:
# When a node features no instance name, default to
# "agent-<hostname>", assuming the config targets host-local
# deployment.
hostname = socket.gethostname() or 'localhost'
instance = 'agent-' + hostname
if not role:
raise ValueError('node requires a role')
try:
role = ClusterRole.lookup(role)
except (AttributeError, KeyError) as err:
raise ValueError('role "{}" is invalid'.format(role)) from err
# Optional values follow:
# Ports are optional and filled in by the controller, assuming
# Management::Controller::auto_assign_ports is enabled. But when
# present, we validate:
if port is not None and (port < 1 or port > 65535):
raise ValueError('port {} outside valid range'.format(port))
try:
# We support multiple scripts as a simple space-separated sequence
# of filenames, with possible quotation marks for strings with
# spaces. The shlex module provides a convenient way to parse these.
val = get(str, 'scripts')
if val:
scripts = sorted(shlex.split(val))
except (AttributeError, KeyError) as err:
raise ValueError('scripts value "{}" is invalid'.format(val)) from err
try:
# An environment variable dictionary is represented as a single
# config value: a space-separated sequence of <var>=<val> strings,
# possibly with quotation marks around the val. shlex helps here
# too: shlex.split('foo=bar=baz blum="foo bar baz"') yields
# ['foo=bar=baz', 'blum=foo bar baz']
val = get(str, 'env')
if val:
env = {}
for item in shlex.split(val):
key, kval = item.split('=', 1)
env[key] = kval
except (AttributeError, KeyError, ValueError) as err:
raise ValueError('env value "{}" is invalid'.format(val)) from err
# Warn about unexpected keys:
cfp_subset = cfp[section] if section else cfp
        keys = set(cfp_subset.keys())
        keys -= set(['instance', 'role', 'port', 'scripts',
                     'interface', 'cpu_affinity', 'env'])
if len(keys) > 0:
LOG.warning('ignoring unexpected keys: %s', ', '.join(sorted(keys)))
return Node(name=name, instance=instance, role=role, state=state,
port=port, scripts=scripts, interface=interface,
cpu_affinity=cpu_affinity, env=env)
def to_config_parser(self, cfp=None):
if cfp is None:
cfp = configparser.ConfigParser(allow_no_value=True)
if self.name in cfp.sections():
cfp.remove_section(self.name)
cfp.add_section(self.name)
cfp.set(self.name, 'instance', self.instance)
cfp.set(self.name, 'role', self.role.name)
# Skip state for the moment, it has no operational effect
# if self.state is not None:
# cfp.set(self.name, 'state', self.state.name)
if self.port is not None:
cfp.set(self.name, 'port', str(self.port))
if self.scripts:
# See if any of the script paths contain spaces, and use quotation
# marks if so. This does not escape quotation marks or deal with
# other "difficult" characters.
scripts = []
for script in sorted(self.scripts):
if len(script.split()) > 1:
script = '"' + script + '"'
scripts.append(script)
cfp.set(self.name, 'scripts', ' '.join(scripts))
if self.interface is not None:
cfp.set(self.name, 'interface', self.interface)
if self.cpu_affinity is not None:
cfp.set(self.name, 'cpu_affinity', str(self.cpu_affinity))
if self.env:
# If the value has whitespace, use key="val". As with scripts above,
# this does not deal with more complicated escaping/characters.
env = []
for key in sorted(self.env.keys()):
val = self.env[key]
if len(str(val).split()) > 1:
val = '"' + val + '"'
env.append('{}={}'.format(key, val))
cfp.set(self.name, 'env', ' '.join(env))
return cfp
class Configuration(ZeekType, ConfigParserMixin):
"""Equivalent of Management::Configuration."""
def __init__(self):
self.id = make_uuid()
# The following are sets in the Zeek record equivalents. We could
# reflect this, but handling lists is easier. They do get serialized
# to/from sets.
self.instances = []
self.nodes = []
def __eq__(self, other):
return (type(self) == type(other) and
self.id == other.id and
self.instances == other.instances and
self.nodes == other.nodes)
def __hash__(self):
return hash((self.id, tuple(self.instances), tuple(self.nodes)))
def to_brokertype(self):
return bt.Vector([
bt.String(self.id),
bt.Set({inst.to_brokertype() for inst in self.instances}),
bt.Set({node.to_brokertype() for node in self.nodes}),
])
def to_json_data(self):
return {
"id": self.id,
"instances": [inst.to_json_data() for inst in sorted(self.instances)],
"nodes": [node.to_json_data() for node in sorted(self.nodes)],
}
@classmethod
def from_brokertype(cls, data):
res = Configuration()
res.id = data[0].to_py()
for inst_data in data[1]:
res.instances.append(Instance.from_brokertype(inst_data))
for node_data in data[2]:
res.nodes.append(Node.from_brokertype(node_data))
res.instances.sort()
res.nodes.sort()
return res
@classmethod
def from_config_parser(cls, cfp, _section=None):
config = Configuration()
# The nodes in this configuration that do not specify an instance.
# This is a convenience this client offers, so let's be consistent:
# if we use this feature, the entire config must be instance-free.
instance_free_nodes = set()
for section in cfp.sections():
if section == 'instances':
# The [instances] section is special: each key in it is the name of
# an instance, each val is the host:port pair where its agent is
# listening. The val may be absent when it's an instance that
# connects to the controller.
for key, val in cfp.items('instances'):
if not val:
config.instances.append(Instance(key))
else:
hostport = val
parts = hostport.split(':', 1)
if len(parts) != 2 or not parts[0] or not parts[1]:
LOG.error('invalid spec for instance "%s": "%s" should be <host>:<port>', key, val)
return None
config.instances.append(Instance(key, parts[0].strip(), parts[1].strip()))
continue
# All keys for sections other than "instances" need to have a value.
for key, val in cfp.items(section):
if val is None:
LOG.error('config item %s.%s needs a value', section, key)
return None
# The other sections are cluster nodes. Each section name corresponds to
# a node name, with the keys being one of "type", "instance", etc.
if section in [node.name for node in config.nodes]:
LOG.warning('node "%s" defined more than once, skipping repeats"', section)
continue
try:
if 'instance' not in cfp[section]:
instance_free_nodes.add(section)
config.nodes.append(Node.from_config_parser(cfp, section))
except ValueError as err:
LOG.error('invalid node "%s" configuration: %s', section, err)
return None
# Reject if this config mixes instance-free and instance-claiming nodes,
# or if it uses an instances section while omitting instances in nodes.
if len(instance_free_nodes) > 0:
if len(instance_free_nodes) != len(config.nodes):
LOG.error('either all or no nodes must state instances')
return None
if 'instances' in cfp.sections():
LOG.error('omit instances section when skipping instances in node definitions')
return None
# When the configuration has no "instances" section, then any instance
# names given in node sections imply corresponding instances whose
# agents connect to the controller. That is, the instances section is
# just a redundant listing of the instance names and we can synthesize
# it:
if 'instances' not in cfp.sections():
names = set()
for node in config.nodes:
names.add(node.instance)
config.instances = sorted([Instance(name) for name in names])
# We don't cross-check the set of instances claimed by the nodes vs the
# set of instances declared in the config, because the controller
# already does this.
return config
def to_config_parser(self, cfp=None):
if cfp is None:
cfp = configparser.ConfigParser(allow_no_value=True)
if 'instances' in cfp.sections():
cfp.remove_section('instances')
if self.instances:
cfp.add_section('instances')
for inst in sorted(self.instances):
if inst.port is not None:
# An instance the controller connects to
cfp.set('instances', inst.name, '{}:{}'.format(inst.host, inst.port))
else:
# An instance connecting to the controller
cfp.set('instances', inst.name)
for node in sorted(self.nodes):
node.to_config_parser(cfp)
return cfp
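# A hedged sketch of parsing a cluster config with the Configuration class
# above (the node layout is an arbitrary example; not part of the original
# file):
def _demo_configuration_parsing():
    cfp = configparser.ConfigParser(allow_no_value=True)
    cfp.read_string(
        "[manager]\n"
        "role = manager\n"
        "[worker-01]\n"
        "role = worker\n"
        "interface = lo\n")
    config = Configuration.from_config_parser(cfp)
    # No node named an instance, so a single "agent-<hostname>" instance is
    # synthesized; to_config_parser() round-trips the result.
    assert len(config.nodes) == 2 and len(config.instances) == 1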
class NodeStatus(SerializableZeekType):
"""Equivalent of Management::NodeState."""
def __init__(self, node, state, mgmt_role, cluster_role, pid=None, port=None):
self.node = node # A string containing the name of the node
self.state = state # A State enum value
self.mgmt_role = mgmt_role # A ManagementRole enum value
self.cluster_role = cluster_role # A ClusterRole enum value
self.pid = pid # A numeric process ID
self.port = port # A numeric (TCP) port
def __lt__(self, other):
return self.node < other.node
def __eq__(self, other):
return (type(self) == type(other) and
self.node == other.node and
self.state == other.state and
self.mgmt_role == other.mgmt_role and
self.cluster_role == other.cluster_role and
self.pid == other.pid and
self.port == other.port)
def __hash__(self):
return hash((self.node, self.state, self.mgmt_role, self.cluster_role,
self.pid, self.port))
def to_brokertype(self):
# In normal operation we only ever receive NodeStates, but for testing
# it helps to be able to serialize.
pid = bt.NoneType() if self.pid is None else bt.Integer(self.pid)
port = bt.NoneType() if self.port is None else bt.Port(self.port)
return bt.Vector([
bt.String(self.node),
self.state.to_brokertype(),
self.mgmt_role.to_brokertype(),
self.cluster_role.to_brokertype(),
pid,
port,
])
@classmethod
def from_brokertype(cls, data):
port = data[5].to_py()
if port is not None:
port = port.number
return NodeStatus(
data[0].to_py(),
State.from_brokertype(data[1]),
ManagementRole.from_brokertype(data[2]),
ClusterRole.from_brokertype(data[3]),
data[4].to_py(),
port)
class Result(SerializableZeekType):
"""Equivalent of Management::Result."""
def __init__(self, reqid, success=True, instance=None, data=None, error=None, node=None):
self.reqid = reqid
self.success = success
self.instance = instance
self.data = data
self.error = error
self.node = node
def __lt__(self, other):
"""Support sorting. Sort first by instance name the result comes from,
second by the node name if present.
"""
if self.instance is None and other.instance is not None:
return False
if self.instance is not None and other.instance is None:
return True
if self.instance is not None and other.instance is not None:
if self.instance < other.instance:
return True
if self.instance > other.instance:
return False
# Be more specific if we have a node name -- we can use it to sort when
# two results come from the same instance.
if self.node is not None and other.node is not None:
return self.node < other.node
return False
def __eq__(self, other):
return (type(self) == type(other) and
self.reqid == other.reqid and
self.success == other.success and
self.instance == other.instance and
self.data == other.data and
self.error == other.error and
self.node == other.node)
    def __hash__(self):
        # Renamed from a plain hash() method: only __hash__ participates in
        # Python's hashing protocol.
        return hash((self.reqid, self.success, self.instance, self.data,
                     self.error, self.node))
def to_brokertype(self):
# In normal operation we only ever receive Results, but for testing it
# helps to be able to serialize.
instance = bt.NoneType() if self.instance is None else bt.String(self.instance)
data = bt.NoneType()
if self.data is not None:
            # This is any-typed in Zeek and so a bit special: it is up to the
            # caller what exactly this is, and it is assumed to already be in
            # brokertype format. We just pass it through.
data = self.data
error = bt.NoneType() if self.error is None else bt.String(self.error)
node = bt.NoneType() if self.node is None else bt.String(self.node)
return bt.Vector([
bt.String(self.reqid),
bt.Boolean(self.success),
instance,
data,
error,
node,
])
@classmethod
def from_brokertype(cls, data):
# The data field gets special treatment since it can be of any
# type. When it's a brokertype.NoneType (i.e., not present), we turn it
# into None, since that simplifies its handling. Otherwise we leave it
# untouched: the correct type to deserialize into will become clear
# later from surrounding context.
res_data = data[3]
if isinstance(res_data, bt.NoneType):
res_data = None
return Result(reqid=data[0].to_py(), success=data[1].to_py(),
instance=data[2].to_py(), data=res_data,
error=data[4].to_py(), node=data[5].to_py())
class NodeOutputs(SerializableZeekType):
"""Equivalent of Management::NodeOutputs."""
def __init__(self, stdout, stderr):
self.stdout = stdout
self.stderr = stderr
def __eq__(self, other):
return (type(self) == type(other) and
self.stdout == other.stdout and
self.stderr == other.stderr)
    def __hash__(self):
        # Renamed from a plain hash() method so instances are hashable.
        return hash((self.stdout, self.stderr))
def to_brokertype(self):
# In normal operation we only ever receive NodeOutputs, but for testing
# it helps to be able to serialize.
return bt.Vector([
bt.String(self.stdout),
bt.String(self.stderr),
])
@classmethod
def from_brokertype(cls, data):
        return NodeOutputs(*(data.to_py()))

# ---- end of file: zeekclient/types.py (package: zeek-client) ----
import configparser
import os
import shlex
from .consts import CONFIG_FILE
from .logs import LOG
class Config(configparser.ConfigParser):
"""zeek-client configuration settings.
A specialized ConfigParser that hardwires defaults for select values.
Three levels of overrides apply, if provided:
(1) first, the config file, if available
(2) the ZEEK_CLIENT_CONFIG_SETTINGS environment variable may contain a
series of <section.key>=<val> assignments
(3) Any --set <section.key>=<val> arguments apply final overrides
"""
def __init__(self):
super().__init__()
self.reset()
def reset(self):
self.read_dict({
'client': {
# The default timeout for request state is 15 seconds on the
# Zeek side, so by making it larger here we ensure that timeout
# events can fire & propagate in Zeek before we give up here.
'request_timeout_secs': 20,
# Successful peering requires a successful WebSocket connection
# to the controller and the successful exchange of peering
# handshake and response. We retry both, counting connection as
# well as handshake attempts toward this total:
'peering_attempts': 10,
# How long the client's Broker's endpoint should wait internally
# until it retries a peering upon connection or when the
# connection goes away. Its default is 10 seconds; we dial that
# down to be more interactive.
'peering_retry_delay_secs': 1.0,
# The way zeek-client reports informational messages on stderr
'rich_logging_format': False,
# Whether we pretty-print JSON output by default.
'pretty_json': True,
# Default output verbosity level:
# 0 permits errors
# 1 also warnings
# 2 also informational messages
# 3 also debug messages
# 4+ no additional effect
'verbosity': 0,
},
'controller': {
# Default host name/address where we contact the controller.
'host': '127.0.0.1',
# Default WebSocket port of the controller.
'port': 2149,
},
'ssl': {
# These settings control the security settings of the connection
# to the controller. They mirror Broker's approach and naming:
# by default, SSL is active, but unvalidated. Providing
# certificate, private key, and possibly CA & passphrase secure
# the connection properly. Compare to Zeek's Broker framework.
# Whether to use SSL at all. Disabling this yields plaintext
# communication. This mirrors Broker::disable_ssl on the Zeek
# side.
'disable': False,
# Path to a file containing a X.509 certificate in PEM format.
'certificate': '',
# Path to a file containing the private key for the certificate,
# in PEM format.
'keyfile': '',
# Path to a file containing concatenated, trusted certificates,
# in PEM format.
'cafile': '',
# Path to an OpenSSL-style directory of trusted certificates.
'capath': '',
# A passphrase to decrypt the private key, if required.
'passphrase': '',
}
})
def update_from_file(self, config_file=CONFIG_FILE):
self.read(config_file)
def update_from_env(self):
for item in shlex.split(os.getenv('ZEEK_CLIENT_CONFIG_SETTINGS') or ''):
try:
self.apply(item)
except ValueError:
LOG.error('config item "%s" in ZEEK_CLIENT_CONFIG_SETTINGS '
'invalid. Please use <section.key>=<val>.', item)
def update_from_args(self, args):
for item in args.set:
try:
self.apply(item)
except ValueError:
LOG.error('config item "%s" invalid. Please use '
'<section.key>=<val>.', item)
# The `--controller` argument is a shortcut for two `--set` arguments that
# set controller host and port, so update these manually:
if args.controller:
host_port = args.controller.split(':', 1)
if len(host_port) != 2 or not host_port[1]:
# It's just a hostname
self.set('controller', 'host', host_port[0])
elif not host_port[0]:
# It's just a port (as ":<port>")
self.set('controller', 'port', host_port[1])
else:
self.set('controller', 'host', host_port[0])
self.set('controller', 'port', host_port[1])
# --verbose/-v/-vvv etc set a numeric verbosity level:
if args.verbose:
self.set('client', 'verbosity', str(args.verbose))
def apply(self, item):
"""This is equivalent to set(), but works via a single <section.key>=<val> string."""
try:
identifier, val = item.split('=', 1)
section, key = identifier.split('.', 1)
if not self.has_section(section):
self.add_section(section)
self.set(section, key, val)
except ValueError as err:
raise ValueError('config item "{}" invalid'.format(item)) from err
def completer(self, **_kwargs):
"""A completer suitable for argcomplete."""
ret = []
for section in self.sections():
for key, val in self.items(section):
ret.append(section + '.' + key + '=' + val)
return sorted(ret)
CONFIG = Config()

# ---- end of file: zeekclient/config.py (package: zeek-client) ----
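# A hedged usage sketch for the Config class above (the override values are
# arbitrary; not part of the original file):
def _demo_config_overrides():
    cfg = Config()
    cfg.apply('controller.host=10.0.0.5')  # same effect as a --set argument
    cfg.apply('controller.port=2149')
    assert cfg.get('controller', 'host') == '10.0.0.5'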
import os.path
import ssl
from .config import CONFIG
def get_context():  # pragma: no cover
    """Returns an ssl.SSLContext for TLS-protected websocket communication.

    This is a helper for communication protected with SSL, with or without
    authentication. This mirrors Broker's default mode of operation:
    SSL-protected, but without validation/authentication unless certificate
    settings are configured. (When no certificate is set, the context
    disables validation and permits unauthenticated ciphersuites.)

    This may raise ssl.SSLError for SSL problems, and OSError if any cert/key
    files cannot be loaded.

    Returns: ssl.SSLContext.
    """
# Newer OpenSSL versions prefer PROTOCOL_TLS_CLIENT over PROTOCOL_TLS, so
# see if the former is available.
if hasattr(ssl, 'PROTOCOL_TLS_CLIENT'):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
else:
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
ssl_certificate = CONFIG.get('ssl', 'certificate') or None
ssl_keyfile = CONFIG.get('ssl', 'keyfile') or None
ssl_cafile = CONFIG.get('ssl', 'cafile') or None
ssl_capath = CONFIG.get('ssl', 'capath') or None
ssl_passphrase = CONFIG.get('ssl', 'passphrase') or None
if ssl_certificate and not os.path.isfile(ssl_certificate):
raise FileNotFoundError('SSL certificate file "{}" not found'
.format(ssl_certificate))
if ssl_keyfile and not os.path.isfile(ssl_keyfile):
raise FileNotFoundError('SSL private key file "{}" not found'
.format(ssl_keyfile))
if ssl_cafile and not os.path.isfile(ssl_cafile):
raise FileNotFoundError('SSL trusted CAs file "{}" not found'
.format(ssl_cafile))
if ssl_capath and not os.path.isdir(ssl_capath):
raise FileNotFoundError('SSL trusted CAs path "{}" not found'
.format(ssl_capath))
if not ssl_certificate:
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
# This mirrors the selection in Broker, and depends on the OpenSSL version:
try:
ctx.set_ciphers('AECDH-AES256-SHA@SECLEVEL=0')
except ssl.SSLError:
ctx.set_ciphers('AECDH-AES256-SHA')
else:
# This too mirrors Broker:
ctx.set_ciphers('HIGH:!aNULL:!MD5')
ctx.load_cert_chain(ssl_certificate, ssl_keyfile, ssl_passphrase)
if ssl_cafile or ssl_capath:
ctx.load_verify_locations(cafile=ssl_cafile, capath=ssl_capath)
return ctx
def get_websocket_sslopt():
"""Returns a TLS options dict for websocket-client.
The resulting dict is suitable for the websocket.WebSocket()
constructor. It's required for older websocket-client versions that don't
yet support passing an SSL context explicitly. This can go when everyone can
easily use websocket-client >= 1.2.2.
"""
ssl_certificate = CONFIG.get('ssl', 'certificate') or None
ssl_keyfile = CONFIG.get('ssl', 'keyfile') or None
ssl_cafile = CONFIG.get('ssl', 'cafile') or None
ssl_capath = CONFIG.get('ssl', 'capath') or None
ssl_passphrase = CONFIG.get('ssl', 'passphrase') or None
if ssl_certificate and not os.path.isfile(ssl_certificate):
raise FileNotFoundError('SSL certificate file "{}" not found'
.format(ssl_certificate))
if ssl_keyfile and not os.path.isfile(ssl_keyfile):
raise FileNotFoundError('SSL private key file "{}" not found'
.format(ssl_keyfile))
if ssl_cafile and not os.path.isfile(ssl_cafile):
raise FileNotFoundError('SSL trusted CAs file "{}" not found'
.format(ssl_cafile))
if ssl_capath and not os.path.isdir(ssl_capath):
raise FileNotFoundError('SSL trusted CAs path "{}" not found'
.format(ssl_capath))
# SSL options as understood by websocket-client
sslopt = {}
if not ssl_certificate:
sslopt['cert_reqs'] = ssl.CERT_NONE
if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000:
sslopt['ciphers'] = 'AECDH-AES256-SHA@SECLEVEL=0'
else:
sslopt['ciphers'] = 'AECDH-AES256-SHA'
else:
sslopt['ciphers'] = 'HIGH:!aNULL:!MD5'
if ssl_certificate:
sslopt['certfile'] = ssl_certificate
if ssl_keyfile:
sslopt['keyfile'] = ssl_keyfile
if ssl_passphrase:
sslopt['password'] = ssl_passphrase
if ssl_cafile:
sslopt['ca_certs'] = ssl_cafile
if ssl_capath:
sslopt['ca_cert_path'] = ssl_capath
    return sslopt

# ---- end of file: zeekclient/ssl.py (package: zeek-client) ----
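# A hedged sketch of wiring the SSL options above into websocket-client,
# mirroring how controller.py uses them (the URL and port are placeholder
# assumptions; not part of the original file):
def _demo_websocket_tls():
    import websocket  # the websocket-client package
    sslopt = get_websocket_sslopt()
    wsock = websocket.WebSocket(sslopt=sslopt)
    wsock.connect('wss://127.0.0.1:2149/v1/messages/json')
    return wsock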
import ssl
import sys
import time
# This requires the websockets-client package, confusingly imported under the
# name "websocket", and called "python3-websocket" in the Debian/Ubuntu
# world. Not to be confused with "websockets", the alternative, async-oriented
# Python package! Please see: https://websocket-client.readthedocs.io
try:
import websocket
except ImportError:
print("The zeek-client package requires websocket-client.\n"
"If you use 'pip', you can install it as follows:\n"
"\n"
" pip3 install websocket-client\n"
"\n",
file=sys.stderr)
sys.exit(1)
from .brokertypes import (
ZeekEvent,
HandshakeMessage,
HandshakeAckMessage,
DataMessage,
ErrorMessage,
unserialize
)
from .config import CONFIG
from .consts import CONTROLLER_TOPIC
from .events import Registry
from .logs import LOG
from .ssl import get_websocket_sslopt
from .utils import make_uuid
class Error(Exception):
"""Catch-all for exceptions arising from use of Controller objects."""
class ConfigError(Error):
"""A problem occurred while configuring the WebSocket object."""
class UsageError(Error):
"""Invalid sequence of operations on a Controller object."""
class Controller:
"""A class managing a connection to the Zeek cluster controller."""
def __init__(self, controller_host=None, controller_port=None,
controller_topic=CONTROLLER_TOPIC):
"""Controller connection constructor.
This may raise ConfigError in case of trouble with the
connection settings.
"""
self.controller_host = controller_host or CONFIG.get('controller', 'host')
self.controller_port = controller_port or CONFIG.getint('controller', 'port')
self.controller_topic = controller_topic
self.controller_broker_id = None # Defined in Handshake ACK message
try:
if self.controller_port < 1 or self.controller_port > 65535:
raise ValueError('controller port number {} outside valid range'
.format(self.controller_port))
disable_ssl = CONFIG.getboolean('ssl', 'disable')
self.wsock_url = '{}://{}:{}/v1/messages/json'.format(
'ws' if disable_ssl else 'wss',
self.controller_host, self.controller_port)
sslopt = None if disable_ssl else get_websocket_sslopt()
self.wsock = websocket.WebSocket(sslopt=sslopt)
except (ValueError, OSError, ssl.SSLError) as err:
raise ConfigError('cannot configure connection to {}:{}: {}'.format(
self.controller_host, self.controller_port, err)) from err
def connect(self):
"""Connect to the configured controller.
        This takes the controller coordinates from the zeek-client configuration
(or the arguments passed to the constructor, if any) and establishes a
fully peered connection. "Fully peered" here means that the object first
establishes the websocket connection, potentially wrapped in TLS as per
the TLS-specific configuration settings, and then conducts the
Broker-level handshake. The latter establishes the Controller's Broker
ID and our topic subscriptions.
Returns True if peering completes successfully, False otherwise, with
according messages written to the log.
"""
LOG.info('connecting to controller %s:%s', self.controller_host,
self.controller_port)
attempts = CONFIG.getint('client', 'peering_attempts')
retry_delay = CONFIG.getfloat('client', 'peering_retry_delay_secs')
handshake = HandshakeMessage([self.controller_topic])
# We accommodate problems during connect() and the Broker handshake,
# attempting these a total of client.peering_attempts times. That is,
# if we use 10 attempts and connect() takes 3 attempts, 7 attempts
# remain for the handshake. Since the kinds of problems that may arise
# in either stage in the (web)socket operations overlap substantially,
# we use a single function that checks them all:
def wsock_operation(op, stage):
nonlocal attempts
while attempts > 0:
try:
attempts -= 1
return op()
except websocket.WebSocketTimeoutException:
time.sleep(retry_delay)
continue
except websocket.WebSocketException as err:
LOG.error('websocket error in %s with controller %s:%s: %s',
stage, self.controller_host, self.controller_port,
err)
return False
except ConnectionRefusedError as err:
# We don't consider these fatal since they can happen
# naturally during tests and other automated setups where
# it's beneficial to keep trying. Also, this is a subclass
# of OSError, so needs to come before it:
LOG.debug('connection refused for controller %s:%s',
self.controller_host, self.controller_port)
time.sleep(retry_delay)
continue
except ssl.SSLError as err:
# Same here, likewise a subclass of OSError:
LOG.error('socket TLS error in %s with controller %s:%s: %s',
stage, self.controller_host, self.controller_port,
err)
return False
except OSError as err:
# From socket.py docs: "Errors related to socket or address
# semantics raise OSError or one of its subclasses".
LOG.error('socket error in %s with controller %s:%s: %s',
stage, self.controller_host, self.controller_port,
err)
return False
except Exception as err:
LOG.exception('unexpected error in %s with controller %s:%s: %s',
stage, self.controller_host, self.controller_port,
err)
return False
if attempts == 0:
LOG.error('websocket connection to %s:%s timed out in %s',
self.controller_host, self.controller_port, stage)
return False
def connect_op():
self.wsock.connect(self.wsock_url, timeout=retry_delay)
self.wsock.send(handshake.serialize())
return True
def handshake_op():
rawdata = self.wsock.recv()
try:
msg = HandshakeAckMessage.unserialize(rawdata)
except TypeError as err:
LOG.error('protocol data error with controller %s:%s: %s, raw data: %s',
self.controller_host, self.controller_port, err, rawdata)
return False
self.controller_broker_id = msg.endpoint
LOG.info('peered with controller %s:%s', self.controller_host,
self.controller_port)
return True
if not wsock_operation(connect_op, 'connect()'):
return False
if not wsock_operation(handshake_op, 'handshake'):
return False
return True
def publish(self, event):
"""Publishes the given event to the controller topic.
Raises UsageError when invoked without an earlier connect().
Args:
event (zeekclient.event.Event): the event to publish.
"""
if self.controller_broker_id is None:
raise UsageError('cannot publish without established peering')
msg = DataMessage(self.controller_topic, event.to_brokertype())
self.wsock.send(msg.serialize())
def receive(self, timeout_secs=None, filter_pred=None):
"""Receive an event from the controller's event subscriber.
Raises UsageError when invoked without an earlier connect().
Args:
timeout_secs (int): number of seconds before we time out.
                Has the semantics of the poll.poll() timeout argument, i.e.
None and negative values mean no timeout. The default
is client.request_timeout_secs.
filter_pred: a predicate function for filtering out unacceptable
events. The function takes a received event as only input,
returning True if the event is acceptable for returning to the
`receive()` caller, and False otherwise. When not provided,
any received event is acceptable. When the predicate returns
false, the wait for a suitable event continues, subject to the
same overall timeout.
Returns:
A tuple of (1) an instance of one of the Event classes defined for
the client, or None if timeout_secs passed before anything arrived,
and (2) a string indicating any occurring errors. The string is
empty when no error occurs.
"""
if self.controller_broker_id is None:
raise UsageError('cannot receive without established peering')
timeout = timeout_secs or CONFIG.getint('client', 'request_timeout_secs')
old_timeout = self.wsock.gettimeout()
try:
self.wsock.settimeout(timeout)
while True:
# Reading the event proceeds in three steps:
# (1) read data from the websocket
# (2) ensure it's a data message
# (3) try to extract data message payload as event
try:
msg = DataMessage.unserialize(self.wsock.recv())
except TypeError as err:
return None, 'protocol data error with controller {}:{}: {}'.format(
self.controller_host, self.controller_port, err)
                except websocket.WebSocketTimeoutException:
return None, 'websocket connection to {}:{} timed out'.format(
self.controller_host, self.controller_port)
except Exception as err:
LOG.exception('unexpected error')
return None, 'unexpected error with controller {}:{}: {}'.format(
self.controller_host, self.controller_port, err)
try:
# Events are a specially laid-out vector of vectors:
# https://docs.zeek.org/projects/broker/en/current/web-socket.html#encoding-of-zeek-events
evt = ZeekEvent.from_vector(msg.data)
# Turn Broker-level event data into a zeekclient.event.Event:
res = Registry.make_event(evt.name, *evt.args)
if res is not None and (filter_pred is None or filter_pred(res)):
return res, ''
                except TypeError as err:
                    return None, ('protocol data error with controller {}:{}: '
                                  'invalid event data {}: {}'.format(
                                      self.controller_host, self.controller_port,
                                      repr(msg.data), err))
# This wasn't the event type we wanted, try again.
finally:
self.wsock.settimeout(old_timeout)
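    # A usage sketch for receive(), not part of the API: it assumes a connected
    # Controller instance `ctl`. The predicate narrows the result to one event
    # type; any other arriving event is skipped until the timeout elapses:
    #
    #     from zeekclient.events import GetNodesResponse
    #     evt, err = ctl.receive(
    #         timeout_secs=10,
    #         filter_pred=lambda e: isinstance(e, GetNodesResponse))
    #     if evt is None:
    #         print('error:', err)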
def transact(self, request_type, response_type, *request_args, reqid=None):
"""Pairs publishing a request event with receiving its response event.
This is a wrapper around :meth:`.Controller.publish()` with subsequent
:meth:`.Controller.receive()`, with automatic provision of a request ID
in the request event, and validation of a matching request ID in the
        response. Mismatching response events are ignored, and the lack of a
        suitable event within the timeout period leads to an empty result with
        a corresponding error message, just like :meth:`.Controller.receive()`.
        The function works only with request and response event types that take
        a "reqid" string as their first argument. The function verifies this
        lightly, just by looking at the name of the first argument. See
`zeekclient.events` for suitable event types.
Raises UsageError when invoked without an earlier connect().
Args:
request_type (zeekclient.event.Event class): the request event type.
response_type (zeekclient.event.Event class): the response event type.
request_args: any event arguments in addition to the initial "reqid" string.
reqid (str): the request ID to use in the request event, and expect
in the response event. When omitted, the function produces its
own ID.
Returns:
The same as Controller.receive(): tuple of an event instance
and a string indicating any error.
"""
# Verify that the first arguments of the event types are actually a
# request ID -- we just look at the name:
if request_type.ARG_NAMES[0] != 'reqid':
return None, 'type error: event type {} does not have request ID'.format(
request_type.__name__)
if response_type.ARG_NAMES[0] != 'reqid':
return None, 'type error: event type {} does not have request ID'.format(
response_type.__name__)
if reqid is None:
reqid = make_uuid()
evt = request_type(reqid, *request_args)
self.publish(evt)
def is_response(evt):
try:
return isinstance(evt, response_type) and evt.reqid.to_py() == reqid
except AttributeError:
return False
return self.receive(filter_pred=is_response) | zeek-client | /zeek-client-1.3.0.tar.gz/zeek-client-1.3.0/zeekclient/controller.py | controller.py |
import abc
import datetime
import enum
import ipaddress
import json
import re
class Type(abc.ABC):
"""Base class for types we can instantiate from or render to Broker's JSON
data model. For details, see:
https://docs.zeek.org/projects/broker/en/current/web-socket.html
"""
def serialize(self, pretty=False):
"""Serializes the object to Broker-compatible wire data.
pretty: When True, pretty-prints the resulting JSON.
Returns: raw message data ready to transmit.
"""
indent = 4 if pretty else None
return json.dumps(self.to_broker(), indent=indent, sort_keys=True)
def __eq__(self, other):
"""The default equality method for brokertypes.
This implements member-by-member comparison based on the object's
__dict__. The types complement this by each implementing their own
__hash__() method.
"""
if type(self) != type(other):
return NotImplemented
if len(self.__dict__) != len(other.__dict__):
return False
for attr in self.__dict__:
if self.__dict__[attr] != other.__dict__[attr]:
return False
return True
def __repr__(self):
return self.serialize()
def __str__(self):
return self.serialize(pretty=True)
@classmethod
def unserialize(cls, data): # pylint: disable=unused-argument
"""Instantiates an object of this class from Broker wire data.
        This assumes the message content is JSON and first unserializes it into
        a Python data structure. It then calls from_broker() to instantiate an
        object of this class from it.
data: raw wire WebSocket message content
Returns: the resulting brokertype object.
Raises: TypeError in case of invalid data. The exception's message
provides details.
"""
try:
obj = json.loads(data)
except json.JSONDecodeError as err:
raise TypeError('cannot parse JSON data for {}: {} -- {}'.format(
cls.__name__, err.msg, data)) from err
cls.check_broker_data(obj)
try:
# This may raise TypeError directly, which we pass on to the caller
return cls.from_broker(obj)
except (IndexError, KeyError, ValueError) as err:
raise TypeError('invalid data for {}: {}'.format(
cls.__name__, data)) from err
@abc.abstractmethod
def to_py(self): # pylint: disable=no-self-use
"""Returns a Python-"native" rendering of the object.
For most brokertypes this will be a native Python type (such as int or
str), but for some types the closest thing to a natural rendering of the
value in Python will be the object itself.
Return: a Python value
"""
return None
@abc.abstractmethod
def to_broker(self): # pylint: disable=no-self-use
"""Returns a Broker-JSON-compatible Python data structure representing
a value of this type.
"""
return None
@classmethod
@abc.abstractmethod
def check_broker_data(cls, data): # pylint: disable=unused-argument
"""Checks the Broker data for compliance with the expected type.
If you use unserialize() to obtain objects, you can ignore this
method. The module invokes it under the hood.
data: a Python data structure resulting from json.loads().
Raises TypeError in case of problems.
"""
@classmethod
@abc.abstractmethod
def from_broker(cls, data): # pylint: disable=unused-argument
"""Returns an instance of the type given Broker's JSON data.
This is a low-level method that you likely don't want to use. Consider
unserialize() instead: it handles raw wire data unserialization,
type-checking, and exception canonicalization.
data: a JSON-unserialized Python data structure.
Raises: type-specific exceptions resulting from value construction, such
as TypeError, KeyError, or ValueError.
"""
return None
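# A round-trip sketch for the serialize()/unserialize() pair above, using the
# Count type defined below:
#
#     >>> Count(42).serialize()
#     '{"@data-type": "count", "data": 42}'
#     >>> Count.unserialize('{"@data-type": "count", "data": 42}') == Count(42)
#     True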
# ---- Basic types -----------------------------------------------------
class DataType(Type):
"""Base class for data types known to Broker."""
def __lt__(self, other):
if not isinstance(other, DataType):
raise TypeError("'<' comparison not supported between instances "
"of '{}' and '{}'".format(type(self).__name__,
type(other).__name__))
        # Supporting comparison across data types allows us to sort the members
        # of a set or the keys of a table. We simply compare the type names:
if type(self) != type(other):
return type(self).__name__ < type(other).__name__
return NotImplemented
@classmethod
def check_broker_data(cls, data):
if not isinstance(data, dict):
raise TypeError('invalid data layout for Broker data: not an object')
if '@data-type' not in data or 'data' not in data:
raise TypeError('invalid data layout for Broker data: required keys missing')
class NoneType(DataType):
"""Broker's representation of an absent value."""
def __init__(self, _=None):
# It helps to have a constructor that can be passed None explicitly, for
# symmetry with other constructors below.
pass
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return False
def __hash__(self):
return hash(None)
def to_py(self):
return None
def to_broker(self):
return {
'@data-type': 'none',
'data': {},
}
@classmethod
def from_broker(cls, data):
return NoneType()
class Boolean(DataType):
def __init__(self, value):
self._value = bool(value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._value < other._value
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._value
def to_broker(self):
return {
'@data-type': 'boolean',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return Boolean(data['data'])
class Count(DataType):
def __init__(self, value):
self._value = int(value)
if self._value < 0:
raise ValueError('Count can only hold non-negative values')
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._value < other._value
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._value
def to_broker(self):
return {
'@data-type': 'count',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return Count(data['data'])
class Integer(DataType):
def __init__(self, value):
self._value = int(value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._value < other._value
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._value
def to_broker(self):
return {
'@data-type': 'integer',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return Integer(data['data'])
class Real(DataType):
def __init__(self, value):
self._value = float(value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._value < other._value
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._value
def to_broker(self):
return {
'@data-type': 'real',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return Real(data['data'])
class Timespan(DataType):
REGEX = re.compile(r'(\d+(\.\d+)?)(ns|ms|s|min|h|d)')
class Unit(enum.Enum):
"""The time unit shorthands supported by Broker."""
NS = 'ns'
MS = 'ms'
S = 's'
MIN = 'min'
H = 'h'
D = 'd'
def __init__(self, value):
if isinstance(value, datetime.timedelta):
self._value = Timespan.timedelta_to_broker_timespan(value)
self._td = value
else:
self._value = str(value)
self._td = Timespan.broker_to_timedelta(self._value)
def __eq__(self, other):
# Make equality defined by the timedelta instances, not the
# more variable string data (e.g. 1000ms == 1s):
if type(self) != type(other):
return False
return self._td == other._td
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._td < other._td
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._td
def to_broker(self):
return {
'@data-type': 'timespan',
'data': Timespan.timedelta_to_broker_timespan(self._td),
}
@classmethod
def from_broker(cls, data):
return Timespan(cls.broker_to_timedelta(data['data']))
@classmethod
def broker_to_timedelta(cls, data):
"""Converts Broker-compatible timespan string into timedelta object."""
mob = cls.REGEX.fullmatch(data)
if mob is None:
raise ValueError("'{}' is not an acceptable Timespan value"
.format(data))
counter = float(mob[1])
unit = Timespan.Unit(mob[3])
if unit == Timespan.Unit.NS:
return datetime.timedelta(microseconds=counter / 1e3)
if unit == Timespan.Unit.MS:
return datetime.timedelta(milliseconds=counter)
if unit == Timespan.Unit.S:
return datetime.timedelta(seconds=counter)
if unit == Timespan.Unit.MIN:
return datetime.timedelta(minutes=counter)
if unit == Timespan.Unit.H:
return datetime.timedelta(hours=counter)
if unit == Timespan.Unit.D:
if counter % 7 == 0:
return datetime.timedelta(weeks=counter / 7)
return datetime.timedelta(days=counter)
assert False, "unhandled timespan unit '{}'".format(unit)
@classmethod
def timedelta_to_broker_timespan(cls, tdelta):
"""Converts timedelta object to Broker-compatible timespan string."""
# We use the smallest unit that's non-zero in the timespan (which has
# only three relevant members: .microseconds, .seconds, and .days)
# and map it to the closest Broker unit.
def format(val, unit):
# Don't say 10.0, say 10:
val = int(val) if float(val).is_integer() else val
return '{}{}'.format(val, unit)
if tdelta.microseconds != 0:
if tdelta.microseconds % 1000 == 0:
return format(tdelta.microseconds / 1e3
+ tdelta.seconds * 1e3
+ tdelta.days * 86400 * 1e3, 'ms')
# There are no microseconds in the Broker data model,
# so go full plaid to nanoseconds.
return format(tdelta.microseconds * 1e3
+ tdelta.seconds * 1e9
+ tdelta.days * 86400 * 1e9, 'ns')
if tdelta.seconds != 0:
if tdelta.seconds % 3600 == 0:
return format(tdelta.seconds / 3600 + tdelta.days * 24, 'h')
if tdelta.seconds % 60 == 0:
return format(tdelta.seconds / 60 + tdelta.days * 1440, 'min')
return format(tdelta.seconds + tdelta.days * 86400, 's')
return format(tdelta.days, 'd')
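# A conversion sketch for the Timespan helpers above. Equality is defined via
# the underlying timedelta, so differently spelled spans compare equal:
#
#     >>> Timespan.broker_to_timedelta('90s')
#     datetime.timedelta(seconds=90)
#     >>> Timespan.timedelta_to_broker_timespan(datetime.timedelta(minutes=2))
#     '2min'
#     >>> Timespan('1000ms') == Timespan('1s')
#     True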
class Timestamp(DataType):
def __init__(self, value):
if isinstance(value, datetime.datetime):
self._value = Timestamp.to_broker_iso8601(value)
self._ts = value
else:
self._value = str(value)
# Raise value error if not formatted acceptably
self._ts = datetime.datetime.fromisoformat(self._value)
def __eq__(self, other):
# Make equality defined by the timestamp instances, not the
# more variable ISO 8601 data:
if type(self) != type(other):
return False
return self._ts == other._ts
    def __lt__(self, other):
        res = super().__lt__(other)
        if res != NotImplemented:
            return res
        return self._ts < other._ts
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._ts
def to_broker(self):
return {
'@data-type': 'timestamp',
'data': Timestamp.to_broker_iso8601(self._ts)
}
@classmethod
def from_broker(cls, data):
return Timestamp(data['data'])
@classmethod
def to_broker_iso8601(cls, dtime):
# The Broker docs say the timestamp looks like this:
# "2022-04-10T07:00:00.000" -- meaning that given Python's
# microseconds-granularity rendering we need to chop off the last
# three digits:
return dtime.isoformat(sep='T', timespec='microseconds')[:-3]
class String(DataType):
def __init__(self, value):
self._value = str(value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._value < other._value
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._value
def to_broker(self):
return {
'@data-type': 'string',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return String(data['data'])
class Enum(DataType):
def __init__(self, value):
self._value = str(value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._value < other._value
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._value
def to_broker(self):
return {
'@data-type': 'enum-value',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return Enum(data['data'])
class Address(DataType):
def __init__(self, value):
self._value = str(value) # A str or ipaddress.IPv[46]Address
# Throws a derivative of ValueError when not v4/v6 address:
self._addr = ipaddress.ip_address(self._value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._addr < other._addr
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._addr
def to_broker(self):
return {
'@data-type': 'address',
'data': self._value,
}
@classmethod
def from_broker(cls, data):
return Address(data['data'])
class Subnet(DataType):
def __init__(self, value):
self._value = str(value) # A str or ipaddress.IPv[46]Network
# Throws a derivative of ValueError when not v4/v6 network:
self._subnet = ipaddress.ip_network(self._value)
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
return self._subnet < other._subnet
def __hash__(self):
return hash(self._value)
def to_py(self):
return self._subnet
def to_broker(self):
return {
'@data-type': 'subnet',
'data': str(self._subnet),
}
@classmethod
def from_broker(cls, data):
return Subnet(data['data'])
class Port(DataType):
class Proto(enum.Enum):
UNKNOWN = '?'
TCP = 'tcp'
UDP = 'udp'
ICMP = 'icmp'
def __init__(self, number, proto=Proto.TCP):
self.number = int(number)
self.proto = proto
if not isinstance(proto, self.Proto):
raise TypeError('Port constructor requires Proto enum')
if self.number < 1 or self.number > 65535:
raise ValueError("Port number '{}' invalid".format(self.number))
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
        # Order by protocol first, then by port number:
        order = ['?', 'tcp', 'udp', 'icmp']
        if self.proto != other.proto:
            return order.index(self.proto.value) < order.index(other.proto.value)
        return self.number < other.number
def __hash__(self):
return hash((self.number, self.proto))
def to_py(self):
return self
def to_broker(self):
return {
'@data-type': 'port',
'data': '{}/{}'.format(self.number, self.proto.value),
}
@classmethod
def from_broker(cls, data):
return Port(data['data'].split('/', 1)[0],
Port.Proto(data['data'].split('/', 1)[1]))
class Vector(DataType):
def __init__(self, elements=None):
self._elements = elements or []
if not isinstance(self._elements, tuple) and not isinstance(self._elements, list):
raise TypeError('Vector initialization requires tuple or list data')
if not all(isinstance(elem, Type) for elem in self._elements):
raise TypeError('Non-empty Vector construction requires brokertype values.')
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
        # Lexicographic comparison: the first differing element decides.
        for el1, el2 in zip(self._elements, other._elements):
            if el1 < el2:
                return True
            if el2 < el1:
                return False
        return len(self._elements) < len(other._elements)
def __hash__(self):
return hash(tuple(self._elements))
def __iter__(self):
return iter(self._elements)
def __len__(self):
return len(self._elements)
def __getitem__(self, idx):
return self._elements[idx]
def to_py(self):
return [elem.to_py() for elem in self._elements]
def to_broker(self):
return {
'@data-type': 'vector',
'data': [elem.to_broker() for elem in self._elements],
}
@classmethod
def from_broker(cls, data):
res = Vector()
for elem in data['data']:
res._elements.append(from_broker(elem))
return res
class Set(DataType):
def __init__(self, elements=None):
self._elements = elements or set()
if not isinstance(self._elements, set):
raise TypeError('Set initialization requires set data')
if not all(isinstance(elem, Type) for elem in self._elements):
raise TypeError('Non-empty Set construction requires brokertype values.')
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
        # Compare as sorted sequences: the first differing element decides.
        for el1, el2 in zip(sorted(self._elements), sorted(other._elements)):
            if el1 < el2:
                return True
            if el2 < el1:
                return False
        return len(self._elements) < len(other._elements)
def __hash__(self):
return hash(tuple(sorted(self._elements)))
def __iter__(self):
return iter(self._elements)
def __len__(self):
return len(self._elements)
def __contains__(self, key):
return key in self._elements
def to_py(self):
return set(elem.to_py() for elem in self._elements)
def to_broker(self):
return {
'@data-type': 'set',
'data': [elem.to_broker() for elem in sorted(self._elements)],
}
@classmethod
def from_broker(cls, data):
res = Set()
for elem in data['data']:
res._elements.add(from_broker(elem))
return res
class Table(DataType):
def __init__(self, elements=None):
self._elements = elements or {}
if not isinstance(self._elements, dict):
raise TypeError('Table initialization requires dict data')
keys_ok = all(isinstance(elem, Type) for elem in self._elements.keys())
vals_ok = all(isinstance(elem, Type) for elem in self._elements.values())
if not keys_ok or not vals_ok:
raise TypeError('Non-empty Table construction requires brokertype values.')
def __lt__(self, other):
res = super().__lt__(other)
if res != NotImplemented:
return res
        # Compare keys in sorted order first, then the corresponding values.
        for key1, key2 in zip(sorted(self._elements), sorted(other._elements)):
            if key1 < key2:
                return True
            if key2 < key1:
                return False
            if self._elements[key1] < other._elements[key2]:
                return True
            if other._elements[key2] < self._elements[key1]:
                return False
        return len(self._elements) < len(other._elements)
def __hash__(self):
        return hash(tuple((key, self._elements[key]) for key in sorted(self._elements)))
def __iter__(self):
return iter(self._elements)
def __len__(self):
return len(self._elements)
def __contains__(self, key):
return key in self._elements
def keys(self):
return self._elements.keys()
def values(self):
return self._elements.values()
def items(self):
return self._elements.items()
def to_py(self):
res = {}
for key, val in self._elements.items():
res[key.to_py()] = val.to_py()
return res
def to_broker(self):
return {
'@data-type': 'table',
'data': [{'key': key.to_broker(), 'value': self._elements[key].to_broker()}
for key in sorted(self._elements)]
}
@classmethod
def from_broker(cls, data):
res = Table()
for elem in data['data']:
res._elements[from_broker(elem['key'])] = from_broker(elem['value'])
return res
# ---- Special types ---------------------------------------------------
class ZeekEvent(Vector):
"""Broker's event representation, as a vector of vectors.
This specialization isn't an official type in Broker's hierarchy: there's no
distinguishing @data-type for it. Zeek events are a specific interpretation
of nested vectors.
See Broker's websockets docs for an example:
https://docs.zeek.org/projects/broker/en/current/web-socket.html#encoding-of-zeek-events
"""
def __init__(self, name, *args):
super().__init__()
self.name = name.to_py() if isinstance(name, String) else str(name)
self.args = list(args) or [] # list here is to avoid tuple/list type confusion
for arg in self.args:
if not isinstance(arg, Type):
raise TypeError('ZeekEvent constructor requires brokertype arguments')
def to_broker(self):
return {
'@data-type': 'vector',
'data': [
{
"@data-type": "count",
"data": 1
},
{
"@data-type": "count",
"data": 1
},
{
"@data-type": "vector",
"data": [
String(self.name).to_broker(),
{
"@data-type": "vector",
"data": [arg.to_broker() for arg in self.args],
},
],
},
],
}
@classmethod
def from_vector(cls, vec):
"""Special case for an existing Vector instance: recast as Zeek event."""
if not isinstance(vec, Vector):
raise TypeError('cannot convert non-vector to Zeek event')
if (len(vec) < 3 or
not isinstance(vec[2], Vector) or
len(vec[2]) < 2 or
not isinstance(vec[2][0], String) or
not isinstance(vec[2][1], Vector)):
raise TypeError('invalid vector layout for Zeek event')
name = vec[2][0].to_py()
args = vec[2][1]
# TODO: Extend to handle metadata
return ZeekEvent(name, *args._elements)
@classmethod
def from_broker(cls, data):
name = data['data'][2]['data'][0]['data']
res = ZeekEvent(name)
for argdata in data['data'][2]['data'][1]['data']:
res.args.append(from_broker(argdata))
return res
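# A layout sketch for the vector-of-vectors encoding above (per the Broker
# docs linked in the class docstring): the two leading counts are format and
# event-kind markers, and index 2 holds the event name plus its arguments:
#
#     >>> ZeekEvent('ping', Count(1)).to_broker()['data'][2]['data'][0]
#     {'@data-type': 'string', 'data': 'ping'}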
# ---- Message types ---------------------------------------------------
class MessageType(Type):
"""Base class for Broker messages."""
@classmethod
def check_broker_data(cls, data):
if not isinstance(data, dict):
raise TypeError('invalid data layout for Broker {}: not an object'
.format(cls.__name__))
if 'type' not in data:
raise TypeError('invalid data layout for Broker {}: required keys missing'
.format(cls.__name__))
class HandshakeMessage(MessageType):
"""The handshake message sent by the client.
This is just a list of topics to subscribe to. Clients won't receive it.
"""
def __init__(self, topics=None):
self.topics = []
if topics:
if not isinstance(topics, tuple) and not isinstance(topics, list):
raise TypeError('HandshakeMessage construction requires a '
'topics list')
for topic in topics:
if isinstance(topic, str):
self.topics.append(topic)
continue
if isinstance(topic, String):
self.topics.append(topic.to_py())
continue
raise TypeError('topics for HandshakeMessage must be Python or '
'brokertype strings')
def to_py(self):
return self
def to_broker(self):
return self.topics
@classmethod
def check_broker_data(cls, data):
        if not isinstance(data, tuple) and not isinstance(data, list):
            raise TypeError('invalid data layout for HandshakeMessage: not an '
                            'array')
@classmethod
def from_broker(cls, data):
return HandshakeMessage(data)
class HandshakeAckMessage(MessageType):
"""The ACK message returned to the client in response to the handshake.
Clients won't need to send this.
"""
def __init__(self, endpoint, version):
self.endpoint = endpoint
self.version = version
def to_py(self):
return self
def to_broker(self):
return {
'type': 'ack',
'endpoint': self.endpoint,
'version': self.version,
}
@classmethod
def check_broker_data(cls, data):
MessageType.check_broker_data(data)
for key in ('type', 'endpoint', 'version'):
if key not in data:
raise TypeError('invalid data layout for HandshakeAckMessage: '
'required key "{}" missing'.format(key))
@classmethod
def from_broker(cls, data):
return HandshakeAckMessage(data['endpoint'], data['version'])
class DataMessage(MessageType):
def __init__(self, topic, data):
self.topic = topic
self.data = data
def to_py(self):
return self
def to_broker(self):
bdata = self.data.to_broker()
return {
'type': 'data-message',
'topic': self.topic,
'@data-type': bdata['@data-type'],
'data': bdata['data'],
}
@classmethod
def check_broker_data(cls, data):
MessageType.check_broker_data(data)
for key in ('type', 'topic', '@data-type', 'data'):
if key not in data:
raise TypeError('invalid data layout for DataMessage: '
'required key "{}" missing'.format(key))
@classmethod
def from_broker(cls, data):
return DataMessage(data['topic'], from_broker({
'@data-type': data['@data-type'],
'data': data['data']}))
class ErrorMessage(MessageType):
def __init__(self, code, context):
self.code = code # A string representation of a Broker error code
self.context = context
def to_py(self):
return self
def to_broker(self):
return {
'type': 'error',
'code': self.code,
'context': self.context,
}
@classmethod
def check_broker_data(cls, data):
MessageType.check_broker_data(data)
for key in ('type', 'code', 'context'):
if key not in data:
raise TypeError('invalid data layout for ErrorMessage: '
'required key "{}" missing'.format(key))
@classmethod
def from_broker(cls, data):
return ErrorMessage(data['code'], data['context'])
# ---- Factory functions -----------------------------------------------
# This maps the types expressed in Broker's JSON representation to those
# implemented in this module.
_broker_typemap = {
'none': NoneType,
'address': Address,
'boolean': Boolean,
'count': Count,
'enum-value': Enum,
'integer': Integer,
'port': Port,
'real': Real,
'set': Set,
'string': String,
'subnet': Subnet,
'table': Table,
'timespan': Timespan,
'timestamp': Timestamp,
'vector': Vector,
}
# This maps Broker's message types to ones implemented in this module. A
# separate map, because Broker expresses the type information differently from
# the above.
_broker_messagemap = {
'data-message': DataMessage,
'error': ErrorMessage,
}
def unserialize(data):
"""A factory that instantiates a brokertype value from Broker wire data.
    This assumes the message content is JSON and first unserializes it into a
    Python data structure. It then calls from_broker() to instantiate an object
    of the appropriate class from it.
"""
try:
obj = json.loads(data)
except json.JSONDecodeError as err:
raise TypeError('cannot parse JSON data: {} -- {}'.format(
err.msg, data)) from err
return from_broker(obj)
def from_broker(data):
"""A factory that turns Python-level data into brokertype instances.
Consider using unserialize() instead, it starts from raw message data, and
provides better error handling.
data: a JSON-unserialized Python data structure.
Returns: a brokerval instance
Raises: TypeError in case of invalid input data.
"""
if not isinstance(data, dict):
raise TypeError('invalid data layout for Broker data: not an object')
try:
typ = _broker_messagemap[data['type']]
typ.check_broker_data(data)
return typ.from_broker(data)
except KeyError:
pass
try:
typ = _broker_typemap[data['@data-type']]
typ.check_broker_data(data)
return typ.from_broker(data)
except KeyError as err:
raise TypeError('unrecognized Broker type: {}'.format(data)) from err
# Python types we can directly map to ones in this module, used by
# from_py(). This is imperfect since, for example, no non-negative integer type
# exists that maps to Count, but a generic factory adds convenience in many
# situations. Callers who need different mappings need to implement code that
# converts their data structures explicitly.
_python_typemap = {
type(None): NoneType,
bool: Boolean,
datetime.timedelta: Timespan,
datetime.datetime: Timestamp,
dict: Table,
float: Real,
int: Integer,
ipaddress.IPv4Address: Address,
ipaddress.IPv6Address: Address,
ipaddress.IPv4Network: Subnet,
ipaddress.IPv6Network: Subnet,
list: Vector,
set: Set,
str: String,
tuple: Vector,
}
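# A usage sketch for from_py() below: type inference covers the common cases,
# while an explicit typ= handles ambiguous ones such as Count vs. Integer:
#
#     >>> from_py(True)
#     {"@data-type": "boolean", "data": true}
#     >>> from_py(3, typ=Count) == Count(3)
#     True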
def from_py(data, typ=None, check_none=True):
"""Instantiates a brokertype object from the given Python data.
Some Python types map naturally to Broker ones, such as bools and strs. For
those, you can simply provide a value and the function will return the
appropriate brokertype value. For some types this mapping isn't clear, and
you need to specify the type explicitly. For composite types like
sets or dicts the approach applies recursively to their member elements.
When no type match is found, or the type conversion isn't feasible, this
raises a TypeError. This can happen for types that don't have an immediate
equivalent (e.g., Python has no unsigned integers).
This function currently supports only types constructed from a single
argument.
data: a Python-"native" value, such as a str, int, or bool.
typ (Type): if provided, the function attempts to instantiate an object of
this type with the given data. By default, the function attempts type
inference.
check_none (bool): when True (the default), the function checks whether data
is None, and shortcuts to returning a NoneType instance.
Returns: a brokertype instance.
Raises: TypeError in case problems arise in the type mapping or value
construction.
"""
if data is None and check_none:
return NoneType()
if typ is not None:
if not issubclass(typ, Type):
raise TypeError('not a brokertype: {}'.format(typ.__name__))
else:
try:
typ = _python_typemap[type(data)]
except KeyError as err:
raise TypeError('cannot map Python type {} to Broker type'.format(type(data))) from err
if typ == Table:
res = Table()
for key, val in data.items():
res._elements[from_py(key)] = from_py(val)
return res
if typ == Vector:
res = Vector()
for elem in data:
res._elements.append(from_py(elem))
return res
if typ == Set:
res = Set()
for elem in data:
res._elements.add(from_py(elem))
return res
# For others the constructors of the types in this module should naturally
# work with the provided value.
return typ(data) | zeek-client | /zeek-client-1.3.0.tar.gz/zeek-client-1.3.0/zeekclient/brokertypes.py | brokertypes.py |
from .logs import LOG
from .brokertypes import (
Boolean,
Set,
String,
Type,
Vector,
ZeekEvent,
from_py,
)
from .types import (
SerializableZeekType,
)
class Event(SerializableZeekType):
NAME = None # Name of the event, e.g. "Management::Controller::API::deploy_request"
ARG_NAMES = [] # Names of the arguments, e.g. "reqid"
ARG_TYPES = [] # Types in Python, e.g. str
def __init__(self, *args):
"""Creates a Zeek event object.
        This expects the number of arguments declared in ARG_NAMES above. The
        event name is not required since it's defined implicitly via the event
        class receiving the arguments.
Raises:
TypeError: when the given arguments, or number of arguments, don't
match the expected ARG_TYPES or their number.
"""
if len(args) != len(self.ARG_NAMES):
raise TypeError('event argument length mismatch: have %d, expected %d'
% (len(args), len(self.ARG_NAMES)))
self.args = []
for idx, arg in enumerate(args):
# If the argument's type matches the required Broker type, we're done.
if isinstance(arg, self.ARG_TYPES[idx]):
self.args.append(arg)
continue
try:
# When creating an event it can be convenient for the caller to
# pass Python-native types. See if we can create brokertypes
# types from them, to match the types actually specified when we
# created the event classes.
maybe_arg = from_py(arg)
except TypeError as err:
raise TypeError('event argument type mismatch: argument '
'{} is {}, {}'.format(idx+1, type(arg), err)) from err
# Again: if we now have a type match, we're done.
if isinstance(maybe_arg, self.ARG_TYPES[idx]):
self.args.append(maybe_arg)
continue
raise TypeError('event argument type mismatch: argument '
'{} is {}, should be {}'.format(
idx+1, type(arg), self.ARG_TYPES[idx]))
def __getattr__(self, name):
"""Allow attribute-like access to event arguments."""
try:
idx = self.ARG_NAMES.index(name)
return self.args[idx]
except ValueError as err:
raise AttributeError('event type {} has no "{}" argument'.format(
self.NAME, name)) from err
def __str__(self):
# A list of pairs (argument name, typename)
zeek_style_args = zip(self.ARG_NAMES, [str(type(arg)) for arg in self.args])
# That list, with each item now a string "<name>: <typename"
zeek_style_arg_strings = [': '.join(arg) for arg in zeek_style_args]
# A Zeek-looking event signature
return self.NAME + '(' + ', '.join(zeek_style_arg_strings) + ')'
def to_brokertype(self):
return ZeekEvent(self.NAME, *self.args)
@classmethod
def from_brokertype(cls, data):
        # data is expected to be a brokertypes.ZeekEvent; unpack its arguments:
        return Registry.make_event(data.name, *data.args)
class Registry:
"""Functionality for event types and to instantiate typed events from data."""
# Map from Zeek-level event names to Event classes. The make_event()
# function uses this map to instantiate the right event class from
# received Broker data.
EVENT_TYPES = {}
@staticmethod
def make_event_class(name, arg_names, arg_types):
"""Factory function to generate a Zeek event class.
Given an event name, event arguments, and corresponding argument types,
the function generates a new Event class, registers it, and returns it.
"""
res = type(name, (Event,), {})
if len(arg_names) != len(arg_types):
raise TypeError('error creating event type {}: number of event '
'argument names and types must match ({}/{})'.format(
name, len(arg_names), len(arg_types)))
for idx, typ in enumerate(arg_types):
if not issubclass(typ, Type):
raise TypeError('event type creation error: argument {}, '
'"{}", is not a brokertype class'.format(
idx+1, arg_names[idx]))
res.NAME = name
res.ARG_NAMES = arg_names
res.ARG_TYPES = arg_types
# Register the new event type
Registry.EVENT_TYPES[name] = res
return res
@staticmethod
def make_event(name, *args):
"""This method allows constructing an Event instance from its name."""
if name not in Registry.EVENT_TYPES:
LOG.warning('received unexpected event "%s", skipping', name)
return None
LOG.debug('received event "%s"', name)
return Registry.EVENT_TYPES[name](*args)
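# A definition sketch mirroring the event classes below. make_event_class()
# builds a typed Event subclass whose constructor validates (and, via
# from_py(), coerces) its arguments. The names here are illustrative only:
#
#     PingRequest = Registry.make_event_class(
#         'Example::API::ping_request', ('reqid',), (String,))
#     evt = PingRequest('req-1')           # str coerced to brokertypes.String
#     assert evt.reqid.to_py() == 'req-1'  # attribute access via ARG_NAMES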
# Any Zeek object/record that's an event argument gets represented as a
# tuple below, reflecting Broker's representation thereof.
DeployRequest = Registry.make_event_class(
'Management::Controller::API::deploy_request',
('reqid',), (String,))
DeployResponse = Registry.make_event_class(
'Management::Controller::API::deploy_response',
('reqid', 'results'), (String, Vector))
GetConfigurationRequest = Registry.make_event_class(
'Management::Controller::API::get_configuration_request',
('reqid', 'deployed'), (String, Boolean))
GetConfigurationResponse = Registry.make_event_class(
'Management::Controller::API::get_configuration_response',
('reqid', 'result'), (String, Vector))
GetIdValueRequest = Registry.make_event_class(
'Management::Controller::API::get_id_value_request',
('reqid', 'id', 'nodes'), (String, String, Set))
GetIdValueResponse = Registry.make_event_class(
'Management::Controller::API::get_id_value_response',
('reqid', 'results'), (String, Vector))
GetInstancesRequest = Registry.make_event_class(
'Management::Controller::API::get_instances_request',
('reqid',), (String,))
GetInstancesResponse = Registry.make_event_class(
'Management::Controller::API::get_instances_response',
('reqid', 'result'), (String, Vector))
GetNodesRequest = Registry.make_event_class(
'Management::Controller::API::get_nodes_request',
('reqid',), (String,))
GetNodesResponse = Registry.make_event_class(
'Management::Controller::API::get_nodes_response',
('reqid', 'results'), (String, Vector))
RestartRequest = Registry.make_event_class(
'Management::Controller::API::restart_request',
('reqid', 'nodes'), (String, Set))
RestartResponse = Registry.make_event_class(
'Management::Controller::API::restart_response',
('reqid', 'results'), (String, Vector))
StageConfigurationRequest = Registry.make_event_class(
'Management::Controller::API::stage_configuration_request',
('reqid', 'config'), (String, Vector))
StageConfigurationResponse = Registry.make_event_class(
'Management::Controller::API::stage_configuration_response',
('reqid', 'results'), (String, Vector))
TestNoopRequest = Registry.make_event_class(
'Management::Controller::API::test_noop_request',
('reqid',), (String,))
TestTimeoutRequest = Registry.make_event_class(
'Management::Controller::API::test_timeout_request',
('reqid', 'with_state'), (String, Boolean))
TestTimeoutResponse = Registry.make_event_class(
'Management::Controller::API::test_timeout_response',
('reqid', 'result'), (String, Vector)) | zeek-client | /zeek-client-1.3.0.tar.gz/zeek-client-1.3.0/zeekclient/events.py | events.py |
import argparse
import configparser
import contextlib
import ipaddress
import json
import os
import sys
import traceback
from . import brokertypes as bt
from . import controller
from .config import CONFIG
from .consts import CONFIG_FILE
from .events import (
DeployRequest,
DeployResponse,
GetConfigurationRequest,
GetConfigurationResponse,
GetIdValueRequest,
GetIdValueResponse,
GetInstancesRequest,
GetInstancesResponse,
GetNodesRequest,
GetNodesResponse,
RestartRequest,
RestartResponse,
StageConfigurationRequest,
StageConfigurationResponse,
TestTimeoutRequest,
TestTimeoutResponse
)
from .logs import LOG
from .types import (
Enum,
ClusterRole,
Configuration,
Instance,
ManagementRole,
NodeStatus,
NodeOutputs,
Result
)
# For unit-testing, a central place to adjust where reads from stdin come from
# and writes to stdout go to. Fiddling with sys.stdin/sys.stdout directly in the
# tests can be tricky.
STDIN = sys.stdin
STDOUT = sys.stdout
# Broker's basic types aren't JSON-serializable, so patch that up
# in this json.dumps() wrapper for JSON serialization of any object.
# Could go into utils.py, but it's easier here to keep free of cyclic
# dependencies.
def json_dumps(obj):
def default(obj):
# Check specific Python types:
if isinstance(obj, ipaddress.IPv4Address):
return str(obj)
if isinstance(obj, ipaddress.IPv6Address):
return str(obj)
# Specific zeek-client types (types.py):
if isinstance(obj, Enum):
return obj.to_json_data()
# Specific brokertypes:
if isinstance(obj, bt.Port):
return str(obj.number)
        if isinstance(obj, bt.Timespan):
            # Render as the Broker-style timespan string, e.g. "30s":
            return obj.to_broker()['data']
# Fallback: assume the type's own Python representation is right.
# json.dumps() will complain when that does not work.
if isinstance(obj, bt.Type):
return obj.to_py()
raise TypeError('cannot serialize {} ({})'.format(type(obj), str(obj)))
indent = 2 if CONFIG.getboolean('client', 'pretty_json') else None
return json.dumps(obj, default=default, sort_keys=True, indent=indent)
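# A usage sketch for json_dumps(), assuming the pretty_json setting is off:
#
#     >>> json_dumps({'port': bt.Port(2150, bt.Port.Proto.TCP)})
#     '{"port": "2150"}'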
def create_controller():
try:
ctl = controller.Controller()
except controller.Error as err:
LOG.error(str(err))
return None
if not ctl.connect():
return None
return ctl
def create_parser():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='A Zeek management client',
epilog='environment variables:\n\n'
' ZEEK_CLIENT_CONFIG_FILE: '
'Same as `--configfile` argument, but lower precedence.\n'
' ZEEK_CLIENT_CONFIG_SETTINGS: '
'Same as a space-separated series of `--set` arguments, but lower precedence.\n')
    # Use a distinct name so we don't shadow the imported controller module:
    controller_addr = '{}:{}'.format(CONFIG.get('controller', 'host'),
                                     CONFIG.get('controller', 'port'))
    parser.add_argument('-c', '--configfile', metavar='FILE', default=CONFIG_FILE,
                        help='Path to zeek-client config file. (Default: {})'.format(CONFIG_FILE))
    parser.add_argument('--controller', metavar='HOST:PORT',
                        help='Address and port of the controller, either of '
                             'which may be omitted (default: {})'.format(controller_addr))
arg = parser.add_argument('--set', metavar='SECTION.KEY=VAL', action='append', default=[],
help='Adjust a configuration setting. Can use repeatedly. '
'See show-settings.')
# This is for argcomplete users and has no effect otherwise.
arg.completer = CONFIG.completer
verbosity_group = parser.add_mutually_exclusive_group()
verbosity_group.add_argument('--quiet', '-q', action='store_true',
help='Suppress informational output to stderr.')
verbosity_group.add_argument('--verbose', '-v', action='count',
help='Increase informational output to stderr. '
'Repeat for more output (e.g. -vvv).')
parser.add_argument('--version', action='store_true',
help='Show version number and exit.')
command_parser = parser.add_subparsers(
title='commands', dest='command',
help='See `%(prog)s <command> -h` for per-command usage info.')
sub_parser = command_parser.add_parser(
'deploy', help='Deploy a staged cluster configuration.')
sub_parser.set_defaults(run_cmd=cmd_deploy)
sub_parser = command_parser.add_parser(
'deploy-config', help='Upload a cluster configuration and deploy it.')
sub_parser.set_defaults(run_cmd=cmd_deploy_config)
sub_parser.add_argument('config', metavar='FILE',
help='Cluster configuration file, "-" for stdin')
sub_parser = command_parser.add_parser(
'get-config', help='Retrieve staged or deployed cluster configuration.')
sub_parser.set_defaults(run_cmd=cmd_get_config)
sub_parser.add_argument('--filename', '-f', metavar='FILE', default='-',
help='Output file for the configuration, default stdout')
sub_parser.add_argument('--as-json', action='store_true',
help='Report in JSON instead of INI-style config file')
get_config_group = sub_parser.add_mutually_exclusive_group()
get_config_group.add_argument('--deployed', action='store_true',
dest='deployed', default=False,
help='Return deployed configuration')
get_config_group.add_argument('--staged', action='store_false', dest='deployed',
help='Return staged configuration (default)')
sub_parser = command_parser.add_parser(
'get-id-value', help='Show the value of a given identifier in Zeek cluster nodes.')
sub_parser.set_defaults(run_cmd=cmd_get_id_value)
sub_parser.add_argument('id', metavar='IDENTIFIER',
help='Name of the Zeek script identifier to retrieve.')
sub_parser.add_argument('nodes', metavar='NODES', nargs='*', default=[],
help='Name(s) of Zeek cluster nodes to query. '
'When omitted, queries all nodes.')
sub_parser = command_parser.add_parser(
'get-instances', help='Show instances connected to the controller.')
sub_parser.set_defaults(run_cmd=cmd_get_instances)
sub_parser = command_parser.add_parser(
'get-nodes', help='Show active Zeek nodes at each instance.')
sub_parser.set_defaults(run_cmd=cmd_get_nodes)
sub_parser = command_parser.add_parser(
'monitor', help='For troubleshooting: do nothing, just report events.')
sub_parser.set_defaults(run_cmd=cmd_monitor)
sub_parser = command_parser.add_parser(
'restart', help='Restart cluster nodes.')
sub_parser.set_defaults(run_cmd=cmd_restart)
sub_parser.add_argument('nodes', metavar='NODES', nargs='*', default=[],
help='Name(s) of Zeek cluster nodes to restart. '
'When omitted, restarts all nodes.')
sub_parser = command_parser.add_parser(
'stage-config', help='Upload a cluster configuration for later deployment.')
sub_parser.set_defaults(run_cmd=cmd_stage_config)
sub_parser.add_argument('config', metavar='FILE',
help='Cluster configuration file, "-" for stdin')
sub_parser = command_parser.add_parser(
'show-settings', help="Show zeek-client's own configuration.")
sub_parser.set_defaults(run_cmd=cmd_show_settings)
sub_parser = command_parser.add_parser(
'test-timeout', help='Send timeout test event.')
sub_parser.set_defaults(run_cmd=cmd_test_timeout)
sub_parser.add_argument('--with-state', action='store_true',
help='Make request stateful in the controller.')
return parser
def cmd_deploy(args, controller=None):
# The deploy-config command first stages a configuration and then calls this
# function to deploy. We re-use its controller, passed to us.
if controller is None:
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(DeployRequest, DeployResponse)
if resp is None:
LOG.error('no response received: %s', msg)
return 1
retval = 0
json_data = {
'results': {},
'errors': [],
}
for broker_data in resp.results:
res = Result.from_brokertype(broker_data)
if not res.success:
retval = 1
if not res.success and res.node is None and res.error:
# If a failure doesn't mention a node, it's either an agent
# reporting an internal error, or the controller reporting a
# config validation error.
json_data['errors'].append(res.error)
continue
if res.success and res.node is None and res.instance is None and res.data:
# It's success from the controller (since the instance field is
# empty): the data field contains the ID of the deployed config.
json_data['results']['id'] = res.data
continue
# At this point we only expect responses from the agents:
if res.instance is None:
LOG.warning('skipping unexpected response %s', res)
continue
if res.node is None:
# This happens when an agent handles deployment successfully, and
# had no nodes to deploy. We skip this silently.
continue
# Everything else is node-specific results from agents.
if 'nodes' not in json_data['results']:
json_data['results']['nodes'] = {}
json_data['results']['nodes'][res.node] = {
'success': res.success,
'instance': res.instance,
}
# If launching this node failed, we should have a NodeOutputs record as
# data member in the result record. ("should", because on occasion
# buffering in the node -> stem -> supervisor pipeline delays the
# output.)
if res.data:
node_outputs = NodeOutputs.from_brokertype(res.data)
json_data['results']['nodes'][res.node]['stdout'] = node_outputs.stdout
json_data['results']['nodes'][res.node]['stderr'] = node_outputs.stderr
print(json_dumps(json_data), file=STDOUT)
return retval
def cmd_get_config(args):
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(GetConfigurationRequest,
GetConfigurationResponse,
args.deployed)
if resp is None:
LOG.error('no response received: %s', msg)
return 1
res = Result.from_brokertype(resp.result)
if not res.success:
msg = res.error if res.error else 'no reason given'
LOG.error(msg)
return 1
if not res.data:
LOG.error('received result did not contain configuration data: %s', resp)
return 1
config = Configuration.from_brokertype(res.data)
    # Don't use STDOUT as a context manager directly: that would close it
    # when the block exits.
    if args.filename and args.filename != '-':
        hdl_ctx = open(args.filename, 'w')
    else:
        hdl_ctx = contextlib.nullcontext(STDOUT)
    with hdl_ctx as hdl:
        if args.as_json:
            hdl.write(json_dumps(config.to_json_data()) + '\n')
        else:
            cfp = config.to_config_parser()
            cfp.write(hdl)
return 0
def cmd_get_id_value(args):
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(GetIdValueRequest,
GetIdValueResponse,
args.id, set(args.nodes))
if resp is None:
LOG.error('no response received: %s', msg)
return 1
json_data = {
'results': {},
'errors': [],
}
# The Result records have both instance and node filled in, so use both for
# ordering. While for the JSON serialization we can outsource the ordering
    # task to Python, for our error reporting it's up to us, and we want to
    # be reproducible.
results = [Result.from_brokertype(broker_data) for broker_data in resp.results]
for res in sorted(results):
if not res.success:
json_data['errors'].append({
'source': res.node,
'error': res.error,
})
continue
# Upon success, we should always have res.node filled in. But guard anyway.
if res.node:
# res.data should be a string containing JSON rendered by Zeek's
# to_json() BiF. Parse it into a data structure to render
# seamlessly.
if not isinstance(res.data, bt.String):
json_data['errors'].append({
'source': res.node,
'error': 'invalid result data type {}'.format(repr(res.data))
})
continue
try:
json_data['results'][res.node] = json.loads(res.data.to_py())
except json.JSONDecodeError as err:
json_data['errors'].append({
'source': res.node,
'error': 'JSON decode error: {}'.format(err),
})
continue
json_data['errors'].append({
'error': 'result lacking node: {}'.format(res.data),
})
print(json_dumps(json_data), file=STDOUT)
return 0 if len(json_data['errors']) == 0 else 1
def cmd_get_instances(_args):
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(GetInstancesRequest, GetInstancesResponse)
if resp is None:
LOG.error('no response received: %s', msg)
return 1
res = Result.from_brokertype(resp.result)
if not res.success:
msg = res.error if res.error else 'no reason given'
LOG.error(msg)
return 1
if res.data is None:
LOG.error('received result did not contain instance data: %s', resp)
return 1
json_data = {}
# res.data is a (possibly empty) vector of Instances. Make the list of
# instances easier to comprehend than raw Broker data: turn it into Instance
# objects, then render these JSON-friendly.
try:
for inst in sorted([Instance.from_brokertype(inst) for inst in res.data]):
json_data[inst.name] = inst.to_json_data()
json_data[inst.name].pop('name')
except TypeError as err:
LOG.error('instance data invalid: %s', err)
print(json_dumps(json_data), file=STDOUT)
return 0
def cmd_get_nodes(_args):
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(GetNodesRequest, GetNodesResponse)
if resp is None:
LOG.error('no response received: %s', msg)
return 1
json_data = {
'results': {},
'errors': [],
}
results = [Result.from_brokertype(broker_data) for broker_data in resp.results]
for res in sorted(results):
if not res.success:
json_data['errors'].append({
'source': res.instance,
'error': res.error,
})
continue
if res.data is None:
json_data['errors'].append({
'source': res.instance,
'error': 'result does not contain node status data',
})
continue
json_data['results'][res.instance] = {}
# res.data is a NodeStatusVec
try:
nstats = [NodeStatus.from_brokertype(nstat_data) for nstat_data in res.data]
for nstat in sorted(nstats):
# If either of the two role enums are "NONE", we make them
# None. That way they stay in the reporting, but are more easily
# distinguished from "actual" values.
mgmt_role = nstat.mgmt_role if nstat.mgmt_role != ManagementRole.NONE else None
cluster_role = nstat.cluster_role if nstat.cluster_role != ClusterRole.NONE else None
json_data['results'][res.instance][nstat.node] = {
'state': nstat.state,
'mgmt_role': mgmt_role,
'cluster_role': cluster_role,
}
if nstat.pid is not None:
json_data['results'][res.instance][nstat.node]['pid'] = nstat.pid
if nstat.port is not None:
json_data['results'][res.instance][nstat.node]['port'] = nstat.port
except TypeError as err:
LOG.error('NodeStatus data invalid: %s', err)
LOG.debug(traceback.format_exc())
print(json_dumps(json_data), file=STDOUT)
return 0 if len(json_data['errors']) == 0 else 1
def cmd_monitor(_args):
controller = create_controller()
if controller is None:
return 1
while True:
resp, msg = controller.receive(timeout_secs=None)
if resp is None:
print('no response received: {}'.format(msg))
else:
print('received "{}"'.format(resp))
return 0
def cmd_restart(args):
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(RestartRequest, RestartResponse,
set(args.nodes))
if resp is None:
LOG.error('no response received: %s', msg)
return 1
json_data = {
'results': {},
'errors': [],
}
# The Result records have both instance and node filled in, so use both for
# ordering. While for the JSON serialization we can outsource the ordering
    # task to Python, for our error reporting it's up to us, and we want to
    # be reproducible.
results = [Result.from_brokertype(broker_data) for broker_data in resp.results]
for res in sorted(results):
if not res.success and res.instance is None:
# The controller generated this one, so add to errors section.
json_data['errors'].append({
'source': res.node,
'error': res.error,
})
continue
# Upon success, we should always have a node filled in. But guard anyway.
if res.node:
json_data['results'][res.node] = res.success
continue
json_data['errors'].append({
'error': 'result lacking node: {}'.format(res),
})
print(json_dumps(json_data), file=STDOUT)
return 0 if len(json_data['errors']) == 0 else 1
def cmd_stage_config_impl(args):
"""Internals of cmd_stage_config() to enable chaining with other commands.
Returns a tuple of exit code, any JSON data to show to the user/caller, and
the created controller object, if any.
"""
if not args.config or (args.config != '-' and not os.path.isfile(args.config)):
LOG.error('please provide a cluster configuration file.')
return 1, None, None
# We use a config parser to parse the cluster configuration. For instances,
# we allow names without value to designate agents that connect to the
# controller, like this:
#
# [instances]
# foobar
#
# All other keys must have a value.
cfp = configparser.ConfigParser(allow_no_value=True)
if args.config == '-':
cfp.read_file(STDIN)
else:
cfp.read(args.config)
config = Configuration.from_config_parser(cfp)
if config is None:
LOG.error('configuration has errors, not sending')
return 1, None, None
controller = create_controller()
if controller is None:
return 1, None, None
resp, msg = controller.transact(StageConfigurationRequest,
StageConfigurationResponse,
config.to_brokertype())
if resp is None:
LOG.error('no response received: %s', msg)
return 1, None, controller
retval = 0
json_data = {
'results': {},
'errors': [],
}
for broker_data in resp.results:
res = Result.from_brokertype(broker_data)
if not res.success:
retval = 1
# Failures are config validation problems, trouble while
# auto-assigning ports, or internal controller errors.
# They should all come with error messages.
json_data['errors'].append(res.error if res.error else 'no reason given')
continue
if res.data:
json_data['results']['id'] = res.data
return retval, json_data, controller
def cmd_stage_config(args):
ret, json_data, _ = cmd_stage_config_impl(args)
if json_data:
print(json_dumps(json_data), file=STDOUT)
return ret
def cmd_deploy_config(args):
ret, json_data, controller = cmd_stage_config_impl(args)
if ret != 0:
if json_data:
print(json_dumps(json_data), file=STDOUT)
return ret
return cmd_deploy(args, controller=controller)
def cmd_show_settings(_args):
CONFIG.write(STDOUT)
return 0
def cmd_test_timeout(args):
controller = create_controller()
if controller is None:
return 1
resp, msg = controller.transact(TestTimeoutRequest, TestTimeoutResponse,
args.with_state)
if resp is None:
LOG.error('no response received: %s', msg)
return 1
res = Result.from_brokertype(resp.result)
print(json_dumps({'success': res.success, 'error': res.error}),
file=STDOUT)
return 0 | zeek-client | /zeek-client-1.3.0.tar.gz/zeek-client-1.3.0/zeekclient/cli.py | cli.py |
Zeek: Zookeeper CLI for caged animals!
======================================
The Z and K are for `Zookeeper <http://zookeeper.apache.org>`_, the E's are
just for fun.
Break free from the menagerie of configuration. Zeek is a ZooKeeper command
line application that makes it easy to see what is in all those cages. This
CLI works best in ZSH.
Turn On - (Installation)
------------------------
To install zeek::
$ pip install git+https://github.com/krockode/zeek
Tune In - (Configuration)
-------------------------
Zeek connects to localhost:2181 by default. To change this you can either set
the environment variable ZEEK_HOSTS or add the option `-H`/`--hosts` to the
zeek command. The value should be a comma separated list of zookeeper servers
to connect to e.g. host1:2181,host2:2181
Drop Out - (Usage)
------------------
The goal of zeek is to provide reasonable facsimiles of the unix `find` and
`grep` commands for the ZooKeeper node tree, so no new learning is required.
Both find and grep return matches in the form of `<node> - <value>` where
`node` is the full path of the node and `value` is the stringified value of
that node.
``ls``
List nodes underneath the node you specified.
Example::
$ zeek ls /animals
/animals -
/animals/mammals -
/animals/reptiles -
``find``
Example of ``find``, which performs a recursive listing from the root::
$ zeek find /
/ -
/animals -
/animals/mammals -
/animals/reptiles -
/animals/reptiles/foxes - ok
/animals/reptiles/snakes - rad
/animals/reptiles/crocodilia -
/animals/reptiles/crocodilia/alligators - hungry
/animals/reptiles/crocodilia/crocodiles - hungry
Zeek find is like `find / -name ...` and searches for ZooKeeper nodes whose
names match your search::
$ zeek find '*crocodile*'
/animals/reptiles/crocodilia/crocodiles - hungry
``grep``
Zeek grep searches ZooKeeper node values::
$ zeek grep hungry
/animals/reptiles/crocodilia/alligators - hungry
/animals/reptiles/crocodilia/crocodiles - hungry
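``set``
Zeek set writes a node value; pass ``--create``/``-c`` to create any missing
parent nodes first::
$ zeek set /animals/reptiles/snakes rad
Zeek also provides ``touch`` (create a node), ``vi`` (edit a node's value in
your ``$EDITOR``), and ``rm`` (delete a node); see ``zeek --help`` for details.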
| zeek | /zeek-0.1.5.tar.gz/zeek-0.1.5/README.rst | README.rst |
import os
from subprocess import call
import tempfile
import click
from kazoo.client import KazooClient
zk = None
def init(hosts):
global zk
zk = KazooClient(hosts=hosts)
zk.start(timeout=5)
def main():
    global zk
    try:
        cli(auto_envvar_prefix='ZEEK')
    finally:
        # click exits via SystemExit in standalone mode, so close the
        # connection in a finally block to guarantee it always runs.
        if zk is not None and zk.connected:
            zk.close()
@click.group()
@click.option('--hosts',
'-H',
default='localhost:2181',
help="ZooKeeper connection string",
show_default=True)
def cli(hosts):
"""View your ZooKeeper data from the command line"""
init(hosts)
@cli.command()
@click.argument('path')
def ls(path):
""" List the contents of a specified path.
Arguments:
PATH the path to list the contents of."""
echo(path)
for p in children(path):
echo(p)
@cli.command()
@click.argument('path')
def find(path):
""" Find all children of a specified path.
Arguments:
PATH the path to search for children."""
echo(path)
for p in walk(path):
echo(p)
@cli.command()
@click.argument('path')
@click.option('--recursive',
'-r',
is_flag=True,
help="create parent nodes if they don't exist")
def touch(path, recursive):
""" Create the specified node.
Arguments:
PATH the node to edit."""
create_node(path, recursive)
@cli.command()
@click.argument('path')
@click.argument('value')
@click.option('--create',
'-c',
is_flag=True,
help="create parent nodes if they don't exist")
def set(path, value, create):
""" Set a specified node
Arguments:
PATH the node to edit.
VALUE the value of the node"""
create_node(path, create)
    # ZooKeeper stores raw bytes; encode as UTF-8 so values round-trip
    # cleanly through `zeek grep` and `zeek ls`.
    node = zk.set(path, value.encode('utf-8'))
click.echo(node[0])
@cli.command()
@click.argument('path')
def vi(path):
""" Edit a specified node
Arguments:
PATH the node to edit."""
editor = os.environ.get('EDITOR', 'vim')
create_node(path)
with tempfile.NamedTemporaryFile(suffix=".tmp") as tmp:
if zk.exists(path):
node = zk.get(path)
tmp.write(node[0])
tmp.flush()
call([editor, tmp.name])
        # kazoo expects bytes, so read the edited file back in binary mode.
        with open(tmp.name, 'rb') as fh:
            zk.set(path, fh.read().strip())
@cli.command()
@click.argument('path')
def rm(path):
""" Edit a specified node
Arguments:
PATH the node to edit."""
if zk.exists(path):
zk.delete(path)
else:
click.echo('%s does not exist' % path)
def children(path):
"""Generator that yields the children of the specified path"""
global zk
for c in zk.get_children(path):
if path == '/':
yield '/%s' % c
else:
yield '%s/%s' % (path, c)
def walk(path):
"""Generator that yields the children of the given path recursively"""
for c in children(path):
yield c
for x in walk(c):
yield x
def parents(path, ascending=False):
"""Generator that yields the full path of all parents"""
if path == '/':
yield path
return
parts = path.split('/')
    # list() is required: Python 3 range objects have no reverse() method.
    indexes = list(range(len(parts) - 1))
if not ascending:
indexes.reverse()
for i in indexes:
yield '/' + '/'.join(parts[1:i+1])
def echo(path):
"""Echos a ZooKeeper node path and value"""
click.echo('%s - %s' % (path, zk.get(path)[0]))
def create_node(path, recursive=False):
if recursive:
for parent in parents(path, ascending=True):
if not zk.exists(parent):
zk.create(parent)
if zk.exists(path):
click.echo('%s already exists' % path)
else:
zk.create(path) | zeek | /zeek-0.1.5.tar.gz/zeek-0.1.5/zeek.py | zeek.py |
==============
zeeklog2pandas
==============
.. image:: https://img.shields.io/pypi/v/zeeklog2pandas.svg
:target: https://pypi.python.org/pypi/zeeklog2pandas
.. image:: https://img.shields.io/travis/stratosphereips/zeeklog2pandas.svg
:target: https://travis-ci.com/stratosphereips/zeeklog2pandas
.. image:: https://readthedocs.org/projects/zeeklog2pandas/badge/?version=latest
:target: https://zeeklog2pandas.readthedocs.io/en/latest/?version=latest
:alt: Documentation Status
Read Zeek/Bro .log and .log.gz files (even broken ones) into a Pandas DataFrame.
* Free software: MIT license
* Documentation: https://zeeklog2pandas.readthedocs.io/en/latest.
Features
--------
* zeeklog2pandas reads Zeek/Bro .log files or compressed .log.gz files transparently into Pandas DataFrames.
* Best effort reading of corrupted or incomplete compressed .log.gz files.
* Columns filtering.
* Interface compatible with Pandas `read_csv()` function.
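A minimal example, where ``conn.log`` stands in for any Zeek log on disk::
    >>> from zeeklog2pandas import read_zeek
    >>> df = read_zeek('conn.log', usecols=['ts', 'id.orig_h'])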
Credits
-------
This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
| zeeklog2pandas | /zeeklog2pandas-1.0.2.tar.gz/zeeklog2pandas-1.0.2/README.rst | README.rst |
.. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/stratosphereips/zeeklog2pandas/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
zeeklog2pandas could always use more documentation, whether as part of the
official zeeklog2pandas docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/stratosphereips/zeeklog2pandas/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `zeeklog2pandas` for local development.
1. Fork the `zeeklog2pandas` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/zeeklog2pandas.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv zeeklog2pandas
$ cd zeeklog2pandas/
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the
tests, including testing other Python versions with tox::
$ flake8 zeeklog2pandas tests
$ python setup.py test or pytest
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check
https://travis-ci.com/stratosphereips/zeeklog2pandas/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ python -m unittest tests.test_zeeklog2pandas
Deploying
---------
A reminder for the maintainers on how to deploy.
Make sure all your changes are committed (including an entry in HISTORY.rst).
Then run::
$ bump2version patch # possible: major / minor / patch
$ git push
$ git push --tags
Travis will then deploy to PyPI if tests pass.
| zeeklog2pandas | /zeeklog2pandas-1.0.2.tar.gz/zeeklog2pandas-1.0.2/CONTRIBUTING.rst | CONTRIBUTING.rst |
=====
Usage
=====
To use zeeklog2pandas just import the read_zeek function::
>>> from zeeklog2pandas import read_zeek
>>> df = read_zeek('ssl.log')
>>> print(df)
ts uid id.orig_h id.orig_p id.resp_h ... validation_status notary.first_seen notary.last_seen notary.times_seen notary.valid
0 2021-12-31 22:59:55.174243072 CDy3UFvdbDmFSrPW9 192.168.1.1 40344 192.168.1.2 ... - - - - -
1 2021-12-31 22:59:55.326785024 CUobJa1lv9mEKpaAY1 192.168.1.2 37676 192.168.2.1 ... - - - - -
[2 rows x 25 columns]
The **read_zeek()** interface aims to be 100% compatible with the pandas **read_csv()** interface. We are not there yet, but it is already possible to use the **usecols** parameter to keep only the columns you are interested in::
>>> df = read_zeek('ssl.log', usecols=['ts', 'id.orig_h', 'id.resp_h'])
>>> print(df)
ts id.orig_h id.resp_h
0 2021-12-31 22:59:55.174243072 192.168.1.1 192.168.1.2
1 2021-12-31 22:59:55.326785024 192.168.1.2 192.168.2.1
It is also possible to read Zeek logs in chunks with **chunksize**::
>>> df = read_zeek('ssl.log', chunksize=10)
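Assuming ``chunksize`` mirrors the pandas ``read_csv()`` semantics, the call
then returns an iterator of DataFrames rather than a single frame, so you
would typically loop over it (``handle`` is a hypothetical per-chunk
function)::
    >>> for chunk in read_zeek('ssl.log', chunksize=10):
    ...     handle(chunk)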
| zeeklog2pandas | /zeeklog2pandas-1.0.2.tar.gz/zeeklog2pandas-1.0.2/docs/usage.rst | usage.rst |
.. highlight:: shell
============
Installation
============
Stable release
--------------
To install zeeklog2pandas, run this command in your terminal:
.. code-block:: console
$ pip install zeeklog2pandas
This is the preferred method to install zeeklog2pandas, as it will always install the most recent stable release.
If you don't have `pip`_ installed, this `Python installation guide`_ can guide
you through the process.
.. _pip: https://pip.pypa.io
.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
From sources
------------
The sources for zeeklog2pandas can be downloaded from the `Github repo`_.
You can either clone the public repository:
.. code-block:: console
$ git clone git://github.com/stratosphereips/zeeklog2pandas
Or download the `tarball`_:
.. code-block:: console
$ curl -OJL https://github.com/stratosphereips/zeeklog2pandas/tarball/master
Once you have a copy of the source, you can install it with:
.. code-block:: console
$ python setup.py install
.. _Github repo: https://github.com/stratosphereips/zeeklog2pandas
.. _tarball: https://github.com/stratosphereips/zeeklog2pandas/tarball/master
| zeeklog2pandas | /zeeklog2pandas-1.0.2.tar.gz/zeeklog2pandas-1.0.2/docs/installation.rst | installation.rst |
📦 setup.py (for humans)
=======================
This repo exists to provide [an example setup.py] file, that can be used
to bootstrap your next Python project. It includes some advanced
patterns and best practices for `setup.py`, as well as some
commented–out nice–to–haves.
For example, this `setup.py` provides a `$ python setup.py upload`
command, which creates a *universal wheel* (and *sdist*) and uploads
your package to [PyPi] using [Twine], without the need for an annoying
`setup.cfg` file. It also creates/uploads a new git tag, automatically.
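A typical release then boils down to the following (the version numbers are
illustrative):

```bash
# 1. Bump the version string inside setup.py, e.g. 0.1.0 -> 0.1.1
# 2. Build the sdist/wheel, upload to PyPI via Twine, and tag the release:
python setup.py upload
```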
In short, `setup.py` files can be daunting to approach, when first
starting out — even Guido has been heard saying, "everyone cargo cults
thems". It's true — so, I want this repo to be the best place to
copy–paste from :)
[Check out the example!][an example setup.py]
Installation
-----
```bash
cd your_project
# Download the setup.py file:
# download with wget
wget https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py -O setup.py
# download with curl
curl -O https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py
```
To Do
-----
- Tests via `$ setup.py test` (if it's concise).
Pull requests are encouraged!
More Resources
--------------
- [What is setup.py?] on Stack Overflow
- [Official Python Packaging User Guide](https://packaging.python.org)
- [The Hitchhiker's Guide to Packaging]
- [Cookiecutter template for a Python package]
License
-------
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any means.
[an example setup.py]: https://github.com/navdeep-G/setup.py/blob/master/setup.py
[PyPi]: https://docs.python.org/3/distutils/packageindex.html
[Twine]: https://pypi.python.org/pypi/twine
[image]: https://farm1.staticflickr.com/628/33173824932_58add34581_k_d.jpg
[What is setup.py?]: https://stackoverflow.com/questions/1471994/what-is-setup-py
[The Hitchhiker's Guide to Packaging]: https://the-hitchhikers-guide-to-packaging.readthedocs.io/en/latest/creation.html
[Cookiecutter template for a Python package]: https://github.com/audreyr/cookiecutter-pypackage
| zeekr | /zeekr-0.1.0.tar.gz/zeekr-0.1.0/README.md | README.md |
# Zeel Publisher
A Library meant to standardize the way zapi services interact with SNS/SQS.
# Getting Started
## Docker
The infrastructure of this library is designed to be run inside of various
docker containers. Specifically there is a container for:
- The python environment the library's code runs in
- A jaeger tracing instance
- The localstack AWS simulator
These containers can be viewed inside the project's docker-compose.yml
Because these containers are all needed to create a functioning local
environment, proceeding without docker is NOT recommended. It can be installed
using the docker for mac installer or via
`brew cask install docker`
## The pipenv Virtual Environment
This service's dependencies are all managed with
[pipenv](https://github.com/pypa/pipenv) and are enumerated inside the project's
Pipfile and Pipfile.lock files. Pipenv is a superset of Pip, and will create a
virtual python environment (the .venv folder) for this Service. To that end,
please ensure you have pipenv installed on your local machine.
`brew install pipenv`
### Configuring your Virtual Environment
To create a virtual environment (the .venv directory) inside your project folder
instead of your home (~) directory, save the following in your .bash_profile or .zshrc:
`export PIPENV_VENV_IN_PROJECT=1`
This is highly recommended for vscode users, as the project's linters and
formatters are configured to use binaries installed to a local .venv
### Running the Library's tests on Docker
Although tests can be run locally, it is recommended to run them through docker,
where they will have access to the infrastructure they need. To do so you can
use this command:
`docker-compose run publisher-app bash test.sh`
# Modules
## Event Publisher
A Class meant for publishing Event Messages to a single SNS topic.
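A minimal usage sketch, assuming the class is importable from
`zeel_publisher.event_publisher` (the topic ARN and all field values below
are illustrative):

```python
from zeel_publisher.event_publisher import EventPublisher

# Hypothetical topic ARN; substitute your own.
publisher = EventPublisher(
    topic_arn='arn:aws:sns:us-east-1:123456789012:orders-events')

response = publisher.publish(
    carrier={},                    # OpenTracing TEXT_MAP carrier
    uri='/orders/1',
    operation='POST',
    before=None,                   # None can represent creation
    after={'id': 1, 'status': 'open'},
    service='zapi-orders',
    event_type='CREATED',
    description='Order 1 was created.',
    triggering_account={'id': 'account-1'},
    triggering_identity={'id': 'identity-1'},
)
print(response['MessageId'])
```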
# Distribution
This Code is meant for distribution across multiple projects, namely our various
zapi services which require zeel-publisher as a dependency. The library itself
is hosted on PyPi and can be found at
https://pypi.org/project/zeel-publisher/
## Versioning
Zeel publisher versioning follows the [Semantic Versioning](https://docs.npmjs.com/about-semantic-versioning) syntax:
`Major.Minor.Patch`
Make sure to update setup.py accordingly before publishing a new version.
## Commands for uploading to PyPi
Create build - `pipenv run python3 setup.py sdist`
Publish - `pipenv run twine upload dist/*`
| zeel-publisher | /zeel-publisher-4.1.0.tar.gz/zeel-publisher-4.1.0/README.md | README.md |
import json
import time
import boto3
class EventPublisherError(Exception):
"""Base class for Event Publisher Exceptions."""
pass
class CarrierError(EventPublisherError):
"""Exception raised if Carrier is invalid."""
def __init__(self, message):
self.message = message
class EventPublisher():
"""
Class responsible for publishing event messages to a single topic.
"""
def __init__(self, topic_arn, sns_client_params=None):
"""
Initializes a Publisher object.
Parameters
----------
topic_arn : string
Amazon resource identifier for a particular sns topic.
sns_client_params : dict
Dictionary containing parameters necessary for initializing a SNS
client.
"""
self.topic_arn = topic_arn
if sns_client_params:
self.sns_client = boto3.client('sns', **sns_client_params)
else:
self.sns_client = boto3.client('sns')
def publish(
self,
carrier,
uri,
operation,
before,
after,
service,
event_type,
description,
triggering_account,
triggering_identity
):
"""
Publish an event message to the instance's SNS Topic.
Parameters
----------
carrier : dict
An OpenTrace TEXT_MAP format carrier.
uri : string
The uri at which the event pertains to.
operation : string
The HTTP method which triggered the event.
before : mixed
The resource the event pertains to before it was modified. This can
be None to represent Creation for example.
after : mixed
The resource the event pertains to after it was modified. This can
be None to represent Deletion for example.
service : string
The name of the service which published the event. Eg. zapi-orders,
zapi-invoices etc.
event_type : string
An enum representing the type of change that occurred. Typically
used in decision making for sqs handlers.
description : string
A human readable description of the event. Not standardized.
triggering_account : dict
The account that triggered this change.
triggering_identity : dict
The identity that triggered this change.
Returns
-------
        dict
            A dictionary containing an SNS MessageId if the message was
            published successfully. On failure boto3 raises an exception
            rather than returning a falsy value.
"""
body = {
'default': {
'carrier': carrier,
'uri': uri,
'operation': operation,
'before': before,
'after': after,
'service': service,
'event_type': event_type,
'description': description,
'triggering_account': triggering_account,
'triggering_identity': triggering_identity,
'timestamp': time.time()
}
}
payload = {'Message': json.dumps(body)}
response = self.sns_client.publish(TopicArn=self.topic_arn, **payload)
return response | zeel-publisher | /zeel-publisher-4.1.0.tar.gz/zeel-publisher-4.1.0/zeel_publisher/event_publisher.py | event_publisher.py |
Authors
=======
* Michael van Tellingen
Contributors
============
* vashek
* Marco Vellinga
* jaceksnet
* Andrew Serong
* Joeri Bekker
* Eric Wong
* Jacek Stępniewski
* Alexey Stepanov
* Julien Delasoie
* bjarnagin
* mcordes
* Sam Denton
* David Baumgold
* fiebiga
* Antonio Cuni
* Alexandre de Mari
* Jason Vertrees
* Nicolas Evrard
* Matt Grimm (mgrimm)
* Marek Wywiał
* Falldog
* btmanm
* Caleb Salt
* Julien Marechal
* Mike Fiedler
* Dave Wapstra
* OrangGeeGee
* Stefano Parmesan
* Jan Murre
* Ben Tucker
* Bruno Duyé
* Christoph Heuel
* Derek Harland
* Eric Waller
* Falk Schuetzenmeister
* Jon Jenkins
* Raymond Piller
* Zoltan Benedek
* Øyvind Heddeland Instefjord
| zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/CONTRIBUTORS.rst | CONTRIBUTORS.rst |
from __future__ import absolute_import, print_function
import argparse
import logging
import logging.config
import time
import requests
from six.moves.urllib.parse import urlparse
from zeep.cache import SqliteCache
from zeep.client import Client
from zeep.transports import Transport
logger = logging.getLogger('zeep')
def parse_arguments(args=None):
parser = argparse.ArgumentParser(description='Zeep: The SOAP client')
parser.add_argument(
'wsdl_file', type=str, help='Path or URL to the WSDL file',
default=None)
parser.add_argument(
'--cache', action='store_true', help='Enable cache')
parser.add_argument(
'--no-verify', action='store_true', help='Disable SSL verification')
parser.add_argument(
'--verbose', action='store_true', help='Enable verbose output')
parser.add_argument(
'--profile', help="Enable profiling and save output to given file")
parser.add_argument(
'--no-strict', action='store_true', default=False,
help="Disable strict mode")
return parser.parse_args(args)
def main(args):
if args.verbose:
logging.config.dictConfig({
'version': 1,
'formatters': {
'verbose': {
'format': '%(name)20s: %(message)s'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'loggers': {
'zeep': {
'level': 'DEBUG',
'propagate': True,
'handlers': ['console'],
},
}
})
if args.profile:
import cProfile
profile = cProfile.Profile()
profile.enable()
cache = SqliteCache() if args.cache else None
session = requests.Session()
if args.no_verify:
session.verify = False
result = urlparse(args.wsdl_file)
if result.username or result.password:
session.auth = (result.username, result.password)
transport = Transport(cache=cache, session=session)
st = time.time()
strict = not args.no_strict
client = Client(args.wsdl_file, transport=transport, strict=strict)
logger.debug("Loading WSDL took %sms", (time.time() - st) * 1000)
if args.profile:
profile.disable()
profile.dump_stats(args.profile)
client.wsdl.dump()
if __name__ == '__main__':
args = parse_arguments()
main(args) | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/__main__.py | __main__.py |
import base64
import datetime
import errno
import logging
import os
import threading
from contextlib import contextmanager
import appdirs
import pytz
import six
# The sqlite3 is not available on Google App Engine so we handle the
# ImportError here and set the sqlite3 var to None.
# See https://github.com/mvantellingen/python-zeep/issues/243
try:
import sqlite3
except ImportError:
sqlite3 = None
logger = logging.getLogger(__name__)
class Base(object):
def add(self, url, content):
        raise NotImplementedError()
def get(self, url):
        raise NotImplementedError()
class InMemoryCache(Base):
"""Simple in-memory caching using dict lookup with support for timeouts"""
_cache = {} # global cache, thread-safe by default
def __init__(self, timeout=3600):
self._timeout = timeout
def add(self, url, content):
logger.debug("Caching contents of %s", url)
self._cache[url] = (datetime.datetime.utcnow(), content)
def get(self, url):
try:
created, content = self._cache[url]
except KeyError:
pass
else:
if not _is_expired(created, self._timeout):
logger.debug("Cache HIT for %s", url)
return content
logger.debug("Cache MISS for %s", url)
return None
class SqliteCache(Base):
"""Cache contents via an sqlite database on the filesystem"""
_version = '1'
def __init__(self, path=None, timeout=3600):
if sqlite3 is None:
raise RuntimeError("sqlite3 module is required for the SqliteCache")
# No way we can support this when we want to achieve thread safety
if path == ':memory:':
raise ValueError(
"The SqliteCache doesn't support :memory: since it is not " +
"thread-safe. Please use zeep.cache.InMemoryCache()")
self._lock = threading.RLock()
self._timeout = timeout
self._db_path = path if path else _get_default_cache_path()
# Initialize db
with self.db_connection() as conn:
cursor = conn.cursor()
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS request
(created timestamp, url text, content text)
""")
conn.commit()
@contextmanager
def db_connection(self):
with self._lock:
connection = sqlite3.connect(
self._db_path, detect_types=sqlite3.PARSE_DECLTYPES)
yield connection
connection.close()
def add(self, url, content):
logger.debug("Caching contents of %s", url)
data = self._encode_data(content)
with self.db_connection() as conn:
cursor = conn.cursor()
cursor.execute("DELETE FROM request WHERE url = ?", (url,))
cursor.execute(
"INSERT INTO request (created, url, content) VALUES (?, ?, ?)",
(datetime.datetime.utcnow(), url, data))
conn.commit()
def get(self, url):
with self.db_connection() as conn:
cursor = conn.cursor()
cursor.execute(
"SELECT created, content FROM request WHERE url=?", (url, ))
rows = cursor.fetchall()
if rows:
created, data = rows[0]
if not _is_expired(created, self._timeout):
logger.debug("Cache HIT for %s", url)
return self._decode_data(data)
logger.debug("Cache MISS for %s", url)
def _encode_data(self, data):
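        # Prefix the base64 payload with a version marker so that cache
        # entries written in an incompatible format are ignored on read
        # (_decode_data returns None when the prefix does not match).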
data = base64.b64encode(data)
if six.PY2:
return buffer(self._version_string + data) # noqa
return self._version_string + data
def _decode_data(self, data):
if six.PY2:
data = str(data)
if data.startswith(self._version_string):
return base64.b64decode(data[len(self._version_string):])
@property
def _version_string(self):
prefix = u'$ZEEP:%s$' % self._version
return bytes(prefix.encode('ascii'))
def _is_expired(value, timeout):
"""Return boolean if the value is expired"""
if timeout is None:
return False
now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
max_age = value.replace(tzinfo=pytz.utc)
max_age += datetime.timedelta(seconds=timeout)
return now > max_age
def _get_default_cache_path():
path = appdirs.user_cache_dir('zeep', False)
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
return os.path.join(path, 'cache.db') | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/cache.py | cache.py |
import copy
import logging
from contextlib import contextmanager
from zeep.xsd.const import NotSet
from zeep.transports import Transport
from zeep.wsdl import Document
logger = logging.getLogger(__name__)
class OperationProxy(object):
def __init__(self, service_proxy, operation_name):
self._proxy = service_proxy
self._op_name = operation_name
def __call__(self, *args, **kwargs):
"""Call the operation with the given args and kwargs.
:rtype: zeep.xsd.CompoundValue
"""
if self._proxy._client._default_soapheaders:
op_soapheaders = kwargs.get('_soapheaders')
if op_soapheaders:
soapheaders = copy.deepcopy(self._proxy._client._default_soapheaders)
if type(op_soapheaders) != type(soapheaders):
raise ValueError("Incompatible soapheaders definition")
if isinstance(soapheaders, list):
soapheaders.extend(op_soapheaders)
else:
soapheaders.update(op_soapheaders)
else:
soapheaders = self._proxy._client._default_soapheaders
kwargs['_soapheaders'] = soapheaders
return self._proxy._binding.send(
self._proxy._client, self._proxy._binding_options,
self._op_name, args, kwargs)
class ServiceProxy(object):
def __init__(self, client, binding, **binding_options):
self._client = client
self._binding_options = binding_options
self._binding = binding
def __getattr__(self, key):
"""Return the OperationProxy for the given key.
:rtype: OperationProxy()
"""
return self[key]
def __getitem__(self, key):
"""Return the OperationProxy for the given key.
:rtype: OperationProxy()
"""
try:
self._binding.get(key)
except ValueError:
raise AttributeError('Service has no operation %r' % key)
return OperationProxy(self, key)
class Factory(object):
def __init__(self, types, kind, namespace):
self._method = getattr(types, 'get_%s' % kind)
if namespace in types.namespaces:
self._ns = namespace
else:
self._ns = types.get_ns_prefix(namespace)
def __getattr__(self, key):
"""Return the complexType or simpleType for the given localname.
:rtype: zeep.xsd.ComplexType or zeep.xsd.AnySimpleType
"""
return self[key]
def __getitem__(self, key):
"""Return the complexType or simpleType for the given localname.
:rtype: zeep.xsd.ComplexType or zeep.xsd.AnySimpleType
"""
return self._method('{%s}%s' % (self._ns, key))
class Client(object):
"""The zeep Client.
:param wsdl:
:param wsse:
:param transport: Custom transport class.
:param service_name: The service name for the service binding. Defaults to
the first service in the WSDL document.
:param port_name: The port name for the default binding. Defaults to the
first port defined in the service element in the WSDL
document.
:param plugins: a list of Plugin instances
"""
def __init__(self, wsdl, wsse=None, transport=None,
service_name=None, port_name=None, plugins=None, strict=True):
if not wsdl:
raise ValueError("No URL given for the wsdl")
self.transport = transport if transport is not None else Transport()
self.wsdl = Document(wsdl, self.transport, strict=strict)
self.wsse = wsse
self.plugins = plugins if plugins is not None else []
# options
self.raw_response = False
self._default_service = None
self._default_service_name = service_name
self._default_port_name = port_name
self._default_soapheaders = None
@property
def service(self):
"""The default ServiceProxy instance
:rtype: ServiceProxy
"""
if self._default_service:
return self._default_service
self._default_service = self.bind(
service_name=self._default_service_name,
port_name=self._default_port_name)
if not self._default_service:
raise ValueError(
"There is no default service defined. This is usually due to "
"missing wsdl:service definitions in the WSDL")
return self._default_service
@contextmanager
def options(self, timeout=NotSet, raw_response=NotSet):
"""Context manager to temporarily overrule various options.
:param timeout: Set the timeout for POST/GET operations (not used for
loading external WSDL or XSD documents)
To for example set the timeout to 10 seconds use::
client = zeep.Client('foo.wsdl')
with client.options(timeout=10):
client.service.fast_call()
"""
        # Store current options
        old_raw_response = self.raw_response
        # Set new options
        if raw_response is not NotSet:
            self.raw_response = raw_response
if timeout is not NotSet:
timeout_ctx = self.transport._options(timeout=timeout)
timeout_ctx.__enter__()
yield
        self.raw_response = old_raw_response
if timeout is not NotSet:
timeout_ctx.__exit__(None, None, None)
def bind(self, service_name=None, port_name=None):
"""Create a new ServiceProxy for the given service_name and port_name.
The default ServiceProxy instance (`self.service`) always referes to
the first service/port in the wsdl Document. Use this when a specific
port is required.
"""
if not self.wsdl.services:
return
service = self._get_service(service_name)
port = self._get_port(service, port_name)
return ServiceProxy(self, port.binding, **port.binding_options)
def create_service(self, binding_name, address):
"""Create a new ServiceProxy for the given binding name and address.
:param binding_name: The QName of the binding
:param address: The address of the endpoint
"""
try:
binding = self.wsdl.bindings[binding_name]
except KeyError:
raise ValueError(
"No binding found with the given QName. Available bindings "
"are: %s" % (', '.join(self.wsdl.bindings.keys())))
return ServiceProxy(self, binding, address=address)
def create_message(self, operation, service_name=None, port_name=None,
args=None, kwargs=None):
"""Create the payload for the given operation.
:rtype: lxml.etree._Element
"""
service = self._get_service(service_name)
port = self._get_port(service, port_name)
args = args or tuple()
kwargs = kwargs or {}
envelope, http_headers = port.binding._create(operation, args, kwargs)
return envelope
def type_factory(self, namespace):
"""Return a type factory for the given namespace.
Example::
factory = client.type_factory('ns0')
user = factory.User(name='John')
:rtype: Factory
"""
return Factory(self.wsdl.types, 'type', namespace)
def get_type(self, name):
"""Return the type for the given qualified name.
:rtype: zeep.xsd.ComplexType or zeep.xsd.AnySimpleType
"""
return self.wsdl.types.get_type(name)
def get_element(self, name):
"""Return the element for the given qualified name.
:rtype: zeep.xsd.Element
"""
return self.wsdl.types.get_element(name)
def set_ns_prefix(self, prefix, namespace):
"""Set a shortcut for the given namespace.
"""
self.wsdl.types.set_ns_prefix(prefix, namespace)
def set_default_soapheaders(self, headers):
"""Set the default soap headers which will be automatically used on
all calls.
Note that if you pass custom soapheaders using a list then you will
also need to use that during the operations. Since mixing these use
cases isn't supported (yet).
"""
self._default_soapheaders = headers
def _get_port(self, service, name):
if name:
port = service.ports.get(name)
if not port:
raise ValueError("Port not found")
else:
port = list(service.ports.values())[0]
return port
def _get_service(self, name):
if name:
service = self.wsdl.services.get(name)
if not service:
raise ValueError("Service not found")
else:
service = next(iter(self.wsdl.services.values()), None)
return service
class CachingClient(Client):
"""Shortcut to create a caching client, for the lazy people.
This enables the SqliteCache by default in the transport as was the default
in earlier versions of zeep.
"""
def __init__(self, *args, **kwargs):
# Don't use setdefault since we want to lazily init the Transport cls
from zeep.cache import SqliteCache
kwargs['transport'] = (
kwargs.get('transport') or Transport(cache=SqliteCache()))
super(CachingClient, self).__init__(*args, **kwargs) | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/client.py | client.py |
import os.path
from defusedxml.lxml import fromstring
from lxml import etree
from six.moves.urllib.parse import urljoin, urlparse
from zeep.exceptions import XMLSyntaxError
class ImportResolver(etree.Resolver):
"""Custom lxml resolve to use the transport object"""
def __init__(self, transport):
self.transport = transport
def resolve(self, url, pubid, context):
if urlparse(url).scheme in ('http', 'https'):
content = self.transport.load(url)
return self.resolve_string(content, context)
def parse_xml(content, transport, base_url=None, strict=False):
"""Parse an XML string and return the root Element.
:param content: The XML string
:type content: str
:param transport: The transport instance to load imported documents
:type transport: zeep.transports.Transport
:param base_url: The base url of the document, used to make relative
lookups absolute.
:type base_url: str
    :param strict: boolean to indicate whether lxml should parse in 'strict'
        mode. If false, recover mode is enabled, which tries to parse invalid
        XML as best it can.
    :type strict: boolean
:returns: The document root
:rtype: lxml.etree._Element
"""
recover = not strict
parser = etree.XMLParser(
remove_comments=True, resolve_entities=False, recover=recover)
parser.resolvers.add(ImportResolver(transport))
try:
return fromstring(content, parser=parser, base_url=base_url)
except etree.XMLSyntaxError as exc:
raise XMLSyntaxError("Invalid XML content received (%s)" % exc.msg)
def load_external(url, transport, base_url=None, strict=True):
"""Load an external XML document.
:param url:
:param transport:
:param base_url:
    :param strict: boolean to indicate whether lxml should parse in 'strict'
        mode. If false, recover mode is enabled, which tries to parse invalid
        XML as best it can.
    :type strict: boolean
"""
if hasattr(url, 'read'):
content = url.read()
else:
if base_url:
url = absolute_location(url, base_url)
content = transport.load(url)
return parse_xml(content, transport, base_url, strict=strict)
def absolute_location(location, base):
"""Make an url absolute (if it is optional) via the passed base url.
:param location: The (relative) url
:type location: str
:param base: The base location
:type base: str
:returns: An absolute URL
:rtype: str
"""
if location == base:
return location
if urlparse(location).scheme in ('http', 'https', 'file'):
return location
if base and urlparse(base).scheme in ('http', 'https', 'file'):
return urljoin(base, location)
else:
if os.path.isabs(location):
return location
if base:
return os.path.realpath(
os.path.join(os.path.dirname(base), location))
return location
def is_relative_path(value):
"""Check if the given value is a relative path
:param value: The value
:type value: str
:returns: Boolean indicating if the url is relative. If it is absolute then
False is returned.
:rtype: boolean
"""
if urlparse(value).scheme in ('http', 'https', 'file'):
return False
return not os.path.isabs(value) | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/loader.py | loader.py |
from collections import deque
class Plugin(object):
"""Base plugin"""
def ingress(self, envelope, http_headers, operation):
"""Override to update the envelope or http headers when receiving a
message.
:param envelope: The envelope as XML node
:param http_headers: Dict with the HTTP headers
"""
return envelope, http_headers
def egress(self, envelope, http_headers, operation, binding_options):
"""Override to update the envelope or http headers when sending a
message.
:param envelope: The envelope as XML node
:param http_headers: Dict with the HTTP headers
:param operation: The associated Operation instance
:param binding_options: Binding specific options for the operation
"""
return envelope, http_headers
def apply_egress(client, envelope, http_headers, operation, binding_options):
for plugin in client.plugins:
result = plugin.egress(
envelope, http_headers, operation, binding_options)
if result is not None:
envelope, http_headers = result
return envelope, http_headers
def apply_ingress(client, envelope, http_headers, operation):
for plugin in client.plugins:
result = plugin.ingress(envelope, http_headers, operation)
if result is not None:
envelope, http_headers = result
return envelope, http_headers
class HistoryPlugin(object):
def __init__(self, maxlen=1):
self._buffer = deque([], maxlen)
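        # A bounded deque acts as a ring buffer: only the most recent
        # `maxlen` request/response pairs are retained.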
@property
def last_sent(self):
last_tx = self._buffer[-1]
if last_tx:
return last_tx['sent']
@property
def last_received(self):
last_tx = self._buffer[-1]
if last_tx:
return last_tx['received']
def ingress(self, envelope, http_headers, operation):
last_tx = self._buffer[-1]
last_tx['received'] = {
'envelope': envelope,
'http_headers': http_headers,
}
def egress(self, envelope, http_headers, operation, binding_options):
self._buffer.append({
'received': None,
'sent': {
'envelope': envelope,
'http_headers': http_headers,
},
}) | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/plugins.py | plugins.py |
import datetime
from collections import OrderedDict
from lxml import etree
from zeep import xsd
from zeep.xsd.valueobjects import CompoundValue
def serialize_object(obj, target_cls=OrderedDict):
"""Serialize zeep objects to native python data structures"""
if isinstance(obj, list):
return [serialize_object(sub, target_cls) for sub in obj]
if isinstance(obj, (dict, CompoundValue)):
result = target_cls()
for key in obj:
result[key] = serialize_object(obj[key], target_cls)
return result
return obj
def create_xml_soap_map(values):
"""Create an http://xml.apache.org/xml-soap#Map value."""
Map = xsd.ComplexType(
xsd.Sequence([
xsd.Element(
'item',
xsd.AnyType(),
min_occurs=1,
max_occurs="unbounded"),
]),
qname=etree.QName('{http://xml.apache.org/xml-soap}Map'))
KeyValueData = xsd.Element(
'{http://xml.apache.org/xml-soap}KeyValueData',
xsd.ComplexType(
xsd.Sequence([
xsd.Element(
'key',
xsd.AnyType(),
),
xsd.Element(
'value',
xsd.AnyType(),
),
]),
),
)
return Map(item=[
KeyValueData(
xsd.AnyObject(xsd.String(), key),
xsd.AnyObject(guess_xsd_type(value), value)
) for key, value in values.items()
])
def guess_xsd_type(obj):
"""Return the XSD Type for the given object"""
if isinstance(obj, bool):
return xsd.Boolean()
if isinstance(obj, int):
return xsd.Integer()
if isinstance(obj, float):
return xsd.Float()
if isinstance(obj, datetime.datetime):
return xsd.DateTime()
if isinstance(obj, datetime.date):
return xsd.Date()
return xsd.String()
def Nil():
"""Return an xsi:nil element"""
return xsd.AnyObject(None, None) | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/helpers.py | helpers.py |
import cgi
import re
import inspect
from lxml import etree
from zeep.ns import XSD
from zeep.exceptions import XMLParseError
def str_to_sa(s):
"""
    Remove or replace all accented and other 'weird' characters in the given
    string, according to the translation table below.
"""
try:
tab = {"Á": "A", "À": "A", "Ă": "A", "Â": "A", "Å": "A", "Ä": "A", "Ã": "A", "Ą": "A", "Æ": "AE", "Ć": "C",
"Č": "C", "Ç": "C", "Ď": "D", "Đ": "D", "É": "E", "È": "E", "Ê": "E", "Ě": "E", "Ë": "E", "Ę": "E",
"Ğ": "G", "Í": "I", "Ì": "I", "Î": "I", "Ï": "I", "Ĺ": "L", "Ľ": "L", "Ł": "L", "Ń": "N", "Ň": "N",
"Ñ": "N", "Ó": "O", "Ò": "O", "Ô": "O", "Ö": "O", "Õ": "O", "Ø": "O", "Œ": "OE", "Ŕ": "R", "Ř": "R",
"Ś": "S", "Š": "S", "Ş": "S", "Ș": "S", "Ť": "T", "Ț": "T", "Ú": "U", "Ù": "U", "Û": "U", "Ü": "U",
"Ý": "Y", "Ÿ": "Y", "Ź": "Z", "Ž": "Z", "Ż": "Z", "Þ": "T", "'": "", "’": "", "‘": "", '“': "", '”': "",
'"': "", "ø": "o"}
pattern = re.compile('|'.join(tab.keys()))
res = pattern.sub(lambda x: tab[x.group()], s)
return " ".join(res.split())
    except Exception:
        # Fall back to an empty string for non-string or otherwise bad input.
        return ""
def qname_attr(node, attr_name, target_namespace=None):
value = node.get(attr_name)
if value is not None:
return as_qname(value, node.nsmap, target_namespace)
def as_qname(value, nsmap, target_namespace=None):
"""Convert the given value to a QName"""
if ':' in value:
prefix, local = value.split(':')
# The xml: prefix is always bound to the XML namespace, see
# https://www.w3.org/TR/xml-names/
if prefix == 'xml':
namespace = 'http://www.w3.org/XML/1998/namespace'
else:
namespace = nsmap.get(prefix)
if not namespace:
raise XMLParseError("No namespace defined for %r" % prefix)
# Workaround for https://github.com/mvantellingen/python-zeep/issues/349
if not local:
return etree.QName(XSD, 'anyType')
return etree.QName(namespace, local)
if target_namespace:
return etree.QName(target_namespace, value)
if nsmap.get(None):
return etree.QName(nsmap[None], value)
return etree.QName(value)
def findall_multiple_ns(node, name, namespace_sets):
result = []
for nsmap in namespace_sets:
result.extend(node.findall(name, namespaces=nsmap))
return result
def get_version():
from zeep import __version__ # cyclic import
return __version__
def get_base_class(objects):
"""Return the best base class for multiple objects.
Implementation is quick and dirty, might be done better.. ;-)
"""
bases = [inspect.getmro(obj.__class__)[::-1] for obj in objects]
num_objects = len(objects)
max_mro = max(len(mro) for mro in bases)
base_class = None
for i in range(max_mro):
try:
if len({bases[j][i] for j in range(num_objects)}) > 1:
break
except IndexError:
break
base_class = bases[0][i]
return base_class
def detect_soap_env(envelope):
root_tag = etree.QName(envelope)
return root_tag.namespace
def get_media_type(value):
"""Parse a HTTP content-type header and return the media-type"""
main_value, parameters = cgi.parse_header(value)
return main_value | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/utils.py | utils.py |
import logging
import os
from contextlib import contextmanager
import requests
from six.moves.urllib.parse import urlparse
from zeep.utils import get_version, get_media_type
from zeep.wsdl.utils import etree_to_string
class Transport(object):
"""The transport object handles all communication to the SOAP server.
:param cache: The cache object to be used to cache GET requests
:param timeout: The timeout for loading wsdl and xsd documents.
:param operation_timeout: The timeout for operations (POST/GET). By
default this is None (no timeout).
:param session: A :py:class:`request.Session()` object (optional)
"""
supports_async = False
def __init__(self, cache=None, timeout=300, operation_timeout=None,
session=None):
self.cache = cache
self.load_timeout = timeout
self.operation_timeout = operation_timeout
self.logger = logging.getLogger(__name__)
self.session = session or requests.Session()
self.session.headers['User-Agent'] = (
'Zeep/%s (www.python-zeep.org)' % (get_version()))
def get(self, address, params, headers):
"""Proxy to requests.get()
:param address: The URL for the request
:param params: The query parameters
:param headers: a dictionary with the HTTP headers.
"""
response = self.session.get(
address,
params=params,
headers=headers,
timeout=self.operation_timeout)
return response
def post(self, address, message, headers):
"""Proxy to requests.posts()
:param address: The URL for the request
:param message: The content for the body
:param headers: a dictionary with the HTTP headers.
"""
if self.logger.isEnabledFor(logging.DEBUG):
log_message = message
if isinstance(log_message, bytes):
log_message = log_message.decode('utf-8')
self.logger.debug("HTTP Post to %s:\n%s", address, log_message)
response = self.session.post(
address,
data=message,
headers=headers,
timeout=self.operation_timeout)
if self.logger.isEnabledFor(logging.DEBUG):
media_type = get_media_type(
response.headers.get('Content-Type', 'text/xml'))
if media_type == 'multipart/related':
log_message = response.content
else:
log_message = response.content
if isinstance(log_message, bytes):
log_message = log_message.decode('utf-8')
self.logger.debug(
"HTTP Response from %s (status: %d):\n%s",
address, response.status_code, log_message)
return response
def post_xml(self, address, envelope, headers):
"""Post the envelope xml element to the given address with the headers.
        This method is intended to be overridden if you want to customize the
serialization of the xml element. By default the body is formatted
and encoded as utf-8. See ``zeep.wsdl.utils.etree_to_string``.
"""
message = etree_to_string(envelope)
return self.post(address, message, headers)
def load(self, url):
"""Load the content from the given URL"""
if not url:
raise ValueError("No url given to load")
scheme = urlparse(url).scheme
if scheme in ('http', 'https'):
if self.cache:
response = self.cache.get(url)
if response:
return bytes(response)
content = self._load_remote_data(url)
if self.cache:
self.cache.add(url, content)
return content
elif scheme == 'file':
if url.startswith('file://'):
url = url[7:]
with open(os.path.expanduser(url), 'rb') as fh:
return fh.read()
def _load_remote_data(self, url):
response = self.session.get(url, timeout=self.load_timeout)
response.raise_for_status()
return response.content
@contextmanager
def _options(self, timeout=None):
"""Context manager to temporarily overrule options.
Example::
client = zeep.Client('foo.wsdl')
with client.options(timeout=10):
client.service.fast_call()
:param timeout: Set the timeout for POST/GET operations (not used for
loading external WSDL or XSD documents)
"""
old_timeout = self.operation_timeout
self.operation_timeout = timeout
yield
self.operation_timeout = old_timeout | zeep-adv | /zeep-adv-1.4.4.tar.gz/zeep-adv-1.4.4/src/zeep/transports.py | transports.py |