| text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) |
|---|---|---|---|---|---|---|
from ... import Endpoint, UrlConfig
class TftEndpoint:
def __init__(self, url: str, **kwargs):
self._url = f"/tft{url}"
def __call__(self, **kwargs):
final_url = f"{UrlConfig.tft_url}{self._url}"
endpoint = Endpoint(final_url, **kwargs)
return endpoint(**kwargs)
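A short usage sketch for the wrapper above (hypothetical endpoint path and regional host; stub `Endpoint`/`UrlConfig` classes stand in for the package-internal imports so the sketch runs on its own):

```python
# Stand-in stubs (hypothetical) for the package-internal Endpoint/UrlConfig.
class Endpoint:
    def __init__(self, url, **kwargs):
        self._url = url

    def __call__(self, **kwargs):
        # The real Endpoint compiles the request; here we just fill the template.
        return self._url.format(**kwargs)

class UrlConfig:
    tft_url = "https://euw1.api.riotgames.com"  # hypothetical regional host

class TftEndpoint:
    def __init__(self, url: str, **kwargs):
        self._url = f"/tft{url}"

    def __call__(self, **kwargs):
        final_url = f"{UrlConfig.tft_url}{self._url}"
        return Endpoint(final_url, **kwargs)(**kwargs)

by_name = TftEndpoint("/summoner/v1/summoners/by-name/{summonerName}")
print(by_name(summonerName="SomeSummoner"))
# -> https://euw1.api.riotgames.com/tft/summoner/v1/summoners/by-name/SomeSummoner
```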
| pseudonym117/Riot-Watcher | src/riotwatcher/_apis/team_fight_tactics/urls/TftEndpoint.py | Python | mit | 307 | 0 |
# Copyright (2021) Binovo IT Human Project SL
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
import base64
from enum import Enum
from odoo import models, fields, api, _
from ..lroe.lroe_xml_schema import LROEXMLSchema,\
LROEXMLSchemaModeNotSupported,\
LROEOperationTypeEnum
from .lroe_operation import LROEOperationEnum, LROEModelEnum
from odoo.addons.l10n_es_ticketbai_api.models.ticketbai_response\
import TicketBaiResponseState, TicketBaiInvoiceResponseCode, \
TicketBaiCancellationResponseCode
from odoo.addons.l10n_es_ticketbai_api.utils import utils as tbai_utils
from odoo.exceptions import ValidationError
class LROEOperationResponseState(Enum):
BUILD_ERROR = '-2'
REQUEST_ERROR = '-1'
CORRECT = 'Correcto'
PARTIALLY_CORRECT = 'Parcialmente correcto'
INCORRECT = 'Incorrecto'
class LROEOperationResponseLineState(Enum):
CORRECT = 'Correcto'
CORRECT_WITH_ERRORS = 'Aceptado con errores'
INCORRECT = 'Incorrecto'
class LROEOperationResponseLineCode(tbai_utils.EnumValues):
DUPLICATED_RECORD = 'B4_2000003'
ALREADY_CANCELLED_RECORD = 'B4_2000006'
class LROEOperationResponse(models.Model):
_name = 'lroe.operation.response'
_description = "LROE Operation Response"
lroe_operation_id = fields.Many2one(
comodel_name='lroe.operation',
required=True,
ondelete='cascade')
response_line_ids = fields.One2many(
comodel_name='lroe.operation.response.line',
inverse_name='lroe_response_id',
string="Response Line")
xml = fields.Binary(string='XML Response')
xml_fname = fields.Char('XML File Name')
state = fields.Selection(selection=[
(LROEOperationResponseState.BUILD_ERROR.value, 'Build error'),
(LROEOperationResponseState.REQUEST_ERROR.value, 'Request error'),
(LROEOperationResponseState.CORRECT.value, 'Correct'),
(LROEOperationResponseState.PARTIALLY_CORRECT.value, 'Partially correct'),
(LROEOperationResponseState.INCORRECT.value, 'Incorrect')], required=True)
code = fields.Char()
description = fields.Char()
lroe_record_id = fields.Char()
lroe_record_number = fields.Char()
lroe_record_date = fields.Char()
@staticmethod
def get_tbai_state(lroe_response_operation):
if lroe_response_operation == LROEOperationResponseState.BUILD_ERROR.value or\
lroe_response_operation == LROEOperationResponseState.REQUEST_ERROR.value:
return lroe_response_operation
if lroe_response_operation == LROEOperationResponseState.CORRECT.value:
return TicketBaiResponseState.RECEIVED.value
if lroe_response_operation == LROEOperationResponseState.INCORRECT.value:
return TicketBaiResponseState.REJECTED.value
if lroe_response_operation\
== LROEOperationResponseState.PARTIALLY_CORRECT.value\
or lroe_response_operation\
== LROEOperationResponseLineState.CORRECT_WITH_ERRORS.value:
            # TODO LROE: when a single file is sent, can this response
            # be returned? And if so, what should we do?
return TicketBaiResponseState.RECEIVED.value
return None
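    # Note: get_tbai_state() maps LROE states onto TicketBAI response states:
    # 'Correcto' -> RECEIVED, 'Incorrecto' -> REJECTED, and 'Parcialmente
    # correcto' / 'Aceptado con errores' -> RECEIVED; build ('-2') and
    # request ('-1') errors are returned unchanged.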
@api.model
def prepare_lroe_error_values(self, lroe_operation, msg, **kwargs):
values = kwargs
tbai_response_model = self.env['tbai.response']
tbai_response_dict = {
'tbai_invoice_id': lroe_operation.tbai_invoice_ids[0].id,
'state': LROEOperationResponse.get_tbai_state(
LROEOperationResponseState.REQUEST_ERROR.value
)
}
tbai_response_obj = tbai_response_model.create(tbai_response_dict)
values.update({
'lroe_operation_id': lroe_operation.id,
'state': LROEOperationResponseState.BUILD_ERROR.value,
'description': _("Internal API or Operation error") + msg,
'response_line_ids': [(0, 0, {
'state': LROEOperationResponseLineState.INCORRECT.value,
'tbai_response_id': tbai_response_obj.id
})]
})
return values
@api.model
def prepare_lroe_response_values(self, lroe_srv_response, lroe_operation, **kwargs):
def validate_response_line_state(response_line_record_state):
if response_line_record_state not in [
LROEOperationResponseLineState.CORRECT.value,
LROEOperationResponseLineState.CORRECT_WITH_ERRORS.value,
LROEOperationResponseLineState.INCORRECT.value
]:
raise ValidationError(_('LROEOperationResponseLineState not VALID !'))
def get_lroe_response_xml_header():
return xml_root.get('Cabecera')
def get_lroe_response_xml_presenter():
return xml_root.get('DatosPresentacion')
def get_lroe_response_xml_records():
xml_lroe_records = xml_root.get('Registros').get('Registro')
len_lroe_records = 0
if isinstance(xml_lroe_records, dict):
len_lroe_records = 1
elif isinstance(xml_lroe_records, list):
len_lroe_records = len(xml_lroe_records)
return len_lroe_records, xml_lroe_records
def get_lroe_xml_schema():
if not lroe_operation:
raise ValidationError(_('LROE Operation required!'))
operation_type = None
lroe_operation_model = (
"pj_240"
if LROEModelEnum.model_pj_240.value == lroe_operation.model
else "pf_140"
)
if lroe_operation.type in (
LROEOperationEnum.create.value,
LROEOperationEnum.update.value,
):
lroe_operation_type = "resp_alta"
elif lroe_operation.type == LROEOperationEnum.cancel.value:
lroe_operation_type = 'resp_cancel'
if lroe_operation.lroe_chapter_id.code == '1':
lroe_operation_chapter = 'sg_invoice'
elif lroe_operation.lroe_chapter_id.code == '2':
lroe_operation_chapter = 'invoice_in'
if hasattr(
LROEOperationTypeEnum,
"%s_%s_%s"
% (
lroe_operation_type,
lroe_operation_chapter,
lroe_operation_model,
),
):
operation_type = getattr(
LROEOperationTypeEnum,
"%s_%s_%s"
% (
lroe_operation_type,
lroe_operation_chapter,
lroe_operation_model,
),
).value
xml_schema = LROEXMLSchema(operation_type)
else:
raise LROEXMLSchemaModeNotSupported(
"Batuz LROE XML model not supported!")
return operation_type, xml_schema
def set_tbai_response_lroe_line():
response_line_record_data = response_line_record.get('SituacionRegistro')
response_line_record_state = response_line_record_data.get('EstadoRegistro')
validate_response_line_state(response_line_record_state)
response_line_record_code = ''
response_line_record_message = ''
if not response_line_record_state\
== LROEOperationResponseLineState.CORRECT.value:
response_line_record_code =\
response_line_record_data.get('CodigoErrorRegistro')
response_line_record_message = '(ES): '\
+ response_line_record_data.get('DescripcionErrorRegistroES') \
+ '(EU): '\
+ response_line_record_data.get('DescripcionErrorRegistroEU')
tbai_response_model = tbai_response_obj = self.env['tbai.response']
if lroe_operation.tbai_invoice_ids:
tbai_msg_description = response_line_record_message
tbai_msg_code =\
TicketBaiInvoiceResponseCode.INVOICE_ALREADY_REGISTERED.value \
if LROEOperationResponseLineCode.DUPLICATED_RECORD.value == \
response_line_record_code else \
TicketBaiCancellationResponseCode.INVOICE_ALREADY_CANCELLED.value \
if LROEOperationResponseLineCode.ALREADY_CANCELLED_RECORD.value == \
response_line_record_code else response_line_record_code\
if response_line_record_code else ''
tbai_response_dict = {
'tbai_invoice_id': lroe_operation.tbai_invoice_ids[0].id,
'state': LROEOperationResponse.get_tbai_state(
response_line_record_state),
'tbai_response_message_ids': [(0, 0, {
'code': tbai_msg_code,
'description': tbai_msg_description})]}
for key in kwargs:
tbai_response_dict[key] = kwargs[key]
tbai_response_obj = tbai_response_model.create(tbai_response_dict)
response_line_ids.append((0, 0, {
'state': response_line_record_state,
'code': response_line_record_code,
'description': response_line_record_message,
'tbai_response_id': tbai_response_obj.id}))
if response_line_ids:
values.update({'response_line_ids': response_line_ids})
lroe_operation_type, lroe_xml_schema = get_lroe_xml_schema()
values = {}
lroe_srv_response_type = lroe_srv_response.get_lroe_srv_response_type()
lroe_srv_response_code = lroe_srv_response.get_lroe_srv_response_code()
lroe_srv_response_message = lroe_srv_response.get_lroe_srv_response_message()
lroe_srv_response_date = lroe_srv_response.get_lroe_srv_response_record_date()
errno = lroe_srv_response.errno
strerror = lroe_srv_response.strerror
if lroe_srv_response.error:
tbai_response_model = self.env['tbai.response']
tbai_response_dict = {
'tbai_invoice_id': lroe_operation.tbai_invoice_ids[:1].id,
'state': LROEOperationResponse.get_tbai_state(
LROEOperationResponseState.REQUEST_ERROR.value)
}
for key in kwargs:
tbai_response_dict[key] = kwargs[key]
tbai_response_obj = tbai_response_model.create(tbai_response_dict)
values.update({
'lroe_operation_id': lroe_operation.id,
'state': LROEOperationResponseState.REQUEST_ERROR.value,
'code': lroe_srv_response_code if lroe_srv_response_code else errno,
'description': lroe_srv_response_message
if lroe_srv_response_message else strerror,
'response_line_ids': [(0, 0, {
'state': LROEOperationResponseLineState.INCORRECT.value,
'code': lroe_srv_response_code
if lroe_srv_response_code else errno,
'description': lroe_srv_response_message
if lroe_srv_response_message else strerror,
'tbai_response_id': tbai_response_obj.id
})]
})
else:
values.update({
'lroe_operation_id': lroe_operation.id,
'state': lroe_srv_response_type,
})
if lroe_srv_response_type in [
LROEOperationResponseState.CORRECT.value,
LROEOperationResponseState.PARTIALLY_CORRECT.value]:
lroe_srv_rec_id = lroe_srv_response.get_lroe_srv_response_record_id()
lroe_srv_rec_number = \
lroe_srv_response.get_lroe_srv_response_record_number()
values.update({'lroe_record_date': lroe_srv_response_date,
'lroe_record_id': lroe_srv_rec_id,
'lroe_record_number': lroe_srv_rec_number})
else:
values.update({
'lroe_record_date': lroe_srv_response_date,
'code': lroe_srv_response_code if lroe_srv_response_code else errno,
'description': lroe_srv_response_message
if lroe_srv_response_message else strerror
})
xml_data = lroe_srv_response.data
if xml_data:
values.update({
'xml': base64.encodebytes(xml_data),
'xml_fname': lroe_operation.name + '_response.xml'
})
xml_root = lroe_xml_schema.parse_xml(
xml_data
)[lroe_xml_schema.root_element]
len_response_line_records, response_line_records =\
get_lroe_response_xml_records()
response_line_ids = []
if len_response_line_records == 1:
response_line_record = response_line_records
set_tbai_response_lroe_line()
elif len_response_line_records > 1:
for response_line_record in response_line_records:
set_tbai_response_lroe_line()
else:
tbai_response_model = tbai_response_obj = self.env['tbai.response']
if lroe_operation.tbai_invoice_ids:
tbai_response_dict = {
'tbai_invoice_id': lroe_operation.tbai_invoice_ids[0].id,
'state': LROEOperationResponse.get_tbai_state(
lroe_srv_response_type)
}
for key in kwargs:
tbai_response_dict[key] = kwargs[key]
tbai_response_obj = tbai_response_model.create(tbai_response_dict)
values.update({
'response_line_ids': [(0, 0, {
'state': LROEOperationResponseLineState.INCORRECT.value,
'code': lroe_srv_response_code
if lroe_srv_response_code else errno,
'description': lroe_srv_response_message
if lroe_srv_response_message else strerror,
'tbai_response_id': tbai_response_obj.id
})]
})
return values
class LROEOperationResponseLine(models.Model):
_name = 'lroe.operation.response.line'
_description = "LROE Operation Response Line"
_order = 'id desc'
lroe_response_id = fields.Many2one(
comodel_name='lroe.operation.response',
required=True,
ondelete='cascade')
lroe_operation_id = fields.Many2one(
comodel_name='lroe.operation',
related='lroe_response_id.lroe_operation_id')
tbai_response_id = fields.Many2one(
comodel_name='tbai.response',
ondelete='cascade')
tbai_invoice_id = fields.Many2one(
related='tbai_response_id.tbai_invoice_id',
comodel_name='tbai.invoice',
required=True,
ondelete='cascade')
state = fields.Selection(selection=[
(LROEOperationResponseLineState.CORRECT.value, 'Correct'),
(LROEOperationResponseLineState.CORRECT_WITH_ERRORS.value,
'Correct with errors'),
(LROEOperationResponseLineState.INCORRECT.value, 'Incorrect')
],
required=True)
code = fields.Char()
description = fields.Char()
response_message = fields.Char(
compute='_compute_line_message',
string="LROE Response Message")
@api.multi
@api.depends('code', 'description')
def _compute_line_message(self):
for response_line in self:
if response_line.code and response_line.description:
response_line.response_message = response_line.code\
+ ':' + response_line.description
elif response_line.code:
response_line.response_message = response_line.code
elif response_line.description:
response_line.response_message = response_line.description
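The `(0, 0, {...})` triples built throughout the model above are Odoo's x2many command tuples; a minimal sketch of the convention with placeholder values:

```python
# Odoo x2many command tuples, as used in prepare_lroe_response_values above:
#   (0, 0, vals)  -> create a new line from vals
#   (1, id, vals) -> update the existing line `id`
#   (2, id, 0)    -> delete the line `id`
values = {
    'lroe_operation_id': 1,  # placeholder record id
    'response_line_ids': [
        (0, 0, {
            'state': 'Correcto',
            'code': '',
            'description': '',
        }),
    ],
}
# Inside an Odoo environment this would be persisted with:
# self.env['lroe.operation.response'].create(values)
```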
| factorlibre/l10n-spain | l10n_es_ticketbai_api_batuz/models/lroe_operation_response.py | Python | agpl-3.0 | 16,313 | 0.001533 |
"""
(c) Copyright 2014. All Rights Reserved.
qball module setup and package.
"""
from setuptools import setup
setup(
name='qball',
author='Matt Ferrante',
author_email='[email protected]',
description='Python integration for qball',
license='(c) Copyright 2014. All Rights Reserved.',
packages=['qball'],
install_requires=['httplib2 >= 0.8'],
setup_requires=['httplib2'],
version='1.1.0',
url="https://github.com/ferrants/qball-python",
    keywords=['locking', 'resource locking', 'webservice'],
)
| ferrants/qball-python | setup.py | Python | mit | 547 | 0.003656 |
# vim: set fileencoding=utf-8 :
# GNU Solfege - free ear training software
# Copyright (C) 2009, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import logging
import os
import StringIO
import subprocess
import gtk
from solfege.esel import SearchView
if __name__ == '__main__':
from solfege import i18n
i18n.setup(".", "C")
import solfege.statistics
solfege.db = solfege.statistics.DB()
import solfege
from solfege import cfg
from solfege import filesystem
from solfege import gu
from solfege import frontpage as pd
from solfege import lessonfile
from solfege import osutils
class LessonFilePreviewWidget(gtk.VBox):
def __init__(self, model):
gtk.VBox.__init__(self)
self.m_model = model
self.set_size_request(200, 200)
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Title:</b>")
self.pack_start(l, False)
self.g_title = gtk.Label()
self.g_title.set_alignment(0.0, 0.5)
self.pack_start(self.g_title, False)
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Module:</b>")
self.pack_start(l, False)
self.g_module = gtk.Label()
self.g_module.set_alignment(0.0, 0.5)
self.pack_start(self.g_module, False)
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Used in topcis:</b>")
self.pack_start(l, False)
self.g_topic_box = gtk.VBox()
self.pack_start(self.g_topic_box, False)
self.show_all()
def update(self, dlg):
fn = dlg.get_preview_filename()
if fn:
fn = gu.decode_filename(fn)
for child in self.g_topic_box.get_children():
child.destroy()
fn = lessonfile.mk_uri(fn)
try:
self.set_sensitive(True)
self.g_title.set_text(lessonfile.infocache.get(fn, 'title'))
self.g_module.set_text(lessonfile.infocache.get(fn, 'module'))
self.g_ok_button.set_sensitive(True)
for x in self.m_model.iterate_topics_for_file(fn):
l = gtk.Label(x)
l.set_alignment(0.0, 0.5)
self.g_topic_box.pack_start(l, False)
if not self.g_topic_box.get_children():
l = gtk.Label(u"-")
l.set_alignment(0.0, 0.5)
self.g_topic_box.pack_start(l, False)
except (lessonfile.InfoCache.FileNotFound,
lessonfile.InfoCache.FileNotLessonfile), e:
self.g_title.set_text(u'')
self.g_module.set_text(u'')
self.g_ok_button.set_sensitive(False)
self.set_sensitive(False)
self.show_all()
return True
class SelectLessonFileDialog(gtk.FileChooserDialog):
def __init__(self, parent):
gtk.FileChooserDialog.__init__(self, _("Select lesson file"),
parent=parent,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,))
self.set_select_multiple(True)
pv = LessonFilePreviewWidget(parent.m_model)
pv.g_ok_button = self.add_button("gtk-ok", gtk.RESPONSE_OK)
pv.g_ok_button.set_sensitive(False)
pv.show()
self.set_preview_widget(pv)
self.connect('selection-changed', pv.update)
class SelectLessonfileBySearchDialog(gtk.Dialog):
def __init__(self):
gtk.Dialog.__init__(self, buttons=(gtk.STOCK_CLOSE, gtk.RESPONSE_ACCEPT))
view = SearchView(_('Search for exercises. Each exercise you click will be added to the section of the front page.'),
fields=['link-with-filename-tooltip', 'module'])
view.on_link_clicked = self.on_link_clicked
self.vbox.pack_start(view)
self.show_all()
def on_link_clicked(self, widget, filename):
self.m_filename = filename
self.response(gtk.RESPONSE_OK)
def editor_of(obj):
"""
Return the toplevel page, the one that is a Editor object.
"""
p = obj
while not isinstance(p, Editor):
p = p.m_parent
return p
def parent_page(obj):
"""
Return the parent page of obj. Return None if this is the toplevel page.
"""
p = obj
while True:
try:
p = p.m_parent
except AttributeError:
return None
if isinstance(p, Page):
return p
if p is None:
return None
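# Note on the two helpers above: editor_of() climbs the m_parent chain until it
# reaches the toplevel Editor window, while parent_page() stops at the nearest
# enclosing Page (returning None from the toplevel); both rely on every widget
# in this module keeping an m_parent back-reference.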
class Section(gtk.VBox):
"""
A section consists of a heading and a list of links.
self.g_link_box is a vbox that contains the links.
"""
def __init__(self, model, parent):
gtk.VBox.__init__(self)
self.m_model = model
self.m_parent = parent
assert isinstance(model, pd.LinkList)
hbox = gtk.HBox()
hbox.set_spacing(6)
self.pack_start(hbox, False)
# This is displayed and used when we edit the heading
self.g_heading_entry = gtk.Entry()
self.g_heading_entry.set_no_show_all(True)
hbox.pack_start(self.g_heading_entry)
self.g_heading = gtk.Label()
self.g_heading.set_alignment(0.0, 0.5)
# FIXME escape m_name
self.g_heading.set_markup("<b>%s</b>" % model.m_name)
hbox.pack_start(self.g_heading, False)
#
button_hbox = gtk.HBox()
button_hbox.set_spacing(0)
hbox.pack_start(button_hbox, False)
im = gtk.Image()
im.set_from_stock(gtk.STOCK_EDIT, gtk.ICON_SIZE_MENU)
button = gtk.Button()
button.add(im)
button.connect('clicked', self.on_edit_heading)
button_hbox.pack_start(button, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_MENU)
button = gtk.Button()
button.add(im)
button.connect('button-release-event', self.on_add)
button_hbox.pack_start(button, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_REMOVE, gtk.ICON_SIZE_MENU)
button = gtk.Button()
button.add(im)
button.connect('button-release-event', self.on_remove)
button_hbox.pack_start(button, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_CUT, gtk.ICON_SIZE_MENU)
b = gtk.Button()
b.add(im)
b.connect('clicked', self.on_cut)
button_hbox.pack_start(b, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_PASTE, gtk.ICON_SIZE_MENU)
b = gtk.Button()
b.add(im)
b.connect('clicked', self.on_paste, -1)
Editor.clipboard.register_paste_button(b, (pd.LinkList, pd.Page, unicode))
button_hbox.pack_start(b, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_DOWN, gtk.ICON_SIZE_MENU)
self.g_move_down_btn = gtk.Button()
self.g_move_down_btn.add(im)
self.g_move_down_btn.connect('clicked',
self.m_parent.move_section_down, self)
button_hbox.pack_start(self.g_move_down_btn, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_UP, gtk.ICON_SIZE_MENU)
self.g_move_up_btn = gtk.Button()
self.g_move_up_btn.add(im)
self.g_move_up_btn.connect('clicked',
self.m_parent.move_section_up, self)
button_hbox.pack_start(self.g_move_up_btn, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_MENU)
self.g_move_left_btn = gtk.Button()
self.g_move_left_btn.add(im)
self.g_move_left_btn.connect('clicked',
parent.m_parent.on_move_section_left, self)
button_hbox.pack_start(self.g_move_left_btn, False)
#
im = gtk.Image()
im.set_from_stock(gtk.STOCK_GO_FORWARD, gtk.ICON_SIZE_MENU)
self.g_move_right_btn = gtk.Button()
self.g_move_right_btn.add(im)
self.g_move_right_btn.connect('clicked',
parent.m_parent.on_move_section_right, self)
button_hbox.pack_start(self.g_move_right_btn, False)
#
self.g_link_box = gtk.VBox()
self.pack_start(self.g_link_box, False)
for link in self.m_model:
self.g_link_box.pack_start(self.create_linkrow(link))
# The button to click to add a new link
hbox = gtk.HBox()
self.pack_start(hbox)
def on_edit_heading(self, btn):
self.g_heading_entry.set_text(self.m_model.m_name)
self.g_heading_entry.show()
self.g_heading.hide()
self.g_heading_entry.grab_focus()
def finish_edit(entry):
self.g_heading_entry.disconnect(sid)
self.g_heading_entry.disconnect(keyup_id)
self.g_heading_entry.disconnect(keydown_sid)
self.m_model.m_name = entry.get_text()
self.g_heading.set_markup(u"<b>%s</b>" % entry.get_text())
self.g_heading_entry.hide()
self.g_heading.show()
sid = self.g_heading_entry.connect('activate', finish_edit)
def keydown(entry, event):
if event.keyval == gtk.keysyms.Tab:
finish_edit(entry)
keydown_sid = self.g_heading_entry.connect('key-press-event', keydown)
def keyup(entry, event):
if event.keyval == gtk.keysyms.Escape:
self.g_heading_entry.disconnect(sid)
self.g_heading_entry.disconnect(keyup_id)
self.g_heading_entry.hide()
self.g_heading.show()
return True
keyup_id = self.g_heading_entry.connect('key-release-event', keyup)
def on_add(self, btn, event):
menu = gtk.Menu()
item = gtk.MenuItem(_("Add link to new page"))
item.connect('activate', self.on_add_link_to_new_page)
menu.append(item)
item = gtk.MenuItem(_("Add link to exercise"))
item.connect('activate', self.on_add_link)
menu.append(item)
item = gtk.MenuItem(_("Add link by searching for exercises"))
item.connect('activate', self.on_add_link_by_search)
menu.append(item)
menu.show_all()
menu.popup(None, None, None, event.button, event.time)
def on_remove(self, btn, event):
self.m_parent.remove_section(self)
def on_add_link_by_search(self, btn):
dlg = SelectLessonfileBySearchDialog()
while True:
ret = dlg.run()
if ret == gtk.RESPONSE_OK:
self._add_filenames([os.path.abspath(lessonfile.uri_expand(dlg.m_filename))])
else:
break
dlg.destroy()
def on_add_link(self, btn):
if editor_of(self).m_filename:
open_dir = os.path.split(editor_of(self).m_filename)[0]
else:
open_dir = filesystem.user_data()
dlg = SelectLessonFileDialog(editor_of(self))
dlg.set_current_folder(open_dir)
while 1:
ret = dlg.run()
if ret in (gtk.RESPONSE_REJECT, gtk.RESPONSE_DELETE_EVENT, gtk.RESPONSE_CANCEL):
break
else:
assert ret == gtk.RESPONSE_OK
self._add_filenames(dlg.get_filenames())
break
dlg.destroy()
def _add_filenames(self, filenames):
for filename in filenames:
fn = gu.decode_filename(filename)
assert os.path.isabs(fn)
# If the file name is a file in a subdirectory below
# lessonfile.exercises_dir in the current working directory,
# then the file is a standard lesson file, and it will be
# converted to a uri scheme with:
fn = lessonfile.mk_uri(fn)
# Small test to check that the file actually is a lesson file.
try:
lessonfile.infocache.get(fn, 'title')
except lessonfile.infocache.FileNotLessonfile:
continue
self.m_model.append(fn)
self.g_link_box.pack_start(self.create_linkrow(fn), False)
def on_add_link_to_new_page(self, menuitem):
page = pd.Page(_("Untitled%s") % "", [pd.Column()])
self.m_model.append(page)
self.g_link_box.pack_start(self.create_linkrow(page))
def create_linkrow(self, link_this):
hbox = gtk.HBox()
def ff(btn, page):
if id(page) in editor_of(self).m_page_mapping:
editor_of(self).show_page_id(id(page))
else:
if not page[0]:
page[0].append(pd.LinkList(link_this.m_name))
p = Page(page, parent_page(self))
p.show()
editor_of(self).add_page(p)
if isinstance(link_this, pd.Page):
linkbutton = gu.ClickableLabel(link_this.m_name)
linkbutton.connect('clicked', ff, link_this)
else:
try:
linkbutton = gu.ClickableLabel(lessonfile.infocache.get(link_this, 'title'))
linkbutton.set_tooltip_text(link_this)
except lessonfile.InfoCache.FileNotFound:
linkbutton = gu.ClickableLabel(_(u"«%s» was not found") % link_this)
linkbutton.make_warning()
hbox.pack_start(linkbutton)
linkbutton.connect('button-press-event', self.on_right_click_row, link_this)
hbox.show_all()
return hbox
def on_right_click_row(self, button, event, linked):
idx = self.m_model.index(linked)
if event.button == 3:
m = gtk.Menu()
item = gtk.ImageMenuItem(gtk.STOCK_DELETE)
item.connect('activate', self.on_delete_link, linked)
m.append(item)
item = gtk.ImageMenuItem(gtk.STOCK_CUT)
item.connect('activate', self.on_cut_link, idx)
m.append(item)
item = gtk.ImageMenuItem(gtk.STOCK_PASTE)
item.set_sensitive(bool(Editor.clipboard))
item.connect('activate', self.on_paste, idx)
m.append(item)
item = gtk.ImageMenuItem(gtk.STOCK_EDIT)
item.connect('activate', self.on_edit_linktext, linked)
item.set_sensitive(bool(not isinstance(linked, basestring)))
m.append(item)
item = gtk.ImageMenuItem(gtk.STOCK_GO_UP)
item.connect('activate', self.on_move_link_up, idx)
item.set_sensitive(bool(idx > 0))
m.append(item)
item = gtk.ImageMenuItem(gtk.STOCK_GO_DOWN)
item.connect('activate', self.on_move_link_down, idx)
item.set_sensitive(bool(idx < len(self.m_model) - 1))
m.append(item)
item = gtk.ImageMenuItem(gtk.STOCK_EDIT)
item.set_sensitive(isinstance(linked, unicode))
item.connect('activate', self.on_edit_file, idx)
m.append(item)
m.show_all()
m.popup(None, None, None, event.button, event.time)
return True
def on_delete_link(self, menuitem, linked):
idx = self.m_model.index(linked)
if id(linked) in editor_of(self).m_page_mapping:
editor_of(self).destroy_window(id(linked))
self.g_link_box.get_children()[idx].destroy()
del self.m_model[idx]
def on_edit_linktext(self, menuitem, linked):
idx = self.m_model.index(linked)
# row is the hbox containing the linkbutton
row = self.g_link_box.get_children()[idx]
linkbutton = row.get_children()[0]
entry = gtk.Entry()
entry.set_text(linkbutton.get_label())
row.pack_start(entry)
linkbutton.hide()
entry.show()
entry.grab_focus()
def finish_edit(entry):
linkbutton.set_label(entry.get_text().decode("utf-8"))
linkbutton.get_children()[0].set_alignment(0.0, 0.5)
linkbutton.show()
self.m_model[idx].m_name = entry.get_text().decode("utf-8")
entry.destroy()
sid = entry.connect('activate', finish_edit)
def keydown(entry, event):
if event.keyval == gtk.keysyms.Tab:
finish_edit(entry)
entry.connect('key-press-event', keydown)
def keyup(entry, event):
if event.keyval == gtk.keysyms.Escape:
linkbutton.show()
entry.disconnect(sid)
entry.destroy()
return True
entry.connect('key-release-event', keyup)
def on_edit_file(self, menuitem, linked):
try:
try:
subprocess.call((cfg.get_string("programs/text-editor"),
lessonfile.uri_expand(self.m_model[linked])))
except OSError, e:
raise osutils.BinaryForProgramException("Text editor", cfg.get_string("programs/text-editor"), e)
except osutils.BinaryForProgramException, e:
solfege.win.display_error_message2(e.msg1, e.msg2)
def on_cut(self, btn):
self.m_parent.cut_section(self)
def on_cut_link(self, menuitem, idx):
Editor.clipboard.append(self.m_model[idx])
del self.m_model[idx]
self.g_link_box.get_children()[idx].destroy()
def on_paste(self, btn, idx):
assert Editor.clipboard, "Paste buttons should be insensitive when the clipboard is empty."
pobj = Editor.clipboard.pop()
if isinstance(pobj, pd.LinkList):
mobj = pd.Page(pobj.m_name, [pd.Column(pobj)])
else:
mobj = pobj
if idx == -1:
self.m_model.append(mobj)
self.g_link_box.pack_start(self.create_linkrow(mobj))
else:
self.m_model.insert(idx, mobj)
row = self.create_linkrow(mobj)
self.g_link_box.pack_start(row)
self.g_link_box.reorder_child(row, idx)
def on_move_link_up(self, btn, idx):
"""
Move the link one row up.
"""
assert idx > 0
self.m_model[idx], self.m_model[idx - 1] = self.m_model[idx - 1], self.m_model[idx]
self.g_link_box.reorder_child(self.g_link_box.get_children()[idx], idx - 1)
def on_move_link_down(self, btn, idx=None):
"""
Move the link one row down.
"""
self.m_model[idx], self.m_model[idx + 1] = self.m_model[idx + 1], self.m_model[idx]
self.g_link_box.reorder_child(self.g_link_box.get_children()[idx], idx + 1)
class Column(gtk.VBox):
def __init__(self, model, parent):
gtk.VBox.__init__(self)
self.set_spacing(gu.hig.SPACE_MEDIUM)
self.m_model = model
self.m_parent = parent
assert isinstance(model, pd.Column)
self.g_section_box = gtk.VBox()
self.g_section_box.set_spacing(gu.hig.SPACE_MEDIUM)
self.pack_start(self.g_section_box, False)
for section in model:
assert isinstance(section, pd.LinkList)
gui_section = Section(section, self)
self.g_section_box.pack_start(gui_section, False)
hbox = gtk.HBox()
self.pack_start(hbox, False)
b = gtk.Button(_("Add section"))
hbox.pack_start(b, False)
b.connect('clicked', self.on_add_section)
b = gtk.Button(stock=gtk.STOCK_PASTE)
b.connect('clicked', self.on_paste)
Editor.clipboard.register_paste_button(b, pd.LinkList)
hbox.pack_start(b, False)
def __del__(self):
logging.debug("Column.__del__")
def cut_section(self, section):
idx = self.g_section_box.get_children().index(section)
Editor.clipboard.append(self.m_model[idx])
del self.m_model[idx]
self.g_section_box.get_children()[idx].destroy()
def remove_section(self, section):
idx = self.g_section_box.get_children().index(section)
del self.m_model[idx]
self.g_section_box.get_children()[idx].destroy()
def on_add_section(self, btn):
# We write "Untitled%s" % "" instead of just "Untitled" here
# since "Untitled%s" is already translated in many languages.
        section = pd.LinkList(_("Untitled%s") % "")
self.m_model.append(section)
gui_section = Section(section, self)
self.g_section_box.pack_start(gui_section, False)
gui_section.show_all()
def move_section_down(self, widget, section):
idx = self.g_section_box.get_children().index(section)
if idx < len(self.g_section_box.get_children()) - 1:
self.g_section_box.reorder_child(section, idx + 1)
self.m_model[idx], self.m_model[idx + 1] \
= self.m_model[idx + 1], self.m_model[idx]
self.m_parent.update_buttons()
def move_section_up(self, widget, section):
idx = self.g_section_box.get_children().index(section)
if idx > 0:
self.g_section_box.reorder_child(section, idx - 1)
self.m_model[idx], self.m_model[idx - 1] \
= self.m_model[idx - 1], self.m_model[idx]
self.m_parent.update_buttons()
def on_paste(self, widget):
"""
Paste the clipboard as a new section to this column.
"""
assert Editor.clipboard, "Paste buttons should be insensitive when the clipboard is empty."
assert isinstance(Editor.clipboard[-1], pd.LinkList)
pobj = Editor.clipboard.pop()
self.m_model.append(pobj)
sect = Section(pobj, self)
sect.show_all()
self.g_section_box.pack_start(sect, False)
class Page(gtk.VBox):
def __init__(self, model, parent):
gtk.VBox.__init__(self)
self.m_model = model
self.m_parent = parent
sc = gtk.ScrolledWindow()
sc.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.pack_start(sc)
self.g_column_box = gtk.HBox()
self.g_column_box.set_spacing(gu.hig.SPACE_LARGE)
self.g_column_box.set_border_width(gu.hig.SPACE_SMALL)
# We pack column into this box
sc.add_with_viewport(self.g_column_box)
self.show_all()
if model:
self.update_from_model()
def __del__(self):
logging.debug("Page.__del__:", self.m_model.m_name)
def on_add_column(self, *btn):
column = pd.Column()
self.m_model.append(column)
gcol = Column(column, self)
gcol.show_all()
self.g_column_box.pack_start(gcol)
def on_move_section_left(self, button, section):
column_idx = self.g_column_box.get_children().index(section.m_parent)
section_idx = section.m_parent.g_section_box.get_children().index(section)
if column_idx > 0:
to_column = self.g_column_box.get_children()[column_idx - 1]
section.reparent(to_column.g_section_box)
section.m_parent = to_column
to_column.g_section_box.set_child_packing(section, False, False, 0, gtk.PACK_START)
self.m_model[column_idx - 1].append(self.m_model[column_idx][section_idx])
del self.m_model[column_idx][section_idx]
# Remove the right-most column if we moved the
# last section out of it.
if not self.g_column_box.get_children()[-1].g_section_box.get_children():
assert len(self.m_model[-1]) == 0
del self.m_model[-1]
self.g_column_box.get_children()[-1].destroy()
self.update_buttons()
def on_move_section_right(self, button, section):
# the column we move from
column_idx = self.g_column_box.get_children().index(section.m_parent)
section_idx = section.m_parent.g_section_box.get_children().index(section)
if column_idx == len(self.g_column_box.get_children()) - 1:
self.on_add_column()
to_column = self.g_column_box.get_children()[column_idx + 1]
section.reparent(to_column.g_section_box)
section.m_parent = to_column
to_column.g_section_box.set_child_packing(section, False, False, 0, gtk.PACK_START)
to_section_idx = len(self.m_model[column_idx + 1])
self.m_model[column_idx + 1].append(self.m_model[column_idx][section_idx])
del self.m_model[column_idx][section_idx]
self.update_buttons()
def update_from_model(self):
for child in self.g_column_box.get_children():
child.destroy()
for column in self.m_model:
self.g_column_box.pack_start(Column(column, self))
self.g_column_box.show_all()
self.update_buttons()
def update_buttons(self):
num_cols = len(self.g_column_box.get_children())
for col_idx, column in enumerate(self.g_column_box.get_children()):
num_sects = len(column.g_section_box.get_children())
for sect_idx, section in enumerate(column.g_section_box.get_children()):
section.g_move_up_btn.set_sensitive(sect_idx != 0)
section.g_move_down_btn.set_sensitive(sect_idx != num_sects -1)
section.g_move_left_btn.set_sensitive(col_idx != 0)
if [col for col in self.g_column_box.get_children() if not col.g_section_box.get_children()] and col_idx == num_cols - 1:
section.g_move_right_btn.set_sensitive(False)
else:
section.g_move_right_btn.set_sensitive(True)
class Clipboard(list):
def __init__(self, v=[]):
        list.__init__(self, v)
self.m_paste_buttons = []
def pop(self, i=-1):
ret = list.pop(self, i)
self.update_buttons()
return ret
def append(self, obj):
list.append(self, obj)
self.update_buttons()
def register_paste_button(self, button, accepts_types):
button.set_sensitive(bool(self) and isinstance(self[-1], accepts_types))
self.m_paste_buttons.append((button, accepts_types))
def update_buttons(self):
for button, types in self.m_paste_buttons:
button.set_sensitive(bool(self) and isinstance(self[-1], types))
class Editor(gtk.Window, gu.EditorDialogBase):
savedir = os.path.join(filesystem.user_data(), u'exercises', u'user')
# The clipboard will be shared between all Editor instances
clipboard = Clipboard()
def __init__(self, filename=None):
gtk.Window.__init__(self)
logging.debug("fpeditor.Editor.__init__(%s)", filename)
gu.EditorDialogBase.__init__(self, filename)
self.set_default_size(800, 600)
self.g_main_box = gtk.VBox()
self.add(self.g_main_box)
self.g_actiongroup.add_actions([
('GoBack', gtk.STOCK_GO_BACK, None, None, None, self.go_back),
])
self.setup_toolbar()
self.g_title_hbox = gtk.HBox()
self.g_title_hbox.set_spacing(gu.hig.SPACE_SMALL)
self.g_title_hbox.set_border_width(gu.hig.SPACE_SMALL)
label = gtk.Label()
label.set_markup(u"<b>%s</b>" % _("Front page title:"))
self.g_title_hbox.pack_start(label, False)
self.g_fptitle = gtk.Entry()
self.g_title_hbox.pack_start(self.g_fptitle)
self.g_main_box.pack_start(self.g_title_hbox, False)
# This dict maps the windows created for all pages belonging to
# the file.
self.m_page_mapping = {}
self.m_model = None
if filename:
self.load_file(filename)
else:
self.m_model = pd.Page(_("Untitled%s") % self.m_instance_number,
pd.Column())
self.set_not_modified()
self.add_page(Page(self.m_model, self))
self.clipboard.update_buttons()
self.show_all()
self.add_to_instance_dict()
self.g_fptitle.set_text(self.m_model.m_name)
self.g_fptitle.connect('changed', self.on_frontpage_title_changed)
def __del__(self):
logging.debug("fpeditor.Editor.__del__, filename=%s", self.m_filename)
def add_page(self, page):
"""
Add and show the page.
"""
editor_of(self).m_page_mapping[id(page.m_model)] = page
self.g_main_box.pack_start(page)
self.show_page(page)
def show_page_id(self, page_id):
self.show_page(self.m_page_mapping[page_id])
def show_page(self, page):
"""
Hide the currently visible page, and show PAGE instead.
"""
try:
self.g_visible_page.hide()
except AttributeError:
pass
self.g_visible_page = page
page.show()
if isinstance(page.m_parent, Page):
self.g_title_hbox.hide()
else:
self.g_title_hbox.show()
self.g_ui_manager.get_widget("/Toolbar/GoBack").set_sensitive(
not isinstance(self.g_visible_page.m_parent, Editor))
def go_back(self, *action):
self.show_page(self.g_visible_page.m_parent)
def on_frontpage_title_changed(self, widget):
self.m_model.m_name = widget.get_text()
def setup_toolbar(self):
self.g_ui_manager.insert_action_group(self.g_actiongroup, 0)
uixml = """
<ui>
<toolbar name='Toolbar'>
<toolitem action='GoBack'/>
<toolitem action='New'/>
<toolitem action='Open'/>
<toolitem action='Save'/>
<toolitem action='SaveAs'/>
<toolitem action='Close'/>
<toolitem action='Help'/>
</toolbar>
<accelerator action='Close'/>
<accelerator action='New'/>
<accelerator action='Open'/>
<accelerator action='Save'/>
</ui>
"""
self.g_ui_manager.add_ui_from_string(uixml)
toolbar = self.g_ui_manager.get_widget("/Toolbar")
self.g_main_box.pack_start(toolbar, False)
self.g_main_box.reorder_child(toolbar, 0)
self.g_ui_manager.get_widget("/Toolbar").set_style(gtk.TOOLBAR_BOTH)
def destroy_window(self, window_id):
"""
Destroy the window with the id 'windowid' and all subwindows.
"""
def do_del(wid):
            for key in list(self.m_page_mapping):
                if key not in self.m_page_mapping:
                    continue  # already removed by a recursive call
                parent = parent_page(self.m_page_mapping[key])
if id(parent) == wid:
do_del(key)
editor_of(self).m_page_mapping[wid].destroy()
del editor_of(self).m_page_mapping[wid]
do_del(window_id)
@staticmethod
def edit_file(fn):
if fn in Editor.instance_dict:
Editor.instance_dict[fn].present()
else:
try:
win = Editor(fn)
win.show()
except IOError, e:
gu.dialog_ok(_("Loading file '%(filename)s' failed: %(msg)s") %
{'filename': fn, 'msg': str(e).decode('utf8', 'replace')})
def load_file(self, filename):
"""
Load a file into a empty, newly created Editor object.
"""
        assert self.m_model is None
self.m_model = pd.load_tree(filename, C_locale=True)
self.m_filename = filename
#
if not os.path.isabs(filename):
if not os.access(filename, os.W_OK):
m = gtk.MessageDialog(self, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO,
gtk.BUTTONS_CLOSE, _("The front page file is write protected in your install. This is normal. If you want to edit a front page file, you have to select one of the files stored in .solfege/exercises/*/ in your home directory."))
m.run()
m.destroy()
self.set_not_modified()
self.set_title(self.m_filename)
def set_not_modified(self):
"""
Store the current state of the data in self.m_orig_dump so that
is_modified() will return False until we make new changes.
"""
io = StringIO.StringIO()
self.m_model.dump(io)
self.m_orig_dump = io.getvalue()
def is_modified(self):
"""
Return True if the data has changed since the last call to
set_not_modified()
"""
io = StringIO.StringIO()
self.m_model.dump(io)
s = io.getvalue()
return s != self.m_orig_dump
@property
def m_changed(self):
return self.is_modified()
def save(self, w=None):
assert self.m_filename
save_location = os.path.split(self.m_filename)[0] + os.sep
fh = pd.FileHeader(1, self.m_model)
fh.save_file(self.m_filename)
self.set_not_modified()
# We do test for solfege.win since it is not available during testing
if hasattr(solfege, 'win'):
solfege.win.load_frontpage()
def on_show_help(self, *w):
return
def get_save_as_dialog(self):
dialog = gu.EditorDialogBase.get_save_as_dialog(self)
ev2 = gtk.EventBox()
ev2.set_name("DIALOGWARNING2")
ev = gtk.EventBox()
ev.set_border_width(gu.hig.SPACE_SMALL)
ev2.add(ev)
ev.set_name("DIALOGWARNING")
label = gtk.Label()
label.set_padding(gu.hig.SPACE_MEDIUM, gu.hig.SPACE_MEDIUM)
ev.add(label)
label.set_markup(_("<b>IMPORTANT:</b> Your front page file <b>must</b> be saved in a subdirectory below the directory named exercises. See the user manual for details."))
dialog.set_extra_widget(ev2)
ev2.show_all()
return dialog
if __name__ == '__main__':
gtk.link_button_set_uri_hook(lambda a, b: None)
e = Editor()
e.load_file("learningtrees/learningtree.txt")
gtk.main()
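The class-level `Editor.clipboard` above keeps paste buttons' sensitivity in sync with whatever sits on top of the stack; a framework-free sketch of the same pattern (hypothetical `set_sensitive` callables stand in for gtk buttons):

```python
# Sketch of the Editor.clipboard pattern: buttons register the types they
# accept, and every append/pop re-checks them against the top of the stack.
class MiniClipboard(list):
    def __init__(self):
        super().__init__()
        self._buttons = []  # (set_sensitive, accepted_types) pairs

    def register(self, set_sensitive, accepted_types):
        self._buttons.append((set_sensitive, accepted_types))
        set_sensitive(bool(self) and isinstance(self[-1], accepted_types))

    def _refresh(self):
        for set_sensitive, types in self._buttons:
            set_sensitive(bool(self) and isinstance(self[-1], types))

    def append(self, obj):
        super().append(obj)
        self._refresh()

    def pop(self, i=-1):
        ret = super().pop(i)
        self._refresh()
        return ret

clip = MiniClipboard()
clip.register(lambda on: print("paste button enabled:", on), str)  # -> False
clip.append("a section")   # -> paste button enabled: True
clip.pop()                 # -> paste button enabled: False
```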
| allancarlos123/Solfege | solfege/fpeditor.py | Python | gpl-3.0 | 34,015 | 0.003381 |
#!/usr/bin/env python
"""Generate AMBA AHB specifications for given number of masters.
Translated and adapted from Perl original distributed with Anzu.
https://www.iaik.tugraz.at/content/research/opensource/anzu/#download
"""
import argparse
import math
from omega.logic.syntax import conj
def build_state_str(state_name, op, num_states, value,
padd_value='0', add_next=''):
result = ''
binary = bin(value).lstrip('-0b').zfill(1)[::-1]
for j in xrange(num_states):
if result != '':
result += op
bin_val = padd_value
if j < len(binary):
bin_val = binary[j]
result += '{add_next}({state_name}{j} = {bin_val})'.format(
add_next=add_next, state_name=state_name, j=j, bin_val=bin_val)
return result
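# Worked example: build_state_str('s', ' & ', 4, 6) emits the bits of 6
# LSB-first (bin(6) -> '110', reversed to '011') and pads the remaining state
# bit with '0', producing "(s0 = 0) & (s1 = 1) & (s2 = 1) & (s3 = 0)".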
def build_hmaster_str(master_bits, value):
return build_state_str('hmaster', ' & ', master_bits, value)
def generate_spec(num_masters, use_ba):
# init
master_bits = int(math.ceil(math.log(num_masters) / math.log(2.0)))
master_bits_plus_one = math.ceil(math.log(num_masters + 1) / math.log(2))
assert master_bits > 0, master_bits
assert master_bits_plus_one > 0, master_bits_plus_one
env_initial = list()
sys_initial = ''
env_transitions = ''
sys_transitions = list()
env_fairness = ''
sys_fairness = ''
input_vars = list()
output_vars = list()
###############################################
# ENV_INITIAL and INPUT_VARIABLES
###############################################
env_initial.append('hready = 0')
input_vars += ['hready', 'hburst0', 'hburst1']
for i in xrange(num_masters):
s = 'hbusreq{i} = 0'.format(i=i)
env_initial.append(s)
s = 'hlock{i} = 0'.format(i=i)
env_initial.append(s)
s = 'hbusreq{i}'.format(i=i)
input_vars.append(s)
s = 'hlock{i}'.format(i=i)
input_vars.append(s)
env_initial.append('hburst0 = 0')
env_initial.append('hburst1 = 0')
###############################################
# ENV_TRANSITION
###############################################
for i in xrange(num_masters):
# env_transitions += "#Assumption 3:\n"
env_transitions += "[]( hlock{i} = 1 -> hbusreq{i} = 1 ) & \n".format(i=i)
###############################################
# ENV_FAIRNESS
###############################################
env_fairness += (
# "# Assumption 1: \n"
"[](<>(stateA1_1 = 0)) & \n"
# "\n# Assumption 2:\n"
"[](<>(hready = 1))\n")
###############################################
# SYS_INITIAL + OUTPUT_VARIABLES
###############################################
for i in xrange(master_bits):
sys_initial += 'hmaster{i} = 0 & \n'.format(i=i)
output_vars.append('hmaster{i}'.format(i=i))
output_vars += [
"hmastlock", "start", "locked", "decide", 'hgrant0',
"busreq", "stateA1_0", "stateA1_1", "stateG2",
"stateG3_0", "stateG3_1", "stateG3_2"]
c = [
"hmastlock = 0",
"start = 1",
"decide = 1",
"locked = 0",
"hgrant0 = 1"]
sys_initial += '&\n'.join(c) + '&\n'
for i in xrange(1, num_masters):
sys_initial += "hgrant{i} = 0 & \n".format(i=i)
var = 'hgrant{i}'.format(i=i)
output_vars.append(var)
# busreq = hbusreq[hmaster]
sys_initial += (
"busreq=0 & \n"
# Assumption 1:
"stateA1_0 = 0 & \n"
"stateA1_1 = 0 & \n"
# Guarantee 2:
"stateG2 = 0 & \n"
# Guarantee 3:
"stateG3_0 = 0 & \n"
"stateG3_1 = 0 & \n"
"stateG3_2 = 0 & \n")
# Guarantee 10:
for i in xrange(1, num_masters):
sys_initial += "stateG10_{i} = 0 & \n".format(i=i)
var = 'stateG10_{i}'.format(i=i)
output_vars.append(var)
###############################################
# SYS_TRANSITION
###############################################
# busreq = hbusreq[hmaster]
for i in xrange(num_masters):
hmaster = build_hmaster_str(master_bits, i)
hmaster_X = build_state_str("hmaster", " & ", master_bits, i, 0, 'X')
sys_transitions.append((
"[]({hmaster} -> (hbusreq{i} = 0 <-> busreq=0))").format(
i=i, hmaster=hmaster))
# Assumption 1:
# state 00
sys_transitions.append(
# "# Assumption 1:\n"
"[](((stateA1_1 = 0) & (stateA1_0 = 0) & "
"((hmastlock = 0) | (hburst0 = 1) | (hburst1 = 1))) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 0) & (stateA1_0 = 0) & "
" (hmastlock = 1) & (hburst0 = 0) & (hburst1 = 0)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
# state 10
"[](((stateA1_1 = 1) & (stateA1_0 = 0) & (busreq = 1)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 1) & (stateA1_0 = 0) & (busreq = 0) & "
"((hmastlock = 0) | (hburst0 = 1) | (hburst1 = 1))) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 1) & (stateA1_0 = 0) & (busreq = 0) & "
" (hmastlock = 1) & (hburst0 = 0) & (hburst1 = 0)) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 1))) & \n"
# state 01
"[](((stateA1_1 = 0) & (stateA1_0 = 1) & (busreq = 1)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 0) & (stateA1_0 = 1) & "
" (hmastlock = 1) & (hburst0 = 0) & (hburst1 = 0)) ->\n"
" X((stateA1_1 = 1) & (stateA1_0 = 0))) & \n"
"[](((stateA1_1 = 0) & (stateA1_0 = 1) & (busreq = 0) & "
"((hmastlock = 0) | (hburst0 = 1) | (hburst1 = 1))) ->\n"
" X((stateA1_1 = 0) & (stateA1_0 = 0))) & \n"
# Guarantee 1:
# sys_transitions += "\n# Guarantee 1:\n"
"[]((hready = 0) -> X(start = 0)) & \n"
# Guarantee 2:
# sys_transitions += "\n# Guarantee 2:\n"
"[](((stateG2 = 0) & "
"((hmastlock = 0) | (start = 0) | "
"(hburst0 = 1) | (hburst1 = 1))) -> "
"X(stateG2 = 0)) & \n"
"[](((stateG2 = 0) & "
" (hmastlock = 1) & (start = 1) & "
"(hburst0 = 0) & (hburst1 = 0)) -> "
"X(stateG2 = 1)) & \n"
"[](((stateG2 = 1) & (start = 0) & (busreq = 1)) -> "
"X(stateG2 = 1)) & \n"
"[](((stateG2 = 1) & (start = 1)) -> false) & \n"
"[](((stateG2 = 1) & (start = 0) & (busreq = 0)) -> "
"X(stateG2 = 0)) & \n"
# Guarantee 3:
# sys_transitions += "\n# Guarantee 3:\n"
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0) & \n'
' ((hmastlock = 0) | (start = 0) | ((hburst0 = 1) | (hburst1 = 0)))) ->\n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0) & \n'
' ((hmastlock = 1) & (start = 1) & '
'((hburst0 = 0) & (hburst1 = 1)) & (hready = 0))) -> \n'
' (X(stateG3_0 = 1) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0) & \n'
' ((hmastlock = 1) & (start = 1) & '
'((hburst0 = 0) & (hburst1 = 1)) & (hready = 1))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
' \n'
'[](((stateG3_0 = 1) & (stateG3_1 = 0) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 0))) -> \n'
' (X(stateG3_0 = 1) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 1) & (stateG3_1 = 0) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 1))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
'\n'
'[](((stateG3_0 = 1) & (stateG3_1 = 0) & '
'(stateG3_2 = 0) & ((start = 1))) -> false) &\n'
'\n'
' \n'
'[](((stateG3_0 = 0) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 0))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 1))) -> \n'
' (X(stateG3_0 = 1) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 1))) -> false) &\n'
' \n'
'[](((stateG3_0 = 1) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 0))) -> \n'
' (X(stateG3_0 = 1) & X(stateG3_1 = 1) & X(stateG3_2 = 0))) &\n'
'[](((stateG3_0 = 1) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 0) & (hready = 1))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 0) & X(stateG3_2 = 1))) &\n'
'[](((stateG3_0 = 1) & (stateG3_1 = 1) & '
'(stateG3_2 = 0) & ((start = 1))) -> false) &\n'
' \n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & '
'(stateG3_2 = 1) & ((start = 0) & (hready = 0))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 0) & X(stateG3_2 = 1))) &\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & '
'(stateG3_2 = 1) & ((start = 0) & (hready = 1))) -> \n'
' (X(stateG3_0 = 0) & X(stateG3_1 = 0) & X(stateG3_2 = 0))) & \n'
'\n'
'[](((stateG3_0 = 0) & (stateG3_1 = 0) & '
'(stateG3_2 = 1) & ((start = 1))) -> false)')
# Guarantee 4 and 5:
# sys_transitions += "\n # Guarantee 4 and 5:\n"
for i in xrange(num_masters):
hmaster_X = build_state_str("hmaster", " & ", master_bits, i, 0, 'X')
# '# Master {i}:\n'.format(i=i)
s = "[]((hready = 1) -> ((hgrant{i} = 1) <-> ({hmaster_X})))".format(
i=i, hmaster_X=hmaster_X)
sys_transitions.append(s)
sys_transitions.append(
# "# HMASTLOCK:\n"
"[]((hready = 1) -> (locked = 0 <-> X(hmastlock = 0)))")
# Guarantee 6.1:
# FIXME: It would be sufficient to have one formula for each bit of hmaster
# sys_transitions += "\n# Guarantee 6.1:\n"
for i in xrange(num_masters):
hmaster = build_hmaster_str(master_bits, i)
hmaster_X = build_state_str("hmaster", " & ", master_bits, i, 0, 'X')
# sys_transitions += '# Master {i}:\n'.format(i=i)
sys_transitions.append(
"[](X(start = 0) -> ((" + hmaster + ") <-> (" +
hmaster_X + ")))")
# Guarantee 6.2:
sys_transitions.append(
# "\n# Guarantee 6.2:\n"
"[](((X(start = 0))) -> ((hmastlock = 1) <-> X(hmastlock = 1)))")
# Guarantee 7:
# FIXME: formula can be written as
# G((decide=1 & X(hgrant{i}=1))-> (hlock{i}=1 <-> X(locked=1)))
# sys_transitions += "\n# Guarantee 7:\n"
norequest = list()
for i in xrange(num_masters):
s = ('[]((decide = 1 & hlock{i} = 1 & X(hgrant{i} = 1))->'
'X(locked = 1))').format(i=i)
sys_transitions.append(s)
s = ('[]((decide = 1 & hlock{i} = 0 & X(hgrant{i} = 1))->'
'X(locked = 0))').format(i=i)
sys_transitions.append(s)
s = 'hbusreq{i} = 0'.format(i=i)
norequest.append(s)
# Guarantee 8:
# MW: this formula changes with respect to the number of grant signals
# sys_transitions += "\n# Guarantee 8:\n"
tmp_g8 = ''
for i in xrange(num_masters):
sys_transitions.append((
'[]((decide = 0) -> (((hgrant{i} = 0)'
'<-> X(hgrant{i} = 0))))').format(i=i))
sys_transitions.append('[]((decide = 0)->(locked = 0 <-> X(locked = 0)))')
# Guarantee 10:
# sys_transitions += "\n#Guarantee 10:\n"
for i in xrange(1, num_masters):
hmaster = build_hmaster_str(master_bits, i)
# sys_transitions += "# Master " + i + ":\n"
sys_transitions.append((
'[](((stateG10_{i} = 0) & (((hgrant{i} = 1) |'
'(hbusreq{i} = 1)))) -> X(stateG10_{i} = 0)) & \n'
'[](((stateG10_{i} = 0) & ((hgrant{i} = 0) & '
'(hbusreq{i} = 0))) -> X(stateG10_{i} = 1)) & \n'
'[](((stateG10_{i} = 1) & ((hgrant{i} = 0) & '
'(hbusreq{i} = 0)))-> X(stateG10_{i} = 1)) & \n'
'[](((stateG10_{i} = 1) & (((hgrant{i} = 1)) & '
'(hbusreq{i} = 0))) -> false) & \n'
'[](((stateG10_{i} = 1) & (hbusreq{i} = 1)) -> '
'X(stateG10_{i} = 0))').format(i=i))
sys_transitions.append(
# "#default master\n"
'[]((decide=1 & {norequest}) -> X(hgrant0=1))'.format(
norequest=conj(norequest, sep='\n')))
###############################################
# SYS_FAIRNESS
###############################################
# Guarantee 2:
sys_fairness += (
# "\n# Guarantee 2:\n"
"[](<>(stateG2 = 0)) & \n")
# Guarantee 3:
sys_fairness += (
# "\n# Guarantee 3:\n"
"[](<>((stateG3_0 = 0) & (stateG3_1 = 0) & (stateG3_2 = 0))) \n")
# Guarantee 9:
# sys_fairness += "\n# Guarantee 9:\n"
c = list()
for i in xrange(num_masters):
c.append((
"[](<>((" + build_hmaster_str(master_bits, i) +
") | hbusreq{i} = 0))").format(i=i))
fairness = '&\n'.join(c)
template = ('''do
:: {guard};
{nested}
:: else
od;
''')
ba_fairness = (
'assert active proctype fairness(){' +
recurse_fairness(0, num_masters, master_bits, template) +
'}')
if not use_ba:
sys_fairness += ' & ' + fairness
ba_fairness = ''
###############################################
ltl = [
'assume ltl {',
conj(env_initial, sep='\n'),
' & ',
env_transitions,
env_fairness,
'}',
'assert ltl {',
sys_initial,
conj(sys_transitions, sep='\n'),
' & ',
sys_fairness,
'}',
ba_fairness]
c = [
'free env bit ' + ',\n'.join(input_vars) + ';',
'free sys bit ' + ',\n'.join(output_vars) + ';',
'\n'.join(ltl)]
s = '\n'.join(c)
s = s.replace('=', '==')
s = s.replace('&', '&&')
s = s.replace('|', '||')
return s
def recurse_fairness(i, num_masters, master_bits, template):
if i >= num_masters:
return 'progress: skip; break'
guard = (
"((" + build_hmaster_str(master_bits, i) +
") | hbusreq{i} = 0)").format(i=i)
nested = recurse_fairness(i + 1, num_masters, master_bits, template)
s = template.format(guard=guard, nested=nested)
if i > 1:
s += '\nbreak;\n'
return s
def dump_range_of_specs(n, m, use_ba):
name = 'jcss12_{i}_masters'
if use_ba:
name += '_merged'
name += '.txt'
for i in xrange(n, m + 1):
pml = generate_spec(i, use_ba)
fname = name.format(i=i)
with open(fname, 'w') as f:
f.write(pml)
def main():
description = 'Generator of AMBA AHB bus arbiter spec'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--min', type=int, help='min number of masters')
parser.add_argument('--max', type=int, help='max number of masters')
parser.add_argument('--merged', action='store_true', help='use BA')
args = parser.parse_args()
n = args.min
m = args.max
use_ba = args.merged
dump_range_of_specs(n, m, use_ba)
if __name__ == '__main__':
main()
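For a quick look at the output without writing files, the generator can also be driven directly (a sketch; Python 2, since the module uses `xrange`, and `omega.logic.syntax` must be importable):

```python
# Sketch: build the 2-master spec in memory instead of via the CLI.
# Roughly equivalent to: python amba_generator.py --min 2 --max 2,
# minus the file dump performed by dump_range_of_specs().
spec = generate_spec(num_masters=2, use_ba=False)
print spec.splitlines()[0]  # 'free env bit hready,' -- the declarations come first
```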
| johnyf/gr1experiments | examples/jcss12/amba_generator.py | Python | bsd-3-clause | 15,381 | 0.00013 |
import pyowm
owm = pyowm.OWM('fa7813518ed203b759f116a3bac9bcce')
observation = owm.weather_at_place('London,uk')
w = observation.get_weather()
wtemp = str(w.get_temperature('celsius'))
print(wtemp.strip('{}'))
wtemp_list = list(wtemp)
print(wtemp_list)
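Since `get_temperature('celsius')` returns a plain dict in this pyowm version, its values can be read directly instead of stripping braces from the repr; a sketch against the same legacy API:

```python
# Sketch (legacy pyowm API, as above): index the temperature dict directly.
temps = w.get_temperature('celsius')
print(temps['temp'])   # current reading in Celsius
print(sorted(temps))   # keys such as 'temp', 'temp_max', 'temp_min'
```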
| cmac4603/Home-Utilities-App | wx_str_test.py | Python | gpl-2.0 | 253 | 0.003953 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class bridgegroup_nsip6_binding(base_resource) :
""" Binding class showing the nsip6 that can be bound to bridgegroup.
"""
def __init__(self) :
self._ipaddress = ""
self._td = 0
self._netmask = ""
self._rnat = False
self._id = 0
self.___count = 0
@property
def id(self) :
"""The integer that uniquely identifies the bridge group.<br/>Minimum value = 1<br/>Maximum value = 1000.
"""
try :
return self._id
except Exception as e:
raise e
@id.setter
def id(self, id) :
"""The integer that uniquely identifies the bridge group.<br/>Minimum value = 1<br/>Maximum value = 1000
"""
try :
self._id = id
except Exception as e:
raise e
@property
def td(self) :
"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Minimum value = 0<br/>Maximum value = 4094.
"""
try :
return self._td
except Exception as e:
raise e
@td.setter
def td(self, td) :
"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Minimum value = 0<br/>Maximum value = 4094
"""
try :
self._td = td
except Exception as e:
raise e
@property
def netmask(self) :
"""The network mask for the subnet defined for the bridge group.
"""
try :
return self._netmask
except Exception as e:
raise e
@netmask.setter
def netmask(self, netmask) :
"""The network mask for the subnet defined for the bridge group.
"""
try :
self._netmask = netmask
except Exception as e:
raise e
@property
def ipaddress(self) :
"""The IP address assigned to the bridge group.
"""
try :
return self._ipaddress
except Exception as e:
raise e
@ipaddress.setter
def ipaddress(self, ipaddress) :
"""The IP address assigned to the bridge group.
"""
try :
self._ipaddress = ipaddress
except Exception as e:
raise e
@property
def rnat(self) :
"""Temporary flag used for internal purpose.
"""
try :
return self._rnat
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(bridgegroup_nsip6_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.bridgegroup_nsip6_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.id) :
return str(self.id)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = bridgegroup_nsip6_binding()
updateresource.ipaddress = resource.ipaddress
updateresource.netmask = resource.netmask
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [bridgegroup_nsip6_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].ipaddress = resource[i].ipaddress
updateresources[i].netmask = resource[i].netmask
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = bridgegroup_nsip6_binding()
deleteresource.ipaddress = resource.ipaddress
deleteresource.netmask = resource.netmask
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [bridgegroup_nsip6_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].ipaddress = resource[i].ipaddress
deleteresources[i].netmask = resource[i].netmask
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, id) :
""" Use this API to fetch bridgegroup_nsip6_binding resources.
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, id, filter_) :
""" Use this API to fetch filtered set of bridgegroup_nsip6_binding resources.
	Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, id) :
""" Use this API to count bridgegroup_nsip6_binding resources configued on NetScaler.
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, id, filter_) :
""" Use this API to count the filtered set of bridgegroup_nsip6_binding resources.
	Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = bridgegroup_nsip6_binding()
obj.id = id
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class bridgegroup_nsip6_binding_response(base_response) :
def __init__(self, length=1) :
self.bridgegroup_nsip6_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.bridgegroup_nsip6_binding = [bridgegroup_nsip6_binding() for _ in range(length)]
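def _example_fetch_and_add(client):
	""" Hedged usage sketch (not part of the original file): `client` is
	assumed to be an authenticated nitro_service session. Fetches the
	nsip6 bindings of bridge group 1, then binds an additional subnet;
	the address values are placeholders.
	"""
	bindings = bridgegroup_nsip6_binding.get(client, 1)
	new_binding = bridgegroup_nsip6_binding()
	new_binding.ipaddress = "2001:db8::1"
	new_binding.netmask = "ffff:ffff:ffff:ffff::"
	bridgegroup_nsip6_binding.add(client, new_binding)
	return bindings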
|
mahabs/nitro
|
nssrc/com/citrix/netscaler/nitro/resource/config/network/bridgegroup_nsip6_binding.py
|
Python
|
apache-2.0
| 7,494 | 0.037764 |
# ===========================================================================
# eXe config
# Copyright 2004-2006, University of Auckland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Config settings loaded from exe.conf
Is responsible for the system-wide settings we use
O/S specific config classes are derived from here
"""
from exe.engine.configparser import ConfigParser
from exe.engine.path import Path
from exe.engine.locales import chooseDefaultLocale
from exe.engine import version
import logging
from logging.handlers import RotatingFileHandler
import sys
import os
import gettext
import tempfile
import twisted
import shutil
from exe import globals as G
from exe.engine.stylestore import StyleStore
from exe.webui import common
x_ = lambda s: s
class Config(object):
"""
The Config class contains the configuration information for eXe.
"""
# To build link to git revision
baseGitWebURL = 'https://forja.cenatic.es/plugins/scmgit/cgi-bin/gitweb.cgi?p=iteexe/iteexe.git'
# Class attributes
optionNames = {
'system': ('webDir', 'jsDir', 'port', 'dataDir',
'configDir', 'localeDir', 'browser', 'mediaProfilePath',
'videoMediaConverter_ogv', 'videoMediaConverter_3gp',
'videoMediaConverter_mpg',
'videoMediaConverter_avi', 'audioMediaConverter_ogg',
'audioMediaConverter_au', 'audioMediaConverter_mp3',
'audioMediaConverter_wav', 'ffmpegPath'),
'user': ('locale', 'lastDir', 'showPreferencesOnStart','defaultStyle', 'showIdevicesGrouped','docType','editorMode'),
}
idevicesCategories = {
'activity': [x_('Non-Interactive Activities')],
'reading activity': [x_('Non-Interactive Activities')],
'dropdown activity': [x_('Interactive Activities')],
'java applet': [x_('Non-Textual Information')],
'wiki article': [x_('Non-Textual Information')],
'case study': [x_('Non-Interactive Activities')],
'preknowledge': [x_('Textual Information')],
'scorm quiz': [x_('Interactive Activities')],
'fpd - multi choice activity': [x_('FPD')],
'fpd - cloze activity': [x_('FPD')],
'fpd - cloze activity (modified)': [x_('FPD')],
'fpd - multi select activity': [x_('FPD')],
'fpd - true/false activity': [x_('FPD')],
'fpd - situation': [x_('FPD')],
'fpd - quotation': [x_('FPD')],
'fpd - you should know': [x_('FPD')],
'fpd - highlighted': [x_('FPD')],
'fpd - translation': [x_('FPD')],
'fpd - guidelines students': [x_('FPD')],
'fpd - guidelines teacher': [x_('FPD')],
'fpd - a step ahead': [x_('FPD')],
'fpd - a piece of advice': [x_('FPD')],
'fpd - think about it (with feedback)': [x_('FPD')],
'fpd - think about it (without feedback)': [x_('FPD')],
'fpd - free text': [x_('FPD')],
'image gallery': [x_('Non-Textual Information')],
'image magnifier': [x_('Non-Textual Information')],
'note': [x_('Textual Information')],
'objectives': [x_('Textual Information')],
'multi-choice': [x_('Interactive Activities')],
'multi-select': [x_('Interactive Activities')],
'true-false question': [x_('Interactive Activities')],
'reflection': [x_('Non-Interactive Activities')],
'cloze activity': [x_('Interactive Activities')],
'rss': [x_('Non-Textual Information')],
'external web site': [x_('Non-Textual Information')],
'free text': [x_('Textual Information')],
'click in order game': [x_('Experimental')],
'hangman game': [x_('Experimental')],
'place the objects': [x_('Interactive Activities')],
'memory match game': [x_('Experimental')],
'file attachments': [x_('Non-Textual Information')],
        'sort items': [x_('Interactive Activities')],
'scorm test cloze': [x_('Interactive Activities')],
'scorm test cloze (multiple options)': [x_('Interactive Activities')],
'scorm test dropdown': [x_('Interactive Activities')],
'scorm test multiple choice': [x_('Interactive Activities')]
}
@classmethod
def getConfigPath(cls):
obj = cls.__new__(cls)
obj.configParser = ConfigParser()
obj._overrideDefaultVals()
obj.__setConfigPath()
return obj.configPath
def __init__(self):
"""
Initialise
"""
self.configPath = None
self.configParser = ConfigParser(self.onWrite)
# Set default values
# exePath is the whole path and filename of the exe executable
self.exePath = Path(sys.argv[0]).abspath()
# webDir is the parent directory for styles,scripts and templates
self.webDir = self.exePath.dirname()
self.jsDir = self.exePath.dirname()
# localeDir is the base directory where all the locales are stored
self.localeDir = self.exePath.dirname()/"locale"
# port is the port the exe webserver will listen on
# (previous default, which earlier users might still use, was 8081)
self.port = 51235
# dataDir is the default directory that is shown to the user
# to save packages and exports in
self.dataDir = Path(".")
# configDir is the dir for storing user profiles
# and user made idevices and the config file
self.configDir = Path(".")
#FM: New Styles Directory path
self.stylesDir =Path(self.configDir/'style').abspath()
#FM: Default Style name
self.defaultStyle= u"KIC-IE"
# browser is the name of a predefined browser specified at http://docs.python.org/library/webbrowser.html.
# None for system default
self.browser = None
# docType is the HTML export format
self.docType = 'XHTML'
# locale is the language of the user
self.locale = chooseDefaultLocale(self.localeDir)
# internalAnchors indicate which exe_tmp_anchor tags to generate for each tinyMCE field
# available values = "enable_all", "disable_autotop", or "disable_all"
self.internalAnchors = "enable_all"
self.lastDir = None
self.showPreferencesOnStart = "1"
self.showIdevicesGrouped = "1"
# tinymce option
self.editorMode = 'permissive'
# styleSecureMode : if this [user] key is = 0 , exelearning can run python files in styles
# as websitepage.py , ... ( deactivate secure mode )
self.styleSecureMode="1"
# styles is the list of style names available for loading
self.styles = []
# The documents that we've recently looked at
self.recentProjects = []
# canonical (English) names of iDevices not to show in the iDevice pane
self.hiddeniDevices = []
#Media conversion programs used for XML export system
self.videoMediaConverter_ogv = ""
self.videoMediaConverter_3gp = ""
self.videoMediaConverter_avi = ""
self.videoMediaConverter_mpg = ""
self.audioMediaConverter_ogg = ""
self.audioMediaConverter_au = ""
self.audioMediaConverter_mp3 = ""
self.audioMediaConverter_wav = ""
self.ffmpegPath = ""
self.mediaProfilePath = self.exePath.dirname()/'mediaprofiles'
# likewise, a canonical (English) names of iDevices not to show in the
# iDevice pane but, contrary to the hiddens, these are ones that the
# configuration can specify to turn ON:
self.deprecatediDevices = [ "flash with text", "flash movie", "mp3", \
"attachment"]
# by default, only allow embedding of media types for which a
# browser plugin is found:
self.assumeMediaPlugins = False;
# Let our children override our defaults depending
# on the OS that we're running on
self._overrideDefaultVals()
# Try to make the defaults a little intelligent
# Under devel trees, webui is the default webdir
self.webDir = Path(self.webDir)
if not (self.webDir/'scripts').isdir() \
and (self.webDir/'webui').isdir():
self.webDir /= 'webui'
self.jsDir = Path(self.jsDir)
if not (self.jsDir/'scripts').isdir() \
and (self.jsDir/'jsui').isdir():
self.jsDir /= 'jsui'
# Find where the config file will be saved
self.__setConfigPath()
# Fill in any undefined config options with our defaults
self._writeDefaultConfigFile()
# Now we are ready to serve the application
self.loadSettings()
self.setupLogging()
self.loadLocales()
self.loadStyles()
def _overrideDefaultVals(self):
"""
Override this to override the
default config values
"""
def _getConfigPathOptions(self):
"""
Override this to give a list of
possible config filenames
in order of preference
"""
return ['exe.conf']
def _writeDefaultConfigFile(self):
"""
[Over]writes 'self.configPath' with a default config file
(auto write is on so we don't need to write the file at the end)
"""
if not G.application.portable:
for sectionName, optionNames in self.optionNames.items():
for optionName in optionNames:
defaultVal = getattr(self, optionName)
self.configParser.setdefault(sectionName,
optionName,
defaultVal)
# Logging can't really be changed from inside the program at the moment...
self.configParser.setdefault('logging', 'root', 'INFO')
def __setConfigPath(self):
"""
sets self.configPath to the filename of the config file that we'll
use.
In descendant classes set self.configFileOptions to a list
of directories where the configDir should be in order of preference.
If no config files can be found in these dirs, it will
force creation of the config file in the top dir
"""
# If there's an EXECONF environment variable, use it
self.configPath = None
configFileOptions = map(Path, self._getConfigPathOptions())
if "EXECONF" in os.environ:
envconf = Path(os.environ["EXECONF"])
if envconf.isfile():
self.configPath = os.environ["EXECONF"]
# Otherwise find the most appropriate existing file
if self.configPath is None:
for confPath in configFileOptions:
if confPath.isfile():
self.configPath = confPath
break
else:
# If no config files exist, create and use the
# first one on the list
self.configPath = configFileOptions[0]
folder = self.configPath.abspath().dirname()
if not folder.exists():
folder.makedirs()
self.configPath.touch()
# Now make our configParser
self.configParser.read(self.configPath)
self.configParser.autoWrite = True
def upgradeFile(self):
"""
Called before loading the config file,
removes or upgrades any old settings.
"""
if self.configParser.has_section('system'):
system = self.configParser.system
if system.has_option('appDataDir'):
# Older config files had configDir stored as appDataDir
self.configDir = Path(system.appDataDir)
self.stylesDir =Path(self.configDir)/'style'
# We'll just upgrade their config file for them for now...
system.configDir = self.configDir
system.stylesDir =Path(self.configDir)/'style'
del system.appDataDir
self.audioMediaConverter_au = system.audioMediaConverter_au
self.audioMediaConverter_wav = system.audioMediaConverter_wav
self.videoMediaConverter_ogv = system.videoMediaConverter_ogv
self.videoMediaConverter_3gp = system.videoMediaConverter_3gp
self.videoMediaConverter_avi = system.videoMediaConverter_avi
self.videoMediaConverter_mpg = system.videoMediaConverter_mpg
self.audioMediaConverter_ogg = system.audioMediaConverter_ogg
self.audioMediaConverter_mp3 = system.audioMediaConverter_mp3
self.ffmpegPath = system.ffmpegPath
self.mediaProfilePath = system.mediaProfilePath
if system.has_option('greDir'):
# No longer used, system should automatically support
del system.greDir
def loadSettings(self):
"""
Loads the settings from the exe.conf file.
Overrides the defaults set in __init__
"""
# Set up the parser so that if a certain value is not in the config
# file, it will use the value from our default values
def defVal(dummy, option):
"""If something is not in the config file, just use the default in
'self'"""
return getattr(self, option)
self.configParser.defaultValue = defVal
self.upgradeFile()
# System Section
if self.configParser.has_section('system'):
system = self.configParser.system
self.port = int(system.port)
self.browser = None if system.browser == u"None" else system.browser
if not G.application.portable:
self.dataDir = Path(system.dataDir)
self.configDir = Path(system.configDir)
self.webDir = Path(system.webDir)
self.stylesDir = Path(self.configDir)/'style'
self.jsDir = Path(system.jsDir)
else:
self.stylesDir = Path(self.webDir/'style').abspath()
self.assumeMediaPlugins = False;
if self.configParser.has_option('system', \
'assumeMediaPlugins'):
value = system.assumeMediaPlugins.strip().lower()
if value == "1" or value == "yes" or value == "true" or \
value == "on":
self.assumeMediaPlugins = True;
# If the dataDir points to some other dir, fix it
if not self.dataDir.isdir():
self.dataDir = tempfile.gettempdir()
# make the webDir absolute, to hide path joins of relative paths
self.webDir = self.webDir.expand().abspath()
# If the configDir doesn't exist (as it may be a default setting with a
# new installation) create it
if not self.configDir.exists():
self.configDir.mkdir()
if not G.application.standalone:
#FM: Copy styles
if not os.path.exists(self.stylesDir) or not os.listdir(self.stylesDir):
self.copyStyles()
else:
self.updateStyles()
else:
if G.application.portable:
if os.name == 'posix':
self.stylesDir = Path(self.webDir/'..'/'..'/'..'/'style')
else:
self.stylesDir = Path(self.webDir/'..'/'style')
if not os.path.exists(self.stylesDir) or not os.listdir(self.stylesDir):
self.copyStyles()
else:
self.stylesDir = Path(self.webDir/'style').abspath()
# Get the list of recently opened projects
self.recentProjects = []
if self.configParser.has_section('recent_projects'):
recentProjectsSection = self.configParser.recent_projects
# recentProjectsSection.items() is in the wrong order, keys are alright.
# Sorting list by key before adding to self.recentProjects, to avoid wrong ordering
# in Recent Projects menu list
recentProjectsItems = recentProjectsSection.items();
recentProjectsItems.sort()
for key, path in recentProjectsItems:
self.recentProjects.append(path)
# Load the list of "hidden" iDevices
self.hiddeniDevices = []
if self.configParser.has_section('idevices'):
idevicesSection = self.configParser.idevices
for key,value in idevicesSection.items():
# emulate standard library's getboolean()
value = value.strip().lower()
if value == "0" or value == "no" or value == "false" or \
value == "off":
self.hiddeniDevices.append(key.lower())
#self.deprecatediDevices = [ "flash with text", "flash movie", ...]
# and UN-Load from the list of "deprecated" iDevices
if self.configParser.has_section('deprecated'):
deprecatedSection = self.configParser.deprecated
for key,value in deprecatedSection.items():
# emulate standard library's getboolean()
value = value.strip().lower()
if value == "1" or value == "yes" or value == "true" or \
value == "on":
if key.lower() in self.deprecatediDevices:
self.deprecatediDevices.remove(key.lower())
# Load the "user" section
if self.configParser.has_section('user'):
if self.configParser.user.has_option('editorMode'):
self.editorMode = self.configParser.user.editorMode
if self.configParser.user.has_option('docType'):
self.docType = self.configParser.user.docType
common.setExportDocType(self.configParser.user.docType)
if self.configParser.user.has_option('defaultStyle'):
self.defaultStyle= self.configParser.user.defaultStyle
if self.configParser.user.has_option('styleSecureMode'):
self.styleSecureMode= self.configParser.user.styleSecureMode
if self.configParser.user.has_option('internalAnchors'):
self.internalAnchors = self.configParser.user.internalAnchors
if self.configParser.user.has_option('lastDir'):
self.lastDir = self.configParser.user.lastDir
if self.configParser.user.has_option('showPreferencesOnStart'):
self.showPreferencesOnStart = self.configParser.user.showPreferencesOnStart
if self.configParser.user.has_option('showIdevicesGrouped'):
self.showIdevicesGrouped = self.configParser.user.showIdevicesGrouped
if self.configParser.user.has_option('locale'):
self.locale = self.configParser.user.locale
return
self.locale = chooseDefaultLocale(self.localeDir)
def onWrite(self, configParser):
"""
Called just before the config file is written.
We use it to fill out any settings that are stored here and
not in the config parser itself
"""
# Recent projects
self.configParser.delete('recent_projects')
recentProjectsSection = self.configParser.addSection('recent_projects')
for num, path in enumerate(self.recentProjects):
recentProjectsSection[str(num)] = path
def setupLogging(self):
"""
setup logging file
"""
try:
hdlr = RotatingFileHandler(self.configDir/'exe.log', 'a',
500000, 10)
hdlr.doRollover()
except OSError:
            # ignore the error we get if the log file is locked
hdlr = logging.FileHandler(self.configDir/'exe.log')
format = "%(asctime)s %(name)s %(levelname)s %(message)s"
log = logging.getLogger()
hdlr.setFormatter(logging.Formatter(format))
log.addHandler(hdlr)
loggingLevels = {"DEBUG" : logging.DEBUG,
"INFO" : logging.INFO,
"WARNING" : logging.WARNING,
"ERROR" : logging.ERROR,
"CRITICAL" : logging.CRITICAL }
if self.configParser.has_section('logging'):
for logger, level in self.configParser._sections["logging"].items():
if logger == "root":
logging.getLogger().setLevel(loggingLevels[level])
else:
logging.getLogger(logger).setLevel(loggingLevels[level])
if not G.application.portable:
log.info("************** eXe logging started **************")
log.info("version = %s" % version.version)
log.info("configPath = %s" % self.configPath)
log.info("exePath = %s" % self.exePath)
log.info("libPath = %s" % Path(twisted.__path__[0]).splitpath()[0])
log.info("browser = %s" % self.browser)
log.info("webDir = %s" % self.webDir)
log.info("jsDir = %s" % self.jsDir)
log.info("localeDir = %s" % self.localeDir)
log.info("port = %d" % self.port)
log.info("dataDir = %s" % self.dataDir)
log.info("configDir = %s" % self.configDir)
log.info("locale = %s" % self.locale)
log.info("internalAnchors = %s" % self.internalAnchors)
def loadStyles(self):
"""
Scans the eXe style directory and builds a list of styles
"""
self.styleStore = StyleStore(self)
listStyles = self.styleStore.getStyles()
for style in listStyles:
self.styles.append(style)
#print style
def copyStyles(self):
bkstyle=self.webDir/'style'
dststyle=self.stylesDir
if os.path.exists(bkstyle):
if os.path.exists(dststyle) and not os.listdir(self.stylesDir): shutil.rmtree(dststyle)
shutil.copytree(bkstyle,dststyle )
def updateStyles(self):
bkstyle=self.webDir/'style'
dststyle=self.stylesDir
if os.stat(bkstyle).st_mtime - os.stat(dststyle).st_mtime > 1:
for name in os.listdir(bkstyle):
bksdirstyle=os.path.join(bkstyle, name)
dstdirstyle=os.path.join(dststyle, name)
if os.path.isdir(bksdirstyle):
if os.path.exists(dstdirstyle):shutil.rmtree(dstdirstyle)
shutil.copytree(bksdirstyle, dstdirstyle)
else:
shutil.copy(bksdirstyle, dstdirstyle)
def loadLocales(self):
"""
Scans the eXe locale directory and builds a list of locales
"""
log = logging.getLogger()
log.debug("loadLocales")
gettext.install('exe', self.localeDir, True)
self.locales = {}
for subDir in self.localeDir.dirs():
if (subDir/'LC_MESSAGES'/'exe.mo').exists():
self.locales[subDir.basename()] = \
gettext.translation('exe',
self.localeDir,
languages=[str(subDir.basename())])
if subDir.basename() == self.locale:
locale = subDir.basename()
log.debug(" loading locale %s" % locale)
self.locales[locale].install(unicode=True)
__builtins__['c_'] = lambda s: self.locales[locale].ugettext(s) if s else s
# ===========================================================================
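def _example_config_path():
    # Hedged usage sketch (not part of the original module): locate the
    # config file eXe would use without constructing a full Config
    # instance (see Config.getConfigPath above).
    return Config.getConfigPath()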
|
kohnle-lernmodule/KITexe201based
|
exe/engine/config.py
|
Python
|
gpl-2.0
| 25,533 | 0.005953 |
# Copyright 2014 Modelling, Simulation and Design Lab (MSDL) at
# McGill University and the University of Antwerp (http://msdl.cs.mcgill.ca/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import subprocess
import os
import sys
from testRealtime import TestRealtime
if __name__ == '__main__':
realtime = unittest.TestLoader().loadTestsFromTestCase(TestRealtime)
allTests = unittest.TestSuite()
allTests.addTest(realtime)
unittest.TextTestRunner(verbosity=2, failfast=True).run(allTests)
|
kdheepak89/pypdevs
|
test/test_realtime.py
|
Python
|
apache-2.0
| 1,022 | 0.001957 |
#!/usr/bin/env python3
import csv
import argparse
FLAG = None
def write_file(feats, lab_list, fn):
    with open(fn, 'w') as f:
        for num, i in enumerate(feats):
            for j in range(len(i)):
                f.write(str(i[j]) + ',')
            f.write(str(lab_list[num]) + '\n')
    return
def transform(feats, lens):
    dim = FLAG.feat_dim
    trans_feats = []
    for i in range(len(feats)):
        # keep only the real frames: lens[i] frames of dim features each
        trans_feats.append(feats[i][:lens[i] * dim])
    return trans_feats
def read_feat(fn):
feats = []
labs = []
with open(fn,'r') as f:
reader = csv.reader(f)
for row in reader:
feats.append(list(map(float,row[:-1])))
labs.append(float(row[-1]))
return feats, labs
def read_len(fn):
len_list = []
with open(fn,'r') as f:
for line in f:
len_list.append(int(line.rstrip()))
return len_list
def main():
    len_list = read_len(FLAG.len_file)
    ark_list, lab_list = read_feat(FLAG.ark_file)
    trans_feats = transform(ark_list, len_list)
    write_file(trans_feats, lab_list, FLAG.out_ark)
    return
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Trim the zero-padding from fixed-length feature vectors')
parser.add_argument('--feat_dim',type=int, default=39,
help='each frame feat dimension')
parser.add_argument('ark_file',
help='the transforming ark file')
parser.add_argument('len_file',
help='meaning the length of each utterance')
parser.add_argument('out_ark',
help='the output file')
FLAG = parser.parse_args()
main()
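# Hedged usage sketch (not in the original): invoked from the shell as
#   python3 trans_len.py --feat_dim 39 feats.ark utt_lens.txt trimmed.ark
# where feats.ark holds zero-padded, comma-separated feature rows with the
# label in the last column, and utt_lens.txt holds one frame count per line.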
|
hoaaoh/Audio2Vec
|
src/trans_len.py
|
Python
|
apache-2.0
| 1,528 | 0.013089 |
#!/usr/bin/env python
'''Simple viewer for DDS texture files.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
import getopt
import sys
import textwrap
from SDL import *
from pyglet.gl.VERSION_1_1 import *
import pyglet.dds
import pyglet.event
import pyglet.image
import pyglet.sprite
import pyglet.window
from OpenGL.GLU import *
def usage():
print textwrap.dedent('''
Usage: ddsview.py [--header] texture1.dds texture2.dds ...
--header Dump the header of each file instead of displaying.
Within the program, press:
left/right keys Flip between loaded textures
up/down keys Increase/decrease mipmap level for a texture
space Toggle flat or sphere view
Click and drag with mouse to reposition texture with wrapping.
''')
texture_index = 0
textures = []
mipmap_level = 0
last_pos = None
texture_offset = [0, 0]
view = 'flat'
sphere_angle = 0
def keydown(character, symbol, modifiers):
global mipmap_level, texture_index
if symbol == SDLK_DOWN:
mipmap_level = max(0, mipmap_level - 1)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, mipmap_level)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, mipmap_level)
elif symbol == SDLK_UP:
mipmap_level = mipmap_level + 1
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, mipmap_level)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, mipmap_level)
elif symbol == SDLK_LEFT:
texture_index = max(0, texture_index - 1)
elif symbol == SDLK_RIGHT:
texture_index = min(len(textures) - 1, texture_index + 1)
elif symbol == SDLK_SPACE:
toggle_view()
return True
def mousemotion(x, y):
global last_pos
state, x, y = SDL_GetMouseState()
if state & SDL_BUTTON(1):
texture_offset[0] += x - last_pos[0]
texture_offset[1] += y - last_pos[1]
update_texture_matrix()
last_pos = x, y
def update_texture_matrix():
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glTranslatef(-texture_offset[0] / float(textures[texture_index].size[0]),
-texture_offset[1] / float(textures[texture_index].size[1]),
0)
glMatrixMode(GL_MODELVIEW)
def toggle_view():
global view
if view != 'flat':
pyglet.event.pop()
pyglet.window.set_2d()
view = 'flat'
else:
pyglet.event.push()
pyglet.event.on_mousemotion(sphere_mousemotion)
pyglet.window.set_3d()
glEnable(GL_LIGHT0)
glLightfv(GL_LIGHT0, GL_POSITION, (c_float * 4)(0.5, 0.5, 1, 0))
view = 'sphere'
def sphere_mousemotion(x, y):
# TODO: virtual trackball
return True
def draw_sphere():
global sphere_angle
glPushMatrix()
glTranslatef(0., 0., -4)
glRotatef(sphere_angle, 0, 1, 0)
glRotatef(90, 1, 0, 0)
sphere_angle += 0.01
glPushAttrib(GL_ENABLE_BIT)
glEnable(GL_DEPTH_TEST)
glEnable(GL_LIGHTING)
glEnable(GL_TEXTURE_2D)
glBindTexture(GL_TEXTURE_2D, textures[texture_index].id)
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE)
sphere = gluNewQuadric()
gluQuadricTexture(sphere, True)
gluSphere(sphere, 1.0, 100, 100)
gluDeleteQuadric(sphere)
glPopAttrib()
glPopMatrix()
def main(args):
header = False
options, args = getopt.getopt(args[1:], 'h', ['help', 'header'])
for option, value in options:
if option in ('-h', '--help'):
usage()
sys.exit()
elif option == '--header':
header = True
if len(args) < 1:
usage()
sys.exit()
if header:
for arg in args:
print pyglet.dds.DDSURFACEDESC2(open(arg,
'r').read(pyglet.dds.DDSURFACEDESC2.get_size()))
else:
pyglet.window.set_window(resizable=True)
global textures, texture_index
textures = [pyglet.dds.load_dds(arg) for arg in args]
texture_index = 0
pyglet.window.resize(*textures[0].size)
pyglet.event.push()
pyglet.event.on_keydown(keydown)
pyglet.event.on_mousemotion(mousemotion)
global last_pos
state, x, y = SDL_GetMouseState()
last_pos = x, y
glClearColor(0, 0, 0, 0)
while not pyglet.event.is_quit():
pyglet.event.pump()
pyglet.window.clear()
if view == 'flat':
textures[texture_index].draw()
elif view == 'sphere':
draw_sphere()
pyglet.window.flip()
if __name__ == '__main__':
main(sys.argv)
|
shaileshgoogler/pyglet
|
tools/ddsview.py
|
Python
|
bsd-3-clause
| 4,680 | 0.002564 |
"""
Converts argparse parser actions into json "Build Specs"
"""
import argparse
from argparse import (
_CountAction,
_HelpAction,
_StoreConstAction,
_StoreFalseAction,
_StoreTrueAction,
ArgumentParser, _SubParsersAction)
from collections import OrderedDict
from functools import partial
VALID_WIDGETS = (
'FileChooser',
'MultiFileChooser',
'FileSaver',
'DirChooser',
'DateChooser',
'TextField',
'Dropdown',
'Counter',
'RadioGroup',
'CheckBox',
'MultiDirChooser'
)
class UnknownWidgetType(Exception):
pass
class UnsupportedConfiguration(Exception):
pass
def convert(parser):
widget_dict = getattr(parser, 'widgets', {})
actions = parser._actions
if has_subparsers(actions):
if has_required(actions):
raise UnsupportedConfiguration("Gooey doesn't currently support required arguments when subparsers are present.")
layout_type = 'column'
layout_data = {name.lower(): process(sub_parser, widget_dict) for name, sub_parser in get_subparser(actions).choices.iteritems()}
else:
layout_type = 'standard'
layout_data = process(parser, widget_dict)
return {
'layout_type': layout_type,
'widgets': layout_data
}
def process(parser, widget_dict):
mutually_exclusive_group = [
mutex_action
for group_actions in parser._mutually_exclusive_groups
for mutex_action in group_actions._group_actions]
base_actions = [action for action in parser._actions
if action not in mutually_exclusive_group
and action.dest != 'help']
required_actions = filter(is_required, base_actions)
optional_actions = filter(is_optional, base_actions)
return list(categorize(required_actions, widget_dict, required=True)) + \
list(categorize(optional_actions, widget_dict)) + \
build_radio_group(mutually_exclusive_group)
def categorize(actions, widget_dict, required=False):
_get_widget = partial(get_widget, widgets=widget_dict)
for action in actions:
if is_standard(action):
yield as_json(action, _get_widget(action) or 'TextField', required)
elif is_choice(action):
yield as_json(action, _get_widget(action) or 'Dropdown', required)
elif is_flag(action):
yield as_json(action, _get_widget(action) or 'CheckBox', required)
elif is_counter(action):
_json = as_json(action, _get_widget(action) or 'Dropdown', required)
# pre-fill the 'counter' dropdown
_json['data']['choices'] = map(str, range(1, 11))
yield _json
else:
raise UnknownWidgetType(action)
def get_widget(action, widgets):
supplied_widget = widgets.get(action.dest, None)
type_arg_widget = 'FileChooser' if action.type == argparse.FileType else None
return supplied_widget or type_arg_widget or None
def is_required(action):
'''_actions which are positional or possessing the `required` flag '''
return not action.option_strings and not isinstance(action, _SubParsersAction) or action.required == True
def has_required(actions):
return filter(None, filter(is_required, actions))
def is_subparser(action):
return isinstance(action,_SubParsersAction)
def has_subparsers(actions):
return filter(is_subparser, actions)
def get_subparser(actions):
return filter(is_subparser, actions)[0]
def is_optional(action):
'''_actions not positional or possessing the `required` flag'''
return action.option_strings and not action.required
def is_choice(action):
''' action with choices supplied '''
return action.choices
def is_standard(action):
""" actions which are general "store" instructions.
e.g. anything which has an argument style like:
$ script.py -f myfilename.txt
"""
boolean_actions = (
_StoreConstAction, _StoreFalseAction,
_StoreTrueAction
)
return (not action.choices
and not isinstance(action, _CountAction)
and not isinstance(action, _HelpAction)
and type(action) not in boolean_actions)
def is_flag(action):
""" _actions which are either storeconst, store_bool, etc.. """
action_types = [_StoreTrueAction, _StoreFalseAction, _StoreConstAction]
return any(map(lambda Action: isinstance(action, Action), action_types))
def is_counter(action):
""" _actions which are of type _CountAction """
return isinstance(action, _CountAction)
def build_radio_group(mutex_group):
if not mutex_group:
return []
options = [
{
'display_name': mutex_arg.dest,
'help': mutex_arg.help,
'nargs': mutex_arg.nargs or '',
'commands': mutex_arg.option_strings,
'choices': mutex_arg.choices,
} for mutex_arg in mutex_group
]
return [{
'type': 'RadioGroup',
'group_name': 'Choose Option',
'required': False,
'data': options
}]
def as_json(action, widget, required):
if widget not in VALID_WIDGETS:
raise UnknownWidgetType('Widget Type {0} is unrecognized'.format(widget))
return {
'type': widget,
'required': required,
'data': {
'display_name': action.dest,
'help': action.help,
'nargs': action.nargs or '',
'commands': action.option_strings,
'choices': action.choices or [],
'default': action.default
}
}
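def _example_convert():
    """ Hedged usage sketch (not part of the original module): build a
        plain ArgumentParser and turn it into the JSON build spec with
        convert() above. The argument names are illustrative only.
    """
    parser = ArgumentParser(description='demo')
    parser.add_argument('filename', help='input file')  # -> TextField (required)
    parser.add_argument('--verbose', action='store_true',
                        help='noisy output')            # -> CheckBox (optional)
    return convert(parser)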
|
lrq3000/pyFileFixity
|
pyFileFixity/lib/gooey/python_bindings/argparse_to_json.py
|
Python
|
mit
| 5,242 | 0.013163 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import functools
from functools import partial
import itertools as it
from typing import Any, Callable, Dict
import jax
from jax.interpreters import partial_eval as pe
from jax.config import config
from jax import core
from jax._src.dtypes import dtype, float0
from jax.core import (Trace, Tracer, get_aval, call_p, Primitive, Literal,
raise_to_shaped)
from jax._src.ad_util import (add_jaxvals, add_jaxvals_p, zeros_like_jaxval,
zeros_like_aval, zeros_like_p, Zero)
from jax._src.util import (unzip2, safe_map, safe_zip, split_list, wrap_name,
as_hashable_function, cache)
from jax.tree_util import register_pytree_node
from jax import linear_util as lu
from jax._src.api_util import flatten_fun, flatten_fun_nokwargs
from jax.tree_util import tree_flatten, tree_unflatten, Partial
from jax._src import source_info_util
zip = safe_zip
map = safe_map
def identity(x): return x
def jvp(fun: lu.WrappedFun, has_aux=False, instantiate=True,
transform_stack=True) -> Any:
if not has_aux:
return jvpfun(jvp_subtrace(fun), instantiate, transform_stack)
else:
fun, aux = jvp_subtrace_aux(fun)
return jvpfun(fun, instantiate, transform_stack), aux
@lu.transformation
def jvpfun(instantiate, transform_stack, primals, tangents):
tangents = [Zero.from_value(t) if not isinstance(t, Zero)
and dtype(t) is float0 else t for t in tangents]
ctx = (source_info_util.transform_name_stack('jvp') if transform_stack
else contextlib.nullcontext())
with core.new_main(JVPTrace) as main, ctx:
out_primals, out_tangents = yield (main, primals, tangents), {}
del main
if type(instantiate) is bool:
instantiate = [instantiate] * len(out_tangents)
out_tangents = [instantiate_zeros(t) if inst else t for t, inst
in zip(out_tangents, instantiate)]
yield out_primals, out_tangents
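def _example_public_jvp():
  # Hedged sketch (not part of the original module): the transformations
  # above are the machinery behind the public jax.jvp API. For
  # f(x) = x * x at x = 3.0 with tangent 1.0 this returns (9.0, 6.0).
  return jax.jvp(lambda x: x * x, (3.0,), (1.0,))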
@lu.transformation
def jvp_subtrace(main, primals, tangents):
trace = JVPTrace(main, core.cur_sublevel())
for x in list(primals) + list(tangents):
if isinstance(x, Tracer):
assert x._trace.level < trace.level
in_tracers = [JVPTracer(trace, x, t) if type(t) is not Zero else x
for x, t in zip(primals, tangents)]
ans = yield in_tracers, {}
out_tracers = map(trace.full_raise, ans)
yield unzip2([(out_tracer.primal, out_tracer.tangent)
for out_tracer in out_tracers])
@lu.transformation_with_aux
def jvp_subtrace_aux(main, primals, tangents):
trace = JVPTrace(main, core.cur_sublevel())
for x in list(primals) + list(tangents):
if isinstance(x, Tracer):
assert x._trace.level < trace.level
ans, aux = yield map(partial(JVPTracer, trace), primals, tangents), {}
ans_tracers = map(trace.full_raise, ans)
out_primals, out_tangents = unzip2((t.primal, t.tangent) for t in ans_tracers)
aux_primals = [core.full_lower(x.primal)
if isinstance(x, JVPTracer) and x._trace.level == trace.level
else x for x in aux]
yield (out_primals, out_tangents), aux_primals
def linearize(traceable, *primals, **kwargs):
has_aux = kwargs.pop('has_aux', False)
if not has_aux:
jvpfun = jvp(traceable)
else:
jvpfun, aux = jvp(traceable, has_aux=True)
in_pvals = (tuple(pe.PartialVal.known(p) for p in primals)
+ tuple(pe.PartialVal.unknown(get_aval(p).at_least_vspace())
for p in primals))
_, in_tree = tree_flatten(((primals, primals), {}))
jvpfun_flat, out_tree = flatten_fun(jvpfun, in_tree)
jaxpr, out_pvals, consts = pe.trace_to_jaxpr(jvpfun_flat, in_pvals)
out_primals_pvals, out_tangents_pvals = tree_unflatten(out_tree(), out_pvals)
assert all(out_primal_pval.is_known() for out_primal_pval in out_primals_pvals)
_, out_primals_consts = unzip2(out_primals_pvals)
jaxpr.invars = jaxpr.invars[len(primals):]
jaxpr.outvars = jaxpr.outvars[len(out_primals_pvals):]
if not has_aux:
return out_primals_consts, out_tangents_pvals, jaxpr, consts
else:
return out_primals_consts, out_tangents_pvals, jaxpr, consts, aux()
def vjp(traceable, primals, has_aux=False, reduce_axes=()):
if not has_aux:
out_primals, pvals, jaxpr, consts = linearize(traceable, *primals)
else:
out_primals, pvals, jaxpr, consts, aux = linearize(traceable, *primals, has_aux=True)
def unbound_vjp(pvals, jaxpr, consts, *cts):
cts = tuple(map(ignore_consts, cts, pvals))
dummy_args = [UndefinedPrimal(v.aval) for v in jaxpr.invars]
arg_cts = backward_pass(jaxpr, reduce_axes, True, consts, dummy_args, cts)
return map(instantiate_zeros, arg_cts)
# Ensure that vjp_ is a PyTree so that we can pass it from the forward to the backward
# pass in a custom VJP.
vjp_ = Partial(partial(unbound_vjp, pvals, jaxpr), consts)
if not has_aux:
return out_primals, vjp_
else:
return out_primals, vjp_, aux
def ignore_consts(ct, pval):
aval, const = pval
if isinstance(aval, core.AbstractValue):
return ct
elif aval is None:
return core.unit
else:
raise TypeError(aval)
def unpair_pval(pval):
aval, const = pval
const_1, const_2 = const
if aval is None:
return (None, const_1), (None, const_2)
else:
aval_1, aval_2 = aval
return (aval_1, const_1), (aval_2, const_2)
def replace_float0s(primal, tangent):
if dtype(tangent) is float0:
return zeros_like_jaxval(primal)
else:
return tangent
def recast_to_float0(primal, tangent):
if core.primal_dtype_to_tangent_dtype(dtype(primal)) == float0:
return Zero(get_aval(primal).at_least_vspace())
else:
return tangent
# NOTE: The FIXMEs below are caused by primal/tangent mixups (type errors if you will)
def backward_pass(jaxpr: core.Jaxpr, reduce_axes, transform_stack, consts, primals_in, cotangents_in):
if all(type(ct) is Zero for ct in cotangents_in):
return map(lambda v: Zero(v.aval), jaxpr.invars)
def write_cotangent(prim, v, ct):
# assert v not in primal_env
assert ct is not Zero, (prim, v.aval) # check for an old harmless type error
if ct is None or type(v) is Literal:
return
if type(ct) is Zero:
# FIXME: This triggers a lot of failures!
# assert v.aval == ct.aval, (prim, v.aval, ct.aval)
return
axes_to_reduce = tuple(axis_name for axis_name in reduce_axes
if axis_name in core.get_aval(ct).named_shape
and axis_name not in v.aval.named_shape)
if axes_to_reduce:
ct = jax.lax.psum(ct, axis_name=axes_to_reduce)
ct_env[v] = add_tangents(ct_env[v], ct) if v in ct_env else ct
if config.jax_enable_checks:
ct_aval = core.get_aval(ct_env[v])
joined_aval = core.lattice_join(v.aval, ct_aval).strip_weak_type().strip_named_shape()
assert v.aval.strip_weak_type().strip_named_shape() == joined_aval, (prim, v.aval, ct_aval)
def read_cotangent(v):
return ct_env.pop(v, Zero(v.aval))
def read_primal(v):
if type(v) is Literal:
return v.val
else:
return primal_env.get(v, UndefinedPrimal(v.aval))
def write_primal(v, val):
if not is_undefined_primal(val):
primal_env[v] = val
primal_env: Dict[Any, Any] = {}
write_primal(core.unitvar, core.unit)
map(write_primal, jaxpr.constvars, consts)
# FIXME: invars can contain both primal and tangent values, and this line
# forces primal_in to contain UndefinedPrimals for tangent values!
map(write_primal, jaxpr.invars, primals_in)
ct_env: Dict[Any, Any] = {}
ctx = (source_info_util.transform_name_stack('transpose') if transform_stack
else contextlib.nullcontext())
with ctx:
map(partial(write_cotangent, 'outvars'), jaxpr.outvars, cotangents_in)
for eqn in jaxpr.eqns[::-1]:
# FIXME: Some invars correspond to tangents
invals = map(read_primal, eqn.invars)
if eqn.primitive.multiple_results:
cts_in = map(read_cotangent, eqn.outvars)
else:
cts_in, = map(read_cotangent, eqn.outvars)
name_stack = source_info_util.current_name_stack() + eqn.source_info.name_stack
with source_info_util.user_context(eqn.source_info.traceback, name_stack=name_stack):
if eqn.primitive.call_primitive or eqn.primitive.map_primitive:
cts_in_avals = [v.aval for v in eqn.outvars]
params = dict(eqn.params)
call_jaxpr = params.pop('call_jaxpr')
cts_out = get_primitive_transpose(eqn.primitive)(
params, call_jaxpr, invals, cts_in, cts_in_avals, reduce_axes)
elif eqn.primitive in reducing_transposes:
cts_out = reducing_transposes[eqn.primitive](
reduce_axes, cts_in, *invals, **eqn.params)
else:
cts_out = get_primitive_transpose(eqn.primitive)(
cts_in, *invals, **eqn.params)
cts_out = [Zero(v.aval) for v in eqn.invars] if cts_out is Zero else cts_out
# FIXME: Some invars correspond to primals!
map(partial(write_cotangent, eqn.primitive), eqn.invars, cts_out)
cotangents_out = map(read_cotangent, jaxpr.invars)
return cotangents_out
def closed_backward_pass(jaxpr: core.ClosedJaxpr, reduce_axes, transform_stack, primals_in, cotangents_in):
return backward_pass(jaxpr.jaxpr, reduce_axes, transform_stack, jaxpr.consts, primals_in, cotangents_in)
class UndefinedPrimal:
__slots__ = ['aval']
def __init__(self, aval):
self.aval = aval
def __repr__(self):
return 'UndefinedPrimal({})'.format(self.aval)
def is_undefined_primal(x):
return type(x) is UndefinedPrimal
register_pytree_node(UndefinedPrimal,
lambda z: ((), z.aval),
lambda aval, _: UndefinedPrimal(aval))
def get_primitive_transpose(p):
try:
return primitive_transposes[p]
except KeyError as err:
raise NotImplementedError(
"Transpose rule (for reverse-mode differentiation) for '{}' "
"not implemented".format(p)) from err
@lu.transformation_with_aux
def nonzero_tangent_outputs(*args, **kwargs):
results = (_, tangents_out) = yield args, kwargs
yield results, [type(r) is not Zero for r in tangents_out]
class JVPTrace(Trace):
def pure(self, val):
tangent_zero = Zero(get_aval(val).at_least_vspace())
return JVPTracer(self, val, tangent_zero)
def lift(self, val):
tangent_zero = Zero(get_aval(val).at_least_vspace())
return JVPTracer(self, val, tangent_zero)
def sublift(self, val):
return JVPTracer(self, val.primal, val.tangent)
def process_primitive(self, primitive, tracers, params):
primals_in, tangents_in = unzip2((t.primal, t.tangent) for t in tracers)
jvp = primitive_jvps.get(primitive)
if not jvp:
msg = f"Differentiation rule for '{primitive}' not implemented"
raise NotImplementedError(msg)
primal_out, tangent_out = jvp(primals_in, tangents_in, **params)
if primitive.multiple_results:
return [JVPTracer(self, x, t) for x, t in zip(primal_out, tangent_out)]
else:
return JVPTracer(self, primal_out, tangent_out)
def process_call(self, call_primitive, f: lu.WrappedFun, tracers, params):
assert call_primitive.multiple_results
primals, tangents = unzip2((t.primal, t.tangent) for t in tracers)
nonzero_tangents, tangent_tree_def = tree_flatten(tangents)
nz_tangents = [type(t) is not Zero for t in tangents]
if 'name' in params and not config.jax_experimental_name_stack:
params = dict(params, name=wrap_name(params['name'], 'jvp'))
f_jvp = jvp_subtrace(f, self.main)
f_jvp, nz_tangents_out = nonzero_tangent_outputs(f_jvp)
if isinstance(call_primitive, core.MapPrimitive):
in_axes = params['in_axes']
tangent_in_axes = [ax for ax, nz in zip(in_axes, nz_tangents) if nz]
out_axes_thunk = params['out_axes_thunk']
# The new thunk depends deterministically on the old thunk and the wrapped function.
# Any caching already has to include the wrapped function as part of the key, so we
# only use the previous thunk for equality checks.
# NOTE: This assumes that the output tangents being zero is a deterministic
# function of which input tangents were zero.
@as_hashable_function(closure=(tuple(nz_tangents), out_axes_thunk))
def new_out_axes_thunk():
out_axes = out_axes_thunk()
return (*out_axes, *(ax for ax, nz in zip(out_axes, nz_tangents_out()) if nz))
params = dict(params,
in_axes=(*in_axes, *tangent_in_axes),
out_axes_thunk=new_out_axes_thunk)
f_jvp, out_tree_def = traceable(f_jvp, len(primals), tangent_tree_def)
update_params = call_param_updaters.get(call_primitive)
new_params = (update_params(params, nz_tangents, nz_tangents_out)
if update_params else params)
result = call_primitive.bind(f_jvp, *primals, *nonzero_tangents, **new_params)
primal_out, tangent_out = tree_unflatten(out_tree_def(), result)
return [JVPTracer(self, p, t) for p, t in zip(primal_out, tangent_out)]
def post_process_call(self, call_primitive, out_tracers, params):
primals, tangents = unzip2((t.primal, t.tangent) for t in out_tracers)
out, treedef = tree_flatten((primals, tangents))
tangents_nz = [type(t) is not Zero for t in tangents]
del primals, tangents
main = self.main
def todo(x):
primals, tangents = tree_unflatten(treedef, x)
trace = JVPTrace(main, core.cur_sublevel())
return map(partial(JVPTracer, trace), primals, tangents)
if call_primitive.map_primitive:
def out_axes_transform(out_axes):
return (*out_axes, *(ax for ax, nz in zip(out_axes, tangents_nz) if nz))
todo = (todo, out_axes_transform)
return out, todo
# The only difference between process_map and process_call is that
# the `in_axes` and `out_axes_thunk` params must be updated;
# that's handled in process_call.
process_map = process_call
post_process_map = post_process_call
def process_custom_jvp_call(self, _, __, f_jvp, tracers):
primals_in, tangents_in = unzip2((t.primal, t.tangent) for t in tracers)
primals_in = map(core.full_lower, primals_in)
tangents_in = map(instantiate_zeros, tangents_in)
# Cast float0 to zeros with the primal dtype because custom jvp rules don't
# currently handle float0s
tangents_in = map(replace_float0s, primals_in, tangents_in)
outs = f_jvp.call_wrapped(*it.chain(primals_in, tangents_in))
primals_out, tangents_out = split_list(outs, [len(outs) // 2])
tangents_out = map(recast_to_float0, primals_out, tangents_out)
return map(partial(JVPTracer, self), primals_out, tangents_out)
def post_process_custom_jvp_call(self, out_tracers, _):
raise CustomJVPException()
def process_custom_vjp_call(self, _, __, fwd, bwd, tracers, *, out_trees):
primals_in, tangents_in = unzip2((t.primal, t.tangent) for t in tracers)
tangents_in = map(instantiate_zeros, tangents_in)
res_and_primals_out = fwd.call_wrapped(*map(core.full_lower, primals_in))
out_tree, res_tree = out_trees()
res, primals_out = split_list(res_and_primals_out, [res_tree.num_leaves])
avals_out = [raise_to_shaped(core.get_aval(x)) for x in primals_out]
tangents_out = custom_lin_p.bind(
*res, *tangents_in, num_res=res_tree.num_leaves, bwd=bwd,
out_avals=avals_out)
tangents_out = map(recast_to_float0, primals_out, tangents_out)
return map(partial(JVPTracer, self), primals_out, tangents_out)
def post_process_custom_vjp_call(self, out_tracers, _):
raise CustomVJPException()
def process_custom_transpose(self, prim, call, tracers, **params):
ps_in, ts_in = unzip2((t.primal, t.tangent) for t in tracers)
res_ps_in, lin_ps_in = split_list(ps_in, [params['res_tree'].num_leaves])
res_ts_in, lin_ts_in = split_list(ts_in, [params['res_tree'].num_leaves])
# TODO(frostig): Handle differentiation with respect to residual
    # operands. Calling `call` twice on all operands is invalid, since it
# isn't linear in the residuals. However, we know that if we
# write:
#
# jvp_call_res = lambda x: partial(jvp, lambda r: call(r, x))
#
# then:
#
# jvp(call, (r, x), (dr, dx)) == jvp_call_res(x)(r, dr) + call(r, dx)
#
# In words: a possible strategy is to take the jvp of `call` with
# respect to residuals, and with linear arguments fixed, then add
# that to a custom-transpose call to `call` (i.e. what we already
# do below in the all-linear argument case).
if any(type(t) is not Zero for t in res_ts_in):
raise NotImplementedError(
'JVP of custom transpose with respect to non-symbolic-zero residuals')
ps_out = prim.bind(call, *ps_in, **params)
lin_ts_in = map(instantiate_zeros, lin_ts_in)
ts_out = prim.bind(call, *res_ps_in, *lin_ts_in, **params)
return map(partial(JVPTracer, self), ps_out, ts_out)
def join(self, xt, yt):
xz, yz = type(xt) is Zero, type(yt) is Zero
if xz == yz:
return xt, yt
elif yz and not xz:
return xt, zeros_like_jaxval(xt)
elif xz and not yz:
return zeros_like_jaxval(yt), yt
else:
raise TypeError((xt, yt))
class JVPTracer(Tracer):
__slots__ = ['primal', 'tangent']
def __init__(self, trace, primal, tangent):
if config.jax_enable_checks:
_primal_tangent_shapes_match(primal, tangent)
self._trace = trace
self.primal = primal
self.tangent = tangent
@property
def aval(self):
# TODO(dougalm): add epsilon ball
return get_aval(self.primal)
def full_lower(self):
if type(self.tangent) is Zero:
return core.full_lower(self.primal)
else:
return self
def _primal_tangent_shapes_match(primal, tangent):
if type(tangent) is not Zero:
primal_aval = raise_to_shaped(get_aval(primal), weak_type=False)
tangent_aval = raise_to_shaped(get_aval(tangent), weak_type=False)
assert primal_aval.shape == tangent_aval.shape, (primal_aval.shape, tangent_aval.shape)
expected_tangent_dtype = core.primal_dtype_to_tangent_dtype(primal_aval.dtype)
assert expected_tangent_dtype == tangent_aval.dtype, (expected_tangent_dtype, tangent_aval.dtype)
call_param_updaters: Dict[core.Primitive, Callable] = {}
call_transpose_param_updaters: Dict[core.Primitive, Callable] = {}
# -------------------- Primitives --------------------
primitive_jvps : Dict[core.Primitive, Callable] = {}
primitive_transposes: Dict[core.Primitive, Callable] = {}
# transpose rules that internally perform reductions over the given named axes
reducing_transposes: Dict[core.Primitive, Callable] = {}
def deflinear(primitive, transpose_rule):
primitive_jvps[primitive] = partial(linear_jvp, primitive)
primitive_transposes[primitive] = partial(linear_transpose, transpose_rule)
def linear_jvp(primitive, primals, tangents, **params):
val_out = primitive.bind(*primals, **params)
if all(type(tangent) is Zero for tangent in tangents):
return val_out, Zero.from_value(val_out)
else:
tangents = map(instantiate_zeros, tangents)
return val_out, primitive.bind(*tangents, **params)
def linear_transpose(transpose_rule, cotangent, *args, **kwargs):
return Zero if type(cotangent) is Zero else transpose_rule(cotangent, **kwargs)
def deflinear2(primitive, transpose_rule):
primitive_jvps[primitive] = partial(linear_jvp, primitive)
primitive_transposes[primitive] = partial(linear_transpose2, transpose_rule)
def linear_transpose2(transpose_rule, cotangent, *args, **kwargs):
return Zero if type(cotangent) is Zero else transpose_rule(cotangent, *args, **kwargs)
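def _example_deflinear2_usage():
  # Hedged sketch (not part of the original module): registering rules for
  # a hypothetical linear primitive computing -2 * x. Because the map is
  # linear, the JVP reuses the primitive itself (see linear_jvp above) and
  # the transpose applies the same map to the cotangent.
  neg2_p = Primitive('neg2')
  neg2_p.def_impl(lambda x: -2 * x)
  deflinear2(neg2_p, lambda ct, x: [neg2_p.bind(ct)])
  return neg2_p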
def defjvp(primitive, *jvprules):
assert isinstance(primitive, Primitive)
assert not primitive.multiple_results
primitive_jvps[primitive] = partial(standard_jvp, jvprules, primitive)
def standard_jvp(jvprules, primitive, primals, tangents, **params):
val_out = primitive.bind(*primals, **params)
tangents_out = [rule(t, *primals, **params) for rule, t in zip(jvprules, tangents)
if rule is not None and type(t) is not Zero]
return val_out, functools.reduce(add_tangents, tangents_out, Zero.from_value(val_out))
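def _example_defjvp_usage():
  # Hedged sketch (not part of the original module): a JVP rule for a
  # hypothetical squaring primitive. Each rule in `jvprules` maps one
  # operand's tangent (given all primals) to an output tangent; here
  # d(x*x) = 2 * x * dx.
  square_p = Primitive('square')
  square_p.def_impl(lambda x: x * x)
  defjvp(square_p, lambda g, x: 2 * x * g)
  return square_p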
def defjvp2(primitive, *jvprules):
assert isinstance(primitive, Primitive)
assert not primitive.multiple_results
primitive_jvps[primitive] = partial(standard_jvp2, jvprules, primitive)
def standard_jvp2(jvprules, primitive, primals, tangents, **params):
val_out = primitive.bind(*primals, **params)
tangents_out = (rule(t, val_out, *primals, **params) for rule, t in zip(jvprules, tangents)
if rule is not None and type(t) is not Zero)
tangents_out = list(tangents_out)
return val_out, functools.reduce(add_tangents, tangents_out, Zero.from_value(val_out))
def add_tangents(x, y):
if type(x) is Zero:
return y
elif type(y) is Zero:
return x
else:
return add_jaxvals(x, y)
def defbilinear(prim, lhs_rule, rhs_rule):
assert isinstance(prim, Primitive)
lhs_jvp = lambda g, x, y, **kwargs: prim.bind(g, y, **kwargs)
rhs_jvp = lambda g, x, y, **kwargs: prim.bind(x, g, **kwargs)
defjvp(prim, lhs_jvp, rhs_jvp)
primitive_transposes[prim] = partial(bilinear_transpose, lhs_rule, rhs_rule)
def bilinear_transpose(lhs_rule, rhs_rule, cotangent, x, y, **kwargs):
assert is_undefined_primal(x) ^ is_undefined_primal(y)
if type(cotangent) is Zero:
return Zero
if is_undefined_primal(x):
out = lhs_rule(cotangent, y, **kwargs)
return Zero if out is Zero else (out, None)
else:
out = rhs_rule(cotangent, x, **kwargs)
return Zero if out is Zero else (None, out)
def defjvp_zero(primitive):
assert isinstance(primitive, Primitive)
primitive_jvps[primitive] = partial(zero_jvp, primitive)
def zero_jvp(primitive, primals, tangents, **params):
r = primitive.bind(*primals, **params)
return r, Zero.from_value(r)
deflinear2(zeros_like_p, lambda t, _: [Zero.from_value(t)])
deflinear2(add_jaxvals_p, lambda t, *args: (t, t))
def instantiate_zeros(tangent):
if type(tangent) is Zero:
return zeros_like_aval(tangent.aval)
else:
return tangent
# This function seems similar to instantiate_zeros, but it is sometimes used
# to instantiate zero abstract units with a different aval
def instantiate_zeros_aval(aval, tangent):
if type(tangent) is Zero:
assert type(tangent.aval) is core.AbstractUnit or tangent.aval == aval
return zeros_like_aval(aval)
else:
return tangent
@lu.transformation_with_aux
def traceable(num_primals, in_tree_def, *primals_and_tangents):
new_primals = primals_and_tangents[:num_primals]
new_tangents = primals_and_tangents[num_primals:]
new_tangents = tree_unflatten(in_tree_def, new_tangents)
primal_out, tangent_out = yield (new_primals, new_tangents), {}
out_flat, tree_def = tree_flatten((primal_out, tangent_out))
yield out_flat, tree_def
def call_transpose(primitive, params, call_jaxpr, args, ct, _, reduce_axes):
all_args, in_tree_def = tree_flatten(((), args, ct)) # empty consts
fun = lu.hashable_partial(lu.wrap_init(backward_pass), call_jaxpr, reduce_axes, False)
fun, out_tree = flatten_fun_nokwargs(fun, in_tree_def)
if config.jax_experimental_name_stack:
new_params = params
else:
new_params = dict(params, name=wrap_name(params['name'], 'transpose'))
update_params = call_transpose_param_updaters.get(primitive)
if update_params:
new_params = update_params(new_params, map(is_undefined_primal, args),
[type(x) is not Zero for x in ct])
out_flat = primitive.bind(fun, *all_args, **new_params)
return tree_unflatten(out_tree(), out_flat)
primitive_transposes[core.call_p] = partial(call_transpose, call_p)
def remat_transpose(params, call_jaxpr, primals_in, cotangents_in,
cotangent_in_avals, reduce_axes):
# backward_pass can only transpose linear computations, but the call_jaxpr embedded in
# remat contains primal (non-linear) equations too. Hence, we have to eliminate those
# (in this case via partial_eval) before we call into backward_pass again.
typed_call_jaxpr = core.ClosedJaxpr(call_jaxpr, [])
unknowns = map(is_undefined_primal, primals_in)
primal_jaxpr, tangent_jaxpr, out_unknowns = \
pe.partial_eval_jaxpr(typed_call_jaxpr, unknowns=unknowns, instantiate=True) # type: ignore
def do_transpose(primals_in, cotangents_in):
# NOTE: This is passing in undefined primals in place of tangent arguments, but it
# should all work out, because we're only computing the primal part here.
residuals = core.jaxpr_as_fun(primal_jaxpr)(*primals_in)[len(cotangents_in):]
# Now that we have a purely linear jaxpr, we can transpose it
cotangents_out = backward_pass(
tangent_jaxpr.jaxpr, reduce_axes, False, (), primals_in + residuals, cotangents_in)
# backward_pass will return cotangents computed for all invars, but some of them
# are residuals appended by partial eval, so we need to skip those before we return.
return cotangents_out[:len(primals_in)]
flat_args, in_tree_def = tree_flatten((primals_in, cotangents_in))
flat_do_transpose, out_tree = flatten_fun_nokwargs(lu.wrap_init(do_transpose), in_tree_def)
flat_cotangents_out = pe.remat_call_p.bind(flat_do_transpose, *flat_args, **params)
return tree_unflatten(out_tree(), flat_cotangents_out)
primitive_transposes[pe.remat_call_p] = remat_transpose
@lu.transformation_with_aux
def nonzero_outputs(*args, **kwargs):
results = yield args, kwargs
yield results, [type(r) is not Zero for r in results]
def map_transpose(primitive, params, call_jaxpr, args, ct, _, reduce_axes):
all_args, in_tree_def = tree_flatten(((), args, ct)) # empty consts
fun = lu.hashable_partial(lu.wrap_init(backward_pass), call_jaxpr, reduce_axes, False)
fun, nz_arg_cts = nonzero_outputs(fun)
fun, out_tree = flatten_fun_nokwargs(fun, in_tree_def)
# Preserve axis for primal arguments, skip tangents (represented as undefined primals).
in_axes, out_axes = params['in_axes'], params['out_axes']
new_in_axes = (*[axis for axis, x in zip(in_axes, args)
if not is_undefined_primal(x)],
*[axis for axis, x in zip(out_axes, ct)
if type(x) is not Zero])
# The interim strategy we use below (until avals-with-names) only works
# when all outputs are mapped.
assert all(out_axis is not None for out_axis in out_axes), out_axes
# NOTE: This assumes that the output cotangents being zero is a deterministic
# function of which input cotangents were zero.
@as_hashable_function(closure=(in_axes, tuple(type(c) is Zero for c in ct)))
def out_axes_thunk():
return tuple(axis or 0 for axis, nz in zip(in_axes, nz_arg_cts()) if nz)
new_params = dict(params, name=wrap_name(params['name'], 'transpose'),
in_axes=new_in_axes, out_axes_thunk=out_axes_thunk)
del new_params['out_axes']
update_params = call_transpose_param_updaters.get(primitive)
if update_params:
new_params = update_params(new_params, map(is_undefined_primal, args),
[type(x) is not Zero for x in ct])
out_flat = primitive.bind(fun, *all_args, **new_params)
arg_cts = tree_unflatten(out_tree(), out_flat)
# The freevars are being fanned out (not mapped). During transpose the
# dual of fan-out is fan-in-sum. We apply it to the unmapped invars.
assert len(in_axes) == len(arg_cts)
def unmap_zero(zero, in_axis):
return (zero if in_axis is None else
Zero(core.unmapped_aval(params['axis_size'], params['axis_name'], in_axis, zero.aval)))
arg_cts = (unmap_zero(arg_ct, in_axis) if type(arg_ct) is Zero else
arg_ct if in_axis is not None else
arg_ct.sum(0)
for arg_ct, in_axis in zip(arg_cts, in_axes))
return tuple(arg_cts)
def jvp_jaxpr(jaxpr, nonzeros, instantiate):
inst = tuple(instantiate) if isinstance(instantiate, list) else instantiate
return _jvp_jaxpr(jaxpr, tuple(nonzeros), inst)
@cache()
def _jvp_jaxpr(jaxpr, nonzeros, instantiate):
assert len(jaxpr.in_avals) == len(nonzeros)
f = lu.wrap_init(core.jaxpr_as_fun(jaxpr))
f_jvp, out_nonzeros = f_jvp_traceable(jvp(f, instantiate=instantiate, transform_stack=False),
nonzeros)
tangent_avals = [aval for aval, nz in zip(jaxpr.in_avals, nonzeros) if nz]
avals_in = list(it.chain(jaxpr.in_avals, tangent_avals))
jaxpr_out, avals_out, literals_out = pe.trace_to_jaxpr_dynamic(f_jvp, avals_in)
return core.ClosedJaxpr(jaxpr_out, literals_out), out_nonzeros()
@lu.transformation_with_aux
def f_jvp_traceable(nonzeros, *primals_and_nztangents):
num_primals = len(nonzeros)
primals = list(primals_and_nztangents[:num_primals])
nonzero_tangents = iter(primals_and_nztangents[num_primals:])
tangents = [next(nonzero_tangents) if nz else Zero.from_value(p)
for p, nz in zip(primals, nonzeros)]
primals_out, tangents_out = yield (primals, tangents), {}
out_nonzeros = [type(t) is not Zero for t in tangents_out]
nonzero_tangents_out = [t for t in tangents_out if type(t) is not Zero]
yield list(primals_out) + nonzero_tangents_out, out_nonzeros
def rearrange_binders(jaxpr: core.ClosedJaxpr, primals_in, tangents_in, primals_out, tangents_out):
new_invars = _perm(primals_in, tangents_in, jaxpr.jaxpr.invars)
new_outvars = _perm(primals_out, tangents_out, jaxpr.jaxpr.outvars)
new_jaxpr = core.Jaxpr(jaxpr.jaxpr.constvars,
new_invars, new_outvars, jaxpr.jaxpr.eqns)
return core.ClosedJaxpr(new_jaxpr, jaxpr.consts)
def _perm(primal_counts, tangent_counts, lst):
n = sum(primal_counts)
primals, tangents = lst[:n], lst[n:]
primal_groups = split_list(primals, primal_counts[:-1])
tangent_groups = split_list(tangents, tangent_counts[:-1])
return _interleave(primal_groups, tangent_groups)
def _interleave(xs, ys):
assert len(xs) == len(ys)
return [e for pair in zip(xs, ys) for l in pair for e in l]
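# Worked example (illustrative): with primal_counts=[2, 1] and
# tangent_counts=[2, 1], _perm regroups [p0, p1, p2, t0, t1, t2] into the
# interleaved order [p0, p1, t0, t1, p2, t2].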
custom_lin_p: core.Primitive = core.Primitive('custom_lin')
custom_lin_p.def_abstract_eval(lambda *_, out_avals, **__: out_avals)
custom_lin_p.multiple_results = True
def _raise_custom_vjp_error_on_jvp(*_, **__):
raise TypeError("can't apply forward-mode autodiff (jvp) to a custom_vjp "
"function.")
custom_lin_p.def_impl(_raise_custom_vjp_error_on_jvp)
def _custom_lin_transpose(cts_out, *invals, num_res, bwd, out_avals):
res, _ = split_list(invals, [num_res])
cts_out = map(instantiate_zeros_aval, out_avals, cts_out)
cts_in = bwd.call_wrapped(*res, *cts_out)
return [None] * num_res + list(cts_in)
primitive_transposes[custom_lin_p] = _custom_lin_transpose
class CustomJVPException(Exception):
def __init__(self):
# TODO(mattjj): track source provenance on AD tracers, improve error
msg = ("Detected differentiation of a custom_jvp function with respect to "
"a closed-over value. That isn't supported because the custom JVP "
"rule only specifies how to differentiate the custom_jvp function "
"with respect to explicit input parameters. Try passing the "
"closed-over value into the custom_jvp function as an argument, and "
"adapting the custom_jvp rule.")
super().__init__(msg)
class CustomVJPException(Exception):
def __init__(self):
# TODO(mattjj): track source provenance on AD tracers, improve error
msg = ("Detected differentiation of a custom_vjp function with respect to "
"a closed-over value. That isn't supported because the custom VJP "
"rule only specifies how to differentiate the custom_vjp function "
"with respect to explicit input parameters. Try passing the "
"closed-over value into the custom_vjp function as an argument, and "
"adapting the custom_vjp fwd and bwd rules.")
super().__init__(msg)
|
google/jax
|
jax/interpreters/ad.py
|
Python
|
apache-2.0
| 32,505 | 0.012121 |
tile = [[0,1,4,5],
[2,3,6,7],
[8,9,12,13],
[10,11,14,15]]
shift = 0
align = 1
value = 0L
holder = []
import sys
basemask = [0xff]  # NOTE: truncated in the source ("basemask = [0x"); the variable is unused below, so this placeholder only keeps the script runnable
fd = sys.stdout
indent = " "*9
for c in range(4):
	fd.write(indent + "*pdst++ = \n")
for l,line in enumerate(tile):
fd.write(indent + " %s_mm_shuffle_epi8(line%d, (__m128i){"%(l and '+' or ' ',l))
for i,pos in enumerate(line):
mask = 0x00ffffffff & (~(0xffL << shift))
value = mask | ((pos) << shift)
holder.append(value)
if holder and (i + 1) %2 == 0:
fd.write("0x%8.0x"%(holder[0] + (holder[1] << 32)))
holder = []
if (i) %4 == 1:
fd.write( ',')
fd.write("})%s\n"%((l == 3) and ';' or ''))
print
shift += 8
|
zcbenz/cefode-chromium
|
third_party/mesa/MesaLib/src/gallium/drivers/llvmpipe/lp_tile_shuffle_mask.py
|
Python
|
bsd-3-clause
| 716 | 0.111732 |
import asyncio
import colorsys
import enum
import functools
import psmove
import time
import traceback
import random
SETTINGSFILE = 'joustsettings.yaml'
# Human speed thresholds, indexed by sensitivity level (five levels, slow to fast)
#SLOW_WARNING = [0.1, 0.15, 0.28]
#SLOW_MAX = [0.25, 0.8, 1]
#FAST_WARNING = [0.5, 0.6, 0.8]
#FAST_MAX = [1, 1.4, 1.8]
SLOW_WARNING = [1.2, 1.3, 1.6, 2.0, 2.5]
SLOW_MAX = [1.3, 1.5, 1.8, 2.5, 3.2]
FAST_WARNING = [1.4, 1.6, 1.9, 2.7, 2.8]
FAST_MAX = [1.6, 1.8, 2.8, 3.2, 3.5]
#WERE_SLOW_WARNING = [0.2, 0.3, 0.4]
#WERE_SLOW_MAX = [0.7, 0.9, 1.1]
#WERE_FAST_WARNING = [0.6, 0.7, 0.9]
#WERE_FAST_MAX = [1.1, 1.5, 2.0]
WERE_SLOW_WARNING = [1.2, 1.4, 1.7, 2.1, 2.9]
WERE_SLOW_MAX = [1.3, 1.6, 1.9, 2.6, 3.9]
WERE_FAST_WARNING = [1.4, 1.7, 2.0, 2.8, 3.5]
WERE_FAST_MAX = [1.6, 1.9, 2.9, 3.3, 4.9]
#ZOMBIE_WARNING = [0.5, 0.6, 0.8]
#ZOMBIE_MAX = [0.8, 1, 1.4]
ZOMBIE_WARNING = [1.2, 1.5, 1.8, 2.6, 2.7]
ZOMBIE_MAX = [1.4, 1.7, 2.7, 3.1, 3.4]
def get_move(serial, move_num):
time.sleep(0.02)
move = psmove.PSMove(move_num)
time.sleep(0.05)
if move.get_serial() != serial:
for move_num in range(psmove.count_connected()):
move = psmove.PSMove(move_num)
if move.get_serial() == serial:
print("returning " +str(move.get_serial()))
return move
return None
else:
return move
def lerp(a, b, p):
return a*(1 - p) + b*p
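# e.g. lerp(0.0, 10.0, 0.25) == 2.5; p=0 returns a and p=1 returns b.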
class Games(enum.Enum):
JoustFFA = (0, 'Joust Free-for-All', 2)
JoustTeams = (1, 'Joust Teams', 3)
JoustRandomTeams = (2, 'Joust Random Teams', 3)
Traitor = (3, 'Traitors', 6)
WereJoust = (4, 'Werewolves', 3)
Zombies = (5, 'Zombies', 4)
Commander = (6, 'Commander', 4)
Swapper = (7, 'Swapper', 3)
FightClub = (8, 'Fight Club', 2)
Tournament = (9, 'Tournament', 3)
NonStop = (10, 'Non Stop Joust', 2)
Ninja = (11, 'Ninja Bomb', 2)
Random = (12, 'Random', 2)
def __new__(cls, value, pretty_name, min_players):
"""This odd constructor lets us keep Foo.value as an integer, but also
add some extra properties to each option."""
obj = object.__new__(cls)
obj._value_ = value
obj.pretty_name = pretty_name
obj.minimum_players = min_players
return obj
def next(self):
"""Return the next game mode after this one in the list. Wraps around after hitting bottom."""
return Games((self.value + 1) % len(Games))
    def previous(self):
        """Return the previous game mode before this one in the list. Wraps around after hitting the top."""
        return Games((self.value - 1) % len(Games))
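    # Illustrative: Games.JoustFFA.next() is Games.JoustTeams, and
    # Games.Random.next() wraps back around to Games.JoustFFA.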
# These button values are based on
# the mapping of PS Move controllers
class Button(enum.Flag):
NONE = 0
TRIANGLE = psmove.Btn_TRIANGLE
CIRCLE = psmove.Btn_CIRCLE
CROSS = psmove.Btn_CROSS
SQUARE = psmove.Btn_SQUARE
SELECT = psmove.Btn_SELECT
START = psmove.Btn_START
SYNC = psmove.Btn_PS
MIDDLE = psmove.Btn_MOVE
TRIGGER = psmove.Btn_T
SHAPES = TRIANGLE | CIRCLE | CROSS | SQUARE
UPDATE = SELECT | START
all_shapes = [Button.TRIANGLE, Button.CIRCLE, Button.CROSS, Button.SQUARE]
battery_levels = {
psmove.Batt_MIN: "Low",
psmove.Batt_20Percent: "20%",
psmove.Batt_40Percent: "40%",
psmove.Batt_60Percent: "60%",
psmove.Batt_80Percent: "80%",
psmove.Batt_MAX: "100%",
psmove.Batt_CHARGING: "Charging",
psmove.Batt_CHARGING_DONE: "Charged",
}
# Common colors lifted from https://xkcd.com/color/rgb/
# TODO: Add more colors -- probably need to have 14 player colors at least.
class Color(enum.Enum):
BLACK = 0x000000
WHITE = 0xffffff
RED = 0xff0000
GREEN = 0x00ff00
BLUE = 0x0000ff
YELLOW = 0xffff14
PURPLE = 0x7e1e9c
ORANGE = 0xf97306
PINK = 0xff81c0
TURQUOISE = 0x06c2ac
BROWN = 0x653700
def rgb_bytes(self):
v = self.value
return v >> 16, (v >> 8) & 0xff, v & 0xff
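    # Illustrative: Color.YELLOW.rgb_bytes() == (0xff, 0xff, 0x14)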
# Red is reserved for warnings/knockouts.
PLAYER_COLORS = [ c for c in Color if c not in (Color.RED, Color.WHITE, Color.BLACK) ]
def async_print_exceptions(f):
"""Wraps a coroutine to print exceptions (other than cancellations)."""
@functools.wraps(f)
async def wrapper(*args, **kwargs):
try:
await f(*args, **kwargs)
except asyncio.CancelledError:
raise
except:
traceback.print_exc()
raise
return wrapper
# Represents a pace the game is played at, encapsulating the tempo of the music as well
# as controller sensitivity.
class GamePace:
__slots__ = ['tempo', 'warn_threshold', 'death_threshold']
def __init__(self, tempo, warn_threshold, death_threshold):
self.tempo = tempo
self.warn_threshold = warn_threshold
self.death_threshold = death_threshold
def __str__(self):
return '<GamePace tempo=%s, warn=%s, death=%s>' % (self.tempo, self.warn_threshold, self.death_threshold)
# TODO: These are placeholder values.
# We can't take the values from joust.py, since those are compared to the sum of the
# three accelerometer dimensions, whereas we compute the magnitude of the acceleration
# vector.
SLOW_PACE = GamePace(tempo=0.4, warn_threshold=2, death_threshold=4)
MEDIUM_PACE = GamePace(tempo=1.0, warn_threshold=3, death_threshold=5)
FAST_PACE = GamePace(tempo=1.5, warn_threshold=5, death_threshold=9)
FREEZE_PACE = GamePace(tempo=0, warn_threshold=1.1, death_threshold=1.2)
REQUIRED_SETTINGS = [
'play_audio',
'move_can_be_admin',
'current_game',
'enforce_minimum',
'sensitivity',
'play_instructions',
'random_modes',
'color_lock',
'color_lock_choices',
'red_on_kill',
'random_teams',
'menu_voice',
'random_team_size',
'force_all_start',
]
|
aangert/PiParty
|
common.py
|
Python
|
mit
| 5,854 | 0.01247 |
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import re
import threading
import datetime
import traceback
import sickbeard
from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
from sickbeard import sab
from sickbeard import nzbget
from sickbeard import clients
from sickbeard import history
from sickbeard import notifiers
from sickbeard import nzbSplitter
from sickbeard import ui
from sickbeard import encodingKludge as ek
from sickbeard import failed_history
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard import common
def _downloadResult(result):
"""
Downloads a result to the appropriate black hole folder.
Returns a bool representing success.
result: SearchResult instance to download.
"""
resProvider = result.provider
if resProvider == None:
logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
return False
# nzbs with an URL can just be downloaded from the provider
if result.resultType == "nzb":
newResult = resProvider.downloadResult(result)
# if it's an nzb data result
elif result.resultType == "nzbdata":
# get the final file path to the nzb
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
logger.log(u"Saving NZB to " + fileName)
newResult = True
# save the data to disk
try:
with ek.ek(open, fileName, 'w') as fileOut:
fileOut.write(result.extraInfo[0])
helpers.chmodAsParent(fileName)
except EnvironmentError, e:
logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
newResult = False
elif resProvider.providerType == "torrent":
newResult = resProvider.downloadResult(result)
else:
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
newResult = False
return newResult
def snatchEpisode(result, endStatus=SNATCHED):
"""
Contains the internal logic necessary to actually "snatch" a result that
has been found.
Returns a bool representing success.
result: SearchResult instance to be snatched.
endStatus: the episode status that should be used for the episode object once it's snatched.
"""
if result is None:
return False
result.priority = 0 # -1 = low, 0 = normal, 1 = high
if sickbeard.ALLOW_HIGH_PRIORITY:
# if it aired recently make it high priority
for curEp in result.episodes:
if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
result.priority = 1
if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
endStatus = SNATCHED_PROPER
# NZBs can be sent straight to SAB or saved to disk
if result.resultType in ("nzb", "nzbdata"):
if sickbeard.NZB_METHOD == "blackhole":
dlResult = _downloadResult(result)
elif sickbeard.NZB_METHOD == "sabnzbd":
dlResult = sab.sendNZB(result)
elif sickbeard.NZB_METHOD == "nzbget":
is_proper = True if endStatus == SNATCHED_PROPER else False
dlResult = nzbget.sendNZB(result, is_proper)
else:
logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
dlResult = False
# TORRENTs can be sent to clients or saved to disk
elif result.resultType == "torrent":
# torrents are saved to disk when blackhole mode
if sickbeard.TORRENT_METHOD == "blackhole":
dlResult = _downloadResult(result)
else:
# make sure we have the torrent file content
if not result.content:
if not result.url.startswith('magnet'):
result.content = result.provider.getURL(result.url)
if not result.content:
logger.log(
u"Torrent content failed to download from " + result.url, logger.ERROR
)
# Snatches torrent with client
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
dlResult = client.sendTORRENT(result)
else:
logger.log(u"Unknown result type, unable to download it", logger.ERROR)
dlResult = False
if not dlResult:
return False
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.logSnatch(result)
ui.notifications.message('Episode snatched', result.name)
history.logSnatch(result)
# don't notify when we re-download an episode
sql_l = []
for curEpObj in result.episodes:
with curEpObj.lock:
if isFirstBestMatch(result):
curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
else:
curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
sql_l.append(curEpObj.get_sql())
if curEpObj.status not in Quality.DOWNLOADED:
notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
if len(sql_l) > 0:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
return True
def filter_release_name(name, filter_words):
"""
Filters out results based on filter_words
name: name to check
filter_words : Words to filter on, separated by comma
Returns: False if the release name is OK, True if it contains one of the filter_words
"""
if filter_words:
filters = [re.compile('.*%s.*' % filter.strip(), re.I) for filter in filter_words.split(',')]
for regfilter in filters:
if regfilter.search(name):
logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
return True
return False
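# Illustrative: filter_release_name("Show.S01E01.XviD-GRP", "xvid, cam")
# returns True (case-insensitive match on "xvid"), while empty filter_words
# returns False.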
def pickBestResult(results, show, quality_list=None):
logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)
# build the black And white list
bwl = None
if show:
if show.is_anime:
bwl = BlackAndWhiteList(show.indexerid)
else:
logger.log("Could not create black and white list no show was given", logger.DEBUG)
# find the best result for the current episode
bestResult = None
for cur_result in results:
logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])
if bwl:
if not bwl.is_valid(cur_result):
logger.log(cur_result.name+" does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
continue
if quality_list and cur_result.quality not in quality_list:
logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
continue
if show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
logger.MESSAGE)
continue
if show.rls_require_words and not filter_release_name(cur_result.name, show.rls_require_words):
logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
logger.MESSAGE)
continue
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
cur_result.provider.name):
logger.log(cur_result.name + u" has previously failed, rejecting it")
continue
if not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN:
bestResult = cur_result
elif bestResult.quality == cur_result.quality:
if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
bestResult = cur_result
elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
bestResult = cur_result
elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
bestResult = cur_result
if bestResult:
logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
else:
logger.log(u"No result picked.", logger.DEBUG)
return bestResult
def isFinalResult(result):
"""
Checks if the given result is good enough quality that we can stop searching for other ones.
If the result is the highest quality in both the any/best quality lists then this function
returns True, if not then it's False
"""
logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)
show_obj = result.episodes[0].show
bwl = None
if show_obj.is_anime:
bwl = BlackAndWhiteList(show_obj.indexerid)
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
# if there is a redownload that's higher than this then we definitely need to keep looking
if best_qualities and result.quality < max(best_qualities):
return False
# if it does not match the shows black and white list its no good
elif bwl and not bwl.is_valid(result):
return False
# if there's no redownload that's higher (above) and this is the highest initial download then we're good
elif any_qualities and result.quality in any_qualities:
return True
elif best_qualities and result.quality == max(best_qualities):
# if this is the best redownload but we have a higher initial download then keep looking
if any_qualities and result.quality < max(any_qualities):
return False
# if this is the best redownload and we don't have a higher initial download then we're done
else:
return True
# if we got here than it's either not on the lists, they're empty, or it's lower than the highest required
else:
return False
def isFirstBestMatch(result):
"""
Checks if the given result is a best quality match and if we want to archive the episode on first match.
"""
logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name,
logger.DEBUG)
show_obj = result.episodes[0].show
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
# if there is a redownload that's a match to one of our best qualities and we want to archive the episode then we are done
if best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities:
return True
return False
def wantedEpisodes(show, fromDate):
anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable
allQualities = list(set(anyQualities + bestQualities))
logger.log(u"Seeing if we need anything from " + show.name)
myDB = db.DBConnection()
if show.air_by_date:
sqlResults = myDB.select(
"SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
[fromDate.toordinal(), show.indexerid])
else:
sqlResults = myDB.select(
"SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
[show.indexerid, fromDate.toordinal()])
# check through the list of statuses to see if we want any
wanted = []
for result in sqlResults:
curCompositeStatus = int(result["status"])
curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
if bestQualities:
highestBestQuality = max(allQualities)
else:
highestBestQuality = 0
# if we need a better one then say yes
if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:
epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
epObj.wantedQuality = [i for i in allQualities if (i > curQuality and i != common.Quality.UNKNOWN)]
wanted.append(epObj)
return wanted
def searchForNeededEpisodes():
foundResults = {}
didSearch = False
origThreadName = threading.currentThread().name
threads = []
show_list = sickbeard.showList
fromDate = datetime.date.fromordinal(1)
episodes = []
for curShow in show_list:
if curShow.paused:
continue
episodes.extend(wantedEpisodes(curShow, fromDate))
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
for curProvider in providers:
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation
threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
" :: [" + curProvider.name + "]"))
# start the thread we just created
threads[-1].start()
# wait for all threads to finish
for t in threads:
t.join()
for curProvider in providers:
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
curFoundResults = curProvider.searchRSS(episodes)
didSearch = True
# pick a single result for each episode, respecting existing results
for curEp in curFoundResults:
if curEp.show.paused:
logger.log(
u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(),
logger.DEBUG)
continue
            # find the best result for the current episode
            bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
# if all results were rejected move on to the next episode
if not bestResult:
logger.log(u"All found results for " + curEp.prettyName() + " were rejected.", logger.DEBUG)
continue
# if it's already in the list (from another provider) and the newly found quality is no better then skip it
if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
continue
# filter out possible bad torrents from providers such as ezrss
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None
if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url)
if not bestResult.content:
continue
foundResults[curEp] = bestResult
threading.currentThread().name = origThreadName
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in the SickGear config for daily searches. Please check your settings.",
logger.ERROR)
return foundResults.values()
def searchProviders(show, episodes, manualSearch=False):
foundResults = {}
finalResults = []
didSearch = False
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
for providerNum, curProvider in enumerate(providers):
if curProvider.anime_only and not show.is_anime:
logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
continue
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
foundResults[curProvider.name] = {}
searchCount = 0
search_mode = curProvider.search_mode
        while True:
searchCount += 1
if search_mode == 'eponly':
logger.log(u"Performing episode search for " + show.name)
else:
logger.log(u"Performing season pack search for " + show.name)
try:
curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
break
except Exception, e:
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
break
finally:
threading.currentThread().name = origThreadName
didSearch = True
if len(searchResults):
# make a list of all the results for this provider
for curEp in searchResults:
# skip non-tv crap
searchResults[curEp] = filter(
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, searchResults[curEp])
if curEp in foundResults:
foundResults[curProvider.name][curEp] += searchResults[curEp]
else:
foundResults[curProvider.name][curEp] = searchResults[curEp]
break
elif not curProvider.search_fallback or searchCount == 2:
break
if search_mode == 'sponly':
logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
search_mode = 'eponly'
else:
logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
search_mode = 'sponly'
# skip to next provider if we have no results to process
if not len(foundResults[curProvider.name]):
continue
anyQualities, bestQualities = Quality.splitQuality(show.quality)
# pick the best season NZB
bestSeasonResult = None
if SEASON_RESULT in foundResults[curProvider.name]:
bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
anyQualities + bestQualities)
highest_quality_overall = 0
for cur_episode in foundResults[curProvider.name]:
for cur_result in foundResults[curProvider.name][cur_episode]:
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
logger.DEBUG)
# see if every episode is wanted
if bestSeasonResult:
            searchedSeasons = [str(x.season) for x in episodes]
# get the quality of the season nzb
seasonQual = bestSeasonResult.quality
logger.log(
u"The quality of the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[
seasonQual], logger.DEBUG)
myDB = db.DBConnection()
allEps = [int(x["episode"])
for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( " + ','.join(searchedSeasons) + " ) )",
[show.indexerid])]
logger.log(u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = %s AND season in %s]" % (show.indexerid, ','.join(searchedSeasons)))
logger.log(u"Episode list: " + str(allEps), logger.DEBUG)
allWanted = True
anyWanted = False
for curEpNum in allEps:
for season in set([x.season for x in episodes]):
if not show.wantEpisode(season, curEpNum, seasonQual):
allWanted = False
else:
anyWanted = True
# if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
if allWanted and bestSeasonResult.quality == highest_quality_overall:
logger.log(
u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonResult.episodes = epObjs
return [bestSeasonResult]
elif not anyWanted:
logger.log(
u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
logger.DEBUG)
else:
if bestSeasonResult.provider.providerType == GenericProvider.NZB:
logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
# if not, break it apart and add them as the lowest priority results
individualResults = nzbSplitter.splitResult(bestSeasonResult)
individualResults = filter(
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, individualResults)
for curResult in individualResults:
if len(curResult.episodes) == 1:
epNum = curResult.episodes[0].episode
elif len(curResult.episodes) > 1:
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(curResult)
else:
foundResults[curProvider.name][epNum] = [curResult]
                # If this is a torrent all we can do is leech the entire torrent; the user will have to select which eps not to download in their torrent client
else:
# Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
logger.log(
u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonResult.episodes = epObjs
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(bestSeasonResult)
else:
foundResults[curProvider.name][epNum] = [bestSeasonResult]
# go through multi-ep results and see if we really want them or not, get rid of the rest
multiResults = {}
if MULTI_EP_RESULT in foundResults[curProvider.name]:
for multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:
logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
multiResult.provider.name):
logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result")
continue
# see how many of the eps that this result covers aren't covered by single results
neededEps = []
notNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
# if we have results for the episode
if epNum in foundResults[curProvider.name] and len(foundResults[curProvider.name][epNum]) > 0:
neededEps.append(epNum)
else:
notNeededEps.append(epNum)
logger.log(
u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps),
logger.DEBUG)
if not notNeededEps:
logger.log(u"All of these episodes were covered by single episode results, ignoring this multi-episode result", logger.DEBUG)
continue
# check if these eps are already covered by another multi-result
multiNeededEps = []
multiNotNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in multiResults:
multiNotNeededEps.append(epNum)
else:
multiNeededEps.append(epNum)
logger.log(
u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(
multiNotNeededEps), logger.DEBUG)
if not multiNeededEps:
logger.log(
u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result",
logger.DEBUG)
continue
# if we're keeping this multi-result then remember it
for epObj in multiResult.episodes:
multiResults[epObj.episode] = multiResult
# don't bother with the single result if we're going to get it with a multi result
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in foundResults[curProvider.name]:
logger.log(
u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
epNum) + ", removing the single-episode results from the list", logger.DEBUG)
del foundResults[curProvider.name][epNum]
# of all the single ep results narrow it down to the best one for each episode
finalResults += set(multiResults.values())
for curEp in foundResults[curProvider.name]:
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
continue
if len(foundResults[curProvider.name][curEp]) == 0:
continue
bestResult = pickBestResult(foundResults[curProvider.name][curEp], show)
# if all results were rejected move on to the next episode
if not bestResult:
continue
# filter out possible bad torrents from providers such as ezrss
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None
if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url)
if not bestResult.content:
continue
            # add the result if it's not a duplicate; replace any overlapping lower-quality result
found = False
for i, result in enumerate(finalResults):
for bestResultEp in bestResult.episodes:
if bestResultEp in result.episodes:
if result.quality < bestResult.quality:
finalResults.pop(i)
else:
found = True
if not found:
finalResults += [bestResult]
# check that we got all the episodes we wanted first before doing a match and snatch
wantedEpCount = 0
for wantedEp in episodes:
for result in finalResults:
if wantedEp in result.episodes and isFinalResult(result):
wantedEpCount += 1
# make sure we search every provider for results unless we found everything we wanted
if wantedEpCount == len(episodes):
break
if not didSearch:
logger.log(u"No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.",
logger.ERROR)
return finalResults
|
ressu/SickGear
|
sickbeard/search.py
|
Python
|
gpl-3.0
| 30,487 | 0.004395 |
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the TypeHandler class."""
from .handler import Handler
class TypeHandler(Handler):
"""Handler class to handle updates of custom types.
Attributes:
type (:obj:`type`): The ``type`` of updates this handler should process.
callback (:obj:`callable`): The callback function for this handler.
strict (:obj:`bool`): Optional. Use ``type`` instead of ``isinstance``.
Default is ``False``
pass_update_queue (:obj:`bool`): Optional. Determines whether ``update_queue`` will be
passed to the callback function.
pass_job_queue (:obj:`bool`): Optional. Determines whether ``job_queue`` will be passed to
the callback function.
Args:
type (:obj:`type`): The ``type`` of updates this handler should process, as
determined by ``isinstance``
callback (:obj:`callable`): A function that takes ``bot, update`` as positional arguments.
It will be called when the :attr:`check_update` has determined that an update should be
processed by this handler.
strict (:obj:`bool`, optional): Use ``type`` instead of ``isinstance``.
Default is ``False``
pass_update_queue (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``update_queue`` will be passed to the callback function. It will be the ``Queue``
instance used by the :class:`telegram.ext.Updater` and :class:`telegram.ext.Dispatcher`
that contains new updates which can be used to insert updates. Default is ``False``.
pass_job_queue (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``job_queue`` will be passed to the callback function. It will be a
:class:`telegram.ext.JobQueue` instance created by the :class:`telegram.ext.Updater`
which can be used to schedule new jobs. Default is ``False``.
"""
def __init__(self, type, callback, strict=False, pass_update_queue=False,
pass_job_queue=False):
super(TypeHandler, self).__init__(
callback, pass_update_queue=pass_update_queue, pass_job_queue=pass_job_queue)
self.type = type
self.strict = strict
def check_update(self, update):
"""Determines whether an update should be passed to this handlers :attr:`callback`.
Args:
update (:class:`telegram.Update`): Incoming telegram update.
Returns:
:obj:`bool`
"""
if not self.strict:
return isinstance(update, self.type)
else:
return type(update) is self.type
def handle_update(self, update, dispatcher):
"""Send the update to the :attr:`callback`.
Args:
update (:class:`telegram.Update`): Incoming telegram update.
dispatcher (:class:`telegram.ext.Dispatcher`): Dispatcher that originated the Update.
"""
optional_args = self.collect_optional_args(dispatcher)
return self.callback(dispatcher.bot, update, **optional_args)
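# Illustrative usage sketch (not part of this module; assumes a configured
# telegram.ext.Updater instance named `updater`):
#   def dump_update(bot, update):
#       print(update)
#   updater.dispatcher.add_handler(TypeHandler(dict, dump_update))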
|
rogerscristo/BotFWD
|
env/lib/python3.6/site-packages/telegram/ext/typehandler.py
|
Python
|
mit
| 4,005 | 0.003745 |
# -*- coding: utf-8 -*-
import sys
import time
from config import Config
from multiprocessing import managers, connection
def _new_init_timeout():
return time.time() + 0.2
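# Monkey-patch multiprocessing's connection retry deadline (20 seconds by
# default in CPython) down to ~0.2s so a failed manager connection fails fast.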
sys.modules['multiprocessing'].__dict__['managers'].__dict__['connection']._init_timeout = _new_init_timeout
from multiprocessing.managers import BaseManager
class DataManager(BaseManager): pass
def set_data(port, k, v):
# create a shared Data object
DataManager.register('get_data')
manager = DataManager(address=(Config.hostname, port + 1),
authkey=Config.authkey)
manager.connect()
data = manager.get_data()
data[k] = v
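# Illustrative usage (hypothetical port and key; assumes a DataManager server
# is already serving a shared dict via register('get_data') on port + 1):
#   set_data(8000, 'now_playing', 'track-42')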
|
noah/riddim
|
lib/data.py
|
Python
|
mit
| 674 | 0.014837 |
class Floor_Object(object):
"""docstring for Floor_Object"""
def __init__(self, coordinates, icon, name, interactions, description):
super(Floor_Object, self).__init__()
self.coordinates = coordinates
self.icon = icon
self.name = name
self.interactions = interactions
self.description = description
class Chest(Floor_Object):
"""A container holding items"""
def __init__(self, coordinates, icon, name, interactions, description, item_list, is_locked, key_name):
super(Chest, self).__init__(coordinates, icon, name, interactions, description)
self.item_list = item_list
self.is_locked = is_locked
self.key_name = key_name
class Item_Pile(Floor_Object):
"""A list of items present on a tile"""
def __init__(self, coordinates, icon, name, interactions, description, item_list):
super(Item_Pile, self).__init__(coordinates, icon, name, interactions, description)
self.item_list = item_list
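# Illustrative construction (hypothetical values):
#   chest = Chest(coordinates=(3, 4), icon='C', name='Old Chest',
#                 interactions=['open'], description='A dusty wooden chest.',
#                 item_list=['rusty sword'], is_locked=True, key_name='iron key')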
|
Otterpaw/Python-Roguelike
|
floor_object.py
|
Python
|
mit
| 920 | 0.029348 |
import json
import urllib2
import time
import math
from pymongo import MongoClient
from pymongo import ASCENDING, DESCENDING
def debug(info):
print info
def log(info):
print info
def parseJson(url):
try:
data = json.load(urllib2.urlopen(url))
return data
except ValueError as e:
log(e)
exit()
except:
log("Url Error: " + url)
exit()
def openDBCollection(database, collectionName):
client = MongoClient()
db = client[database]
collection = db[collectionName]
# In case we need to make results unique
# collection.ensure_index([("name", ASCENDING), ("start", ASCENDING)], unique=True, dropDups=True)
return collection
def validateData(raw):
data = [];
for key in raw:
value = raw[key]
if isinstance(value, basestring) and value.lower() == "error":
log("Failed retrieve latency for " + key)
else:
value["name"] = key
data.append(value)
return data
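# Illustrative: validateData({'us-east': {'ms': 42}, 'eu': 'ERROR'}) logs and
# skips the 'ERROR' entry and returns [{'ms': 42, 'name': 'us-east'}].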
def write(collection, posts):
for post in posts:
try:
post_id = collection.insert(post)
debug(post_id)
except Exception:
log("Insertion failed for" + post["name"])
return True
def main(url):
# url = "http://stackoverflow.com/questions/1479776/too-many-values-to-unpack-exception"
data = parseJson(url)
posts = validateData(data)
collection = openDBCollection('latency', 'dmos')
write(collection, posts)
url = "http://api.openweathermap.org/data/2.5/weather?q=London,uk"
main(url)
|
hethune/tutorials
|
pymongo/openweathermap.py
|
Python
|
mit
| 1,579 | 0.008233 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe, unittest
from frappe.defaults import *
class TestDefaults(unittest.TestCase):
def test_global(self):
clear_user_default("key1")
set_global_default("key1", "value1")
self.assertEqual(get_global_default("key1"), "value1")
set_global_default("key1", "value2")
self.assertEqual(get_global_default("key1"), "value2")
add_global_default("key1", "value3")
self.assertEqual(get_global_default("key1"), "value2")
self.assertEqual(get_defaults()["key1"], ["value2", "value3"])
self.assertEqual(get_user_default_as_list("key1"), ["value2", "value3"])
def test_user(self):
set_user_default("key1", "2value1")
self.assertEqual(get_user_default_as_list("key1"), ["2value1"])
set_user_default("key1", "2value2")
self.assertEqual(get_user_default("key1"), "2value2")
add_user_default("key1", "3value3")
self.assertEqual(get_user_default("key1"), "2value2")
self.assertEqual(get_user_default_as_list("key1"), ["2value2", "3value3"])
def test_global_if_not_user(self):
set_global_default("key4", "value4")
self.assertEqual(get_user_default("key4"), "value4")
def test_clear(self):
set_user_default("key5", "value5")
self.assertEqual(get_user_default("key5"), "value5")
clear_user_default("key5")
self.assertEqual(get_user_default("key5"), None)
def test_clear_global(self):
set_global_default("key6", "value6")
self.assertEqual(get_user_default("key6"), "value6")
clear_default("key6", value="value6")
self.assertEqual(get_user_default("key6"), None)
def test_user_permission_on_defaults(self):
self.assertEqual(get_global_default("language"), "en")
self.assertEqual(get_user_default("language"), "en")
self.assertEqual(get_user_default_as_list("language"), ["en"])
old_user = frappe.session.user
user = '[email protected]'
frappe.set_user(user)
perm_doc = frappe.get_doc(dict(
doctype='User Permission',
user=frappe.session.user,
allow="Language",
for_value="en-GB",
)).insert(ignore_permissions = True)
self.assertEqual(get_global_default("language"), None)
self.assertEqual(get_user_default("language"), None)
self.assertEqual(get_user_default_as_list("language"), [])
frappe.delete_doc('User Permission', perm_doc.name)
frappe.set_user(old_user)
|
frappe/frappe
|
frappe/tests/test_defaults.py
|
Python
|
mit
| 2,355 | 0.022505 |
from nose.tools import eq_
from django.test.client import RequestFactory
from airmozilla.base.tests.testbase import DjangoTestCase
from airmozilla.base.helpers import abs_static, show_duration
class TestAbsStaticHelpers(DjangoTestCase):
def tearDown(self):
super(TestAbsStaticHelpers, self).tearDown()
# This is necessary because funfactory (where we use the static()
# helper function) uses staticfiles_storage which gets lazy loaded
# and remembered once in memory.
        # Overriding it like this means we can change settings
        # and have the change reflected immediately.
from funfactory import helpers
from django.contrib.staticfiles.storage import ConfiguredStorage
helpers.staticfiles_storage = ConfiguredStorage()
# cache.clear()
def test_abs_static(self):
context = {}
context['request'] = RequestFactory().get('/')
result = abs_static(context, 'foo.png')
eq_(result, 'http://testserver/static/foo.png')
def test_abs_static_already(self):
context = {}
context['request'] = RequestFactory().get('/')
result = abs_static(context, '/media/foo.png')
eq_(result, 'http://testserver/media/foo.png')
result = abs_static(context, '//my.cdn.com/media/foo.png')
eq_(result, 'http://my.cdn.com/media/foo.png')
def test_abs_static_with_STATIC_URL(self):
context = {}
context['request'] = RequestFactory().get('/')
with self.settings(STATIC_URL='//my.cdn.com/static/'):
result = abs_static(context, 'foo.png')
eq_(result, 'http://my.cdn.com/static/foo.png')
def test_abs_static_with_already_STATIC_URL(self):
context = {}
context['request'] = RequestFactory().get('/')
with self.settings(STATIC_URL='//my.cdn.com/static/'):
result = abs_static(context, '//my.cdn.com/static/foo.png')
eq_(result, 'http://my.cdn.com/static/foo.png')
def test_abs_static_with_HTTPS_STATIC_URL(self):
context = {}
context['request'] = RequestFactory().get('/')
with self.settings(STATIC_URL='https://my.cdn.com/static/'):
result = abs_static(context, 'foo.png')
eq_(result, 'https://my.cdn.com/static/foo.png')
def test_abs_static_with_STATIC_URL_with_https(self):
context = {}
context['request'] = RequestFactory().get('/')
context['request']._is_secure = lambda: True
assert context['request'].is_secure()
with self.settings(STATIC_URL='//my.cdn.com/static/'):
result = abs_static(context, 'foo.png')
eq_(result, 'https://my.cdn.com/static/foo.png')
class TestDuration(DjangoTestCase):
def test_show_duration_long_format(self):
result = show_duration(60 * 60)
eq_(result, "1 hour")
result = show_duration(60)
eq_(result, "1 minute")
result = show_duration(2 * 60 * 60 + 10 * 60)
eq_(result, "2 hours 10 minutes")
result = show_duration(1 * 60 * 60 + 1 * 60)
eq_(result, "1 hour 1 minute")
result = show_duration(1 * 60 * 60 + 1 * 60 + 1)
eq_(result, "1 hour 1 minute")
result = show_duration(2 * 60 * 60 + 2 * 60)
eq_(result, "2 hours 2 minutes")
result = show_duration(1 * 60 * 60 + 1 * 60 + 1, include_seconds=True)
eq_(result, "1 hour 1 minute 1 second")
result = show_duration(1 * 60 * 60 + 1 * 60 + 2, include_seconds=True)
eq_(result, "1 hour 1 minute 2 seconds")
result = show_duration(49)
eq_(result, "49 seconds")
|
tannishk/airmozilla
|
airmozilla/base/tests/test_helpers.py
|
Python
|
bsd-3-clause
| 3,664 | 0 |
# Copyright 2017 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper class for getting names and paths related to interfaces."""
import os
from cobalt.build.path_conversion import ConvertPath
def _NormalizeSlashes(path):
if os.path.sep == '\\':
return path.replace('\\', '/')
else:
return path
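# e.g. on Windows, _NormalizeSlashes('a\\b\\c.idl') returns 'a/b/c.idl'; on
# POSIX systems the path is returned unchanged.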
class PathBuilder(object):
"""Provides helper functions for getting paths related to an interface."""
def __init__(self, engine_prefix, info_provider, interfaces_root,
generated_root_directory):
self.interfaces_root = _NormalizeSlashes(interfaces_root)
self.generated_root = _NormalizeSlashes(generated_root_directory)
self.engine_prefix = engine_prefix
self.info_provider = info_provider
self.interfaces_info = info_provider.interfaces_info
@property
def generated_conversion_header_path(self):
return os.path.join(self.generated_root,
'%s_gen_type_conversion.h' % self.engine_prefix)
@property
def generated_conversion_include_path(self):
return os.path.relpath(self.generated_conversion_header_path,
self.generated_root)
def NamespaceComponents(self, interface_name):
"""Get the interface's namespace as a list of namespace components."""
# Get the IDL filename relative to the cobalt directory, and split the
# directory to get the list of namespace components.
if interface_name in self.interfaces_info:
interface_info = self.interfaces_info[interface_name]
idl_path = interface_info['full_path']
elif interface_name in self.info_provider.enumerations:
enum_info = self.info_provider.enumerations[interface_name]
idl_path = enum_info['full_path']
else:
      raise KeyError('Unknown interface name %s' % interface_name)
rel_idl_path = os.path.relpath(idl_path, self.interfaces_root)
components = os.path.dirname(rel_idl_path).split(os.sep)
# Check if this IDL's path lies in our interfaces root. If it does not,
# we treat it as an extension IDL.
real_interfaces_root = os.path.realpath(self.interfaces_root)
real_idl_path = os.path.realpath(os.path.dirname(idl_path))
interfaces_root_is_in_components_path = (os.path.commonprefix(
[real_interfaces_root, real_idl_path]) == real_interfaces_root)
if interfaces_root_is_in_components_path:
return [os.path.basename(self.interfaces_root)] + components
else:
# If our IDL path lies outside of the cobalt/ directory, assume it is
# an externally defined web extension and assign it the 'webapi_extension'
# namespace.
return [os.path.basename(self.interfaces_root), 'webapi_extension']
def Namespace(self, interface_name):
"""Get the interface's namespace."""
return '::'.join(self.NamespaceComponents(interface_name))
def BindingsClass(self, interface_name):
"""Get the name of the generated bindings class."""
return self.engine_prefix.capitalize() + interface_name
def FullBindingsClassName(self, interface_name):
"""Get the fully qualified name of the generated bindings class."""
return '%s::%s' % (self.Namespace(interface_name),
self.BindingsClass(interface_name))
def FullClassName(self, interface_name):
"""Get the fully qualified name of the implementation class."""
components = self.NamespaceComponents(interface_name)
return '::'.join(components + [interface_name])
def ImplementationHeaderPath(self, interface_name):
"""Get an #include path to the interface's implementation .h file."""
interface_info = self.interfaces_info[interface_name]
path = ConvertPath(
interface_info['full_path'], forward_slashes=True, output_extension='h')
return os.path.relpath(path, os.path.dirname(self.interfaces_root))
def BindingsHeaderIncludePath(self, interface_name):
"""Get an #include path to the interface's generated .h file."""
path = self.BindingsHeaderFullPath(interface_name)
return os.path.relpath(path, self.generated_root)
def BindingsHeaderFullPath(self, interface_name):
"""Get the full path to the interface's implementation .h file."""
interface_info = self.interfaces_info[interface_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='h',
base_directory=os.path.dirname(self.interfaces_root))
def BindingsImplementationPath(self, interface_name):
"""Get the full path to the interface's implementation .cc file."""
interface_info = self.interfaces_info[interface_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='cc',
base_directory=os.path.dirname(self.interfaces_root))
def DictionaryHeaderIncludePath(self, dictionary_name):
"""Get the #include path to the dictionary's header."""
path = self.DictionaryHeaderFullPath(dictionary_name)
return os.path.relpath(path, self.generated_root)
def DictionaryHeaderFullPath(self, dictionary_name):
"""Get the full path to the dictionary's generated implementation header."""
interface_info = self.interfaces_info[dictionary_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_extension='h',
base_directory=os.path.dirname(self.interfaces_root))
def DictionaryConversionImplementationPath(self, dictionary_name):
"""Get the full path to the dictionary's conversion header."""
interface_info = self.interfaces_info[dictionary_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='cc',
base_directory=os.path.dirname(self.interfaces_root))
def EnumHeaderIncludePath(self, enum_name):
"""Get the #include path to the dictionary's header."""
path = self.EnumHeaderFullPath(enum_name)
return os.path.relpath(path, self.generated_root)
def EnumHeaderFullPath(self, enum_name):
"""Get the full path to the dictionary's generated implementation header."""
interface_info = self.info_provider.enumerations[enum_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_extension='h',
base_directory=os.path.dirname(self.interfaces_root))
def EnumConversionImplementationFullPath(self, enum_name):
"""Get the full path to the dictionary's conversion header."""
interface_info = self.info_provider.enumerations[enum_name]
return ConvertPath(
interface_info['full_path'],
forward_slashes=True,
output_directory=self.generated_root,
output_prefix='%s_' % self.engine_prefix,
output_extension='cc',
base_directory=os.path.dirname(self.interfaces_root))
|
youtube/cobalt
|
cobalt/bindings/path_generator.py
|
Python
|
bsd-3-clause
| 7,751 | 0.004774 |
from Tkinter import *
root = Tk()
root.title('first test window')
#root.geometry('300x200')
frm = Frame(root)
frm_l = Frame(frm)
Label(frm_l, text='left_top').pack(side=TOP)
Label(frm_l, text='left_bottom').pack(side=BOTTOM)
frm_l.pack(side=LEFT)
frm_r = Frame(frm)
Label(frm_r, text='right_top').pack(side=TOP)
Label(frm_r, text='right_bottom').pack(side=BOTTOM)
frm_r.pack(side=RIGHT)
frm.pack(side=TOP)
##########################################################
frm1 = Frame(root)
var = StringVar()
Entry(frm1, textvariable=var).pack(side=TOP)
var.set('entry text')
t = Text(frm1)
t.pack(side=TOP)
def print_entry():
t.insert(END, var.get())
Button(frm1, text='copy', command=print_entry).pack(side=TOP)
frm1.pack(side=TOP)
##########################################################
frm2 = Frame(root)
redbutton = Button(frm2, text="Red", fg="red")
redbutton.pack(side=LEFT)
brownbutton = Button(frm2, text="Brown", fg="brown")
brownbutton.pack(side=LEFT)
bluebutton = Button(frm2, text="Blue", fg="blue")
bluebutton.pack(side=LEFT)
blackbutton = Button(frm2, text="Black", fg="black")
blackbutton.pack(side=BOTTOM)
frm2.pack(side=TOP)
######################################################
frm3 = Frame(root, width=200, height=150)
b = Button(frm3, text='move')
# pack() would override the place() geometry, so the button is managed by
# place() alone and the frame is given an explicit size to stay visible.
b.place(bordermode=OUTSIDE, height=100, width=100, x=50, y=50)
frm3.pack(side=TOP)
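# Added sketch: place() also supports relative coordinates, which the demo
# above does not exercise (standard Tkinter options, shown for contrast):
#   b2 = Button(frm3, text='centered')
#   b2.place(relx=0.5, rely=0.5, anchor=CENTER)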
root.mainloop()
|
NUPT-Pig/python_test
|
tkinter_gui.py
|
Python
|
gpl-2.0
| 1,385 | 0.012274 |
import json
import threading
import time
import traceback
import urllib
from vtdb import cursor
from vtdb import dbexceptions
import environment
import framework
class TestStream(framework.TestCase):
def tearDown(self):
self.env.conn.begin()
self.env.execute("delete from vtocc_big")
self.env.conn.commit()
# UNION queries like this used to crash vtocc, only straight SELECT
# would go through. This is a unit test to show it is fixed.
def test_union(self):
cu = self.env.execute("select 1 from dual union select 1 from dual",
cursorclass=cursor.StreamCursor)
count = 0
while True:
row = cu.fetchone()
if row is None:
break
count += 1
self.assertEqual(count, 1)
def test_customrules(self):
bv = {'asdfg': 1}
try:
self.env.execute("select * from vtocc_test where intval=:asdfg", bv,
cursorclass=cursor.StreamCursor)
self.fail("Bindvar asdfg should not be allowed by custom rule")
except dbexceptions.DatabaseError as e:
self.assertContains(str(e), "error: Query disallowed")
# Test dynamic custom rule for vttablet
if self.env.env == "vttablet":
if environment.topo_server().flavor() == 'zookeeper':
# Make a change to the rule
self.env.change_customrules()
time.sleep(3)
try:
self.env.execute("select * from vtocc_test where intval=:asdfg", bv,
cursorclass=cursor.StreamCursor)
except dbexceptions.DatabaseError as e:
self.fail("Bindvar asdfg should be allowed after a change of custom rule, Err=" + str(e))
self.env.restore_customrules()
time.sleep(3)
try:
self.env.execute("select * from vtocc_test where intval=:asdfg", bv,
cursorclass=cursor.StreamCursor)
self.fail("Bindvar asdfg should not be allowed by custom rule")
except dbexceptions.DatabaseError as e:
self.assertContains(str(e), "error: Query disallowed")
def test_basic_stream(self):
self._populate_vtocc_big_table(100)
loop_count = 1
# select lots of data using a non-streaming query
if True:
for i in xrange(loop_count):
cu = self.env.execute("select * from vtocc_big b1, vtocc_big b2")
rows = cu.fetchall()
self.assertEqual(len(rows), 10000)
self.check_row_10(rows[10])
# select lots of data using a streaming query
if True:
for i in xrange(loop_count):
cu = cursor.StreamCursor(self.env.conn)
cu.execute("select * from vtocc_big b1, vtocc_big b2", {})
count = 0
while True:
row = cu.fetchone()
if row is None:
break
if count == 10:
self.check_row_10(row)
count += 1
self.assertEqual(count, 10000)
def test_streaming_error(self):
with self.assertRaises(dbexceptions.DatabaseError):
cu = self.env.execute("select count(abcd) from vtocc_big b1",
cursorclass=cursor.StreamCursor)
def check_row_10(self, row):
# null the dates so they match
row = list(row)
row[6] = None
row[11] = None
row[20] = None
row[25] = None
self.assertEqual(row, [10L, 'AAAAAAAAAAAAAAAAAA 10', 'BBBBBBBBBBBBBBBBBB 10', 'C', 'DDDDDDDDDDDDDDDDDD 10', 'EEEEEEEEEEEEEEEEEE 10', None, 'FF 10', 'GGGGGGGGGGGGGGGGGG 10', 10L, 10L, None, 10L, 10, 0L, 'AAAAAAAAAAAAAAAAAA 0', 'BBBBBBBBBBBBBBBBBB 0', 'C', 'DDDDDDDDDDDDDDDDDD 0', 'EEEEEEEEEEEEEEEEEE 0', None, 'FF 0', 'GGGGGGGGGGGGGGGGGG 0', 0L, 0L, None, 0L, 0])
def test_streaming_terminate(self):
try:
self._populate_vtocc_big_table(100)
query = 'select * from vtocc_big b1, vtocc_big b2, vtocc_big b3'
cu = cursor.StreamCursor(self.env.conn)
thd = threading.Thread(target=self._stream_exec, args=(cu,query))
thd.start()
tablet_addr = "http://" + self.env.conn.addr
connId = self._get_conn_id(tablet_addr)
self._terminate_query(tablet_addr, connId)
thd.join()
with self.assertRaises(dbexceptions.DatabaseError) as cm:
cu.fetchall()
errMsg1 = "error: the query was killed either because it timed out or was canceled: Lost connectioy to MySQL server during query (errno 2013)"
errMsg2 = "error: Query execution was interrupted (errno 1317)"
self.assertTrue(cm.exception not in (errMsg1, errMsg2), "did not raise interruption error: %s" % str(cm.exception))
cu.close()
except Exception, e:
self.fail("Failed with error %s %s" % (str(e), traceback.print_exc()))
def _populate_vtocc_big_table(self, num_rows):
self.env.conn.begin()
for i in xrange(num_rows):
self.env.execute("insert into vtocc_big values " +
"(" + str(i) + ", " +
"'AAAAAAAAAAAAAAAAAA " + str(i) + "', " +
"'BBBBBBBBBBBBBBBBBB " + str(i) + "', " +
"'C', " +
"'DDDDDDDDDDDDDDDDDD " + str(i) + "', " +
"'EEEEEEEEEEEEEEEEEE " + str(i) + "', " +
"now()," +
"'FF " + str(i) + "', " +
"'GGGGGGGGGGGGGGGGGG " + str(i) + "', " +
str(i) + ", " +
str(i) + ", " +
"now()," +
str(i) + ", " +
str(i%100) + ")")
self.env.conn.commit()
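  # Added sketch: the string-concatenated INSERT above could equally use bind
  # variables, which env.execute already accepts elsewhere in this file
  # (illustrative only; column names and trailing values abbreviated):
  #
  #   self.env.execute(
  #       "insert into vtocc_big values (:id, :c1, :c2, 'C', :c3, :c4, now(), "
  #       ":c5, :c6, :id, :id, now(), :id, :mod)",
  #       {'id': i, 'c1': 'AAAAAAAAAAAAAAAAAA %d' % i, 'mod': i % 100})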
# Initiate a slow stream query
def _stream_exec(self, cu, query):
cu.execute(query, {})
# Get the connection id from status page
def _get_conn_id(self, tablet_addr):
streamqueryz_url = tablet_addr + "/streamqueryz?format=json"
retries = 3
streaming_queries = []
while len(streaming_queries) == 0:
content = urllib.urlopen(streamqueryz_url).read()
streaming_queries = json.loads(content)
retries -= 1
if retries == 0:
self.fail("unable to fetch streaming queries from %s" % streamqueryz_url)
else:
time.sleep(1)
connId = streaming_queries[0]['ConnID']
return connId
# Terminate the query via streamqueryz admin page
def _terminate_query(self, tablet_addr, connId):
terminate_url = tablet_addr + "/streamqueryz/terminate?format=json&connID=" + str(connId)
urllib.urlopen(terminate_url).read()
|
anusornc/vitess
|
test/queryservice_tests/stream_tests.py
|
Python
|
bsd-3-clause
| 6,435 | 0.014452 |
from __future__ import print_function, unicode_literals
import importlib
import os
import sys
from django.apps import apps
from django.db.models.fields import NOT_PROVIDED
from django.utils import datetime_safe, six, timezone
from django.utils.six.moves import input
from .loader import MIGRATIONS_MODULE_NAME
class MigrationQuestioner(object):
"""
Gives the autodetector responses to questions it might have.
This base class has a built-in noninteractive mode, but the
interactive subclass is what the command-line arguments will use.
"""
def __init__(self, defaults=None, specified_apps=None, dry_run=None):
self.defaults = defaults or {}
self.specified_apps = specified_apps or set()
self.dry_run = dry_run
def ask_initial(self, app_label):
"Should we create an initial migration for the app?"
# If it was specified on the command line, definitely true
if app_label in self.specified_apps:
return True
# Otherwise, we look to see if it has a migrations module
# without any Python files in it, apart from __init__.py.
# Apps from the new app template will have these; the python
# file check will ensure we skip South ones.
try:
app_config = apps.get_app_config(app_label)
except LookupError: # It's a fake app.
return self.defaults.get("ask_initial", False)
migrations_import_path = "%s.%s" % (app_config.name, MIGRATIONS_MODULE_NAME)
try:
migrations_module = importlib.import_module(migrations_import_path)
except ImportError:
return self.defaults.get("ask_initial", False)
else:
if hasattr(migrations_module, "__file__"):
filenames = os.listdir(os.path.dirname(migrations_module.__file__))
elif hasattr(migrations_module, "__path__"):
if len(migrations_module.__path__) > 1:
return False
filenames = os.listdir(list(migrations_module.__path__)[0])
return not any(x.endswith(".py") for x in filenames if x != "__init__.py")
def ask_not_null_addition(self, field_name, model_name):
"Adding a NOT NULL field to a model"
# None means quit
return None
def ask_not_null_alteration(self, field_name, model_name):
"Changing a NULL field to NOT NULL"
# None means quit
return None
def ask_rename(self, model_name, old_name, new_name, field_instance):
"Was this field really renamed?"
return self.defaults.get("ask_rename", False)
def ask_rename_model(self, old_model_state, new_model_state):
"Was this model really renamed?"
return self.defaults.get("ask_rename_model", False)
def ask_merge(self, app_label):
"Do you really want to merge these migrations?"
return self.defaults.get("ask_merge", False)
class InteractiveMigrationQuestioner(MigrationQuestioner):
def _boolean_input(self, question, default=None):
result = input("%s " % question)
if not result and default is not None:
return default
while len(result) < 1 or result[0].lower() not in "yn":
result = input("Please answer yes or no: ")
return result[0].lower() == "y"
def _choice_input(self, question, choices):
print(question)
for i, choice in enumerate(choices):
print(" %s) %s" % (i + 1, choice))
result = input("Select an option: ")
while True:
try:
value = int(result)
if 0 < value <= len(choices):
return value
except ValueError:
pass
result = input("Please select a valid option: ")
def _ask_default(self):
print("Please enter the default value now, as valid Python")
print("The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now()")
while True:
if six.PY3:
# Six does not correctly abstract over the fact that
# py3 input returns a unicode string, while py2 raw_input
# returns a bytestring.
code = input(">>> ")
else:
code = input(">>> ").decode(sys.stdin.encoding)
if not code:
print("Please enter some code, or 'exit' (with no quotes) to exit.")
elif code == "exit":
sys.exit(1)
else:
try:
return eval(code, {}, {"datetime": datetime_safe, "timezone": timezone})
except (SyntaxError, NameError) as e:
print("Invalid input: %s" % e)
def ask_not_null_addition(self, field_name, model_name):
"Adding a NOT NULL field to a model"
if not self.dry_run:
choice = self._choice_input(
"You are trying to add a non-nullable field '%s' to %s without a default; "
"we can't do that (the database needs something to populate existing rows).\n"
"Please select a fix:" % (field_name, model_name),
[
"Provide a one-off default now (will be set on all existing rows)",
"Quit, and let me add a default in models.py",
]
)
if choice == 2:
sys.exit(3)
else:
return self._ask_default()
return None
def ask_not_null_alteration(self, field_name, model_name):
"Changing a NULL field to NOT NULL"
if not self.dry_run:
choice = self._choice_input(
"You are trying to change the nullable field '%s' on %s to non-nullable "
"without a default; we can't do that (the database needs something to "
"populate existing rows).\n"
"Please select a fix:" % (field_name, model_name),
[
"Provide a one-off default now (will be set on all existing rows)",
("Ignore for now, and let me handle existing rows with NULL myself "
"(e.g. because you added a RunPython or RunSQL operation to handle "
"NULL values in a previous data migration)"),
"Quit, and let me add a default in models.py",
]
)
if choice == 2:
return NOT_PROVIDED
elif choice == 3:
sys.exit(3)
else:
return self._ask_default()
return None
def ask_rename(self, model_name, old_name, new_name, field_instance):
"Was this field really renamed?"
msg = "Did you rename %s.%s to %s.%s (a %s)? [y/N]"
return self._boolean_input(msg % (model_name, old_name, model_name, new_name,
field_instance.__class__.__name__), False)
def ask_rename_model(self, old_model_state, new_model_state):
"Was this model really renamed?"
msg = "Did you rename the %s.%s model to %s? [y/N]"
return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name,
new_model_state.name), False)
def ask_merge(self, app_label):
return self._boolean_input(
"\nMerging will only work if the operations printed above do not conflict\n" +
"with each other (working on different fields or models)\n" +
"Do you want to merge these migration branches? [y/N]",
False,
)
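# Added sketch: non-interactive use, grounded in the defaults-dict handling
# above (names are from this module; the scenario itself is illustrative):
#
#   from django.db.migrations.questioner import MigrationQuestioner
#   q = MigrationQuestioner(defaults={'ask_rename': True})
#   q.ask_rename('author', 'name', 'full_name', field_instance=None)  # -> True
#   q.ask_merge('myapp')                                              # -> False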
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/django/db/migrations/questioner.py
|
Python
|
agpl-3.0
| 7,694 | 0.002339 |
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("DecisionTreeClassifier" , "BinaryClass_10" , "db2")
|
antoinecarme/sklearn2sql_heroku
|
tests/classification/BinaryClass_10/ws_BinaryClass_10_DecisionTreeClassifier_db2_code_gen.py
|
Python
|
bsd-3-clause
| 149 | 0.013423 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
from preggy import expect
from tornado.testing import gen_test
from tests.fixtures.watermark_fixtures import (
POSITIONS,
RATIOS,
SOURCE_IMAGE_SIZES,
WATERMARK_IMAGE_SIZES,
)
from thumbor.filters import watermark
from thumbor.testing import FilterTestCase
class WatermarkFilterTestCase(FilterTestCase):
@gen_test
async def test_watermark_filter_centered(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,center,center,60)",
)
expected = self.get_fixture("watermarkCenter.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_centered_x(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,center,40,20)",
)
expected = self.get_fixture("watermarkCenterX.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_centered_y(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,80,center,50)",
)
expected = self.get_fixture("watermarkCenterY.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_repeated(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,repeat,70)",
)
expected = self.get_fixture("watermarkRepeat.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_repeated_x(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,center,70)",
)
expected = self.get_fixture("watermarkRepeatX.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_repeated_y(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,repeat,30)",
)
expected = self.get_fixture("watermarkRepeatY.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_detect_extension_simple(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark,30,-50,60)",
)
expected = self.get_fixture("watermarkSimple.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,-50,60)",
)
expected = self.get_fixture("watermarkSimple.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,4p,-30p,60)",
)
expected = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,32,-160,60)",
)
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_center(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,4p,center,60)",
)
expected = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,32,center,60)",
)
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_repeat(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,30p,60)",
)
expected = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,repeat,160,60)",
)
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_position(self):
watermark.Filter.pre_compile()
filter_instance = watermark.Filter("http://dummy,0,0,0", self.context)
for length, pos, expected in POSITIONS:
test = {
"length": length,
"pos": pos,
}
expect(
filter_instance.detect_and_get_ratio_position(pos, length)
).to_be_equal_with_additional_info(expected, **test)
@gen_test
async def test_watermark_filter_simple_big(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermarkBig.png,-10,-100,50)",
)
expected = self.get_fixture("watermarkSimpleBig.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple_50p_width(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,-50,20,50)",
)
expected = self.get_fixture("watermarkResize50pWidth.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple_70p_height(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,30,-50,20,none,70)",
)
expected = self.get_fixture("watermarkResize70pHeight.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_simple_60p_80p(self):
image = await self.get_filtered(
"source.jpg",
"thumbor.filters.watermark",
"watermark(watermark.png,-30,-200,20,60,80)",
)
expected = self.get_fixture("watermarkResize60p80p.jpg")
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.98)
@gen_test
async def test_watermark_filter_calculated_resizing(self):
watermark.Filter.pre_compile()
filter_instance = watermark.Filter("http://dummy,0,0,0", self.context)
for source_image_width, source_image_height in SOURCE_IMAGE_SIZES:
for (
watermark_source_image_width,
watermark_source_image_height,
) in WATERMARK_IMAGE_SIZES:
for w_ratio, h_ratio in RATIOS:
max_width = (
source_image_width * (float(w_ratio) / 100)
if w_ratio
else float("inf")
)
max_height = (
source_image_height * (float(h_ratio) / 100)
if h_ratio
else float("inf")
)
w_ratio = float(w_ratio) / 100.0 if w_ratio else False
h_ratio = float(h_ratio) / 100.0 if h_ratio else False
ratio = (
float(watermark_source_image_width)
/ watermark_source_image_height
)
(
watermark_image_width,
watermark_image_height,
) = filter_instance.calc_watermark_size(
(source_image_width, source_image_height),
(
watermark_source_image_width,
watermark_source_image_height,
),
w_ratio,
h_ratio,
)
watermark_image = (
float(watermark_image_width) / watermark_image_height
)
test = {
"source_image_width": source_image_width,
"source_image_height": source_image_height,
"watermark_source_image_width": watermark_source_image_width,
"watermark_source_image_height": watermark_source_image_height,
"watermark_image_width": watermark_image_width,
"watermark_image_height": watermark_image_height,
"w_ratio": w_ratio,
"h_ratio": h_ratio,
}
test["topic_name"] = "watermark_image_width"
expect(watermark_image_width).to_fit_into(
max_width, **test
)
test["topic_name"] = "watermark_image_height"
expect(watermark_image_height).to_fit_into(
max_height, **test
)
test["topic_name"] = "fill out"
expect(
(
watermark_image_width == max_width
or watermark_image_height == max_height
)
).to_be_true_with_additional_info(**test)
test["topic_name"] = "image ratio"
expect(watermark_image).to_almost_equal(ratio, 2, **test)
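# Added worked example of the ratio arithmetic exercised in
# test_watermark_filter_calculated_resizing above (illustrative numbers only):
#   source = 800x600, w_ratio = 50, h_ratio = None
#   max_width  = 800 * (50 / 100.0) = 400.0
#   max_height = float('inf')
# calc_watermark_size then scales the watermark, preserving its own aspect
# ratio, until its width reaches 400; the height stays unconstrained.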
|
thumbor/thumbor
|
tests/filters/test_watermark.py
|
Python
|
mit
| 10,644 | 0.000188 |
# -*- coding: utf-8 -*-
"""
CommandLine:
python -m ibeis.gui.inspect_gui --test-test_review_widget --show
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from functools import partial
from ibeis.viz import viz_helpers as vh
import guitool_ibeis as gt
import numpy as np
import utool as ut
(print, rrr, profile) = ut.inject2(__name__, '[id_review_api]')
MATCHED_STATUS_TEXT = 'Matched'
REVIEWED_STATUS_TEXT = 'Reviewed'
REVIEW_CFG_DEFAULTS = {
'ranks_top': 5,
'directed': False,
'name_scoring': True,
'filter_reviewed': True,
'filter_photobombs': True,
'filter_true_matches': True,
'show_chips': True,
'filter_duplicate_true_matches': False,
}
@profile
def get_review_edges(cm_list, ibs=None, review_cfg={}):
r"""
Needs to be moved to a better file. Maybe something to do with
identification.
Returns a list of matches that should be inspected
This function is more lightweight than orgres or allres.
Used in id_review_api and interact_qres2
Args:
cm_list (list): list of chip match objects
ranks_top (int): put all ranks less than this number into the graph
directed (bool):
Returns:
tuple: review_edges = (qaid_arr, daid_arr, score_arr, rank_arr)
CommandLine:
python -m ibeis.gui.id_review_api get_review_edges:0
Example0:
>>> # ENABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> ibs = ibeis.opendb('PZ_MTEST')
>>> qreq_ = ibeis.main_helpers.testdata_qreq_()
>>> cm_list = qreq_.execute()
>>> review_cfg = dict(ranks_top=5, directed=True, name_scoring=False,
>>> filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, ibs=ibs, review_cfg=review_cfg)
>>> print(review_edges)
Example1:
>>> # UNSTABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm_list, qreq_ = ibeis.testdata_cmlist('PZ_MTEST', a='default:qsize=5,dsize=20')
>>> review_cfg = dict(ranks_top=5, directed=True, name_scoring=False,
>>> filter_reviewed=False, filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, review_cfg=review_cfg, ibs=ibs)
>>> print(review_edges)
Example3:
>>> # UNSTABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm_list, qreq_ = ibeis.testdata_cmlist('PZ_MTEST', a='default:qsize=1,dsize=100')
>>> review_cfg = dict(ranks_top=1, directed=False, name_scoring=False,
>>> filter_reviewed=False, filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, review_cfg=review_cfg, ibs=ibs)
>>> print(review_edges)
Example4:
>>> # UNSTABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm_list, qreq_ = ibeis.testdata_cmlist('PZ_MTEST', a='default:qsize=10,dsize=10')
>>> ranks_top = 3
>>> review_cfg = dict(ranks_top=3, directed=False, name_scoring=False,
>>> filter_reviewed=False, filter_true_matches=True)
>>> review_edges = get_review_edges(cm_list, review_cfg=review_cfg, ibs=ibs)
>>> print(review_edges)
"""
import vtool_ibeis as vt
from ibeis.algo.hots import chip_match
automatch_kw = REVIEW_CFG_DEFAULTS.copy()
automatch_kw = ut.update_existing(automatch_kw, review_cfg)
print('[resorg] get_review_edges(%s)' % (ut.repr2(automatch_kw)))
print('[resorg] len(cm_list) = %d' % (len(cm_list)))
qaids_stack = []
daids_stack = []
ranks_stack = []
scores_stack = []
# For each QueryResult, Extract inspectable candidate matches
if isinstance(cm_list, dict):
cm_list = list(cm_list.values())
if len(cm_list) == 0:
return ([], [], [], [])
for cm in cm_list:
if isinstance(cm, chip_match.ChipMatch):
daids = cm.get_top_aids(ntop=automatch_kw['ranks_top'])
scores = cm.get_top_scores(ntop=automatch_kw['ranks_top'])
ranks = np.arange(len(daids))
qaids = np.full(daids.shape, cm.qaid, dtype=daids.dtype)
else:
(qaids, daids, scores, ranks) = cm.get_match_tbldata(
ranks_top=automatch_kw['ranks_top'],
name_scoring=automatch_kw['name_scoring'],
ibs=ibs)
qaids_stack.append(qaids)
daids_stack.append(daids)
scores_stack.append(scores)
ranks_stack.append(ranks)
# Stack them into a giant array
qaid_arr = np.hstack(qaids_stack)
daid_arr = np.hstack(daids_stack)
score_arr = np.hstack(scores_stack)
rank_arr = np.hstack(ranks_stack)
# Sort by scores
sortx = score_arr.argsort()[::-1]
qaid_arr = qaid_arr[sortx]
daid_arr = daid_arr[sortx]
score_arr = score_arr[sortx]
rank_arr = rank_arr[sortx]
# IS_REVIEWED DOES NOT WORK
if automatch_kw['filter_reviewed']:
_is_reviewed = ibs.get_annot_pair_is_reviewed(qaid_arr.tolist(),
daid_arr.tolist())
is_unreviewed = ~np.array(_is_reviewed, dtype=np.bool)
qaid_arr = qaid_arr.compress(is_unreviewed)
daid_arr = daid_arr.compress(is_unreviewed)
score_arr = score_arr.compress(is_unreviewed)
rank_arr = rank_arr.compress(is_unreviewed)
# Remove directed edges
if not automatch_kw['directed']:
#nodes = np.unique(directed_edges.flatten())
directed_edges = np.vstack((qaid_arr, daid_arr)).T
#idx1, idx2 = vt.intersect2d_indices(directed_edges, directed_edges[:, ::-1])
unique_rowx = vt.find_best_undirected_edge_indexes(directed_edges,
score_arr)
qaid_arr = qaid_arr.take(unique_rowx)
daid_arr = daid_arr.take(unique_rowx)
score_arr = score_arr.take(unique_rowx)
rank_arr = rank_arr.take(unique_rowx)
# Filter Double Name Matches
if automatch_kw['filter_duplicate_true_matches']:
# filter_dup_namepairs
qnid_arr = ibs.get_annot_nids(qaid_arr)
dnid_arr = ibs.get_annot_nids(daid_arr)
if not automatch_kw['directed']:
directed_name_edges = np.vstack((qnid_arr, dnid_arr)).T
unique_rowx2 = vt.find_best_undirected_edge_indexes(
directed_name_edges, score_arr)
else:
namepair_id_list = np.array(vt.compute_unique_data_ids_(
list(zip(qnid_arr, dnid_arr))))
unique_namepair_ids, namepair_groupxs = vt.group_indices(namepair_id_list)
score_namepair_groups = vt.apply_grouping(score_arr, namepair_groupxs)
unique_rowx2 = np.array(sorted([
groupx[score_group.argmax()]
for groupx, score_group in zip(namepair_groupxs, score_namepair_groups)
]), dtype=np.int32)
qaid_arr = qaid_arr.take(unique_rowx2)
daid_arr = daid_arr.take(unique_rowx2)
score_arr = score_arr.take(unique_rowx2)
rank_arr = rank_arr.take(unique_rowx2)
# Filter all true matches
if automatch_kw['filter_true_matches']:
qnid_arr = ibs.get_annot_nids(qaid_arr)
dnid_arr = ibs.get_annot_nids(daid_arr)
valid_flags = qnid_arr != dnid_arr
qaid_arr = qaid_arr.compress(valid_flags)
daid_arr = daid_arr.compress(valid_flags)
score_arr = score_arr.compress(valid_flags)
rank_arr = rank_arr.compress(valid_flags)
if automatch_kw['filter_photobombs']:
unique_aids = ut.unique(ut.flatten([qaid_arr, daid_arr]))
#grouped_aids, unique_nids = ibs.group_annots_by_name(unique_aids)
invalid_nid_map = get_photobomber_map(ibs, qaid_arr)
nid2_aids = ut.group_items(unique_aids, ibs.get_annot_nids(unique_aids))
expanded_aid_map = ut.ddict(set)
for nid1, other_nids in invalid_nid_map.items():
for aid1 in nid2_aids[nid1]:
for nid2 in other_nids:
for aid2 in nid2_aids[nid2]:
expanded_aid_map[aid1].add(aid2)
expanded_aid_map[aid2].add(aid1)
valid_flags = [daid not in expanded_aid_map[qaid]
for qaid, daid in zip(qaid_arr, daid_arr)]
qaid_arr = qaid_arr.compress(valid_flags)
daid_arr = daid_arr.compress(valid_flags)
score_arr = score_arr.compress(valid_flags)
rank_arr = rank_arr.compress(valid_flags)
review_edges = (qaid_arr, daid_arr, score_arr, rank_arr)
return review_edges
def make_review_api(ibs, cm_list, review_cfg, qreq_=None):
"""
Builds columns which are displayable in a ColumnListTableWidget
CommandLine:
python -m ibeis.gui.id_review_api --test-test_review_widget --show
python -m ibeis.gui.id_review_api --test-make_review_api
Example:
>>> # ENABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> import guitool_ibeis as gt
>>> from ibeis.gui import id_review_api
>>> cm_list, qreq_ = ibeis.main_helpers.testdata_cmlist()
>>> tblname = 'chipmatch'
>>> name_scoring = False
>>> ranks_top = 5
>>> review_cfg = dict(ranks_top=ranks_top, name_scoring=name_scoring)
>>> review_api = make_review_api(qreq_.ibs, cm_list, review_cfg, qreq_=qreq_)
>>> print('review_api = %r' % (review_api,))
"""
# TODO: Add in timedelta to column info
if ut.VERBOSE:
print('[inspect] make_review_api')
review_edges = get_review_edges(cm_list, ibs=ibs, review_cfg=review_cfg)
# Get extra info
(qaids, daids, scores, ranks) = review_edges
RES_THUMB_TEXT = 'ResThumb' # NOQA
QUERY_THUMB_TEXT = 'querythumb'
MATCH_THUMB_TEXT = 'MatchThumb'
col_name_list = [
'result_index',
'score',
REVIEWED_STATUS_TEXT,
]
if review_cfg.get('show_chips', True):
col_name_list += [
MATCHED_STATUS_TEXT,
QUERY_THUMB_TEXT,
]
col_name_list += [
RES_THUMB_TEXT,
'qaid',
'aid',
'rank',
'timedelta',
'dnGt',
'qnGt',
'tags',
'qname',
'name',
]
col_types_dict = dict([
('qaid', int),
('aid', int),
('dnGt', int),
('qnGt', int),
('timedelta', float),
#('review', 'BUTTON'),
(MATCHED_STATUS_TEXT, str),
(REVIEWED_STATUS_TEXT, str),
(QUERY_THUMB_TEXT, 'PIXMAP'),
(RES_THUMB_TEXT, 'PIXMAP'),
('qname', str),
('name', str),
('score', float),
('rank', int),
('truth', bool),
('opt', int),
('result_index', int),
])
timedelta_list = np.array(ut.take_column(ibs.get_unflat_annots_timedelta_list(list(zip(qaids, daids))), 0))
# TODO: make a display role
#timediff_list = [ut.get_posix_timedelta_str(t, year=True, approx=True) for t in (timedelta_list * 60 * 60)]
def get_pair_tags(edge):
aid1, aid2 = edge
assert not ut.isiterable(aid1), 'aid1=%r, aid2=%r' % (aid1, aid2)
assert not ut.isiterable(aid2), 'aid1=%r, aid2=%r' % (aid1, aid2)
am_rowids = ibs.get_annotmatch_rowid_from_undirected_superkey(
[aid1], [aid2])
tag_text = ibs.get_annotmatch_tag_text(am_rowids)[0]
if tag_text is None:
tag_text = ''
return str(tag_text)
col_getter_dict = dict([
('qaid', np.array(qaids)),
('aid', np.array(daids)),
('dnGt', ibs.get_annot_num_groundtruth),
('qnGt', ibs.get_annot_num_groundtruth),
('timedelta', np.array(timedelta_list)),
#('review', lambda rowid: get_buttontup),
(MATCHED_STATUS_TEXT, partial(get_match_status, ibs)),
(REVIEWED_STATUS_TEXT, partial(get_reviewed_status, ibs)),
(QUERY_THUMB_TEXT, ibs.get_annot_chip_thumbtup),
(RES_THUMB_TEXT, ibs.get_annot_chip_thumbtup),
('qname', ibs.get_annot_names),
('name', ibs.get_annot_names),
('score', np.array(scores)),
('rank', np.array(ranks)),
('result_index', np.arange(len(ranks))),
('tags', get_pair_tags),
#lambda aid_pair: ibs.get_annotmatch_tag_text(ibs.get_annotmatch_rowid_from_undirected_superkey(ut.ensure_iterable(aid_pair[0]), ut.ensure_iterable(aid_pair[1])))[0]),
#('truth', truths),
#('opt', opts),
])
# default is 100
col_width_dict = {
'score': 75,
REVIEWED_STATUS_TEXT: 75,
MATCHED_STATUS_TEXT: 75,
'rank': 42,
'qaid': 42,
'aid': 42,
'result_index': 42,
'qname': 60,
'name': 60,
'dnGt': 50,
'timedelta': 75,
'tags': 75,
'qnGt': 50,
}
USE_MATCH_THUMBS = 1
if USE_MATCH_THUMBS:
def get_match_thumbtup(ibs, qaid2_cm, qaids, daids, index, qreq_=None,
thumbsize=(128, 128), match_thumbtup_cache={}):
daid = daids[index]
qaid = qaids[index]
cm = qaid2_cm[qaid]
            assert cm.qaid == qaid, 'aids do not agree'
OLD = False
if OLD:
fpath = ensure_match_img(ibs, cm, daid, qreq_=qreq_,
match_thumbtup_cache=match_thumbtup_cache)
if isinstance(thumbsize, int):
thumbsize = (thumbsize, thumbsize)
thumbtup = (ut.augpath(fpath, 'thumb_%d,%d' % thumbsize), fpath, thumbsize,
[], [])
return thumbtup
else:
# Hacky new way of drawing
fpath, func, func2 = make_ensure_match_img_nosql_func(qreq_, cm, daid)
#match_thumbdir = ibs.get_match_thumbdir()
#match_thumb_fname = get_match_thumb_fname(cm, daid, qreq_)
#fpath = ut.unixjoin(match_thumbdir, match_thumb_fname)
thumbdat = {
'fpath': fpath,
'thread_func': func,
'main_func': func2,
#'args': (ibs, cm, daid),
#'kwargs': dict(qreq_=qreq_,
# match_thumbtup_cache=match_thumbtup_cache)
}
return thumbdat
col_name_list.insert(col_name_list.index('qaid'),
MATCH_THUMB_TEXT)
col_types_dict[MATCH_THUMB_TEXT] = 'PIXMAP'
#col_types_dict[MATCH_THUMB_TEXT] = CustomMatchThumbDelegate
qaid2_cm = {cm.qaid: cm for cm in cm_list}
get_match_thumbtup_ = partial(get_match_thumbtup, ibs, qaid2_cm,
qaids, daids, qreq_=qreq_,
match_thumbtup_cache={})
col_getter_dict[MATCH_THUMB_TEXT] = get_match_thumbtup_
col_bgrole_dict = {
MATCHED_STATUS_TEXT : partial(get_match_status_bgrole, ibs),
REVIEWED_STATUS_TEXT: partial(get_reviewed_status_bgrole, ibs),
}
# TODO: remove ider dict.
# it is massively unuseful
col_ider_dict = {
MATCHED_STATUS_TEXT : ('qaid', 'aid'),
REVIEWED_STATUS_TEXT : ('qaid', 'aid'),
'tags' : ('qaid', 'aid'),
QUERY_THUMB_TEXT : ('qaid'),
RES_THUMB_TEXT : ('aid'),
'dnGt' : ('aid'),
'qnGt' : ('qaid'),
'qname' : ('qaid'),
'name' : ('aid'),
}
col_setter_dict = {
'qname': ibs.set_annot_names,
'name': ibs.set_annot_names
}
editable_colnames = ['truth', 'notes', 'qname', 'name', 'opt']
sortby = 'score'
def get_thumb_size():
return ibs.cfg.other_cfg.thumb_size
col_display_role_func_dict = {
'timedelta': ut.partial(ut.get_posix_timedelta_str, year=True, approx=2),
}
if not review_cfg.get('show_chips', True):
del col_getter_dict[QUERY_THUMB_TEXT]
del col_getter_dict[RES_THUMB_TEXT]
del col_types_dict[RES_THUMB_TEXT]
del col_types_dict[QUERY_THUMB_TEXT]
del col_ider_dict[RES_THUMB_TEXT]
del col_ider_dict[QUERY_THUMB_TEXT]
# del col_bgrole_dict[RES_THUMB_TEXT]
# del col_bgrole_dict[QUERY_THUMB_TEXT]
# Insert info into dict
review_api = gt.CustomAPI(
col_name_list=col_name_list,
col_types_dict=col_types_dict,
col_getter_dict=col_getter_dict,
col_bgrole_dict=col_bgrole_dict,
col_ider_dict=col_ider_dict,
col_setter_dict=col_setter_dict,
editable_colnames=editable_colnames,
col_display_role_func_dict=col_display_role_func_dict,
sortby=sortby,
get_thumb_size=get_thumb_size,
sort_reverse=True,
col_width_dict=col_width_dict)
#review_api.review_edges = review_edges
return review_api
def get_match_status(ibs, aid_pair):
""" Data role for status column """
aid1, aid2 = aid_pair
assert not ut.isiterable(aid1), 'aid1=%r, aid2=%r' % (aid1, aid2)
assert not ut.isiterable(aid2), 'aid1=%r, aid2=%r' % (aid1, aid2)
text = ibs.get_match_text(aid1, aid2)
if text is None:
raise AssertionError('impossible state id_review_api')
return text
def get_reviewed_status(ibs, aid_pair):
""" Data role for status column """
aid1, aid2 = aid_pair
assert not ut.isiterable(aid1), 'aid1=%r, aid2=%r' % (aid1, aid2)
assert not ut.isiterable(aid2), 'aid1=%r, aid2=%r' % (aid1, aid2)
# FIXME: use new api
state = ibs.get_annot_pair_is_reviewed([aid1], [aid2])[0]
state_to_text = {
None: 'Unreviewed',
2: 'Auto-reviewed',
1: 'User-reviewed',
}
default = '??? unknown mode %r' % (state,)
text = state_to_text.get(state, default)
return text
def get_match_status_bgrole(ibs, aid_pair):
""" Background role for status column """
aid1, aid2 = aid_pair
truth = ibs.get_match_truth(aid1, aid2)
#print('get status bgrole: %r truth=%r' % (aid_pair, truth))
truth_color = vh.get_truth_color(truth, base255=True, lighten_amount=0.35)
return truth_color
def get_reviewed_status_bgrole(ibs, aid_pair):
""" Background role for status column """
aid1, aid2 = aid_pair
truth = ibs.get_match_truth(aid1, aid2)
annotmach_reviewed = ibs.get_annot_pair_is_reviewed([aid1], [aid2])[0]
if annotmach_reviewed == 0 or annotmach_reviewed is None:
lighten_amount = .9
elif annotmach_reviewed == 2:
lighten_amount = .7
else:
lighten_amount = .35
truth_color = vh.get_truth_color(truth, base255=True,
lighten_amount=lighten_amount)
#truth = ibs.get_match_truth(aid1, aid2)
#print('get status bgrole: %r truth=%r' % (aid_pair, truth))
#truth_color = vh.get_truth_color(truth, base255=True, lighten_amount=0.35)
return truth_color
def get_match_thumb_fname(cm, daid, qreq_, view_orientation='vertical',
draw_matches=True):
"""
CommandLine:
python -m ibeis.gui.id_review_api --exec-get_match_thumb_fname
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> cm, qreq_ = ibeis.testdata_cm('PZ_MTEST')
>>> thumbsize = (128, 128)
>>> daid = cm.get_top_aids()[0]
>>> match_thumb_fname = get_match_thumb_fname(cm, daid, qreq_)
>>> result = match_thumb_fname
>>> print(result)
match_aids=1,1_cfgstr=ubpzwu5k54h6xbnr.jpg
"""
# Make thumbnail name
config_hash = ut.hashstr27(qreq_.get_cfgstr())
qaid = cm.qaid
args = (qaid, daid, config_hash, draw_matches, view_orientation, )
match_thumb_fname = 'match_aids=%d,%d_cfgstr=%s_draw=%s_orientation=%s.jpg' % args
return match_thumb_fname
def ensure_match_img(ibs, cm, daid, qreq_=None, match_thumbtup_cache={}):
r"""
CommandLine:
python -m ibeis.gui.id_review_api --test-ensure_match_img --show
Example:
>>> # ENABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> # build test data
>>> cm, qreq_ = ibeis.testdata_cm()
>>> daid = cm.get_top_aids()[0]
>>> match_thumbtup_cache = {}
>>> # execute function
>>> match_thumb_fpath_ = ensure_match_img(qreq_.ibs, cm, daid, qreq_,
>>> match_thumbtup_cache)
>>> # verify results
>>> result = str(match_thumb_fpath_)
>>> print(result)
>>> ut.quit_if_noshow()
>>> ut.startfile(match_thumb_fpath_, quote=True)
"""
#from os.path import exists
match_thumbdir = ibs.get_match_thumbdir()
match_thumb_fname = get_match_thumb_fname(cm, daid, qreq_)
match_thumb_fpath_ = ut.unixjoin(match_thumbdir, match_thumb_fname)
#if exists(match_thumb_fpath_):
# return match_thumb_fpath_
if match_thumb_fpath_ in match_thumbtup_cache:
fpath = match_thumbtup_cache[match_thumb_fpath_]
else:
# TODO: just draw the image at the correct thumbnail size
# TODO: draw without matplotlib?
#with ut.Timer('render-1'):
fpath = cm.imwrite_single_annotmatch(
qreq_, daid, fpath=match_thumb_fpath_, saveax=True, fnum=32,
notitle=True, verbose=False)
#with ut.Timer('render-2'):
# img = cm.render_single_annotmatch(qreq_, daid, fnum=32, notitle=True, dpi=30)
# cv2.imwrite(match_thumb_fpath_, img)
# fpath = match_thumb_fpath_
#with ut.Timer('render-3'):
#fpath = match_thumb_fpath_
#render_config = {
# 'dpi' : 60,
# 'draw_fmatches' : True,
# #'vert' : view_orientation == 'vertical',
# 'show_aidstr' : False,
# 'show_name' : False,
# 'show_exemplar' : False,
# 'show_num_gt' : False,
# 'show_timedelta' : False,
# 'show_name_rank' : False,
# 'show_score' : False,
# 'show_annot_score' : False,
# 'show_name_score' : False,
# 'draw_lbl' : False,
# 'draw_border' : False,
#}
#cm.imwrite_single_annotmatch2(qreq_, daid, fpath, fnum=32, notitle=True, **render_config)
#print('fpath = %r' % (fpath,))
match_thumbtup_cache[match_thumb_fpath_] = fpath
return fpath
def make_ensure_match_img_nosql_func(qreq_, cm, daid):
r"""
CommandLine:
python -m ibeis.gui.id_review_api --test-ensure_match_img --show
Example:
>>> # ENABLE_DOCTEST
>>> from ibeis.gui.id_review_api import * # NOQA
>>> import ibeis
>>> # build test data
>>> cm, qreq_ = ibeis.testdata_cm()
>>> ibs = qreq_.ibs
>>> daid = cm.get_top_aids()[0]
>>> match_thumbtup_cache = {}
>>> # execute function
>>> match_thumb_fpath_ = ensure_match_img(qreq_.ibs, cm, daid, qreq_, match_thumbtup_cache)
>>> # verify results
>>> result = str(match_thumb_fpath_)
>>> print(result)
>>> ut.quit_if_noshow()
>>> ut.startfile(match_thumb_fpath_, quote=True)
"""
#import ibeis.viz
from ibeis.viz import viz_matches
import cv2
import io
import plottool_ibeis as pt
import vtool_ibeis as vt
import matplotlib as mpl
if cm.__class__.__name__ == 'PairwiseMatch':
# HACK DO THIS THE VTOOL WAY
match = cm
ibs = qreq_ # VERY HACK
match_thumbdir = ibs.get_match_thumbdir()
cfgstr = hash(match.config) # HACK only works if config is already a hashdict
match_thumb_fname = 'tmpmatch-%d-%d-%s.jpg' % (match.annot1['aid'], match.annot2['aid'], cfgstr)
fpath = ut.unixjoin(match_thumbdir, match_thumb_fname)
def main_thread_load2():
rchip1, kpts1 = ut.dict_take(match.annot1, ['rchip', 'kpts'])
rchip2, kpts2 = ut.dict_take(match.annot2, ['rchip', 'kpts'])
return (match,)
def nosql_draw2(check_func, match):
from matplotlib.backends.backend_agg import FigureCanvas
try:
from matplotlib.backends.backend_agg import Figure
except ImportError:
from matplotlib.figure import Figure
was_interactive = mpl.is_interactive()
if was_interactive:
mpl.interactive(False)
#fnum = 32
fig = Figure()
canvas = FigureCanvas(fig) # NOQA
#fig.clf()
ax = fig.add_subplot(1, 1, 1)
if check_func is not None and check_func():
return
ax, xywh1, xywh2 = match.show(ax=ax)
if check_func is not None and check_func():
return
savekw = {
# 'dpi' : 60,
'dpi' : 80,
}
axes_extents = pt.extract_axes_extents(fig)
#assert len(axes_extents) == 1, 'more than one axes'
extent = axes_extents[0]
with io.BytesIO() as stream:
# This call takes 23% - 15% of the time depending on settings
fig.savefig(stream, bbox_inches=extent, **savekw)
stream.seek(0)
data = np.fromstring(stream.getvalue(), dtype=np.uint8)
if check_func is not None and check_func():
return
pt.plt.close(fig)
image = cv2.imdecode(data, 1)
thumbsize = 221
max_dsize = (thumbsize, thumbsize)
dsize, sx, sy = vt.resized_clamped_thumb_dims(vt.get_size(image), max_dsize)
if check_func is not None and check_func():
return
image = vt.resize(image, dsize)
vt.imwrite(fpath, image)
if check_func is not None and check_func():
return
#fig.savefig(fpath, bbox_inches=extent, **savekw)
#match_thumbtup_cache[match_thumb_fpath_] = fpath
return fpath, nosql_draw2, main_thread_load2
aid1 = cm.qaid
aid2 = daid
ibs = qreq_.ibs
resize_factor = .5
match_thumbdir = ibs.get_match_thumbdir()
match_thumb_fname = get_match_thumb_fname(cm, daid, qreq_)
fpath = ut.unixjoin(match_thumbdir, match_thumb_fname)
def main_thread_load():
# This gets executed in the main thread and collects data
# from sql
rchip1_fpath, rchip2_fpath, kpts1, kpts2 = viz_matches._get_annot_pair_info(
ibs, aid1, aid2, qreq_, draw_fmatches=True, as_fpath=True)
return rchip1_fpath, rchip2_fpath, kpts1, kpts2
def nosql_draw(check_func, rchip1_fpath, rchip2_fpath, kpts1, kpts2):
# This gets executed in the child thread and does drawing async style
#from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas
#from matplotlib.backends.backend_pdf import Figure
#from matplotlib.backends.backend_svg import FigureCanvas
#from matplotlib.backends.backend_svg import Figure
from matplotlib.backends.backend_agg import FigureCanvas
try:
from matplotlib.backends.backend_agg import Figure
except ImportError:
from matplotlib.figure import Figure
kpts1_ = vt.offset_kpts(kpts1, (0, 0), (resize_factor, resize_factor))
kpts2_ = vt.offset_kpts(kpts2, (0, 0), (resize_factor, resize_factor))
#from matplotlib.figure import Figure
if check_func is not None and check_func():
return
rchip1 = vt.imread(rchip1_fpath)
rchip1 = vt.resize_image_by_scale(rchip1, resize_factor)
if check_func is not None and check_func():
return
rchip2 = vt.imread(rchip2_fpath)
rchip2 = vt.resize_image_by_scale(rchip2, resize_factor)
if check_func is not None and check_func():
return
try:
idx = cm.daid2_idx[daid]
fm = cm.fm_list[idx]
fsv = None if cm.fsv_list is None else cm.fsv_list[idx]
fs = None if fsv is None else fsv.prod(axis=1)
except KeyError:
fm = []
fs = None
fsv = None
maxnum = 200
if fs is not None and len(fs) > maxnum:
# HACK TO ONLY SHOW TOP MATCHES
sortx = fs.argsort()[::-1]
fm = fm.take(sortx[:maxnum], axis=0)
fs = fs.take(sortx[:maxnum], axis=0)
was_interactive = mpl.is_interactive()
if was_interactive:
mpl.interactive(False)
#fnum = 32
fig = Figure()
canvas = FigureCanvas(fig) # NOQA
#fig.clf()
ax = fig.add_subplot(1, 1, 1)
if check_func is not None and check_func():
return
#fig = pt.plt.figure(fnum)
#H1 = np.eye(3)
#H2 = np.eye(3)
#H1[0, 0] = .5
#H1[1, 1] = .5
#H2[0, 0] = .5
#H2[1, 1] = .5
ax, xywh1, xywh2 = pt.show_chipmatch2(rchip1, rchip2, kpts1_, kpts2_, fm,
fs=fs, colorbar_=False, ax=ax)
if check_func is not None and check_func():
return
savekw = {
# 'dpi' : 60,
'dpi' : 80,
}
axes_extents = pt.extract_axes_extents(fig)
#assert len(axes_extents) == 1, 'more than one axes'
extent = axes_extents[0]
with io.BytesIO() as stream:
# This call takes 23% - 15% of the time depending on settings
fig.savefig(stream, bbox_inches=extent, **savekw)
stream.seek(0)
data = np.fromstring(stream.getvalue(), dtype=np.uint8)
if check_func is not None and check_func():
return
pt.plt.close(fig)
image = cv2.imdecode(data, 1)
thumbsize = 221
max_dsize = (thumbsize, thumbsize)
dsize, sx, sy = vt.resized_clamped_thumb_dims(vt.get_size(image), max_dsize)
if check_func is not None and check_func():
return
image = vt.resize(image, dsize)
vt.imwrite(fpath, image)
if check_func is not None and check_func():
return
#fig.savefig(fpath, bbox_inches=extent, **savekw)
#match_thumbtup_cache[match_thumb_fpath_] = fpath
return fpath, nosql_draw, main_thread_load
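# Added note: callers of the triple returned above are expected to split the
# work across threads (sketch only; the actual GUI plumbing is elsewhere):
#   fpath, draw_fn, load_fn = make_ensure_match_img_nosql_func(qreq_, cm, daid)
#   loaded = load_fn()        # main thread: the only part that touches SQL
#   draw_fn(None, *loaded)    # worker thread: pure drawing, no SQL access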
def get_photobomber_map(ibs, aids, aid_to_nid=None):
"""
Builds map of which names that photobomb other names.
python -m ibeis.gui.id_review_api --test-test_review_widget --show --db PZ_MTEST -a default:qindex=0
>>> import ibeis
>>> dbdir = ut.truepath('~/lev/media/danger/GGR/GGR-IBEIS')
>>> ibs = ibeis.opendb(dbdir='/home/joncrall/lev/media/danger/GGR/GGR-IBEIS')
>>> filter_kw = {
>>> 'multiple': False,
>>> 'minqual': 'good',
>>> 'is_known': True,
>>> 'min_pername': 2,
>>> 'view': ['right'],
>>> }
>>> aids = ibs.filter_annots_general(ibs.get_valid_aids(), filter_kw=filter_kw)
"""
ams_list = ibs.get_annotmatch_rowids_from_aid(aids)
flags_list = ibs.unflat_map(ut.partial(ibs.get_annotmatch_prop, 'Photobomb'), ams_list)
pb_ams = ut.zipcompress(ams_list, flags_list)
has_pb_ams = [len(ams) > 0 for ams in pb_ams]
pb_ams_ = ut.compress(pb_ams, has_pb_ams)
#aids_ = ut.compress(aids, has_pb_ams)
pb_ams_flat = ut.flatten(pb_ams_)
pb_aids1_ = ibs.get_annotmatch_aid1(pb_ams_flat)
pb_aids2_ = ibs.get_annotmatch_aid2(pb_ams_flat)
pb_aid_pairs_ = list(zip(pb_aids1_, pb_aids2_))
if aid_to_nid is None:
pb_nid_pairs_ = ibs.unflat_map(ibs.get_annot_nids, pb_aid_pairs_)
else:
pb_nid_pairs_ = ibs.unflat_map(ut.partial(ut.take, aid_to_nid), pb_aid_pairs_)
#invalid_aid_map = ut.ddict(set)
#for aid1, aid2 in pb_aid_pairs_:
# if aid1 != aid2:
# invalid_aid_map[aid1].add(aid2)
# invalid_aid_map[aid2].add(aid1)
invalid_nid_map = ut.ddict(set)
for nid1, nid2 in pb_nid_pairs_:
if nid1 != nid2:
invalid_nid_map[nid1].add(nid2)
invalid_nid_map[nid2].add(nid1)
return invalid_nid_map
|
Erotemic/ibeis
|
ibeis/gui/id_review_api.py
|
Python
|
apache-2.0
| 32,750 | 0.003908 |
import matplotlib.pyplot as plt
import numpy as np
import scalpplot
from scalpplot import plot_scalp
from positions import POS_10_5
from scipy import signal
def plot_timeseries(frames, time=None, offset=None, color='k', linestyle='-'):
frames = np.asarray(frames)
  if offset is None:
    offset = np.max(np.std(frames, axis=0)) * 3
  if time is None:
    time = np.arange(frames.shape[0])
plt.plot(time, frames - np.mean(frames, axis=0) +
np.arange(frames.shape[1]) * offset, color=color, ls=linestyle)
def plot_scalpgrid(scalps, sensors, locs=POS_10_5, width=None,
clim=None, cmap=None, titles=None):
'''
Plots a grid with scalpplots. Scalps contains the different scalps in the
rows, sensors contains the names for the columns of scalps, locs is a dict
that maps the sensor-names to locations.
Width determines the width of the grid that contains the plots. Cmap selects
a colormap, for example plt.cm.RdBu_r is very useful for AUC-ROC plots.
  Clim is a list containing the minimum and maximum value mapped to a color.
Titles is an optional list with titles for each subplot.
Returns a list with subplots for further manipulation.
'''
scalps = np.asarray(scalps)
assert scalps.ndim == 2
nscalps = scalps.shape[0]
subplots = []
if not width:
width = int(min(8, np.ceil(np.sqrt(nscalps))))
height = int(np.ceil(nscalps/float(width)))
if not clim:
clim = [np.min(scalps), np.max(scalps)]
plt.clf()
for i in range(nscalps):
subplots.append(plt.subplot(height, width, i + 1))
plot_scalp(scalps[i], sensors, locs, clim=clim, cmap=cmap)
if titles:
plt.title(titles[i])
# plot colorbar next to last scalp
bb = plt.gca().get_position()
plt.colorbar(cax=plt.axes([bb.xmax + bb.width/10, bb.ymin, bb.width/10,
bb.height]), ticks=np.linspace(clim[0], clim[1], 5).round(2))
return subplots
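# Added usage sketch (assumed shapes follow the docstring above: one scalp per
# row, one sensor per column):
#   sensors = ['Fz', 'Cz', 'Pz', 'Oz']
#   scalps = np.random.randn(4, len(sensors))
#   plot_scalpgrid(scalps, sensors, titles=['s%d' % i for i in range(4)])
#   plt.show()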
|
breuderink/psychic
|
psychic/plots.py
|
Python
|
bsd-3-clause
| 1,878 | 0.014377 |
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""Spreadsheet Package for VisTrails
"""
from __future__ import division
import copy
import os
from PyQt4 import QtCore, QtGui
import sys
from vistrails.core import debug
from vistrails.core.modules import basic_modules
from vistrails.core.modules.module_registry import get_module_registry
from vistrails.core.modules.utils import create_descriptor_string
from vistrails.core.system import vistrails_root_directory
from vistrails.core.upgradeworkflow import UpgradeWorkflowHandler, \
UpgradePackageRemap, UpgradeModuleRemap
from .spreadsheet_controller import spreadsheetController
from .spreadsheet_registry import spreadsheetRegistry
# This must be here because of VisTrails protocol
basicWidgets = None
def importReturnLast(name):
""" importReturnLast(name: str) -> package
Import a package whose name is specified in name and return right-most
package on the package name
"""
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def addWidget(packagePath):
""" addWidget(packagePath: str) -> package
Add a new widget type to the spreadsheet registry supplying a
basic set of spreadsheet widgets
"""
try:
registry = get_module_registry()
widget = importReturnLast(packagePath)
if hasattr(widget, 'widgetName'):
widgetName = widget.widgetName()
else:
widgetName = packagePath
widget.registerWidget(registry, basic_modules, basicWidgets)
spreadsheetRegistry.registerPackage(widget, packagePath)
debug.log(' ==> Successfully import <%s>' % widgetName)
except Exception, e:
debug.log(' ==> Ignored package <%s>' % packagePath, e)
widget = None
return widget
def importWidgetModules(basicWidgets):
""" importWidgetModules(basicWidgets: widget) -> None
Find all widget package under ./widgets/* to add to the spreadsheet registry
"""
packageName = __name__.lower().endswith('.init') and \
__name__[:-5] or __name__
widgetDir = os.path.join(
os.path.join(os.path.dirname(vistrails_root_directory()),
*packageName.split('.')),
'widgets')
candidates = os.listdir(widgetDir)
for folder in candidates:
if os.path.isdir(os.path.join(widgetDir, folder)) and folder != '.svn':
addWidget('.'.join([packageName, 'widgets', folder]))
def initialize(*args, **keywords):
""" initialize() -> None
Package-entry to initialize the package
"""
import vistrails.core.application
if not vistrails.core.application.is_running_gui():
raise RuntimeError, "GUI is not running. The Spreadsheet package requires the GUI"
# initialize widgets
debug.log('Loading Spreadsheet widgets...')
global basicWidgets
    if basicWidgets is None:
basicWidgets = addWidget('vistrails.packages.spreadsheet.basic_widgets')
importWidgetModules(basicWidgets)
def menu_items():
"""menu_items() -> tuple of (str,function)
It returns a list of pairs containing text for the menu and a
callback function that will be executed when that menu item is selected.
"""
def show_spreadsheet():
spreadsheetWindow = spreadsheetController.findSpreadsheetWindow()
spreadsheetWindow.show()
spreadsheetWindow.activateWindow()
spreadsheetWindow.raise_()
lst = []
lst.append(("Show Spreadsheet", show_spreadsheet))
return tuple(lst)
def finalize():
spreadsheetWindow = spreadsheetController.findSpreadsheetWindow(
show=False, create=False)
if spreadsheetWindow is not None:
### DO NOT ADD BACK spreadsheetWindow.destroy()
### That will crash VisTrails on Mac.
### It is not supposed to be called directly
spreadsheetWindow.cleanup()
spreadsheetWindow.deleteLater()
def upgrade_cell_to_output(module_remap, module_id, pipeline,
old_name, new_module,
end_version, input_port_name,
start_version=None, output_version=None):
"""This function upgrades a *Cell module to a *Output module.
The upgrade only happens if the original module doesn't have any connection
on the cell input ports that can't be translated.
This is to ease the transition to *Output modules, but we don't want (or
need) to break anything; the *Cell modules still exist, so they can stay.
"""
if not isinstance(module_remap, UpgradePackageRemap):
module_remap = UpgradePackageRemap.from_dict(module_remap)
old_module = pipeline.modules[module_id]
old_module_name = create_descriptor_string(old_module.package,
old_module.name,
old_module.namespace,
False)
if old_module_name != old_name:
return module_remap
used_input_ports = set(old_module.connected_input_ports.keys())
for func in old_module.functions:
used_input_ports.add(func.name)
if used_input_ports != set([input_port_name]):
return module_remap
_old_remap = module_remap
module_remap = copy.copy(module_remap)
assert _old_remap.remaps is not module_remap.remaps
remap = UpgradeModuleRemap(start_version, end_version, output_version,
module_name=old_name,
new_module=new_module)
remap.add_remap('dst_port_remap', input_port_name, 'value')
remap.add_remap('function_remap', input_port_name, 'value')
module_remap.add_module_remap(remap)
return module_remap
def handle_module_upgrade_request(controller, module_id, pipeline):
module_remap = {
'CellLocation': [
(None, '0.9.3', None, {
'src_port_remap': {
'self': 'value'},
}),
],
'SheetReference': [
(None, '0.9.3', None, {
'src_port_remap': {
'self': 'value'},
}),
],
'SingleCellSheetReference': [
(None, '0.9.3', None, {
'src_port_remap': {
'self': 'value'},
}),
],
}
module_remap = upgrade_cell_to_output(
module_remap, module_id, pipeline,
'RichTextCell', 'org.vistrails.vistrails.basic:RichTextOutput',
'0.9.4', 'File')
module_remap = upgrade_cell_to_output(
module_remap, module_id, pipeline,
'ImageViewerCell', 'org.vistrails.vistrails.basic:ImageOutput',
'0.9.4', 'File')
return UpgradeWorkflowHandler.remap_module(controller,
module_id,
pipeline,
module_remap)
|
minesense/VisTrails
|
vistrails/packages/spreadsheet/init.py
|
Python
|
bsd-3-clause
| 8,965 | 0.004016 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
import time
import unittest
from datetime import timedelta
from mock import patch
from airflow import DAG, configuration, settings
from airflow.exceptions import (AirflowException,
AirflowSensorTimeout,
AirflowSkipException)
from airflow.models import TaskInstance
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.sensors import HttpSensor, BaseSensorOperator, HdfsSensor, ExternalTaskSensor
from airflow.utils.decorators import apply_defaults
from airflow.utils.state import State
from airflow.utils import timezone
from airflow.utils.timezone import datetime
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
configuration.load_test_config()
DEFAULT_DATE = datetime(2015, 1, 1)
TEST_DAG_ID = 'unit_test_dag'
class TimeoutTestSensor(BaseSensorOperator):
"""
    Sensor whose poke() always returns the provided return_value.
    :param return_value: the value that poke() should return on every call
    :type return_value: any
"""
@apply_defaults
def __init__(
self,
return_value=False,
*args,
**kwargs):
self.return_value = return_value
super(TimeoutTestSensor, self).__init__(*args, **kwargs)
def poke(self, context):
return self.return_value
def execute(self, context):
started_at = timezone.utcnow()
time_jump = self.params.get('time_jump')
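        # 'time_jump' is a test-only knob: back-dating started_at by this
        # delta on every poke makes the sensor time out without waiting in
        # real time.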
while not self.poke(context):
if time_jump:
started_at -= time_jump
if (timezone.utcnow() - started_at).total_seconds() > self.timeout:
if self.soft_fail:
raise AirflowSkipException('Snap. Time is OUT.')
else:
raise AirflowSensorTimeout('Snap. Time is OUT.')
time.sleep(self.poke_interval)
self.log.info("Success criteria met. Exiting.")
class SensorTimeoutTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
dag = DAG(TEST_DAG_ID, default_args=args)
self.dag = dag
def test_timeout(self):
t = TimeoutTestSensor(
task_id='test_timeout',
execution_timeout=timedelta(days=2),
return_value=False,
poke_interval=5,
params={'time_jump': timedelta(days=2, seconds=1)},
dag=self.dag
)
self.assertRaises(
AirflowSensorTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
class HttpSensorTests(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
dag = DAG(TEST_DAG_ID, default_args=args)
self.dag = dag
def test_poke_exception(self):
"""
Exception occurs in poke function should not be ignored.
"""
def resp_check(resp):
raise AirflowException('AirflowException raised here!')
task = HttpSensor(
task_id='http_sensor_poke_exception',
http_conn_id='http_default',
endpoint='',
request_params={},
response_check=resp_check,
poke_interval=5)
with self.assertRaisesRegexp(AirflowException, 'AirflowException raised here!'):
task.execute(None)
@patch("airflow.hooks.http_hook.requests.Session.send")
def test_head_method(self, mock_session_send):
def resp_check(resp):
return True
task = HttpSensor(
dag=self.dag,
task_id='http_sensor_head_method',
http_conn_id='http_default',
endpoint='',
request_params={},
method='HEAD',
response_check=resp_check,
timeout=5,
poke_interval=1)
import requests
task.execute(None)
args, kwargs = mock_session_send.call_args
received_request = args[0]
prep_request = requests.Request(
'HEAD',
'https://www.google.com',
{}).prepare()
self.assertEqual(prep_request.url, received_request.url)
        self.assertEqual(prep_request.method, received_request.method)
@patch("airflow.hooks.http_hook.requests.Session.send")
def test_logging_head_error_request(
self,
mock_session_send
):
def resp_check(resp):
return True
import requests
response = requests.Response()
response.status_code = 404
response.reason = 'Not Found'
mock_session_send.return_value = response
task = HttpSensor(
dag=self.dag,
task_id='http_sensor_head_method',
http_conn_id='http_default',
endpoint='',
request_params={},
method='HEAD',
response_check=resp_check,
timeout=5,
poke_interval=1
)
with mock.patch.object(task.hook.log, 'error') as mock_errors:
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
self.assertTrue(mock_errors.called)
mock_errors.assert_called_with('HTTP error: %s', 'Not Found')
class HdfsSensorTests(unittest.TestCase):
def setUp(self):
from tests.core import FakeHDFSHook
self.hook = FakeHDFSHook
def test_legacy_file_exist(self):
"""
Test the legacy behaviour
:return:
"""
# Given
logging.info("Test for existing file with the legacy behaviour")
# When
task = HdfsSensor(task_id='Should_be_file_legacy',
filepath='/datadirectory/datafile',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
task.execute(None)
# Then
# Nothing happens, nothing is raised exec is ok
def test_legacy_file_exist_but_filesize(self):
"""
Test the legacy behaviour with the filesize
:return:
"""
# Given
logging.info("Test for existing file with the legacy behaviour")
# When
task = HdfsSensor(task_id='Should_be_file_legacy',
filepath='/datadirectory/datafile',
timeout=1,
file_size=20,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_legacy_file_does_not_exists(self):
"""
Test the legacy behaviour
:return:
"""
# Given
logging.info("Test for non existing file with the legacy behaviour")
task = HdfsSensor(task_id='Should_not_be_file_legacy',
filepath='/datadirectory/not_existing_file_or_directory',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
class ExternalTaskSensorTests(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
self.args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE,
'depends_on_past': False}
def test_external_task_sensor_fn_multiple_execution_dates(self):
bash_command_code = """
{% set s=execution_date.time().second %}
echo "second is {{ s }}"
if [[ $(( {{ s }} % 60 )) == 1 ]]
then
exit 1
fi
exit 0
"""
dag_external_id = TEST_DAG_ID + '_external'
dag_external = DAG(
dag_external_id,
default_args=self.args,
schedule_interval=timedelta(seconds=1))
task_external_with_failure = BashOperator(
task_id="task_external_with_failure",
bash_command=bash_command_code,
retries=0,
dag=dag_external)
task_external_without_failure = DummyOperator(
task_id="task_external_without_failure",
retries=0,
dag=dag_external)
task_external_without_failure.run(
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + timedelta(seconds=1),
ignore_ti_state=True)
session = settings.Session()
TI = TaskInstance
try:
task_external_with_failure.run(
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + timedelta(seconds=1),
ignore_ti_state=True)
            # The task_external_with_failure task is expected to fail
            # once per minute (on the run at the first second of
            # each minute).
except Exception as e:
failed_tis = session.query(TI).filter(
TI.dag_id == dag_external_id,
TI.state == State.FAILED,
TI.execution_date == DEFAULT_DATE + timedelta(seconds=1)).all()
if (len(failed_tis) == 1 and
failed_tis[0].task_id == 'task_external_with_failure'):
pass
else:
raise e
dag_id = TEST_DAG_ID
dag = DAG(
dag_id,
default_args=self.args,
schedule_interval=timedelta(minutes=1))
task_without_failure = ExternalTaskSensor(
task_id='task_without_failure',
external_dag_id=dag_external_id,
external_task_id='task_external_without_failure',
execution_date_fn=lambda dt: [dt + timedelta(seconds=i)
for i in range(2)],
allowed_states=['success'],
retries=0,
timeout=1,
poke_interval=1,
dag=dag)
task_with_failure = ExternalTaskSensor(
task_id='task_with_failure',
external_dag_id=dag_external_id,
external_task_id='task_external_with_failure',
execution_date_fn=lambda dt: [dt + timedelta(seconds=i)
for i in range(2)],
allowed_states=['success'],
retries=0,
timeout=1,
poke_interval=1,
dag=dag)
task_without_failure.run(
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE,
ignore_ti_state=True)
with self.assertRaises(AirflowSensorTimeout):
task_with_failure.run(
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE,
ignore_ti_state=True)
|
KL-WLCR/incubator-airflow
|
tests/operators/sensors.py
|
Python
|
apache-2.0
| 11,787 | 0.000339 |
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for cpp_utils."""
import unittest
import cpp_utils
template = """test1
${#Test}
test2"""
template_reuse = """test1
${#Test}
test2
${#Test}
test3"""
class CppFileWriterUnitTest(unittest.TestCase):
def setUp(self):
self.writer = cpp_utils.CppFileWriter('a.cc', False)
def tearDown(self):
pass
def testSectionTemplate(self):
section = self.writer.CreateSection('test')
section.EmitTemplate(template)
self.assertNotEquals(section.GetSection('Test'), None)
test_section = section.GetSection('Test')
test_section.EmitCode('test3')
lines = section.GetLines()
self.assertTrue(lines[0] == 'test1')
self.assertTrue(lines[1] == 'test3')
self.assertTrue(lines[2] == 'test2')
def testSectionTemplateReuse(self):
section = self.writer.CreateSection('test')
section.EmitTemplate(template_reuse)
self.assertNotEquals(section.GetSection('Test'), None)
test_section = section.GetSection('Test')
test_section.EmitCode('test4')
lines = section.GetLines()
self.assertTrue(lines[0] == 'test1')
self.assertTrue(lines[1] == 'test4')
self.assertTrue(lines[2] == 'test2')
self.assertTrue(lines[3] == 'test4')
self.assertTrue(lines[4] == 'test3')
if __name__ == '__main__':
unittest.main()
|
BladeSmithJohn/nixysa
|
nixysa/cpp_utils_unittest.py
|
Python
|
apache-2.0
| 1,879 | 0.002661 |
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
def serialize_ip_network_group(group):
"""Serialize group to JSON-like object"""
return {
'id': group.id,
'name': group.name,
'identifier': 'IPNetworkGroup:{}'.format(group.id),
'_type': 'IPNetworkGroup'
}
|
nop33/indico
|
indico/modules/networks/util.py
|
Python
|
gpl-3.0
| 1,020 | 0 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
"""Arm Compute Library supported operators."""
import tvm
from tvm.relay.expr import const
from tvm.relay import transform
from tvm.relay.build_module import bind_params_by_name
from ...dataflow_pattern import wildcard, is_op, is_constant, is_expr
from .register import register_pattern_table
def is_arm_compute_runtime_enabled():
"""Check if the ACL graph runtime is present.
Returns
-------
ret: bool
True if present, False if not.
"""
check_enabled = tvm.get_global_func("relay.op.is_arm_compute_runtime_enabled", True)
if check_enabled:
return check_enabled()
return False
def partition_for_arm_compute_lib(mod, params=None):
"""Partition the graph greedily offloading supported
operators to Arm Compute Library.
Parameters
----------
mod : Module
The module to run passes on.
params : Optional[Dict[str, NDArray]]
Constant input parameters.
Returns
-------
ret : annotated and partitioned module.
"""
if params:
mod["main"] = bind_params_by_name(mod["main"], params)
seq = tvm.transform.Sequential(
[
transform.InferType(),
transform.MergeComposite(arm_compute_lib_pattern_table()),
transform.AnnotateTarget("arm_compute_lib"),
transform.PartitionGraph(),
]
)
return seq(mod)
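# A minimal usage sketch (not part of the original file): build a tiny Relay
# module and let the pass pipeline above offload its conv2d to ACL.  The
# shape, layout and dtype below are illustrative assumptions only.
#
#   import numpy as np
#   from tvm import relay
#
#   data = relay.var("data", shape=(1, 14, 14, 32), dtype="float32")  # NHWC
#   weight = relay.const(np.zeros((3, 3, 32, 32), dtype="float32"))   # HWIO
#   out = relay.nn.conv2d(data, weight, kernel_size=(3, 3), channels=32,
#                         data_layout="NHWC", kernel_layout="HWIO")
#   mod = tvm.IRModule.from_expr(relay.Function([data], out))
#   mod = partition_for_arm_compute_lib(mod)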
@register_pattern_table("arm_compute_lib")
def arm_compute_lib_pattern_table():
"""Get the ACL pattern table."""
def conv_pattern():
"""Create a convolution pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
Denotes the convolution pattern.
"""
pattern = is_op("nn.pad")(wildcard()) | wildcard()
pattern = is_op("nn.conv2d")(pattern, is_constant())
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
pattern = pattern.optional(is_op("nn.relu"))
return pattern
def qnn_conv_pattern():
"""Create a quantized convolution pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
Denotes the convolution pattern.
"""
pattern = is_op("nn.pad")(wildcard()) | wildcard()
pattern = is_op("qnn.conv2d")(
pattern, is_constant(), is_constant(), is_constant(), is_constant(), is_constant()
)
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
pattern = pattern.optional(is_op("nn.relu"))
pattern = is_op("qnn.requantize")(
pattern, wildcard(), wildcard(), is_constant(), is_constant()
)
return pattern
def dense_pattern():
"""Create a dense (fully-connected) pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the dense pattern.
"""
pattern = is_op("nn.dense")(wildcard(), is_constant())
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
return pattern
def qnn_dense_pattern():
"""Create a quantized dense (fully-connected) pattern.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the quantized dense pattern.
"""
pattern = is_op("qnn.dense")(
wildcard(), is_constant(), is_constant(), is_constant(), is_constant(), is_constant()
)
pattern = pattern.optional(lambda x: is_op("nn.bias_add")(x, is_constant()))
pattern = is_op("qnn.requantize")(
pattern, wildcard(), wildcard(), is_constant(), is_constant()
)
return pattern
def avg_pool2d_pattern():
"""Creates a pattern that matches either quantized
avg_pool2d or quantized global_avg_pool2d.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the average pooling pattern.
"""
pattern = is_op("cast")(wildcard())
pattern = is_op("nn.avg_pool2d")(pattern) | is_op("nn.global_avg_pool2d")(pattern)
pattern = is_op("cast")(pattern)
return pattern
def l2_pool2d_pattern():
"""Create an l2 pooling pattern from equivalent relay operators.
Returns
-------
pattern : dataflow_pattern.AltPattern
            Denotes the l2 pooling pattern.
"""
pattern = is_op("power")(wildcard(), is_expr(const(2.0)))
pattern = is_op("nn.avg_pool2d")(pattern)
pattern = is_op("sqrt")(pattern)
return pattern
def check_conv(extract):
"""Check conv pattern is supported by ACL."""
call = extract
while call.op.name != "nn.conv2d":
call = call.args[0]
return conv2d(call.attrs, call.args)
def check_qnn_conv(extract):
"""Check qnn conv pattern is supported by ACL."""
if extract.attrs.out_dtype != "uint8":
return False
call = extract
while call.op.name != "qnn.conv2d":
call = call.args[0]
return qnn_conv2d(call.attrs, call.args)
def check_dense(extract):
"""Check conv pattern is supported by ACL."""
call = extract
while call.op.name != "nn.dense":
call = call.args[0]
return dense(call.attrs, call.args)
def check_qnn_dense(extract):
"""Check qnn conv pattern is supported by ACL."""
if extract.attrs.out_dtype != "uint8":
return False
call = extract
while call.op.name != "qnn.dense":
call = call.args[0]
return qnn_dense(call.attrs, call.args)
def check_avg_pool2d(extract):
"""Check average pool2d pattern is supported by ACL."""
if extract.attrs.dtype != "uint8":
return False
pool = extract.args[0]
if pool.args[0].attrs.dtype != "int32":
return False
return avg_pool2d(pool.attrs, pool.args, from_quantized_composite=True)
def check_l2_pool2d(extract):
"""Check l2 pool2d pattern is supported by ACL."""
pool = extract.args[0]
return avg_pool2d(pool.attrs, pool.args)
return [
("arm_compute_lib.conv2d", conv_pattern(), check_conv),
("arm_compute_lib.qnn_conv2d", qnn_conv_pattern(), check_qnn_conv),
("arm_compute_lib.dense", dense_pattern(), check_dense),
("arm_compute_lib.qnn_dense", qnn_dense_pattern(), check_qnn_dense),
("arm_compute_lib.qnn_conv2d", qnn_conv_pattern(), check_qnn_conv),
("arm_compute_lib.avg_pool2d", avg_pool2d_pattern(), check_avg_pool2d),
("arm_compute_lib.l2_pool2d", l2_pool2d_pattern(), check_l2_pool2d),
]
def _register_external_op_helper(op_name, supported=True):
@tvm.ir.register_op_attr(op_name, "target.arm_compute_lib")
def _func_wrapper(attrs, args):
return supported
return _func_wrapper
_register_external_op_helper("reshape")
@tvm.ir.register_op_attr("nn.conv2d", "target.arm_compute_lib")
def conv2d(attrs, args):
"""Check if the external ACL codegen for conv2d should be used."""
if attrs.groups != 1:
return False
if attrs.data_layout != "NHWC":
return False
if attrs.out_dtype != "float32" and attrs.out_dtype != "":
return False
data_typ = args[0].checked_type
if len(data_typ.shape) != 4 or data_typ.shape[0] != 1 or data_typ.dtype != "float32":
return False
kernel_typ = args[1].checked_type
if len(kernel_typ.shape) != 4 or kernel_typ.dtype != "float32":
return False
return True
def qnn_conv2d(attrs, args):
"""Check if the external ACL codegen for qnn.conv2d should be used."""
if attrs.groups != 1:
return False
if attrs.data_layout != "NHWC":
return False
if attrs.out_dtype != "int32" and attrs.out_dtype != "":
return False
data_typ = args[0].checked_type
if len(data_typ.shape) != 4 or data_typ.shape[0] != 1 or data_typ.dtype != "uint8":
return False
kernel_typ = args[1].checked_type
if len(kernel_typ.shape) != 4 or kernel_typ.dtype != "uint8":
return False
return True
@tvm.ir.register_op_attr("nn.dense", "target.arm_compute_lib")
def dense(attrs, args):
"""Check if the external ACL codegen for dense should be used."""
data_typ = args[0].checked_type
if data_typ.dtype != "float32":
return False
kernel_typ = args[1].checked_type
if len(kernel_typ.shape) != 2 or kernel_typ.dtype != "float32":
return False
if attrs.out_dtype != "float32" and attrs.out_dtype != "":
return False
return True
def qnn_dense(attrs, args):
"""Check if the external ACL codegen for qnn.dense should be used."""
data_typ = args[0].checked_type
if data_typ.dtype != "uint8":
return False
kernel_typ = args[1].checked_type
if len(kernel_typ.shape) != 2 or kernel_typ.dtype != "uint8":
return False
if attrs.out_dtype != "int32":
return False
return True
@tvm.ir.register_op_attr("nn.max_pool2d", "target.arm_compute_lib")
def max_pool2d(attrs, args):
"""Check if the external ACL codegen for maxpool2d should be used."""
if attrs.layout != "NHWC":
return False
typ = args[0].checked_type
if typ.dtype not in ["float32", "uint8"]:
return False
return True
@tvm.ir.register_op_attr("nn.avg_pool2d", "target.arm_compute_lib")
def avg_pool2d(attrs, args, from_quantized_composite=False):
"""Check if the external ACL codegen for avgpool2d should be used."""
typ = args[0].checked_type
if from_quantized_composite:
if typ.dtype != "int32":
return False
else:
if typ.dtype not in ["float32"]:
return False
if attrs.layout != "NHWC":
return False
return True
@tvm.ir.register_op_attr("nn.global_max_pool2d", "target.arm_compute_lib")
def global_max_pool2d(attrs, args):
"""Check if the external ACL codegen for gloval_maxpool2d should be used."""
typ = args[0].checked_type
if typ.dtype not in ["float32", "uint8"]:
return False
if attrs.layout != "NHWC":
return False
return True
@tvm.ir.register_op_attr("nn.global_avg_pool2d", "target.arm_compute_lib")
def global_avg_pool2d(attrs, args):
"""Check if the external ACL codegen for global_avgpool2d should be used."""
typ = args[0].checked_type
if typ.dtype not in ["float32"]:
return False
if attrs.layout != "NHWC":
return False
return True
@tvm.ir.register_op_attr("maximum", "target.arm_compute_lib")
def maximum(attrs, args):
"""Check if the external ACL codegen for maximum should be used."""
type_a = args[0].checked_type
    type_b = args[1].checked_type
return (type_a.dtype == "float32") and (type_b.dtype == "float32")
@tvm.ir.register_op_attr("add", "target.arm_compute_lib")
def add(attrs, args):
"""Check if the external ACL codegen for add should be used."""
for typ in [args[0].checked_type, args[1].checked_type]:
if typ.dtype != "float32":
return False
return True
@tvm.ir.register_op_attr("qnn.add", "target.arm_compute_lib")
def qnn_add(attrs, args):
"""Check if the external ACL codegen for add should be used."""
for typ in [args[0].checked_type, args[1].checked_type]:
if typ.dtype != "uint8":
return False
return True
|
sxjscience/tvm
|
python/tvm/relay/op/contrib/arm_compute_lib.py
|
Python
|
apache-2.0
| 12,300 | 0.000976 |
#!/usr/bin/env python
from distutils.core import setup
setup(name='minimalisp',
version='1.0',
description='An implementation of a small lisp language',
author='Joe Jordan',
author_email='[email protected]',
url='https://github.com/joe-jordan/minimalisp',
packages=['minimalisp'],
scripts=['scripts/minimalisp'],
include_package_data=True
)
|
joe-jordan/minimalisp
|
setup.py
|
Python
|
mit
| 377 | 0.023873 |
def get_related_fields(model):
pass
def get_table_size(model):
pass
def get_row_size(model):
pass
|
unbracketed/snowbird
|
snowbird/analyzer.py
|
Python
|
mit
| 112 | 0.017857 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Test.content'
db.alter_column(u'itest_test', 'content', self.gf('django.db.models.fields.CharField')(max_length=1850, null=True))
def backwards(self, orm):
# Changing field 'Test.content'
db.alter_column(u'itest_test', 'content', self.gf('django.db.models.fields.CharField')(max_length=850, null=True))
models = {
'itest.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'word': ('django.db.models.fields.CharField', [], {'max_length': '35'})
},
'itest.test': {
'Meta': {'object_name': 'Test'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '1850', 'null': 'True', 'blank': 'True'}),
'create_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '450', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'tests'", 'symmetrical': 'False', 'to': "orm['itest.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150'})
}
}
complete_apps = ['itest']
|
xuender/test
|
testAdmin/itest/migrations/0006_auto__chg_field_test_content.py
|
Python
|
apache-2.0
| 1,755 | 0.006838 |
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtGui import QDragEnterEvent, QDropEvent
from urh.signalprocessing.IQArray import IQArray
from urh.cythonext import util
from urh.signalprocessing.ProtocolAnalyzer import ProtocolAnalyzer
from urh.signalprocessing.Signal import Signal
from urh.ui.painting.SignalSceneManager import SignalSceneManager
from urh.ui.views.ZoomableGraphicView import ZoomableGraphicView
class ZoomAndDropableGraphicView(ZoomableGraphicView):
signal_loaded = pyqtSignal(ProtocolAnalyzer)
def __init__(self, parent=None):
self.signal_tree_root = None # type: ProtocolTreeItem
self.scene_manager = None
self.signal = None # type: Signal
self.proto_analyzer = None # type: ProtocolAnalyzer
super().__init__(parent)
def dragEnterEvent(self, event: QDragEnterEvent):
event.acceptProposedAction()
def dropEvent(self, event: QDropEvent):
mime_data = event.mimeData()
data_str = str(mime_data.text())
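        # The dropped mime text is assumed to look like
        # "row,column,parent/row,column,parent/..." with a trailing slash,
        # which is why the last (empty) chunk is discarded below.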
indexes = list(data_str.split("/")[:-1])
signal = None
proto_analyzer = None
for index in indexes:
row, column, parent = map(int, index.split(","))
if parent == -1:
parent = self.signal_tree_root
else:
parent = self.signal_tree_root.child(parent)
node = parent.child(row)
if node.protocol is not None and node.protocol.signal is not None:
signal = node.protocol.signal
proto_analyzer = node.protocol
break
        if signal is None:
            return
self.signal = signal # type: Signal
self.proto_analyzer = proto_analyzer # type: ProtocolAnalyzer
self.scene_manager = SignalSceneManager(signal, self)
self.plot_data(self.signal.real_plot_data)
self.show_full_scene()
self.auto_fit_view()
self.signal_loaded.emit(self.proto_analyzer)
def auto_fit_view(self):
super().auto_fit_view()
plot_min, plot_max = util.minmax(self.signal.real_plot_data)
data_min, data_max = IQArray.min_max_for_dtype(self.signal.real_plot_data.dtype)
self.scale(1, (data_max - data_min) / (plot_max-plot_min))
self.centerOn(self.view_rect().x() + self.view_rect().width() / 2, self.y_center)
def eliminate(self):
# Do _not_ call eliminate() for self.signal and self.proto_analyzer
# as these are references to the original data!
self.signal = None
self.proto_analyzer = None
self.signal_tree_root = None
super().eliminate()
|
jopohl/urh
|
src/urh/ui/views/ZoomAndDropableGraphicView.py
|
Python
|
gpl-3.0
| 2,692 | 0.000743 |
"""
Database Models Library
"""
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
# Model to store information about devices
class Device(db.Model):
__tablename__ = 'clients'
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.Text)
api_key = db.Column(db.Text)
active = db.Column(db.Boolean, default=False)
access_level = db.Column(db.Integer)
status = db.Column(db.Integer)
def __init__(self, name, permission_level):
self.name = name
self.access_level = permission_level
self.api_key = generate_api_token()
# Model to store notifications
class Notification(db.Model):
__tablename__ = 'notifications'
id = db.Column(db.Integer, primary_key = True)
user_id = db.Column(db.Integer)
category = db.Column(db.Text)
title = db.Column(db.Text)
body = db.Column(db.Text)
callback_url = db.Column(db.Text)
dismissed = db.Column(db.Boolean, default=0)
timestamp = db.Column(db.DateTime)
# NOTE -120 -> all admins (also TODO when implementing GUI)
# NOTE -121 -> all users
def __init__(self, user_id, category, title, body, callback_url):
self.user_id = user_id
self.category = category
self.title = title
self.body = body
self.callback_url = callback_url
# Down here to avoid issues with circular dependencies
from helpers import generate_api_token
class Preference(db.Model):
__tablename__ = 'preferences'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
device_id = db.Column(db.Integer, db.ForeignKey('clients.id'))
key = db.Column(db.Text)
value = db.Column(db.Text)
access_required = db.Column(db.Integer)
def __init__(self, user_id, device_id, key, value, access_required):
self.user_id = user_id
self.device_id = device_id
self.key = key
self.value = value
self.access_required = access_required
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key = True)
username = db.Column(db.Text)
password = db.Column(db.Text)
last_login = db.Column(db.DateTime)
create_date = db.Column(db.DateTime)
access_level = db.Column(db.Integer)
preferences = db.relationship('Preference', backref='user', lazy='joined')
def __init__(self, username, password, access_level):
self.username = username
self.password = password
self.access_level = access_level
|
How2Compute/SmartHome
|
hub/Models.py
|
Python
|
mit
| 2,593 | 0.008484 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gui/editors/__init__.py
from .editaddress import EditAddress
from .editattribute import EditAttribute, EditSrcAttribute
from .editchildref import EditChildRef
from .editcitation import EditCitation, DeleteCitationQuery
from .editdate import EditDate
from .editevent import EditEvent, DeleteEventQuery
from .editeventref import EditEventRef
from .editfamily import EditFamily
from .editldsord import EditLdsOrd, EditFamilyLdsOrd
from .editlocation import EditLocation
from .editmedia import EditMedia, DeleteMediaQuery
from .editmediaref import EditMediaRef
from .editname import EditName
from .editnote import EditNote, DeleteNoteQuery
from .editperson import EditPerson
from .editpersonref import EditPersonRef
from .editplace import EditPlace, DeletePlaceQuery
from .editplacename import EditPlaceName
from .editplaceref import EditPlaceRef
from .editrepository import EditRepository, DeleteRepositoryQuery
from .editreporef import EditRepoRef
from .editsource import EditSource, DeleteSrcQuery
from .edittaglist import EditTagList
from .editurl import EditUrl
from .editlink import EditLink
from .filtereditor import FilterEditor, EditFilter
from gramps.gen.lib import (Person, Family, Event, Place, Repository, Source,
Citation, Media, Note)
# Map from gramps.gen.lib name to Editor:
EDITORS = {
'Person': EditPerson,
'Event': EditEvent,
'Family': EditFamily,
'Media': EditMedia,
'Source': EditSource,
'Citation': EditCitation,
'Place': EditPlace,
'Repository': EditRepository,
'Note': EditNote,
}
CLASSES = {
'Person': Person,
'Event': Event,
'Family': Family,
'Media': Media,
'Source': Source,
'Citation': Citation,
'Place': Place,
'Repository': Repository,
'Note': Note,
}
def EditObject(dbstate, uistate, track, obj_class, prop=None, value=None, callback=None):
"""
Generic Object Editor.
obj_class is Person, Source, Repository, etc.
prop is 'handle', 'gramps_id', or None (for new object)
value is string handle, string gramps_id, or None (for new object)
"""
import logging
LOG = logging.getLogger(".Edit")
if obj_class in EDITORS.keys():
if value is None:
obj = CLASSES[obj_class]
try:
EDITORS[obj_class](dbstate, uistate, track, obj, callback=callback)
except Exception as msg:
LOG.warning(str(msg))
elif prop in ("gramps_id", "handle"):
obj = dbstate.db.method('get_%s_from_%s', obj_class, prop)(value)
if obj:
try:
EDITORS[obj_class](dbstate, uistate, track, obj, callback=callback)
except Exception as msg:
LOG.warning(str(msg))
else:
LOG.warning("gramps://%s/%s/%s not found" %
(obj_class, prop, value))
else:
LOG.warning("unknown property to edit '%s'; "
"should be 'gramps_id' or 'handle'" % prop)
else:
LOG.warning("unknown object to edit '%s'; "
"should be one of %s" % (obj_class, list(EDITORS.keys())))
|
prculley/gramps
|
gramps/gui/editors/__init__.py
|
Python
|
gpl-2.0
| 4,028 | 0.001986 |
#!/usr/bin/env python
from __future__ import absolute_import
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "service.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
peragro/peragro-rest
|
manage.py
|
Python
|
bsd-3-clause
| 289 | 0 |
#_PYTHON_INSERT_SAO_COPYRIGHT_HERE_(2007)_
#_PYTHON_INSERT_GPL_LICENSE_HERE_
from numpy import arange
import sherpa.models.basic as basic
from sherpa.utils import SherpaFloat, SherpaTestCase
from sherpa.models.model import ArithmeticModel
def userfunc(pars, x, *args, **kwargs):
return x
class test_basic(SherpaTestCase):
def test_create_and_evaluate(self):
x = arange(1.0, 5.0)
count = 0
for cls in dir(basic):
clsobj = getattr(basic, cls)
if ((not isinstance(clsobj, type)) or
(not issubclass(clsobj, ArithmeticModel)) or
(clsobj is ArithmeticModel)):
continue
# These have very different interfaces than the others
if cls == 'Integrator1D' or cls == 'Integrate1D':
continue
m = clsobj()
if isinstance(m, basic.TableModel):
m.load(x,x)
if isinstance(m, basic.UserModel):
m.calc = userfunc
self.assertEqual(type(m).__name__.lower(), m.name)
count += 1
try:
if m.name.count('2d'):
pt_out = m(x, x)
int_out = m(x, x, x, x)
else:
if m.name in ('log', 'log10'):
xx = -x
else:
xx = x
pt_out = m(xx)
int_out = m(xx, xx)
except ValueError:
self.fail("evaluation of model '%s' failed" % cls)
for out in (pt_out, int_out):
                self.assertTrue(out.dtype.type is SherpaFloat)
self.assertEqual(out.shape, x.shape)
self.assertEqual(count, 31)
|
brefsdal/sherpa
|
sherpa/models/tests/test_basic.py
|
Python
|
gpl-2.0
| 1,765 | 0.005099 |
from devassistant.command_runners import CommandRunner
from devassistant.logger import logger
class CR1(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'barbarbar'
@classmethod
def run(cls, c):
logger.info('CR1: Doing something ...')
x = c.input_res + 'bar'
return (True, x)
class CR2(CommandRunner):
@classmethod
def matches(cls, c):
return c.comm_type == 'spamspamspam'
@classmethod
def run(cls, c):
logger.info('CR2: Doing something ...')
x = c.input_res + 'spam'
return (True, x)
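# Illustrative assistant snippet (yaml syntax assumed) that would dispatch
# to the runners above:
#
#   run:
#   - barbarbar: some_input      # handled by CR1, appends 'bar'
#   - spamspamspam: other_input  # handled by CR2, appends 'spam'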
|
phracek/devassistant
|
test/fixtures/files/crt/commands/a.py
|
Python
|
gpl-2.0
| 609 | 0.001642 |
#
# TESTS
#
from nose.tools import assert_true, assert_equal, assert_raises
from mixedges import Edges, EdgeKeys, EdgeData, EdgeItems
class BaseEdgeTests(object):
def setup_edges(self):
self.edlist = [{1:"one"}, {1:"two"}, {1:"three"}, {1:"four"}]
ed1, ed2, ed3, ed4 = self.edlist
Ge = self.Ge
Ge.add(0,1,ed1)
Ge.add(0,0,ed2)
Ge.update([(1,0,ed3), (2,3,ed4)])
def test_iter_items(self):
Ge = self.Ge
ed1, ed2, ed3, ed4 = self.edlist
if Ge.directed:
ans = [(0,1), (0,0), (1,0), (2,3)]
else:
ans = [(0,1), (0,0), (2,3)]
assert_equal( sorted(Ge), sorted(ans))
if Ge.directed:
ans = [((0,1),ed1), ((0,0),ed2), ((1,0),ed3), ((2,3),ed4)]
else:
ans = [((0,1),ed3), ((0,0),ed2), ((2,3),ed4)]
print("succ:",Ge._succ)
print("pred:",Ge._pred)
print("items",list(Ge._items()))
assert_equal( sorted(Ge._items()), sorted(ans))
def test_view_data_keys(self):
Ge = self.Ge
ed1, ed2, ed3, ed4 = self.edlist
if Ge.directed:
ans = [((0,1),ed1), ((0,0),ed2), ((1,0),ed3), ((2,3),ed4)]
else:
ans = [((0,1),ed3), ((0,0),ed2), ((2,3),ed4)]
# iter
assert_equal( sorted(Ge.items()), sorted(ans))
assert_equal( sorted(Ge.data()), sorted(d for k,d in ans))
assert_equal( sorted(Ge.keys()), sorted(k for k,d in ans))
# contains
assert_true( (0,1) in Ge.keys() )
assert_true( (0,3) not in Ge.keys() )
assert_true( (0,8) not in Ge.keys() )
extras = [((0,1),{1:"none"}), ((2,3),ed4), ((0,8),ed3)]
assert_true( ed2 in Ge.data() )
assert_true( extras[0][1] not in Ge.data() )
assert_true( ((0,0),ed2) in Ge.items() )
assert_true( extras[0] not in Ge.items() )
assert_true( extras[1] in Ge.items() )
assert_true( extras[2] not in Ge.items() )
def test_len(self):
Ge = self.Ge
assert_equal(len(Ge), 4 if Ge.directed else 3)
assert_equal(len(Ge.items()), len(Ge))
assert_equal(len(Ge.data()), len(Ge))
assert_equal(len(Ge.keys()), len(Ge))
def test_contains_get(self):
Ge = self.Ge
ed1, ed2, ed3, ed4 = self.edlist
assert_true((0,1) in Ge)
assert_true((1,0) in Ge)
assert_true((2,3) in Ge)
assert_true((0,0) in Ge)
if Ge.directed:
assert_true((3,2) not in Ge)
else:
assert_true((3,2) in Ge)
assert_true((4,5) not in Ge)
assert_true((4,4) not in Ge)
# getitem
assert_true(Ge[(0,1)] == (ed1 if Ge.directed else ed3))
assert_true(Ge[(1,0)] == ed3)
assert_true(Ge[(2,3)] == ed4)
assert_true(Ge[(0,0)] == ed2)
def test_remove_clear(self):
Ge = self.Ge
Ge.remove(0,1)
assert_true((0,1) not in Ge)
if Ge.directed:
assert_true((1,0) in Ge)
else:
assert_true((1,0) not in Ge)
Ge.clear()
assert_equal(len(Ge._node), 5)
assert_equal(len(Ge), 0)
def test_set_ops(self):
Ge = self.Ge
extras = [(1,2), (0,1), (3,4)]
if Ge.directed:
edgs = [(0,1), (0,0), (1,0), (2,3)]
else:
edgs = [(0,1), (0,0), (2,3)]
assert_equal(Ge | extras, set(edgs) | set(extras) )
assert_equal(Ge & extras, set(edgs) & set(extras) )
assert_equal(Ge ^ extras, set(edgs) ^ set(extras) )
assert_equal(Ge - extras, set(edgs) - set(extras) )
assert_equal(extras - Ge, set(extras) - set(edgs) )
class TestDiEdges(BaseEdgeTests):
def setUp(self):
        node = {4: {}}
        succ = {}
        pred = {}
        self.Ge = Edges(node, succ, pred, directed=True)
self.setup_edges()
class TestUndiEdges(BaseEdgeTests):
def setUp(self):
        node = {4: {}}
        succ = {}
        pred = {}
        self.Ge = Edges(node, succ, pred, directed=False)
        self.setup_edges()
|
hagberg/nx3k
|
test_edges.py
|
Python
|
bsd-3-clause
| 4,105 | 0.031425 |
#!/usr/bin/python
# Copyright (C) 2013 rapidhere
#
# Author: rapidhere <[email protected]>
# Maintainer: rapidhere <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import skapp
from optparse import OptionParser
import sys
parser = OptionParser(
usage = "%prog [options]",
description = """A simple snake game.Suggest that resize your terminal window at a property size befor playing!""",
epilog = "[email protected]",
version = "0.1"
)
parser.add_option(
"","--key-help",
action = "store_true",default = False,
help = "show game keys"
)
opts,args = parser.parse_args()
parser.destroy()
if opts.key_help:
print "'w' or 'W' or UP-Arrow up"
print "'a' or 'A' or LF-Arrow left"
print "'s' or 'S' or DW-Arrow down"
print "'d' or 'D' or RG-Arrpw right"
print "'q' or 'Q' quit"
sys.exit(0)
else:
app = skapp.SKApp()
app.run()
|
rapidhere/snake_game
|
snake_game.py
|
Python
|
gpl-3.0
| 1,496 | 0.012032 |
# $HeadURL$
__RCSID__ = "$Id$"
#
#
VM_WEB_OPERATION = "VmWebOperation"
#
VM_RPC_OPERATION = "VmRpcOperation"
#...............................................................................
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
|
xianghuzhao/VMDIRAC
|
VMDIRAC/Security/VmProperties.py
|
Python
|
gpl-3.0
| 268 | 0.011194 |
import unittest2
import helper
import simplejson as json
from nose.plugins.attrib import attr
# NOTE: the original file never imports ListsClient; the path below is an
# assumption based on the hapipy package layout.
from hapi.lists import ListsClient
PORTAL_ID = 62515
class ListsClientTest(unittest2.TestCase):
"""
Unit tests for the HubSpot List API Python wrapper (hapipy) client.
This file contains some unittest tests for the List API.
Questions, comments, etc: http://developers.hubspot.com
"""
def setUp(self):
self.client = ListsClient(**helper.get_options())
def tearDown(self):
pass
@attr('api')
def test_get_list(self):
# create a list to get
dummy_data = json.dumps(dict(
name='try_and_get_me',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was created
        self.assertTrue(created_list['listId'])
        # the id number of the list the test is trying to get
        id_to_get = created_list['listId']
        # try and get it
        received_lists = self.client.get_list(id_to_get)
        # see if the test got the right list
        self.assertEqual(received_lists['lists'][0]['listId'], created_list['listId'])
        print "Got this list: %s" % json.dumps(received_lists['lists'][0])
# clean up
self.client.delete_list(id_to_get)
@attr('api')
def test_get_batch_lists(self):
        # holds the ids of the lists being retrieved
        list_ids = []
        # make a list to get
        list_data = dict(
            name='first_test_list',
            dynamic=False,
            portalId=PORTAL_ID
        )
        created_list = self.client.create_list(json.dumps(list_data))
        # make sure it was actually made
        self.assertTrue(created_list['listId'])
        # put the id of the newly made list in list_ids
        list_ids.append(created_list['listId'])
        # change the data a little and make another list
        list_data['name'] = 'second_test_list'
        created_list = self.client.create_list(json.dumps(list_data))
        # make sure it was actually made
        self.assertTrue(created_list['listId'])
        # put the id number in list_ids
        list_ids.append(created_list['listId'])
        # try and get them
        batch_lists = self.client.get_batch_lists(list_ids)
# make sure you got as many lists as you were searching for
self.assertEqual(len(list_ids), len(batch_lists['lists']))
# clean up
self.client.delete_list(list_ids[0])
self.client.delete_list(list_ids[1])
@attr('api')
def test_get_lists(self):
# try and get lists
        received_lists = self.client.get_lists()
        # see if the test got at least one
        if len(received_lists['lists']) == 0:
            self.fail("Unable to retrieve any lists")
        else:
            print "Got these lists %s" % json.dumps(received_lists)
@attr('api')
def test_get_static_lists(self):
# create a static list to get
dummy_data = json.dumps(dict(
name='static_test_list',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
        self.assertTrue(created_list['listId'])
# this call will return 20 lists if not given another value
static_lists = self.client.get_static_lists()
if len(static_lists['lists']) == 0:
self.fail("Unable to retrieve any static lists")
else:
print "Found these static lists: %s" % json.dumps(static_lists)
# clean up
self.client.delete_list(created_list['listId'])
@attr('api')
def test_get_dynamic_lists(self):
# make a dynamic list to get
dummy_data = json.dumps(dict(
name='test_dynamic_list',
dynamic=True,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure the dynamic list was made
self.assertTrue(created_list['listId'])
dynamic_lists = self.client.get_dynamic_lists()
if len(dynamic_lists['lists']) == 0:
self.fail("Unable to retrieve any dynamic lists")
else:
print "Found these dynamic lists: %s" % json.dumps(dynamic_lists)
# clean up
self.client.delete_list(created_list['listId'])
@attr('api')
def test_get_list_contacts(self):
# the id number of the list you want the contacts of
        which_list = None  # fill in with a real list id before running
        # try and get the contacts
        contacts = self.client.get_list_contacts(which_list)
        # make sure you get at least one
        self.assertTrue(len(contacts['contacts']))
        print "Got these contacts: %s from this list: %s" % (json.dumps(contacts), which_list)
@attr('api')
def test_get_list_contacts_recent(self):
# the id number of the list you want the recent contacts of
        which_list = None  # fill in with a real list id before running
recent_contacts = self.client.get_list_contacts_recent(which_list)
if len(recent_contacts['lists']) == 0:
self.fail("Did not find any recent contacts")
else:
print "Found these recent contacts: %s" % json.dumps(recent_conacts)
@attr('api')
def test_create_list(self):
# the data for the list the test is making
dummy_data = json.dumps(dict(
            name='test_list',
dynamic=False,
portalId=PORTAL_ID
))
# try and make the list
created_list = self.client.create_list(dummy_data)
# make sure it was created
        if not created_list['listId']:
            self.fail("Did not create the list")
        else:
            print "Created this list: %s" % json.dumps(created_list)
        # clean up
        self.client.delete_list(created_list['listId'])
@attr('api')
def test_update_list(self):
# make a list to update
dummy_data = json.dumps(dict(
name='delete_me',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
        self.assertTrue(created_list['listId'])
# get the id number of the list
update_list_id = created_list['listId']
# this is the data updating the list
update_data = json.dumps(dict(
            name='really_delete_me',
))
# try and do the update
http_response = self.client.update_list(update_list_id, update_data)
if http_response >= 400:
self.fail("Unable to update list!")
else:
print("Updated a list!")
# clean up
self.client.delete_list(update_list_id)
@attr('api')
def test_add_contacts_to_list_from_emails(self):
# make a list to add contacts to
dummy_data = json.dumps(dict(
name='give_me_contact_emails',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
        self.assertTrue(created_list['listId'])
# the id number of the list being added to
which_list = created_list['listId']
# the emails of the contacts being added
emails = json.dumps(dict(
            emails=[]  # fill in with real contact email addresses
))
# try and add the contacts
self.client.add_contacts_to_list_from_emails(which_list, emails)
@attr('api')
def test_add_contact_to_list(self):
# make a list to add a contact to
dummy_data = json.dumps(dict(
name='add_a_contact',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it was actually made
self.assertTrue(created_list['listId'])
# the id number of the list the contact is being added to
which_list = created_list['listId']
# the id number of the contact being added to the list
        which_contact = None  # fill in with a real contact id before running
        added = self.client.add_contact_to_list(which_list, which_contact)
        if added['updated'] == which_contact:
            print "Successfully added contact: %s to list: %s" % (which_contact, which_list)
            # if it worked, clean up
            self.client.delete_list(which_list)
        else:
            self.fail("Did not add contact: %s to list: %s" % (which_contact, which_list))
@attr('api')
def test_remove_contact_from_list(self):
# make a list to remove a contact from
fake_data = json.dumps(dict(
            name='remove_this_contact',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(fake_data)
# make sure it was actually made
self.assertTrue(created_list['listId'])
# the id number of the list the contact is being deleted from
which_list = created_list['listId']
# the id number of the contact being deleted
        which_contact = None  # fill in with a real contact id before running
# put the contact in the list so it can be removed
added = self.client.add_contact_to_list(which_list, which_contact)
# make sure it was added
self.assertTrue(added['updated'])
# try and remove it
removed = self.client.remove_contact_from_list(which_list, which_contact)
# check if it was actually removed
if removed['updated'] == which_contact:
print "Succesfully removed contact: %s from list: %s" % which_contact, which_list
# clean up
self.client.delete_list(created_list['listId'])
else:
self.fail("Did not remove contact %s from list: %s" % which_contact, which_list)
@attr('api')
def test_delete_list(self):
# make a list to delete
dummy_data = json.dumps(dict(
name='should_be_deleted',
dynamic=False,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# check if it was actually made
self.assertTrue(created_list['listId'])
# the id number of the list being deleted
id_to_delete = created_list['listId']
# try deleting it
self.client.delete_list(id_to_delete)
# try and get the list that should have been deleted
check = self.client.get_list(id_to_delete)
# check should not have any lists
self.assertEqual(len(check['lists']), 0)
print "Sucessfully deleted a test list"
@attr('api')
def test_refresh_list(self):
# make a dynamic list to refresh
dummy_data = json.dumps(dict(
name='refresh_this_list',
dynamic=True,
portalId=PORTAL_ID
))
created_list = self.client.create_list(dummy_data)
# make sure it actually made the list
self.assertTrue(created_list['listId'])
# do the refresh
refresh_response = self.client.refresh_list(created_list['listId'])
# check if it worked
if refresh_response >= 400:
self.fail("Failed to refresh list: %s" % json.dumps(created_list))
else:
print "Succesfully refreshed list: %s" % json.dumps(created_list)
# clean up
self.client.delete_list(created_list['listId'])
if __name__ == "__main__":
unittest2.main()
|
jonathan-s/happy
|
happy/test/test_lists.py
|
Python
|
apache-2.0
| 11,513 | 0.001042 |
# Environment configuration
# Copyright (c) 2016, Tieto Corporation
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
#
# Currently static definition, in the future this could be a config file,
# or even common database with host management.
#
import logging
logger = logging.getLogger()
#
# You can put your settings in cfg.py file with setup_params, devices
# definitions in the format as below. In other case HWSIM cfg will be used.
#
setup_params = {"setup_hw" : "./tests/setup_hw.sh",
"hostapd" : "./tests/hostapd",
"wpa_supplicant" : "./tests/wpa_supplicant",
"iperf" : "iperf",
"wlantest" : "./tests/wlantest",
"wlantest_cli" : "./tests/wlantest_cli",
"country" : "US",
"log_dir" : "/tmp/",
"ipv4_test_net" : "192.168.12.0",
"trace_start" : "./tests/trace_start.sh",
"trace_stop" : "./tests/trace_stop.sh",
"perf_start" : "./tests/perf_start.sh",
"perf_stop" : "./tests/perf_stop.sh"}
#
#devices = [{"hostname": "192.168.254.58", "ifname" : "wlan0", "port": "9877", "name" : "t2-ath9k", "flags" : "AP_HT40 STA_HT40"},
# {"hostname": "192.168.254.58", "ifname" : "wlan1", "port": "9877", "name" : "t2-ath10k", "flags" : "AP_VHT80"},
# {"hostname": "192.168.254.58", "ifname" : "wlan3", "port": "9877", "name" : "t2-intel7260", "flags" : "STA_VHT80"},
# {"hostname": "192.168.254.55", "ifname" : "wlan0, wlan1, wlan2", "port": "", "name" : "t3-monitor"},
# {"hostname": "192.168.254.50", "ifname" : "wlan0", "port": "9877", "name" : "t1-ath9k"},
# {"hostname": "192.168.254.50", "ifname" : "wlan1", "port": "9877", "name" : "t1-ath10k"}]
#
# HWSIM - ifaces available after modprobe mac80211_hwsim
#
devices = [{"hostname": "localhost", "ifname": "wlan0", "port": "9868", "name": "hwsim0", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan1", "port": "9878", "name": "hwsim1", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan2", "port": "9888", "name": "hwsim2", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan3", "port": "9898", "name": "hwsim3", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan4", "port": "9908", "name": "hwsim4", "flags": "AP_VHT80 STA_VHT80"}]
def get_setup_params(filename="cfg.py"):
try:
mod = __import__(filename.split(".")[0])
return mod.setup_params
except:
logger.debug("__import__(" + filename + ") failed, using static settings")
pass
return setup_params
def get_devices(filename="cfg.py"):
try:
mod = __import__(filename.split(".")[0])
return mod.devices
except:
logger.debug("__import__(" + filename + ") failed, using static settings")
pass
return devices
def get_device(devices, name=None, flags=None, lock=False):
if name is None and flags is None:
raise Exception("Failed to get device")
for device in devices:
if device['name'] == name:
return device
for device in devices:
try:
device_flags = device['flags']
if device_flags.find(flags) != -1:
return device
except:
pass
raise Exception("Failed to get device " + name)
def put_device(devices, name):
pass
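# Illustrative usage (device names/flags assumed from the tables above):
#
#   devices = get_devices()                     # cfg.py if present, else HWSIM
#   ap = get_device(devices, flags="AP_VHT80")  # first AP-capable device
#   sta = get_device(devices, name="hwsim1")    # a specific device by name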
|
s0lst1c3/eaphammer
|
local/hostapd-eaphammer/tests/remote/config.py
|
Python
|
gpl-3.0
| 3,543 | 0.011572 |
from unittest import TestCase
import dogma
from test_dogma_values import *
class TestDogmaExtra(TestCase):
def test(self):
ctx = dogma.Context()
slot = ctx.add_module(TYPE_125mmGatlingAutoCannonII)
loc = dogma.Location.module(slot)
affectors = ctx.get_affectors(loc)
ctx.set_ship(TYPE_Rifter)
affectors_with_ship = ctx.get_affectors(loc)
self.assertTrue(dogma.type_has_effect(TYPE_125mmGatlingAutoCannonII, dogma.State.ONLINE, EFFECT_HiPower))
self.assertTrue(dogma.type_has_active_effects(TYPE_125mmGatlingAutoCannonII))
self.assertTrue(dogma.type_has_overload_effects(TYPE_125mmGatlingAutoCannonII))
self.assertTrue(dogma.type_has_projectable_effects(TYPE_StasisWebifierI))
self.assertEqual(dogma.type_base_attribute(TYPE_Rifter, ATT_LauncherSlotsLeft), 2)
ctx.add_charge(slot, TYPE_BarrageS)
self.assertEqual(ctx.get_number_of_module_cycles_before_reload(slot), 200)
effect = dogma.get_nth_type_effect_with_attributes(TYPE_125mmGatlingAutoCannonII, 0)
(duration, tracking, discharge, att_range, falloff, usagechance,
) = ctx.get_location_effect_attributes(loc, effect)
self.assertEqual(falloff, 7500)
self.assertEqual(att_range, 1200)
self.assertEqual(discharge, 0)
capacitors = ctx.get_capacitor_all(False)
self.assertEqual(len(capacitors), 1)
self.assertIn(ctx, capacitors)
|
jboning/python-dogma
|
test_dogma_extra.py
|
Python
|
agpl-3.0
| 1,477 | 0.00677 |
import datetime
from django.contrib.gis.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.core import exceptions
from django.db.models import Q
from django.conf import settings
from django_date_extensions.fields import ApproximateDateField, ApproximateDate
from markitup.fields import MarkupField
from popit.models import ModelBase, date_help_text, Person, Organisation, DataKey, Data
class PositionCategory(ModelBase):
#category_choices = (
# ('political', 'Political'),
# ('education', 'Education (as a learner)'),
# ('other', 'Anything else'),
#)
category = models.CharField(max_length=100)
class Meta:
ordering = [ 'category' ]
app_label = 'popit'
def __unicode__(self):
return self.category
class PositionType(ModelBase):
name = models.CharField(max_length=100)
slug = models.SlugField()
summary = MarkupField(blank=True, default='')
requires_place = models.BooleanField(default=False, help_text="Does this job type require a place to complete the position?")
organisation = models.ForeignKey(Organisation, null=True, blank=True)
category = models.ForeignKey(PositionCategory, null=True, blank=True, help_text="What sort of position is this?")
class Meta:
ordering = [ "name" ]
app_label = 'popit'
def __unicode__(self):
if self.organisation:
return u'%s (%s)' % (self.name, self.organisation)
return self.name
# @models.permalink
# def get_absolute_url(self):
# return ( 'position', [ self.slug ] )
#
# def organisations(self):
# """
# Return a qs of organisations, with the most frequently related first.
#
# Each organisation is also annotated with 'position_count' which might be
# useful.
#
# This is intended as an alternative to assigning a org to each
# position_title. Instead we can deduce it from the postions.
# """
#
# orgs = (
# Organisation
# .objects
# .filter(position__title=self)
# .annotate( position_count=models.Count('position') )
# .order_by( '-position_count' )
# )
#
# return orgs
class Position(ModelBase):
person = models.ForeignKey(Person)
organisation = models.ForeignKey(Organisation, null=True, blank=True)
type = models.ForeignKey(PositionType, null=True, blank=True)
title = models.CharField(max_length=200, blank=True, default='')
    # XXX: making this conditional field definition work with South is presumably tricky
if 'mapit' in settings.INSTALLED_APPS:
place = models.ForeignKey('Place', null=True, blank=True, help_text="use if needed to identify the position - eg add constituency for an 'MP'" )
else:
place = models.CharField(max_length=100, blank=True, help_text="use if needed to identify the position - eg add constituency for an 'MP'")
note = models.CharField(max_length=300, blank=True, default='')
start_date = ApproximateDateField(blank=True, help_text=date_help_text)
end_date = ApproximateDateField(blank=True, help_text=date_help_text, default="future")
# Two hidden fields that are only used to do sorting. Filled in by code.
sorting_start_date = models.CharField(editable=True, default='', max_length=10)
sorting_end_date = models.CharField(editable=True, default='', max_length=10)
def __unicode__(self):
if self.organisation:
organisation = self.organisation.name
elif self.type and self.type.organisation:
organisation = self.type.organisation.name
else:
organisation = 'Unknown'
if self.title and self.type:
title = u'%s (%s)' % (self.title, self.type)
elif self.type:
title = self.type
else:
title = self.title or 'Unknown'
if self.place:
place = '(%s)' % self.place
else:
place = ''
out = "%s's position as %s %s at %s (%s-%s)" % ( self.person.name, title, self.place, organisation, self.start_date, self.end_date)
return out
class Meta:
app_label = 'popit'
ordering = ['-sorting_end_date', '-sorting_start_date']
def clean(self):
if not (self.organisation or self.title or self.type):
raise exceptions.ValidationError('Must have at least one of organisation, title or type.')
if self.type and self.type.requires_place and not self.place:
raise exceptions.ValidationError( "The job type '%s' requires a place to be set" % self.type.name )
def display_dates(self):
"""Nice HTML for the display of dates"""
# no dates
if not (self.start_date or self.end_date):
return ''
# start but no end
if self.start_date and not self.end_date:
return "Started %s" % self.start_date
# both dates
if self.start_date and self.end_date:
if self.end_date.future:
return "Started %s" % ( self.start_date )
else:
return "%s → %s" % ( self.start_date, self.end_date )
# end but no start
if not self.start_date and self.end_date:
return 'ongoing'
def display_start_date(self):
"""Return text that represents the start date"""
if self.start_date:
return str(self.start_date)
return '?'
def display_end_date(self):
"""Return text that represents the end date"""
if self.end_date:
return str(self.end_date)
return '?'
def is_ongoing(self):
"""Return True or False for whether the position is currently ongoing"""
if not self.end_date:
return False
elif self.end_date.future:
return True
else:
# turn today's date into an ApproximateDate object and cmp to that
now = datetime.date.today()
now_approx = ApproximateDate(year=now.year, month=now.month, day=now.day )
return now_approx <= self.end_date
def has_known_dates(self):
"""Is there at least one known (not future) date?"""
return (self.start_date and not self.start_date.future) or (self.end_date and not self.end_date.future)
def _set_sorting_dates(self):
"""Set the sorting dates from the actual dates (does not call save())"""
# value can be yyyy-mm-dd, future or None
start = repr( self.start_date ) if self.start_date else ''
end = repr( self.end_date ) if self.end_date else ''
# set the value or default to something sane
sorting_start_date = start or '0000-00-00'
sorting_end_date = end or start or '0000-00-00'
# To make the sorting consistent special case some parts
if not end and start == 'future':
sorting_start_date = 'a-future' # come after 'future'
self.sorting_start_date = sorting_start_date
self.sorting_end_date = sorting_end_date
return True
def save(self, *args, **kwargs):
self._set_sorting_dates()
super(Position, self).save(*args, **kwargs)
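    # Hedged illustration of the sorting-date mapping above (hypothetical
    # values; '0000-00-00' sorts last under the descending Meta.ordering):
    #   start='2003-05-01', end=''  ->  ('2003-05-01', '2003-05-01')
    #   start='',           end=''  ->  ('0000-00-00', '0000-00-00')
    #   start='future',     end=''  ->  ('a-future',   'future')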
class PositionDataKey(DataKey):
class Meta:
app_label = 'popit'
class PositionData(Data):
person = models.ForeignKey(Position, related_name='data')
key = models.ForeignKey(PositionDataKey, related_name='values')
class Meta:
app_label = 'popit'
verbose_name_plural = 'position data'
|
mysociety/polipop
|
polipop/popit/models/positions.py
|
Python
|
agpl-3.0
| 7,783 | 0.010022 |
a = int(input())
s = "odd"
s1 = "even"
for i in range(1, a):
    if i % 2 == 0:
        print(str(i) + " " + s1)
    else:
        print(str(i) + " " + s)
|
alekseik1/python_mipt_study_1-2
|
1sem/lesson_1/1.py
|
Python
|
gpl-3.0
| 138 | 0.043478 |
import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
def get_jinja_env(config):
"""
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
with open(filename, 'r') as f:
return json.load(f)
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
kwargs['aws_access_key_id'] = aws_access_key
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host='mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
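# Hedged usage sketch (assumes real AWS credentials; the sandbox host avoids
# live charges):
#   conn = get_mturk_connection(sandbox=True,
#                               aws_access_key='AKIA...',
#                               aws_secret_key='...')
#   print(conn.get_account_balance())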
def setup_qualifications(hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual
|
choltz95/story-understanding-amt
|
simpleamt.py
|
Python
|
mit
| 2,932 | 0.011596 |
"""AutoComplete.py - An IDLE extension for automatically completing names.
This extension can complete either attribute names of file names. It can pop
a window with all available names, for the user to select from.
"""
import os
import sys
import string
from configHandler import idleConf
import AutoCompleteWindow
from HyperParser import HyperParser
import __main__
# This string includes all chars that may be in a file name (without a path
# separator)
FILENAME_CHARS = string.ascii_letters + string.digits + os.curdir + "._~#$:-"
# This string includes all chars that may be in an identifier
ID_CHARS = string.ascii_letters + string.digits + "_"
# These constants represent the two different types of completions
COMPLETE_ATTRIBUTES, COMPLETE_FILES = range(1, 2+1)
SEPS = os.sep
if os.altsep: # e.g. '/' on Windows...
SEPS += os.altsep
class AutoComplete:
menudefs = [
('edit', [
("Show Completions", "<<force-open-completions>>"),
])
]
popupwait = idleConf.GetOption("extensions", "AutoComplete",
"popupwait", type="int", default=0)
def __init__(self, editwin=None):
self.editwin = editwin
if editwin is None: # subprocess and test
return
self.text = editwin.text
self.autocompletewindow = None
# id of delayed call, and the index of the text insert when the delayed
# call was issued. If _delayed_completion_id is None, there is no
# delayed call.
self._delayed_completion_id = None
self._delayed_completion_index = None
def _make_autocomplete_window(self):
return AutoCompleteWindow.AutoCompleteWindow(self.text)
def _remove_autocomplete_window(self, event=None):
if self.autocompletewindow:
self.autocompletewindow.hide_window()
self.autocompletewindow = None
def force_open_completions_event(self, event):
"""Happens when the user really wants to open a completion list, even
if a function call is needed.
"""
self.open_completions(True, False, True)
def try_open_completions_event(self, event):
"""Happens when it would be nice to open a completion list, but not
        really necessary, for example after a dot, so function
calls won't be made.
"""
lastchar = self.text.get("insert-1c")
if lastchar == ".":
self._open_completions_later(False, False, False,
COMPLETE_ATTRIBUTES)
elif lastchar in SEPS:
self._open_completions_later(False, False, False,
COMPLETE_FILES)
def autocomplete_event(self, event):
"""Happens when the user wants to complete his word, and if neccesary,
open a completion list after that (if there is more than one
completion)
"""
if hasattr(event, "mc_state") and event.mc_state:
# A modifier was pressed along with the tab, continue as usual.
return
if self.autocompletewindow and self.autocompletewindow.is_active():
self.autocompletewindow.complete()
return "break"
else:
opened = self.open_completions(False, True, True)
if opened:
return "break"
def _open_completions_later(self, *args):
self._delayed_completion_index = self.text.index("insert")
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = \
self.text.after(self.popupwait, self._delayed_open_completions,
*args)
def _delayed_open_completions(self, *args):
self._delayed_completion_id = None
if self.text.index("insert") != self._delayed_completion_index:
return
self.open_completions(*args)
def open_completions(self, evalfuncs, complete, userWantsWin, mode=None):
"""Find the completions and create the AutoCompleteWindow.
Return True if successful (no syntax error or so found).
if complete is True, then if there's nothing to complete and no
start of completion, won't open completions and return False.
If mode is given, will open a completion list only in this mode.
"""
# Cancel another delayed call, if it exists.
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = None
hp = HyperParser(self.editwin, "insert")
curline = self.text.get("insert linestart", "insert")
i = j = len(curline)
if hp.is_in_string() and (not mode or mode==COMPLETE_FILES):
self._remove_autocomplete_window()
mode = COMPLETE_FILES
while i and curline[i-1] in FILENAME_CHARS:
i -= 1
comp_start = curline[i:j]
j = i
while i and curline[i-1] in FILENAME_CHARS + SEPS:
i -= 1
comp_what = curline[i:j]
elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES):
self._remove_autocomplete_window()
mode = COMPLETE_ATTRIBUTES
while i and curline[i-1] in ID_CHARS:
i -= 1
comp_start = curline[i:j]
if i and curline[i-1] == '.':
hp.set_index("insert-%dc" % (len(curline)-(i-1)))
comp_what = hp.get_expression()
if not comp_what or \
(not evalfuncs and comp_what.find('(') != -1):
return
else:
comp_what = ""
else:
return
if complete and not comp_what and not comp_start:
return
comp_lists = self.fetch_completions(comp_what, mode)
if not comp_lists[0]:
return
self.autocompletewindow = self._make_autocomplete_window()
self.autocompletewindow.show_window(comp_lists,
"insert-%dc" % len(comp_start),
complete,
mode,
userWantsWin)
return True
def fetch_completions(self, what, mode):
"""Return a pair of lists of completions for something. The first list
is a sublist of the second. Both are sorted.
If there is a Python subprocess, get the comp. list there. Otherwise,
either fetch_completions() is running in the subprocess itself or it
was called in an IDLE EditorWindow before any script had been run.
The subprocess environment is that of the most recently run script. If
two unrelated modules are being edited some calltips in the current
module may be inoperative if the module was not the last to run.
"""
try:
rpcclt = self.editwin.flist.pyshell.interp.rpcclt
except:
rpcclt = None
if rpcclt:
return rpcclt.remotecall("exec", "get_the_completion_list",
(what, mode), {})
else:
if mode == COMPLETE_ATTRIBUTES:
if what == "":
namespace = __main__.__dict__.copy()
namespace.update(__main__.__builtins__.__dict__)
bigl = eval("dir()", namespace)
bigl.sort()
if "__all__" in bigl:
smalll = eval("__all__", namespace)
smalll.sort()
else:
smalll = filter(lambda s: s[:1] != '_', bigl)
else:
try:
entity = self.get_entity(what)
bigl = dir(entity)
bigl.sort()
if "__all__" in bigl:
smalll = entity.__all__
smalll.sort()
else:
smalll = filter(lambda s: s[:1] != '_', bigl)
except:
return [], []
elif mode == COMPLETE_FILES:
if what == "":
what = "."
try:
expandedpath = os.path.expanduser(what)
bigl = os.listdir(expandedpath)
bigl.sort()
smalll = filter(lambda s: s[:1] != '.', bigl)
except OSError:
return [], []
if not smalll:
smalll = bigl
return smalll, bigl
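    # Hedged example: in attribute mode, fetch_completions("os",
    # COMPLETE_ATTRIBUTES) returns two sorted name lists, e.g.
    # (['getcwd', 'listdir', ...], ['__doc__', 'getcwd', 'listdir', ...]);
    # the first hides underscore-prefixed names unless __all__ is defined.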
def get_entity(self, name):
"""Lookup name in a namespace spanning sys.modules and __main.dict__"""
namespace = sys.modules.copy()
namespace.update(__main__.__dict__)
return eval(name, namespace)
|
leighpauls/k2cro4
|
third_party/python_26/Lib/idlelib/AutoComplete.py
|
Python
|
bsd-3-clause
| 9,041 | 0.000553 |
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
import re
from threading import Event
from copy import copy
from PyQt4.QtCore import QTimer, SIGNAL, QObject, QString, QSize, QVariant, QMutex, Qt
from PyQt4.QtGui import QMainWindow, QApplication, QStyledItemDelegate, \
QStyleOptionViewItemV4, QTextDocument, QStyle, \
QAbstractTextDocumentLayout, QPalette, QMessageBox, \
QSpinBox, QLineEdit, QComboBox, QCheckBox, QInputDialog
from weboob.core.ouiboube import Weboob, VersionsMismatchError
from weboob.core.scheduler import IScheduler
from weboob.core.repositories import ModuleInstallError
from weboob.tools.config.iconfig import ConfigError
from weboob.tools.browser import BrowserUnavailable, BrowserIncorrectPassword, BrowserForbidden
from weboob.tools.value import ValueInt, ValueBool, ValueBackendPassword
from weboob.tools.misc import to_unicode
from weboob.capabilities import UserError
from ..base import BaseApplication, MoreResultsAvailable
__all__ = ['QtApplication', 'QtMainWindow', 'QtDo', 'HTMLDelegate']
class QtScheduler(IScheduler):
def __init__(self, app):
self.app = app
self.count = 0
self.timers = {}
def schedule(self, interval, function, *args):
timer = QTimer()
timer.setInterval(interval * 1000)
timer.setSingleShot(True)
count = self.count
self.count += 1
timer.start()
self.app.connect(timer, SIGNAL("timeout()"), lambda: self.timeout(count, None, function, *args))
self.timers[count] = timer
def repeat(self, interval, function, *args):
timer = QTimer()
timer.setSingleShot(False)
count = self.count
self.count += 1
timer.start(0)
self.app.connect(timer, SIGNAL("timeout()"), lambda: self.timeout(count, interval, function, *args))
self.timers[count] = timer
def timeout(self, _id, interval, function, *args):
function(*args)
if interval is None:
self.timers.pop(_id)
else:
self.timers[_id].setInterval(interval * 1000)
def want_stop(self):
self.app.quit()
def run(self):
self.app.exec_()
class QCallbacksManager(QObject):
class Request(object):
def __init__(self):
self.event = Event()
self.answer = None
def __call__(self):
raise NotImplementedError()
class LoginRequest(Request):
def __init__(self, backend_name, value):
QCallbacksManager.Request.__init__(self)
self.backend_name = backend_name
self.value = value
def __call__(self):
password, ok = QInputDialog.getText(None,
'%s request' % self.value.label,
'Please enter %s for %s' % (self.value.label,
self.backend_name),
QLineEdit.Password)
return password
def __init__(self, weboob, parent=None):
QObject.__init__(self, parent)
self.weboob = weboob
self.weboob.callbacks['login'] = self.callback(self.LoginRequest)
self.mutex = QMutex()
self.requests = []
self.connect(self, SIGNAL('new_request'), self.do_request)
def callback(self, klass):
def cb(*args, **kwargs):
return self.add_request(klass(*args, **kwargs))
return cb
def do_request(self):
self.mutex.lock()
request = self.requests.pop()
request.answer = request()
request.event.set()
self.mutex.unlock()
def add_request(self, request):
self.mutex.lock()
self.requests.append(request)
self.mutex.unlock()
self.emit(SIGNAL('new_request'))
request.event.wait()
return request.answer
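    # Hedged flow sketch: a backend thread hitting the 'login' callback blocks
    # in add_request() on request.event, while the Qt main thread receives the
    # 'new_request' signal, pops the request in do_request(), shows the
    # QInputDialog and sets the answer before releasing the waiting thread.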
class QtApplication(QApplication, BaseApplication):
def __init__(self):
QApplication.__init__(self, sys.argv)
self.setApplicationName(self.APPNAME)
BaseApplication.__init__(self)
self.cbmanager = QCallbacksManager(self.weboob, self)
def create_weboob(self):
return Weboob(scheduler=QtScheduler(self))
def load_backends(self, *args, **kwargs):
while True:
try:
return BaseApplication.load_backends(self, *args, **kwargs)
except VersionsMismatchError as e:
msg = 'Versions of modules mismatch with version of weboob.'
except ConfigError as e:
msg = unicode(e)
res = QMessageBox.question(None, 'Configuration error', u'%s\n\nDo you want to update repositories?' % msg, QMessageBox.Yes|QMessageBox.No)
if res == QMessageBox.No:
raise e
# Do not import it globally, it causes circular imports
from .backendcfg import ProgressDialog
pd = ProgressDialog('Update of repositories', "Cancel", 0, 100)
pd.setWindowModality(Qt.WindowModal)
try:
self.weboob.update(pd)
except ModuleInstallError as err:
QMessageBox.critical(None, self.tr('Update error'),
unicode(self.tr('Unable to update repositories: %s' % err)),
QMessageBox.Ok)
pd.setValue(100)
QMessageBox.information(None, self.tr('Update of repositories'),
self.tr('Repositories updated!'), QMessageBox.Ok)
class QtMainWindow(QMainWindow):
def __init__(self, parent=None):
QMainWindow.__init__(self, parent)
class QtDo(QObject):
def __init__(self, weboob, cb, eb=None):
QObject.__init__(self)
if not eb:
eb = self.default_eb
self.weboob = weboob
self.process = None
self.cb = cb
self.eb = eb
self.connect(self, SIGNAL('cb'), self.local_cb)
self.connect(self, SIGNAL('eb'), self.local_eb)
def do(self, *args, **kwargs):
self.process = self.weboob.do(*args, **kwargs)
self.process.callback_thread(self.thread_cb, self.thread_eb)
def default_eb(self, backend, error, backtrace):
if isinstance(error, MoreResultsAvailable):
# This is not an error, ignore.
return
msg = unicode(error)
if isinstance(error, BrowserIncorrectPassword):
if not msg:
msg = 'Invalid login/password.'
elif isinstance(error, BrowserUnavailable):
if not msg:
msg = 'Website is unavailable.'
elif isinstance(error, BrowserForbidden):
if not msg:
msg = 'This action is forbidden.'
elif isinstance(error, NotImplementedError):
msg = u'This feature is not supported by this backend.\n\n' \
u'To help the maintainer of this backend implement this feature, please contact: %s <%s>' % (backend.MAINTAINER, backend.EMAIL)
elif isinstance(error, UserError):
if not msg:
msg = type(error).__name__
elif logging.root.level == logging.DEBUG:
msg += u'<br />'
ul_opened = False
for line in backtrace.split('\n'):
m = re.match(' File (.*)', line)
if m:
if not ul_opened:
msg += u'<ul>'
ul_opened = True
else:
msg += u'</li>'
msg += u'<li><b>%s</b>' % m.group(1)
else:
msg += u'<br />%s' % to_unicode(line)
if ul_opened:
msg += u'</li></ul>'
print >>sys.stderr, error
print >>sys.stderr, backtrace
QMessageBox.critical(None, unicode(self.tr('Error with backend %s')) % backend.name,
msg, QMessageBox.Ok)
def local_cb(self, backend, data):
self.cb(backend, data)
if not backend:
self.disconnect(self, SIGNAL('cb'), self.local_cb)
self.disconnect(self, SIGNAL('eb'), self.local_eb)
self.process = None
def local_eb(self, backend, error, backtrace):
self.eb(backend, error, backtrace)
def thread_cb(self, backend, data):
self.emit(SIGNAL('cb'), backend, data)
def thread_eb(self, backend, error, backtrace):
self.emit(SIGNAL('eb'), backend, error, backtrace)
class HTMLDelegate(QStyledItemDelegate):
def paint(self, painter, option, index):
optionV4 = QStyleOptionViewItemV4(option)
self.initStyleOption(optionV4, index)
style = optionV4.widget.style() if optionV4.widget else QApplication.style()
doc = QTextDocument()
doc.setHtml(optionV4.text)
# painting item without text
optionV4.text = QString()
style.drawControl(QStyle.CE_ItemViewItem, optionV4, painter)
ctx = QAbstractTextDocumentLayout.PaintContext()
# Hilight text if item is selected
if optionV4.state & QStyle.State_Selected:
ctx.palette.setColor(QPalette.Text, optionV4.palette.color(QPalette.Active, QPalette.HighlightedText))
textRect = style.subElementRect(QStyle.SE_ItemViewItemText, optionV4)
painter.save()
painter.translate(textRect.topLeft())
painter.setClipRect(textRect.translated(-textRect.topLeft()))
doc.documentLayout().draw(painter, ctx)
painter.restore()
def sizeHint(self, option, index):
optionV4 = QStyleOptionViewItemV4(option)
self.initStyleOption(optionV4, index)
doc = QTextDocument()
doc.setHtml(optionV4.text)
doc.setTextWidth(optionV4.rect.width())
return QSize(doc.idealWidth(), max(doc.size().height(), optionV4.decorationSize.height()))
class _QtValueStr(QLineEdit):
def __init__(self, value):
QLineEdit.__init__(self)
self._value = value
if value.default:
self.setText(unicode(value.default))
if value.masked:
self.setEchoMode(self.Password)
def set_value(self, value):
self._value = value
self.setText(self._value.get())
def get_value(self):
self._value.set(unicode(self.text()))
return self._value
class _QtValueBackendPassword(_QtValueStr):
def get_value(self):
self._value._domain = None
return _QtValueStr.get_value(self)
class _QtValueBool(QCheckBox):
def __init__(self, value):
QCheckBox.__init__(self)
self._value = value
if value.default:
self.setChecked(True)
def set_value(self, value):
self._value = value
self.setChecked(self._value.get())
def get_value(self):
self._value.set(self.isChecked())
return self._value
class _QtValueInt(QSpinBox):
def __init__(self, value):
QSpinBox.__init__(self)
self._value = value
if value.default:
self.setValue(int(value.default))
def set_value(self, value):
self._value = value
self.setValue(self._value.get())
def get_value(self):
self._value.set(self.getValue())
return self._value
class _QtValueChoices(QComboBox):
def __init__(self, value):
QComboBox.__init__(self)
self._value = value
for k, l in value.choices.iteritems():
self.addItem(l, QVariant(k))
if value.default == k:
self.setCurrentIndex(self.count()-1)
def set_value(self, value):
self._value = value
for i in xrange(self.count()):
if unicode(self.itemData(i).toString()) == self._value.get():
self.setCurrentIndex(i)
return
def get_value(self):
self._value.set(unicode(self.itemData(self.currentIndex()).toString()))
return self._value
def QtValue(value):
if isinstance(value, ValueBool):
klass = _QtValueBool
elif isinstance(value, ValueInt):
klass = _QtValueInt
elif isinstance(value, ValueBackendPassword):
klass = _QtValueBackendPassword
elif value.choices is not None:
klass = _QtValueChoices
else:
klass = _QtValueStr
return klass(copy(value))
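# Hedged usage sketch (assumes weboob's Value(...) accepts an id plus
# label/default keyword arguments; illustrative only):
#   from weboob.tools.value import Value
#   widget = QtValue(Value('login', label='Login', default='me'))
#   widget.get_value()  # -> the edited Value, via _QtValueStr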
|
blckshrk/Weboob
|
weboob/tools/application/qt/qt.py
|
Python
|
agpl-3.0
| 13,100 | 0.001221 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Look if qt is installed, and try out all builders.
"""
import os
import sys
import TestSCons
test = TestSCons.TestSCons()
if not os.environ.get('QTDIR', None):
x ="External environment variable $QTDIR not set; skipping test(s).\n"
test.skip_test(x)
test.Qt_dummy_installation()
QTDIR=os.environ['QTDIR']
test.write('SConstruct', """\
import os
dummy_env = Environment()
ENV = dummy_env['ENV']
try:
PATH=ARGUMENTS['PATH']
if 'PATH' in ENV:
ENV_PATH = PATH + os.pathsep + ENV['PATH']
else:
Exit(0) # this is certainly a weird system :-)
except KeyError:
ENV_PATH=ENV.get('PATH', '')
env = Environment(tools=['default','qt'],
ENV={'PATH':ENV_PATH,
'PATHEXT':os.environ.get('PATHEXT'),
'HOME':os.getcwd(),
'SystemRoot':ENV.get('SystemRoot')},
# moc / uic want to write stuff in ~/.qt
CXXFILESUFFIX=".cpp")
conf = env.Configure()
if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0):
conf.env['QT_LIB'] = 'qt-mt'
if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0):
Exit(0)
env = conf.Finish()
VariantDir('bld', '.')
env.Program('bld/test_realqt', ['bld/mocFromCpp.cpp',
'bld/mocFromH.cpp',
'bld/anUiFile.ui',
'bld/main.cpp'])
""")
test.write('mocFromCpp.h', """\
void mocFromCpp();
""")
test.write('mocFromCpp.cpp', """\
#include <qobject.h>
#include "mocFromCpp.h"
class MyClass1 : public QObject {
Q_OBJECT
public:
MyClass1() : QObject() {};
public slots:
void myslot() {};
};
void mocFromCpp() {
MyClass1 myclass;
}
#include "mocFromCpp.moc"
""")
test.write('mocFromH.h', """\
#include <qobject.h>
class MyClass2 : public QObject {
Q_OBJECT;
public:
MyClass2();
public slots:
void myslot();
};
void mocFromH();
""")
test.write('mocFromH.cpp', """\
#include "mocFromH.h"
MyClass2::MyClass2() : QObject() {}
void MyClass2::myslot() {}
void mocFromH() {
MyClass2 myclass;
}
""")
test.write('anUiFile.ui', """\
<!DOCTYPE UI><UI>
<class>MyWidget</class>
<widget>
<class>QWidget</class>
<property name="name">
<cstring>MyWidget</cstring>
</property>
<property name="caption">
<string>MyWidget</string>
</property>
</widget>
<includes>
<include location="local" impldecl="in implementation">anUiFile.ui.h</include>
</includes>
<slots>
<slot>testSlot()</slot>
</slots>
<layoutdefaults spacing="6" margin="11"/>
</UI>
""")
test.write('anUiFile.ui.h', r"""
#include <stdio.h>
#if QT_VERSION >= 0x030100
void MyWidget::testSlot()
{
printf("Hello World\n");
}
#endif
""")
test.write('main.cpp', r"""
#include <qapp.h>
#include "mocFromCpp.h"
#include "mocFromH.h"
#include "anUiFile.h"
#include <stdio.h>
int main(int argc, char **argv) {
QApplication app(argc, argv);
mocFromCpp();
mocFromH();
MyWidget mywidget;
#if QT_VERSION >= 0x030100
mywidget.testSlot();
#else
printf("Hello World\n");
#endif
return 0;
}
""")
test.run(arguments="bld/test_realqt" + TestSCons._exe)
test.run(program=test.workpath("bld", "test_realqt"),
stdout=None,
status=None,
stderr=None)
if test.stdout() != "Hello World\n" or test.stderr() != '' or test.status:
sys.stdout.write(test.stdout())
sys.stderr.write(test.stderr())
# The test might be run on a system that doesn't have an X server
# running, or may be run by an ID that can't connect to the server.
# If so, then print whatever it showed us (which is in and of itself
# an indication that it built correctly) but don't fail the test.
expect = 'cannot connect to X server'
test.fail_test(test.stdout())
test.fail_test(test.stderr().find(expect) == -1)
if test.status != 1 and (test.status>>8) != 1:
sys.stdout.write('test_realqt returned status %s\n' % test.status)
test.fail_test()
QTDIR = os.environ['QTDIR']
PATH = os.environ['PATH']
os.environ['QTDIR']=''
os.environ['PATH']='.'
test.run(stderr=None, arguments="-c bld/test_realqt" + TestSCons._exe)
expect1 = "scons: warning: Could not detect qt, using empty QTDIR"
expect2 = "scons: warning: Could not detect qt, using moc executable as a hint"
test.fail_test(test.stderr().find(expect1) == -1 and
test.stderr().find(expect2) == -1)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
timj/scons
|
test/QT/installed.py
|
Python
|
mit
| 5,726 | 0.001921 |
"""
Linux on Hyper-V and Azure Test Code, ver. 1.0.0
Copyright (c) Microsoft Corporation
All rights reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
See the Apache Version 2.0 License for specific language governing
permissions and limitations under the License.
"""
from __future__ import print_function
import logging
import re
import os
import sys
import csv
import fileinput
import zipfile
import shutil
import decimal
try:
import xml.etree.cElementTree as ElementTree
except ImportError:
import xml.etree.ElementTree as ElementTree
logger = logging.getLogger(__name__)
class ParseXML(object):
"""Class used to parse a specific xml test suite file
"""
def __init__(self, file_path):
self.tree = ElementTree.ElementTree(file=file_path)
self.root = self.tree.getroot()
def get_tests_suite(self):
return self.root.find('testSuites').getchildren()[0]\
.find('suiteName').text
def get_tests(self):
"""Iterates through the xml file looking for <test> sections
and initializes a dict for every test case returning them in
the end
Dict structure:
{ 'testName' : {} }
"""
tests_dict = dict()
for test in self.root.iter('suiteTest'):
tests_dict[test.text.lower()] = dict()
for test_case in self.root.iter('test'):
# Check if testCase was not commented out
if test_case.find('testName').text.lower() == \
test.text.lower():
logger.debug('Getting test details for - %s', test.text)
tests_dict[test.text.lower()] = \
self.get_test_details(test_case)
return tests_dict
@staticmethod
def get_test_details(test_root):
"""Gets and an XML object and iterates through it
parsing the test details into a dictionary
Dict structure:
{ 'testProperty' : [ value(s) ] }
"""
test_dict = dict()
for test_property in test_root.getchildren():
if test_property.tag == 'testName':
continue
elif not test_property.getchildren() and test_property.text:
test_dict[test_property.tag.lower()] = \
test_property.text.strip().split()
else:
test_dict[test_property.tag.lower()] = list()
for item in test_property.getchildren():
if test_property.tag.lower() == 'testparams':
parameter = item.text.split('=')
test_dict[test_property.tag.lower()].append(
(parameter[0], parameter[1])
)
else:
test_dict[test_property.tag.lower()].append(item.text)
return test_dict
def get_vms(self):
"""Method searches for the 'vm' sections in the XML file
saving a dict for each vm found.
Dict structure:
{
vm_name: { vm_details }
}
"""
vm_dict = dict()
for machine in self.root.iter('vm'):
vm_dict[machine.find('vmName').text.lower()] = {
'hvServer': machine.find('hvServer').text.lower(),
'os': machine.find('os').text.lower()
}
return vm_dict
# TODO(bogdancarpusor): Narrow exception field
@staticmethod
def parse_from_string(xml_string):
"""Static method that parses xml content from a string
The method is used to parse the output of the PS command
that is sent to the vm in order to get more details
It returns a dict with the following structure:
{
vm_property: value
}
"""
try:
logger.debug('Converting XML string from KVP Command')
root = ElementTree.fromstring(xml_string.strip())
prop_name = ''
prop_value = ''
for child in root:
if child.attrib['NAME'] == 'Name':
prop_name = child[0].text
elif child.attrib['NAME'] == 'Data':
prop_value = child[0].text
return prop_name, prop_value
except RuntimeError:
logger.error('Failed to parse XML string,', exc_info=True)
logger.info('Terminating execution')
sys.exit(0)
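# Hedged example of the KVP XML that parse_from_string() expects (layout
# assumed from the attribute/child access above; illustrative only):
#   <INSTANCE>
#     <PROPERTY NAME="Name"><VALUE>OSMajorVersion</VALUE></PROPERTY>
#     <PROPERTY NAME="Data"><VALUE>7</VALUE></PROPERTY>
#   </INSTANCE>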
def parse_ica_log(log_path):
""" Parser for the generated log file after a lisa run - ica.log
The method iterates until the start of the test outcome section. After that
it searches, using regex, for predefined fields and saves them in a
dict structure.
    :param log_path: path to the ica.log file generated after a lisa run
    :return: <dict> with timestamp, vms and tests details
"""
logger.debug(
'Iterating through %s file until the test results part', log_path
)
parsed_ica = dict()
parsed_ica['vms'] = dict()
parsed_ica['tests'] = dict()
with open(log_path, 'r') as log_file:
for line in log_file:
if line.strip() == 'Test Results Summary':
break
# Get timestamp
parsed_ica['timestamp'] = re.search('([0-9/]+) ([0-9:]+)',
log_file.next()).group(0)
vm_name = ""
for line in log_file:
line = line.strip().lower()
logger.debug('Parsing line %s', line)
if re.search("^vm:", line) and len(line.split()) == 2:
vm_name = line.split()[1]
parsed_ica['vms'][vm_name] = dict()
# Check if there are any details about the VM
try:
parsed_ica['vms'][vm_name]['TestLocation'] = 'Hyper-V'
except KeyError:
parsed_ica['vms'][vm_name] = dict()
parsed_ica['vms'][vm_name]['TestLocation'] = 'Azure'
elif re.search('^test', line) and \
re.search('(passed$|failed$|aborted$|skipped$)', line):
test = line.split()
try:
parsed_ica['tests'][test[1].lower()] = (vm_name, test[3])
except KeyError:
logging.debug('Test %s was not listed in Test Suites '
'section.It will be ignored from the final'
'results', test)
elif re.search('^os', line):
parsed_ica['vms'][vm_name]['hostOS'] = line.split(':')[1]\
.strip()
elif re.search('^server', line):
parsed_ica['vms'][vm_name]['hvServer'] = line.split(':')[1]\
.strip()
elif re.search('^logs can be found at', line):
parsed_ica['logPath'] = line.split()[-1]
elif re.search('^lis version', line):
parsed_ica['lisVersion'] = line.split(':')[1].strip()
return parsed_ica
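# Hedged shape of parse_ica_log()'s return value (illustrative values):
#   {'timestamp': '05/20/2018 10:00:00',
#    'vms': {'vm1': {'TestLocation': 'Hyper-V', 'hostOS': '...'}},
#    'tests': {'testname': ('vm1', 'passed')},
#    'logPath': '/path/to/logs', 'lisVersion': '4.x'}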
def parse_from_csv(csv_path):
"""
Strip and read csv file into a dict data type.
:param csv_path: csv file path
:return: <list of dict> e.g. [{'t_col1': 'val1',
't_col2': 'val2',
...
},
...]
None - on error
"""
# python [2.7.10, 3.0) does not support context manager for fileinput
# strip csv of empty spaces or tabs
f_csv = fileinput.input(csv_path, inplace=True)
for line in f_csv:
# redirect std to file write
print(' '.join(line.split()))
f_csv.close()
list_csv_dict = []
with open(csv_path, 'rb') as fl:
try:
csv_dialect = csv.Sniffer().sniff(fl.read(), delimiters=";, ")
except Exception as e:
logger.error('Error reading csv file {}: {}'.format(csv_path, e))
return None
fl.seek(0)
reader = csv.DictReader(fl, dialect=csv_dialect)
for csv_dict in reader:
list_csv_dict.append(csv_dict)
return list_csv_dict
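# Hedged usage sketch against the eth_report.log files consumed below:
#   rows = parse_from_csv('eth_report.log')
#   # -> e.g. [{'#test_connections': '1', 'average_packet_size': '1.4', ...}]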
class BaseLogsReader(object):
"""
Base class for collecting data from multiple log files
"""
def __init__(self, log_path):
"""
Init Base logger.
:param log_path: Path containing zipped logs.
"""
self.cleanup = False
self.log_path = self.process_log_path(log_path)
self.headers = None
self.log_matcher = None
self.log_base_path = log_path
def process_log_path(self, log_path):
"""
Detect if log_path is a zip, then unzip it and return log's location.
:param log_path:
:return: log location - if the log_path is not a zip
unzipped location - if log_path is a zip
list of zipped logs - if log_path contains the zipped logs
"""
if zipfile.is_zipfile(log_path):
dir_path = os.path.dirname(os.path.abspath(log_path))
# extracting zip to current path
# it is required that all logs are zipped in a folder
with zipfile.ZipFile(log_path, 'r') as z:
if any('/' in fis for fis in z.namelist()):
unzip_folder = z.namelist()[0].split('/')[0]
else:
unzip_folder = ''
z.extractall(dir_path)
if unzip_folder:
self.cleanup = True
return os.path.join(dir_path, unzip_folder)
elif any(zipfile.is_zipfile(os.path.join(log_path, z))
for z in os.listdir(log_path)):
zip_list = []
for z in os.listdir(log_path):
zip_file_path = os.path.join(log_path, z)
if zipfile.is_zipfile(zip_file_path):
zip_list.append(self.process_log_path(zip_file_path))
return zip_list
else:
return log_path
def teardown(self):
"""
Cleanup files/folders created for setting up the parser.
:return: None
"""
if self.cleanup:
if isinstance(self.log_path, list):
for path in self.log_path:
shutil.rmtree(path)
else:
shutil.rmtree(self.log_path)
@staticmethod
def get_log_files(log_path):
"""
Compute and check all files from a path.
:param: log_path: path to check
:returns: List of checked files
:rtype: List or None
"""
return [os.path.join(log_path, log_name)
for log_name in os.listdir(log_path)
if os.path.isfile(os.path.join(log_path, log_name))]
def collect_data(self, f_match, log_file, log_dict):
"""
        Placeholder method for collecting data. Will be overridden in
        subclasses with the logic.
:param f_match: regex file matcher
:param log_file: log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
return log_dict
def process_logs(self):
"""
        General data collector method that parses each log file matching the
        regex filter and calls self.collect_data() for the customized logic.
:return: <list of dict> e.g. [{'t_col1': 'val1',
't_col2': 'val2',
...
},
...]
[] - on failed parsing
"""
list_log_dict = []
log_files = []
if isinstance(self.log_path, list):
for path in self.log_path:
log_files.extend(self.get_log_files(path))
else:
log_files.extend(self.get_log_files(self.log_path))
for log_file in log_files:
f_match = re.match(self.log_matcher, os.path.basename(log_file))
if not f_match:
continue
log_dict = dict.fromkeys(self.headers, '')
collected_data = self.collect_data(f_match, log_file, log_dict)
try:
if any(d for d in list_log_dict if
(d.get('BlockSize_KB', None)
and d['BlockSize_KB'] == collected_data['BlockSize_KB']
and d['QDepth'] == collected_data['QDepth'])):
for d in list_log_dict:
if d['BlockSize_KB'] == collected_data['BlockSize_KB'] \
and d['QDepth'] == collected_data['QDepth']:
for key, value in collected_data.items():
if value and not d[key]:
d[key] = value
else:
list_log_dict.append(collected_data)
except Exception as e:
print(e)
pass
self.teardown()
return list_log_dict
class NTTTCPLogsReader(BaseLogsReader):
"""
Subclass for parsing NTTTCP log files e.g.
ntttcp-pXXX.log
tcping-ntttcp-pXXX.log - avg latency
"""
# conversion units
CUNIT = {'us': 10**-3,
'ms': 1,
's': 10**3}
def __init__(self, log_path=None):
super(NTTTCPLogsReader, self).__init__(log_path)
self.headers = ['NumberOfConnections', 'Throughput_Gbps',
'AverageLatency_ms', 'PacketSize_KBytes', 'SenderCyclesPerByte',
'ReceiverCyclesPerByte', 'IPVersion', 'Protocol']
self.log_matcher = 'ntttcp-sender-p([0-9X]+).log'
self.eth_log_csv = dict()
self.__get_eth_log_csv()
def __get_eth_log_csv(self):
if isinstance(self.log_path, list):
for path in self.log_path:
self.eth_log_csv[path] = parse_from_csv(os.path.join(
path, 'eth_report.log'))
else:
self.eth_log_csv[self.log_path] = parse_from_csv(os.path.join(
self.log_path, 'eth_report.log'))
def collect_data(self, f_match, log_file, log_dict):
"""
Customized data collect for NTTTCP test case.
:param f_match: regex file matcher
:param log_file: log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
# compute the number of connections from the log name
n_conn = reduce(lambda x1, x2: int(x1) * int(x2),
f_match.group(1).split('X'))
log_dict['NumberOfConnections'] = n_conn
log_dict['Throughput_Gbps'] = 0
log_dict['SenderCyclesPerByte'] = 0
log_dict['ReceiverCyclesPerByte'] = 0
log_dict['AverageLatency_ms'] = 0
with open(log_file, 'r') as fl:
for x in fl:
if not log_dict.get('Throughput_Gbps', None):
throughput = re.match('.+INFO.+throughput.+:([0-9.]+)', x)
if throughput:
log_dict['Throughput_Gbps'] = throughput.group(1).strip()
if not log_dict.get('SenderCyclesPerByte', None):
cycle = re.match('.+cycles/byte\s*:\s*([0-9.]+)', x)
if cycle:
log_dict['SenderCyclesPerByte'] = cycle.group(1).strip()
receiver_file = os.path.join(os.path.dirname(os.path.abspath(log_file)),
'ntttcp-receiver-p{}.log'.format(f_match.group(1)))
if os.path.exists(receiver_file):
with open(receiver_file, 'r') as fl:
for x in fl:
if not log_dict.get('ReceiverCyclesPerByte', None):
cycle = re.match('.+cycles/byte\s*:\s*([0-9.]+)', x)
if cycle:
log_dict['ReceiverCyclesPerByte'] = cycle.group(1).strip()
lat_file = os.path.join(os.path.dirname(os.path.abspath(log_file)),
'lagscope-ntttcp-p{}.log'.format(f_match.group(1)))
with open(lat_file, 'r') as fl:
for x in fl:
if not log_dict.get('IPVersion', None):
ip_version = re.match('domain:.+(IPv[4,6])', x)
if ip_version:
log_dict['IPVersion'] = ip_version.group(1).strip()
if not log_dict.get('Protocol', None):
ip_proto = re.match('protocol:.+([A-Z]{3})', x)
if ip_proto:
log_dict['Protocol'] = ip_proto.group(1).strip()
latency = re.match('.+Average\s*=\s*([0-9.]+)\s*([a-z]+)', x)
if latency:
unit = latency.group(2).strip()
log_dict['AverageLatency_ms'] = \
float(latency.group(1).strip()) * self.CUNIT[unit]
avg_pkg_size = [elem['average_packet_size'] for elem in self.eth_log_csv[os.path.dirname(
os.path.abspath(log_file))]
if (int(elem['#test_connections']) == log_dict['NumberOfConnections'])]
try:
log_dict['PacketSize_KBytes'] = avg_pkg_size[0].strip()
except IndexError:
logger.warning('Could not find average_packet size in eth_report.log')
log_dict['PacketSize_KBytes'] = 0
return log_dict
class NTTTCPUDPLogsReader(BaseLogsReader):
"""
Subclass for parsing NTTTCP-UDP log files e.g.
ntttcp-pXXX.log
tcping-ntttcp-pXXX.log - avg latency
"""
# conversion units
CUNIT = {'us': 10**-3,
'ms': 1,
's': 10**3}
def __init__(self, log_path=None):
super(NTTTCPUDPLogsReader, self).__init__(log_path)
self.headers = ['NumberOfConnections', 'TxThroughput_Gbps',
'RxThroughput_Gbps', 'DatagramLoss',
'PacketSize_KBytes', 'IPVersion', 'Protocol',
'SendBufSize_KBytes']
self.eth_log_csv = dict()
self.__get_eth_log_csv()
self.log_matcher = 'ntttcp-sender-p([0-9X]+).log'
def __get_eth_log_csv(self):
if isinstance(self.log_path, list):
for path in self.log_path:
self.eth_log_csv[path] = parse_from_csv(os.path.join(
path, 'eth_report.log'))
else:
self.eth_log_csv[self.log_path] = parse_from_csv(os.path.join(
self.log_path, 'eth_report.log'))
def collect_data(self, f_match, log_file, log_dict):
"""
:param f_match: regex file matcher
:param log_file: log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
# compute the number of connections from the log name
n_conn = reduce(lambda x1, x2: int(x1) * int(x2),
f_match.group(1).split('X'))
log_dict['NumberOfConnections'] = n_conn
log_dict['SendBufSize_KBytes'] = 0
log_dict['DatagramLoss'] = 0
log_dict['PacketSize_KBytes'] = 0
log_dict['TxThroughput_Gbps'] = 0
log_dict['RxThroughput_Gbps'] = 0
log_dict['IPVersion'] = 'IPv4'
log_dict['Protocol'] = 'UDP'
log_files = [os.path.join(os.path.dirname(log_file), f)
for f in os.listdir(os.path.dirname(log_file))
if f.startswith('ntttcp-sender-')]
for log_f in log_files:
with open(log_f, 'r') as fl:
for x in fl:
if not log_dict.get('TxThroughput_Gbps', None):
throughput = re.match('.+INFO.+throughput.+:([0-9.]+)', x)
if throughput:
log_dict['TxThroughput_Gbps'] = throughput.group(1).strip()
if not log_dict.get('SenderCyclesPerByte', None):
cycle = re.match('.+cycles/byte\s*:\s*([0-9.]+)', x)
if cycle:
log_dict['SenderCyclesPerByte'] = cycle.group(1).strip()
receiver_file = os.path.join(os.path.dirname(os.path.abspath(log_file)),
'ntttcp-receiver-p{}.log'.format(f_match.group(1)))
if os.path.exists(receiver_file):
with open(receiver_file, 'r') as fl:
for x in fl:
if not log_dict.get('ReceiverCyclesPerByte', None):
cycle = re.match('.+cycles/byte\s*:\s*([0-9.]+)', x)
if cycle:
log_dict['ReceiverCyclesPerByte'] = cycle.group(1).strip()
if not log_dict.get('RxThroughput_Gbps', None):
throughput = re.match('.+INFO.+throughput.+:([0-9.]+)', x)
if throughput:
log_dict['RxThroughput_Gbps'] = throughput.group(1).strip()
lat_file = os.path.join(os.path.dirname(os.path.abspath(log_file)),
'lagscope-ntttcp-p{}.log'.format(f_match.group(1)))
with open(lat_file, 'r') as fl:
for x in fl:
if not log_dict.get('IPVersion', None):
ip_version = re.match('domain:.+(IPv[4,6])', x)
if ip_version:
log_dict['IPVersion'] = ip_version.group(1).strip()
if not log_dict.get('Protocol', None):
ip_proto = re.match('protocol:.+([A-Z]{3})', x)
if ip_proto:
log_dict['Protocol'] = ip_proto.group(1).strip()
latency = re.match('.+Average\s*=\s*([0-9.]+)\s*([a-z]+)', x)
if latency:
unit = latency.group(2).strip()
log_dict['AverageLatency_ms'] = \
float(latency.group(1).strip()) * self.CUNIT[unit]
avg_pkg_size = [elem['average_packet_size'] for elem in self.eth_log_csv[os.path.dirname(os.path.abspath(log_file))]
if (int(elem['#test_connections']) == log_dict['NumberOfConnections'])]
        ica_log = os.path.join(self.log_base_path, 'ica.log')
        with open(ica_log, 'r') as f2:
            lines = f2.readlines()
            ip_version_mark = 'v4' if log_dict['IPVersion'] == 'IPv4' else ''
            for i in xrange(0, len(lines)):
                # Get the reported packet size from ica.log (same layout as
                # IPERFLogsReader below relies on; kept as a fallback when
                # eth_report.log has no matching entry).
                ica_mark = re.match('.*Test\s*UDP_{}_{}k.*:\s*Passed'.format(
                    ip_version_mark, log_dict['SendBufSize_KBytes']),
                    lines[i])
                if ica_mark:
                    pkg_size = re.match('.*Packet\s*size:\s*([0-9.]+)',
                                        lines[i + 5])
                    if pkg_size:
                        log_dict['PacketSize_KBytes'] = float(
                            pkg_size.group(1).strip())
        try:
            log_dict['PacketSize_KBytes'] = avg_pkg_size[0].strip()
        except IndexError:
            logger.warning('Could not find average_packet size in eth_report.log')
        return log_dict
class FIOLogsReaderManual(BaseLogsReader):
"""
Subclass for parsing FIO log files e.g.
FIOLog-XXXq.log
"""
# conversion unit dict reference for latency to 'usec'
CUNIT = {'usec': 1,
'msec': 1000,
'sec': 1000000}
CSIZE = {'K': 1,
'M': 1024,
'G': 1048576}
def __init__(self, log_path=None):
super(FIOLogsReaderManual, self).__init__(log_path)
self.headers = ['rand-read:', 'rand-read: latency',
'rand-write: latency', 'seq-write: latency',
'rand-write:', 'seq-write:', 'seq-read:',
'seq-read: latency', 'QDepth', 'BlockSize_KB']
self.log_matcher = 'FIOLog-([0-9]+)q'
def collect_data(self, f_match, log_file, log_dict):
"""
Customized data collect for FIO test case.
:param f_match: regex file matcher
:param log_file: full path log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
log_dict['QDepth'] = int(f_match.group(1))
with open(log_file, 'r') as fl:
f_lines = fl.readlines()
for key in log_dict:
if not log_dict[key]:
if 'BlockSize' in key:
block_size = re.match(
'.+rw=read, bs=\s*([0-9])([A-Z])-', f_lines[0])
um = block_size.group(2).strip()
log_dict[key] = \
int(block_size.group(1).strip()) * self.CSIZE[um]
for x in range(0, len(f_lines)):
if all(markers in f_lines[x] for markers in
[key.split(':')[0], 'pid=']):
if 'latency' in key:
lat = re.match(
'\s*lat\s*\(([a-z]+)\).+avg=\s*([0-9.]+)',
f_lines[x + 4])
if lat:
unit = lat.group(1).strip()
log_dict[key] = float(
lat.group(2).strip()) * self.CUNIT[unit]
else:
log_dict[key] = 0
else:
iops = re.match('.+iops=\s*([0-9. ]+)',
f_lines[x + 1])
if iops:
log_dict[key] = iops.group(1).strip()
return log_dict
class FIOLogsReader(BaseLogsReader):
"""
Subclass for parsing FIO log files e.g.
FIOLog-XXXq.log
"""
# conversion unit dict reference for latency to 'usec'
CUNIT = {'usec': 1,
'msec': 1000,
'sec': 1000000}
CSIZE = {'B': 2 ** -10,
'K': 1,
'M': 1024,
'G': 1048576}
def __init__(self, log_path=None):
super(FIOLogsReader, self).__init__(log_path)
self.headers = ['rand-read:', 'rand-read: latency',
'rand-write: latency', 'seq-write: latency',
'rand-write:', 'seq-write:', 'seq-read:',
'seq-read: latency', 'QDepth', 'BlockSize_KB']
self.log_matcher = 'FIOLog-([0-9]+)q'
def collect_data(self, f_match, log_file, log_dict):
"""
Customized data collect for FIO test case.
:param f_match: regex file matcher
:param log_file: full path log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
log_dict['QDepth'] = int(f_match.group(1))
with open(log_file, 'r') as fl:
f_lines = fl.readlines()
for key in log_dict:
if not log_dict[key]:
if 'BlockSize' in key:
block_size = re.match('.+rw=read, bs=\(R\)\s*([0-9]+)([A-Z])', f_lines[0])
um = block_size.group(2).strip()
log_dict[key] = \
int(block_size.group(1).strip()) * self.CSIZE[um]
for x in range(0, len(f_lines)):
if all(markers in f_lines[x] for markers in
[key.split(':')[0], 'pid=']):
if 'latency' in key:
lat = re.match('\s*lat\s*\(([a-z]+)\).+avg=\s*([0-9.]+)',
f_lines[x + 4])
if lat:
unit = lat.group(1).strip()
log_dict[key] = float(
lat.group(2).strip()) * self.CUNIT[unit]
else:
iops = re.match('.+iops=([0-9. ]+)', f_lines[x + 1], re.IGNORECASE)
if iops:
log_dict[key] = iops.group(1).strip()
return log_dict
class FIOLogsReaderRaid(BaseLogsReader):
"""
Subclass for parsing FIO log files e.g.
FIOLog-XXXq.log
"""
# conversion unit dict reference for latency to 'usec'
CUNIT = {'usec': 1,
'msec': 1000,
'sec': 1000000}
CSIZE = {'B': 2 ** -10,
'K': 1,
'M': 1024,
'G': 1048576}
def __init__(self, log_path=None):
super(FIOLogsReaderRaid, self).__init__(log_path)
self.headers = ['rand-read:', 'rand-read: latency',
'rand-write: latency', 'seq-write: latency',
'rand-write:', 'seq-write:', 'seq-read:',
'seq-read: latency', 'QDepth', 'BlockSize_KB']
self.log_matcher = '([0-9]+)([A-Z])-([0-9]+)-([a-z]+).fio.log'
def collect_data(self, f_match, log_file, log_dict):
"""
Customized data collect for FIO test case.
:param f_match: regex file matcher
:param log_file: full path log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
log_dict['BlockSize_KB'] = \
int(f_match.group(1)) * self.CSIZE[f_match.group(2).strip()]
log_dict['QDepth'] = int(f_match.group(3))
mode = f_match.group(4)
with open(log_file, 'r') as fl:
f_lines = fl.readlines()
for key in log_dict:
if not log_dict[key] and mode == key.split(':')[0].replace(
'-', '').replace('seq', ''):
for x in range(0, len(f_lines)):
if 'latency' in key:
lat = re.match('\s*lat\s*\(([a-z]+)\).+avg=\s*([0-9.]+)',
f_lines[x])
if lat:
unit = lat.group(1).strip()
log_dict[key] = float(
lat.group(2).strip()) * self.CUNIT[unit]
else:
iops = re.match('.+iops=\s*([0-9.]+)\s*([a-z]*),', f_lines[x],
re.IGNORECASE)
if iops:
if 'k' in iops.group(2):
log_dict[key] = float(iops.group(1).strip()) * 1000
else:
log_dict[key] = iops.group(1).strip()
return log_dict
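    # Hedged example: a log named '8K-128-randread.fio.log' maps to
    # BlockSize_KB=8, QDepth=128 and mode='randread' via the matcher above.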
class IPERFLogsReader(BaseLogsReader):
"""
Subclass for parsing iPerf log files e.g.
XXX-pXXX-iperf3.log
"""
# conversion unit dict reference for throughput to 'Gbits'
BUNIT = {'Gbits': 1.0,
'Mbits': 1.0/2 ** 10,
'Kbits': 1.0/2 ** 20,
'bits': 1.0/2 ** 30}
def __init__(self, log_path=None):
super(IPERFLogsReader, self).__init__(log_path)
self.headers = ['NumberOfConnections', 'TxThroughput_Gbps',
'RxThroughput_Gbps', 'DatagramLoss',
'PacketSize_KBytes', 'IPVersion', 'Protocol',
'SendBufSize_KBytes']
self.log_matcher = '([0-9]+)-p8001-l([0-9]+)k-iperf3.log'
def collect_data(self, f_match, log_file, log_dict):
"""
Customized data collect for iPerf test case.
:param f_match: regex file matcher
:param log_file: log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
log_dict['NumberOfConnections'] = int(f_match.group(1))
log_dict['SendBufSize_KBytes'] = int(f_match.group(2))
log_dict['DatagramLoss'] = 0
log_dict['PacketSize_KBytes'] = 0
log_dict['TxThroughput_Gbps'] = 0
log_dict['RxThroughput_Gbps'] = 0
log_dict['IPVersion'] = 'IPv4'
log_dict['Protocol'] = 'UDP'
lost_datagrams = 0
total_datagrams = 0
digit_3 = decimal.Decimal(10) ** -3
log_files = [os.path.join(os.path.dirname(log_file), f)
for f in os.listdir(os.path.dirname(log_file))
if f.startswith(str(log_dict['NumberOfConnections']) + '-p')]
for log_f in log_files:
with open(log_f, 'r') as fl:
read_client = True
for line in fl:
if 'Connecting to host' in line:
ip_version = re.match('Connecting\s*to\s*host\s*(.+),\s*port', line)
if ':' in ip_version.group(1):
log_dict['IPVersion'] = 'IPv6'
if 'Server output:' in line:
read_client = False
if int(log_dict['NumberOfConnections']) == 1:
iperf_values = re.match('\[\s*[0-9]\]\s*0[.]00-60[.]00\s*'
'sec\s*([0-9.]+)\s*([A-Za-z]+)\s*'
'([0-9.]+)\s*([A-Za-z]+)/sec\s*'
'([0-9.]+)\s*([A-Za-z]+)\s*'
'([0-9]+)/([0-9]+)\s*'
'\(([a-z\-0-9.]+)%\)', line)
else:
iperf_values = re.match('\[SUM\]\s*0[.]00-60[.]00\s*sec\s*'
'([0-9.]+)\s*([A-Za-z]+)\s*'
'([0-9.]+)\s*([A-Za-z]+)/sec\s*'
'([0-9.]+)\s*([A-Za-z]+)\s*'
'([0-9]+)/([0-9]+)\s*'
'\(([a-z\-+0-9.]+)%\)', line)
if iperf_values is not None:
if read_client:
key = 'TxThroughput_Gbps'
lost_datagrams += float(iperf_values.group(7).strip())
total_datagrams += float(iperf_values.group(8).strip())
else:
key = 'RxThroughput_Gbps'
log_dict[key] += decimal.Decimal(float(iperf_values.group(3).strip()) *
self.BUNIT[iperf_values.group(4).strip()]
).quantize(digit_3)
try:
log_dict['DatagramLoss'] = round(
lost_datagrams / total_datagrams * 100, 2)
except ZeroDivisionError:
log_dict['DatagramLoss'] = 0
if not log_dict.get('PacketSize_KBytes', None):
log_dict['PacketSize_KBytes'] = 0
ica_log = os.path.join(self.log_base_path, 'ica.log')
with open(ica_log, 'r') as f2:
lines = f2.readlines()
ip_version_mark = '-ipv6' if log_dict['IPVersion'] == 'IPv6' else ''
for i in xrange(0, len(lines)):
ica_mark = re.match('.*Test\s*iperf3-{}{}-{}k.*:\s*Passed'.format(
log_dict['Protocol'], ip_version_mark, log_dict['SendBufSize_KBytes']),
lines[i])
if ica_mark:
pkg_size = re.match('.*Packet\s*size:\s*([0-9.]+)', lines[i + 5])
if pkg_size:
log_dict['PacketSize_KBytes'] = float(
pkg_size.group(1).strip())
return log_dict
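    # Hedged example: a log named '8-p8001-l1k-iperf3.log' maps to
    # NumberOfConnections=8 and SendBufSize_KBytes=1 via the matcher above.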
class LatencyLogsReader(BaseLogsReader):
"""
Subclass for parsing Latency log files e.g.
lagscope.log
"""
# conversion units
CUNIT = {'us': 1,
'ms': 10**3,
's': 10**6}
def __init__(self, log_path=None):
super(LatencyLogsReader, self).__init__(log_path)
self.headers = ['MinLatency_us', 'AverageLatency_us', 'MaxLatency_us',
'Latency95Percentile_us', 'Latency99Percentile_us', 'IPVersion',
'ProtocolType']
self.log_matcher = 'lagscope.log'
def collect_data(self, f_match, log_file, log_dict):
"""
Customized data collect for Latency test case.
:param f_match: regex file matcher
:param log_file: log file name
:param log_dict: dict constructed from the defined headers
:return: <dict> {'head1': 'val1', ...}
"""
log_dict['MinLatency_us'] = 0
log_dict['AverageLatency_us'] = 0
log_dict['MaxLatency_us'] = 0
log_dict['Latency95Percentile_us'] = 0
log_dict['Latency99Percentile_us'] = 0
with open(log_file, 'r') as fl:
for x in fl:
if not log_dict.get('IPVersion', None):
ip_version = re.match('domain:.+(IPv[4,6])', x)
if ip_version:
log_dict['IPVersion'] = ip_version.group(1).strip()
if not log_dict.get('Protocol', None):
ip_proto = re.match('protocol:.+([A-Z]{3})', x)
if ip_proto:
log_dict['ProtocolType'] = ip_proto.group(1).strip()
min_latency = re.match('.+Minimum\s*=\s*([0-9.]+)\s*([a-z]+)', x)
if min_latency:
unit = min_latency.group(2).strip()
log_dict['MinLatency_us'] = \
float(min_latency.group(1).strip()) * self.CUNIT[unit]
avg_latency = re.match('.+Average\s*=\s*([0-9.]+)\s*([a-z]+)', x)
if avg_latency:
unit = avg_latency.group(2).strip()
log_dict['AverageLatency_us'] = \
float(avg_latency.group(1).strip()) * self.CUNIT[unit]
max_latency = re.match('.+Maximum\s*=\s*([0-9.]+)\s*([a-z]+)', x)
if max_latency:
unit = max_latency.group(2).strip()
log_dict['MaxLatency_us'] = \
float(max_latency.group(1).strip()) * self.CUNIT[unit]
return log_dict
|
adriansuhov/lis-test
|
WS2012R2/lisa/Infrastructure/lisa-parser/lisa_parser/file_parser.py
|
Python
|
apache-2.0
| 38,829 | 0.00376 |
#!/usr/bin/python3
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Cloud Functions implementation which takes a patient bundle from a FHIR
Store whenever a questionnaire gets answered, runs prediction against a
pre-trained model and writes the results back to the same FHIR Store.
"""
import base64
import datetime
import googleapiclient.discovery
import google.auth
import json
import logging
import os
from google.auth.transport.urllib3 import AuthorizedHttp
from utils import *
# These should be passed in through deployment.
MODEL = os.environ.get('MODEL')
VERSION = os.environ.get('VERSION')
FHIR_STORE_ENDPOINT_PREFIX = 'https://healthcare.googleapis.com/v1beta1'
CREATE_RESOURCE_ACTION = 'CreateResource'
UPDATE_RESOURCE_ACTION = 'UpdateResource'
RISKS = ['negligible', 'low', 'moderate', 'high', 'certain']
LOGGER = logging.getLogger('main')
def get_resource(http, resource_name):
"""Fetches a resource from the FHIR Store.
Args:
resource_name (str): the name of the resource, e.g. 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient/patient-id'
Returns:
Object: the resource loaded from the FHIR Store.
"""
response = http.request('GET', format_url(resource_name))
if response.status > 299:
LOGGER.critical("Failed to retrieve resource %s, response: %s" % (
resource_name, response.data))
return None
return json.loads(response.data)
def build_risk_assessment(pid, qid, disease, risk, rid=None):
"""Builds a risk assessment JSON object.
Returns:
Str: JSON representation of a RiskAssessment resource.
"""
risk_assessment = {
'resourceType': RISKASSESSMENT_TYPE,
'basis': [{'reference': pid}, {'reference': qid}],
'status': 'final',
'subject': {'reference': pid},
'occurrenceDateTime':
datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
'prediction': [{
'outcome': {
'coding': [{'display': disease}],
},
'qualitativeRisk': {
'coding': [{
'system': "http://hl7.org/fhir/risk-probability",
'code': risk
}]
}
}]
}
if rid is not None:
risk_assessment['id'] = rid
return json.dumps(risk_assessment)
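# A hedged sketch of the output shape (the IDs below are hypothetical):
#   build_risk_assessment('Patient/p1', 'QuestionnaireResponse/q1',
#                         'Lyme disease', 'moderate')
# returns a JSON string for a final RiskAssessment referencing both resources,
# whose single prediction entry carries
#   {'qualitativeRisk': {'coding': [{'code': 'moderate', ...}]}}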
def get_action(data):
"""Reads operation action (e.g. Create or Update) from pubsub message."""
if data['attributes'] is not None:
return data['attributes']['action']
return None
def format_url(path, query=None):
"""Formats request URL with path and query string."""
if query is None:
return "%s/%s" % (FHIR_STORE_ENDPOINT_PREFIX, path)
else:
return "%s/%s?%s" % (FHIR_STORE_ENDPOINT_PREFIX, path, query)
def create_or_update_resource(http, path, payload):
"""Writes a resource to the FHIR Store.
Args:
path (str): path to the endpoint, e.g. 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient' for create requests and 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient/patient-id' for update requests.
payload (str): resource to be written to the FHIR Store.
Returns:
Object: the resource from the server, usually this is an
OperationOutcome resource if there is anything wrong.
"""
  # Determine which HTTP method we need to use: POST for create, and PUT for
  # update. The path of an update request has one more component than the
  # path of a create request.
method = 'POST' if path.count('/') == 9 else 'PUT'
response = http.request(method, format_url(path), body=payload,
headers={'Content-Type': 'application/fhir+json;charset=utf-8'})
if response.status > 299:
LOGGER.error("Failed to create or update resource %s, response: %s" % (
payload, response.data))
return None
return json.loads(response.data)
def search_resource(http, path, query):
"""Searches a resource in the FHIR Store.
Args:
path (str): path to the search endpoint, e.g. 'projects/my-project
/locations/us-central1/datasets/my-dataset/fhirStores/my-store
/fhir/Patient'
query (str): query parameter, e.g. 'age=gt30'
Returns:
List[dict]: a list of resources matching the search criteria.
"""
response = http.request('GET', format_url(path, query=query))
if response.status > 299:
LOGGER.error("Failed to search resource %s, response: %s" % (query,
response.data))
return None
bundle = json.loads(response.data)
return list(map(lambda r: r['resource'], bundle['entry']))
def filter_resource(resources, qid, disease):
"""Finds a RiskAssessment.
The target references a certain QuestionnaireResponse and is about the
specified disease
"""
def match(res):
return extract_qid(res) == qid and extract_disease(res) == disease
return next(filter(match, resources), None)
def build_examples(patient, questionnaire_response):
"""Builds examples to be sent for prediction.
Two examples are created for the two diseases we are targeting at.
"""
def map_example(disease):
return {
'age': calculate_age(patient['birthDate']),
'gender': 1 if patient['gender'] == 'male' else 0,
'country': COUNTRY_MAP[extract_country(questionnaire_response)],
'duration': calculate_duration(
*extract_start_end_date(questionnaire_response)),
'disease': disease
}
return list(map(map_example, range(len(DISEASE_MAP))))
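# The resulting prediction request body then looks like this (values are
# illustrative only, one example per disease index):
#   {'instances': [{'age': 34, 'gender': 1, 'country': 5, 'duration': 12,
#                   'disease': 0},
#                  {'age': 34, 'gender': 1, 'country': 5, 'duration': 12,
#                   'disease': 1}]}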
def predict(examples):
"""Sends features to Cloud ML Engine for online prediction.
Args:
examples (list): features to be fed into the model for prediction.
Returns:
Mapping[str: any]: dictionary of prediction results defined by the model.
"""
service = googleapiclient.discovery.build('ml', 'v1', cache_discovery=False)
name = "projects/%s/models/%s/versions/%s" % (
os.environ.get('GCP_PROJECT'), MODEL, VERSION)
response = service.projects().predict(name=name,
body={'instances': examples}).execute()
if 'error' in response:
LOGGER.error("Prediction failed: %s" % response['error'])
return None
return response['predictions']
def main(data, context):
"""Extracts features from a patient bundle for online prediction.
This process is broken down into a few steps:
1. Fetch the QuestionnaireResponse we get triggered on (note that we
only react to this resource type), and extract the patient that
answered it.
2. Fetch everything for the patient from step 1, and extract the
features we are interested in.
3. Send the features to Cloud ML for online prediction, and write the
results back to the FHIR store.
Args:
data (dict): Cloud PubSub payload. The `data` field is what we are
looking for.
context (google.cloud.functions.Context): Metadata for the event.
"""
if 'data' not in data:
LOGGER.info('`data` field is not present, skipping...')
return
resource_name = base64.b64decode(data['data']).decode('utf-8')
if QUESTIONNAIRERESPONSE_TYPE not in resource_name:
LOGGER.info("Skipping resource %s which is irrelevant for prediction." %
resource_name)
return
credentials, _ = google.auth.default()
http = AuthorizedHttp(credentials)
questionnaire_response = get_resource(http, resource_name)
if questionnaire_response is None:
return
patient_id = questionnaire_response['subject']['reference']
project_id, location, dataset_id, fhir_store_id, _ = _parse_resource_name(
resource_name)
patient = get_resource(http, _construct_resource_name(project_id, location,
dataset_id, fhir_store_id, patient_id))
if patient is None:
return
predictions = predict(build_examples(patient, questionnaire_response))
if predictions is None:
return
pid = "%s/%s" % (PATIENT_TYPE, patient['id'])
qid = "%s/%s" % (QUESTIONNAIRERESPONSE_TYPE, questionnaire_response['id'])
action = get_action(data)
for disease, idx in DISEASE_MAP.items():
scores = predictions[idx]['probabilities']
LOGGER.info("Prediction results: %s", scores)
    # The last element is the model's probability of having the disease.
score = scores[1]
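    # Bucket the probability into the five qualitative labels, each bucket
    # 0.2 wide; a score of exactly 1 would index past the end of RISKS, so
    # it is mapped explicitly to the top ('certain') bucket.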
risk = RISKS[-1] if score == 1 else RISKS[int(score / 0.2)]
path = _construct_resource_name(project_id, location, dataset_id,
fhir_store_id, RISKASSESSMENT_TYPE)
if action == UPDATE_RESOURCE_ACTION:
resources = search_resource(http, path, "subject=%s" % pid)
res = filter_resource(resources, qid, disease)
if res is None:
LOGGER.info("No existing RiskAssessment, createing a new one...")
create_or_update_resource(http, path, build_risk_assessment(pid,
qid, disease, risk))
continue
rid = res['id']
path = _construct_resource_name(project_id, location, dataset_id,
fhir_store_id, "%s/%s" % (RISKASSESSMENT_TYPE, rid))
create_or_update_resource(http, path, build_risk_assessment(pid,
qid, disease, risk, rid=rid))
elif action == CREATE_RESOURCE_ACTION:
create_or_update_resource(http, path, build_risk_assessment(pid,
qid, disease, risk))
def _parse_resource_name(name):
"""Extracts project id, location, dataset id etc from the resource name."""
parts = name.split('/')
return parts[1], parts[3], parts[5], parts[7], \
"%s/%s" % (parts[9], parts[10])
def _construct_resource_name(project_id, location, dataset_id, fhir_store_id,
resource_id):
"""Constructs a resource name."""
return '/'.join([
'projects', project_id, 'locations', location, 'datasets', dataset_id,
'fhirStores', fhir_store_id, 'fhir', resource_id
])
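# A minimal round-trip sketch (all identifiers below are hypothetical):
#   name = ('projects/p/locations/us-central1/datasets/d/fhirStores/s'
#           '/fhir/Patient/123')
#   _parse_resource_name(name)
#       -> ('p', 'us-central1', 'd', 's', 'Patient/123')
#   _construct_resource_name('p', 'us-central1', 'd', 's', 'Patient/123')
#       == name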
|
GoogleCloudPlatform/healthcare
|
fhir/immunizations_demo/inference/main.py
|
Python
|
apache-2.0
| 10,155 | 0.008075 |
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
defines a few algorithms that deal with different properties of C++ types
Are you aware of the boost::type_traits library? pygccxml offers similar
functionality. Using the functions defined in this module you can
- find out some properties of a type
- modify a type
These functions are very valuable for code generation. Almost all functions
within this module work on the L{type_t} class hierarchy and\\or L{class_t}.
"""
import os
import types
import matchers
import typedef
import calldef
import cpptypes
import variable
import algorithm
import namespace
import templates
import enumeration
import class_declaration
from pygccxml import utils
import types as build_in_types
def __remove_alias(type_):
"""implementation details"""
if isinstance( type_, typedef.typedef_t ):
return __remove_alias( type_.type )
if isinstance( type_, cpptypes.declarated_t ) and isinstance( type_.declaration, typedef.typedef_t ):
return __remove_alias( type_.declaration.type )
if isinstance( type_, cpptypes.compound_t ):
type_.base = __remove_alias( type_.base )
return type_
return type_
def remove_alias(type_):
"""returns type without typedefs"""
type_ref = None
if isinstance( type_, cpptypes.type_t ):
type_ref = type_
elif isinstance( type_, typedef.typedef_t ):
type_ref = type_.type
else:
pass #not a valid input, just return it
if not type_ref:
return type_
if type_ref.cache.remove_alias:
return type_ref.cache.remove_alias
no_alias = __remove_alias( type_ref.clone() )
type_ref.cache.remove_alias = no_alias
return no_alias
def create_cv_types( base ):
"""implementation details"""
return [ base
, cpptypes.const_t( base )
, cpptypes.volatile_t( base )
, cpptypes.volatile_t( cpptypes.const_t( base ) ) ]
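# Sketch: create_cv_types( cpptypes.int_t() ) yields the four cv-qualified
# flavours [int, const int, volatile int, const volatile int], which lets the
# is_xxx predicates below test a type with a simple membership check.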
def decompose_type(tp):
"""implementation details"""
#implementation of this function is important
if isinstance( tp, cpptypes.compound_t ):
return [tp] + decompose_type( tp.base )
elif isinstance( tp, typedef.typedef_t ):
return decompose_type( tp.type )
elif isinstance( tp, cpptypes.declarated_t ) and isinstance( tp.declaration, typedef.typedef_t ):
return decompose_type( tp.declaration.type )
else:
return [tp]
def decompose_class(type):
"""implementation details"""
types = decompose_type( type )
return [ tp.__class__ for tp in types ]
def base_type(type):
"""returns base type.
    For C{const int} it will return C{int}
"""
types = decompose_type( type )
return types[-1]
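# Sketch: for a type object representing 'const int&', decompose_type returns
# [reference_t, const_t, int_t] (outermost compound first), so base_type
# returns the trailing int_t instance.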
def does_match_definition(given, main, secondary ):
"""implementation details"""
assert isinstance( secondary, build_in_types.TupleType )
assert 2 == len( secondary ) #general solution could be provided
types = decompose_type( given )
if isinstance( types[0], main ):
return True
elif 2 <= len( types ) and \
( ( isinstance( types[0], main ) and isinstance( types[1], secondary ) ) \
or ( isinstance( types[1], main ) and isinstance( types[0], secondary ) ) ):
return True
elif 3 <= len( types ):
classes = set( [tp.__class__ for tp in types[:3]] )
desired = set( [main] + list( secondary ) )
return classes == desired
else:
return False
def is_bool( type_ ):
"""returns True, if type represents C{bool}, False otherwise"""
return remove_alias( type_ ) in create_cv_types( cpptypes.bool_t() )
def is_void( type ):
"""returns True, if type represents C{void}, False otherwise"""
return remove_alias( type ) in create_cv_types( cpptypes.void_t() )
def is_void_pointer( type ):
"""returns True, if type represents C{void*}, False otherwise"""
return is_same( type, cpptypes.pointer_t( cpptypes.void_t() ) )
def is_integral( type ):
"""returns True, if type represents C++ integral type, False otherwise"""
integral_def = create_cv_types( cpptypes.char_t() ) \
+ create_cv_types( cpptypes.unsigned_char_t() ) \
+ create_cv_types( cpptypes.signed_char_t() ) \
+ create_cv_types( cpptypes.wchar_t() ) \
+ create_cv_types( cpptypes.short_int_t() ) \
+ create_cv_types( cpptypes.short_unsigned_int_t() ) \
+ create_cv_types( cpptypes.bool_t() ) \
+ create_cv_types( cpptypes.int_t() ) \
+ create_cv_types( cpptypes.unsigned_int_t() ) \
+ create_cv_types( cpptypes.long_int_t() ) \
+ create_cv_types( cpptypes.long_unsigned_int_t() ) \
+ create_cv_types( cpptypes.long_long_int_t() ) \
+ create_cv_types( cpptypes.long_long_unsigned_int_t() )
return remove_alias( type ) in integral_def
def is_floating_point( type ):
"""returns True, if type represents C++ floating point type, False otherwise"""
float_def = create_cv_types( cpptypes.float_t() ) \
+ create_cv_types( cpptypes.double_t() ) \
+ create_cv_types( cpptypes.long_double_t() )
return remove_alias( type ) in float_def
def is_arithmetic( type ):
"""returns True, if type represents C++ integral or floating point type, False otherwise"""
return is_integral( type ) or is_floating_point( type )
def is_pointer(type):
"""returns True, if type represents C++ pointer type, False otherwise"""
return does_match_definition( type
, cpptypes.pointer_t
, (cpptypes.const_t, cpptypes.volatile_t) )
def is_calldef_pointer(type):
"""returns True, if type represents pointer to free/member function, False otherwise"""
if not is_pointer(type):
return False
nake_type = remove_alias( type )
nake_type = remove_const( nake_type )
nake_type = remove_volatile( nake_type )
return isinstance( nake_type, cpptypes.compound_t ) \
and isinstance( nake_type.base, cpptypes.calldef_type_t )
def remove_pointer(type):
"""removes pointer from the type definition
If type is not pointer type, it will be returned as is.
"""
nake_type = remove_alias( type )
if not is_pointer( nake_type ):
return type
elif isinstance( nake_type, cpptypes.volatile_t ) and isinstance( nake_type.base, cpptypes.pointer_t ):
return cpptypes.volatile_t( nake_type.base.base )
elif isinstance( nake_type, cpptypes.const_t ) and isinstance( nake_type.base, cpptypes.pointer_t ):
return cpptypes.const_t( nake_type.base.base )
elif isinstance( nake_type.base, cpptypes.calldef_type_t ):
return type
else:
return nake_type.base
def is_reference(type):
"""returns True, if type represents C++ reference type, False otherwise"""
nake_type = remove_alias( type )
return isinstance( nake_type, cpptypes.reference_t )
def is_array(type):
"""returns True, if type represents C++ array type, False otherwise"""
nake_type = remove_alias( type )
nake_type = remove_reference( nake_type )
nake_type = remove_cv( nake_type )
return isinstance( nake_type, cpptypes.array_t )
def array_size(type):
"""returns array size"""
nake_type = remove_alias( type )
nake_type = remove_reference( nake_type )
nake_type = remove_cv( nake_type )
assert isinstance( nake_type, cpptypes.array_t )
return nake_type.size
def array_item_type(type_):
"""returns array item type"""
if is_array(type_):
type_ = remove_alias( type_ )
type_ = remove_cv( type_ )
return type_.base
elif is_pointer( type_ ):
return remove_pointer( type_ )
else:
assert 0
def remove_reference(type):
"""removes reference from the type definition
If type is not reference type, it will be returned as is.
"""
nake_type = remove_alias( type )
if not is_reference( nake_type ):
return type
else:
return nake_type.base
def is_const(type):
"""returns True, if type represents C++ const type, False otherwise"""
nake_type = remove_alias( type )
return isinstance( nake_type, cpptypes.const_t )
def remove_const(type):
"""removes const from the type definition
If type is not const type, it will be returned as is
"""
nake_type = remove_alias( type )
if not is_const( nake_type ):
return type
else:
return nake_type.base
def remove_declarated( type ):
"""removes type-declaration class-binder L{declarated_t} from the type
If type is not L{declarated_t}, it will be returned as is
"""
type = remove_alias( type )
if isinstance( type, cpptypes.declarated_t ):
type = type.declaration
return type
def is_same(type1, type2):
"""returns True, if type1 and type2 are same types"""
nake_type1 = remove_declarated( type1 )
nake_type2 = remove_declarated( type2 )
return nake_type1 == nake_type2
def is_volatile(type):
"""returns True, if type represents C++ volatile type, False otherwise"""
nake_type = remove_alias( type )
return isinstance( nake_type, cpptypes.volatile_t )
def remove_volatile(type):
"""removes volatile from the type definition
If type is not volatile type, it will be returned as is
"""
nake_type = remove_alias( type )
if not is_volatile( nake_type ):
return type
else:
return nake_type.base
def remove_cv(type):
"""removes const and volatile from the type definition"""
nake_type = remove_alias(type)
if not is_const( nake_type ) and not is_volatile( nake_type ):
return type
result = nake_type
if is_const( nake_type ):
result = nake_type.base
if is_volatile( result ):
result = result.base
return result
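# e.g. remove_cv applied to a type representing 'const volatile int' returns
# the bare int_t, while a plain 'int' is returned unchanged.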
def is_fundamental(type):
"""returns True, if type represents C++ fundamental type"""
return does_match_definition( type
, cpptypes.fundamental_t
, (cpptypes.const_t, cpptypes.volatile_t) )
class declaration_xxx_traits:
"""this class implements the functionality needed for convinient work with
declaration classes
Implemented functionality:
- find out whether a declaration is a desired one
- get reference to the declaration
"""
sequence = [ remove_alias, remove_cv, remove_declarated ]
def __init__( self, declaration_class ):
self.declaration_class = declaration_class
def __apply_sequence( self, type ):
for f in self.sequence:
type = f( type )
return type
def is_my_case( self, type ):
"""returns True, if type represents the desired declaration, False otherwise"""
return isinstance( self.__apply_sequence( type ), self.declaration_class )
def get_declaration( self, type ):
"""returns reference to the declaration
Precondition: self.is_my_case( type ) == True
"""
assert self.is_my_case( type )
return self.__apply_sequence( type )
enum_traits = declaration_xxx_traits( enumeration.enumeration_t )
"""implements functionality, needed for convinient work with C++ enums"""
is_enum = enum_traits.is_my_case
"""returns True, if type represents C++ enumeration declaration, False otherwise"""
enum_declaration = enum_traits.get_declaration
"""returns reference to enum declaration"""
class_traits = declaration_xxx_traits( class_declaration.class_t )
"""implements functionality, needed for convinient work with C++ classes"""
is_class = class_traits.is_my_case
"""returns True, if type represents C++ class definition, False otherwise"""
class_declaration_traits = declaration_xxx_traits( class_declaration.class_declaration_t )
"""implements functionality, needed for convinient work with C++ class declarations"""
is_class_declaration = class_declaration_traits.is_my_case
"""returns True, if type represents C++ class declaration, False otherwise"""
def find_trivial_constructor( type ):
"""returns reference to trivial constructor or None"""
assert isinstance( type, class_declaration.class_t )
return type.find_trivial_constructor()
def has_trivial_constructor( class_ ):
"""if class has public trivial constructor, this function will return reference to it, None otherwise"""
class_ = class_traits.get_declaration( class_ )
trivial = class_.find_trivial_constructor()
if trivial and trivial.access_type == 'public':
return trivial
def has_copy_constructor( class_ ):
"""if class has public copy constructor, this function will return reference to it, None otherwise"""
class_ = class_traits.get_declaration( class_ )
copy_constructor = class_.find_copy_constructor()
if copy_constructor and copy_constructor.access_type == 'public':
return copy_constructor
def has_destructor(class_):
"""if class has destructor, this function will return reference to it, None otherwise"""
class_ = class_traits.get_declaration( class_ )
destructor = class_.decls( decl_type=calldef.destructor_t, recursive=False, allow_empty=True )
if destructor:
return destructor[0]
def has_public_constructor(class_):
"""if class has any public constructor, this function will return list of them, otherwise None"""
class_ = class_traits.get_declaration(class_)
decls = class_.constructors( lambda c: not c.is_copy_constructor and c.access_type == 'public'
, recursive=False, allow_empty=True )
if decls:
return decls
def has_public_assign(class_):
"""returns True, if class has public assign operator, False otherwise"""
class_ = class_traits.get_declaration( class_ )
decls = class_.mem_opers( lambda o: o.symbol == '=' and o.access_type == 'public'
, recursive=False, allow_empty=True )
return bool( decls )
def has_public_destructor(type):
"""returns True, if class has public destructor, False otherwise"""
d = has_destructor( type )
return d and d.access_type == 'public'
def is_base_and_derived( based, derived ):
"""returns True, if there is "base and derived" relationship between classes, False otherwise"""
assert isinstance( based, class_declaration.class_t )
assert isinstance( derived, ( class_declaration.class_t, tuple ) )
all_derived = None
if isinstance( derived, class_declaration.class_t ):
all_derived = ( [derived] )
else: #tuple
all_derived = derived
for derived_cls in all_derived:
for base_desc in derived_cls.recursive_bases:
if base_desc.related_class == based:
return True
return False
def has_any_non_copyconstructor( type):
"""if class has any public constructor, which is not copy constructor, this function will return list of them, otherwise None"""
class_ = class_traits.get_declaration( type )
decls = class_.constructors( lambda c: not c.is_copy_constructor and c.access_type == 'public'
, recursive=False, allow_empty=True )
if decls:
return decls
def has_public_binary_operator( type, operator_symbol ):
"""returns True, if type has public binary operator, otherwise False"""
not_artificial = lambda decl: decl.is_artificial == False
type = remove_alias( type )
type = remove_cv( type )
type = remove_declarated( type )
assert isinstance( type, class_declaration.class_t )
if is_std_string( type ) or is_std_wstring( type ):
#In some case compare operators of std::basic_string are not instantiated
return True
operators = type.member_operators( function=matchers.custom_matcher_t( not_artificial ) \
& matchers.access_type_matcher_t( 'public' )
, symbol=operator_symbol
, allow_empty=True
, recursive=False )
if operators:
return True
t = cpptypes.declarated_t( type )
t = cpptypes.const_t( t )
t = cpptypes.reference_t( t )
operators = type.top_parent.operators( function=not_artificial
, arg_types=[t, None]
, symbol=operator_symbol
, allow_empty=True
, recursive=True )
if operators:
return True
for bi in type.recursive_bases:
assert isinstance( bi, class_declaration.hierarchy_info_t )
if bi.access_type != class_declaration.ACCESS_TYPES.PUBLIC:
continue
operators = bi.related_class.member_operators( function=matchers.custom_matcher_t( not_artificial ) \
& matchers.access_type_matcher_t( 'public' )
, symbol=operator_symbol
, allow_empty=True
, recursive=False )
if operators:
return True
return False
def has_public_equal( type ):
"""returns True, if class has public operator==, otherwise False"""
return has_public_binary_operator( type, '==' )
def has_public_less( type ):
"""returns True, if class has public operator<, otherwise False"""
return has_public_binary_operator( type, '<' )
def is_unary_operator( oper ):
"""returns True, if operator is unary operator, otherwise False"""
#~ definition:
    #~ member in class
#~ ret-type operator symbol()
#~ ret-type operator [++ --](int)
#~ globally
#~ ret-type operator symbol( arg )
#~ ret-type operator [++ --](X&, int)
symbols = [ '!', '&', '~', '*', '+', '++', '-', '--' ]
if not isinstance( oper, calldef.operator_t ):
return False
if oper.symbol not in symbols:
return False
if isinstance( oper, calldef.member_operator_t ):
if 0 == len( oper.arguments ):
return True
elif oper.symbol in [ '++', '--' ] and isinstance( oper.arguments[0].type, cpptypes.int_t ):
return True
else:
return False
else:
if 1 == len( oper.arguments ):
return True
elif oper.symbol in [ '++', '--' ] \
and 2 == len( oper.arguments ) \
and isinstance( oper.arguments[1].type, cpptypes.int_t ):
#may be I need to add additional check whether first argument is reference or not?
return True
else:
return False
def is_binary_operator( oper ):
"""returns True, if operator is binary operator, otherwise False"""
#~ definition:
    #~ member in class
#~ ret-type operator symbol(arg)
#~ globally
#~ ret-type operator symbol( arg1, arg2 )
symbols = [ ',', '()', '[]', '!=', '%', '%=', '&', '&&', '&=', '*', '*=', '+', '+='
, '-', '-=', '->', '->*', '/', '/=', '<', '<<', '<<=', '<='
, '=', '==', '>', '>=', '>>', '>>=', '^', '^=', '|', '|=', '||'
]
if not isinstance( oper, calldef.operator_t ):
return False
if oper.symbol not in symbols:
return False
if isinstance( oper, calldef.member_operator_t ):
if 1 == len( oper.arguments ):
return True
else:
return False
else:
if 2 == len( oper.arguments ):
return True
else:
return False
class __is_convertible_t:
"""implementation details"""
def __init__( self, source, target ):
self.__source = self.__normalize( source )
self.__target = self.__normalize( target )
def __find_class_by_class_declaration( self, class_decl ):
found = algorithm.find_declaration( class_decl.parent.declarations
, name=class_decl.name
, type=class_declaration.class_t )
return found
def __normalize( self, type_ ):
type_ = remove_alias( type_ )
bt_of_type = base_type( type_ )
if isinstance( bt_of_type, cpptypes.declarated_t ) \
and isinstance( bt_of_type.declaration, class_declaration.class_declaration_t ):
type_ = type_.clone()
bt_of_type = base_type( type_ )
bt_of_type.declaration = self.__find_class_by_class_declaration( bt_of_type.declaration )
return type_
def __test_trivial( self, source, target ):
if not ( source and target ):
return False
if is_same( source, target ):
return True #X => X
if is_const( target ) and is_same( source, target.base ):
return True #X => const X
if is_reference( target ) and is_same( source, target.base ):
return True #X => X&
if is_reference( target ) and is_const( target.base ) and is_same( source, target.base.base ):
return True #X => const X&
if is_same( target, cpptypes.pointer_t( cpptypes.void_t() ) ):
if is_integral( source ) or is_enum( source ):
return False
else:
return True #X => void*
if is_pointer( source ) and is_pointer( target ):
if is_const( target.base ) and is_same( source.base, target.base.base ):
return True#X* => const X*
if is_reference( source ) and is_reference( target ):
if is_const( target.base ) and is_same( source.base, target.base.base ):
return True#X& => const X&
if not is_const( source ) and is_array( source ) and is_pointer( target ):
if is_same( base_type(source), target.base ):
return True#X[2] => X*
if is_array( source ) and is_pointer( target ) and is_const( target.base ):
if is_same( base_type(source), target.base.base ):
return True
def __test_pointer_to_func_or_mv__to__func_or_mv( self, source, target ):
if is_pointer( source ) \
and is_reference( target ) \
and isinstance( target.base
, ( cpptypes.free_function_type_t
, cpptypes.member_function_type_t
, cpptypes.member_variable_type_t ) ) \
and is_same( source.base, target.base ):
return True
if is_pointer( source ) \
and isinstance( target
, ( cpptypes.free_function_type_t
, cpptypes.member_function_type_t
, cpptypes.member_variable_type_t ) ) \
and is_same( source.base, target ):
return True
if is_pointer( target ) \
and is_reference( source ) \
and isinstance( source.base
, ( cpptypes.free_function_type_t
, cpptypes.member_function_type_t
, cpptypes.member_variable_type_t ) ) \
and is_same( source.base, target.base ):
return True
if is_pointer( target ) \
and isinstance( source
, ( cpptypes.free_function_type_t
, cpptypes.member_function_type_t
, cpptypes.member_variable_type_t ) ) \
and is_same( target.base, source ):
return True
def __test_const_x_ref__to__x( self, source, target ):
if not is_reference( source ) \
or not is_const( source.base ) \
or not is_same( source.base.base, target ):
return False
if is_fundamental( target ):
return True
if is_enum( target ):
return True
if isinstance( target, cpptypes.declarated_t ):
assert isinstance( target.declaration, class_declaration.class_t )
if has_copy_constructor( target.declaration ):
return True #we have copy constructor
return False
def __test_const_ref_x__to__y(self, source, target):
if not is_reference( source ) or not is_const( source.base ):
return False
if is_fundamental( source.base.base ) and is_fundamental( target ):
return True
if is_convertible( source.base.base, cpptypes.int_t() ) and is_enum( target ):
return True
if isinstance( target, cpptypes.declarated_t ):
assert isinstance( target.declaration, class_declaration.class_t )
if has_copy_constructor( target.declaration ):
return True #we have copy constructor
return False
def __test_ref_x__to__x( self, source, target ):
if not is_reference( source ) or not is_same( source.base, target ):
return False
if is_fundamental( target ):
return True
if is_enum( target ):
return True
if isinstance( target, cpptypes.declarated_t ):
assert isinstance( target.declaration, class_declaration.class_t )
if has_copy_constructor( target.declaration ):
return True #we have copy constructor
return False
def __test_ref_x__to__y(self, source, target):
if not is_reference( source ):
return False
if is_fundamental( source.base ) and is_fundamental( target ):
return True
if is_convertible( source.base, cpptypes.int_t() ) and is_enum( target ):
return True
if isinstance( target, cpptypes.declarated_t ):
assert isinstance( target.declaration, class_declaration.class_t )
if has_copy_constructor( target.declaration ):
return True #we have copy constructor
return False
def __test_fundamental__to__fundamental(self, source, target):
if not is_fundamental( base_type( source ) ) or not is_fundamental( base_type( target ) ):
return False
if is_void( base_type( source ) ) or is_void( base_type( target ) ):
return False
if is_fundamental( source ) and is_fundamental( target ):
return True
if not is_pointer( source ) and is_fundamental( target ):
return True
if not is_pointer( source ) and is_const( target ) and is_fundamental( target.base ):
return True
if is_fundamental( source ) \
and is_reference( target ) \
and is_const( target.base ) \
and is_fundamental( target.base.base ):
return True #X => const Y&
return False
def __test_derived_to_based( self, source, target ):
derived = base_type( source )
base = base_type( target )
if not ( isinstance( derived, cpptypes.declarated_t ) \
and isinstance( derived.declaration, class_declaration.class_t ) ):
return False
if not ( isinstance( base, cpptypes.declarated_t ) \
and isinstance( base.declaration, class_declaration.class_t ) ):
return False
base = base.declaration
derived = derived.declaration
if not is_base_and_derived( base, derived ):
return False
for b in derived.recursive_bases:
if ( b.related_class is base ) and b.access_type != class_declaration.ACCESS_TYPES.PRIVATE:
break
else:
return False
base = target
derived = source
is_both_declarated = lambda x, y: isinstance( x, cpptypes.declarated_t ) \
and isinstance( y, cpptypes.declarated_t )
#d => b
if is_both_declarated( base, derived ):
return True
#d* => b*
if is_pointer( derived ) and is_pointer( base ) \
and is_both_declarated( base.base, derived.base ):
return True
#const d* => const b*
if is_pointer( derived ) and is_pointer( base ) \
and is_const( derived.base ) and is_const( base.base ) \
and is_both_declarated( base.base.base, derived.base.base ):
return True
#d* => const b*
if is_pointer( derived ) and is_pointer( base ) \
and is_const( derived.base )\
and is_both_declarated( base.base.base, derived.base ):
return True
#d& => b&
if is_reference( derived ) and is_reference( base ) \
and is_both_declarated( base.base, derived.base ):
return True
#const d& => const b&
if is_reference( derived ) and is_reference( base ) \
and is_const( derived.base ) and is_const( base.base ) \
and is_both_declarated( base.base.base, derived.base.base ):
return True
#d& => const b&
if is_reference( derived ) and is_reference( base ) \
and is_const( derived.base )\
and is_both_declarated( base.base.base, derived.base ):
return True
return False
def is_convertible( self ):
source = self.__source
target = self.__target
if self.__test_trivial(source, target):
return True
if is_array( source ) or is_array( target ):
return False
if self.__test_const_x_ref__to__x(source, target):
return True
if self.__test_const_ref_x__to__y(source, target):
return True
if self.__test_ref_x__to__x(source, target):
return True
if self.__test_ref_x__to__y(source, target):
return True
if self.__test_fundamental__to__fundamental( source, target ):
return True
if self.__test_pointer_to_func_or_mv__to__func_or_mv( source, target ):
return True
if self.__test_derived_to_based( source, target ):
return True
if isinstance( source, cpptypes.declarated_t ):
if isinstance( source.declaration, enumeration.enumeration_t ) \
and is_fundamental( target ) \
and not is_void( target ):
return True # enum could be converted to any integral type
if isinstance( source.declaration, class_declaration.class_t ):
source_inst = source.declaration
#class instance could be convertible to something else if it has operator
casting_operators = algorithm.find_all_declarations( source_inst.declarations
, type=calldef.casting_operator_t
, recursive=False )
if casting_operators:
for operator in casting_operators:
if is_convertible( operator.return_type, target ):
return True
#may be target is class too, so in this case we should check whether is
#has constructor from source
if isinstance( target, cpptypes.declarated_t ):
if isinstance( target.declaration, class_declaration.class_t ):
constructors = algorithm.find_all_declarations( target.declaration.declarations
, type=calldef.constructor_t
, recursive=False )
if constructors:
for constructor in constructors:
if 1 != len( constructor.arguments ):
continue
#TODO: add test to check explicitness
if is_convertible( source, constructor.arguments[0].type ):
return True
return False
def is_convertible( source, target ):
"""returns True, if source could be converted to target, otherwise False"""
return __is_convertible_t( source, target ).is_convertible()
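# A minimal sketch using bare cpptypes instances (no C++ parsing required):
#   is_convertible( cpptypes.int_t(), cpptypes.double_t() ) -> True
#   is_convertible( cpptypes.pointer_t( cpptypes.int_t() )
#                 , cpptypes.pointer_t( cpptypes.void_t() ) ) -> True
#   is_convertible( cpptypes.int_t()
#                 , cpptypes.pointer_t( cpptypes.void_t() ) ) -> False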
def __is_noncopyable_single( class_):
"""implementation details"""
#It is not enough to check base classes, we should also to check
#member variables.
logger = utils.loggers.cxx_parser
if has_copy_constructor( class_ ) \
and has_public_constructor( class_ ) \
and has_public_assign( class_ ) \
and has_public_destructor( class_ ):
msg = os.linesep.join([
"__is_noncopyable_single - %s - COPYABLE:" % class_.decl_string
, " trivial copy constructor: yes"
, " public constructor: yes"
, " public assign: yes"
, " public destructor: yes"
])
logger.debug( msg )
return False
if class_.find_noncopyable_vars():
logger.debug( "__is_noncopyable_single(TRUE) - %s - contains noncopyable members" % class_.decl_string )
return True
else:
logger.debug( "__is_noncopyable_single(FALSE) - %s - COPYABLE, because is doesn't contains noncopyable members" % class_.decl_string )
return False
def is_noncopyable( class_ ):
"""returns True, if class is noncopyable, False otherwise"""
logger = utils.loggers.cxx_parser
class_ = class_traits.get_declaration( class_ )
true_header = "is_noncopyable(TRUE) - %s - " % class_.decl_string
false_header = "is_noncopyable(false) - %s - " % class_.decl_string
if class_.class_type == class_declaration.CLASS_TYPES.UNION:
return False
if class_.is_abstract:
logger.debug( true_header + "abstract client" )
return True
    #if class has a public, user defined copy constructor, then this class is
    #copyable
copy_ = class_.find_copy_constructor()
if copy_ and copy_.access_type == 'public' and not copy_.is_artificial:
return False
for base_desc in class_.recursive_bases:
assert isinstance( base_desc, class_declaration.hierarchy_info_t )
if base_desc.related_class.decl_string in ('::boost::noncopyable', '::boost::noncopyable_::noncopyable' ):
logger.debug( true_header + "derives from boost::noncopyable" )
return True
if not has_copy_constructor( base_desc.related_class ):
base_copy_ = base_desc.related_class.find_copy_constructor()
if base_copy_:
if base_copy_.access_type == 'private':
logger.debug( true_header + "there is private copy constructor" )
return True
else:
if __is_noncopyable_single( base_desc.related_class ):
logger.debug( true_header + "__is_noncopyable_single returned True" )
return True
if __is_noncopyable_single( base_desc.related_class ):
logger.debug( true_header + "__is_noncopyable_single returned True" )
return True
if not has_copy_constructor( class_ ):
logger.debug( true_header + "does not have trival copy constructor" )
return True
elif not has_public_constructor( class_ ):
logger.debug( true_header + "does not have a public constructor" )
return True
elif has_destructor( class_ ) and not has_public_destructor( class_ ):
logger.debug( true_header + "has private destructor")
return True
else:
return __is_noncopyable_single( class_ )
def is_defined_in_xxx( xxx, cls ):
"""small helper function, that checks whether class ( C{cls} ) is defined
under C{::xxx} namespace"""
if not cls.parent:
return False
if not isinstance( cls.parent, namespace.namespace_t ):
return False
if xxx != cls.parent.name:
return False
xxx_ns = cls.parent
if not xxx_ns.parent:
return False
if not isinstance( xxx_ns.parent, namespace.namespace_t ):
return False
if '::' != xxx_ns.parent.name:
return False
global_ns = xxx_ns.parent
return None is global_ns.parent
class impl_details:
"""implementation details"""
@staticmethod
def is_defined_in_xxx( xxx, cls ):
"""implementation details"""
if not cls.parent:
return False
if not isinstance( cls.parent, namespace.namespace_t ):
return False
if xxx != cls.parent.name:
return False
xxx_ns = cls.parent
if not xxx_ns.parent:
return False
if not isinstance( xxx_ns.parent, namespace.namespace_t ):
return False
if '::' != xxx_ns.parent.name:
return False
global_ns = xxx_ns.parent
return None is global_ns.parent
@staticmethod
def find_value_type( global_ns, value_type_str ):
"""implementation details"""
if not value_type_str.startswith( '::' ):
value_type_str = '::' + value_type_str
found = global_ns.decls( name=value_type_str
, function=lambda decl: not isinstance( decl, calldef.calldef_t )
, allow_empty=True )
if not found:
no_global_ns_value_type_str = value_type_str[2:]
if cpptypes.FUNDAMENTAL_TYPES.has_key( no_global_ns_value_type_str ):
return cpptypes.FUNDAMENTAL_TYPES[ no_global_ns_value_type_str ]
elif is_std_string( value_type_str ):
string_ = global_ns.typedef( '::std::string' )
return remove_declarated( string_ )
elif is_std_wstring( value_type_str ):
string_ = global_ns.typedef( '::std::wstring' )
return remove_declarated( string_ )
else:
value_type_str = no_global_ns_value_type_str
has_const = value_type_str.startswith( 'const ' )
if has_const:
value_type_str = value_type_str[ len('const '): ]
has_pointer = value_type_str.endswith( '*' )
if has_pointer:
value_type_str = value_type_str[:-1]
found = None
if has_const or has_pointer:
found = impl_details.find_value_type( global_ns, value_type_str )
if not found:
return None
else:
if isinstance( found, class_declaration.class_types ):
found = cpptypes.declarated_t( found )
if has_const:
found = cpptypes.const_t( found )
if has_pointer:
found = cpptypes.pointer_t( found )
return found
if len( found ) == 1:
return found[0]
else:
return None
class smart_pointer_traits:
"""implements functionality, needed for convinient work with smart pointers"""
@staticmethod
def is_smart_pointer( type ):
"""returns True, if type represents instantiation of C{boost::shared_ptr}, False otherwise"""
type = remove_alias( type )
type = remove_cv( type )
type = remove_declarated( type )
if not isinstance( type, ( class_declaration.class_declaration_t, class_declaration.class_t ) ):
return False
if not impl_details.is_defined_in_xxx( 'boost', type ):
return False
return type.decl_string.startswith( '::boost::shared_ptr<' )
@staticmethod
def value_type( type ):
"""returns reference to boost::shared_ptr value type"""
if not smart_pointer_traits.is_smart_pointer( type ):
raise TypeError( 'Type "%s" is not instantiation of boost::shared_ptr' % type.decl_string )
type = remove_alias( type )
cls = remove_cv( type )
        cls = remove_declarated( cls )
if isinstance( cls, class_declaration.class_t ):
return remove_declarated( cls.typedef( "value_type", recursive=False ).type )
elif not isinstance( cls, ( class_declaration.class_declaration_t, class_declaration.class_t ) ):
raise RuntimeError( "Unable to find out shared_ptr value type. shared_ptr class is: %s" % cls.decl_string )
else:
value_type_str = templates.args( cls.name )[0]
ref = impl_details.find_value_type( cls.top_parent, value_type_str )
if None is ref:
raise RuntimeError( "Unable to find out shared_ptr value type. shared_ptr class is: %s" % cls.decl_string )
return ref
class auto_ptr_traits:
"""implements functionality, needed for convinient work with std::auto_ptr pointers"""
@staticmethod
def is_smart_pointer( type ):
"""returns True, if type represents instantiation of C{boost::shared_ptr}, False otherwise"""
type = remove_alias( type )
type = remove_cv( type )
type = remove_declarated( type )
if not isinstance( type, ( class_declaration.class_declaration_t, class_declaration.class_t ) ):
return False
if not impl_details.is_defined_in_xxx( 'std', type ):
return False
return type.decl_string.startswith( '::std::auto_ptr<' )
@staticmethod
def value_type( type ):
"""returns reference to boost::shared_ptr value type"""
if not auto_ptr_traits.is_smart_pointer( type ):
raise TypeError( 'Type "%s" is not instantiation of std::auto_ptr' % type.decl_string )
type = remove_alias( type )
cls = remove_cv( type )
        cls = remove_declarated( cls )
if isinstance( cls, class_declaration.class_t ):
return remove_declarated( cls.typedef( "element_type", recursive=False ).type )
elif not isinstance( cls, ( class_declaration.class_declaration_t, class_declaration.class_t ) ):
raise RuntimeError( "Unable to find out auto_ptr value type. auto_ptr class is: %s" % cls.decl_string )
else:
value_type_str = templates.args( cls.name )[0]
ref = impl_details.find_value_type( cls.top_parent, value_type_str )
if None is ref:
raise RuntimeError( "Unable to find out auto_ptr value type. shared_ptr class is: %s" % cls.decl_string )
return ref
def is_std_string( type ):
"""returns True, if type represents C++ std::string, False otherwise"""
decl_strings = [
'::std::basic_string<char,std::char_traits<char>,std::allocator<char> >'
, '::std::basic_string<char, std::char_traits<char>, std::allocator<char> >'
, '::std::string' ]
if isinstance( type, types.StringTypes ):
return type in decl_strings
else:
type = remove_alias( type )
return remove_cv( type ).decl_string in decl_strings
def is_std_wstring( type ):
"""returns True, if type represents C++ std::wstring, False otherwise"""
decl_strings = [
'::std::basic_string<wchar_t,std::char_traits<wchar_t>,std::allocator<wchar_t> >'
, '::std::basic_string<wchar_t, std::char_traits<wchar_t>, std::allocator<wchar_t> >'
, '::std::wstring' ]
if isinstance( type, types.StringTypes ):
return type in decl_strings
else:
type = remove_alias( type )
return remove_cv( type ).decl_string in decl_strings
def is_std_ostream( type ):
"""returns True, if type represents C++ std::string, False otherwise"""
decl_strings = [
'::std::basic_ostream<char, std::char_traits<char> >'
, '::std::basic_ostream<char,std::char_traits<char> >'
, '::std::ostream' ]
if isinstance( type, types.StringTypes ):
return type in decl_strings
else:
type = remove_alias( type )
return remove_cv( type ).decl_string in decl_strings
def is_std_wostream( type ):
"""returns True, if type represents C++ std::string, False otherwise"""
decl_strings = [
'::std::basic_ostream<wchar_t, std::char_traits<wchar_t> >'
, '::std::basic_ostream<wchar_t,std::char_traits<wchar_t> >'
, '::std::wostream' ]
if isinstance( type, types.StringTypes ):
return type in decl_strings
else:
type = remove_alias( type )
return remove_cv( type ).decl_string in decl_strings
|
eile/ITK
|
Modules/ThirdParty/pygccxml/src/pygccxml/declarations/type_traits.py
|
Python
|
apache-2.0
| 44,926 | 0.02838 |
def check(msg):
    if msg == 'hello':
        print('hello')
    else:
        print('goodbye')
check('greetings')
|
erc7as/cs3240-labdemo
|
check.py
|
Python
|
mit
| 94 | 0.06383 |
az.plot_dist(b, rug=True, quantiles=[.25, .5, .75], cumulative=True)
|
mcmcplotlib/mcmcplotlib
|
api/generated/arviz-plot_dist-5.py
|
Python
|
apache-2.0
| 69 | 0 |
#!/usr/bin/python
import os
from setuptools import setup, find_packages
SRC_DIR = os.path.dirname(__file__)
CHANGES_FILE = os.path.join(SRC_DIR, "CHANGES")
with open(CHANGES_FILE) as fil:
version = fil.readline().split()[0]
setup(
name="state-machine-crawler",
description="A library for following automata based programming model.",
version=version,
packages=find_packages(),
setup_requires=["nose"],
tests_require=["mock==1.0.1", "coverage"],
install_requires=["werkzeug", "pydot2", "pyparsing==1.5.2"],
test_suite='nose.collector',
author="Anton Berezin",
author_email="[email protected]",
entry_points={
"console_scripts": [
'state-machine-crawler = state_machine_crawler:entry_point'
]
},
include_package_data=True
)
|
gurunars/state_machine_crawler
|
setup.py
|
Python
|
apache-2.0
| 812 | 0 |
# This file is part of jobservice.
# Copyright 2010 Jacob Peddicord <[email protected]>
#
# jobservice is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# jobservice is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with jobservice. If not, see <http://www.gnu.org/licenses/>.
import logging
from dbus import SystemBus, DBusException, Interface, UInt64
log = logging.getLogger('policy')
class DeniedByPolicy(DBusException):
_dbus_error_name = 'com.ubuntu.JobService.DeniedByPolicy'
class Policy:
def __init__(self, enforce=True):
self.enforce = enforce
self.bus = SystemBus()
self.dbus_iface = None
self.pk = Interface(self.bus.get_object('org.freedesktop.PolicyKit1',
'/org/freedesktop/PolicyKit1/Authority'),
'org.freedesktop.PolicyKit1.Authority')
if not enforce:
log.warn('Not enforcing PolicyKit privileges!')
def check(self, sender, conn, priv='com.ubuntu.jobservice.manage'):
"""
Check or ask for authentication for job management.
"""
if not self.enforce: return
log.debug('Asking for PolicyKit authorization')
# get the PID of the sender
if not self.dbus_iface:
self.dbus_iface = Interface(conn.get_object('org.freedesktop.DBus',
'/org/freedesktop/DBus/Bus'), 'org.freedesktop.DBus')
pid = self.dbus_iface.GetConnectionUnixProcessID(sender)
# ask PolicyKit
auth, challenge, details = self.pk.CheckAuthorization(
('unix-process', {'pid': pid, 'start-time': UInt64(0)}),
priv, {'': ''}, 1, '', timeout=500)
if not auth:
log.info('Authorization failed')
raise DeniedByPolicy('Not authorized to manage jobs.')
log.debug('Authorization passed')
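# A hedged usage sketch from a dbus-python service method (the interface and
# method name below are illustrative, not part of this module):
#   policy = Policy()
#   @dbus.service.method('com.ubuntu.JobService', sender_keyword='sender',
#                        connection_keyword='conn')
#   def StartJob(self, name, sender=None, conn=None):
#       policy.check(sender, conn)  # raises DeniedByPolicy if not authorized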
|
jpeddicord/jobservice
|
JobService/policy.py
|
Python
|
gpl-3.0
| 2,319 | 0.004743 |
from sys import maxsize
class Contact:
def __init__(self, Firstname=None, Middlename=None, Lastname=None, Nickname=None, Title=None, Company=None, Address=None, Home=None, Mobile=None, Work=None,
Fax=None, Email=None, Email2=None, Email3=None, Homepage=None, Bday=None, Bmonth=None, Byear=None, Aday=None, Amonth=None, Ayear=None, Address2=None, Phone2=None,
Notes=None, id=None, all_phones_from_home_page=None, all_address_from_home_page=None, all_emails=None):
self.Firstname = Firstname
self.Middlename = Middlename
self.Lastname = Lastname
self.Nickname = Nickname
self.Title = Title
self.Company = Company
self.Address = Address
self.Home = Home
self.Mobile = Mobile
self.Work = Work
self.Fax = Fax
self.Email = Email
self.Email2 = Email2
self.Email3 = Email3
self.Homepage = Homepage
self.Bday = Bday
self.Bmonth = Bmonth
self.Byear = Byear
self.Aday = Aday
self.Amonth = Amonth
self.Ayear = Ayear
self.Address2 = Address2
self.Phone2 = Phone2
self.Notes = Notes
self.id = id
self.all_phones_from_home_page = all_phones_from_home_page
self.all_address_from_home_page = all_address_from_home_page
        self.all_emails = all_emails
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id) and self.Firstname == other.Firstname and self.Lastname == other.Lastname
def __repr__(self):
return "%s:%s;%s" % (self.Firstname, self.Lastname, self.Middlename)
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
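# A small usage sketch (hypothetical list): id_or_max sorts contacts without
# a database id after all persisted ones, so UI and DB lists align:
#   sorted(contact_list, key=Contact.id_or_max)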
|
IrinaZI/Python_training
|
model/contact.py
|
Python
|
apache-2.0
| 1,808 | 0.005531 |
"""
Estimation of the total magnetization vector of homogeneous bodies.
It estimates parameters related to the magnetization vector of homogeneous
bodies.
**Algorithms**
* :class:`~fatiando.gravmag.magdir.DipoleMagDir`: This class estimates
the Cartesian components of the magnetization vector of homogeneous
dipolar bodies with known center. The estimated magnetization vector
is converted to dipole moment, inclination (positive down) and declination
(with respect to x, North).
----
"""
from __future__ import division
import numpy
from ..inversion.base import Misfit
from .. import mesher
from ..utils import ang2vec, vec2ang, safe_dot
from . import sphere
from ..constants import G, CM, T2NT, SI2EOTVOS
class DipoleMagDir(Misfit):
"""
Estimate the magnetization vector of a set of dipoles from magnetic
total field anomaly.
By using the well-known first-order approximation of the total field
anomaly (Blakely, 1996, p. 179) produced by a set of dipoles, the
estimation of the Cartesian components of the magnetization vectors is
formulated as linear inverse problem. After estimating the magnetization
vectors, they are converted to dipole moment, inclination (positive down)
and declination (with respect to x, North).
Reference
Blakely, R. (1996), Potential theory in gravity and magnetic applications:
CUP
.. note:: Assumes x = North, y = East, z = Down.
Parameters:
* x, y, z : 1d-arrays
The x, y, z coordinates of each data point.
* data : 1d-array
The total field magnetic anomaly data at each point.
* inc, dec : floats
The inclination and declination of the inducing field
* points : list of points [x, y, z]
Each point [x, y, z] is the center of a dipole. Will invert for
the Cartesian components of the magnetization vector of each
dipole. Subsequently, the estimated magnetization vectors are
converted to dipole moment, inclination and declination.
.. note:: Inclination is positive down and declination is measured with
respect to x (North).
Examples:
Estimation of the total magnetization vector of dipoles with known centers
>>> import numpy
>>> from fatiando import gridder, utils
>>> from fatiando.gravmag import sphere
>>> from fatiando.mesher import Sphere, Prism
>>> # Produce some synthetic data
>>> area = (0, 10000, 0, 10000)
>>> x, y, z = gridder.scatter(area, 500, z=-150, seed=0)
>>> model = [Sphere(3000, 3000, 1000, 1000,
... {'magnetization': utils.ang2vec(6.0, -20.0, -10.0)}),
... Sphere(7000, 7000, 1000, 1000,
... {'magnetization': utils.ang2vec(6.0, 30.0, -40.0)})]
>>> inc, dec = -9.5, -13
>>> tf = sphere.tf(x, y, z, model, inc, dec)
>>> # Give the coordinates of the dipoles
>>> points = [[3000.0, 3000.0, 1000.0], [7000.0, 7000.0, 1000.0]]
>>> p_true = numpy.hstack((ang2vec(CM*(4.*numpy.pi/3.)*6.0*1000**3,
... -20.0, -10.0),
... ang2vec(CM*(4.*numpy.pi/3.)*6.0*1000**3,
... 30.0, -40.0)))
>>> estimate_true = [utils.vec2ang(p_true[3*i : 3*i + 3]) for i
... in range(len(points))]
>>> # Make a solver and fit it to the data
>>> solver = DipoleMagDir(x, y, z, tf, inc, dec, points).fit()
>>> # Check the fit
>>> numpy.allclose(tf, solver.predicted(), rtol=0.001, atol=0.001)
True
>>> # solver.p_ returns the Cartesian components of the
>>> # estimated magnetization vectors
>>> for p in solver.p_: print "%.10f" % p
2325.8255393651
-410.1057950109
-859.5903757213
1667.3411086852
-1399.0653093445
1256.6370614359
>>> # Check the estimated parameter vector
>>> numpy.allclose(p_true, solver.p_, rtol=0.001, atol=0.001)
True
>>> # The parameter vector is not that useful so use solver.estimate_
>>> # to convert the estimated magnetization vectors in dipole moment,
>>> # inclination and declination.
>>> for e in solver.estimate_:
... print "%.10f %.10f %.10f" % (e[0], e[1], e[2])
2513.2741228718 -20.0000000000 -10.0000000000
2513.2741228718 30.0000000000 -40.0000000000
>>> # Check the converted estimate
>>> numpy.allclose(estimate_true, solver.estimate_, rtol=0.001,
... atol=0.001)
True
"""
def __init__(self, x, y, z, data, inc, dec, points):
super(DipoleMagDir, self).__init__(
data=data,
positional={'x': x, 'y': y, 'z': z},
model={'inc': inc, 'dec': dec, 'points': points},
nparams=3 * len(points),
islinear=True)
# Constants
self.ndipoles = len(points)
self.cte = 1.0 / ((4.0 * numpy.pi / 3.0) * G * SI2EOTVOS)
# Geomagnetic Field versor
self.F_versor = ang2vec(1.0, self.model['inc'], self.model['dec'])
def _get_predicted(self, p):
return safe_dot(self.jacobian(p), p)
def _get_jacobian(self, p):
x = self.positional['x']
y = self.positional['y']
z = self.positional['z']
dipoles = [mesher.Sphere(xp, yp, zp, 1.) for xp, yp, zp in
self.model['points']]
jac = numpy.empty((self.ndata, self.nparams), dtype=float)
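        # Each dipole occupies three adjacent Jacobian columns: the partial
        # derivatives of the total-field anomaly with respect to the x, y and
        # z components of its magnetization, assembled from second
        # derivatives of the potential computed by the sphere module.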
for i, dipole in enumerate(dipoles):
k = 3 * i
derivative_gxx = sphere.gxx(x, y, z, [dipole], dens=self.cte)
derivative_gxy = sphere.gxy(x, y, z, [dipole], dens=self.cte)
derivative_gxz = sphere.gxz(x, y, z, [dipole], dens=self.cte)
derivative_gyy = sphere.gyy(x, y, z, [dipole], dens=self.cte)
derivative_gyz = sphere.gyz(x, y, z, [dipole], dens=self.cte)
derivative_gzz = sphere.gzz(x, y, z, [dipole], dens=self.cte)
jac[:, k] = T2NT * ((self.F_versor[0] * derivative_gxx) +
(self.F_versor[1] * derivative_gxy) +
(self.F_versor[2] * derivative_gxz))
jac[:, k + 1] = T2NT * ((self.F_versor[0] * derivative_gxy) +
(self.F_versor[1] * derivative_gyy) +
(self.F_versor[2] * derivative_gyz))
jac[:, k + 2] = T2NT * ((self.F_versor[0] * derivative_gxz) +
(self.F_versor[1] * derivative_gyz) +
(self.F_versor[2] * derivative_gzz))
return jac
def fit(self):
"""
Solve for the magnetization direction of a set of dipoles.
After solving, use the ``estimate_`` attribute to get the
estimated magnetization vectors in dipole moment, inclination
and declination.
The estimated magnetization vectors in Cartesian coordinates can
be accessed through the ``p_`` attribute.
See the the docstring of :class:`~fatiando.gravmag.magdir.DipoleMagDir`
for examples.
"""
super(DipoleMagDir, self).fit()
self._estimate = [vec2ang(self.p_[3 * i: 3 * i + 3]) for i in
range(len(self.model['points']))]
return self
|
eusoubrasileiro/fatiando
|
fatiando/gravmag/magdir.py
|
Python
|
bsd-3-clause
| 7,336 | 0 |
#!/usr/bin/env python
#
# asn2wrs.py
# ASN.1 to Wireshark dissector compiler
# Copyright 2004 Tomas Kukosa
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, provided that the above
# copyright notice(s) and this permission notice appear in all copies of
# the Software and that both the above copyright notice(s) and this
# permission notice appear in supporting documentation.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
# OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL
# INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING
# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Except as contained in this notice, the name of a copyright holder
# shall not be used in advertising or otherwise to promote the sale, use
# or other dealings in this Software without prior written authorization
# of the copyright holder.
"""ASN.1 to Wireshark dissector compiler"""
#
# Compiler from ASN.1 specification to the Wireshark dissector
#
# Based on ASN.1 to Python compiler from Aaron S. Lav's PyZ3950 package licensed under the X Consortium license
# http://www.pobox.com/~asl2/software/PyZ3950/
# (ASN.1 to Python compiler functionality is broken but not removed; it could be revived if necessary)
#
# It requires Dave Beazley's PLY parsing package licensed under the LGPL (tested with version 2.3)
# http://www.dabeaz.com/ply/
#
#
# ITU-T Recommendation X.680 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Specification of basic notation
#
# ITU-T Recommendation X.681 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Information object specification
#
# ITU-T Recommendation X.682 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Constraint specification
#
# ITU-T Recommendation X.683 (07/2002),
# Information technology - Abstract Syntax Notation One (ASN.1): Parameterization of ASN.1 specifications
#
# ITU-T Recommendation X.880 (07/1994),
# Information technology - Remote Operations: Concepts, model and notation
#
import warnings
import re
import sys
import os
import os.path
import time
import getopt
import traceback
import lex
import yacc
if sys.version_info[0] < 3:
from string import maketrans
# OID name -> number conversion table
oid_names = {
'/itu-t' : 0,
'/itu' : 0,
'/ccitt' : 0,
'/itu-r' : 0,
'0/recommendation' : 0,
'0.0/a' : 1,
'0.0/b' : 2,
'0.0/c' : 3,
'0.0/d' : 4,
'0.0/e' : 5,
'0.0/f' : 6,
'0.0/g' : 7,
'0.0/h' : 8,
'0.0/i' : 9,
'0.0/j' : 10,
'0.0/k' : 11,
'0.0/l' : 12,
'0.0/m' : 13,
'0.0/n' : 14,
'0.0/o' : 15,
'0.0/p' : 16,
'0.0/q' : 17,
'0.0/r' : 18,
'0.0/s' : 19,
'0.0/t' : 20,
'0.0/tseries' : 20,
'0.0/u' : 21,
'0.0/v' : 22,
'0.0/w' : 23,
'0.0/x' : 24,
'0.0/y' : 25,
'0.0/z' : 26,
'0/question' : 1,
'0/administration' : 2,
'0/network-operator' : 3,
'0/identified-organization' : 4,
'0/r-recommendation' : 5,
'0/data' : 9,
'/iso' : 1,
'1/standard' : 0,
'1/registration-authority' : 1,
'1/member-body' : 2,
'1/identified-organization' : 3,
'/joint-iso-itu-t' : 2,
'/joint-iso-ccitt' : 2,
'2/presentation' : 0,
'2/asn1' : 1,
'2/association-control' : 2,
'2/reliable-transfer' : 3,
'2/remote-operations' : 4,
'2/ds' : 5,
'2/directory' : 5,
'2/mhs' : 6,
'2/mhs-motis' : 6,
'2/ccr' : 7,
'2/oda' : 8,
'2/ms' : 9,
'2/osi-management' : 9,
'2/transaction-processing' : 10,
'2/dor' : 11,
'2/distinguished-object-reference' : 11,
'2/reference-data-transfe' : 12,
'2/network-layer' : 13,
'2/network-layer-management' : 13,
'2/transport-layer' : 14,
'2/transport-layer-management' : 14,
'2/datalink-layer' : 15,
'2/datalink-layer-managemen' : 15,
'2/datalink-layer-management-information' : 15,
'2/country' : 16,
'2/registration-procedures' : 17,
'2/registration-procedure' : 17,
'2/physical-layer' : 18,
'2/physical-layer-management' : 18,
'2/mheg' : 19,
'2/genericULS' : 20,
'2/generic-upper-layers-security' : 20,
'2/guls' : 20,
'2/transport-layer-security-protocol' : 21,
'2/network-layer-security-protocol' : 22,
'2/international-organizations' : 23,
'2/internationalRA' : 23,
'2/sios' : 24,
'2/uuid' : 25,
'2/odp' : 26,
'2/upu' : 40,
}
ITEM_FIELD_NAME = '_item'
UNTAG_TYPE_NAME = '_untag'
def asn2c(id):
return id.replace('-', '_').replace('.', '_').replace('&', '_')
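# For example (illustrative): asn2c('Foo-Bar.v1&field') -> 'Foo_Bar_v1_field'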
input_file = None
g_conform = None
lexer = None
in_oid = False
class LexError(Exception):
def __init__(self, tok, filename=None):
self.tok = tok
self.filename = filename
self.msg = "Unexpected character %r" % (self.tok.value[0])
Exception.__init__(self, self.msg)
def __repr__(self):
return "%s:%d: %s" % (self.filename, self.tok.lineno, self.msg)
__str__ = __repr__
class ParseError(Exception):
def __init__(self, tok, filename=None):
self.tok = tok
self.filename = filename
self.msg = "Unexpected token %s(%r)" % (self.tok.type, self.tok.value)
Exception.__init__(self, self.msg)
def __repr__(self):
return "%s:%d: %s" % (self.filename, self.tok.lineno, self.msg)
__str__ = __repr__
class DuplicateError(Exception):
def __init__(self, type, ident):
self.type = type
self.ident = ident
self.msg = "Duplicate %s for %s" % (self.type, self.ident)
Exception.__init__(self, self.msg)
def __repr__(self):
return self.msg
__str__ = __repr__
class CompError(Exception):
def __init__(self, msg):
self.msg = msg
Exception.__init__(self, self.msg)
def __repr__(self):
return self.msg
__str__ = __repr__
states = (
('braceignore','exclusive'),
)
precedence = (
('left', 'UNION', 'BAR'),
('left', 'INTERSECTION', 'CIRCUMFLEX'),
)
# 11 ASN.1 lexical items
static_tokens = {
r'::=' : 'ASSIGNMENT', # 11.16 Assignment lexical item
r'\.\.' : 'RANGE', # 11.17 Range separator
r'\.\.\.' : 'ELLIPSIS', # 11.18 Ellipsis
r'\[\[' : 'LVERBRACK', # 11.19 Left version brackets
r'\]\]' : 'RVERBRACK', # 11.20 Right version brackets
# 11.26 Single character lexical items
r'\{' : 'LBRACE',
r'\}' : 'RBRACE',
r'<' : 'LT',
#r'>' : 'GT',
r',' : 'COMMA',
r'\.' : 'DOT',
r'\(' : 'LPAREN',
r'\)' : 'RPAREN',
r'\[' : 'LBRACK',
r'\]' : 'RBRACK',
r'-' : 'MINUS',
r':' : 'COLON',
#r'=' : 'EQ',
#r'"' : 'QUOTATION',
#r"'" : 'APOSTROPHE',
r';' : 'SEMICOLON',
r'@' : 'AT',
r'\!' : 'EXCLAMATION',
r'\^' : 'CIRCUMFLEX',
r'\&' : 'AMPERSAND',
r'\|' : 'BAR'
}
# 11.27 Reserved words
# all keys in reserved_words must start w/ upper case
reserved_words = {
'ABSENT' : 'ABSENT',
'ABSTRACT-SYNTAX' : 'ABSTRACT_SYNTAX',
'ALL' : 'ALL',
'APPLICATION' : 'APPLICATION',
'AUTOMATIC' : 'AUTOMATIC',
'BEGIN' : 'BEGIN',
'BIT' : 'BIT',
'BOOLEAN' : 'BOOLEAN',
'BY' : 'BY',
'CHARACTER' : 'CHARACTER',
'CHOICE' : 'CHOICE',
'CLASS' : 'CLASS',
'COMPONENT' : 'COMPONENT',
'COMPONENTS' : 'COMPONENTS',
'CONSTRAINED' : 'CONSTRAINED',
'CONTAINING' : 'CONTAINING',
'DEFAULT' : 'DEFAULT',
'DEFINITIONS' : 'DEFINITIONS',
'EMBEDDED' : 'EMBEDDED',
# 'ENCODED' : 'ENCODED',
'END' : 'END',
'ENUMERATED' : 'ENUMERATED',
# 'EXCEPT' : 'EXCEPT',
'EXPLICIT' : 'EXPLICIT',
'EXPORTS' : 'EXPORTS',
# 'EXTENSIBILITY' : 'EXTENSIBILITY',
'EXTERNAL' : 'EXTERNAL',
'FALSE' : 'FALSE',
'FROM' : 'FROM',
'GeneralizedTime' : 'GeneralizedTime',
'IDENTIFIER' : 'IDENTIFIER',
'IMPLICIT' : 'IMPLICIT',
# 'IMPLIED' : 'IMPLIED',
'IMPORTS' : 'IMPORTS',
'INCLUDES' : 'INCLUDES',
'INSTANCE' : 'INSTANCE',
'INTEGER' : 'INTEGER',
'INTERSECTION' : 'INTERSECTION',
'MAX' : 'MAX',
'MIN' : 'MIN',
'MINUS-INFINITY' : 'MINUS_INFINITY',
'NULL' : 'NULL',
'OBJECT' : 'OBJECT',
'ObjectDescriptor' : 'ObjectDescriptor',
'OCTET' : 'OCTET',
'OF' : 'OF',
'OPTIONAL' : 'OPTIONAL',
'PATTERN' : 'PATTERN',
'PDV' : 'PDV',
'PLUS-INFINITY' : 'PLUS_INFINITY',
'PRESENT' : 'PRESENT',
'PRIVATE' : 'PRIVATE',
'REAL' : 'REAL',
'RELATIVE-OID' : 'RELATIVE_OID',
'SEQUENCE' : 'SEQUENCE',
'SET' : 'SET',
'SIZE' : 'SIZE',
'STRING' : 'STRING',
'SYNTAX' : 'SYNTAX',
'TAGS' : 'TAGS',
'TRUE' : 'TRUE',
'TYPE-IDENTIFIER' : 'TYPE_IDENTIFIER',
'UNION' : 'UNION',
'UNIQUE' : 'UNIQUE',
'UNIVERSAL' : 'UNIVERSAL',
'UTCTime' : 'UTCTime',
'WITH' : 'WITH',
# X.208 obsolete but still used
'ANY' : 'ANY',
'DEFINED' : 'DEFINED',
}
for k in list(static_tokens.keys()):
    if static_tokens[k] is None:
        static_tokens[k] = k
StringTypes = ['Numeric', 'Printable', 'IA5', 'BMP', 'Universal', 'UTF8',
'Teletex', 'T61', 'Videotex', 'Graphic', 'ISO646', 'Visible',
'General']
for s in StringTypes:
reserved_words[s + 'String'] = s + 'String'
tokens = list(static_tokens.values()) \
+ list(reserved_words.values()) \
+ ['BSTRING', 'HSTRING', 'QSTRING',
'UCASE_IDENT', 'LCASE_IDENT', 'LCASE_IDENT_ASSIGNED', 'CLASS_IDENT',
'REAL_NUMBER', 'NUMBER', 'PYQUOTE']
cur_mod = __import__ (__name__) # XXX blech!
for (k, v) in list(static_tokens.items ()):
cur_mod.__dict__['t_' + v] = k
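# The loop above installs one module-level PLY rule per static token, e.g.
# (illustrative) t_LBRACE = r'\{' and t_ASSIGNMENT = r'::=' -- ply.lex picks
# these up automatically through its t_<TOKENNAME> naming convention.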
# 11.10 Binary strings
def t_BSTRING (t):
r"'[01]*'B"
return t
# 11.12 Hexadecimal strings
def t_HSTRING (t):
r"'[0-9A-Fa-f]*'H"
return t
def t_QSTRING (t):
r'"([^"]|"")*"'
return t
def t_UCASE_IDENT (t):
r"[A-Z](-[a-zA-Z0-9]|[a-zA-Z0-9])*" # can't end w/ '-'
if (is_class_ident(t.value)): t.type = 'CLASS_IDENT'
if (is_class_syntax(t.value)): t.type = t.value
t.type = reserved_words.get(t.value, t.type)
return t
lcase_ident_assigned = {}
def t_LCASE_IDENT (t):
r"[a-z](-[a-zA-Z0-9]|[a-zA-Z0-9])*" # can't end w/ '-'
if (not in_oid and (t.value in lcase_ident_assigned)): t.type = 'LCASE_IDENT_ASSIGNED'
return t
# 11.9 Real numbers
def t_REAL_NUMBER (t):
r"[0-9]+\.[0-9]*(?!\.)"
return t
# 11.8 Numbers
def t_NUMBER (t):
r"0|([1-9][0-9]*)"
return t
# 11.6 Comments
pyquote_str = 'PYQUOTE'
def t_COMMENT(t):
r"--(-[^\-\n]|[^\-\n])*(--|\n|-\n|$|-$)"
if (t.value.find("\n") >= 0) : t.lexer.lineno += 1
if t.value[2:2+len (pyquote_str)] == pyquote_str:
t.value = t.value[2+len(pyquote_str):]
t.value = t.value.lstrip ()
t.type = pyquote_str
return t
return None
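# Illustrative behaviour of t_COMMENT (editor's note): a plain comment such as
# "-- note --" or "-- to end of line" is swallowed (the rule returns None),
# while a "--PYQUOTE <payload>" comment is promoted to a PYQUOTE token whose
# value is the payload with the leading "--PYQUOTE" marker stripped.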
t_ignore = " \t\r"
def t_NEWLINE(t):
r'\n+'
t.lexer.lineno += t.value.count("\n")
def t_error(t):
global input_file
raise LexError(t, input_file)
# state 'braceignore'
def t_braceignore_lbrace(t):
r'\{'
t.lexer.level +=1
def t_braceignore_rbrace(t):
r'\}'
t.lexer.level -=1
# If closing brace, return token
if t.lexer.level == 0:
t.type = 'RBRACE'
return t
def t_braceignore_QSTRING (t):
r'"([^"]|"")*"'
t.lexer.lineno += t.value.count("\n")
def t_braceignore_COMMENT(t):
r"--(-[^\-\n]|[^\-\n])*(--|\n|-\n|$|-$)"
if (t.value.find("\n") >= 0) : t.lexer.lineno += 1
def t_braceignore_nonspace(t):
r'[^\s\{\}\"-]+|-(?!-)'
t_braceignore_ignore = " \t\r"
def t_braceignore_NEWLINE(t):
r'\n+'
t.lexer.lineno += t.value.count("\n")
def t_braceignore_error(t):
t.lexer.skip(1)
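# Editor's note on the 'braceignore' exclusive state: once the lexer is
# switched into it (presumably via lexer.begin('braceignore') elsewhere),
# everything between the opening '{' and its matching '}' is discarded while
# t.lexer.level tracks nesting; only the final closing brace comes back as an
# RBRACE token, so brace-delimited bodies can be skipped wholesale.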
class Ctx:
def __init__ (self, defined_dict, indent = 0):
self.tags_def = 'EXPLICIT' # default = explicit
self.indent_lev = 0
self.assignments = {}
self.dependencies = {}
self.pyquotes = []
self.defined_dict = defined_dict
self.name_ctr = 0
def spaces (self):
return " " * (4 * self.indent_lev)
def indent (self):
self.indent_lev += 1
def outdent (self):
self.indent_lev -= 1
assert (self.indent_lev >= 0)
def register_assignment (self, ident, val, dependencies):
if ident in self.assignments:
raise DuplicateError("assignment", ident)
if ident in self.defined_dict:
raise Exception("cross-module duplicates for %s" % ident)
self.defined_dict [ident] = 1
self.assignments[ident] = val
self.dependencies [ident] = dependencies
return ""
# return "#%s depends on %s" % (ident, str (dependencies))
def register_pyquote (self, val):
self.pyquotes.append (val)
return ""
def output_assignments (self):
already_output = {}
text_list = []
assign_keys = list(self.assignments.keys())
to_output_count = len (assign_keys)
while True:
any_output = 0
for (ident, val) in list(self.assignments.items ()):
if ident in already_output:
continue
ok = 1
for d in self.dependencies [ident]:
if ((d not in already_output) and
(d in assign_keys)):
ok = 0
if ok:
text_list.append ("%s=%s" % (ident,
self.assignments [ident]))
already_output [ident] = 1
any_output = 1
to_output_count -= 1
assert (to_output_count >= 0)
if not any_output:
if to_output_count == 0:
break
# OK, we detected a cycle
cycle_list = []
for ident in list(self.assignments.keys ()):
if ident not in already_output:
depend_list = [d for d in self.dependencies[ident] if d in assign_keys]
cycle_list.append ("%s(%s)" % (ident, ",".join (depend_list)))
text_list.append ("# Cycle XXX " + ",".join (cycle_list))
for (ident, val) in list(self.assignments.items ()):
if ident not in already_output:
text_list.append ("%s=%s" % (ident, self.assignments [ident]))
break
return "\n".join (text_list)
def output_pyquotes (self):
return "\n".join (self.pyquotes)
def make_new_name (self):
self.name_ctr += 1
return "_compiler_generated_name_%d" % (self.name_ctr,)
#--- Flags for EXPORT, USER_DEFINED, NO_EMIT, MAKE_ENUM -------------------------------
EF_TYPE = 0x0001
EF_VALS = 0x0002
EF_ENUM = 0x0004
EF_WS_DLL = 0x0010 # exported from shared library
EF_EXTERN = 0x0020
EF_NO_PROT = 0x0040
EF_NO_TYPE = 0x0080
EF_UCASE = 0x0100
EF_TABLE = 0x0400
EF_DEFINE = 0x0800
EF_MODULE = 0x1000
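# The flags are bitwise-combined, e.g. EF_TYPE|EF_VALS (as used for 'user_def'
# and 'no_emit' below) marks both the type dissector and its value_string.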
#--- common dependency computation ---
# Input  : list of items
#          dictionary with lists of dependencies for each item
#
# Output : tuple of two lists:
#          [0] items in dependency order
#          [1] detected dependency cycles
def dependency_compute(items, dependency, map_fn = lambda t: t, ignore_fn = lambda t: False):
item_ord = []
item_cyc = []
x = {} # already emitted
#print '# Dependency computation'
for t in items:
if map_fn(t) in x:
#print 'Continue: %s : %s' % (t, (map_fn(t))
continue
stack = [t]
stackx = {t : dependency.get(t, [])[:]}
#print 'Push: %s : %s' % (t, str(stackx[t]))
while stack:
if stackx[stack[-1]]: # has dependencies
d = stackx[stack[-1]].pop(0)
if map_fn(d) in x or ignore_fn(d):
continue
if d in stackx: # cyclic dependency
c = stack[:]
c.reverse()
c = [d] + c[0:c.index(d)+1]
c.reverse()
item_cyc.append(c)
#print 'Cyclic: %s ' % (' -> '.join(c))
continue
stack.append(d)
stackx[d] = dependency.get(d, [])[:]
#print 'Push: %s : %s' % (d, str(stackx[d]))
else:
#print 'Pop: %s' % (stack[-1])
del stackx[stack[-1]]
e = map_fn(stack.pop())
if e in x:
continue
#print 'Add: %s' % (e)
item_ord.append(e)
x[e] = True
return (item_ord, item_cyc)
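# Worked example (editor's illustration, hypothetical data):
#   order, cycles = dependency_compute(['A', 'B', 'C'],
#                                      {'A': ['B'], 'B': ['C'], 'C': ['A']})
#   # order  == ['C', 'B', 'A']        (dependencies emitted first)
#   # cycles == [['A', 'B', 'C', 'A']] (the cycle, closed back on 'A')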
# Given a filename, return a relative path from epan/dissectors
def rel_dissector_path(filename):
path_parts = os.path.abspath(filename).split(os.sep)
while (len(path_parts) > 3 and path_parts[0] != 'asn1'):
path_parts.pop(0)
path_parts.insert(0, '..')
path_parts.insert(0, '..')
return '/'.join(path_parts)
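# Illustrative (hypothetical path): '/src/wireshark/asn1/foo/packet-foo.c'
# becomes '../../asn1/foo/packet-foo.c' -- leading components are popped until
# 'asn1' heads the list, then two '..' segments are prepended.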
#--- EthCtx -------------------------------------------------------------------
class EthCtx:
def __init__(self, conform, output, indent = 0):
self.conform = conform
self.output = output
self.conform.ectx = self
self.output.ectx = self
self.encoding = 'per'
self.aligned = False
self.default_oid_variant = ''
self.default_opentype_variant = ''
self.default_containing_variant = '_pdu_new'
self.default_embedded_pdv_cb = None
self.default_external_type_cb = None
self.remove_prefix = None
self.srcdir = None
self.emitted_pdu = {}
self.module = {}
self.module_ord = []
self.all_type_attr = {}
self.all_tags = {}
self.all_vals = {}
def encp(self): # encoding protocol
encp = self.encoding
return encp
# Encoding
def Per(self): return self.encoding == 'per'
def Ber(self): return self.encoding == 'ber'
def Aligned(self): return self.aligned
def Unaligned(self): return not self.aligned
def NeedTags(self): return self.tag_opt or self.Ber()
def NAPI(self): return False # disable planned features
def Module(self): # current module name
return self.modules[-1][0]
def groups(self):
return self.group_by_prot or (self.conform.last_group > 0)
def dbg(self, d):
if (self.dbgopt.find(d) >= 0):
return True
else:
return False
def value_max(self, a, b):
        if (a == 'MAX') or (b == 'MAX'): return 'MAX'
        if a == 'MIN': return b
        if b == 'MIN': return a
try:
if (int(a) > int(b)):
return a
else:
return b
except (ValueError, TypeError):
pass
return "MAX((%s),(%s))" % (a, b)
def value_min(self, a, b):
        if (a == 'MIN') or (b == 'MIN'): return 'MIN'
        if a == 'MAX': return b
        if b == 'MAX': return a
try:
if (int(a) < int(b)):
return a
else:
return b
except (ValueError, TypeError):
pass
return "MIN((%s),(%s))" % (a, b)
def value_get_eth(self, val):
if isinstance(val, Value):
return val.to_str(self)
ethname = val
if val in self.value:
ethname = self.value[val]['ethname']
return ethname
def value_get_val(self, nm):
val = asn2c(nm)
if nm in self.value:
if self.value[nm]['import']:
v = self.get_val_from_all(nm, self.value[nm]['import'])
if v is None:
msg = 'Need value of imported value identifier %s from %s (%s)' % (nm, self.value[nm]['import'], self.value[nm]['proto'])
warnings.warn_explicit(msg, UserWarning, '', 0)
else:
val = v
else:
val = self.value[nm]['value']
if isinstance (val, Value):
val = val.to_str(self)
else:
msg = 'Need value of unknown value identifier %s' % (nm)
warnings.warn_explicit(msg, UserWarning, '', 0)
return val
def eth_get_type_attr(self, type):
#print "eth_get_type_attr(%s)" % (type)
types = [type]
while (not self.type[type]['import']):
val = self.type[type]['val']
#print val
ttype = type
while (val.type == 'TaggedType'):
val = val.val
ttype += '/' + UNTAG_TYPE_NAME
if (val.type != 'Type_Ref'):
if (type != ttype):
types.append(ttype)
break
type = val.val
types.append(type)
attr = {}
#print " ", types
while len(types):
t = types.pop()
if (self.type[t]['import']):
attr.update(self.type[t]['attr'])
attr.update(self.eth_get_type_attr_from_all(t, self.type[t]['import']))
elif (self.type[t]['val'].type == 'SelectionType'):
val = self.type[t]['val']
(ftype, display) = val.eth_ftype(self)
attr.update({ 'TYPE' : ftype, 'DISPLAY' : display,
'STRINGS' : val.eth_strings(), 'BITMASK' : '0' });
else:
attr.update(self.type[t]['attr'])
attr.update(self.eth_type[self.type[t]['ethname']]['attr'])
#print " ", attr
return attr
def eth_get_type_attr_from_all(self, type, module):
attr = {}
if module in self.all_type_attr and type in self.all_type_attr[module]:
attr = self.all_type_attr[module][type]
return attr
def get_ttag_from_all(self, type, module):
ttag = None
if module in self.all_tags and type in self.all_tags[module]:
ttag = self.all_tags[module][type]
return ttag
def get_val_from_all(self, nm, module):
val = None
if module in self.all_vals and nm in self.all_vals[module]:
val = self.all_vals[module][nm]
return val
def get_obj_repr(self, ident, flds=[], not_flds=[]):
def set_type_fn(cls, field, fnfield):
obj[fnfield + '_fn'] = 'NULL'
obj[fnfield + '_pdu'] = 'NULL'
if field in val and isinstance(val[field], Type_Ref):
p = val[field].eth_type_default_pars(self, '')
obj[fnfield + '_fn'] = p['TYPE_REF_FN']
obj[fnfield + '_fn'] = obj[fnfield + '_fn'] % p # one iteration
if (self.conform.check_item('PDU', cls + '.' + field)):
obj[fnfield + '_pdu'] = 'dissect_' + self.field[val[field].val]['ethname']
return
        # end of set_type_fn()
obj = { '_name' : ident, '_ident' : asn2c(ident)}
obj['_class'] = self.oassign[ident].cls
obj['_module'] = self.oassign[ident].module
val = self.oassign[ident].val
for f in flds:
if f not in val:
return None
for f in not_flds:
if f in val:
return None
for f in list(val.keys()):
if isinstance(val[f], Node):
obj[f] = val[f].fld_obj_repr(self)
else:
obj[f] = str(val[f])
if (obj['_class'] == 'TYPE-IDENTIFIER') or (obj['_class'] == 'ABSTRACT-SYNTAX'):
set_type_fn(obj['_class'], '&Type', '_type')
if (obj['_class'] == 'OPERATION'):
set_type_fn(obj['_class'], '&ArgumentType', '_argument')
set_type_fn(obj['_class'], '&ResultType', '_result')
if (obj['_class'] == 'ERROR'):
set_type_fn(obj['_class'], '&ParameterType', '_parameter')
return obj
#--- eth_reg_module -----------------------------------------------------------
def eth_reg_module(self, module):
#print "eth_reg_module(module='%s')" % (module)
name = module.get_name()
self.modules.append([name, module.get_proto(self)])
if name in self.module:
raise DuplicateError("module", name)
self.module[name] = []
self.module_ord.append(name)
#--- eth_module_dep_add ------------------------------------------------------------
def eth_module_dep_add(self, module, dep):
self.module[module].append(dep)
#--- eth_exports ------------------------------------------------------------
def eth_exports(self, exports):
self.exports_all = False
if ((len(exports) == 1) and (exports[0] == 'ALL')):
self.exports_all = True
return
for e in (exports):
if isinstance(e, Type_Ref):
self.exports.append(e.val)
elif isinstance(e, Class_Ref):
self.cexports.append(e.val)
else:
self.vexports.append(e)
#--- eth_reg_assign ---------------------------------------------------------
def eth_reg_assign(self, ident, val, virt=False):
#print "eth_reg_assign(ident='%s')" % (ident)
if ident in self.assign:
raise DuplicateError("assignment", ident)
self.assign[ident] = { 'val' : val , 'virt' : virt }
self.assign_ord.append(ident)
if (self.exports_all):
self.exports.append(ident)
#--- eth_reg_vassign --------------------------------------------------------
def eth_reg_vassign(self, vassign):
ident = vassign.ident
#print "eth_reg_vassign(ident='%s')" % (ident)
if ident in self.vassign:
raise DuplicateError("value assignment", ident)
self.vassign[ident] = vassign
self.vassign_ord.append(ident)
if (self.exports_all):
self.vexports.append(ident)
#--- eth_reg_oassign --------------------------------------------------------
def eth_reg_oassign(self, oassign):
ident = oassign.ident
#print "eth_reg_oassign(ident='%s')" % (ident)
if ident in self.oassign:
if self.oassign[ident] == oassign:
return # OK - already defined
else:
raise DuplicateError("information object assignment", ident)
self.oassign[ident] = oassign
self.oassign_ord.append(ident)
self.oassign_cls.setdefault(oassign.cls, []).append(ident)
#--- eth_import_type --------------------------------------------------------
def eth_import_type(self, ident, mod, proto):
#print "eth_import_type(ident='%s', mod='%s', prot='%s')" % (ident, mod, proto)
if ident in self.type:
#print "already defined '%s' import=%s, module=%s" % (ident, str(self.type[ident]['import']), self.type[ident].get('module', '-'))
if not self.type[ident]['import'] and (self.type[ident]['module'] == mod) :
return # OK - already defined
elif self.type[ident]['import'] and (self.type[ident]['import'] == mod) :
return # OK - already imported
else:
raise DuplicateError("type", ident)
self.type[ident] = {'import' : mod, 'proto' : proto,
'ethname' : '' }
self.type[ident]['attr'] = { 'TYPE' : 'FT_NONE', 'DISPLAY' : 'BASE_NONE',
'STRINGS' : 'NULL', 'BITMASK' : '0' }
mident = "$%s$%s" % (mod, ident)
if (self.conform.check_item('TYPE_ATTR', mident)):
self.type[ident]['attr'].update(self.conform.use_item('TYPE_ATTR', mident))
else:
self.type[ident]['attr'].update(self.conform.use_item('TYPE_ATTR', ident))
if (self.conform.check_item('IMPORT_TAG', mident)):
self.conform.copy_item('IMPORT_TAG', ident, mident)
self.type_imp.append(ident)
#--- dummy_import_type --------------------------------------------------------
def dummy_import_type(self, ident):
# dummy imported
if ident in self.type:
raise Exception("Try to dummy import for existing type :%s" % ident)
ethtype = asn2c(ident)
self.type[ident] = {'import' : 'xxx', 'proto' : 'xxx',
'ethname' : ethtype }
self.type[ident]['attr'] = { 'TYPE' : 'FT_NONE', 'DISPLAY' : 'BASE_NONE',
'STRINGS' : 'NULL', 'BITMASK' : '0' }
self.eth_type[ethtype] = { 'import' : 'xxx', 'proto' : 'xxx' , 'attr' : {}, 'ref' : []}
print("Dummy imported: %s (%s)" % (ident, ethtype))
return ethtype
#--- eth_import_class --------------------------------------------------------
def eth_import_class(self, ident, mod, proto):
#print "eth_import_class(ident='%s', mod='%s', prot='%s')" % (ident, mod, proto)
if ident in self.objectclass:
#print "already defined import=%s, module=%s" % (str(self.objectclass[ident]['import']), self.objectclass[ident]['module'])
if not self.objectclass[ident]['import'] and (self.objectclass[ident]['module'] == mod) :
return # OK - already defined
elif self.objectclass[ident]['import'] and (self.objectclass[ident]['import'] == mod) :
return # OK - already imported
else:
raise DuplicateError("object class", ident)
self.objectclass[ident] = {'import' : mod, 'proto' : proto,
'ethname' : '' }
self.objectclass_imp.append(ident)
#--- eth_import_value -------------------------------------------------------
def eth_import_value(self, ident, mod, proto):
        #print "eth_import_value(ident='%s', mod='%s', proto='%s')" % (ident, mod, proto)
if ident in self.value:
#print "already defined import=%s, module=%s" % (str(self.value[ident]['import']), self.value[ident]['module'])
if not self.value[ident]['import'] and (self.value[ident]['module'] == mod) :
return # OK - already defined
elif self.value[ident]['import'] and (self.value[ident]['import'] == mod) :
return # OK - already imported
else:
raise DuplicateError("value", ident)
self.value[ident] = {'import' : mod, 'proto' : proto,
'ethname' : ''}
self.value_imp.append(ident)
#--- eth_sel_req ------------------------------------------------------------
def eth_sel_req(self, typ, sel):
key = typ + '.' + sel
if key not in self.sel_req:
self.sel_req[key] = { 'typ' : typ , 'sel' : sel}
self.sel_req_ord.append(key)
return key
#--- eth_comp_req ------------------------------------------------------------
def eth_comp_req(self, type):
self.comp_req_ord.append(type)
#--- eth_dep_add ------------------------------------------------------------
def eth_dep_add(self, type, dep):
if type not in self.type_dep:
self.type_dep[type] = []
self.type_dep[type].append(dep)
#--- eth_reg_type -----------------------------------------------------------
def eth_reg_type(self, ident, val):
#print "eth_reg_type(ident='%s', type='%s')" % (ident, val.type)
if ident in self.type:
if self.type[ident]['import'] and (self.type[ident]['import'] == self.Module()) :
# replace imported type
del self.type[ident]
self.type_imp.remove(ident)
else:
raise DuplicateError("type", ident)
val.ident = ident
self.type[ident] = { 'val' : val, 'import' : None }
self.type[ident]['module'] = self.Module()
self.type[ident]['proto'] = self.proto
if len(ident.split('/')) > 1:
self.type[ident]['tname'] = val.eth_tname()
else:
self.type[ident]['tname'] = asn2c(ident)
self.type[ident]['export'] = self.conform.use_item('EXPORTS', ident)
self.type[ident]['enum'] = self.conform.use_item('MAKE_ENUM', ident)
self.type[ident]['vals_ext'] = self.conform.use_item('USE_VALS_EXT', ident)
self.type[ident]['user_def'] = self.conform.use_item('USER_DEFINED', ident)
self.type[ident]['no_emit'] = self.conform.use_item('NO_EMIT', ident)
self.type[ident]['tname'] = self.conform.use_item('TYPE_RENAME', ident, val_dflt=self.type[ident]['tname'])
self.type[ident]['ethname'] = ''
if (val.type == 'Type_Ref') or (val.type == 'TaggedType') or (val.type == 'SelectionType') :
self.type[ident]['attr'] = {}
else:
(ftype, display) = val.eth_ftype(self)
self.type[ident]['attr'] = { 'TYPE' : ftype, 'DISPLAY' : display,
'STRINGS' : val.eth_strings(), 'BITMASK' : '0' }
self.type[ident]['attr'].update(self.conform.use_item('TYPE_ATTR', ident))
self.type_ord.append(ident)
# PDU
if (self.conform.check_item('PDU', ident)):
self.eth_reg_field(ident, ident, impl=val.HasImplicitTag(self), pdu=self.conform.use_item('PDU', ident))
#--- eth_reg_objectclass ----------------------------------------------------------
def eth_reg_objectclass(self, ident, val):
#print "eth_reg_objectclass(ident='%s')" % (ident)
if ident in self.objectclass:
if self.objectclass[ident]['import'] and (self.objectclass[ident]['import'] == self.Module()) :
# replace imported object class
del self.objectclass[ident]
self.objectclass_imp.remove(ident)
elif isinstance(self.objectclass[ident]['val'], Class_Ref) and \
isinstance(val, Class_Ref) and \
(self.objectclass[ident]['val'].val == val.val):
pass # ignore duplicated CLASS1 ::= CLASS2
else:
raise DuplicateError("object class", ident)
self.objectclass[ident] = { 'import' : None, 'module' : self.Module(), 'proto' : self.proto }
self.objectclass[ident]['val'] = val
self.objectclass[ident]['export'] = self.conform.use_item('EXPORTS', ident)
self.objectclass_ord.append(ident)
#--- eth_reg_value ----------------------------------------------------------
def eth_reg_value(self, ident, type, value, ethname=None):
#print "eth_reg_value(ident='%s')" % (ident)
if ident in self.value:
if self.value[ident]['import'] and (self.value[ident]['import'] == self.Module()) :
# replace imported value
del self.value[ident]
self.value_imp.remove(ident)
elif ethname:
self.value[ident]['ethname'] = ethname
return
else:
raise DuplicateError("value", ident)
self.value[ident] = { 'import' : None, 'module' : self.Module(), 'proto' : self.proto,
'type' : type, 'value' : value,
'no_emit' : False }
self.value[ident]['export'] = self.conform.use_item('EXPORTS', ident)
self.value[ident]['ethname'] = ''
if (ethname): self.value[ident]['ethname'] = ethname
self.value_ord.append(ident)
#--- eth_reg_field ----------------------------------------------------------
def eth_reg_field(self, ident, type, idx='', parent=None, impl=False, pdu=None):
#print "eth_reg_field(ident='%s', type='%s')" % (ident, type)
if ident in self.field:
if pdu and (type == self.field[ident]['type']):
pass # OK already created PDU
else:
raise DuplicateError("field", ident)
self.field[ident] = {'type' : type, 'idx' : idx, 'impl' : impl, 'pdu' : pdu,
'modified' : '', 'attr' : {} }
name = ident.split('/')[-1]
if self.remove_prefix and name.startswith(self.remove_prefix):
name = name[len(self.remove_prefix):]
if len(ident.split('/')) > 1 and name == ITEM_FIELD_NAME: # Sequence/Set of type
if len(self.field[ident]['type'].split('/')) > 1:
self.field[ident]['attr']['NAME'] = '"%s item"' % ident.split('/')[-2]
self.field[ident]['attr']['ABBREV'] = asn2c(ident.split('/')[-2] + name)
else:
self.field[ident]['attr']['NAME'] = '"%s"' % self.field[ident]['type']
self.field[ident]['attr']['ABBREV'] = asn2c(self.field[ident]['type'])
else:
self.field[ident]['attr']['NAME'] = '"%s"' % name
self.field[ident]['attr']['ABBREV'] = asn2c(name)
if self.conform.check_item('FIELD_ATTR', ident):
self.field[ident]['modified'] = '#' + str(id(self))
self.field[ident]['attr'].update(self.conform.use_item('FIELD_ATTR', ident))
if (pdu):
self.field[ident]['pdu']['export'] = (self.conform.use_item('EXPORTS', ident + '_PDU') != 0)
self.pdu_ord.append(ident)
else:
self.field_ord.append(ident)
if parent:
self.eth_dep_add(parent, type)
def eth_dummy_eag_field_required(self):
if (not self.dummy_eag_field):
self.dummy_eag_field = 'eag_field'
#--- eth_clean --------------------------------------------------------------
def eth_clean(self):
        self.proto = self.proto_opt
#--- ASN.1 tables ----------------
self.assign = {}
self.assign_ord = []
self.field = {}
self.pdu_ord = []
self.field_ord = []
self.type = {}
self.type_ord = []
self.type_imp = []
self.type_dep = {}
self.sel_req = {}
self.sel_req_ord = []
self.comp_req_ord = []
self.vassign = {}
self.vassign_ord = []
self.value = {}
self.value_ord = []
self.value_imp = []
self.objectclass = {}
self.objectclass_ord = []
self.objectclass_imp = []
self.oassign = {}
self.oassign_ord = []
self.oassign_cls = {}
#--- Modules ------------
self.modules = []
self.exports_all = False
self.exports = []
self.cexports = []
self.vexports = []
#--- types -------------------
self.eth_type = {}
self.eth_type_ord = []
self.eth_export_ord = []
self.eth_type_dupl = {}
self.named_bit = []
#--- value dependencies -------------------
self.value_dep = {}
#--- values -------------------
self.eth_value = {}
self.eth_value_ord = []
#--- fields -------------------------
self.eth_hf = {}
self.eth_hf_ord = []
self.eth_hfpdu_ord = []
self.eth_hf_dupl = {}
self.dummy_eag_field = None
#--- type dependencies -------------------
self.eth_type_ord1 = []
self.eth_dep_cycle = []
self.dep_cycle_eth_type = {}
#--- value dependencies and export -------------------
self.eth_value_ord1 = []
self.eth_vexport_ord = []
#--- eth_prepare ------------------------------------------------------------
def eth_prepare(self):
self.eproto = asn2c(self.proto)
#--- dummy types/fields for PDU registration ---
nm = 'NULL'
if (self.conform.check_item('PDU', nm)):
self.eth_reg_type('_dummy/'+nm, NullType())
self.eth_reg_field(nm, '_dummy/'+nm, pdu=self.conform.use_item('PDU', nm))
#--- required PDUs ----------------------------
for t in self.type_ord:
pdu = self.type[t]['val'].eth_need_pdu(self)
if not pdu: continue
f = pdu['type']
pdu['reg'] = None
pdu['hidden'] = False
pdu['need_decl'] = True
if f not in self.field:
self.eth_reg_field(f, f, pdu=pdu)
#--- values -> named values -------------------
t_for_update = {}
for v in self.value_ord:
if (self.value[v]['type'].type == 'Type_Ref') or self.conform.check_item('ASSIGN_VALUE_TO_TYPE', v):
if self.conform.check_item('ASSIGN_VALUE_TO_TYPE', v):
tnm = self.conform.use_item('ASSIGN_VALUE_TO_TYPE', v)
else:
tnm = self.value[v]['type'].val
if tnm in self.type \
and not self.type[tnm]['import'] \
and (self.type[tnm]['val'].type == 'IntegerType'):
self.type[tnm]['val'].add_named_value(v, self.value[v]['value'])
self.value[v]['no_emit'] = True
t_for_update[tnm] = True
for t in list(t_for_update.keys()):
self.type[t]['attr']['STRINGS'] = self.type[t]['val'].eth_strings()
self.type[t]['attr'].update(self.conform.use_item('TYPE_ATTR', t))
#--- required components of ---------------------------
#print "self.comp_req_ord = ", self.comp_req_ord
for t in self.comp_req_ord:
self.type[t]['val'].eth_reg_sub(t, self, components_available=True)
#--- required selection types ---------------------------
#print "self.sel_req_ord = ", self.sel_req_ord
for t in self.sel_req_ord:
tt = self.sel_req[t]['typ']
if tt not in self.type:
self.dummy_import_type(t)
elif self.type[tt]['import']:
self.eth_import_type(t, self.type[tt]['import'], self.type[tt]['proto'])
else:
self.type[tt]['val'].sel_req(t, self.sel_req[t]['sel'], self)
#--- types -------------------
for t in self.type_imp: # imported types
nm = asn2c(t)
self.eth_type[nm] = { 'import' : self.type[t]['import'],
'proto' : asn2c(self.type[t]['proto']),
'attr' : {}, 'ref' : []}
self.eth_type[nm]['attr'].update(self.conform.use_item('ETYPE_ATTR', nm))
self.type[t]['ethname'] = nm
for t in self.type_ord: # dummy import for missing type reference
tp = self.type[t]['val']
#print "X : %s %s " % (t, tp.type)
if isinstance(tp, TaggedType):
#print "%s : %s " % (tp.type, t)
tp = tp.val
if isinstance(tp, Type_Ref):
#print "%s : %s ::= %s " % (tp.type, t, tp.val)
if tp.val not in self.type:
self.dummy_import_type(tp.val)
for t in self.type_ord:
nm = self.type[t]['tname']
if ((nm.find('#') >= 0) or
((len(t.split('/'))>1) and
(self.conform.get_fn_presence(t) or self.conform.check_item('FN_PARS', t) or
self.conform.get_fn_presence('/'.join((t,ITEM_FIELD_NAME))) or self.conform.check_item('FN_PARS', '/'.join((t,ITEM_FIELD_NAME)))) and
not self.conform.check_item('TYPE_RENAME', t))):
if len(t.split('/')) == 2 and t.split('/')[1] == ITEM_FIELD_NAME: # Sequence of type at the 1st level
nm = t.split('/')[0] + t.split('/')[1]
elif t.split('/')[-1] == ITEM_FIELD_NAME: # Sequence/Set of type at next levels
nm = 'T_' + self.conform.use_item('FIELD_RENAME', '/'.join(t.split('/')[0:-1]), val_dflt=t.split('/')[-2]) + t.split('/')[-1]
elif t.split('/')[-1] == UNTAG_TYPE_NAME: # Untagged type
nm = self.type['/'.join(t.split('/')[0:-1])]['ethname'] + '_U'
else:
nm = 'T_' + self.conform.use_item('FIELD_RENAME', t, val_dflt=t.split('/')[-1])
nm = asn2c(nm)
if nm in self.eth_type:
if nm in self.eth_type_dupl:
self.eth_type_dupl[nm].append(t)
else:
self.eth_type_dupl[nm] = [self.eth_type[nm]['ref'][0], t]
nm += '_%02d' % (len(self.eth_type_dupl[nm])-1)
if nm in self.eth_type:
self.eth_type[nm]['ref'].append(t)
else:
self.eth_type_ord.append(nm)
self.eth_type[nm] = { 'import' : None, 'proto' : self.eproto, 'export' : 0, 'enum' : 0, 'vals_ext' : 0,
'user_def' : EF_TYPE|EF_VALS, 'no_emit' : EF_TYPE|EF_VALS,
'val' : self.type[t]['val'],
'attr' : {}, 'ref' : [t]}
self.type[t]['ethname'] = nm
if (not self.eth_type[nm]['export'] and self.type[t]['export']): # new export
self.eth_export_ord.append(nm)
self.eth_type[nm]['export'] |= self.type[t]['export']
self.eth_type[nm]['enum'] |= self.type[t]['enum']
self.eth_type[nm]['vals_ext'] |= self.type[t]['vals_ext']
self.eth_type[nm]['user_def'] &= self.type[t]['user_def']
self.eth_type[nm]['no_emit'] &= self.type[t]['no_emit']
if self.type[t]['attr'].get('STRINGS') == '$$':
use_ext = self.type[t]['vals_ext']
if (use_ext):
self.eth_type[nm]['attr']['STRINGS'] = '&%s_ext' % (self.eth_vals_nm(nm))
else:
self.eth_type[nm]['attr']['STRINGS'] = 'VALS(%s)' % (self.eth_vals_nm(nm))
self.eth_type[nm]['attr'].update(self.conform.use_item('ETYPE_ATTR', nm))
for t in self.eth_type_ord:
bits = self.eth_type[t]['val'].eth_named_bits()
if (bits):
for (val, id) in bits:
self.named_bit.append({'name' : id, 'val' : val,
'ethname' : 'hf_%s_%s_%s' % (self.eproto, t, asn2c(id)),
'ftype' : 'FT_BOOLEAN', 'display' : '8',
'strings' : 'NULL',
'bitmask' : '0x'+('80','40','20','10','08','04','02','01')[val%8]})
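                # Editor's note: named bits are mapped MSB-first within an
                # octet, so (illustrative) val 0 -> mask 0x80 and val 7 ->
                # 0x01; val % 8 restarts the masks for each following octet.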
if self.eth_type[t]['val'].eth_need_tree():
self.eth_type[t]['tree'] = "ett_%s_%s" % (self.eth_type[t]['proto'], t)
else:
self.eth_type[t]['tree'] = None
#--- register values from enums ------------
for t in self.eth_type_ord:
if (self.eth_type[t]['val'].eth_has_enum(t, self)):
self.eth_type[t]['val'].reg_enum_vals(t, self)
#--- value dependencies -------------------
for v in self.value_ord:
if isinstance (self.value[v]['value'], Value):
dep = self.value[v]['value'].get_dep()
else:
dep = self.value[v]['value']
if dep and dep in self.value:
self.value_dep.setdefault(v, []).append(dep)
#--- exports all necessary values
for v in self.value_ord:
if not self.value[v]['export']: continue
deparr = self.value_dep.get(v, [])
while deparr:
d = deparr.pop()
if not self.value[d]['import']:
if not self.value[d]['export']:
self.value[d]['export'] = EF_TYPE
deparr.extend(self.value_dep.get(d, []))
#--- values -------------------
for v in self.value_imp:
nm = asn2c(v)
self.eth_value[nm] = { 'import' : self.value[v]['import'],
'proto' : asn2c(self.value[v]['proto']),
'ref' : []}
self.value[v]['ethname'] = nm
for v in self.value_ord:
if (self.value[v]['ethname']):
continue
if (self.value[v]['no_emit']):
continue
nm = asn2c(v)
self.eth_value[nm] = { 'import' : None,
'proto' : asn2c(self.value[v]['proto']),
'export' : self.value[v]['export'], 'ref' : [v] }
self.eth_value[nm]['value'] = self.value[v]['value']
self.eth_value_ord.append(nm)
self.value[v]['ethname'] = nm
#--- fields -------------------------
for f in (self.pdu_ord + self.field_ord):
if len(f.split('/')) > 1 and f.split('/')[-1] == ITEM_FIELD_NAME: # Sequence/Set of type
nm = self.conform.use_item('FIELD_RENAME', '/'.join(f.split('/')[0:-1]), val_dflt=f.split('/')[-2]) + f.split('/')[-1]
else:
nm = f.split('/')[-1]
nm = self.conform.use_item('FIELD_RENAME', f, val_dflt=nm)
nm = asn2c(nm)
if (self.field[f]['pdu']):
nm += '_PDU'
if (not self.merge_modules or self.field[f]['pdu']['export']):
nm = self.eproto + '_' + nm
t = self.field[f]['type']
if t in self.type:
ethtype = self.type[t]['ethname']
else: # undefined type
ethtype = self.dummy_import_type(t)
ethtypemod = ethtype + self.field[f]['modified']
if nm in self.eth_hf:
if nm in self.eth_hf_dupl:
if ethtypemod in self.eth_hf_dupl[nm]:
nm = self.eth_hf_dupl[nm][ethtypemod]
self.eth_hf[nm]['ref'].append(f)
self.field[f]['ethname'] = nm
continue
else:
nmx = nm + ('_%02d' % (len(self.eth_hf_dupl[nm])))
self.eth_hf_dupl[nm][ethtype] = nmx
nm = nmx
else:
if (self.eth_hf[nm]['ethtype']+self.eth_hf[nm]['modified']) == ethtypemod:
self.eth_hf[nm]['ref'].append(f)
self.field[f]['ethname'] = nm
continue
else:
nmx = nm + '_01'
self.eth_hf_dupl[nm] = {self.eth_hf[nm]['ethtype']+self.eth_hf[nm]['modified'] : nm, \
ethtypemod : nmx}
nm = nmx
if (self.field[f]['pdu']):
self.eth_hfpdu_ord.append(nm)
else:
self.eth_hf_ord.append(nm)
fullname = 'hf_%s_%s' % (self.eproto, nm)
attr = self.eth_get_type_attr(self.field[f]['type']).copy()
attr.update(self.field[f]['attr'])
if (self.NAPI() and 'NAME' in attr):
attr['NAME'] += self.field[f]['idx']
attr.update(self.conform.use_item('EFIELD_ATTR', nm))
use_vals_ext = self.eth_type[ethtype].get('vals_ext')
if (use_vals_ext):
attr['DISPLAY'] += '|BASE_EXT_STRING'
self.eth_hf[nm] = {'fullname' : fullname, 'pdu' : self.field[f]['pdu'],
'ethtype' : ethtype, 'modified' : self.field[f]['modified'],
'attr' : attr.copy(),
'ref' : [f]}
self.field[f]['ethname'] = nm
if (self.dummy_eag_field):
# Prepending "dummy_" avoids matching checkhf.pl.
self.dummy_eag_field = 'dummy_hf_%s_%s' % (self.eproto, self.dummy_eag_field)
#--- type dependencies -------------------
(self.eth_type_ord1, self.eth_dep_cycle) = dependency_compute(self.type_ord, self.type_dep, map_fn = lambda t: self.type[t]['ethname'], ignore_fn = lambda t: self.type[t]['import'])
i = 0
while i < len(self.eth_dep_cycle):
t = self.type[self.eth_dep_cycle[i][0]]['ethname']
self.dep_cycle_eth_type.setdefault(t, []).append(i)
i += 1
#--- value dependencies and export -------------------
for v in self.eth_value_ord:
if self.eth_value[v]['export']:
self.eth_vexport_ord.append(v)
else:
self.eth_value_ord1.append(v)
#--- export tags, values, ... ---
for t in self.exports:
if t not in self.type:
continue
if self.type[t]['import']:
continue
m = self.type[t]['module']
if not self.Per():
if m not in self.all_tags:
self.all_tags[m] = {}
self.all_tags[m][t] = self.type[t]['val'].GetTTag(self)
if m not in self.all_type_attr:
self.all_type_attr[m] = {}
self.all_type_attr[m][t] = self.eth_get_type_attr(t).copy()
for v in self.vexports:
if v not in self.value:
continue
if self.value[v]['import']:
continue
m = self.value[v]['module']
if m not in self.all_vals:
self.all_vals[m] = {}
vv = self.value[v]['value']
if isinstance (vv, Value):
vv = vv.to_str(self)
self.all_vals[m][v] = vv
#--- eth_vals_nm ------------------------------------------------------------
def eth_vals_nm(self, tname):
out = ""
if (not self.eth_type[tname]['export'] & EF_NO_PROT):
out += "%s_" % (self.eproto)
out += "%s_vals" % (tname)
return out
#--- eth_vals ---------------------------------------------------------------
def eth_vals(self, tname, vals):
out = ""
has_enum = self.eth_type[tname]['enum'] & EF_ENUM
use_ext = self.eth_type[tname]['vals_ext']
if (use_ext):
vals.sort(key=lambda vals_entry: int(vals_entry[0]))
if (not self.eth_type[tname]['export'] & EF_VALS):
out += 'static '
if (self.eth_type[tname]['export'] & EF_VALS) and (self.eth_type[tname]['export'] & EF_TABLE):
out += 'static '
out += "const value_string %s[] = {\n" % (self.eth_vals_nm(tname))
for (val, id) in vals:
if (has_enum):
vval = self.eth_enum_item(tname, id)
else:
vval = val
out += ' { %3s, "%s" },\n' % (vval, id)
out += " { 0, NULL }\n};\n"
if (use_ext):
out += "\nstatic value_string_ext %s_ext = VALUE_STRING_EXT_INIT(%s);\n" % (self.eth_vals_nm(tname), self.eth_vals_nm(tname))
return out
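    # Illustrative output of eth_vals (editor's sketch; 'myproto' and the
    # entries are hypothetical):
    #   static const value_string myproto_T_foo_vals[] = {
    #     {   0, "bar" },
    #     {   1, "baz" },
    #     { 0, NULL }
    #   };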
#--- eth_enum_prefix ------------------------------------------------------------
def eth_enum_prefix(self, tname, type=False):
out = ""
if (self.eth_type[tname]['export'] & EF_ENUM):
no_prot = self.eth_type[tname]['export'] & EF_NO_PROT
else:
no_prot = self.eth_type[tname]['enum'] & EF_NO_PROT
if (not no_prot):
out += self.eproto
if ((not self.eth_type[tname]['enum'] & EF_NO_TYPE) or type):
if (out): out += '_'
out += tname
if (self.eth_type[tname]['enum'] & EF_UCASE):
out = out.upper()
if (out): out += '_'
return out
#--- eth_enum_nm ------------------------------------------------------------
def eth_enum_nm(self, tname):
out = self.eth_enum_prefix(tname, type=True)
out += "enum"
return out
#--- eth_enum_item ---------------------------------------------------------------
def eth_enum_item(self, tname, ident):
out = self.eth_enum_prefix(tname)
out += asn2c(ident)
if (self.eth_type[tname]['enum'] & EF_UCASE):
out = out.upper()
return out
#--- eth_enum ---------------------------------------------------------------
def eth_enum(self, tname, vals):
out = ""
if (self.eth_type[tname]['enum'] & EF_DEFINE):
out += "/* enumerated values for %s */\n" % (tname)
for (val, id) in vals:
out += '#define %-12s %3s\n' % (self.eth_enum_item(tname, id), val)
else:
out += "typedef enum _%s {\n" % (self.eth_enum_nm(tname))
first_line = 1
for (val, id) in vals:
if (first_line == 1):
first_line = 0
else:
out += ",\n"
out += ' %-12s = %3s' % (self.eth_enum_item(tname, id), val)
out += "\n} %s;\n" % (self.eth_enum_nm(tname))
return out
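    # Illustrative output of eth_enum (editor's sketch): with EF_DEFINE set it
    # emits '#define <PREFIX><ITEM>  <val>' lines, otherwise a C typedef such
    # as 'typedef enum _myproto_T_foo_enum { ... } myproto_T_foo_enum;'.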
#--- eth_bits ---------------------------------------------------------------
def eth_bits(self, tname, bits):
out = ""
out += "static const "
out += "asn_namedbit %(TABLE)s[] = {\n"
for (val, id) in bits:
out += ' { %2d, &hf_%s_%s_%s, -1, -1, "%s", NULL },\n' % (val, self.eproto, tname, asn2c(id), id)
out += " { 0, NULL, 0, 0, NULL, NULL }\n};\n"
return out
#--- eth_type_fn_h ----------------------------------------------------------
def eth_type_fn_h(self, tname):
out = ""
if (not self.eth_type[tname]['export'] & EF_TYPE):
out += 'static '
out += "int "
if (self.Ber()):
out += "dissect_%s_%s(gboolean implicit_tag _U_, tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_)" % (self.eth_type[tname]['proto'], tname)
elif (self.Per()):
out += "dissect_%s_%s(tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_)" % (self.eth_type[tname]['proto'], tname)
out += ";\n"
return out
#--- eth_fn_call ------------------------------------------------------------
def eth_fn_call(self, fname, ret=None, indent=2, par=None):
out = indent * ' '
if (ret):
if (ret == 'return'):
out += 'return '
else:
out += ret + ' = '
out += fname + '('
ind = len(out)
for i in range(len(par)):
if (i>0): out += ind * ' '
out += ', '.join(par[i])
if (i<(len(par)-1)): out += ',\n'
out += ');\n'
return out
#--- eth_type_fn_hdr --------------------------------------------------------
def eth_type_fn_hdr(self, tname):
out = '\n'
if (not self.eth_type[tname]['export'] & EF_TYPE):
out += 'static '
out += "int\n"
if (self.Ber()):
out += "dissect_%s_%s(gboolean implicit_tag _U_, tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_) {\n" % (self.eth_type[tname]['proto'], tname)
elif (self.Per()):
out += "dissect_%s_%s(tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_) {\n" % (self.eth_type[tname]['proto'], tname)
#if self.conform.get_fn_presence(tname):
# out += self.conform.get_fn_text(tname, 'FN_HDR')
#el
if self.conform.get_fn_presence(self.eth_type[tname]['ref'][0]):
out += self.conform.get_fn_text(self.eth_type[tname]['ref'][0], 'FN_HDR')
return out
#--- eth_type_fn_ftr --------------------------------------------------------
def eth_type_fn_ftr(self, tname):
out = '\n'
#if self.conform.get_fn_presence(tname):
# out += self.conform.get_fn_text(tname, 'FN_FTR')
#el
if self.conform.get_fn_presence(self.eth_type[tname]['ref'][0]):
out += self.conform.get_fn_text(self.eth_type[tname]['ref'][0], 'FN_FTR')
out += " return offset;\n"
out += "}\n"
return out
#--- eth_type_fn_body -------------------------------------------------------
def eth_type_fn_body(self, tname, body, pars=None):
out = body
#if self.conform.get_fn_body_presence(tname):
# out = self.conform.get_fn_text(tname, 'FN_BODY')
#el
if self.conform.get_fn_body_presence(self.eth_type[tname]['ref'][0]):
out = self.conform.get_fn_text(self.eth_type[tname]['ref'][0], 'FN_BODY')
if pars:
try:
out = out % pars
except (TypeError):
pass
return out
#--- eth_out_pdu_decl ----------------------------------------------------------
def eth_out_pdu_decl(self, f):
t = self.eth_hf[f]['ethtype']
is_new = self.eth_hf[f]['pdu']['new']
out = ''
if (not self.eth_hf[f]['pdu']['export']):
out += 'static '
if (is_new):
out += 'int '
out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, void *data _U_);\n'
else:
out += 'void '
out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_);\n'
return out
#--- eth_output_hf ----------------------------------------------------------
def eth_output_hf (self):
if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit): return
fx = self.output.file_open('hf')
for f in (self.eth_hfpdu_ord + self.eth_hf_ord):
fx.write("%-50s/* %s */\n" % ("static int %s = -1; " % (self.eth_hf[f]['fullname']), self.eth_hf[f]['ethtype']))
if (self.named_bit):
fx.write('/* named bits */\n')
for nb in self.named_bit:
fx.write("static int %s = -1;\n" % (nb['ethname']))
if (self.dummy_eag_field):
fx.write("static int %s = -1; /* never registered */\n" % (self.dummy_eag_field))
self.output.file_close(fx)
#--- eth_output_hf_arr ------------------------------------------------------
def eth_output_hf_arr (self):
if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit): return
fx = self.output.file_open('hfarr')
for f in (self.eth_hfpdu_ord + self.eth_hf_ord):
t = self.eth_hf[f]['ethtype']
if self.remove_prefix and t.startswith(self.remove_prefix):
t = t[len(self.remove_prefix):]
name=self.eth_hf[f]['attr']['NAME']
try: # Python < 3
trantab = maketrans("- ", "__")
except:
trantab = str.maketrans("- ", "__")
name = name.translate(trantab)
namelower = name.lower()
tquoted_lower = '"' + t.lower() + '"'
# Try to avoid giving blurbs that give no more info than the name
if tquoted_lower == namelower or \
t == "NULL" or \
tquoted_lower.replace("t_", "") == namelower:
blurb = 'NULL'
else:
blurb = '"%s"' % (t)
attr = self.eth_hf[f]['attr'].copy()
if attr['TYPE'] == 'FT_NONE':
attr['ABBREV'] = '"%s.%s_element"' % (self.proto, attr['ABBREV'])
else:
attr['ABBREV'] = '"%s.%s"' % (self.proto, attr['ABBREV'])
if 'BLURB' not in attr:
attr['BLURB'] = blurb
fx.write(' { &%s,\n' % (self.eth_hf[f]['fullname']))
fx.write(' { %(NAME)s, %(ABBREV)s,\n' % attr)
fx.write(' %(TYPE)s, %(DISPLAY)s, %(STRINGS)s, %(BITMASK)s,\n' % attr)
fx.write(' %(BLURB)s, HFILL }},\n' % attr)
for nb in self.named_bit:
fx.write(' { &%s,\n' % (nb['ethname']))
fx.write(' { "%s", "%s.%s",\n' % (nb['name'], self.proto, nb['name']))
fx.write(' %s, %s, %s, %s,\n' % (nb['ftype'], nb['display'], nb['strings'], nb['bitmask']))
fx.write(' NULL, HFILL }},\n')
self.output.file_close(fx)
#--- eth_output_ett ---------------------------------------------------------
def eth_output_ett (self):
fx = self.output.file_open('ett')
fempty = True
#fx.write("static gint ett_%s = -1;\n" % (self.eproto))
for t in self.eth_type_ord:
if self.eth_type[t]['tree']:
fx.write("static gint %s = -1;\n" % (self.eth_type[t]['tree']))
fempty = False
self.output.file_close(fx, discard=fempty)
#--- eth_output_ett_arr -----------------------------------------------------
def eth_output_ett_arr(self):
fx = self.output.file_open('ettarr')
fempty = True
#fx.write(" &ett_%s,\n" % (self.eproto))
for t in self.eth_type_ord:
if self.eth_type[t]['tree']:
fx.write(" &%s,\n" % (self.eth_type[t]['tree']))
fempty = False
self.output.file_close(fx, discard=fempty)
#--- eth_output_export ------------------------------------------------------
def eth_output_export(self):
fx = self.output.file_open('exp', ext='h')
for t in self.eth_export_ord: # vals
if (self.eth_type[t]['export'] & EF_ENUM) and self.eth_type[t]['val'].eth_has_enum(t, self):
fx.write(self.eth_type[t]['val'].eth_type_enum(t, self))
if (self.eth_type[t]['export'] & EF_VALS) and self.eth_type[t]['val'].eth_has_vals():
if not self.eth_type[t]['export'] & EF_TABLE:
if self.eth_type[t]['export'] & EF_WS_DLL:
fx.write("WS_DLL_PUBLIC ")
else:
fx.write("extern ")
fx.write("const value_string %s[];\n" % (self.eth_vals_nm(t)))
else:
fx.write(self.eth_type[t]['val'].eth_type_vals(t, self))
for t in self.eth_export_ord: # functions
if (self.eth_type[t]['export'] & EF_TYPE):
if self.eth_type[t]['export'] & EF_EXTERN:
if self.eth_type[t]['export'] & EF_WS_DLL:
fx.write("WS_DLL_PUBLIC ")
else:
fx.write("extern ")
fx.write(self.eth_type_fn_h(t))
for f in self.eth_hfpdu_ord: # PDUs
if (self.eth_hf[f]['pdu'] and self.eth_hf[f]['pdu']['export']):
fx.write(self.eth_out_pdu_decl(f))
self.output.file_close(fx)
#--- eth_output_expcnf ------------------------------------------------------
def eth_output_expcnf(self):
fx = self.output.file_open('exp', ext='cnf')
fx.write('#.MODULE\n')
maxw = 0
for (m, p) in self.modules:
if (len(m) > maxw): maxw = len(m)
for (m, p) in self.modules:
fx.write("%-*s %s\n" % (maxw, m, p))
fx.write('#.END\n\n')
for cls in self.objectclass_ord:
if self.objectclass[cls]['export']:
cnm = cls
if self.objectclass[cls]['export'] & EF_MODULE:
cnm = "$%s$%s" % (self.objectclass[cls]['module'], cnm)
fx.write('#.CLASS %s\n' % (cnm))
maxw = 2
for fld in self.objectclass[cls]['val'].fields:
w = len(fld.fld_repr()[0])
if (w > maxw): maxw = w
for fld in self.objectclass[cls]['val'].fields:
repr = fld.fld_repr()
fx.write('%-*s %s\n' % (maxw, repr[0], ' '.join(repr[1:])))
fx.write('#.END\n\n')
if self.Ber():
fx.write('#.IMPORT_TAG\n')
for t in self.eth_export_ord: # tags
if (self.eth_type[t]['export'] & EF_TYPE):
fx.write('%-24s ' % self.eth_type[t]['ref'][0])
fx.write('%s %s\n' % self.eth_type[t]['val'].GetTag(self))
fx.write('#.END\n\n')
fx.write('#.TYPE_ATTR\n')
for t in self.eth_export_ord: # attributes
if (self.eth_type[t]['export'] & EF_TYPE):
tnm = self.eth_type[t]['ref'][0]
if self.eth_type[t]['export'] & EF_MODULE:
tnm = "$%s$%s" % (self.type[tnm]['module'], tnm)
fx.write('%-24s ' % tnm)
attr = self.eth_get_type_attr(self.eth_type[t]['ref'][0]).copy()
fx.write('TYPE = %(TYPE)-9s DISPLAY = %(DISPLAY)-9s STRINGS = %(STRINGS)s BITMASK = %(BITMASK)s\n' % attr)
fx.write('#.END\n\n')
self.output.file_close(fx, keep_anyway=True)
#--- eth_output_val ------------------------------------------------------
def eth_output_val(self):
fx = self.output.file_open('val', ext='h')
for v in self.eth_value_ord1:
vv = self.eth_value[v]['value']
if isinstance (vv, Value):
vv = vv.to_str(self)
fx.write("#define %-30s %s\n" % (v, vv))
for t in self.eth_type_ord1:
if self.eth_type[t]['import']:
continue
if self.eth_type[t]['val'].eth_has_enum(t, self) and not (self.eth_type[t]['export'] & EF_ENUM):
fx.write(self.eth_type[t]['val'].eth_type_enum(t, self))
self.output.file_close(fx)
#--- eth_output_valexp ------------------------------------------------------
def eth_output_valexp(self):
if (not len(self.eth_vexport_ord)): return
fx = self.output.file_open('valexp', ext='h')
for v in self.eth_vexport_ord:
vv = self.eth_value[v]['value']
if isinstance (vv, Value):
vv = vv.to_str(self)
fx.write("#define %-30s %s\n" % (v, vv))
self.output.file_close(fx)
#--- eth_output_types -------------------------------------------------------
def eth_output_types(self):
def out_pdu(f):
t = self.eth_hf[f]['ethtype']
is_new = self.eth_hf[f]['pdu']['new']
impl = 'FALSE'
out = ''
if (not self.eth_hf[f]['pdu']['export']):
out += 'static '
if (is_new):
out += 'int '
out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, void *data _U_) {\n'
else:
out += 'void '
out += 'dissect_'+f+'(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_) {\n'
if (is_new):
out += ' int offset = 0;\n'
off_par = 'offset'
ret_par = 'offset'
else:
off_par = '0'
ret_par = None
if (self.Per()):
if (self.Aligned()):
aligned = 'TRUE'
else:
aligned = 'FALSE'
out += " asn1_ctx_t asn1_ctx;\n"
out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_PER', aligned, 'pinfo'),))
if (self.Ber()):
out += " asn1_ctx_t asn1_ctx;\n"
out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_BER', 'TRUE', 'pinfo'),))
par=((impl, 'tvb', off_par,'&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
elif (self.Per()):
par=(('tvb', off_par, '&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
else:
par=((),)
out += self.eth_fn_call('dissect_%s_%s' % (self.eth_type[t]['proto'], t), ret=ret_par, par=par)
if (self.Per() and is_new):
out += ' offset += 7; offset >>= 3;\n'
if (is_new):
out += ' return offset;\n'
out += '}\n'
return out
#end out_pdu()
fx = self.output.file_open('fn')
pos = fx.tell()
if (len(self.eth_hfpdu_ord)):
first_decl = True
for f in self.eth_hfpdu_ord:
if (self.eth_hf[f]['pdu'] and self.eth_hf[f]['pdu']['need_decl']):
if first_decl:
fx.write('/*--- PDUs declarations ---*/\n')
first_decl = False
fx.write(self.eth_out_pdu_decl(f))
if not first_decl:
fx.write('\n')
if self.eth_dep_cycle:
fx.write('/*--- Cyclic dependencies ---*/\n\n')
i = 0
while i < len(self.eth_dep_cycle):
t = self.type[self.eth_dep_cycle[i][0]]['ethname']
if self.dep_cycle_eth_type[t][0] != i: i += 1; continue
fx.write(''.join(['/* %s */\n' % ' -> '.join(self.eth_dep_cycle[i]) for i in self.dep_cycle_eth_type[t]]))
fx.write(self.eth_type_fn_h(t))
fx.write('\n')
i += 1
fx.write('\n')
for t in self.eth_type_ord1:
if self.eth_type[t]['import']:
continue
if self.eth_type[t]['val'].eth_has_vals():
if self.eth_type[t]['no_emit'] & EF_VALS:
pass
elif self.eth_type[t]['user_def'] & EF_VALS:
fx.write("extern const value_string %s[];\n" % (self.eth_vals_nm(t)))
elif (self.eth_type[t]['export'] & EF_VALS) and (self.eth_type[t]['export'] & EF_TABLE):
pass
else:
fx.write(self.eth_type[t]['val'].eth_type_vals(t, self))
if self.eth_type[t]['no_emit'] & EF_TYPE:
pass
elif self.eth_type[t]['user_def'] & EF_TYPE:
fx.write(self.eth_type_fn_h(t))
else:
fx.write(self.eth_type[t]['val'].eth_type_fn(self.eth_type[t]['proto'], t, self))
fx.write('\n')
if (len(self.eth_hfpdu_ord)):
fx.write('/*--- PDUs ---*/\n\n')
for f in self.eth_hfpdu_ord:
if (self.eth_hf[f]['pdu']):
if (f in self.emitted_pdu):
fx.write(" /* %s already emitted */\n" % (f))
else:
fx.write(out_pdu(f))
self.emitted_pdu[f] = True
fx.write('\n')
fempty = pos == fx.tell()
self.output.file_close(fx, discard=fempty)
#--- eth_output_dis_hnd -----------------------------------------------------
def eth_output_dis_hnd(self):
fx = self.output.file_open('dis-hnd')
fempty = True
for f in self.eth_hfpdu_ord:
pdu = self.eth_hf[f]['pdu']
if (pdu and pdu['reg'] and not pdu['hidden']):
dis = self.proto
if (pdu['reg'] != '.'):
dis += '.' + pdu['reg']
fx.write('static dissector_handle_t %s_handle;\n' % (asn2c(dis)))
fempty = False
fx.write('\n')
self.output.file_close(fx, discard=fempty)
#--- eth_output_dis_reg -----------------------------------------------------
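    # Emit the handoff registration calls: register_dissector() for each
    # registered PDU plus, for visible ones, the find_dissector() lookup that
    # fills the handle declared by eth_output_dis_hnd().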
def eth_output_dis_reg(self):
fx = self.output.file_open('dis-reg')
fempty = True
for f in self.eth_hfpdu_ord:
pdu = self.eth_hf[f]['pdu']
if (pdu and pdu['reg']):
new_prefix = ''
if (pdu['new']): new_prefix = 'new_'
dis = self.proto
if (pdu['reg'] != '.'): dis += '.' + pdu['reg']
fx.write(' %sregister_dissector("%s", dissect_%s, proto_%s);\n' % (new_prefix, dis, f, self.eproto))
if (not pdu['hidden']):
fx.write(' %s_handle = find_dissector("%s");\n' % (asn2c(dis), dis))
fempty = False
fx.write('\n')
self.output.file_close(fx, discard=fempty)
#--- eth_output_dis_tab -----------------------------------------------------
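    # Emit dissector-table registrations collected from #.REGISTER directives:
    # dissector_add_uint()/dissector_add_string() for NUM/STR entries and
    # register_ber_oid_dissector()/register_per_oid_dissector() for OIDs.
    # Illustrative generated line (table name and port assumed):
    #   dissector_add_uint("tcp.port", 102, foo_handle);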
def eth_output_dis_tab(self):
fx = self.output.file_open('dis-tab')
fempty = True
for k in self.conform.get_order('REGISTER'):
reg = self.conform.use_item('REGISTER', k)
if reg['pdu'] not in self.field: continue
f = self.field[reg['pdu']]['ethname']
pdu = self.eth_hf[f]['pdu']
new_prefix = ''
if (pdu['new']): new_prefix = 'new_'
if (reg['rtype'] in ('NUM', 'STR')):
rstr = ''
if (reg['rtype'] == 'STR'):
rstr = 'string'
else:
rstr = 'uint'
if (pdu['reg']):
dis = self.proto
if (pdu['reg'] != '.'): dis += '.' + pdu['reg']
if (not pdu['hidden']):
hnd = '%s_handle' % (asn2c(dis))
else:
hnd = 'find_dissector("%s")' % (dis)
else:
hnd = '%screate_dissector_handle(dissect_%s, proto_%s)' % (new_prefix, f, self.eproto)
rport = self.value_get_eth(reg['rport'])
fx.write(' dissector_add_%s("%s", %s, %s);\n' % (rstr, reg['rtable'], rport, hnd))
elif (reg['rtype'] in ('BER', 'PER')):
roid = self.value_get_eth(reg['roid'])
fx.write(' %sregister_%s_oid_dissector(%s, dissect_%s, proto_%s, %s);\n' % (new_prefix, reg['rtype'].lower(), roid, f, self.eproto, reg['roidname']))
fempty = False
fx.write('\n')
self.output.file_close(fx, discard=fempty)
#--- eth_output_syn_reg -----------------------------------------------------
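    # Emit register_ber_syntax_dissector() calls for #.SYNTAX entries so a
    # PDU can be selected by its syntax name.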
def eth_output_syn_reg(self):
fx = self.output.file_open('syn-reg')
fempty = True
first_decl = True
for k in self.conform.get_order('SYNTAX'):
reg = self.conform.use_item('SYNTAX', k)
if reg['pdu'] not in self.field: continue
f = self.field[reg['pdu']]['ethname']
pdu = self.eth_hf[f]['pdu']
new_prefix = ''
if (pdu['new']): new_prefix = 'new_'
if first_decl:
fx.write(' /*--- Syntax registrations ---*/\n')
first_decl = False
            fx.write('  %sregister_ber_syntax_dissector(%s, proto_%s, dissect_%s_PDU);\n' % (new_prefix, k, self.eproto, reg['pdu']))
            fempty = False
self.output.file_close(fx, discard=fempty)
#--- eth_output_tables -----------------------------------------------------
def eth_output_tables(self):
for num in list(self.conform.report.keys()):
fx = self.output.file_open('table' + num)
for rep in self.conform.report[num]:
self.eth_output_table(fx, rep)
self.output.file_close(fx)
#--- eth_output_table -----------------------------------------------------
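    # Render one TABLE report entry. HDR/FTR entries emit surrounding blank
    # lines; a BODY entry with rep['var'] of the form "CLASS.field" loops over
    # the information objects assigned to CLASS, where a '!' prefix excludes a
    # field, '#' requests numeric and '@' string sorting of the loop, and each
    # object is formatted through rep['text'] using %(field)s substitution.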
def eth_output_table(self, fx, rep):
if rep['type'] == 'HDR':
fx.write('\n')
if rep['var']:
var = rep['var']
var_list = var.split('.', 1)
cls = var_list[0]
del var_list[0]
flds = []
not_flds = []
sort_flds = []
for f in var_list:
if f[0] == '!':
not_flds.append(f[1:])
continue
if f[0] == '#':
flds.append(f[1:])
sort_flds.append(f)
continue
if f[0] == '@':
flds.append(f[1:])
sort_flds.append(f[1:])
continue
flds.append(f)
objs = {}
objs_ord = []
if (cls in self.oassign_cls):
for ident in self.oassign_cls[cls]:
obj = self.get_obj_repr(ident, flds, not_flds)
if not obj:
continue
obj['_LOOP'] = var
obj['_DICT'] = str(obj)
objs[ident] = obj
objs_ord.append(ident)
if (sort_flds):
# Sort identifiers according to the matching object in objs.
# The order is determined by sort_flds, keys prefixed by a
# '#' are compared numerically.
def obj_key_fn(name):
obj = objs[name]
return list(
int(obj[f[1:]]) if f[0] == '#' else obj[f]
for f in sort_flds
)
objs_ord.sort(key=obj_key_fn)
for ident in objs_ord:
obj = objs[ident]
try:
text = rep['text'] % obj
except (KeyError):
raise sys.exc_info()[0]("%s:%s invalid key %s for information object %s of %s" % (rep['fn'], rep['lineno'], sys.exc_info()[1], ident, var))
fx.write(text)
else:
fx.write("/* Unknown or empty loop list %s */\n" % (var))
else:
fx.write(rep['text'])
if rep['type'] == 'FTR':
fx.write('\n')
#--- dupl_report -----------------------------------------------------
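    # Warn about distinct ASN.1 types or fields that map onto the same
    # Wireshark name; the conformance file's TYPE_RENAME/FIELD_RENAME items
    # are the intended remedy.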
def dupl_report(self):
# types
tmplist = sorted(self.eth_type_dupl.keys())
for t in tmplist:
msg = "The same type names for different types. Explicit type renaming is recommended.\n"
msg += t + "\n"
for tt in self.eth_type_dupl[t]:
msg += " %-20s %s\n" % (self.type[tt]['ethname'], tt)
warnings.warn_explicit(msg, UserWarning, '', 0)
# fields
tmplist = list(self.eth_hf_dupl.keys())
tmplist.sort()
for f in tmplist:
msg = "The same field names for different types. Explicit field renaming is recommended.\n"
msg += f + "\n"
for tt in list(self.eth_hf_dupl[f].keys()):
msg += " %-20s %-20s " % (self.eth_hf_dupl[f][tt], tt)
msg += ", ".join(self.eth_hf[self.eth_hf_dupl[f][tt]]['ref'])
msg += "\n"
warnings.warn_explicit(msg, UserWarning, '', 0)
#--- eth_do_output ------------------------------------------------------------
def eth_do_output(self):
if self.dbg('a'):
print("\n# Assignments")
for a in self.assign_ord:
v = ' '
if (self.assign[a]['virt']): v = '*'
print(v, a)
print("\n# Value assignments")
for a in self.vassign_ord:
print(' ', a)
print("\n# Information object assignments")
for a in self.oassign_ord:
print(" %-12s (%s)" % (a, self.oassign[a].cls))
if self.dbg('t'):
print("\n# Imported Types")
print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol"))
print("-" * 100)
for t in self.type_imp:
print("%-40s %-24s %-24s" % (t, self.type[t]['import'], self.type[t]['proto']))
print("\n# Imported Values")
print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol"))
print("-" * 100)
for t in self.value_imp:
print("%-40s %-24s %-24s" % (t, self.value[t]['import'], self.value[t]['proto']))
print("\n# Imported Object Classes")
print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol"))
print("-" * 100)
for t in self.objectclass_imp:
print("%-40s %-24s %-24s" % (t, self.objectclass[t]['import'], self.objectclass[t]['proto']))
print("\n# Exported Types")
print("%-31s %s" % ("Wireshark type", "Export Flag"))
print("-" * 100)
for t in self.eth_export_ord:
print("%-31s 0x%02X" % (t, self.eth_type[t]['export']))
print("\n# Exported Values")
print("%-40s %s" % ("Wireshark name", "Value"))
print("-" * 100)
for v in self.eth_vexport_ord:
vv = self.eth_value[v]['value']
if isinstance (vv, Value):
vv = vv.to_str(self)
print("%-40s %s" % (v, vv))
print("\n# ASN.1 Object Classes")
print("%-40s %-24s %-24s" % ("ASN.1 name", "Module", "Protocol"))
print("-" * 100)
for t in self.objectclass_ord:
print("%-40s " % (t))
print("\n# ASN.1 Types")
print("%-49s %-24s %-24s" % ("ASN.1 unique name", "'tname'", "Wireshark type"))
print("-" * 100)
for t in self.type_ord:
print("%-49s %-24s %-24s" % (t, self.type[t]['tname'], self.type[t]['ethname']))
print("\n# Wireshark Types")
print("Wireshark type References (ASN.1 types)")
print("-" * 100)
for t in self.eth_type_ord:
sys.stdout.write("%-31s %d" % (t, len(self.eth_type[t]['ref'])))
print(', '.join(self.eth_type[t]['ref']))
print("\n# ASN.1 Values")
print("%-40s %-18s %-20s %s" % ("ASN.1 unique name", "Type", "Value", "Wireshark value"))
print("-" * 100)
for v in self.value_ord:
vv = self.value[v]['value']
if isinstance (vv, Value):
vv = vv.to_str(self)
print("%-40s %-18s %-20s %s" % (v, self.value[v]['type'].eth_tname(), vv, self.value[v]['ethname']))
#print "\n# Wireshark Values"
#print "%-40s %s" % ("Wireshark name", "Value")
#print "-" * 100
#for v in self.eth_value_ord:
# vv = self.eth_value[v]['value']
# if isinstance (vv, Value):
# vv = vv.to_str(self)
# print "%-40s %s" % (v, vv)
print("\n# ASN.1 Fields")
print("ASN.1 unique name Wireshark name ASN.1 type")
print("-" * 100)
for f in (self.pdu_ord + self.field_ord):
print("%-40s %-20s %s" % (f, self.field[f]['ethname'], self.field[f]['type']))
print("\n# Wireshark Fields")
print("Wireshark name Wireshark type References (ASN.1 fields)")
print("-" * 100)
for f in (self.eth_hfpdu_ord + self.eth_hf_ord):
sys.stdout.write("%-30s %-20s %s" % (f, self.eth_hf[f]['ethtype'], len(self.eth_hf[f]['ref'])))
print(', '.join(self.eth_hf[f]['ref']))
#print "\n# Order after dependencies"
#print '\n'.join(self.eth_type_ord1)
print("\n# Cyclic dependencies")
for c in self.eth_dep_cycle:
print(' -> '.join(c))
self.dupl_report()
self.output.outnm = self.outnm_opt
if (not self.output.outnm):
self.output.outnm = self.proto
self.output.outnm = self.output.outnm.replace('.', '-')
if not self.justexpcnf:
self.eth_output_hf()
self.eth_output_ett()
self.eth_output_types()
self.eth_output_hf_arr()
self.eth_output_ett_arr()
self.eth_output_export()
self.eth_output_val()
self.eth_output_valexp()
self.eth_output_dis_hnd()
self.eth_output_dis_reg()
self.eth_output_dis_tab()
self.eth_output_syn_reg()
self.eth_output_tables()
if self.expcnf:
self.eth_output_expcnf()
def dbg_modules(self):
def print_mod(m):
sys.stdout.write("%-30s " % (m))
dep = self.module[m][:]
for i in range(len(dep)):
if dep[i] not in self.module:
dep[i] = '*' + dep[i]
print(', '.join(dep))
# end of print_mod()
(mod_ord, mod_cyc) = dependency_compute(self.module_ord, self.module, ignore_fn = lambda t: t not in self.module)
print("\n# ASN.1 Moudules")
print("Module name Dependency")
print("-" * 100)
new_ord = False
for m in (self.module_ord):
print_mod(m)
new_ord = new_ord or (self.module_ord.index(m) != mod_ord.index(m))
if new_ord:
print("\n# ASN.1 Moudules - in dependency order")
print("Module name Dependency")
print("-" * 100)
for m in (mod_ord):
print_mod(m)
if mod_cyc:
print("\nCyclic dependencies:")
for i in (list(range(len(mod_cyc)))):
print("%02d: %s" % (i + 1, str(mod_cyc[i])))
#--- EthCnf -------------------------------------------------------------------
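# Parser and store for the conformance (.cnf) file that customises code
# generation. Each entry in self.tblcfg describes one directive table: the
# name of the value it stores, its default, and whether duplicate and unused
# entries should be reported.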
class EthCnf:
def __init__(self):
self.ectx = None
self.tblcfg = {}
self.table = {}
self.order = {}
self.fn = {}
self.report = {}
self.suppress_line = False
self.include_path = []
# Value name Default value Duplicity check Usage check
self.tblcfg['EXPORTS'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['MAKE_ENUM'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['USE_VALS_EXT'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['PDU'] = { 'val_nm' : 'attr', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['SYNTAX'] = { 'val_nm' : 'attr', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['REGISTER'] = { 'val_nm' : 'attr', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['USER_DEFINED'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['NO_EMIT'] = { 'val_nm' : 'flag', 'val_dflt' : 0, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['MODULE'] = { 'val_nm' : 'proto', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : False }
self.tblcfg['OMIT_ASSIGNMENT'] = { 'val_nm' : 'omit', 'val_dflt' : False, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['NO_OMIT_ASSGN'] = { 'val_nm' : 'omit', 'val_dflt' : True, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['VIRTUAL_ASSGN'] = { 'val_nm' : 'name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['SET_TYPE'] = { 'val_nm' : 'type', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['TYPE_RENAME'] = { 'val_nm' : 'eth_name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['FIELD_RENAME'] = { 'val_nm' : 'eth_name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['IMPORT_TAG'] = { 'val_nm' : 'ttag', 'val_dflt' : (), 'chk_dup' : True, 'chk_use' : False }
self.tblcfg['FN_PARS'] = { 'val_nm' : 'pars', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['TYPE_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : False }
self.tblcfg['ETYPE_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : False }
self.tblcfg['FIELD_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['EFIELD_ATTR'] = { 'val_nm' : 'attr', 'val_dflt' : {}, 'chk_dup' : True, 'chk_use' : True }
self.tblcfg['ASSIGNED_ID'] = { 'val_nm' : 'ids', 'val_dflt' : {}, 'chk_dup' : False,'chk_use' : False }
self.tblcfg['ASSIGN_VALUE_TO_TYPE'] = { 'val_nm' : 'name', 'val_dflt' : None, 'chk_dup' : True, 'chk_use' : True }
for k in list(self.tblcfg.keys()) :
self.table[k] = {}
self.order[k] = []
def add_item(self, table, key, fn, lineno, **kw):
if self.tblcfg[table]['chk_dup'] and key in self.table[table]:
warnings.warn_explicit("Duplicated %s for %s. Previous one is at %s:%d" %
(table, key, self.table[table][key]['fn'], self.table[table][key]['lineno']),
UserWarning, fn, lineno)
return
self.table[table][key] = {'fn' : fn, 'lineno' : lineno, 'used' : False}
self.table[table][key].update(kw)
self.order[table].append(key)
def update_item(self, table, key, fn, lineno, **kw):
if key not in self.table[table]:
self.table[table][key] = {'fn' : fn, 'lineno' : lineno, 'used' : False}
self.order[table].append(key)
self.table[table][key][self.tblcfg[table]['val_nm']] = {}
self.table[table][key][self.tblcfg[table]['val_nm']].update(kw[self.tblcfg[table]['val_nm']])
def get_order(self, table):
return self.order[table]
def check_item(self, table, key):
return key in self.table[table]
def copy_item(self, table, dst_key, src_key):
if (src_key in self.table[table]):
self.table[table][dst_key] = self.table[table][src_key]
def check_item_value(self, table, key, **kw):
return key in self.table[table] and kw.get('val_nm', self.tblcfg[table]['val_nm']) in self.table[table][key]
def use_item(self, table, key, **kw):
vdflt = kw.get('val_dflt', self.tblcfg[table]['val_dflt'])
if key not in self.table[table]: return vdflt
vname = kw.get('val_nm', self.tblcfg[table]['val_nm'])
#print "use_item() - set used for %s %s" % (table, key)
self.table[table][key]['used'] = True
return self.table[table][key].get(vname, vdflt)
def omit_assignment(self, type, ident, module):
if self.ectx.conform.use_item('OMIT_ASSIGNMENT', ident):
return True
if self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*') or \
self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*'+type) or \
self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*/'+module) or \
self.ectx.conform.use_item('OMIT_ASSIGNMENT', '*'+type+'/'+module):
return self.ectx.conform.use_item('NO_OMIT_ASSGN', ident)
return False
def add_fn_line(self, name, ctx, line, fn, lineno):
if name not in self.fn:
self.fn[name] = {'FN_HDR' : None, 'FN_FTR' : None, 'FN_BODY' : None}
if (self.fn[name][ctx]):
self.fn[name][ctx]['text'] += line
else:
self.fn[name][ctx] = {'text' : line, 'used' : False,
'fn' : fn, 'lineno' : lineno}
def get_fn_presence(self, name):
#print "get_fn_presence('%s'):%s" % (name, str(self.fn.has_key(name)))
#if self.fn.has_key(name): print self.fn[name]
return name in self.fn
def get_fn_body_presence(self, name):
return name in self.fn and self.fn[name]['FN_BODY']
def get_fn_text(self, name, ctx):
        if (name not in self.fn):
            return ''
        if (not self.fn[name][ctx]):
            return ''
        self.fn[name][ctx]['used'] = True
        out = self.fn[name][ctx]['text']
        if (not self.suppress_line):
            out = '#line %u "%s"\n%s\n' % (self.fn[name][ctx]['lineno'], rel_dissector_path(self.fn[name][ctx]['fn']), out)
return out
def add_pdu(self, par, is_new, fn, lineno):
#print "add_pdu(par=%s, %s, %d)" % (str(par), fn, lineno)
(reg, hidden) = (None, False)
if (len(par) > 1): reg = par[1]
if (reg and reg[0]=='@'): (reg, hidden) = (reg[1:], True)
attr = {'new' : is_new, 'reg' : reg, 'hidden' : hidden, 'need_decl' : False, 'export' : False}
self.add_item('PDU', par[0], attr=attr, fn=fn, lineno=lineno)
return
def add_syntax(self, par, fn, lineno):
#print "add_syntax(par=%s, %s, %d)" % (str(par), fn, lineno)
        if (len(par) >= 2):
name = par[1]
else:
name = '"'+par[0]+'"'
attr = { 'pdu' : par[0] }
self.add_item('SYNTAX', name, attr=attr, fn=fn, lineno=lineno)
return
def add_register(self, pdu, par, fn, lineno):
#print "add_register(pdu=%s, par=%s, %s, %d)" % (pdu, str(par), fn, lineno)
if (par[0] in ('N', 'NUM')): rtype = 'NUM'; (pmin, pmax) = (2, 2)
elif (par[0] in ('S', 'STR')): rtype = 'STR'; (pmin, pmax) = (2, 2)
elif (par[0] in ('B', 'BER')): rtype = 'BER'; (pmin, pmax) = (1, 2)
elif (par[0] in ('P', 'PER')): rtype = 'PER'; (pmin, pmax) = (1, 2)
else: warnings.warn_explicit("Unknown registration type '%s'" % (par[2]), UserWarning, fn, lineno); return
if ((len(par)-1) < pmin):
warnings.warn_explicit("Too few parameters for %s registration type. At least %d parameters are required" % (rtype, pmin), UserWarning, fn, lineno)
return
if ((len(par)-1) > pmax):
warnings.warn_explicit("Too many parameters for %s registration type. Only %d parameters are allowed" % (rtype, pmax), UserWarning, fn, lineno)
attr = {'pdu' : pdu, 'rtype' : rtype}
if (rtype in ('NUM', 'STR')):
attr['rtable'] = par[1]
attr['rport'] = par[2]
rkey = '/'.join([rtype, attr['rtable'], attr['rport']])
elif (rtype in ('BER', 'PER')):
attr['roid'] = par[1]
attr['roidname'] = '""'
if (len(par)>=3):
attr['roidname'] = par[2]
elif attr['roid'][0] != '"':
attr['roidname'] = '"' + attr['roid'] + '"'
rkey = '/'.join([rtype, attr['roid']])
self.add_item('REGISTER', rkey, attr=attr, fn=fn, lineno=lineno)
def check_par(self, par, pmin, pmax, fn, lineno):
for i in range(len(par)):
if par[i] == '-':
par[i] = None
continue
if par[i][0] == '#':
par[i:] = []
break
if len(par) < pmin:
warnings.warn_explicit("Too few parameters. At least %d parameters are required" % (pmin), UserWarning, fn, lineno)
return None
if (pmax >= 0) and (len(par) > pmax):
warnings.warn_explicit("Too many parameters. Only %d parameters are allowed" % (pmax), UserWarning, fn, lineno)
return par[0:pmax]
return par
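    # read() drives the line-oriented .cnf parser: lines starting with '#.'
    # introduce a directive, plain lines are interpreted in the context set by
    # the last directive, and inside FN_HDR/FN_FTR/FN_BODY blocks a leading
    # '##' is rewritten to '#' before the line is stored.
    # Illustrative fragment (identifier names assumed):
    #   #.PDU
    #   ROS    @ros        # '@' marks the registration as hidden
    #   #.END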
def read(self, fn):
def get_par(line, pmin, pmax, fn, lineno):
par = line.split(None, pmax)
par = self.check_par(par, pmin, pmax, fn, lineno)
return par
def get_par_nm(line, pmin, pmax, fn, lineno):
if pmax:
par = line.split(None, pmax)
else:
par = [line,]
for i in range(len(par)):
if par[i][0] == '#':
par[i:] = []
break
if len(par) < pmin:
warnings.warn_explicit("Too few parameters. At least %d parameters are required" % (pmin), UserWarning, fn, lineno)
return None
if len(par) > pmax:
nmpar = par[pmax]
else:
nmpar = ''
nmpars = {}
nmpar_first = re.compile(r'^\s*(?P<attr>[_A-Z][_A-Z0-9]*)\s*=\s*')
nmpar_next = re.compile(r'\s+(?P<attr>[_A-Z][_A-Z0-9]*)\s*=\s*')
nmpar_end = re.compile(r'\s*$')
result = nmpar_first.search(nmpar)
pos = 0
while result:
k = result.group('attr')
pos = result.end()
result = nmpar_next.search(nmpar, pos)
p1 = pos
if result:
p2 = result.start()
else:
p2 = nmpar_end.search(nmpar, pos).start()
v = nmpar[p1:p2]
nmpars[k] = v
if len(par) > pmax:
par[pmax] = nmpars
return par
f = open(fn, "r")
lineno = 0
is_import = False
directive = re.compile(r'^\s*#\.(?P<name>[A-Z_][A-Z_0-9]*)(\s+|$)')
cdirective = re.compile(r'^\s*##')
report = re.compile(r'^TABLE(?P<num>\d*)_(?P<type>HDR|BODY|FTR)$')
comment = re.compile(r'^\s*#[^.#]')
empty = re.compile(r'^\s*$')
ctx = None
name = ''
default_flags = 0x00
stack = []
while True:
if not f.closed:
line = f.readline()
lineno += 1
else:
line = None
if not line:
if not f.closed:
f.close()
if stack:
frec = stack.pop()
fn, f, lineno, is_import = frec['fn'], frec['f'], frec['lineno'], frec['is_import']
continue
else:
break
if comment.search(line): continue
result = directive.search(line)
if result: # directive
rep_result = report.search(result.group('name'))
if result.group('name') == 'END_OF_CNF':
f.close()
elif result.group('name') == 'OPT':
ctx = result.group('name')
par = get_par(line[result.end():], 0, -1, fn=fn, lineno=lineno)
if not par: continue
self.set_opt(par[0], par[1:], fn, lineno)
ctx = None
elif result.group('name') in ('PDU', 'PDU_NEW', 'REGISTER', 'REGISTER_NEW',
'MODULE', 'MODULE_IMPORT',
'OMIT_ASSIGNMENT', 'NO_OMIT_ASSGN',
'VIRTUAL_ASSGN', 'SET_TYPE', 'ASSIGN_VALUE_TO_TYPE',
'TYPE_RENAME', 'FIELD_RENAME', 'TF_RENAME', 'IMPORT_TAG',
'TYPE_ATTR', 'ETYPE_ATTR', 'FIELD_ATTR', 'EFIELD_ATTR',
'SYNTAX', 'SYNTAX_NEW'):
ctx = result.group('name')
elif result.group('name') in ('OMIT_ALL_ASSIGNMENTS', 'OMIT_ASSIGNMENTS_EXCEPT',
'OMIT_ALL_TYPE_ASSIGNMENTS', 'OMIT_TYPE_ASSIGNMENTS_EXCEPT',
'OMIT_ALL_VALUE_ASSIGNMENTS', 'OMIT_VALUE_ASSIGNMENTS_EXCEPT'):
ctx = result.group('name')
key = '*'
if ctx in ('OMIT_ALL_TYPE_ASSIGNMENTS', 'OMIT_TYPE_ASSIGNMENTS_EXCEPT'):
key += 'T'
if ctx in ('OMIT_ALL_VALUE_ASSIGNMENTS', 'OMIT_VALUE_ASSIGNMENTS_EXCEPT'):
key += 'V'
par = get_par(line[result.end():], 0, 1, fn=fn, lineno=lineno)
if par:
key += '/' + par[0]
self.add_item('OMIT_ASSIGNMENT', key, omit=True, fn=fn, lineno=lineno)
if ctx in ('OMIT_ASSIGNMENTS_EXCEPT', 'OMIT_TYPE_ASSIGNMENTS_EXCEPT', 'OMIT_VALUE_ASSIGNMENTS_EXCEPT'):
ctx = 'NO_OMIT_ASSGN'
else:
ctx = None
elif result.group('name') in ('EXPORTS', 'MODULE_EXPORTS', 'USER_DEFINED', 'NO_EMIT'):
ctx = result.group('name')
default_flags = EF_TYPE|EF_VALS
if ctx == 'MODULE_EXPORTS':
ctx = 'EXPORTS'
default_flags |= EF_MODULE
if ctx == 'EXPORTS':
par = get_par(line[result.end():], 0, 5, fn=fn, lineno=lineno)
else:
par = get_par(line[result.end():], 0, 1, fn=fn, lineno=lineno)
if not par: continue
p = 1
if (par[0] == 'WITH_VALS'): default_flags |= EF_TYPE|EF_VALS
                    elif (par[0] == 'WITHOUT_VALS'): default_flags |= EF_TYPE; default_flags &= ~EF_VALS
elif (par[0] == 'ONLY_VALS'): default_flags &= ~EF_TYPE; default_flags |= EF_VALS
elif (ctx == 'EXPORTS'): p = 0
else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[0]), UserWarning, fn, lineno)
for i in range(p, len(par)):
if (par[i] == 'ONLY_ENUM'): default_flags &= ~(EF_TYPE|EF_VALS); default_flags |= EF_ENUM
elif (par[i] == 'WITH_ENUM'): default_flags |= EF_ENUM
elif (par[i] == 'VALS_WITH_TABLE'): default_flags |= EF_TABLE
elif (par[i] == 'WS_DLL'): default_flags |= EF_WS_DLL
elif (par[i] == 'EXTERN'): default_flags |= EF_EXTERN
elif (par[i] == 'NO_PROT_PREFIX'): default_flags |= EF_NO_PROT
else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno)
elif result.group('name') in ('MAKE_ENUM', 'MAKE_DEFINES'):
ctx = result.group('name')
default_flags = EF_ENUM
if ctx == 'MAKE_ENUM': default_flags |= EF_NO_PROT|EF_NO_TYPE
if ctx == 'MAKE_DEFINES': default_flags |= EF_DEFINE|EF_UCASE|EF_NO_TYPE
par = get_par(line[result.end():], 0, 3, fn=fn, lineno=lineno)
for i in range(0, len(par)):
if (par[i] == 'NO_PROT_PREFIX'): default_flags |= EF_NO_PROT
elif (par[i] == 'PROT_PREFIX'): default_flags &= ~ EF_NO_PROT
elif (par[i] == 'NO_TYPE_PREFIX'): default_flags |= EF_NO_TYPE
elif (par[i] == 'TYPE_PREFIX'): default_flags &= ~ EF_NO_TYPE
elif (par[i] == 'UPPER_CASE'): default_flags |= EF_UCASE
elif (par[i] == 'NO_UPPER_CASE'): default_flags &= ~EF_UCASE
else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno)
elif result.group('name') == 'USE_VALS_EXT':
ctx = result.group('name')
default_flags = 0xFF
elif result.group('name') == 'FN_HDR':
minp = 1
if (ctx in ('FN_PARS',)) and name: minp = 0
par = get_par(line[result.end():], minp, 1, fn=fn, lineno=lineno)
if (not par) and (minp > 0): continue
ctx = result.group('name')
if par: name = par[0]
elif result.group('name') == 'FN_FTR':
minp = 1
if (ctx in ('FN_PARS','FN_HDR')) and name: minp = 0
par = get_par(line[result.end():], minp, 1, fn=fn, lineno=lineno)
if (not par) and (minp > 0): continue
ctx = result.group('name')
if par: name = par[0]
elif result.group('name') == 'FN_BODY':
par = get_par_nm(line[result.end():], 1, 1, fn=fn, lineno=lineno)
if not par: continue
ctx = result.group('name')
name = par[0]
if len(par) > 1:
self.add_item('FN_PARS', name, pars=par[1], fn=fn, lineno=lineno)
elif result.group('name') == 'FN_PARS':
par = get_par_nm(line[result.end():], 0, 1, fn=fn, lineno=lineno)
ctx = result.group('name')
if not par:
name = None
elif len(par) == 1:
name = par[0]
self.add_item(ctx, name, pars={}, fn=fn, lineno=lineno)
elif len(par) > 1:
self.add_item(ctx, par[0], pars=par[1], fn=fn, lineno=lineno)
ctx = None
elif result.group('name') == 'CLASS':
par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno)
if not par: continue
ctx = result.group('name')
name = par[0]
add_class_ident(name)
                    if not name.split('$')[-1].isupper():
                        warnings.warn_explicit("Information object class name should be all upper-case (%s)" % (name),
                                               UserWarning, fn, lineno)
elif result.group('name') == 'ASSIGNED_OBJECT_IDENTIFIER':
par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno)
if not par: continue
self.update_item('ASSIGNED_ID', 'OBJECT_IDENTIFIER', ids={par[0] : par[0]}, fn=fn, lineno=lineno)
elif rep_result: # Reports
num = rep_result.group('num')
type = rep_result.group('type')
if type == 'BODY':
par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno)
if not par: continue
else:
par = get_par(line[result.end():], 0, 0, fn=fn, lineno=lineno)
rep = { 'type' : type, 'var' : None, 'text' : '', 'fn' : fn, 'lineno' : lineno }
if len(par) > 0:
rep['var'] = par[0]
self.report.setdefault(num, []).append(rep)
ctx = 'TABLE'
name = num
elif result.group('name') in ('INCLUDE', 'IMPORT') :
is_imp = result.group('name') == 'IMPORT'
par = get_par(line[result.end():], 1, 1, fn=fn, lineno=lineno)
if not par:
warnings.warn_explicit("%s requires parameter" % (result.group('name'),), UserWarning, fn, lineno)
continue
fname = par[0]
#print "Try include: %s" % (fname)
if (not os.path.exists(fname)):
fname = os.path.join(os.path.split(fn)[0], par[0])
#print "Try include: %s" % (fname)
i = 0
while not os.path.exists(fname) and (i < len(self.include_path)):
fname = os.path.join(self.include_path[i], par[0])
#print "Try include: %s" % (fname)
i += 1
if (not os.path.exists(fname)):
if is_imp:
continue # just ignore
else:
fname = par[0] # report error
fnew = open(fname, "r")
stack.append({'fn' : fn, 'f' : f, 'lineno' : lineno, 'is_import' : is_import})
fn, f, lineno, is_import = par[0], fnew, 0, is_imp
elif result.group('name') == 'END':
ctx = None
else:
warnings.warn_explicit("Unknown directive '%s'" % (result.group('name')), UserWarning, fn, lineno)
continue
if not ctx:
if not empty.match(line):
warnings.warn_explicit("Non-empty line in empty context", UserWarning, fn, lineno)
elif ctx == 'OPT':
if empty.match(line): continue
par = get_par(line, 1, -1, fn=fn, lineno=lineno)
if not par: continue
self.set_opt(par[0], par[1:], fn, lineno)
elif ctx in ('EXPORTS', 'USER_DEFINED', 'NO_EMIT'):
if empty.match(line): continue
if ctx == 'EXPORTS':
par = get_par(line, 1, 6, fn=fn, lineno=lineno)
else:
par = get_par(line, 1, 2, fn=fn, lineno=lineno)
if not par: continue
flags = default_flags
p = 2
if (len(par)>=2):
if (par[1] == 'WITH_VALS'): flags |= EF_TYPE|EF_VALS
                        elif (par[1] == 'WITHOUT_VALS'): flags |= EF_TYPE; flags &= ~EF_VALS
elif (par[1] == 'ONLY_VALS'): flags &= ~EF_TYPE; flags |= EF_VALS
elif (ctx == 'EXPORTS'): p = 1
else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[1]), UserWarning, fn, lineno)
for i in range(p, len(par)):
if (par[i] == 'ONLY_ENUM'): flags &= ~(EF_TYPE|EF_VALS); flags |= EF_ENUM
elif (par[i] == 'WITH_ENUM'): flags |= EF_ENUM
elif (par[i] == 'VALS_WITH_TABLE'): flags |= EF_TABLE
elif (par[i] == 'WS_DLL'): flags |= EF_WS_DLL
elif (par[i] == 'EXTERN'): flags |= EF_EXTERN
elif (par[i] == 'NO_PROT_PREFIX'): flags |= EF_NO_PROT
else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno)
self.add_item(ctx, par[0], flag=flags, fn=fn, lineno=lineno)
elif ctx in ('MAKE_ENUM', 'MAKE_DEFINES'):
if empty.match(line): continue
par = get_par(line, 1, 4, fn=fn, lineno=lineno)
if not par: continue
flags = default_flags
for i in range(1, len(par)):
if (par[i] == 'NO_PROT_PREFIX'): flags |= EF_NO_PROT
elif (par[i] == 'PROT_PREFIX'): flags &= ~ EF_NO_PROT
elif (par[i] == 'NO_TYPE_PREFIX'): flags |= EF_NO_TYPE
elif (par[i] == 'TYPE_PREFIX'): flags &= ~ EF_NO_TYPE
elif (par[i] == 'UPPER_CASE'): flags |= EF_UCASE
elif (par[i] == 'NO_UPPER_CASE'): flags &= ~EF_UCASE
else: warnings.warn_explicit("Unknown parameter value '%s'" % (par[i]), UserWarning, fn, lineno)
self.add_item('MAKE_ENUM', par[0], flag=flags, fn=fn, lineno=lineno)
elif ctx == 'USE_VALS_EXT':
if empty.match(line): continue
par = get_par(line, 1, 1, fn=fn, lineno=lineno)
if not par: continue
flags = default_flags
self.add_item('USE_VALS_EXT', par[0], flag=flags, fn=fn, lineno=lineno)
elif ctx in ('PDU', 'PDU_NEW'):
if empty.match(line): continue
par = get_par(line, 1, 5, fn=fn, lineno=lineno)
if not par: continue
is_new = False
if (ctx == 'PDU_NEW'): is_new = True
self.add_pdu(par[0:2], is_new, fn, lineno)
if (len(par)>=3):
self.add_register(par[0], par[2:5], fn, lineno)
elif ctx in ('SYNTAX', 'SYNTAX_NEW'):
if empty.match(line): continue
par = get_par(line, 1, 2, fn=fn, lineno=lineno)
if not par: continue
if not self.check_item('PDU', par[0]):
is_new = False
if (ctx == 'SYNTAX_NEW'): is_new = True
self.add_pdu(par[0:1], is_new, fn, lineno)
self.add_syntax(par, fn, lineno)
elif ctx in ('REGISTER', 'REGISTER_NEW'):
if empty.match(line): continue
par = get_par(line, 3, 4, fn=fn, lineno=lineno)
if not par: continue
if not self.check_item('PDU', par[0]):
is_new = False
if (ctx == 'REGISTER_NEW'): is_new = True
self.add_pdu(par[0:1], is_new, fn, lineno)
self.add_register(par[0], par[1:4], fn, lineno)
elif ctx in ('MODULE', 'MODULE_IMPORT'):
if empty.match(line): continue
par = get_par(line, 2, 2, fn=fn, lineno=lineno)
if not par: continue
self.add_item('MODULE', par[0], proto=par[1], fn=fn, lineno=lineno)
elif ctx == 'IMPORT_TAG':
if empty.match(line): continue
par = get_par(line, 3, 3, fn=fn, lineno=lineno)
if not par: continue
self.add_item(ctx, par[0], ttag=(par[1], par[2]), fn=fn, lineno=lineno)
elif ctx == 'OMIT_ASSIGNMENT':
if empty.match(line): continue
par = get_par(line, 1, 1, fn=fn, lineno=lineno)
if not par: continue
self.add_item(ctx, par[0], omit=True, fn=fn, lineno=lineno)
elif ctx == 'NO_OMIT_ASSGN':
if empty.match(line): continue
par = get_par(line, 1, 1, fn=fn, lineno=lineno)
if not par: continue
self.add_item(ctx, par[0], omit=False, fn=fn, lineno=lineno)
elif ctx == 'VIRTUAL_ASSGN':
if empty.match(line): continue
par = get_par(line, 2, -1, fn=fn, lineno=lineno)
if not par: continue
if (len(par[1].split('/')) > 1) and not self.check_item('SET_TYPE', par[1]):
self.add_item('SET_TYPE', par[1], type=par[0], fn=fn, lineno=lineno)
self.add_item('VIRTUAL_ASSGN', par[1], name=par[0], fn=fn, lineno=lineno)
for nm in par[2:]:
self.add_item('SET_TYPE', nm, type=par[0], fn=fn, lineno=lineno)
if not par[0][0].isupper():
warnings.warn_explicit("Virtual assignment should have uppercase name (%s)" % (par[0]),
UserWarning, fn, lineno)
elif ctx == 'SET_TYPE':
if empty.match(line): continue
par = get_par(line, 2, 2, fn=fn, lineno=lineno)
if not par: continue
if not self.check_item('VIRTUAL_ASSGN', par[0]):
self.add_item('SET_TYPE', par[0], type=par[1], fn=fn, lineno=lineno)
if not par[1][0].isupper():
warnings.warn_explicit("Set type should have uppercase name (%s)" % (par[1]),
UserWarning, fn, lineno)
elif ctx == 'ASSIGN_VALUE_TO_TYPE':
if empty.match(line): continue
par = get_par(line, 2, 2, fn=fn, lineno=lineno)
if not par: continue
self.add_item(ctx, par[0], name=par[1], fn=fn, lineno=lineno)
elif ctx == 'TYPE_RENAME':
if empty.match(line): continue
par = get_par(line, 2, 2, fn=fn, lineno=lineno)
if not par: continue
self.add_item('TYPE_RENAME', par[0], eth_name=par[1], fn=fn, lineno=lineno)
if not par[1][0].isupper():
warnings.warn_explicit("Type should be renamed to uppercase name (%s)" % (par[1]),
UserWarning, fn, lineno)
elif ctx == 'FIELD_RENAME':
if empty.match(line): continue
par = get_par(line, 2, 2, fn=fn, lineno=lineno)
if not par: continue
self.add_item('FIELD_RENAME', par[0], eth_name=par[1], fn=fn, lineno=lineno)
if not par[1][0].islower():
warnings.warn_explicit("Field should be renamed to lowercase name (%s)" % (par[1]),
UserWarning, fn, lineno)
elif ctx == 'TF_RENAME':
if empty.match(line): continue
par = get_par(line, 2, 2, fn=fn, lineno=lineno)
if not par: continue
tmpu = par[1][0].upper() + par[1][1:]
tmpl = par[1][0].lower() + par[1][1:]
self.add_item('TYPE_RENAME', par[0], eth_name=tmpu, fn=fn, lineno=lineno)
if not tmpu[0].isupper():
warnings.warn_explicit("Type should be renamed to uppercase name (%s)" % (par[1]),
UserWarning, fn, lineno)
self.add_item('FIELD_RENAME', par[0], eth_name=tmpl, fn=fn, lineno=lineno)
if not tmpl[0].islower():
warnings.warn_explicit("Field should be renamed to lowercase name (%s)" % (par[1]),
UserWarning, fn, lineno)
elif ctx in ('TYPE_ATTR', 'ETYPE_ATTR', 'FIELD_ATTR', 'EFIELD_ATTR'):
if empty.match(line): continue
par = get_par_nm(line, 1, 1, fn=fn, lineno=lineno)
if not par: continue
self.add_item(ctx, par[0], attr=par[1], fn=fn, lineno=lineno)
elif ctx == 'FN_PARS':
if empty.match(line): continue
if name:
par = get_par_nm(line, 0, 0, fn=fn, lineno=lineno)
else:
par = get_par_nm(line, 1, 1, fn=fn, lineno=lineno)
if not par: continue
if name:
self.update_item(ctx, name, pars=par[0], fn=fn, lineno=lineno)
else:
self.add_item(ctx, par[0], pars=par[1], fn=fn, lineno=lineno)
elif ctx in ('FN_HDR', 'FN_FTR', 'FN_BODY'):
result = cdirective.search(line)
if result: # directive
line = '#' + line[result.end():]
self.add_fn_line(name, ctx, line, fn=fn, lineno=lineno)
elif ctx == 'CLASS':
if empty.match(line): continue
par = get_par(line, 1, 3, fn=fn, lineno=lineno)
if not par: continue
if not set_type_to_class(name, par[0], par[1:]):
warnings.warn_explicit("Could not set type of class member %s.&%s to %s" % (name, par[0], par[1]),
UserWarning, fn, lineno)
elif ctx == 'TABLE':
self.report[name][-1]['text'] += line
def set_opt(self, opt, par, fn, lineno):
#print "set_opt: %s, %s" % (opt, par)
if opt in ("-I",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.include_path.append(par[0])
elif opt in ("-b", "BER", "CER", "DER"):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.encoding = 'ber'
elif opt in ("PER",):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.encoding = 'per'
elif opt in ("-p", "PROTO"):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.proto_opt = par[0]
self.ectx.merge_modules = True
elif opt in ("ALIGNED",):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.aligned = True
elif opt in ("-u", "UNALIGNED"):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.aligned = False
elif opt in ("-d",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.dbgopt = par[0]
elif opt in ("-e",):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.expcnf = True
elif opt in ("-S",):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.merge_modules = True
elif opt in ("GROUP_BY_PROT",):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.group_by_prot = True
elif opt in ("-o",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.outnm_opt = par[0]
elif opt in ("-O",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.output.outdir = par[0]
elif opt in ("-s",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.output.single_file = par[0]
elif opt in ("-k",):
par = self.check_par(par, 0, 0, fn, lineno)
self.ectx.output.keep = True
elif opt in ("-L",):
par = self.check_par(par, 0, 0, fn, lineno)
self.suppress_line = True
elif opt in ("EMBEDDED_PDV_CB",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.default_embedded_pdv_cb = par[0]
elif opt in ("EXTERNAL_TYPE_CB",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.default_external_type_cb = par[0]
elif opt in ("-r",):
par = self.check_par(par, 1, 1, fn, lineno)
if not par: return
self.ectx.remove_prefix = par[0]
else:
warnings.warn_explicit("Unknown option %s" % (opt),
UserWarning, fn, lineno)
def dbg_print(self):
print("\n# Conformance values")
print("%-15s %-4s %-15s %-20s %s" % ("File", "Line", "Table", "Key", "Value"))
print("-" * 100)
tbls = sorted(self.table.keys())
for t in tbls:
keys = sorted(self.table[t].keys())
for k in keys:
print("%-15s %4s %-15s %-20s %s" % (
self.table[t][k]['fn'], self.table[t][k]['lineno'], t, k, str(self.table[t][k][self.tblcfg[t]['val_nm']])))
def unused_report(self):
tbls = sorted(self.table.keys())
for t in tbls:
if not self.tblcfg[t]['chk_use']: continue
keys = sorted(self.table[t].keys())
for k in keys:
if not self.table[t][k]['used']:
warnings.warn_explicit("Unused %s for %s" % (t, k),
UserWarning, self.table[t][k]['fn'], self.table[t][k]['lineno'])
fnms = list(self.fn.keys())
fnms.sort()
for f in fnms:
keys = sorted(self.fn[f].keys())
for k in keys:
if not self.fn[f][k]: continue
if not self.fn[f][k]['used']:
warnings.warn_explicit("Unused %s for %s" % (k, f),
UserWarning, self.fn[f][k]['fn'], self.fn[f][k]['lineno'])
#--- EthOut -------------------------------------------------------------------
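# Manage the generated output files: naming them packet-<proto>[-<type>].<ext>,
# opening them with the standard "do not modify" header, tracking what was
# created, and optionally merging everything into a single file through
# make_single_file()/do_include().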
class EthOut:
def __init__(self):
self.ectx = None
self.outnm = None
self.outdir = '.'
self.single_file = None
self.created_files = {}
self.created_files_ord = []
self.keep = False
def outcomment(self, ln, comment=None):
if comment:
return '%s %s\n' % (comment, ln)
else:
return '/* %-74s */\n' % (ln)
def created_file_add(self, name, keep_anyway):
name = os.path.normcase(os.path.abspath(name))
if name not in self.created_files:
self.created_files_ord.append(name)
self.created_files[name] = keep_anyway
else:
self.created_files[name] = self.created_files[name] or keep_anyway
def created_file_exists(self, name):
name = os.path.normcase(os.path.abspath(name))
return name in self.created_files
#--- output_fname -------------------------------------------------------
def output_fname(self, ftype, ext='c'):
fn = ''
        if ext not in ('cnf',):
fn += 'packet-'
fn += self.outnm
if (ftype):
fn += '-' + ftype
fn += '.' + ext
return fn
#--- file_open -------------------------------------------------------
def file_open(self, ftype, ext='c'):
fn = self.output_fname(ftype, ext=ext)
if self.created_file_exists(fn):
fx = open(fn, 'a')
else:
fx = open(fn, 'w')
comment = None
if ext in ('cnf',):
comment = '#'
fx.write(self.fhdr(fn, comment = comment))
else:
if (not self.single_file and not self.created_file_exists(fn)):
fx.write(self.fhdr(fn))
if not self.ectx.merge_modules:
fx.write('\n')
mstr = "--- "
if self.ectx.groups():
mstr += "Module"
if (len(self.ectx.modules) > 1):
mstr += "s"
for (m, p) in self.ectx.modules:
mstr += " %s" % (m)
else:
mstr += "Module %s" % (self.ectx.Module())
mstr += " --- --- ---"
fx.write(self.outcomment(mstr, comment))
fx.write('\n')
return fx
#--- file_close -------------------------------------------------------
def file_close(self, fx, discard=False, keep_anyway=False):
fx.close()
if discard and not self.created_file_exists(fx.name):
os.unlink(fx.name)
else:
self.created_file_add(fx.name, keep_anyway)
#--- fhdr -------------------------------------------------------
def fhdr(self, fn, comment=None):
out = ''
out += self.outcomment('Do not modify this file. Changes will be overwritten.', comment)
out += self.outcomment('Generated automatically by the ASN.1 to Wireshark dissector compiler', comment)
out += self.outcomment(os.path.basename(fn), comment)
out += self.outcomment(' '.join(sys.argv), comment)
out += '\n'
# Make Windows path separator look like Unix path separator
out = out.replace('\\', '/')
# Change absolute paths and relative paths generated outside
# source directory to paths relative to asn1/<proto> subdir.
out = re.sub(r'(\s)[./]\S*(/tools/|/epan/)', r'\1../..\2', out)
out = re.sub(r'(\s)[./]\S*/asn1/\S*?([\s/])', r'\1.\2', out)
return out
#--- dbg_print -------------------------------------------------------
def dbg_print(self):
print("\n# Output files")
print("\n".join(self.created_files_ord))
print("\n")
#--- make_single_file -------------------------------------------------------
def make_single_file(self):
if (not self.single_file): return
in_nm = self.single_file + '.c'
out_nm = os.path.join(self.outdir, self.output_fname(''))
self.do_include(out_nm, in_nm)
in_nm = self.single_file + '.h'
if (os.path.exists(in_nm)):
out_nm = os.path.join(self.outdir, self.output_fname('', ext='h'))
self.do_include(out_nm, in_nm)
if (not self.keep):
for fn in self.created_files_ord:
if not self.created_files[fn]:
os.unlink(fn)
#--- do_include -------------------------------------------------------
def do_include(self, out_nm, in_nm):
def check_file(fn, fnlist):
fnfull = os.path.normcase(os.path.abspath(fn))
if (fnfull in fnlist and os.path.exists(fnfull)):
return os.path.normpath(fn)
return None
fin = open(in_nm, "r")
fout = open(out_nm, "w")
fout.write(self.fhdr(out_nm))
fout.write('/* Input file: ' + os.path.basename(in_nm) +' */\n')
fout.write('\n')
fout.write('#line %u "%s"\n' % (1, rel_dissector_path(in_nm)))
include = re.compile(r'^\s*#\s*include\s+[<"](?P<fname>[^>"]+)[>"]', re.IGNORECASE)
        cont_linenum = 0
        while (True):
            cont_linenum = cont_linenum + 1
line = fin.readline()
if (line == ''): break
ifile = None
result = include.search(line)
#if (result): print os.path.normcase(os.path.abspath(result.group('fname')))
if (result):
ifile = check_file(os.path.join(os.path.split(in_nm)[0], result.group('fname')), self.created_files)
if (not ifile):
ifile = check_file(os.path.join(self.outdir, result.group('fname')), self.created_files)
if (not ifile):
ifile = check_file(result.group('fname'), self.created_files)
if (ifile):
fout.write('\n')
fout.write('/*--- Included file: ' + ifile + ' ---*/\n')
fout.write('#line %u "%s"\n' % (1, rel_dissector_path(ifile)))
finc = open(ifile, "r")
fout.write(finc.read())
fout.write('\n')
fout.write('/*--- End of included file: ' + ifile + ' ---*/\n')
fout.write('#line %u "%s"\n' % (cont_linenum+1, rel_dissector_path(in_nm)) )
finc.close()
else:
fout.write(line)
fout.close()
fin.close()
#--- Node ---------------------------------------------------------------------
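# Base class for AST nodes: keyword arguments become attributes, and
# str_depth() pretty-prints the subtree for debugging output.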
class Node:
def __init__(self,*args, **kw):
if len (args) == 0:
self.type = self.__class__.__name__
else:
assert (len(args) == 1)
self.type = args[0]
self.__dict__.update (kw)
def str_child (self, key, child, depth):
indent = " " * (2 * depth)
keystr = indent + key + ": "
if key == 'type': # already processed in str_depth
return ""
if isinstance (child, Node): # ugh
return keystr + "\n" + child.str_depth (depth+1)
if isinstance(child, type ([])):
l = []
for x in child:
if isinstance (x, Node):
l.append (x.str_depth (depth+1))
else:
l.append (indent + " " + str(x) + "\n")
return keystr + "[\n" + ''.join(l) + indent + "]\n"
else:
return keystr + str (child) + "\n"
def str_depth (self, depth): # ugh
indent = " " * (2 * depth)
l = ["%s%s" % (indent, self.type)]
l.append ("".join ([self.str_child (k_v[0], k_v[1], depth + 1) for k_v in list(self.__dict__.items ())]))
return "\n".join (l)
def __repr__(self):
return "\n" + self.str_depth (0)
def to_python (self, ctx):
return self.str_depth (ctx.indent_lev)
def eth_reg(self, ident, ectx):
pass
def fld_obj_repr(self, ectx):
return "/* TO DO %s */" % (str(self))
#--- ValueAssignment -------------------------------------------------------------
class ValueAssignment (Node):
def __init__(self,*args, **kw) :
Node.__init__ (self,*args, **kw)
def eth_reg(self, ident, ectx):
if ectx.conform.omit_assignment('V', self.ident, ectx.Module()): return # Assignment to omit
ectx.eth_reg_vassign(self)
ectx.eth_reg_value(self.ident, self.typ, self.val)
#--- ObjectAssignment -------------------------------------------------------------
class ObjectAssignment (Node):
def __init__(self,*args, **kw) :
Node.__init__ (self,*args, **kw)
def __eq__(self, other):
if self.cls != other.cls:
return False
if len(self.val) != len(other.val):
return False
for f in (list(self.val.keys())):
if f not in other.val:
return False
if isinstance(self.val[f], Node) and isinstance(other.val[f], Node):
if not self.val[f].fld_obj_eq(other.val[f]):
return False
else:
if str(self.val[f]) != str(other.val[f]):
return False
return True
def eth_reg(self, ident, ectx):
def make_virtual_type(cls, field, prefix):
if isinstance(self.val, str): return
if field in self.val and not isinstance(self.val[field], Type_Ref):
vnm = prefix + '-' + self.ident
virtual_tr = Type_Ref(val = vnm)
t = self.val[field]
self.val[field] = virtual_tr
ectx.eth_reg_assign(vnm, t, virt=True)
ectx.eth_reg_type(vnm, t)
t.eth_reg_sub(vnm, ectx)
if field in self.val and ectx.conform.check_item('PDU', cls + '.' + field):
ectx.eth_reg_field(self.val[field].val, self.val[field].val, impl=self.val[field].HasImplicitTag(ectx), pdu=ectx.conform.use_item('PDU', cls + '.' + field))
return
# end of make_virtual_type()
if ectx.conform.omit_assignment('V', self.ident, ectx.Module()): return # Assignment to omit
self.module = ectx.Module()
ectx.eth_reg_oassign(self)
if (self.cls == 'TYPE-IDENTIFIER') or (self.cls == 'ABSTRACT-SYNTAX'):
make_virtual_type(self.cls, '&Type', 'TYPE')
if (self.cls == 'OPERATION'):
make_virtual_type(self.cls, '&ArgumentType', 'ARG')
make_virtual_type(self.cls, '&ResultType', 'RES')
if (self.cls == 'ERROR'):
make_virtual_type(self.cls, '&ParameterType', 'PAR')
#--- Type ---------------------------------------------------------------------
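# Base class for ASN.1 types. It tracks a type's name, tags and constraints
# and implements the registration walk (eth_reg) that maps ASN.1 assignments
# onto Wireshark types and fields, honouring conformance items such as
# SET_TYPE, TYPE_RENAME and VIRTUAL_ASSGN.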
class Type (Node):
def __init__(self,*args, **kw) :
self.name = None
self.constr = None
self.tags = []
self.named_list = None
Node.__init__ (self,*args, **kw)
def IsNamed(self):
if self.name is None :
return False
else:
return True
def HasConstraint(self):
if self.constr is None :
return False
else :
return True
def HasSizeConstraint(self):
return self.HasConstraint() and self.constr.IsSize()
def HasValueConstraint(self):
return self.HasConstraint() and self.constr.IsValue()
def HasPermAlph(self):
return self.HasConstraint() and self.constr.IsPermAlph()
def HasContentsConstraint(self):
return self.HasConstraint() and self.constr.IsContents()
def HasOwnTag(self):
return len(self.tags) > 0
def HasImplicitTag(self, ectx):
return (self.HasOwnTag() and self.tags[0].IsImplicit(ectx))
def IndetermTag(self, ectx):
return False
def AddTag(self, tag):
self.tags[0:0] = [tag]
def GetTag(self, ectx):
#print "GetTag(%s)\n" % self.name;
if (self.HasOwnTag()):
return self.tags[0].GetTag(ectx)
else:
return self.GetTTag(ectx)
def GetTTag(self, ectx):
print("#Unhandled GetTTag() in %s" % (self.type))
print(self.str_depth(1))
return ('BER_CLASS_unknown', 'TAG_unknown')
def SetName(self, name):
self.name = name
def AddConstraint(self, constr):
if not self.HasConstraint():
self.constr = constr
else:
self.constr = Constraint(type = 'Intersection', subtype = [self.constr, constr])
def eth_tname(self):
return '#' + self.type + '_' + str(id(self))
def eth_ftype(self, ectx):
return ('FT_NONE', 'BASE_NONE')
def eth_strings(self):
return 'NULL'
def eth_omit_field(self):
return False
def eth_need_tree(self):
return False
def eth_has_vals(self):
return False
def eth_has_enum(self, tname, ectx):
return self.eth_has_vals() and (ectx.eth_type[tname]['enum'] & EF_ENUM)
def eth_need_pdu(self, ectx):
return None
def eth_named_bits(self):
return None
def eth_reg_sub(self, ident, ectx):
pass
def get_components(self, ectx):
print("#Unhandled get_components() in %s" % (self.type))
print(self.str_depth(1))
return []
def sel_req(self, sel, ectx):
print("#Selection '%s' required for non-CHOICE type %s" % (sel, self.type))
print(self.str_depth(1))
def fld_obj_eq(self, other):
return isinstance(other, Type) and (self.eth_tname() == other.eth_tname())
def eth_reg(self, ident, ectx, tstrip=0, tagflag=False, selflag=False, idx='', parent=None):
#print "eth_reg(): %s, ident=%s, tstrip=%d, tagflag=%s, selflag=%s, parent=%s" %(self.type, ident, tstrip, str(tagflag), str(selflag), str(parent))
#print " ", self
if (ectx.NeedTags() and (len(self.tags) > tstrip)):
tagged_type = self
for i in range(len(self.tags)-1, tstrip-1, -1):
tagged_type = TaggedType(val=tagged_type, tstrip=i)
tagged_type.AddTag(self.tags[i])
if not tagflag: # 1st tagged level
if self.IsNamed() and not selflag:
tagged_type.SetName(self.name)
tagged_type.eth_reg(ident, ectx, tstrip=1, tagflag=tagflag, idx=idx, parent=parent)
return
nm = ''
if ident and self.IsNamed() and not tagflag and not selflag:
nm = ident + '/' + self.name
elif ident:
nm = ident
elif self.IsNamed():
nm = self.name
if not ident and ectx.conform.omit_assignment('T', nm, ectx.Module()): return # Assignment to omit
if not ident: # Assignment
ectx.eth_reg_assign(nm, self)
if self.type == 'Type_Ref' and not self.tr_need_own_fn(ectx):
ectx.eth_reg_type(nm, self)
virtual_tr = Type_Ref(val=ectx.conform.use_item('SET_TYPE', nm))
if (self.type == 'Type_Ref') or ectx.conform.check_item('SET_TYPE', nm):
if ident and (ectx.conform.check_item('TYPE_RENAME', nm) or ectx.conform.get_fn_presence(nm) or selflag):
if ectx.conform.check_item('SET_TYPE', nm):
ectx.eth_reg_type(nm, virtual_tr) # dummy Type Reference
else:
ectx.eth_reg_type(nm, self) # new type
trnm = nm
elif ectx.conform.check_item('SET_TYPE', nm):
trnm = ectx.conform.use_item('SET_TYPE', nm)
elif (self.type == 'Type_Ref') and self.tr_need_own_fn(ectx):
ectx.eth_reg_type(nm, self) # need own function, e.g. for constraints
trnm = nm
else:
trnm = self.val
else:
ectx.eth_reg_type(nm, self)
trnm = nm
if ectx.conform.check_item('VIRTUAL_ASSGN', nm):
vnm = ectx.conform.use_item('VIRTUAL_ASSGN', nm)
ectx.eth_reg_assign(vnm, self, virt=True)
ectx.eth_reg_type(vnm, self)
self.eth_reg_sub(vnm, ectx)
if parent and (ectx.type[parent]['val'].type == 'TaggedType'):
ectx.type[parent]['val'].eth_set_val_name(parent, trnm, ectx)
if ident and not tagflag and not self.eth_omit_field():
ectx.eth_reg_field(nm, trnm, idx=idx, parent=parent, impl=self.HasImplicitTag(ectx))
if ectx.conform.check_item('SET_TYPE', nm):
virtual_tr.eth_reg_sub(nm, ectx)
else:
self.eth_reg_sub(nm, ectx)
def eth_get_size_constr(self, ectx):
(minv, maxv, ext) = ('MIN', 'MAX', False)
if self.HasSizeConstraint():
if self.constr.IsSize():
(minv, maxv, ext) = self.constr.GetSize(ectx)
if (self.constr.type == 'Intersection'):
if self.constr.subtype[0].IsSize():
(minv, maxv, ext) = self.constr.subtype[0].GetSize(ectx)
elif self.constr.subtype[1].IsSize():
(minv, maxv, ext) = self.constr.subtype[1].GetSize(ectx)
if minv == 'MIN': minv = 'NO_BOUND'
if maxv == 'MAX': maxv = 'NO_BOUND'
if (ext): ext = 'TRUE'
else: ext = 'FALSE'
return (minv, maxv, ext)
def eth_get_value_constr(self, ectx):
(minv, maxv, ext) = ('MIN', 'MAX', False)
if self.HasValueConstraint():
(minv, maxv, ext) = self.constr.GetValue(ectx)
if minv == 'MIN': minv = 'NO_BOUND'
if maxv == 'MAX': maxv = 'NO_BOUND'
if str(minv).isdigit():
minv += 'U'
elif (str(minv)[0] == "-") and str(minv)[1:].isdigit():
if (int(minv) == -(2**31)):
minv = "G_MININT32"
elif (int(minv) < -(2**31)):
minv = "G_GINT64_CONSTANT(%s)" % (str(minv))
if str(maxv).isdigit():
if (int(maxv) >= 2**32):
maxv = "G_GUINT64_CONSTANT(%s)" % (str(maxv))
else:
maxv += 'U'
if (ext): ext = 'TRUE'
else: ext = 'FALSE'
return (minv, maxv, ext)
def eth_get_alphabet_constr(self, ectx):
(alph, alphlen) = ('NULL', '0')
if self.HasPermAlph():
alph = self.constr.GetPermAlph(ectx)
if not alph:
alph = 'NULL'
if (alph != 'NULL'):
if (((alph[0] + alph[-1]) == '""') and (not alph.count('"', 1, -1))):
alphlen = str(len(alph) - 2)
else:
alphlen = 'strlen(%s)' % (alph)
return (alph, alphlen)
def eth_type_vals(self, tname, ectx):
if self.eth_has_vals():
print("#Unhandled eth_type_vals('%s') in %s" % (tname, self.type))
print(self.str_depth(1))
return ''
def eth_type_enum(self, tname, ectx):
if self.eth_has_enum(tname, ectx):
print("#Unhandled eth_type_enum('%s') in %s" % (tname, self.type))
print(self.str_depth(1))
return ''
def eth_type_default_table(self, ectx, tname):
return ''
def eth_type_default_body(self, ectx):
print("#Unhandled eth_type_default_body() in %s" % (self.type))
print(self.str_depth(1))
return ''
def eth_type_default_pars(self, ectx, tname):
pars = {
'TNAME' : tname,
'ER' : ectx.encp(),
'FN_VARIANT' : '',
'TREE' : 'tree',
'TVB' : 'tvb',
'OFFSET' : 'offset',
'ACTX' : 'actx',
'HF_INDEX' : 'hf_index',
'VAL_PTR' : 'NULL',
'IMPLICIT_TAG' : 'implicit_tag',
}
if (ectx.eth_type[tname]['tree']):
pars['ETT_INDEX'] = ectx.eth_type[tname]['tree']
if (ectx.merge_modules):
pars['PROTOP'] = ''
else:
pars['PROTOP'] = ectx.eth_type[tname]['proto'] + '_'
return pars
def eth_type_fn(self, proto, tname, ectx):
body = self.eth_type_default_body(ectx, tname)
pars = self.eth_type_default_pars(ectx, tname)
if ectx.conform.check_item('FN_PARS', tname):
pars.update(ectx.conform.use_item('FN_PARS', tname))
elif ectx.conform.check_item('FN_PARS', ectx.eth_type[tname]['ref'][0]):
pars.update(ectx.conform.use_item('FN_PARS', ectx.eth_type[tname]['ref'][0]))
pars['DEFAULT_BODY'] = body
for i in range(4):
for k in list(pars.keys()):
try:
pars[k] = pars[k] % pars
except (ValueError,TypeError):
raise sys.exc_info()[0]("%s\n%s" % (str(pars), sys.exc_info()[1]))
out = '\n'
out += self.eth_type_default_table(ectx, tname) % pars
out += ectx.eth_type_fn_hdr(tname)
out += ectx.eth_type_fn_body(tname, body, pars=pars)
out += ectx.eth_type_fn_ftr(tname)
return out
#--- Value --------------------------------------------------------------------
class Value (Node):
def __init__(self,*args, **kw) :
self.name = None
Node.__init__ (self,*args, **kw)
def SetName(self, name) :
self.name = name
def to_str(self, ectx):
return str(self.val)
def get_dep(self):
return None
def fld_obj_repr(self, ectx):
return self.to_str(ectx)
#--- Value_Ref -----------------------------------------------------------------
class Value_Ref (Value):
def to_str(self, ectx):
return asn2c(self.val)
#--- ObjectClass ---------------------------------------------------------------------
class ObjectClass (Node):
def __init__(self,*args, **kw) :
self.name = None
Node.__init__ (self,*args, **kw)
def SetName(self, name):
self.name = name
add_class_ident(self.name)
def eth_reg(self, ident, ectx):
if ectx.conform.omit_assignment('C', self.name, ectx.Module()): return # Assignment to omit
ectx.eth_reg_objectclass(self.name, self)
#--- Class_Ref -----------------------------------------------------------------
class Class_Ref (ObjectClass):
pass
#--- ObjectClassDefn ---------------------------------------------------------------------
class ObjectClassDefn (ObjectClass):
def reg_types(self):
for fld in self.fields:
repr = fld.fld_repr()
set_type_to_class(self.name, repr[0], repr[1:])
#--- Tag ---------------------------------------------------------------
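# ASN.1 tag: GetTag() maps the tag class onto the BER_CLASS_* constants used
# by the generated code, and IsImplicit() resolves 'default' mode tags
# against the module's tagging default (ectx.tag_def).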
class Tag (Node):
def to_python (self, ctx):
return 'asn1.TYPE(%s,%s)' % (mk_tag_str (ctx, self.tag.cls,
self.tag_typ,
self.tag.num),
self.typ.to_python (ctx))
def IsImplicit(self, ectx):
return ((self.mode == 'IMPLICIT') or ((self.mode == 'default') and (ectx.tag_def != 'EXPLICIT')))
def GetTag(self, ectx):
tc = ''
if (self.cls == 'UNIVERSAL'): tc = 'BER_CLASS_UNI'
elif (self.cls == 'APPLICATION'): tc = 'BER_CLASS_APP'
elif (self.cls == 'CONTEXT'): tc = 'BER_CLASS_CON'
elif (self.cls == 'PRIVATE'): tc = 'BER_CLASS_PRI'
return (tc, self.num)
def eth_tname(self):
n = ''
if (self.cls == 'UNIVERSAL'): n = 'U'
elif (self.cls == 'APPLICATION'): n = 'A'
elif (self.cls == 'CONTEXT'): n = 'C'
elif (self.cls == 'PRIVATE'): n = 'P'
return n + str(self.num)
#--- Constraint ---------------------------------------------------------------
constr_cnt = 0
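# Constraint node. The Is*() predicates classify a possibly nested constraint
# and the Get*() helpers extract its data: for value bounds, Intersection
# narrows the range (max of mins, min of maxes) and Union widens it (min of
# mins, max of maxes). For example, SIZE(1..32) resolves through GetSize()
# to a minimum of 1 and a maximum of 32 with no extension marker.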
class Constraint (Node):
def to_python (self, ctx):
print("Ignoring constraint:", self.type)
return self.subtype.typ.to_python (ctx)
def __str__ (self):
return "Constraint: type=%s, subtype=%s" % (self.type, self.subtype)
def eth_tname(self):
return '#' + self.type + '_' + str(id(self))
def IsSize(self):
        return (self.type == 'Size' and self.subtype.IsValue()) \
            or (self.type == 'Intersection' and (self.subtype[0].IsSize() or self.subtype[1].IsSize()))
def GetSize(self, ectx):
(minv, maxv, ext) = ('MIN', 'MAX', False)
if self.IsSize():
if self.type == 'Size':
(minv, maxv, ext) = self.subtype.GetValue(ectx)
elif self.type == 'Intersection':
if self.subtype[0].IsSize() and not self.subtype[1].IsSize():
(minv, maxv, ext) = self.subtype[0].GetSize(ectx)
elif not self.subtype[0].IsSize() and self.subtype[1].IsSize():
(minv, maxv, ext) = self.subtype[1].GetSize(ectx)
return (minv, maxv, ext)
def IsValue(self):
return self.type == 'SingleValue' \
or self.type == 'ValueRange' \
or (self.type == 'Intersection' and (self.subtype[0].IsValue() or self.subtype[1].IsValue())) \
or (self.type == 'Union' and (self.subtype[0].IsValue() and self.subtype[1].IsValue()))
def GetValue(self, ectx):
(minv, maxv, ext) = ('MIN', 'MAX', False)
if self.IsValue():
if self.type == 'SingleValue':
minv = ectx.value_get_eth(self.subtype)
maxv = ectx.value_get_eth(self.subtype)
ext = hasattr(self, 'ext') and self.ext
elif self.type == 'ValueRange':
minv = ectx.value_get_eth(self.subtype[0])
maxv = ectx.value_get_eth(self.subtype[1])
ext = hasattr(self, 'ext') and self.ext
elif self.type == 'Intersection':
if self.subtype[0].IsValue() and not self.subtype[1].IsValue():
(minv, maxv, ext) = self.subtype[0].GetValue(ectx)
elif not self.subtype[0].IsValue() and self.subtype[1].IsValue():
(minv, maxv, ext) = self.subtype[1].GetValue(ectx)
elif self.subtype[0].IsValue() and self.subtype[1].IsValue():
v0 = self.subtype[0].GetValue(ectx)
v1 = self.subtype[1].GetValue(ectx)
(minv, maxv, ext) = (ectx.value_max(v0[0],v1[0]), ectx.value_min(v0[1],v1[1]), v0[2] and v1[2])
elif self.type == 'Union':
if self.subtype[0].IsValue() and self.subtype[1].IsValue():
v0 = self.subtype[0].GetValue(ectx)
v1 = self.subtype[1].GetValue(ectx)
(minv, maxv, ext) = (ectx.value_min(v0[0],v1[0]), ectx.value_max(v0[1],v1[1]), v0[2] or v1[2])
return (minv, maxv, ext)
def IsAlphabet(self):
return self.type == 'SingleValue' \
or self.type == 'ValueRange' \
or (self.type == 'Intersection' and (self.subtype[0].IsAlphabet() or self.subtype[1].IsAlphabet())) \
or (self.type == 'Union' and (self.subtype[0].IsAlphabet() and self.subtype[1].IsAlphabet()))
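# GetAlphabet() reconstructs a permitted alphabet as a quoted string:
# a ValueRange such as "a".."d" is expanded character by character to
# "abcd", and a Union of two quoted alphabets is concatenated.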
def GetAlphabet(self, ectx):
alph = None
if self.IsAlphabet():
if self.type == 'SingleValue':
alph = ectx.value_get_eth(self.subtype)
elif self.type == 'ValueRange':
if ((len(self.subtype[0]) == 3) and ((self.subtype[0][0] + self.subtype[0][-1]) == '""') \
and (len(self.subtype[1]) == 3) and ((self.subtype[1][0] + self.subtype[1][-1]) == '""')):
alph = '"'
for c in range(ord(self.subtype[0][1]), ord(self.subtype[1][1]) + 1):
alph += chr(c)
alph += '"'
elif self.type == 'Union':
if self.subtype[0].IsAlphabet() and self.subtype[1].IsAlphabet():
a0 = self.subtype[0].GetAlphabet(ectx)
a1 = self.subtype[1].GetAlphabet(ectx)
if (((a0[0] + a0[-1]) == '""') and not a0.count('"', 1, -1) \
and ((a1[0] + a1[-1]) == '""') and not a1.count('"', 1, -1)):
alph = '"' + a0[1:-1] + a1[1:-1] + '"'
else:
alph = a0 + ' ' + a1
return alph
def IsPermAlph(self):
return self.type == 'From' and self.subtype.IsAlphabet() \
or (self.type == 'Intersection' and (self.subtype[0].IsPermAlph() or self.subtype[1].IsPermAlph()))
def GetPermAlph(self, ectx):
alph = None
if self.IsPermAlph():
if self.type == 'From':
alph = self.subtype.GetAlphabet(ectx)
elif self.type == 'Intersection':
if self.subtype[0].IsPermAlph() and not self.subtype[1].IsPermAlph():
alph = self.subtype[0].GetPermAlph(ectx)
elif not self.subtype[0].IsPermAlph() and self.subtype[1].IsPermAlph():
alph = self.subtype[1].GetPermAlph(ectx)
return alph
def IsContents(self):
return self.type == 'Contents' \
or (self.type == 'Intersection' and (self.subtype[0].IsContents() or self.subtype[1].IsContents()))
def GetContents(self, ectx):
contents = None
if self.IsContents():
if self.type == 'Contents':
if self.subtype.type == 'Type_Ref':
contents = self.subtype.val
elif self.type == 'Intersection':
if self.subtype[0].IsContents() and not self.subtype[1].IsContents():
contents = self.subtype[0].GetContents(ectx)
elif not self.subtype[0].IsContents() and self.subtype[1].IsContents():
contents = self.subtype[1].GetContents(ectx)
return contents
def IsNegativ(self):
def is_neg(sval):
return isinstance(sval, str) and (sval[0] == '-')
if self.type == 'SingleValue':
return is_neg(self.subtype)
elif self.type == 'ValueRange':
if self.subtype[0] == 'MIN': return True
return is_neg(self.subtype[0])
return False
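# eth_constrname() derives the suffix used in generated type names:
# SingleValue 5 -> '5', ValueRange 1..10 -> '1_10', a negative bound
# gets an 'M' prefix (-5 -> 'M5'), SIZE constraints get a 'SIZE_'
# prefix, and anything else falls back to a numbered CONSTRnnn name;
# a trailing '_' marks an extensible constraint.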
def eth_constrname(self):
def int2str(val):
if isinstance(val, Value_Ref):
return asn2c(val.val)
try:
if (int(val) < 0):
return 'M' + str(-int(val))
else:
return str(int(val))
except (ValueError, TypeError):
return asn2c(str(val))
ext = ''
if hasattr(self, 'ext') and self.ext:
ext = '_'
if self.type == 'SingleValue':
return int2str(self.subtype) + ext
elif self.type == 'ValueRange':
return int2str(self.subtype[0]) + '_' + int2str(self.subtype[1]) + ext
elif self.type == 'Size':
return 'SIZE_' + self.subtype.eth_constrname() + ext
else:
if (not hasattr(self, 'constr_num')):
global constr_cnt
constr_cnt += 1
self.constr_num = constr_cnt
return 'CONSTR%03d%s' % (self.constr_num, ext)
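# 64-bit handling is needed when the constrained value range does not
# fit into 32 bits: INTEGER (0..4294967295) still fits, while a wider
# span switches the generated code to the 64-bit dissector variants.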
def Needs64b(self, ectx):
(minv, maxv, ext) = self.GetValue(ectx)
if (str(minv).isdigit() or ((str(minv)[0] == "-") and str(minv)[1:].isdigit())) \
and str(maxv).isdigit() and (abs(int(maxv) - int(minv)) >= 2**32):
return True
return False
class Module (Node):
def to_python (self, ctx):
ctx.tag_def = self.tag_def.dfl_tag
return """#%s
%s""" % (self.ident, self.body.to_python (ctx))
def get_name(self):
return self.ident.val
def get_proto(self, ectx):
if (ectx.proto):
prot = ectx.proto
else:
prot = ectx.conform.use_item('MODULE', self.get_name(), val_dflt=self.get_name())
return prot
def to_eth(self, ectx):
ectx.tags_def = 'EXPLICIT' # default = explicit
ectx.proto = self.get_proto(ectx)
ectx.tag_def = self.tag_def.dfl_tag
ectx.eth_reg_module(self)
self.body.to_eth(ectx)
class Module_Body (Node):
def to_python (self, ctx):
# XXX handle exports, imports.
l = [x.to_python (ctx) for x in self.assign_list]
l = [a for a in l if a != '']
return "\n".join (l)
def to_eth(self, ectx):
# Exports
ectx.eth_exports(self.exports)
# Imports
for i in self.imports:
mod = i.module.val
proto = ectx.conform.use_item('MODULE', mod, val_dflt=mod)
ectx.eth_module_dep_add(ectx.Module(), mod)
for s in i.symbol_list:
if isinstance(s, Type_Ref):
ectx.eth_import_type(s.val, mod, proto)
elif isinstance(s, Value_Ref):
ectx.eth_import_value(s.val, mod, proto)
elif isinstance(s, Class_Ref):
ectx.eth_import_class(s.val, mod, proto)
else:
msg = 'Unknown kind of imported symbol %s from %s' % (str(s), mod)
warnings.warn_explicit(msg, UserWarning, '', 0)
# AssignmentList
for a in self.assign_list:
a.eth_reg('', ectx)
class Default_Tags (Node):
def to_python (self, ctx): # not to be used directly
assert (0)
# XXX should just calculate dependencies as we go along.
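# Walk a parse-tree node recursively and record every Type_Ref name it
# mentions; 'dict' is used as a set of referenced type names.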
def calc_dependencies (node, dict, trace = 0):
if not hasattr (node, '__dict__'):
if trace: print("#returning, node=", node)
return
if isinstance (node, Type_Ref):
dict [node.val] = 1
if trace: print("#Setting", node.val)
return
for (a, val) in list(node.__dict__.items ()):
if trace: print("# Testing node ", node, "attr", a, " val", val)
if a[0] == '_':
continue
elif isinstance (val, Node):
calc_dependencies (val, dict, trace)
elif isinstance (val, type ([])):
for v in val:
calc_dependencies (v, dict, trace)
class Type_Assign (Node):
def __init__ (self, *args, **kw):
Node.__init__ (self, *args, **kw)
if isinstance (self.val, Tag): # XXX replace with generalized get_typ_ignoring_tag (no-op for Node, override in Tag)
to_test = self.val.typ
else:
to_test = self.val
if isinstance (to_test, SequenceType):
to_test.sequence_name = self.name.name
def to_python (self, ctx):
dep_dict = {}
calc_dependencies (self.val, dep_dict, 0)
depend_list = list(dep_dict.keys ())
return ctx.register_assignment (self.name.name,
self.val.to_python (ctx),
depend_list)
class PyQuote (Node):
def to_python (self, ctx):
return ctx.register_pyquote (self.val)
#--- Type_Ref -----------------------------------------------------------------
class Type_Ref (Type):
def to_python (self, ctx):
return self.val
def eth_reg_sub(self, ident, ectx):
ectx.eth_dep_add(ident, self.val)
def eth_tname(self):
if self.HasSizeConstraint():
return asn2c(self.val) + '_' + self.constr.eth_constrname()
else:
return asn2c(self.val)
def tr_need_own_fn(self, ectx):
return ectx.Per() and self.HasSizeConstraint()
def fld_obj_repr(self, ectx):
return self.val
def get_components(self, ectx):
if self.val not in ectx.type or ectx.type[self.val]['import']:
msg = "Cannot get COMPONENTS OF %s: unknown or imported type" % (self.val)
warnings.warn_explicit(msg, UserWarning, '', 0)
return []
else:
return ectx.type[self.val]['val'].get_components(ectx)
def GetTTag(self, ectx):
#print "GetTTag(%s)\n" % self.val;
if (ectx.type[self.val]['import']):
if 'ttag' not in ectx.type[self.val]:
ttag = ectx.get_ttag_from_all(self.val, ectx.type[self.val]['import'])
if not ttag and not ectx.conform.check_item('IMPORT_TAG', self.val):
msg = 'Missing tag information for imported type %s from %s (%s)' % (self.val, ectx.type[self.val]['import'], ectx.type[self.val]['proto'])
warnings.warn_explicit(msg, UserWarning, '', 0)
ttag = ('-1/*imported*/', '-1/*imported*/')
ectx.type[self.val]['ttag'] = ectx.conform.use_item('IMPORT_TAG', self.val, val_dflt=ttag)
return ectx.type[self.val]['ttag']
else:
return ectx.type[self.val]['val'].GetTag(ectx)
def IndetermTag(self, ectx):
if (ectx.type[self.val]['import']):
return False
else:
return ectx.type[self.val]['val'].IndetermTag(ectx)
def eth_type_default_pars(self, ectx, tname):
if tname:
pars = Type.eth_type_default_pars(self, ectx, tname)
else:
pars = {}
t = ectx.type[self.val]['ethname']
pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto']
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
if self.HasSizeConstraint():
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx)
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),))
elif (ectx.Per()):
if self.HasSizeConstraint():
body = ectx.eth_fn_call('dissect_%(ER)s_size_constrained_type', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),
('"%(TYPE_REF_TNAME)s"', '%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),))
else:
body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- SelectionType ------------------------------------------------------------
class SelectionType (Type):
def to_python (self, ctx):
return self.val
def sel_of_typeref(self):
return self.typ.type == 'Type_Ref'
def eth_reg_sub(self, ident, ectx):
if not self.sel_of_typeref():
self.seltype = ''
return
self.seltype = ectx.eth_sel_req(self.typ.val, self.sel)
ectx.eth_dep_add(ident, self.seltype)
def eth_ftype(self, ectx):
(ftype, display) = ('FT_NONE', 'BASE_NONE')
if self.sel_of_typeref() and not ectx.type[self.seltype]['import']:
(ftype, display) = ectx.type[self.typ.val]['val'].eth_ftype_sel(self.sel, ectx)
return (ftype, display)
def GetTTag(self, ectx):
#print "GetTTag(%s)\n" % self.seltype;
if (ectx.type[self.seltype]['import']):
if 'ttag' not in ectx.type[self.seltype]:
if not ectx.conform.check_item('IMPORT_TAG', self.seltype):
msg = 'Missing tag information for imported type %s from %s (%s)' % (self.seltype, ectx.type[self.seltype]['import'], ectx.type[self.seltype]['proto'])
warnings.warn_explicit(msg, UserWarning, '', 0)
ectx.type[self.seltype]['ttag'] = ectx.conform.use_item('IMPORT_TAG', self.seltype, val_dflt=('-1 /*imported*/', '-1 /*imported*/'))
return ectx.type[self.seltype]['ttag']
else:
return ectx.type[self.typ.val]['val'].GetTTagSel(self.sel, ectx)
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
if self.sel_of_typeref():
t = ectx.type[self.seltype]['ethname']
pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto']
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
return pars
def eth_type_default_body(self, ectx, tname):
if not self.sel_of_typeref():
body = '#error Can not decode %s' % (tname)
elif (ectx.Ber()):
body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),))
elif (ectx.Per()):
body = ectx.eth_fn_call('%(TYPE_REF_FN)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- TaggedType -----------------------------------------------------------------
class TaggedType (Type):
def eth_tname(self):
tn = ''
for i in range(self.tstrip, len(self.val.tags)):
tn += self.val.tags[i].eth_tname()
tn += '_'
tn += self.val.eth_tname()
return tn
def eth_set_val_name(self, ident, val_name, ectx):
#print "TaggedType::eth_set_val_name(): ident=%s, val_name=%s" % (ident, val_name)
self.val_name = val_name
ectx.eth_dep_add(ident, self.val_name)
def eth_reg_sub(self, ident, ectx):
self.val_name = ident + '/' + UNTAG_TYPE_NAME
self.val.eth_reg(self.val_name, ectx, tstrip=self.tstrip+1, tagflag=True, parent=ident)
def GetTTag(self, ectx):
#print "GetTTag(%s)\n" % self.seltype;
return self.GetTag(ectx)
def eth_ftype(self, ectx):
return self.val.eth_ftype(ectx)
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
t = ectx.type[self.val_name]['ethname']
pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto']
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
(pars['TAG_CLS'], pars['TAG_TAG']) = self.GetTag(ectx)
if self.HasImplicitTag(ectx):
pars['TAG_IMPL'] = 'TRUE'
else:
pars['TAG_IMPL'] = 'FALSE'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_tagged_type', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(HF_INDEX)s', '%(TAG_CLS)s', '%(TAG_TAG)s', '%(TAG_IMPL)s', '%(TYPE_REF_FN)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- SqType -----------------------------------------------------------
class SqType (Type):
def out_item(self, f, val, optional, ext, ectx):
if (val.eth_omit_field()):
t = ectx.type[val.ident]['ethname']
fullname = ectx.dummy_eag_field
else:
ef = ectx.field[f]['ethname']
t = ectx.eth_hf[ef]['ethtype']
fullname = ectx.eth_hf[ef]['fullname']
if (ectx.Ber()):
#print "optional=%s, e.val.HasOwnTag()=%s, e.val.IndetermTag()=%s" % (str(e.optional), str(e.val.HasOwnTag()), str(e.val.IndetermTag(ectx)))
#print val.str_depth(1)
opt = ''
if (optional):
opt = 'BER_FLAGS_OPTIONAL'
if (not val.HasOwnTag()):
if (opt): opt += '|'
opt += 'BER_FLAGS_NOOWNTAG'
elif (val.HasImplicitTag(ectx)):
if (opt): opt += '|'
opt += 'BER_FLAGS_IMPLTAG'
if (val.IndetermTag(ectx)):
if (opt): opt += '|'
opt += 'BER_FLAGS_NOTCHKTAG'
if (not opt): opt = '0'
else:
if optional:
opt = 'ASN1_OPTIONAL'
else:
opt = 'ASN1_NOT_OPTIONAL'
if (ectx.Ber()):
(tc, tn) = val.GetTag(ectx)
out = ' { %-24s, %-13s, %s, %s, dissect_%s_%s },\n' \
% ('&'+fullname, tc, tn, opt, ectx.eth_type[t]['proto'], t)
elif (ectx.Per()):
out = ' { %-24s, %-23s, %-17s, dissect_%s_%s },\n' \
% ('&'+fullname, ext, opt, ectx.eth_type[t]['proto'], t)
else:
out = ''
return out
#--- SeqType -----------------------------------------------------------
class SeqType (SqType):
def all_components(self):
lst = self.elt_list[:]
if hasattr(self, 'ext_list'):
lst.extend(self.ext_list)
if hasattr(self, 'elt_list2'):
lst.extend(self.elt_list2)
return lst
def need_components(self):
lst = self.all_components()
for e in (lst):
if e.type == 'components_of':
return True
return False
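# Replace each COMPONENTS OF reference by the components of the
# referenced type, e.g. for
#   Foo ::= SEQUENCE { a INTEGER, b BOOLEAN }
#   Bar ::= SEQUENCE { COMPONENTS OF Foo, c OCTET STRING }
# the element list of Bar becomes { a, b, c }; one reference is
# expanded per pass until none remain.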
def expand_components(self, ectx):
while self.need_components():
for i in range(len(self.elt_list)):
if self.elt_list[i].type == 'components_of':
comp = self.elt_list[i].typ.get_components(ectx)
self.elt_list[i:i+1] = comp
break
if hasattr(self, 'ext_list'):
for i in range(len(self.ext_list)):
if self.ext_list[i].type == 'components_of':
comp = self.ext_list[i].typ.get_components(ectx)
self.ext_list[i:i+1] = comp
break
if hasattr(self, 'elt_list2'):
for i in range(len(self.elt_list2)):
if self.elt_list2[i].type == 'components_of':
comp = self.elt_list2[i].typ.get_components(ectx)
self.elt_list2[i:i+1] = comp
break
def get_components(self, ectx):
lst = self.elt_list[:]
if hasattr(self, 'elt_list2'):
lst.extend(self.elt_list2)
return lst
def eth_reg_sub(self, ident, ectx, components_available=False):
# check if autotag is required
autotag = False
if (ectx.NeedTags() and (ectx.tag_def == 'AUTOMATIC')):
autotag = True
lst = self.all_components()
for e in (lst):
if e.val.HasOwnTag(): autotag = False; break;
# expand COMPONENTS OF
if self.need_components():
if components_available:
self.expand_components(ectx)
else:
ectx.eth_comp_req(ident)
return
# extension addition groups
if hasattr(self, 'ext_list'):
if (ectx.Per()): # add names
eag_num = 1
for e in (self.ext_list):
if isinstance(e.val, ExtensionAdditionGroup):
e.val.parent_ident = ident
e.val.parent_tname = ectx.type[ident]['tname']
if (e.val.ver):
e.val.SetName("eag_v%s" % (e.val.ver))
else:
e.val.SetName("eag_%d" % (eag_num))
eag_num += 1;
else: # expand
new_ext_list = []
for e in (self.ext_list):
if isinstance(e.val, ExtensionAdditionGroup):
new_ext_list.extend(e.val.elt_list)
else:
new_ext_list.append(e)
self.ext_list = new_ext_list
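# With AUTOMATIC TAGS in force and no component tagged explicitly,
# every component receives an IMPLICIT context-specific tag numbered
# 0, 1, 2, ... in textual order; extension additions continue the
# numbering.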
# do autotag
if autotag:
atag = 0
for e in (self.elt_list):
e.val.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT'))
atag += 1
if autotag and hasattr(self, 'elt_list2'):
for e in (self.elt_list2):
e.val.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT'))
atag += 1
if autotag and hasattr(self, 'ext_list'):
for e in (self.ext_list):
e.val.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT'))
atag += 1
# register components
for e in (self.elt_list):
e.val.eth_reg(ident, ectx, tstrip=1, parent=ident)
if hasattr(self, 'ext_list'):
for e in (self.ext_list):
e.val.eth_reg(ident, ectx, tstrip=1, parent=ident)
if hasattr(self, 'elt_list2'):
for e in (self.elt_list2):
e.val.eth_reg(ident, ectx, tstrip=1, parent=ident)
def eth_type_default_table(self, ectx, tname):
#print "eth_type_default_table(tname='%s')" % (tname)
fname = ectx.eth_type[tname]['ref'][0]
table = "static const %(ER)s_sequence_t %(TABLE)s[] = {\n"
if hasattr(self, 'ext_list'):
ext = 'ASN1_EXTENSION_ROOT'
else:
ext = 'ASN1_NO_EXTENSIONS'
empty_ext_flag = '0'
if (len(self.elt_list)==0) and hasattr(self, 'ext_list') and (len(self.ext_list)==0) and (not hasattr(self, 'elt_list2') or (len(self.elt_list2)==0)):
empty_ext_flag = ext
for e in (self.elt_list):
f = fname + '/' + e.val.name
table += self.out_item(f, e.val, e.optional, ext, ectx)
if hasattr(self, 'ext_list'):
for e in (self.ext_list):
f = fname + '/' + e.val.name
table += self.out_item(f, e.val, e.optional, 'ASN1_NOT_EXTENSION_ROOT', ectx)
if hasattr(self, 'elt_list2'):
for e in (self.elt_list2):
f = fname + '/' + e.val.name
table += self.out_item(f, e.val, e.optional, ext, ectx)
if (ectx.Ber()):
table += " { NULL, 0, 0, 0, NULL }\n};\n"
else:
table += " { NULL, %s, 0, NULL }\n};\n" % (empty_ext_flag)
return table
#--- SeqOfType -----------------------------------------------------------
class SeqOfType (SqType):
def eth_type_default_table(self, ectx, tname):
#print "eth_type_default_table(tname='%s')" % (tname)
fname = ectx.eth_type[tname]['ref'][0]
if self.val.IsNamed ():
f = fname + '/' + self.val.name
else:
f = fname + '/' + ITEM_FIELD_NAME
table = "static const %(ER)s_sequence_t %(TABLE)s[1] = {\n"
table += self.out_item(f, self.val, False, 'ASN1_NO_EXTENSIONS', ectx)
table += "};\n"
return table
#--- SequenceOfType -----------------------------------------------------------
class SequenceOfType (SeqOfType):
def to_python (self, ctx):
# name, tag (None for no tag, EXPLICIT() for explicit), typ)
# or '' + (1,) for optional
sizestr = ''
if self.size_constr != None:
print("#Ignoring size constraint:", self.size_constr.subtype)
return "%sasn1.SEQUENCE_OF (%s%s)" % (ctx.spaces (),
self.val.to_python (ctx),
sizestr)
def eth_reg_sub(self, ident, ectx):
itmnm = ident
if not self.val.IsNamed ():
itmnm += '/' + ITEM_FIELD_NAME
self.val.eth_reg(itmnm, ectx, tstrip=1, idx='[##]', parent=ident)
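# Generated type names mirror the ASN.1 notation, e.g.
# SEQUENCE (SIZE(1..4)) OF Foo -> 'SEQUENCE_SIZE_1_4_OF_Foo'.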
def eth_tname(self):
if self.val.type != 'Type_Ref':
return '#' + self.type + '_' + str(id(self))
if not self.HasConstraint():
return "SEQUENCE_OF_" + self.val.eth_tname()
elif self.constr.IsSize():
return 'SEQUENCE_' + self.constr.eth_constrname() + '_OF_' + self.val.eth_tname()
else:
return '#' + self.type + '_' + str(id(self))
def eth_ftype(self, ectx):
return ('FT_UINT32', 'BASE_DEC')
def eth_need_tree(self):
return True
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_SEQUENCE')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx)
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_sequence_of'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasSizeConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_sequence_of', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_sequence_of', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
elif (ectx.Per() and not self.HasConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_sequence_of', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),))
elif (ectx.Per() and self.constr.type == 'Size'):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_sequence_of', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),
('%(MIN_VAL)s', '%(MAX_VAL)s','%(EXT)s'),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- SetOfType ----------------------------------------------------------------
class SetOfType (SeqOfType):
def eth_reg_sub(self, ident, ectx):
itmnm = ident
if not self.val.IsNamed ():
itmnm += '/' + ITEM_FIELD_NAME
self.val.eth_reg(itmnm, ectx, tstrip=1, idx='(##)', parent=ident)
def eth_tname(self):
if self.val.type != 'Type_Ref':
return '#' + self.type + '_' + str(id(self))
if not self.HasConstraint():
return "SET_OF_" + self.val.eth_tname()
elif self.constr.IsSize():
return 'SET_' + self.constr.eth_constrname() + '_OF_' + self.val.eth_tname()
else:
return '#' + self.type + '_' + str(id(self))
def eth_ftype(self, ectx):
return ('FT_UINT32', 'BASE_DEC')
def eth_need_tree(self):
return True
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_SET')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx)
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_set_of'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasSizeConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_set_of', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_set_of', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
elif (ectx.Per() and not self.HasConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_set_of', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),))
elif (ectx.Per() and self.constr.type == 'Size'):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_set_of', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),
('%(MIN_VAL)s', '%(MAX_VAL)s','%(EXT)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
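# Build the tag constructor for the legacy Python output, e.g. class
# CONTEXT, number 3, EXPLICIT -> 'asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG)'.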
def mk_tag_str (ctx, cls, typ, num):
# XXX should do conversion to int earlier!
val = int (num)
typ = typ.upper()
if typ == 'DEFAULT':
typ = ctx.tags_def
return 'asn1.%s(%d,cls=asn1.%s_FLAG)' % (typ, val, cls) # XXX still needed?
#--- SequenceType -------------------------------------------------------------
class SequenceType (SeqType):
def to_python (self, ctx):
# name, tag (None for no tag, EXPLICIT() for explicit), typ)
# or '' + (1,) for optional
# XXX should also collect names for SEQUENCE inside SEQUENCE or
# CHOICE or SEQUENCE_OF (where should the SEQUENCE_OF name come
# from? for others, element or arm name would be fine)
seq_name = getattr (self, 'sequence_name', None)
if seq_name == None:
seq_name = 'None'
else:
seq_name = "'" + seq_name + "'"
if 'ext_list' in self.__dict__:
return "%sasn1.SEQUENCE ([%s], ext=[%s], seq_name = %s)" % (ctx.spaces (),
self.elts_to_py (self.elt_list, ctx),
self.elts_to_py (self.ext_list, ctx), seq_name)
else:
return "%sasn1.SEQUENCE ([%s]), seq_name = %s" % (ctx.spaces (),
self.elts_to_py (self.elt_list, ctx), seq_name)
def elts_to_py (self, list, ctx):
# we have elt_type, val= named_type, maybe default=, optional=
# named_type node: either ident = or typ =
# need to dismember these in order to generate Python output syntax.
ctx.indent ()
def elt_to_py (e):
assert (e.type == 'elt_type')
nt = e.val
optflag = e.optional
#assert (not hasattr (e, 'default')) # XXX add support for DEFAULT!
assert (nt.type == 'named_type')
tagstr = 'None'
identstr = nt.ident
if hasattr (nt.typ, 'type') and nt.typ.type == 'tag': # ugh
tagstr = mk_tag_str (ctx,nt.typ.tag.cls,
nt.typ.tag.tag_typ,nt.typ.tag.num)
nt = nt.typ
return "('%s',%s,%s,%d)" % (identstr, tagstr,
nt.typ.to_python (ctx), optflag)
indentstr = ",\n" + ctx.spaces ()
rv = indentstr.join ([elt_to_py (e) for e in list])
ctx.outdent ()
return rv
def eth_need_tree(self):
return True
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_SEQUENCE')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_sequence'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_sequence', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_sequence', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- ExtensionAdditionGroup ---------------------------------------------------
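# PER encodes the components of a version bracket ([[ ... ]]) as one
# optional group dissected by dissect_%(ER)s_sequence_eag(); for BER
# the group is simply flattened into the extension list (see
# SeqType.eth_reg_sub above).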
class ExtensionAdditionGroup (SeqType):
def __init__(self,*args, **kw) :
self.parent_ident = None
self.parent_tname = None
SeqType.__init__ (self,*args, **kw)
def eth_omit_field(self):
return True
def eth_tname(self):
if (self.parent_tname and self.IsNamed()):
return self.parent_tname + "_" + self.name
else:
return SeqType.eth_tname(self)
def eth_reg_sub(self, ident, ectx):
ectx.eth_dummy_eag_field_required()
ectx.eth_dep_add(self.parent_ident, ident)
SeqType.eth_reg_sub(self, ident, ectx)
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_sequence'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_sequence_eag', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(TABLE)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- SetType ------------------------------------------------------------------
class SetType (SeqType):
def eth_need_tree(self):
return True
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_SET')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_set'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_set', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_set', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- ChoiceType ---------------------------------------------------------------
class ChoiceType (Type):
def to_python (self, ctx):
# name, tag (None for no tag, EXPLICIT() for explicit), typ)
# or '' + (1,) for optional
if 'ext_list' in self.__dict__:
return "%sasn1.CHOICE ([%s], ext=[%s])" % (ctx.spaces (),
self.elts_to_py (self.elt_list, ctx),
self.elts_to_py (self.ext_list, ctx))
else:
return "%sasn1.CHOICE ([%s])" % (ctx.spaces (), self.elts_to_py (self.elt_list, ctx))
def elts_to_py (self, list, ctx):
ctx.indent ()
def elt_to_py (nt):
assert (nt.type == 'named_type')
tagstr = 'None'
if hasattr (nt, 'ident'):
identstr = nt.ident
else:
if hasattr (nt.typ, 'val'):
identstr = nt.typ.val # XXX, making up name
elif hasattr (nt.typ, 'name'):
identstr = nt.typ.name
else:
identstr = ctx.make_new_name ()
if hasattr (nt.typ, 'type') and nt.typ.type == 'tag': # ugh
tagstr = mk_tag_str (ctx,nt.typ.tag.cls,
nt.typ.tag.tag_typ,nt.typ.tag.num)
nt = nt.typ
return "('%s',%s,%s)" % (identstr, tagstr,
nt.typ.to_python (ctx))
indentstr = ",\n" + ctx.spaces ()
rv = indentstr.join ([elt_to_py (e) for e in list])
ctx.outdent ()
return rv
def eth_reg_sub(self, ident, ectx):
#print "eth_reg_sub(ident='%s')" % (ident)
# check if autotag is required
autotag = False
if (ectx.NeedTags() and (ectx.tag_def == 'AUTOMATIC')):
autotag = True
for e in (self.elt_list):
if e.HasOwnTag(): autotag = False; break;
if autotag and hasattr(self, 'ext_list'):
for e in (self.ext_list):
if e.HasOwnTag(): autotag = False; break;
# do autotag
if autotag:
atag = 0
for e in (self.elt_list):
e.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT'))
atag += 1
if autotag and hasattr(self, 'ext_list'):
for e in (self.ext_list):
e.AddTag(Tag(cls = 'CONTEXT', num = str(atag), mode = 'IMPLICIT'))
atag += 1
for e in (self.elt_list):
e.eth_reg(ident, ectx, tstrip=1, parent=ident)
if ectx.conform.check_item('EXPORTS', ident + '.' + e.name):
ectx.eth_sel_req(ident, e.name)
if hasattr(self, 'ext_list'):
for e in (self.ext_list):
e.eth_reg(ident, ectx, tstrip=1, parent=ident)
if ectx.conform.check_item('EXPORTS', ident + '.' + e.name):
ectx.eth_sel_req(ident, e.name)
def sel_item(self, ident, sel, ectx):
lst = self.elt_list[:]
if hasattr(self, 'ext_list'):
lst.extend(self.ext_list)
ee = None
for e in (lst):
if e.IsNamed() and (e.name == sel):
ee = e
break
if not ee:
print("#CHOICE %s does not contain item %s" % (ident, sel))
return ee
def sel_req(self, ident, sel, ectx):
#print "sel_req(ident='%s', sel=%s)\n%s" % (ident, sel, str(self))
ee = self.sel_item(ident, sel, ectx)
if ee:
ee.eth_reg(ident, ectx, tstrip=0, selflag=True)
def eth_ftype(self, ectx):
return ('FT_UINT32', 'BASE_DEC')
def eth_ftype_sel(self, sel, ectx):
ee = self.sel_item('', sel, ectx)
if ee:
return ee.eth_ftype(ectx)
else:
return ('FT_NONE', 'BASE_NONE')
def eth_strings(self):
return '$$'
def eth_need_tree(self):
return True
def eth_has_vals(self):
return True
def GetTTag(self, ectx):
lst = self.elt_list
cls = 'BER_CLASS_ANY/*choice*/'
#if hasattr(self, 'ext_list'):
# lst.extend(self.ext_list)
#if (len(lst) > 0):
# cls = lst[0].GetTag(ectx)[0]
#for e in (lst):
# if (e.GetTag(ectx)[0] != cls):
# cls = '-1/*choice*/'
return (cls, '-1/*choice*/')
def GetTTagSel(self, sel, ectx):
ee = self.sel_item('', sel, ectx)
if ee:
return ee.GetTag(ectx)
else:
return ('BER_CLASS_ANY/*unknown selection*/', '-1/*unknown selection*/')
def IndetermTag(self, ectx):
#print "Choice IndetermTag()=%s" % (str(not self.HasOwnTag()))
return not self.HasOwnTag()
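# Tag numbers can serve as the CHOICE's value_string values only when
# every alternative carries a tag of the same non-UNIVERSAL class;
# detect_tagval() checks this, otherwise sequential indexes are used.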
def detect_tagval(self, ectx):
tagval = False
lst = self.elt_list[:]
if hasattr(self, 'ext_list'):
lst.extend(self.ext_list)
if (len(lst) > 0) and (not ectx.Per() or lst[0].HasOwnTag()):
t = lst[0].GetTag(ectx)[0]
tagval = True
else:
t = ''
tagval = False
if (t == 'BER_CLASS_UNI'):
tagval = False
for e in (lst):
if not ectx.Per() or e.HasOwnTag():
tt = e.GetTag(ectx)[0]
else:
tt = ''
tagval = False
if (tt != t):
tagval = False
return tagval
def get_vals(self, ectx):
tagval = self.detect_tagval(ectx)
vals = []
cnt = 0
for e in (self.elt_list):
if (tagval): val = e.GetTag(ectx)[1]
else: val = str(cnt)
vals.append((val, e.name))
cnt += 1
if hasattr(self, 'ext_list'):
for e in (self.ext_list):
if (tagval): val = e.GetTag(ectx)[1]
else: val = str(cnt)
vals.append((val, e.name))
cnt += 1
return vals
def eth_type_vals(self, tname, ectx):
out = '\n'
vals = self.get_vals(ectx)
out += ectx.eth_vals(tname, vals)
return out
def reg_enum_vals(self, tname, ectx):
vals = self.get_vals(ectx)
for (val, id) in vals:
ectx.eth_reg_value(id, self, val, ethname=ectx.eth_enum_item(tname, id))
def eth_type_enum(self, tname, ectx):
out = '\n'
vals = self.get_vals(ectx)
out += ectx.eth_enum(tname, vals)
return out
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_choice'
return pars
def eth_type_default_table(self, ectx, tname):
def out_item(val, e, ext, ectx):
has_enum = ectx.eth_type[tname]['enum'] & EF_ENUM
if (has_enum):
vval = ectx.eth_enum_item(tname, e.name)
else:
vval = val
f = fname + '/' + e.name
ef = ectx.field[f]['ethname']
t = ectx.eth_hf[ef]['ethtype']
if (ectx.Ber()):
opt = ''
if (not e.HasOwnTag()):
opt = 'BER_FLAGS_NOOWNTAG'
elif (e.HasImplicitTag(ectx)):
if (opt): opt += '|'
opt += 'BER_FLAGS_IMPLTAG'
if (not opt): opt = '0'
if (ectx.Ber()):
(tc, tn) = e.GetTag(ectx)
out = ' { %3s, %-24s, %-13s, %s, %s, dissect_%s_%s },\n' \
% (vval, '&'+ectx.eth_hf[ef]['fullname'], tc, tn, opt, ectx.eth_type[t]['proto'], t)
elif (ectx.Per()):
out = ' { %3s, %-24s, %-23s, dissect_%s_%s },\n' \
% (vval, '&'+ectx.eth_hf[ef]['fullname'], ext, ectx.eth_type[t]['proto'], t)
else:
out = ''
return out
# end out_item()
#print "eth_type_default_table(tname='%s')" % (tname)
fname = ectx.eth_type[tname]['ref'][0]
tagval = self.detect_tagval(ectx)
table = "static const %(ER)s_choice_t %(TABLE)s[] = {\n"
cnt = 0
if hasattr(self, 'ext_list'):
ext = 'ASN1_EXTENSION_ROOT'
else:
ext = 'ASN1_NO_EXTENSIONS'
empty_ext_flag = '0'
if (len(self.elt_list)==0) and hasattr(self, 'ext_list') and (len(self.ext_list)==0):
empty_ext_flag = ext
for e in (self.elt_list):
if (tagval): val = e.GetTag(ectx)[1]
else: val = str(cnt)
table += out_item(val, e, ext, ectx)
cnt += 1
if hasattr(self, 'ext_list'):
for e in (self.ext_list):
if (tagval): val = e.GetTag(ectx)[1]
else: val = str(cnt)
table += out_item(val, e, 'ASN1_NOT_EXTENSION_ROOT', ectx)
cnt += 1
if (ectx.Ber()):
table += " { 0, NULL, 0, 0, 0, NULL }\n};\n"
else:
table += " { 0, NULL, %s, NULL }\n};\n" % (empty_ext_flag)
return table
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_choice', ret='offset',
par=(('%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_choice', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ETT_INDEX)s', '%(TABLE)s',),
('%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- ChoiceValue ----------------------------------------------------
class ChoiceValue (Value):
def to_str(self, ectx):
return self.val.to_str(ectx)
def fld_obj_eq(self, other):
return isinstance(other, ChoiceValue) and (self.choice == other.choice) and (str(self.val.val) == str(other.val.val))
#--- EnumeratedType -----------------------------------------------------------
class EnumeratedType (Type):
def to_python (self, ctx):
def strify_one (named_num):
return "%s=%s" % (named_num.ident, named_num.val)
return "asn1.ENUM(%s)" % ",".join (map (strify_one, self.val))
def eth_ftype(self, ectx):
return ('FT_UINT32', 'BASE_DEC')
def eth_strings(self):
return '$$'
def eth_has_vals(self):
return True
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_ENUMERATED')
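# get_vals_etc() assigns a value to every enumeration item: named
# numbers keep their explicit value, unnumbered items are given
# successive unused values starting from 0.  For PER a value map table
# is emitted whenever the assigned values are not simply 0..n-1.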
def get_vals_etc(self, ectx):
vals = []
lastv = 0
used = {}
maxv = 0
root_num = 0
ext_num = 0
map_table = []
for e in (self.val):
if e.type == 'NamedNumber':
used[int(e.val)] = True
for e in (self.val):
if e.type == 'NamedNumber':
val = int(e.val)
else:
while lastv in used:
lastv += 1
val = lastv
used[val] = True
vals.append((val, e.ident))
map_table.append(val)
root_num += 1
if val > maxv:
maxv = val
if self.ext is not None:
for e in (self.ext):
if e.type == 'NamedNumber':
used[int(e.val)] = True
for e in (self.ext):
if e.type == 'NamedNumber':
val = int(e.val)
else:
while lastv in used:
lastv += 1
val = lastv
used[val] = True
vals.append((val, e.ident))
map_table.append(val)
ext_num += 1
if val > maxv:
maxv = val
need_map = False
for i in range(len(map_table)):
need_map = need_map or (map_table[i] != i)
if (not need_map):
map_table = None
return (vals, root_num, ext_num, map_table)
def eth_type_vals(self, tname, ectx):
out = '\n'
vals = self.get_vals_etc(ectx)[0]
out += ectx.eth_vals(tname, vals)
return out
def reg_enum_vals(self, tname, ectx):
vals = self.get_vals_etc(ectx)[0]
for (val, id) in vals:
ectx.eth_reg_value(id, self, val, ethname=ectx.eth_enum_item(tname, id))
def eth_type_enum(self, tname, ectx):
out = '\n'
vals = self.get_vals_etc(ectx)[0]
out += ectx.eth_enum(tname, vals)
return out
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
(root_num, ext_num, map_table) = self.get_vals_etc(ectx)[1:]
if (self.ext != None):
ext = 'TRUE'
else:
ext = 'FALSE'
pars['ROOT_NUM'] = str(root_num)
pars['EXT'] = ext
pars['EXT_NUM'] = str(ext_num)
if (map_table):
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_value_map'
else:
pars['TABLE'] = 'NULL'
return pars
def eth_type_default_table(self, ectx, tname):
if (not ectx.Per()): return ''
map_table = self.get_vals_etc(ectx)[3]
if (map_table == None): return ''
table = "static guint32 %(TABLE)s[%(ROOT_NUM)s+%(EXT_NUM)s] = {"
table += ", ".join([str(v) for v in map_table])
table += "};\n"
return table
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasValueConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_integer', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_integer', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_enumerated', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(ROOT_NUM)s', '%(VAL_PTR)s', '%(EXT)s', '%(EXT_NUM)s', '%(TABLE)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- EmbeddedPDVType -----------------------------------------------------------
class EmbeddedPDVType (Type):
def eth_tname(self):
return 'EMBEDDED_PDV'
def eth_ftype(self, ectx):
return ('FT_NONE', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_EMBEDDED_PDV')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
if ectx.default_embedded_pdv_cb:
pars['TYPE_REF_FN'] = ectx.default_embedded_pdv_cb
else:
pars['TYPE_REF_FN'] = 'NULL'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_EmbeddedPDV_Type', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_embedded_pdv', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- ExternalType -----------------------------------------------------------
class ExternalType (Type):
def eth_tname(self):
return 'EXTERNAL'
def eth_ftype(self, ectx):
return ('FT_NONE', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_EXTERNAL')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
if ectx.default_external_type_cb:
pars['TYPE_REF_FN'] = ectx.default_external_type_cb
else:
pars['TYPE_REF_FN'] = 'NULL'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_external_type', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_external_type', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- OpenType -----------------------------------------------------------
class OpenType (Type):
def to_python (self, ctx):
return "asn1.ANY"
def single_type(self):
if (self.HasConstraint() and
self.constr.type == 'Type' and
self.constr.subtype.type == 'Type_Ref'):
return self.constr.subtype.val
return None
def eth_reg_sub(self, ident, ectx):
t = self.single_type()
if t:
ectx.eth_dep_add(ident, t)
def eth_tname(self):
t = self.single_type()
if t:
return 'OpenType_' + t
else:
return Type.eth_tname(self)
def eth_ftype(self, ectx):
return ('FT_NONE', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_ANY', '0')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['FN_VARIANT'] = ectx.default_opentype_variant
t = self.single_type()
if t:
t = ectx.type[t]['ethname']
pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto']
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
else:
pars['TYPE_REF_FN'] = 'NULL'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_open_type%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- InstanceOfType -----------------------------------------------------------
class InstanceOfType (Type):
def eth_tname(self):
return 'INSTANCE_OF'
def eth_ftype(self, ectx):
return ('FT_NONE', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_EXTERNAL')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
if ectx.default_external_type_cb:
pars['TYPE_REF_FN'] = ectx.default_external_type_cb
else:
pars['TYPE_REF_FN'] = 'NULL'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_external_type', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(HF_INDEX)s', '%(TYPE_REF_FN)s',),))
elif (ectx.Per()):
body = '#error Can not decode %s' % (tname)
else:
body = '#error Can not decode %s' % (tname)
return body
#--- AnyType -----------------------------------------------------------
class AnyType (Type):
def to_python (self, ctx):
return "asn1.ANY"
def eth_ftype(self, ectx):
return ('FT_NONE', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_ANY', '0')
def eth_type_default_body(self, ectx, tname):
body = '#error Can not decode %s' % (tname)
return body
class Literal (Node):
def to_python (self, ctx):
return self.val
#--- NullType -----------------------------------------------------------------
class NullType (Type):
def to_python (self, ctx):
return 'asn1.NULL'
def eth_tname(self):
return 'NULL'
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_NULL')
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_null', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_null', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- NullValue ----------------------------------------------------
class NullValue (Value):
def to_str(self, ectx):
return 'NULL'
#--- RealType -----------------------------------------------------------------
class RealType (Type):
def to_python (self, ctx):
return 'asn1.REAL'
def eth_tname(self):
return 'REAL'
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_REAL')
def eth_ftype(self, ectx):
return ('FT_DOUBLE', 'BASE_NONE')
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_real', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_real', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- BooleanType --------------------------------------------------------------
class BooleanType (Type):
def to_python (self, ctx):
return 'asn1.BOOLEAN'
def eth_tname(self):
return 'BOOLEAN'
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_BOOLEAN')
def eth_ftype(self, ectx):
return ('FT_BOOLEAN', 'BASE_NONE')
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_boolean', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s', '%(VAL_PTR)s'),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_boolean', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- OctetStringType ----------------------------------------------------------
class OctetStringType (Type):
def to_python (self, ctx):
return 'asn1.OCTSTRING'
def eth_tname(self):
if not self.HasConstraint():
return 'OCTET_STRING'
elif self.constr.type == 'Size':
return 'OCTET_STRING' + '_' + self.constr.eth_constrname()
else:
return '#' + self.type + '_' + str(id(self))
def eth_ftype(self, ectx):
return ('FT_BYTES', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_OCTETSTRING')
def eth_need_pdu(self, ectx):
pdu = None
if self.HasContentsConstraint():
t = self.constr.GetContents(ectx)
if t and (ectx.default_containing_variant in ('_pdu', '_pdu_new')):
pdu = { 'type' : t,
'new' : ectx.default_containing_variant == '_pdu_new' }
return pdu
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx)
if self.HasContentsConstraint():
pars['FN_VARIANT'] = ectx.default_containing_variant
t = self.constr.GetContents(ectx)
if t:
if pars['FN_VARIANT'] in ('_pdu', '_pdu_new'):
t = ectx.field[t]['ethname']
pars['TYPE_REF_PROTO'] = ''
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_TNAME)s'
else:
t = ectx.type[t]['ethname']
pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto']
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
else:
pars['TYPE_REF_FN'] = 'NULL'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasSizeConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_octet_string', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_octet_string', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per()):
if self.HasContentsConstraint():
body = ectx.eth_fn_call('dissect_%(ER)s_octet_string_containing%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(TYPE_REF_FN)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_octet_string', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- CharacterStringType ------------------------------------------------------
class CharacterStringType (Type):
def eth_tname(self):
if not self.HasConstraint():
return self.eth_tsname()
elif self.constr.type == 'Size':
return self.eth_tsname() + '_' + self.constr.eth_constrname()
else:
return '#' + self.type + '_' + str(id(self))
def eth_ftype(self, ectx):
return ('FT_STRING', 'BASE_NONE')
class RestrictedCharacterStringType (CharacterStringType):
def to_python (self, ctx):
return 'asn1.' + self.eth_tsname()
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_' + self.eth_tsname())
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx)
(pars['STRING_TYPE'], pars['STRING_TAG']) = (self.eth_tsname(), self.GetTTag(ectx)[1])
(pars['ALPHABET'], pars['ALPHABET_LEN']) = self.eth_get_alphabet_constr(ectx)
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasSizeConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_restricted_string', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(STRING_TAG)s'),
('%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_restricted_string', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(STRING_TAG)s'),
('%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per() and self.HasPermAlph()):
body = ectx.eth_fn_call('dissect_%(ER)s_restricted_character_string', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(ALPHABET)s', '%(ALPHABET_LEN)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per()):
if (self.eth_tsname() == 'GeneralString'):
body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),))
elif (self.eth_tsname() == 'GeneralizedTime'):
body = ectx.eth_fn_call('dissect_%(ER)s_VisibleString', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),))
elif (self.eth_tsname() == 'UTCTime'):
body = ectx.eth_fn_call('dissect_%(ER)s_VisibleString', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
class BMPStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'BMPString'
class GeneralStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'GeneralString'
class GraphicStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'GraphicString'
class IA5StringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'IA5String'
class NumericStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'NumericString'
class PrintableStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'PrintableString'
class TeletexStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'TeletexString'
class T61StringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'T61String'
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_TeletexString')
class UniversalStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'UniversalString'
class UTF8StringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'UTF8String'
class VideotexStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'VideotexString'
class VisibleStringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'VisibleString'
class ISO646StringType (RestrictedCharacterStringType):
def eth_tsname(self):
return 'ISO646String'
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_VisibleString')
class UnrestrictedCharacterStringType (CharacterStringType):
def to_python (self, ctx):
return 'asn1.UnrestrictedCharacterString'
def eth_tsname(self):
return 'CHARACTER_STRING'
#--- UsefulType ---------------------------------------------------------------
class GeneralizedTime (RestrictedCharacterStringType):
def eth_tsname(self):
return 'GeneralizedTime'
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),))
return body
else:
return RestrictedCharacterStringType.eth_type_default_body(self, ectx, tname)
class UTCTime (RestrictedCharacterStringType):
def eth_tsname(self):
return 'UTCTime'
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_%(STRING_TYPE)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),))
return body
else:
return RestrictedCharacterStringType.eth_type_default_body(self, ectx, tname)
class ObjectDescriptor (RestrictedCharacterStringType):
def eth_tsname(self):
return 'ObjectDescriptor'
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = RestrictedCharacterStringType.eth_type_default_body(self, ectx, tname)
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_object_descriptor', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- ObjectIdentifierType -----------------------------------------------------
class ObjectIdentifierType (Type):
def to_python (self, ctx):
return 'asn1.OBJECT_IDENTIFIER'
def eth_tname(self):
return 'OBJECT_IDENTIFIER'
def eth_ftype(self, ectx):
return ('FT_OID', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_OID')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['FN_VARIANT'] = ectx.default_oid_variant
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_object_identifier%(FN_VARIANT)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_object_identifier%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- ObjectIdentifierValue ----------------------------------------------------
class ObjectIdentifierValue (Value):
def get_num(self, path, val):
return str(oid_names.get(path + '/' + val, val))
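# Render an OID value as a C string literal, e.g. { 1 0 8571 } ->
# "1.0.8571".  When the first component is a value reference, its C
# symbol is emitted in front of the literal, relying on C string
# concatenation to join the two at compile time.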
def to_str(self, ectx):
out = ''
path = ''
first = True
sep = ''
for v in self.comp_list:
if isinstance(v, Node) and (v.type == 'name_and_number'):
vstr = v.number
elif v.isdigit():
vstr = v
else:
vstr = self.get_num(path, v)
if not first and not vstr.isdigit():
vstr = ectx.value_get_val(vstr)
if first:
if vstr.isdigit():
out += '"' + vstr
else:
out += ectx.value_get_eth(vstr) + '"'
else:
out += sep + vstr
path += sep + vstr
first = False
sep = '.'
out += '"'
return out
def get_dep(self):
v = self.comp_list[0]
if isinstance(v, Node) and (v.type == 'name_and_number'):
return None
elif v.isdigit():
return None
else:
vstr = self.get_num('', v)
if vstr.isdigit():
return None
else:
return vstr
class NamedNumber(Node):
def to_python (self, ctx):
return "('%s',%s)" % (self.ident, self.val)
class NamedNumListBase(Node):
def to_python (self, ctx):
return "asn1.%s_class ([%s])" % (self.asn1_typ,",".join (
[x.to_python (ctx) for x in self.named_list]))
#--- RelativeOIDType ----------------------------------------------------------
class RelativeOIDType (Type):
def eth_tname(self):
return 'RELATIVE_OID'
def eth_ftype(self, ectx):
return ('FT_REL_OID', 'BASE_NONE')
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_RELATIVE_OID')
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['FN_VARIANT'] = ectx.default_oid_variant
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
body = ectx.eth_fn_call('dissect_%(ER)s_relative_oid%(FN_VARIANT)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
elif (ectx.Per()):
body = ectx.eth_fn_call('dissect_%(ER)s_relative_oid%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- IntegerType --------------------------------------------------------------
class IntegerType (Type):
def to_python (self, ctx):
return "asn1.INTEGER_class ([%s])" % (",".join (
[x.to_python (ctx) for x in self.named_list]))
def add_named_value(self, ident, val):
e = NamedNumber(ident = ident, val = val)
if not self.named_list:
self.named_list = []
self.named_list.append(e)
def eth_tname(self):
if self.named_list:
return Type.eth_tname(self)
if not self.HasConstraint():
return 'INTEGER'
elif self.constr.type == 'SingleValue' or self.constr.type == 'ValueRange':
return 'INTEGER' + '_' + self.constr.eth_constrname()
else:
return 'INTEGER' + '_' + self.constr.eth_tname()
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_INTEGER')
def eth_ftype(self, ectx):
if self.HasConstraint():
if not self.constr.IsNegativ():
if self.constr.Needs64b(ectx):
return ('FT_UINT64', 'BASE_DEC')
else:
return ('FT_UINT32', 'BASE_DEC')
if self.constr.Needs64b(ectx):
return ('FT_INT64', 'BASE_DEC')
return ('FT_INT32', 'BASE_DEC')
def eth_strings(self):
if (self.named_list):
return '$$'
else:
return 'NULL'
def eth_has_vals(self):
if (self.named_list):
return True
else:
return False
def get_vals(self, ectx):
vals = []
for e in (self.named_list):
vals.append((int(e.val), e.ident))
return vals
def eth_type_vals(self, tname, ectx):
if not self.eth_has_vals(): return ''
out = '\n'
vals = self.get_vals(ectx)
out += ectx.eth_vals(tname, vals)
return out
def reg_enum_vals(self, tname, ectx):
vals = self.get_vals(ectx)
for (val, id) in vals:
ectx.eth_reg_value(id, self, val, ethname=ectx.eth_enum_item(tname, id))
def eth_type_enum(self, tname, ectx):
if not self.eth_has_enum(tname, ectx): return ''
out = '\n'
vals = self.get_vals(ectx)
out += ectx.eth_enum(tname, vals)
return out
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
if self.HasValueConstraint():
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_value_constr(ectx)
if (pars['FN_VARIANT'] == '') and self.constr.Needs64b(ectx):
if ectx.Ber(): pars['FN_VARIANT'] = '64'
else: pars['FN_VARIANT'] = '_64b'
return pars
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasValueConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_integer%(FN_VARIANT)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(HF_INDEX)s', '%(VAL_PTR)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_integer%(FN_VARIANT)s', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s', '%(HF_INDEX)s'),
('%(VAL_PTR)s',),))
elif (ectx.Per() and not self.HasValueConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_integer%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s', '%(VAL_PTR)s'),))
elif (ectx.Per() and self.HasValueConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_integer%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(VAL_PTR)s', '%(EXT)s'),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- BitStringType ------------------------------------------------------------
class BitStringType (Type):
def to_python (self, ctx):
return "asn1.BITSTRING_class ([%s])" % (",".join (
[x.to_python (ctx) for x in self.named_list]))
def eth_tname(self):
if self.named_list:
return Type.eth_tname(self)
elif not self.HasConstraint():
return 'BIT_STRING'
elif self.constr.IsSize():
return 'BIT_STRING' + '_' + self.constr.eth_constrname()
else:
return '#' + self.type + '_' + str(id(self))
def GetTTag(self, ectx):
return ('BER_CLASS_UNI', 'BER_UNI_TAG_BITSTRING')
def eth_ftype(self, ectx):
return ('FT_BYTES', 'BASE_NONE')
def eth_need_tree(self):
return self.named_list
def eth_need_pdu(self, ectx):
pdu = None
if self.HasContentsConstraint():
t = self.constr.GetContents(ectx)
if t and (ectx.default_containing_variant in ('_pdu', '_pdu_new')):
pdu = { 'type' : t,
'new' : ectx.default_containing_variant == '_pdu_new' }
return pdu
def eth_named_bits(self):
bits = []
if (self.named_list):
for e in (self.named_list):
bits.append((int(e.val), e.ident))
return bits
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
pars['LEN_PTR'] = 'NULL'
(pars['MIN_VAL'], pars['MAX_VAL'], pars['EXT']) = self.eth_get_size_constr(ectx)
if 'ETT_INDEX' not in pars:
pars['ETT_INDEX'] = '-1'
pars['TABLE'] = 'NULL'
if self.eth_named_bits():
pars['TABLE'] = '%(PROTOP)s%(TNAME)s_bits'
if self.HasContentsConstraint():
pars['FN_VARIANT'] = ectx.default_containing_variant
t = self.constr.GetContents(ectx)
if t:
if pars['FN_VARIANT'] in ('_pdu', '_pdu_new'):
t = ectx.field[t]['ethname']
pars['TYPE_REF_PROTO'] = ''
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_TNAME)s'
else:
t = ectx.type[t]['ethname']
pars['TYPE_REF_PROTO'] = ectx.eth_type[t]['proto']
pars['TYPE_REF_TNAME'] = t
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
else:
pars['TYPE_REF_FN'] = 'NULL'
return pars
def eth_type_default_table(self, ectx, tname):
#print "eth_type_default_table(tname='%s')" % (tname)
table = ''
bits = self.eth_named_bits()
if (bits and ectx.Ber()):
table = ectx.eth_bits(tname, bits)
return table
def eth_type_default_body(self, ectx, tname):
if (ectx.Ber()):
if (ectx.constraints_check and self.HasSizeConstraint()):
body = ectx.eth_fn_call('dissect_%(ER)s_constrained_bitstring', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),
('%(VAL_PTR)s',),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_bitstring', ret='offset',
par=(('%(IMPLICIT_TAG)s', '%(ACTX)s', '%(TREE)s', '%(TVB)s', '%(OFFSET)s'),
('%(TABLE)s', '%(HF_INDEX)s', '%(ETT_INDEX)s',),
('%(VAL_PTR)s',),))
elif (ectx.Per()):
if self.HasContentsConstraint():
body = ectx.eth_fn_call('dissect_%(ER)s_bit_string_containing%(FN_VARIANT)s', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(TYPE_REF_FN)s'),))
else:
body = ectx.eth_fn_call('dissect_%(ER)s_bit_string', ret='offset',
par=(('%(TVB)s', '%(OFFSET)s', '%(ACTX)s', '%(TREE)s', '%(HF_INDEX)s'),
('%(MIN_VAL)s', '%(MAX_VAL)s', '%(EXT)s', '%(VAL_PTR)s', '%(LEN_PTR)s'),))
else:
body = '#error Can not decode %s' % (tname)
return body
#--- BStringValue ------------------------------------------------------------
bstring_tab = {
'0000' : '0',
'0001' : '1',
'0010' : '2',
'0011' : '3',
'0100' : '4',
'0101' : '5',
'0110' : '6',
'0111' : '7',
'1000' : '8',
'1001' : '9',
'1010' : 'A',
'1011' : 'B',
'1100' : 'C',
'1101' : 'D',
'1110' : 'E',
'1111' : 'F',
}
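# BStringValue turns an ASN.1 binary string literal into a C hex literal,
# zero-padding to a whole number of octets: e.g. '0101'B -> 0x50.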
class BStringValue (Value):
def to_str(self, ectx):
v = self.val[1:-2]
if len(v) % 8:
v += '0' * (8 - len(v) % 8)
vv = '0x'
for i in (list(range(0, len(v), 4))):
vv += bstring_tab[v[i:i+4]]
return vv
#--- HStringValue ------------------------------------------------------------
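# HStringValue turns an ASN.1 hex string literal into a C hex literal,
# e.g. '1A2B'H -> 0x1A2B; __int__() yields its numeric value (6699 here).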
class HStringValue (Value):
def to_str(self, ectx):
vv = '0x'
vv += self.val[1:-2]
return vv
def __int__(self):
return int(self.val[1:-2], 16)
#--- FieldSpec ----------------------------------------------------------------
class FieldSpec (Node):
def __init__(self,*args, **kw) :
self.name = None
Node.__init__ (self,*args, **kw)
def SetName(self, name):
self.name = name
def get_repr(self):
return ['#UNSUPPORTED_' + self.type]
def fld_repr(self):
repr = [self.name]
repr.extend(self.get_repr())
return repr
class TypeFieldSpec (FieldSpec):
def get_repr(self):
return []
class FixedTypeValueFieldSpec (FieldSpec):
def get_repr(self):
if isinstance(self.typ, Type_Ref):
repr = ['TypeReference', self.typ.val]
else:
repr = [self.typ.type]
return repr
class VariableTypeValueFieldSpec (FieldSpec):
def get_repr(self):
return ['_' + self.type]
class FixedTypeValueSetFieldSpec (FieldSpec):
def get_repr(self):
return ['_' + self.type]
class ObjectFieldSpec (FieldSpec):
def get_repr(self):
return ['ClassReference', self.cls.val]
class ObjectSetFieldSpec (FieldSpec):
def get_repr(self):
return ['ClassReference', self.cls.val]
#==============================================================================
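# The p_* functions below are PLY (Python Lex-Yacc) grammar actions: the
# docstring holds the production, t[1..n] carry the semantic values of the
# right-hand-side symbols and the result is assigned to t[0].  Illustrative
# sketch (not part of this grammar):
#
#   def p_pair (t):
#       'pair : NUMBER COMMA NUMBER'
#       t[0] = (t[1], t[3])   # keep the two numbers, drop the comma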
def p_module_list_1 (t):
'module_list : module_list ModuleDefinition'
t[0] = t[1] + [t[2]]
def p_module_list_2 (t):
'module_list : ModuleDefinition'
t[0] = [t[1]]
#--- ITU-T Recommendation X.680 -----------------------------------------------
# 11 ASN.1 lexical items --------------------------------------------------------
# 11.2 Type references
def p_type_ref (t):
'type_ref : UCASE_IDENT'
t[0] = Type_Ref(val=t[1])
# 11.3 Identifiers
def p_identifier (t):
'identifier : LCASE_IDENT'
t[0] = t[1]
# 11.4 Value references
# causes a reduce/reduce conflict
#def p_valuereference (t):
# 'valuereference : LCASE_IDENT'
# t[0] = Value_Ref(val=t[1])
# 11.5 Module references
def p_modulereference (t):
'modulereference : UCASE_IDENT'
t[0] = t[1]
# 12 Module definition --------------------------------------------------------
# 12.1
def p_ModuleDefinition (t):
'ModuleDefinition : ModuleIdentifier DEFINITIONS TagDefault ASSIGNMENT ModuleBegin BEGIN ModuleBody END'
t[0] = Module (ident = t[1], tag_def = t[3], body = t[7])
def p_ModuleBegin (t):
'ModuleBegin : '
if t[-4].val == 'Remote-Operations-Information-Objects':
x880_module_begin()
def p_TagDefault_1 (t):
'''TagDefault : EXPLICIT TAGS
| IMPLICIT TAGS
| AUTOMATIC TAGS '''
t[0] = Default_Tags (dfl_tag = t[1])
def p_TagDefault_2 (t):
'TagDefault : '
# 12.2 The "TagDefault" is taken as EXPLICIT TAGS if it is "empty".
t[0] = Default_Tags (dfl_tag = 'EXPLICIT')
def p_ModuleIdentifier_1 (t):
'ModuleIdentifier : modulereference DefinitiveIdentifier' # name, oid
t [0] = Node('module_ident', val = t[1], ident = t[2])
def p_ModuleIdentifier_2 (t):
'ModuleIdentifier : modulereference' # name, oid
t [0] = Node('module_ident', val = t[1], ident = None)
def p_DefinitiveIdentifier (t):
'DefinitiveIdentifier : ObjectIdentifierValue'
t[0] = t[1]
#def p_module_ref (t):
# 'module_ref : UCASE_IDENT'
# t[0] = t[1]
def p_ModuleBody_1 (t):
'ModuleBody : Exports Imports AssignmentList'
t[0] = Module_Body (exports = t[1], imports = t[2], assign_list = t[3])
def p_ModuleBody_2 (t):
'ModuleBody : '
t[0] = Node ('module_body', exports = [], imports = [], assign_list = [])
def p_Exports_1 (t):
'Exports : EXPORTS syms_exported SEMICOLON'
t[0] = t[2]
def p_Exports_2 (t):
'Exports : EXPORTS ALL SEMICOLON'
t[0] = [ 'ALL' ]
def p_Exports_3 (t):
'Exports : '
t[0] = [ 'ALL' ]
def p_syms_exported_1 (t):
'syms_exported : exp_sym_list'
t[0] = t[1]
def p_syms_exported_2 (t):
'syms_exported : '
t[0] = []
def p_exp_sym_list_1 (t):
'exp_sym_list : Symbol'
t[0] = [t[1]]
def p_exp_sym_list_2 (t):
'exp_sym_list : exp_sym_list COMMA Symbol'
t[0] = t[1] + [t[3]]
def p_Imports_1 (t):
'Imports : importsbegin IMPORTS SymbolsImported SEMICOLON'
t[0] = t[3]
global lcase_ident_assigned
lcase_ident_assigned = {}
def p_importsbegin (t):
'importsbegin : '
global lcase_ident_assigned
global g_conform
lcase_ident_assigned = {}
lcase_ident_assigned.update(g_conform.use_item('ASSIGNED_ID', 'OBJECT_IDENTIFIER'))
def p_Imports_2 (t):
'Imports : '
t[0] = []
def p_SymbolsImported_1(t):
'SymbolsImported : '
t[0] = []
def p_SymbolsImported_2 (t):
'SymbolsImported : SymbolsFromModuleList'
t[0] = t[1]
def p_SymbolsFromModuleList_1 (t):
'SymbolsFromModuleList : SymbolsFromModuleList SymbolsFromModule'
t[0] = t[1] + [t[2]]
def p_SymbolsFromModuleList_2 (t):
'SymbolsFromModuleList : SymbolsFromModule'
t[0] = [t[1]]
def p_SymbolsFromModule (t):
'SymbolsFromModule : SymbolList FROM GlobalModuleReference'
t[0] = Node ('SymbolList', symbol_list = t[1], module = t[3])
for s in (t[0].symbol_list):
if (isinstance(s, Value_Ref)): lcase_ident_assigned[s.val] = t[3]
import_symbols_from_module(t[0].module, t[0].symbol_list)
def import_symbols_from_module(module, symbol_list):
if module.val == 'Remote-Operations-Information-Objects':
for i in range(len(symbol_list)):
s = symbol_list[i]
if isinstance(s, Type_Ref) or isinstance(s, Class_Ref):
x880_import(s.val)
if isinstance(s, Type_Ref) and is_class_ident(s.val):
symbol_list[i] = Class_Ref (val = s.val)
return
for i in range(len(symbol_list)):
s = symbol_list[i]
if isinstance(s, Type_Ref) and is_class_ident("$%s$%s" % (module.val, s.val)):
import_class_from_module(module.val, s.val)
if isinstance(s, Type_Ref) and is_class_ident(s.val):
symbol_list[i] = Class_Ref (val = s.val)
def p_GlobalModuleReference (t):
'GlobalModuleReference : modulereference AssignedIdentifier'
t [0] = Node('module_ident', val = t[1], ident = t[2])
def p_AssignedIdentifier_1 (t):
'AssignedIdentifier : ObjectIdentifierValue'
t[0] = t[1]
def p_AssignedIdentifier_2 (t):
'AssignedIdentifier : LCASE_IDENT_ASSIGNED'
t[0] = t[1]
def p_AssignedIdentifier_3 (t):
'AssignedIdentifier : '
pass
def p_SymbolList_1 (t):
'SymbolList : Symbol'
t[0] = [t[1]]
def p_SymbolList_2 (t):
'SymbolList : SymbolList COMMA Symbol'
t[0] = t[1] + [t[3]]
def p_Symbol (t):
'''Symbol : Reference
| ParameterizedReference'''
t[0] = t[1]
def p_Reference_1 (t):
'''Reference : type_ref
| objectclassreference '''
t[0] = t[1]
def p_Reference_2 (t):
'''Reference : LCASE_IDENT_ASSIGNED
                 | identifier ''' # instead of valuereference which causes a reduce/reduce conflict
t[0] = Value_Ref(val=t[1])
def p_AssignmentList_1 (t):
'AssignmentList : AssignmentList Assignment'
t[0] = t[1] + [t[2]]
def p_AssignmentList_2 (t):
'AssignmentList : Assignment SEMICOLON'
t[0] = [t[1]]
def p_AssignmentList_3 (t):
'AssignmentList : Assignment'
t[0] = [t[1]]
def p_Assignment (t):
'''Assignment : TypeAssignment
| ValueAssignment
| ValueSetTypeAssignment
| ObjectClassAssignment
| ObjectAssignment
| ObjectSetAssignment
| ParameterizedAssignment
| pyquote '''
t[0] = t[1]
# 13 Referencing type and value definitions -----------------------------------
# 13.1
def p_DefinedType (t):
'''DefinedType : ExternalTypeReference
| type_ref
| ParameterizedType'''
t[0] = t[1]
def p_DefinedValue_1(t):
'''DefinedValue : ExternalValueReference'''
t[0] = t[1]
def p_DefinedValue_2(t):
    '''DefinedValue : identifier ''' # instead of valuereference which causes a reduce/reduce conflict
t[0] = Value_Ref(val=t[1])
# 13.6
def p_ExternalTypeReference (t):
'ExternalTypeReference : modulereference DOT type_ref'
t[0] = Node ('ExternalTypeReference', module = t[1], typ = t[3])
def p_ExternalValueReference (t):
'ExternalValueReference : modulereference DOT identifier'
t[0] = Node ('ExternalValueReference', module = t[1], ident = t[3])
# 15 Assigning types and values -----------------------------------------------
# 15.1
def p_TypeAssignment (t):
'TypeAssignment : UCASE_IDENT ASSIGNMENT Type'
t[0] = t[3]
t[0].SetName(t[1])
# 15.2
def p_ValueAssignment (t):
'ValueAssignment : LCASE_IDENT ValueType ASSIGNMENT Value'
t[0] = ValueAssignment(ident = t[1], typ = t[2], val = t[4])
# only "simple" types are supported, to keep the grammar manageable
def p_ValueType (t):
'''ValueType : type_ref
| BooleanType
| IntegerType
| ObjectIdentifierType
| OctetStringType
| RealType '''
t[0] = t[1]
# 15.6
def p_ValueSetTypeAssignment (t):
'ValueSetTypeAssignment : UCASE_IDENT ValueType ASSIGNMENT ValueSet'
t[0] = Node('ValueSetTypeAssignment', name=t[1], typ=t[2], val=t[4])
# 15.7
def p_ValueSet (t):
'ValueSet : lbraceignore rbraceignore'
t[0] = None
# 16 Definition of types and values -------------------------------------------
# 16.1
def p_Type (t):
'''Type : BuiltinType
| ReferencedType
| ConstrainedType'''
t[0] = t[1]
# 16.2
def p_BuiltinType (t):
'''BuiltinType : AnyType
| BitStringType
| BooleanType
| CharacterStringType
| ChoiceType
| EmbeddedPDVType
| EnumeratedType
| ExternalType
| InstanceOfType
| IntegerType
| NullType
| ObjectClassFieldType
| ObjectIdentifierType
| OctetStringType
| RealType
| RelativeOIDType
| SequenceType
| SequenceOfType
| SetType
| SetOfType
| TaggedType'''
t[0] = t[1]
# 16.3
def p_ReferencedType (t):
'''ReferencedType : DefinedType
| UsefulType
| SelectionType'''
t[0] = t[1]
# 16.5
def p_NamedType (t):
'NamedType : identifier Type'
t[0] = t[2]
t[0].SetName (t[1])
# 16.7
def p_Value (t):
'''Value : BuiltinValue
| ReferencedValue
| ObjectClassFieldValue'''
t[0] = t[1]
# 16.9
def p_BuiltinValue (t):
'''BuiltinValue : BooleanValue
| ChoiceValue
| IntegerValue
| ObjectIdentifierValue
| RealValue
| SequenceValue
| hex_string
| binary_string
| char_string''' # XXX we don't support {data} here
t[0] = t[1]
# 16.11
def p_ReferencedValue (t):
'''ReferencedValue : DefinedValue
| ValueFromObject'''
t[0] = t[1]
# 16.13
#def p_NamedValue (t):
# 'NamedValue : identifier Value'
# t[0] = Node ('NamedValue', ident = t[1], value = t[2])
# 17 Notation for the boolean type --------------------------------------------
# 17.1
def p_BooleanType (t):
'BooleanType : BOOLEAN'
t[0] = BooleanType ()
# 17.2
def p_BooleanValue (t):
'''BooleanValue : TRUE
| FALSE'''
t[0] = t[1]
# 18 Notation for the integer type --------------------------------------------
# 18.1
def p_IntegerType_1 (t):
'IntegerType : INTEGER'
t[0] = IntegerType (named_list = None)
def p_IntegerType_2 (t):
'IntegerType : INTEGER LBRACE NamedNumberList RBRACE'
t[0] = IntegerType(named_list = t[3])
def p_NamedNumberList_1 (t):
'NamedNumberList : NamedNumber'
t[0] = [t[1]]
def p_NamedNumberList_2 (t):
'NamedNumberList : NamedNumberList COMMA NamedNumber'
t[0] = t[1] + [t[3]]
def p_NamedNumber (t):
'''NamedNumber : identifier LPAREN SignedNumber RPAREN
| identifier LPAREN DefinedValue RPAREN'''
t[0] = NamedNumber(ident = t[1], val = t[3])
def p_SignedNumber_1 (t):
'SignedNumber : NUMBER'
t[0] = t [1]
def p_SignedNumber_2 (t):
'SignedNumber : MINUS NUMBER'
t[0] = '-' + t[2]
# 18.9
def p_IntegerValue (t):
'IntegerValue : SignedNumber'
t[0] = t [1]
# 19 Notation for the enumerated type -----------------------------------------
# 19.1
def p_EnumeratedType (t):
'EnumeratedType : ENUMERATED LBRACE Enumerations RBRACE'
t[0] = EnumeratedType (val = t[3]['val'], ext = t[3]['ext'])
def p_Enumerations_1 (t):
'Enumerations : Enumeration'
t[0] = { 'val' : t[1], 'ext' : None }
def p_Enumerations_2 (t):
'Enumerations : Enumeration COMMA ELLIPSIS ExceptionSpec'
t[0] = { 'val' : t[1], 'ext' : [] }
def p_Enumerations_3 (t):
'Enumerations : Enumeration COMMA ELLIPSIS ExceptionSpec COMMA Enumeration'
t[0] = { 'val' : t[1], 'ext' : t[6] }
def p_Enumeration_1 (t):
'Enumeration : EnumerationItem'
t[0] = [t[1]]
def p_Enumeration_2 (t):
'Enumeration : Enumeration COMMA EnumerationItem'
t[0] = t[1] + [t[3]]
def p_EnumerationItem (t):
'''EnumerationItem : Identifier
| NamedNumber'''
t[0] = t[1]
def p_Identifier (t):
'Identifier : identifier'
t[0] = Node ('Identifier', ident = t[1])
# 20 Notation for the real type -----------------------------------------------
# 20.1
def p_RealType (t):
'RealType : REAL'
t[0] = RealType ()
# 20.6
def p_RealValue (t):
'''RealValue : REAL_NUMBER
| SpecialRealValue'''
t[0] = t [1]
def p_SpecialRealValue (t):
'''SpecialRealValue : PLUS_INFINITY
| MINUS_INFINITY'''
t[0] = t[1]
# 21 Notation for the bitstring type ------------------------------------------
# 21.1
def p_BitStringType_1 (t):
'BitStringType : BIT STRING'
t[0] = BitStringType (named_list = None)
def p_BitStringType_2 (t):
'BitStringType : BIT STRING LBRACE NamedBitList RBRACE'
t[0] = BitStringType (named_list = t[4])
def p_NamedBitList_1 (t):
'NamedBitList : NamedBit'
t[0] = [t[1]]
def p_NamedBitList_2 (t):
'NamedBitList : NamedBitList COMMA NamedBit'
t[0] = t[1] + [t[3]]
def p_NamedBit (t):
'''NamedBit : identifier LPAREN NUMBER RPAREN
| identifier LPAREN DefinedValue RPAREN'''
t[0] = NamedNumber (ident = t[1], val = t[3])
# 22 Notation for the octetstring type ----------------------------------------
# 22.1
def p_OctetStringType (t):
'OctetStringType : OCTET STRING'
t[0] = OctetStringType ()
# 23 Notation for the null type -----------------------------------------------
# 23.1
def p_NullType (t):
'NullType : NULL'
t[0] = NullType ()
# 23.3
def p_NullValue (t):
'NullValue : NULL'
t[0] = NullValue ()
# 24 Notation for sequence types ----------------------------------------------
# 24.1
def p_SequenceType_1 (t):
'SequenceType : SEQUENCE LBRACE RBRACE'
t[0] = SequenceType (elt_list = [])
def p_SequenceType_2 (t):
'SequenceType : SEQUENCE LBRACE ComponentTypeLists RBRACE'
t[0] = SequenceType (elt_list = t[3]['elt_list'])
if 'ext_list' in t[3]:
t[0].ext_list = t[3]['ext_list']
if 'elt_list2' in t[3]:
t[0].elt_list2 = t[3]['elt_list2']
def p_ExtensionAndException_1 (t):
'ExtensionAndException : ELLIPSIS'
t[0] = []
def p_OptionalExtensionMarker_1 (t):
'OptionalExtensionMarker : COMMA ELLIPSIS'
t[0] = True
def p_OptionalExtensionMarker_2 (t):
'OptionalExtensionMarker : '
t[0] = False
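# The ComponentTypeLists variants map the possible shapes of a SEQUENCE/SET
# body; illustrative examples:
#   SEQUENCE { a INTEGER }                      -> elt_list only
#   SEQUENCE { a INTEGER, ... }                 -> elt_list, empty ext_list
#   SEQUENCE { a INTEGER, ..., b BOOLEAN }      -> elt_list + ext_list
#   SEQUENCE { a INTEGER, ..., ..., b BOOLEAN } -> elt_list + elt_list2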
def p_ComponentTypeLists_1 (t):
'ComponentTypeLists : ComponentTypeList'
t[0] = {'elt_list' : t[1]}
def p_ComponentTypeLists_2 (t):
'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException OptionalExtensionMarker'
t[0] = {'elt_list' : t[1], 'ext_list' : []}
def p_ComponentTypeLists_3 (t):
'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException ExtensionAdditionList OptionalExtensionMarker'
t[0] = {'elt_list' : t[1], 'ext_list' : t[4]}
def p_ComponentTypeLists_4 (t):
'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException ExtensionEndMarker COMMA ComponentTypeList'
t[0] = {'elt_list' : t[1], 'ext_list' : [], 'elt_list2' : t[6]}
def p_ComponentTypeLists_5 (t):
'ComponentTypeLists : ComponentTypeList COMMA ExtensionAndException ExtensionAdditionList ExtensionEndMarker COMMA ComponentTypeList'
t[0] = {'elt_list' : t[1], 'ext_list' : t[4], 'elt_list2' : t[7]}
def p_ComponentTypeLists_6 (t):
'ComponentTypeLists : ExtensionAndException OptionalExtensionMarker'
t[0] = {'elt_list' : [], 'ext_list' : []}
def p_ComponentTypeLists_7 (t):
'ComponentTypeLists : ExtensionAndException ExtensionAdditionList OptionalExtensionMarker'
t[0] = {'elt_list' : [], 'ext_list' : t[2]}
def p_ExtensionEndMarker (t):
'ExtensionEndMarker : COMMA ELLIPSIS'
pass
def p_ExtensionAdditionList_1 (t):
'ExtensionAdditionList : COMMA ExtensionAddition'
t[0] = [t[2]]
def p_ExtensionAdditionList_2 (t):
'ExtensionAdditionList : ExtensionAdditionList COMMA ExtensionAddition'
t[0] = t[1] + [t[3]]
def p_ExtensionAddition_1 (t):
'ExtensionAddition : ExtensionAdditionGroup'
t[0] = Node ('elt_type', val = t[1], optional = 0)
def p_ExtensionAddition_2 (t):
'ExtensionAddition : ComponentType'
t[0] = t[1]
def p_ExtensionAdditionGroup (t):
'ExtensionAdditionGroup : LVERBRACK VersionNumber ComponentTypeList RVERBRACK'
t[0] = ExtensionAdditionGroup (ver = t[2], elt_list = t[3])
def p_VersionNumber_1 (t):
'VersionNumber : '
def p_VersionNumber_2 (t):
'VersionNumber : NUMBER COLON'
t[0] = t[1]
def p_ComponentTypeList_1 (t):
'ComponentTypeList : ComponentType'
t[0] = [t[1]]
def p_ComponentTypeList_2 (t):
'ComponentTypeList : ComponentTypeList COMMA ComponentType'
t[0] = t[1] + [t[3]]
def p_ComponentType_1 (t):
'ComponentType : NamedType'
t[0] = Node ('elt_type', val = t[1], optional = 0)
def p_ComponentType_2 (t):
'ComponentType : NamedType OPTIONAL'
t[0] = Node ('elt_type', val = t[1], optional = 1)
def p_ComponentType_3 (t):
'ComponentType : NamedType DEFAULT DefaultValue'
t[0] = Node ('elt_type', val = t[1], optional = 1, default = t[3])
def p_ComponentType_4 (t):
'ComponentType : COMPONENTS OF Type'
t[0] = Node ('components_of', typ = t[3])
def p_DefaultValue_1 (t):
'''DefaultValue : ReferencedValue
| BooleanValue
| ChoiceValue
| IntegerValue
| RealValue
| hex_string
| binary_string
| char_string
| ObjectClassFieldValue'''
t[0] = t[1]
def p_DefaultValue_2 (t):
'DefaultValue : lbraceignore rbraceignore'
t[0] = ''
# 24.17
def p_SequenceValue_1 (t):
'SequenceValue : LBRACE RBRACE'
t[0] = []
#def p_SequenceValue_2 (t):
# 'SequenceValue : LBRACE ComponentValueList RBRACE'
# t[0] = t[2]
#def p_ComponentValueList_1 (t):
# 'ComponentValueList : NamedValue'
# t[0] = [t[1]]
#def p_ComponentValueList_2 (t):
# 'ComponentValueList : ComponentValueList COMMA NamedValue'
# t[0] = t[1] + [t[3]]
# 25 Notation for sequence-of types -------------------------------------------
# 25.1
def p_SequenceOfType (t):
'''SequenceOfType : SEQUENCE OF Type
| SEQUENCE OF NamedType'''
t[0] = SequenceOfType (val = t[3], size_constr = None)
# 26 Notation for set types ---------------------------------------------------
# 26.1
def p_SetType_1 (t):
'SetType : SET LBRACE RBRACE'
t[0] = SetType (elt_list = [])
def p_SetType_2 (t):
'SetType : SET LBRACE ComponentTypeLists RBRACE'
t[0] = SetType (elt_list = t[3]['elt_list'])
if 'ext_list' in t[3]:
t[0].ext_list = t[3]['ext_list']
if 'elt_list2' in t[3]:
t[0].elt_list2 = t[3]['elt_list2']
# 27 Notation for set-of types ------------------------------------------------
# 27.1
def p_SetOfType (t):
'''SetOfType : SET OF Type
| SET OF NamedType'''
t[0] = SetOfType (val = t[3])
# 28 Notation for choice types ------------------------------------------------
# 28.1
def p_ChoiceType (t):
'ChoiceType : CHOICE LBRACE AlternativeTypeLists RBRACE'
if 'ext_list' in t[3]:
t[0] = ChoiceType (elt_list = t[3]['elt_list'], ext_list = t[3]['ext_list'])
else:
t[0] = ChoiceType (elt_list = t[3]['elt_list'])
def p_AlternativeTypeLists_1 (t):
'AlternativeTypeLists : AlternativeTypeList'
t[0] = {'elt_list' : t[1]}
def p_AlternativeTypeLists_2 (t):
'AlternativeTypeLists : AlternativeTypeList COMMA ExtensionAndException ExtensionAdditionAlternatives OptionalExtensionMarker'
t[0] = {'elt_list' : t[1], 'ext_list' : t[4]}
def p_ExtensionAdditionAlternatives_1 (t):
'ExtensionAdditionAlternatives : ExtensionAdditionAlternativesList'
t[0] = t[1]
def p_ExtensionAdditionAlternatives_2 (t):
'ExtensionAdditionAlternatives : '
t[0] = []
def p_ExtensionAdditionAlternativesList_1 (t):
'ExtensionAdditionAlternativesList : COMMA ExtensionAdditionAlternative'
t[0] = t[2]
def p_ExtensionAdditionAlternativesList_2 (t):
'ExtensionAdditionAlternativesList : ExtensionAdditionAlternativesList COMMA ExtensionAdditionAlternative'
t[0] = t[1] + t[3]
def p_ExtensionAdditionAlternative_1 (t):
'ExtensionAdditionAlternative : NamedType'
t[0] = [t[1]]
def p_ExtensionAdditionAlternative_2 (t):
'ExtensionAdditionAlternative : ExtensionAdditionAlternativesGroup'
t[0] = t[1]
def p_ExtensionAdditionAlternativesGroup (t):
'ExtensionAdditionAlternativesGroup : LVERBRACK VersionNumber AlternativeTypeList RVERBRACK'
t[0] = t[3]
def p_AlternativeTypeList_1 (t):
'AlternativeTypeList : NamedType'
t[0] = [t[1]]
def p_AlternativeTypeList_2 (t):
'AlternativeTypeList : AlternativeTypeList COMMA NamedType'
t[0] = t[1] + [t[3]]
# 28.10
def p_ChoiceValue_1 (t):
'''ChoiceValue : identifier COLON Value
| identifier COLON NullValue '''
val = t[3]
if not isinstance(val, Value):
val = Value(val=val)
t[0] = ChoiceValue (choice = t[1], val = val)
# 29 Notation for selection types
# 29.1
def p_SelectionType (t):
'SelectionType : identifier LT Type'
t[0] = SelectionType (typ = t[3], sel = t[1])
# 30 Notation for tagged types ------------------------------------------------
# 30.1
def p_TaggedType_1 (t):
'TaggedType : Tag Type'
t[1].mode = 'default'
t[0] = t[2]
t[0].AddTag(t[1])
def p_TaggedType_2 (t):
'''TaggedType : Tag IMPLICIT Type
| Tag EXPLICIT Type'''
t[1].mode = t[2]
t[0] = t[3]
t[0].AddTag(t[1])
def p_Tag (t):
'Tag : LBRACK Class ClassNumber RBRACK'
t[0] = Tag(cls = t[2], num = t[3])
def p_ClassNumber_1 (t):
'ClassNumber : number'
t[0] = t[1]
def p_ClassNumber_2 (t):
'ClassNumber : DefinedValue'
t[0] = t[1]
def p_Class_1 (t):
'''Class : UNIVERSAL
| APPLICATION
| PRIVATE'''
t[0] = t[1]
def p_Class_2 (t):
'Class :'
t[0] = 'CONTEXT'
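# A tag without an explicit class keyword, e.g. [0], is context-specific.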
# 31 Notation for the object identifier type ----------------------------------
# 31.1
def p_ObjectIdentifierType (t):
'ObjectIdentifierType : OBJECT IDENTIFIER'
t[0] = ObjectIdentifierType()
# 31.3
def p_ObjectIdentifierValue (t):
'ObjectIdentifierValue : LBRACE oid_comp_list RBRACE'
t[0] = ObjectIdentifierValue (comp_list=t[2])
def p_oid_comp_list_1 (t):
'oid_comp_list : oid_comp_list ObjIdComponents'
t[0] = t[1] + [t[2]]
def p_oid_comp_list_2 (t):
'oid_comp_list : ObjIdComponents'
t[0] = [t[1]]
def p_ObjIdComponents (t):
'''ObjIdComponents : NameForm
| NumberForm
| NameAndNumberForm'''
t[0] = t[1]
def p_NameForm (t):
'''NameForm : LCASE_IDENT
| LCASE_IDENT_ASSIGNED'''
t [0] = t[1]
def p_NumberForm (t):
'''NumberForm : NUMBER'''
# | DefinedValue'''
t [0] = t[1]
def p_NameAndNumberForm (t):
'''NameAndNumberForm : LCASE_IDENT_ASSIGNED LPAREN NumberForm RPAREN
| LCASE_IDENT LPAREN NumberForm RPAREN'''
t[0] = Node('name_and_number', ident = t[1], number = t[3])
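# e.g. { iso standard(0) 8571 } mixes all three forms: NameForm,
# NameAndNumberForm and NumberForm.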
# 32 Notation for the relative object identifier type -------------------------
# 32.1
def p_RelativeOIDType (t):
'RelativeOIDType : RELATIVE_OID'
t[0] = RelativeOIDType()
# 33 Notation for the embedded-pdv type ---------------------------------------
# 33.1
def p_EmbeddedPDVType (t):
'EmbeddedPDVType : EMBEDDED PDV'
t[0] = EmbeddedPDVType()
# 34 Notation for the external type -------------------------------------------
# 34.1
def p_ExternalType (t):
'ExternalType : EXTERNAL'
t[0] = ExternalType()
# 36 Notation for character string types --------------------------------------
# 36.1
def p_CharacterStringType (t):
'''CharacterStringType : RestrictedCharacterStringType
| UnrestrictedCharacterStringType'''
t[0] = t[1]
# 37 Definition of restricted character string types --------------------------
def p_RestrictedCharacterStringType_1 (t):
'RestrictedCharacterStringType : BMPString'
t[0] = BMPStringType ()
def p_RestrictedCharacterStringType_2 (t):
'RestrictedCharacterStringType : GeneralString'
t[0] = GeneralStringType ()
def p_RestrictedCharacterStringType_3 (t):
'RestrictedCharacterStringType : GraphicString'
t[0] = GraphicStringType ()
def p_RestrictedCharacterStringType_4 (t):
'RestrictedCharacterStringType : IA5String'
t[0] = IA5StringType ()
def p_RestrictedCharacterStringType_5 (t):
'RestrictedCharacterStringType : ISO646String'
t[0] = ISO646StringType ()
def p_RestrictedCharacterStringType_6 (t):
'RestrictedCharacterStringType : NumericString'
t[0] = NumericStringType ()
def p_RestrictedCharacterStringType_7 (t):
'RestrictedCharacterStringType : PrintableString'
t[0] = PrintableStringType ()
def p_RestrictedCharacterStringType_8 (t):
'RestrictedCharacterStringType : TeletexString'
t[0] = TeletexStringType ()
def p_RestrictedCharacterStringType_9 (t):
'RestrictedCharacterStringType : T61String'
t[0] = T61StringType ()
def p_RestrictedCharacterStringType_10 (t):
'RestrictedCharacterStringType : UniversalString'
t[0] = UniversalStringType ()
def p_RestrictedCharacterStringType_11 (t):
'RestrictedCharacterStringType : UTF8String'
t[0] = UTF8StringType ()
def p_RestrictedCharacterStringType_12 (t):
'RestrictedCharacterStringType : VideotexString'
t[0] = VideotexStringType ()
def p_RestrictedCharacterStringType_13 (t):
'RestrictedCharacterStringType : VisibleString'
t[0] = VisibleStringType ()
# 40 Definition of unrestricted character string types ------------------------
# 40.1
def p_UnrestrictedCharacterStringType (t):
'UnrestrictedCharacterStringType : CHARACTER STRING'
t[0] = UnrestrictedCharacterStringType ()
# 41 Notation for types defined in clauses 42 to 44 ---------------------------
# 42 Generalized time ---------------------------------------------------------
def p_UsefulType_1 (t):
'UsefulType : GeneralizedTime'
t[0] = GeneralizedTime()
# 43 Universal time -----------------------------------------------------------
def p_UsefulType_2 (t):
'UsefulType : UTCTime'
t[0] = UTCTime()
# 44 The object descriptor type -----------------------------------------------
def p_UsefulType_3 (t):
'UsefulType : ObjectDescriptor'
t[0] = ObjectDescriptor()
# 45 Constrained types --------------------------------------------------------
# 45.1
def p_ConstrainedType_1 (t):
'ConstrainedType : Type Constraint'
t[0] = t[1]
t[0].AddConstraint(t[2])
def p_ConstrainedType_2 (t):
'ConstrainedType : TypeWithConstraint'
t[0] = t[1]
# 45.5
def p_TypeWithConstraint_1 (t):
'''TypeWithConstraint : SET Constraint OF Type
| SET SizeConstraint OF Type'''
t[0] = SetOfType (val = t[4], constr = t[2])
def p_TypeWithConstraint_2 (t):
'''TypeWithConstraint : SEQUENCE Constraint OF Type
| SEQUENCE SizeConstraint OF Type'''
t[0] = SequenceOfType (val = t[4], constr = t[2])
def p_TypeWithConstraint_3 (t):
'''TypeWithConstraint : SET Constraint OF NamedType
| SET SizeConstraint OF NamedType'''
t[0] = SetOfType (val = t[4], constr = t[2])
def p_TypeWithConstraint_4 (t):
'''TypeWithConstraint : SEQUENCE Constraint OF NamedType
| SEQUENCE SizeConstraint OF NamedType'''
t[0] = SequenceOfType (val = t[4], constr = t[2])
# 45.6
# 45.7
def p_Constraint (t):
'Constraint : LPAREN ConstraintSpec ExceptionSpec RPAREN'
t[0] = t[2]
def p_ConstraintSpec (t):
'''ConstraintSpec : ElementSetSpecs
| GeneralConstraint'''
t[0] = t[1]
# 46 Element set specification ------------------------------------------------
# 46.1
def p_ElementSetSpecs_1 (t):
'ElementSetSpecs : RootElementSetSpec'
t[0] = t[1]
def p_ElementSetSpecs_2 (t):
'ElementSetSpecs : RootElementSetSpec COMMA ELLIPSIS'
t[0] = t[1]
t[0].ext = True
def p_ElementSetSpecs_3 (t):
'ElementSetSpecs : RootElementSetSpec COMMA ELLIPSIS COMMA AdditionalElementSetSpec'
t[0] = t[1]
t[0].ext = True
def p_RootElementSetSpec (t):
'RootElementSetSpec : ElementSetSpec'
t[0] = t[1]
def p_AdditionalElementSetSpec (t):
'AdditionalElementSetSpec : ElementSetSpec'
t[0] = t[1]
def p_ElementSetSpec (t):
'ElementSetSpec : Unions'
t[0] = t[1]
def p_Unions_1 (t):
'Unions : Intersections'
t[0] = t[1]
def p_Unions_2 (t):
'Unions : UElems UnionMark Intersections'
t[0] = Constraint(type = 'Union', subtype = [t[1], t[3]])
def p_UElems (t):
'UElems : Unions'
t[0] = t[1]
def p_Intersections_1 (t):
'Intersections : IntersectionElements'
t[0] = t[1]
def p_Intersections_2 (t):
'Intersections : IElems IntersectionMark IntersectionElements'
t[0] = Constraint(type = 'Intersection', subtype = [t[1], t[3]])
def p_IElems (t):
'IElems : Intersections'
t[0] = t[1]
def p_IntersectionElements (t):
'IntersectionElements : Elements'
t[0] = t[1]
def p_UnionMark (t):
'''UnionMark : BAR
| UNION'''
def p_IntersectionMark (t):
'''IntersectionMark : CIRCUMFLEX
| INTERSECTION'''
# 46.5
def p_Elements_1 (t):
'Elements : SubtypeElements'
t[0] = t[1]
def p_Elements_2 (t):
'Elements : LPAREN ElementSetSpec RPAREN'
t[0] = t[2]
# 47 Subtype elements ---------------------------------------------------------
# 47.1 General
def p_SubtypeElements (t):
'''SubtypeElements : SingleValue
| ContainedSubtype
| ValueRange
| PermittedAlphabet
| SizeConstraint
| TypeConstraint
| InnerTypeConstraints
| PatternConstraint'''
t[0] = t[1]
# 47.2 Single value
# 47.2.1
def p_SingleValue (t):
'SingleValue : Value'
t[0] = Constraint(type = 'SingleValue', subtype = t[1])
# 47.3 Contained subtype
# 47.3.1
def p_ContainedSubtype (t):
'ContainedSubtype : Includes Type'
t[0] = Constraint(type = 'ContainedSubtype', subtype = t[2])
def p_Includes (t):
'''Includes : INCLUDES
| '''
# 47.4 Value range
# 47.4.1
def p_ValueRange (t):
'ValueRange : LowerEndpoint RANGE UpperEndpoint'
t[0] = Constraint(type = 'ValueRange', subtype = [t[1], t[3]])
# 47.4.3
def p_LowerEndpoint_1 (t):
'LowerEndpoint : LowerEndValue'
t[0] = t[1]
def p_LowerEndpoint_2 (t):
'LowerEndpoint : LowerEndValue LT'
    t[0] = t[1] # exclusive bound ('<') is ignored and treated as inclusive
def p_UpperEndpoint_1 (t):
'UpperEndpoint : UpperEndValue'
t[0] = t[1]
def p_UpperEndpoint_2 (t):
'UpperEndpoint : LT UpperEndValue'
    t[0] = t[1] # exclusive bound ('<') is ignored and treated as inclusive
# 47.4.4
def p_LowerEndValue (t):
'''LowerEndValue : Value
| MIN'''
t[0] = t[1] # XXX
def p_UpperEndValue (t):
'''UpperEndValue : Value
| MAX'''
t[0] = t[1]
# 47.5 Size constraint
# 47.5.1
def p_SizeConstraint (t):
'SizeConstraint : SIZE Constraint'
t[0] = Constraint (type = 'Size', subtype = t[2])
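# e.g. OCTET STRING (SIZE (1..8)) yields Constraint(type='Size') wrapping a
# 'ValueRange' constraint.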
# 47.6 Type constraint
# 47.6.1
def p_TypeConstraint (t):
'TypeConstraint : Type'
t[0] = Constraint (type = 'Type', subtype = t[1])
# 47.7 Permitted alphabet
# 47.7.1
def p_PermittedAlphabet (t):
'PermittedAlphabet : FROM Constraint'
t[0] = Constraint (type = 'From', subtype = t[2])
# 47.8 Inner subtyping
# 47.8.1
def p_InnerTypeConstraints (t):
'''InnerTypeConstraints : WITH COMPONENT SingleTypeConstraint
| WITH COMPONENTS MultipleTypeConstraints'''
pass # ignore PER invisible constraint
# 47.8.3
def p_SingleTypeConstraint (t):
'SingleTypeConstraint : Constraint'
t[0] = t[1]
# 47.8.4
def p_MultipleTypeConstraints (t):
'''MultipleTypeConstraints : FullSpecification
| PartialSpecification'''
t[0] = t[1]
def p_FullSpecification (t):
'FullSpecification : LBRACE TypeConstraints RBRACE'
t[0] = t[2]
def p_PartialSpecification (t):
'PartialSpecification : LBRACE ELLIPSIS COMMA TypeConstraints RBRACE'
t[0] = t[4]
def p_TypeConstraints_1 (t):
'TypeConstraints : named_constraint'
t [0] = [t[1]]
def p_TypeConstraints_2 (t):
'TypeConstraints : TypeConstraints COMMA named_constraint'
t[0] = t[1] + [t[3]]
def p_named_constraint_1 (t):
'named_constraint : identifier constraint'
    t[0] = Node ('named_constraint', ident = t[1], constr = t[2]) # PLY expects the result in t[0]; a plain return is discarded
def p_named_constraint_2 (t):
'named_constraint : constraint'
    t[0] = Node ('named_constraint', constr = t[1])
def p_constraint (t):
'constraint : value_constraint presence_constraint'
t[0] = Node ('constraint', value = t[1], presence = t[2])
def p_value_constraint_1 (t):
'value_constraint : Constraint'
t[0] = t[1]
def p_value_constraint_2 (t):
'value_constraint : '
pass
def p_presence_constraint_1 (t):
'''presence_constraint : PRESENT
| ABSENT
| OPTIONAL'''
t[0] = t[1]
def p_presence_constraint_2 (t):
'''presence_constraint : '''
pass
# 47.9 Pattern constraint
# 47.9.1
def p_PatternConstraint (t):
'PatternConstraint : PATTERN Value'
t[0] = Constraint (type = 'Pattern', subtype = t[2])
# 49 The exception identifier
# 49.4
def p_ExceptionSpec_1 (t):
'ExceptionSpec : EXCLAMATION ExceptionIdentification'
pass
def p_ExceptionSpec_2 (t):
'ExceptionSpec : '
pass
def p_ExceptionIdentification (t):
'''ExceptionIdentification : SignedNumber
| DefinedValue
| Type COLON Value '''
pass
# /*-----------------------------------------------------------------------*/
# /* Value Notation Productions */
# /*-----------------------------------------------------------------------*/
def p_binary_string (t):
'binary_string : BSTRING'
t[0] = BStringValue(val = t[1])
def p_hex_string (t):
'hex_string : HSTRING'
t[0] = HStringValue(val = t[1])
def p_char_string (t):
'char_string : QSTRING'
t[0] = t[1]
def p_number (t):
'number : NUMBER'
t[0] = t[1]
#--- ITU-T Recommendation X.208 -----------------------------------------------
# 27 Notation for the any type ------------------------------------------------
# 27.1
def p_AnyType (t):
'''AnyType : ANY
| ANY DEFINED BY identifier'''
t[0] = AnyType()
#--- ITU-T Recommendation X.681 -----------------------------------------------
# 7 ASN.1 lexical items -------------------------------------------------------
# 7.1 Information object class references
def p_objectclassreference (t):
'objectclassreference : CLASS_IDENT'
t[0] = Class_Ref(val=t[1])
# 7.2 Information object references
def p_objectreference (t):
'objectreference : LCASE_IDENT'
t[0] = t[1]
# 7.3 Information object set references
#def p_objectsetreference (t):
# 'objectsetreference : UCASE_IDENT'
# t[0] = t[1]
# 7.4 Type field references
# ucasefieldreference
# 7.5 Value field references
# lcasefieldreference
# 7.6 Value set field references
# ucasefieldreference
# 7.7 Object field references
# lcasefieldreference
# 7.8 Object set field references
# ucasefieldreference
def p_ucasefieldreference (t):
'ucasefieldreference : AMPERSAND UCASE_IDENT'
t[0] = '&' + t[2]
def p_lcasefieldreference (t):
'lcasefieldreference : AMPERSAND LCASE_IDENT'
t[0] = '&' + t[2]
# 8 Referencing definitions
# 8.1
def p_DefinedObjectClass (t):
'''DefinedObjectClass : objectclassreference
| UsefulObjectClassReference'''
t[0] = t[1]
global obj_class
obj_class = t[0].val
def p_DefinedObject (t):
'''DefinedObject : objectreference'''
t[0] = t[1]
# 8.4
def p_UsefulObjectClassReference (t):
'''UsefulObjectClassReference : TYPE_IDENTIFIER
| ABSTRACT_SYNTAX'''
t[0] = Class_Ref(val=t[1])
# 9 Information object class definition and assignment
# 9.1
def p_ObjectClassAssignment (t):
'''ObjectClassAssignment : CLASS_IDENT ASSIGNMENT ObjectClass
| UCASE_IDENT ASSIGNMENT ObjectClass'''
t[0] = t[3]
t[0].SetName(t[1])
if isinstance(t[0], ObjectClassDefn):
t[0].reg_types()
# 9.2
def p_ObjectClass (t):
'''ObjectClass : DefinedObjectClass
| ObjectClassDefn
| ParameterizedObjectClass '''
t[0] = t[1]
# 9.3
def p_ObjectClassDefn (t):
'''ObjectClassDefn : CLASS LBRACE FieldSpecs RBRACE
| CLASS LBRACE FieldSpecs RBRACE WithSyntaxSpec'''
t[0] = ObjectClassDefn(fields = t[3])
def p_FieldSpecs_1 (t):
'FieldSpecs : FieldSpec'
t[0] = [t[1]]
def p_FieldSpecs_2 (t):
'FieldSpecs : FieldSpecs COMMA FieldSpec'
t[0] = t[1] + [t[3]]
def p_WithSyntaxSpec (t):
'WithSyntaxSpec : WITH SYNTAX lbraceignore rbraceignore'
t[0] = None
# 9.4
def p_FieldSpec (t):
'''FieldSpec : TypeFieldSpec
| FixedTypeValueFieldSpec
| VariableTypeValueFieldSpec
| FixedTypeValueSetFieldSpec
| ObjectFieldSpec
| ObjectSetFieldSpec '''
t[0] = t[1]
# 9.5
def p_TypeFieldSpec (t):
'''TypeFieldSpec : ucasefieldreference
| ucasefieldreference TypeOptionalitySpec '''
t[0] = TypeFieldSpec()
t[0].SetName(t[1])
def p_TypeOptionalitySpec_1 (t):
'TypeOptionalitySpec ::= OPTIONAL'
pass
def p_TypeOptionalitySpec_2 (t):
'TypeOptionalitySpec ::= DEFAULT Type'
pass
# 9.6
def p_FixedTypeValueFieldSpec (t):
'''FixedTypeValueFieldSpec : lcasefieldreference Type
| lcasefieldreference Type UNIQUE
| lcasefieldreference Type ValueOptionalitySpec
| lcasefieldreference Type UNIQUE ValueOptionalitySpec '''
t[0] = FixedTypeValueFieldSpec(typ = t[2])
t[0].SetName(t[1])
def p_ValueOptionalitySpec_1 (t):
'ValueOptionalitySpec ::= OPTIONAL'
pass
def p_ValueOptionalitySpec_2 (t):
'ValueOptionalitySpec ::= DEFAULT Value'
pass
# 9.8
def p_VariableTypeValueFieldSpec (t):
'''VariableTypeValueFieldSpec : lcasefieldreference FieldName
| lcasefieldreference FieldName ValueOptionalitySpec '''
t[0] = VariableTypeValueFieldSpec()
t[0].SetName(t[1])
# 9.9
def p_FixedTypeValueSetFieldSpec (t):
'''FixedTypeValueSetFieldSpec : ucasefieldreference Type
| ucasefieldreference Type ValueSetOptionalitySpec '''
t[0] = FixedTypeValueSetFieldSpec()
t[0].SetName(t[1])
def p_ValueSetOptionalitySpec_1 (t):
'ValueSetOptionalitySpec ::= OPTIONAL'
pass
def p_ValueSetOptionalitySpec_2 (t):
'ValueSetOptionalitySpec ::= DEFAULT ValueSet'
pass
# 9.11
def p_ObjectFieldSpec (t):
'''ObjectFieldSpec : lcasefieldreference DefinedObjectClass
| lcasefieldreference DefinedObjectClass ObjectOptionalitySpec '''
t[0] = ObjectFieldSpec(cls=t[2])
t[0].SetName(t[1])
global obj_class
obj_class = None
def p_ObjectOptionalitySpec_1 (t):
'ObjectOptionalitySpec ::= OPTIONAL'
pass
def p_ObjectOptionalitySpec_2 (t):
'ObjectOptionalitySpec ::= DEFAULT Object'
pass
# 9.12
def p_ObjectSetFieldSpec (t):
'''ObjectSetFieldSpec : ucasefieldreference DefinedObjectClass
| ucasefieldreference DefinedObjectClass ObjectSetOptionalitySpec '''
t[0] = ObjectSetFieldSpec(cls=t[2])
t[0].SetName(t[1])
def p_ObjectSetOptionalitySpec_1 (t):
'ObjectSetOptionalitySpec ::= OPTIONAL'
pass
def p_ObjectSetOptionalitySpec_2 (t):
'ObjectSetOptionalitySpec ::= DEFAULT ObjectSet'
pass
# 9.13
def p_PrimitiveFieldName (t):
'''PrimitiveFieldName : ucasefieldreference
| lcasefieldreference '''
t[0] = t[1]
# 9.13
def p_FieldName_1 (t):
'FieldName : PrimitiveFieldName'
t[0] = t[1]
def p_FieldName_2 (t):
'FieldName : FieldName DOT PrimitiveFieldName'
t[0] = t[1] + '.' + t[3]
# 11 Information object definition and assignment
# 11.1
def p_ObjectAssignment (t):
'ObjectAssignment : objectreference DefinedObjectClass ASSIGNMENT Object'
t[0] = ObjectAssignment (ident = t[1], cls=t[2].val, val=t[4])
global obj_class
obj_class = None
# 11.3
def p_Object (t):
'''Object : DefinedObject
| ObjectDefn
| ParameterizedObject'''
t[0] = t[1]
# 11.4
def p_ObjectDefn (t):
'ObjectDefn : lbraceobject bodyobject rbraceobject'
t[0] = t[2]
# {...} block of object definition
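# If the governing class has a registered WITH SYNTAX table the lexer keeps
# tokenizing so the cls_syntax productions can pick the fields apart;
# otherwise it switches to the 'braceignore' state and skips the body.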
def p_lbraceobject(t):
'lbraceobject : braceobjectbegin LBRACE'
t[0] = t[1]
def p_braceobjectbegin(t):
'braceobjectbegin : '
global lexer
global obj_class
if set_class_syntax(obj_class):
state = 'INITIAL'
else:
lexer.level = 1
state = 'braceignore'
lexer.push_state(state)
def p_rbraceobject(t):
'rbraceobject : braceobjectend RBRACE'
t[0] = t[2]
def p_braceobjectend(t):
'braceobjectend : '
global lexer
lexer.pop_state()
set_class_syntax(None)
def p_bodyobject_1 (t):
'bodyobject : '
t[0] = { }
def p_bodyobject_2 (t):
'bodyobject : cls_syntax_list'
t[0] = t[1]
def p_cls_syntax_list_1 (t):
'cls_syntax_list : cls_syntax_list cls_syntax'
t[0] = t[1]
t[0].update(t[2])
def p_cls_syntax_list_2 (t):
'cls_syntax_list : cls_syntax'
t[0] = t[1]
# X.681
def p_cls_syntax_1 (t):
'cls_syntax : Type IDENTIFIED BY Value'
t[0] = { get_class_fieled(' ') : t[1], get_class_fieled(' '.join((t[2], t[3]))) : t[4] }
def p_cls_syntax_2 (t):
'cls_syntax : HAS PROPERTY Value'
t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] }
# X.880
def p_cls_syntax_3 (t):
'''cls_syntax : ERRORS ObjectSet
| LINKED ObjectSet
| RETURN RESULT BooleanValue
| SYNCHRONOUS BooleanValue
| INVOKE PRIORITY Value
| RESULT_PRIORITY Value
| PRIORITY Value
| ALWAYS RESPONDS BooleanValue
| IDEMPOTENT BooleanValue '''
t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] }
def p_cls_syntax_4 (t):
'''cls_syntax : ARGUMENT Type
| RESULT Type
| PARAMETER Type '''
t[0] = { get_class_fieled(t[1]) : t[2] }
def p_cls_syntax_5 (t):
'cls_syntax : CODE Value'
    fld = get_class_fieled(t[1])
t[0] = { fld : t[2] }
if isinstance(t[2], ChoiceValue):
fldt = fld + '.' + t[2].choice
t[0][fldt] = t[2]
def p_cls_syntax_6 (t):
'''cls_syntax : ARGUMENT Type OPTIONAL BooleanValue
| RESULT Type OPTIONAL BooleanValue
| PARAMETER Type OPTIONAL BooleanValue '''
t[0] = { get_class_fieled(t[1]) : t[2], get_class_fieled(' '.join((t[1], t[3]))) : t[4] }
# 12 Information object set definition and assignment
# 12.1
def p_ObjectSetAssignment (t):
'ObjectSetAssignment : UCASE_IDENT CLASS_IDENT ASSIGNMENT ObjectSet'
t[0] = Node('ObjectSetAssignment', name=t[1], cls=t[2], val=t[4])
# 12.3
def p_ObjectSet (t):
'ObjectSet : lbraceignore rbraceignore'
t[0] = None
# 14 Notation for the object class field type ---------------------------------
# 14.1
def p_ObjectClassFieldType (t):
'ObjectClassFieldType : DefinedObjectClass DOT FieldName'
t[0] = get_type_from_class(t[1], t[3])
# 14.6
def p_ObjectClassFieldValue (t):
'''ObjectClassFieldValue : OpenTypeFieldVal'''
t[0] = t[1]
def p_OpenTypeFieldVal (t):
'''OpenTypeFieldVal : Type COLON Value
| NullType COLON NullValue'''
t[0] = t[3]
# 15 Information from objects -------------------------------------------------
# 15.1
def p_ValueFromObject (t):
'ValueFromObject : LCASE_IDENT DOT FieldName'
t[0] = t[1] + '.' + t[3]
# Annex C - The instance-of type ----------------------------------------------
# C.2
def p_InstanceOfType (t):
'InstanceOfType : INSTANCE OF DefinedObjectClass'
t[0] = InstanceOfType()
# --- tables ---
useful_object_class_types = {
# Annex A
'TYPE-IDENTIFIER.&id' : lambda : ObjectIdentifierType(),
'TYPE-IDENTIFIER.&Type' : lambda : OpenType(),
# Annex B
'ABSTRACT-SYNTAX.&id' : lambda : ObjectIdentifierType(),
'ABSTRACT-SYNTAX.&Type' : lambda : OpenType(),
'ABSTRACT-SYNTAX.&property' : lambda : BitStringType(),
}
object_class_types = { }
object_class_typerefs = { }
object_class_classrefs = { }
# dummy types
class _VariableTypeValueFieldSpec (AnyType):
pass
class _FixedTypeValueSetFieldSpec (AnyType):
pass
class_types_creator = {
'BooleanType' : lambda : BooleanType(),
'IntegerType' : lambda : IntegerType(),
'ObjectIdentifierType' : lambda : ObjectIdentifierType(),
'OpenType' : lambda : OpenType(),
# dummy types
'_VariableTypeValueFieldSpec' : lambda : _VariableTypeValueFieldSpec(),
'_FixedTypeValueSetFieldSpec' : lambda : _FixedTypeValueSetFieldSpec(),
}
class_names = { }
x681_syntaxes = {
'TYPE-IDENTIFIER' : {
' ' : '&Type',
'IDENTIFIED' : 'IDENTIFIED',
#'BY' : 'BY',
'IDENTIFIED BY' : '&id',
},
'ABSTRACT-SYNTAX' : {
' ' : '&Type',
'IDENTIFIED' : 'IDENTIFIED',
#'BY' : 'BY',
'IDENTIFIED BY' : '&id',
'HAS' : 'HAS',
'PROPERTY' : 'PROPERTY',
'HAS PROPERTY' : '&property',
},
}
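# In these syntax tables multi-word phrases map WITH SYNTAX tokens to class
# field names, while single-word entries are intermediate tokens; e.g. the
# object { MyType IDENTIFIED BY id-foo } assigns MyType to &Type and id-foo
# to &id (MyType and id-foo are illustrative names).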
class_syntaxes_enabled = {
'TYPE-IDENTIFIER' : True,
'ABSTRACT-SYNTAX' : True,
}
class_syntaxes = {
'TYPE-IDENTIFIER' : x681_syntaxes['TYPE-IDENTIFIER'],
'ABSTRACT-SYNTAX' : x681_syntaxes['ABSTRACT-SYNTAX'],
}
class_current_syntax = None
def get_syntax_tokens(syntaxes):
tokens = { }
for s in (syntaxes):
for k in (list(syntaxes[s].keys())):
if k.find(' ') < 0:
tokens[k] = k
tokens[k] = tokens[k].replace('-', '_')
return list(tokens.values())
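# e.g. get_syntax_tokens(x681_syntaxes) yields the single-word keywords
# ['IDENTIFIED', 'HAS', 'PROPERTY'] (order aside), with '-' mapped to '_'
# so they are usable as lexer token names.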
tokens = tokens + get_syntax_tokens(x681_syntaxes)
def set_class_syntax(syntax):
global class_syntaxes_enabled
global class_current_syntax
#print "set_class_syntax", syntax, class_current_syntax
if class_syntaxes_enabled.get(syntax, False):
class_current_syntax = syntax
return True
else:
class_current_syntax = None
return False
def is_class_syntax(name):
global class_syntaxes
global class_current_syntax
#print "is_class_syntax", name, class_current_syntax
if not class_current_syntax:
return False
return name in class_syntaxes[class_current_syntax]
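# note: 'fieled' is a long-standing misspelling of 'field'; the name is kept
# as-is because it is used consistently throughout this file.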
def get_class_fieled(name):
if not class_current_syntax:
return None
return class_syntaxes[class_current_syntax][name]
def is_class_ident(name):
return name in class_names
def add_class_ident(name):
#print "add_class_ident", name
class_names[name] = name
def get_type_from_class(cls, fld):
flds = fld.split('.')
if (isinstance(cls, Class_Ref)):
key = cls.val + '.' + flds[0]
else:
key = cls + '.' + flds[0]
if key in object_class_classrefs:
return get_type_from_class(object_class_classrefs[key], '.'.join(flds[1:]))
if key in object_class_typerefs:
return Type_Ref(val=object_class_typerefs[key])
creator = lambda : AnyType()
creator = useful_object_class_types.get(key, creator)
creator = object_class_types.get(key, creator)
return creator()
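# e.g. get_type_from_class(Class_Ref(val='TYPE-IDENTIFIER'), '&id') resolves
# through useful_object_class_types to an ObjectIdentifierType; unknown
# fields fall back to AnyType (treated as an open type).
# The converse, set_type_to_class() below, records what a CLASS field holds:
# a concrete type via class_types_creator, a reference to another type
# ('TypeReference') or to another class ('ClassReference'); a conflicting
# redefinition raises CompError.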
def set_type_to_class(cls, fld, pars):
#print "set_type_to_class", cls, fld, pars
key = cls + '.' + fld
typename = 'OpenType'
if (len(pars) > 0):
typename = pars[0]
else:
pars.append(typename)
typeref = None
if (len(pars) > 1):
if (isinstance(pars[1], Class_Ref)):
pars[1] = pars[1].val
typeref = pars[1]
msg = None
if key in object_class_types:
msg = object_class_types[key]().type
if key in object_class_typerefs:
msg = "TypeReference " + object_class_typerefs[key]
if key in object_class_classrefs:
msg = "ClassReference " + object_class_classrefs[key]
if msg == ' '.join(pars):
msg = None
if msg:
msg0 = "Can not define CLASS field %s as '%s'\n" % (key, ' '.join(pars))
msg1 = "Already defined as '%s'" % (msg)
raise CompError(msg0 + msg1)
if (typename == 'ClassReference'):
if not typeref: return False
object_class_classrefs[key] = typeref
return True
if (typename == 'TypeReference'):
if not typeref: return False
object_class_typerefs[key] = typeref
return True
creator = class_types_creator.get(typename)
if creator:
object_class_types[key] = creator
return True
else:
return False
def import_class_from_module(mod, cls):
add_class_ident(cls)
mcls = "$%s$%s" % (mod, cls)
    # re-key '$Mod$Cls.field' entries as 'Cls.field' (kk[1] is the field name)
    for k in list(object_class_classrefs.keys()):
        kk = k.split('.', 1)
        if kk[0] == mcls:
            object_class_classrefs[cls + '.' + kk[1]] = object_class_classrefs[k]
    for k in list(object_class_typerefs.keys()):
        kk = k.split('.', 1)
        if kk[0] == mcls:
            object_class_typerefs[cls + '.' + kk[1]] = object_class_typerefs[k]
    for k in list(object_class_types.keys()):
        kk = k.split('.', 1)
        if kk[0] == mcls:
            object_class_types[cls + '.' + kk[1]] = object_class_types[k]
#--- ITU-T Recommendation X.682 -----------------------------------------------
# 8 General constraint specification ------------------------------------------
# 8.1
def p_GeneralConstraint (t):
'''GeneralConstraint : UserDefinedConstraint
| TableConstraint
| ContentsConstraint'''
t[0] = t[1]
# 9 User-defined constraints --------------------------------------------------
# 9.1
def p_UserDefinedConstraint (t):
'UserDefinedConstraint : CONSTRAINED BY LBRACE UserDefinedConstraintParameterList RBRACE'
t[0] = Constraint(type = 'UserDefined', subtype = t[4])
def p_UserDefinedConstraintParameterList_1 (t):
'UserDefinedConstraintParameterList : '
t[0] = []
def p_UserDefinedConstraintParameterList_2 (t):
'UserDefinedConstraintParameterList : UserDefinedConstraintParameter'
t[0] = [t[1]]
def p_UserDefinedConstraintParameterList_3 (t):
'UserDefinedConstraintParameterList : UserDefinedConstraintParameterList COMMA UserDefinedConstraintParameter'
t[0] = t[1] + [t[3]]
# 9.3
def p_UserDefinedConstraintParameter (t):
'UserDefinedConstraintParameter : Type'
t[0] = t[1]
# 10 Table constraints, including component relation constraints --------------
# 10.3
def p_TableConstraint (t):
'''TableConstraint : SimpleTableConstraint
| ComponentRelationConstraint'''
t[0] = Constraint(type = 'Table', subtype = t[1])
def p_SimpleTableConstraint (t):
'SimpleTableConstraint : LBRACE UCASE_IDENT RBRACE'
t[0] = t[2]
# 10.7
def p_ComponentRelationConstraint (t):
'ComponentRelationConstraint : LBRACE UCASE_IDENT RBRACE LBRACE AtNotations RBRACE'
t[0] = t[2] + str(t[5])
def p_AtNotations_1 (t):
'AtNotations : AtNotation'
t[0] = [t[1]]
def p_AtNotations_2 (t):
'AtNotations : AtNotations COMMA AtNotation'
t[0] = t[1] + [t[3]]
def p_AtNotation_1 (t):
'AtNotation : AT ComponentIdList'
t[0] = '@' + t[2]
def p_AtNotation_2 (t):
'AtNotation : AT DOT Level ComponentIdList'
t[0] = '@.' + t[3] + t[4]
def p_Level_1 (t):
'Level : DOT Level'
t[0] = '.' + t[2]
def p_Level_2 (t):
'Level : '
t[0] = ''
def p_ComponentIdList_1 (t):
'ComponentIdList : LCASE_IDENT'
t[0] = t[1]
def p_ComponentIdList_2 (t):
'ComponentIdList : ComponentIdList DOT LCASE_IDENT'
t[0] = t[1] + '.' + t[3]
# 11 Contents constraints -----------------------------------------------------
# 11.1
def p_ContentsConstraint (t):
'ContentsConstraint : CONTAINING type_ref'
t[0] = Constraint(type = 'Contents', subtype = t[2])
#--- ITU-T Recommendation X.683 -----------------------------------------------
# 8 Parameterized assignments -------------------------------------------------
# 8.1
def p_ParameterizedAssignment (t):
'''ParameterizedAssignment : ParameterizedTypeAssignment
| ParameterizedObjectClassAssignment
| ParameterizedObjectAssignment
| ParameterizedObjectSetAssignment'''
t[0] = t[1]
# 8.2
def p_ParameterizedTypeAssignment (t):
'ParameterizedTypeAssignment : UCASE_IDENT ParameterList ASSIGNMENT Type'
t[0] = t[4]
t[0].SetName(t[1]) # t[0].SetName(t[1] + 'xxx')
def p_ParameterizedObjectClassAssignment (t):
'''ParameterizedObjectClassAssignment : CLASS_IDENT ParameterList ASSIGNMENT ObjectClass
| UCASE_IDENT ParameterList ASSIGNMENT ObjectClass'''
t[0] = t[4]
t[0].SetName(t[1])
if isinstance(t[0], ObjectClassDefn):
t[0].reg_types()
def p_ParameterizedObjectAssignment (t):
'ParameterizedObjectAssignment : objectreference ParameterList DefinedObjectClass ASSIGNMENT Object'
t[0] = ObjectAssignment (ident = t[1], cls=t[3].val, val=t[5])
global obj_class
obj_class = None
def p_ParameterizedObjectSetAssignment (t):
'ParameterizedObjectSetAssignment : UCASE_IDENT ParameterList DefinedObjectClass ASSIGNMENT ObjectSet'
t[0] = Node('ObjectSetAssignment', name=t[1], cls=t[3].val, val=t[5])
# 8.3
def p_ParameterList (t):
'ParameterList : lbraceignore rbraceignore'
#def p_ParameterList (t):
# 'ParameterList : LBRACE Parameters RBRACE'
# t[0] = t[2]
#def p_Parameters_1 (t):
# 'Parameters : Parameter'
# t[0] = [t[1]]
#def p_Parameters_2 (t):
# 'Parameters : Parameters COMMA Parameter'
# t[0] = t[1] + [t[3]]
#def p_Parameter_1 (t):
# 'Parameter : Type COLON Reference'
# t[0] = [t[1], t[3]]
#def p_Parameter_2 (t):
# 'Parameter : Reference'
# t[0] = t[1]
# 9 Referencing parameterized definitions -------------------------------------
# 9.1
def p_ParameterizedReference (t):
'ParameterizedReference : Reference LBRACE RBRACE'
t[0] = t[1]
#t[0].val += 'xxx'
# 9.2
def p_ParameterizedType (t):
'ParameterizedType : type_ref ActualParameterList'
t[0] = t[1]
#t[0].val += 'xxx'
def p_ParameterizedObjectClass (t):
'ParameterizedObjectClass : DefinedObjectClass ActualParameterList'
t[0] = t[1]
#t[0].val += 'xxx'
def p_ParameterizedObject (t):
'ParameterizedObject : DefinedObject ActualParameterList'
t[0] = t[1]
#t[0].val += 'xxx'
# 9.5
def p_ActualParameterList (t):
'ActualParameterList : lbraceignore rbraceignore'
#def p_ActualParameterList (t):
# 'ActualParameterList : LBRACE ActualParameters RBRACE'
# t[0] = t[2]
#def p_ActualParameters_1 (t):
# 'ActualParameters : ActualParameter'
# t[0] = [t[1]]
#def p_ActualParameters_2 (t):
# 'ActualParameters : ActualParameters COMMA ActualParameter'
# t[0] = t[1] + [t[3]]
#def p_ActualParameter (t):
# '''ActualParameter : Type
# | Value'''
# t[0] = t[1]
#--- ITU-T Recommendation X.880 -----------------------------------------------
x880_classes = {
'OPERATION' : {
'&ArgumentType' : [],
'&argumentTypeOptional' : [ 'BooleanType' ],
'&returnResult' : [ 'BooleanType' ],
'&ResultType' : [],
'&resultTypeOptional' : [ 'BooleanType' ],
'&Errors' : [ 'ClassReference', 'ERROR' ],
'&Linked' : [ 'ClassReference', 'OPERATION' ],
'&synchronous' : [ 'BooleanType' ],
'&idempotent' : [ 'BooleanType' ],
'&alwaysReturns' : [ 'BooleanType' ],
'&InvokePriority' : [ '_FixedTypeValueSetFieldSpec' ],
'&ResultPriority' : [ '_FixedTypeValueSetFieldSpec' ],
'&operationCode' : [ 'TypeReference', 'Code' ],
},
'ERROR' : {
'&ParameterType' : [],
'&parameterTypeOptional' : [ 'BooleanType' ],
'&ErrorPriority' : [ '_FixedTypeValueSetFieldSpec' ],
'&errorCode' : [ 'TypeReference', 'Code' ],
},
'OPERATION-PACKAGE' : {
'&Both' : [ 'ClassReference', 'OPERATION' ],
'&Consumer' : [ 'ClassReference', 'OPERATION' ],
'&Supplier' : [ 'ClassReference', 'OPERATION' ],
'&id' : [ 'ObjectIdentifierType' ],
},
'CONNECTION-PACKAGE' : {
'&bind' : [ 'ClassReference', 'OPERATION' ],
'&unbind' : [ 'ClassReference', 'OPERATION' ],
'&responderCanUnbind' : [ 'BooleanType' ],
'&unbindCanFail' : [ 'BooleanType' ],
'&id' : [ 'ObjectIdentifierType' ],
},
'CONTRACT' : {
'&connection' : [ 'ClassReference', 'CONNECTION-PACKAGE' ],
'&OperationsOf' : [ 'ClassReference', 'OPERATION-PACKAGE' ],
'&InitiatorConsumerOf' : [ 'ClassReference', 'OPERATION-PACKAGE' ],
'&InitiatorSupplierOf' : [ 'ClassReference', 'OPERATION-PACKAGE' ],
'&id' : [ 'ObjectIdentifierType' ],
},
'ROS-OBJECT-CLASS' : {
'&Is' : [ 'ClassReference', 'ROS-OBJECT-CLASS' ],
'&Initiates' : [ 'ClassReference', 'CONTRACT' ],
'&Responds' : [ 'ClassReference', 'CONTRACT' ],
'&InitiatesAndResponds' : [ 'ClassReference', 'CONTRACT' ],
'&id' : [ 'ObjectIdentifierType' ],
},
}
x880_syntaxes = {
'OPERATION' : {
'ARGUMENT' : '&ArgumentType',
'ARGUMENT OPTIONAL' : '&argumentTypeOptional',
'RESULT' : '&ResultType',
'RESULT OPTIONAL' : '&resultTypeOptional',
'RETURN' : 'RETURN',
'RETURN RESULT' : '&returnResult',
'ERRORS' : '&Errors',
'LINKED' : '&Linked',
'SYNCHRONOUS' : '&synchronous',
'IDEMPOTENT' : '&idempotent',
'ALWAYS' : 'ALWAYS',
'RESPONDS' : 'RESPONDS',
'ALWAYS RESPONDS' : '&alwaysReturns',
'INVOKE' : 'INVOKE',
'PRIORITY' : 'PRIORITY',
'INVOKE PRIORITY' : '&InvokePriority',
'RESULT-PRIORITY': '&ResultPriority',
'CODE' : '&operationCode',
},
'ERROR' : {
'PARAMETER' : '&ParameterType',
'PARAMETER OPTIONAL' : '&parameterTypeOptional',
'PRIORITY' : '&ErrorPriority',
'CODE' : '&errorCode',
},
# 'OPERATION-PACKAGE' : {
# },
# 'CONNECTION-PACKAGE' : {
# },
# 'CONTRACT' : {
# },
# 'ROS-OBJECT-CLASS' : {
# },
}
def x880_module_begin():
#print "x880_module_begin()"
for name in list(x880_classes.keys()):
add_class_ident(name)
def x880_import(name):
if name in x880_syntaxes:
class_syntaxes_enabled[name] = True
class_syntaxes[name] = x880_syntaxes[name]
if name in x880_classes:
add_class_ident(name)
for f in (list(x880_classes[name].keys())):
set_type_to_class(name, f, x880_classes[name][f])
tokens = tokens + get_syntax_tokens(x880_syntaxes)
# {...} OID value
#def p_lbrace_oid(t):
# 'lbrace_oid : brace_oid_begin LBRACE'
# t[0] = t[1]
#def p_brace_oid_begin(t):
# 'brace_oid_begin : '
# global in_oid
# in_oid = True
#def p_rbrace_oid(t):
# 'rbrace_oid : brace_oid_end RBRACE'
# t[0] = t[2]
#def p_brace_oid_end(t):
# 'brace_oid_end : '
# global in_oid
# in_oid = False
# {...} block to be ignored
def p_lbraceignore(t):
'lbraceignore : braceignorebegin LBRACE'
t[0] = t[1]
def p_braceignorebegin(t):
'braceignorebegin : '
global lexer
lexer.level = 1
lexer.push_state('braceignore')
def p_rbraceignore(t):
'rbraceignore : braceignoreend RBRACE'
t[0] = t[2]
def p_braceignoreend(t):
'braceignoreend : '
global lexer
lexer.pop_state()
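# The braceignorebegin/braceignoreend pair drives this: entering the block puts
# the lexer into a dedicated 'braceignore' state (with lexer.level tracking the
# nesting depth), so everything between the outer LBRACE/RBRACE of a
# ParameterList or ActualParameterList is consumed without being parsed.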
def p_error(t):
global input_file
raise ParseError(t, input_file)
def p_pyquote (t):
'''pyquote : PYQUOTE'''
t[0] = PyQuote (val = t[1])
def testlex (s):
lexer.input (s)
while True:
token = lexer.token ()
if not token:
break
print(token)
def do_module (ast, defined_dict):
assert (ast.type == 'Module')
ctx = Ctx (defined_dict)
print(ast.to_python (ctx))
print(ctx.output_assignments ())
print(ctx.output_pyquotes ())
def eth_do_module (ast, ectx):
assert (ast.type == 'Module')
if ectx.dbg('s'): print(ast.str_depth(0))
ast.to_eth(ectx)
def testyacc(s, fn, defined_dict):
ast = yacc.parse(s, debug=0)
time_str = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())
print("""#!/usr/bin/env python
# Auto-generated from %s at %s
from PyZ3950 import asn1""" % (fn, time_str))
for module in ast:
eth_do_module (module, defined_dict)
# Wireshark compiler
def eth_usage():
print("""
asn2wrs [-h|?] [-d dbg] [-b] [-p proto] [-c cnf_file] [-e] input_file(s) ...
-h|? : Usage
-b : BER (default is PER)
-u : Unaligned (default is aligned)
-p proto : Protocol name (implies -S). Default is module-name
from input_file (renamed by #.MODULE if present)
-o name : Output files name core (default is <proto>)
-O dir : Output directory for dissector
-c cnf_file : Conformance file
-I path : Path for conformance file includes
-e : Create conformance file for exported types
-E : Just create conformance file for exported types
-S : Single output for multiple modules
-s template : Single file output (template is input file
without .c/.h extension)
-k : Keep intermediate files even though single file output is used
-L : Suppress #line directive from .cnf file
-D dir : Directory for input_file(s) (default: '.')
-C : Add check for SIZE constraints
-r prefix : Remove the prefix from type names
input_file(s) : Input ASN.1 file(s)
-d dbg : Debug output, dbg = [l][y][p][s][a][t][c][m][o]
l - lex
y - yacc
p - parsing
s - internal ASN.1 structure
a - list of assignments
t - tables
c - conformance values
m - list of compiled modules with dependencies
o - list of output files
""")
def eth_main():
global input_file
global g_conform
global lexer
print("ASN.1 to Wireshark dissector compiler");
try:
opts, args = getopt.getopt(sys.argv[1:], "h?d:D:buXp:FTo:O:c:I:eESs:kLCr:")
except getopt.GetoptError:
eth_usage(); sys.exit(2)
if len(args) < 1:
eth_usage(); sys.exit(2)
conform = EthCnf()
conf_to_read = None
output = EthOut()
ectx = EthCtx(conform, output)
ectx.encoding = 'per'
ectx.proto_opt = None
ectx.fld_opt = {}
ectx.tag_opt = False
ectx.outnm_opt = None
ectx.aligned = True
ectx.dbgopt = ''
ectx.new = True
ectx.expcnf = False
ectx.justexpcnf = False
ectx.merge_modules = False
ectx.group_by_prot = False
ectx.conform.last_group = 0
ectx.conform.suppress_line = False
ectx.output.outnm = None
ectx.output.single_file = None
ectx.constraints_check = False
for o, a in opts:
if o in ("-h", "-?"):
eth_usage(); sys.exit(2)
if o in ("-c",):
conf_to_read = a
if o in ("-I",):
ectx.conform.include_path.append(a)
if o in ("-E",):
ectx.expcnf = True
ectx.justexpcnf = True
if o in ("-D",):
ectx.srcdir = a
if o in ("-C",):
ectx.constraints_check = True
if o in ("-X",):
warnings.warn("Command line option -X is obsolete and can be removed")
if o in ("-T",):
warnings.warn("Command line option -T is obsolete and can be removed")
if conf_to_read:
ectx.conform.read(conf_to_read)
for o, a in opts:
if o in ("-h", "-?", "-c", "-I", "-E", "-D", "-C", "-X", "-T"):
pass # already processed
else:
par = []
if a: par.append(a)
ectx.conform.set_opt(o, par, "commandline", 0)
(ld, yd, pd) = (0, 0, 0)
if ectx.dbg('l'): ld = 1
if ectx.dbg('y'): yd = 1
if ectx.dbg('p'): pd = 2
lexer = lex.lex(debug=ld)
yacc.yacc(method='LALR', debug=yd)
g_conform = ectx.conform
ast = []
for fn in args:
input_file = fn
lexer.lineno = 1
if (ectx.srcdir): fn = ectx.srcdir + '/' + fn
# Read ASN.1 definition, trying one of the common encodings.
with open(fn, "rb") as f:
    data = f.read()
for encoding in ('utf-8', 'windows-1252'):
try:
data = data.decode(encoding)
break
except UnicodeDecodeError:
warnings.warn_explicit("Decoding %s as %s failed, trying next." % (fn, encoding), UserWarning, '', 0)
# Py2 compat, name.translate in eth_output_hf_arr fails with unicode
if not isinstance(data, str):
data = data.encode('utf-8')
ast.extend(yacc.parse(data, lexer=lexer, debug=pd))
ectx.eth_clean()
if (ectx.merge_modules): # common output for all module
ectx.eth_clean()
for module in ast:
eth_do_module(module, ectx)
ectx.eth_prepare()
ectx.eth_do_output()
elif (ectx.groups()): # group by protocols/group
groups = []
pr2gr = {}
if (ectx.group_by_prot): # group by protocols
for module in ast:
prot = module.get_proto(ectx)
if prot not in pr2gr:
pr2gr[prot] = len(groups)
groups.append([])
groups[pr2gr[prot]].append(module)
else: # group by groups
pass
for gm in (groups):
ectx.eth_clean()
for module in gm:
eth_do_module(module, ectx)
ectx.eth_prepare()
ectx.eth_do_output()
else: # output for each module
for module in ast:
ectx.eth_clean()
eth_do_module(module, ectx)
ectx.eth_prepare()
ectx.eth_do_output()
if ectx.dbg('m'):
ectx.dbg_modules()
if ectx.dbg('c'):
ectx.conform.dbg_print()
if not ectx.justexpcnf:
ectx.conform.unused_report()
if ectx.dbg('o'):
ectx.output.dbg_print()
ectx.output.make_single_file()
# Python compiler
def main():
testfn = testyacc
if len (sys.argv) == 1:
while True:
s = input ('Query: ')
if len (s) == 0:
break
testfn (s, 'console', {})
else:
defined_dict = {}
for fn in sys.argv [1:]:
f = open (fn, "r")
testfn (f.read (), fn, defined_dict)
f.close ()
lexer.lineno = 1
#--- BODY ---------------------------------------------------------------------
if __name__ == '__main__':
if (os.path.splitext(os.path.basename(sys.argv[0]))[0].lower() in ('asn2wrs', 'asn2eth')):
eth_main()
else:
main()
#------------------------------------------------------------------------------
#
# Editor modelines - http://www.wireshark.org/tools/modelines.html
#
# c-basic-offset: 4; tab-width: 8; indent-tabs-mode: nil
# vi: set shiftwidth=4 tabstop=8 expandtab:
# :indentSize=4:tabSize=8:noTabs=true:
|
weinrank/wireshark
|
tools/asn2wrs.py
|
Python
|
gpl-2.0
| 308,942 | 0.01106 |
#!/usr/bin/env python
# vim:fileencoding=utf-8:noet
from __future__ import unicode_literals
import os
import sys
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
except IOError:
README = ''
old_python = sys.version_info < (2, 7)
setup(
name='Powerline',
version='beta',
description='The ultimate statusline/prompt utility.',
long_description=README,
classifiers=[],
author='Kim Silkebaekken',
author_email='[email protected]',
url='https://github.com/Lokaltog/powerline',
scripts=[
'scripts/powerline',
'scripts/powerline-lint',
],
keywords='',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
install_requires=[],
extras_require={
'docs': [
'Sphinx',
],
},
test_suite='tests' if not old_python else None,
)
|
keelerm84/powerline
|
setup.py
|
Python
|
mit
| 931 | 0.026853 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-11-22 07:11
from __future__ import unicode_literals
from django.core.management.sql import emit_post_migrate_signal
from django.db import migrations
def add_executive_group(apps, schema_editor):
# create group
db_alias = schema_editor.connection.alias
emit_post_migrate_signal(2, False, db_alias)
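# Emitting post_migrate above makes Django create the auth permissions for all
# installed models before they are looked up below.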
Group = apps.get_model('auth', 'Group')
Permission = apps.get_model('auth', 'Permission')
executive_group, created = Group.objects.get_or_create(name='executive')
if created:
# Learning unit
can_access_learningunit = Permission.objects.get(codename='can_access_learningunit')
executive_group.permissions.add(can_access_learningunit)
class Migration(migrations.Migration):
dependencies = [
('base', '0207_auto_20171220_1035'),
]
operations = [
migrations.RunPython(add_executive_group),
]
|
uclouvain/osis_louvain
|
base/migrations/0208_create_role_executive.py
|
Python
|
agpl-3.0
| 935 | 0.003209 |
# Twitter profile image updater
# http://twitter.com/account/update_profile_image.json
# image = [imagefile]
import os
import random
import time
import wcommon
def encode_file_data(image):
boundary = hex(int(time.time()))[2:]
headers = {}
headers['Content-Type'] = 'multipart/form-data; boundary="%s"' % (boundary)
data = [] #to be joined later (faster)
data.append("--"+boundary)
data.append("\r\n")
data.append('Content-Disposition: form-data; name="image"; filename="%s"\r\n' % image.name)
if image.name.endswith("jpg") or image.name.endswith("jpeg"):
data.append("Content-Type: image/jpeg\r\n\r\n")
elif image.name.endswith("png"):
data.append("Content-Type: image/png\r\n\r\n")
elif image.name.endswith("gif"):
data.append("Content-Type: image/gif\r\n\r\n")
else:
data.append("Content-Type: application/octet-stream\r\n\r\n")
data.append(image.read())
data.append("\r\n--")
data.append(boundary)
data.append("--\r\n\r\n")
body = ''.join(data)
headers['Content-Length'] = str(len(body))
return (headers, body)
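# Illustrative use of the encoder (the signed POST itself is done by
# wcommon.oauth_req below):
#   headers, body = encode_file_data(open("avatar.png", "rb"))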
os.chdir("./pics/")
files = os.listdir(os.getcwd())
images = []
for filename in files:
if filename.endswith(".jpg") or filename.endswith(".png") or filename.endswith(".gif"):
images.append(filename)
imagefile = random.choice(images)
image = open(imagefile, 'rb')  # binary mode: the multipart body embeds raw image bytes
url = 'http://api.twitter.com/1/account/update_profile_image.json'
headers, postdata = encode_file_data(image)
r, c = wcommon.oauth_req(url, http_method="POST", post_body=postdata, http_headers=headers)
if r.status != 200:
print "Updating profile image did not succeed: Status %d" % (r.status)
|
mhielscher/wasabiscripts
|
twitterpic.py
|
Python
|
bsd-2-clause
| 1,695 | 0.020649 |
# ptext module: place this in your import directory.
# ptext.draw(text, pos=None, **options)
# Please see README.md for explanation of options.
# https://github.com/cosmologicon/pygame-text
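# Minimal usage sketch (illustrative):
#   import pygame, ptext
#   screen = pygame.display.set_mode((640, 480))
#   ptext.draw("Hello, world!", (50, 50), fontsize=32, color="orange")
#   pygame.display.flip()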
from __future__ import division
from math import ceil, sin, cos, radians, exp
import pygame
DEFAULT_FONT_SIZE = 24
REFERENCE_FONT_SIZE = 100
DEFAULT_LINE_HEIGHT = 1.0
DEFAULT_PARAGRAPH_SPACE = 0.0
DEFAULT_FONT_NAME = None
FONT_NAME_TEMPLATE = "%s"
DEFAULT_COLOR = "white"
DEFAULT_BACKGROUND = None
DEFAULT_SHADE = 0
DEFAULT_OUTLINE_COLOR = "black"
DEFAULT_SHADOW_COLOR = "black"
OUTLINE_UNIT = 1 / 24
SHADOW_UNIT = 1 / 18
DEFAULT_ALIGN = "left" # left, center, or right
DEFAULT_ANCHOR = 0, 0 # 0, 0 = top left ; 1, 1 = bottom right
DEFAULT_STRIP = True
ALPHA_RESOLUTION = 16
ANGLE_RESOLUTION_DEGREES = 3
AUTO_CLEAN = True
MEMORY_LIMIT_MB = 64
MEMORY_REDUCTION_FACTOR = 0.5
pygame.font.init()
_font_cache = {}
def getfont(fontname=None, fontsize=None, sysfontname=None,
bold=None, italic=None, underline=None):
if fontname is not None and sysfontname is not None:
raise ValueError("Can't set both fontname and sysfontname")
if fontname is None and sysfontname is None:
fontname = DEFAULT_FONT_NAME
if fontsize is None:
fontsize = DEFAULT_FONT_SIZE
key = fontname, fontsize, sysfontname, bold, italic, underline
if key in _font_cache:
return _font_cache[key]
if sysfontname is not None:
font = pygame.font.SysFont(sysfontname, fontsize, bold or False, italic or False)
else:
if fontname is not None:
fontname = FONT_NAME_TEMPLATE % fontname
try:
font = pygame.font.Font(fontname, fontsize)
except IOError:
raise IOError("unable to read font filename: %s" % fontname)
if bold is not None:
font.set_bold(bold)
if italic is not None:
font.set_italic(italic)
if underline is not None:
font.set_underline(underline)
_font_cache[key] = font
return font
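# e.g. getfont(fontsize=32) creates (and caches) a 32pt Font for the default
# font; repeated calls with the same arguments return the cached instance.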
def wrap(text, fontname=None, fontsize=None, sysfontname=None,
bold=None, italic=None, underline=None, width=None, widthem=None, strip=None):
if widthem is None:
font = getfont(fontname, fontsize, sysfontname, bold, italic, underline)
elif width is not None:
raise ValueError("Can't set both width and widthem")
else:
font = getfont(fontname, REFERENCE_FONT_SIZE, sysfontname, bold, italic, underline)
width = widthem * REFERENCE_FONT_SIZE
if strip is None:
strip = DEFAULT_STRIP
paras = text.replace("\t", " ").split("\n")
lines = []
for jpara, para in enumerate(paras):
if strip:
para = para.rstrip(" ")
if width is None:
lines.append((para, jpara))
continue
if not para:
lines.append(("", jpara))
continue
# Preserve leading spaces in all cases.
a = len(para) - len(para.lstrip(" "))
# At any time, a is the rightmost known index you can legally split a line. I.e. it's legal
# to add para[:a] to lines, and line is what will be added to lines if para is split at a.
a = para.index(" ", a) if " " in para else len(para)
line = para[:a]
while a + 1 < len(para):
# b is the next legal place to break the line, with bline the corresponding line to add.
if " " not in para[a + 1:]:
b = len(para)
bline = para
elif strip:
# Lines may be split at any space character that immediately follows a non-space
# character.
b = para.index(" ", a + 1)
while para[b - 1] == " ":
if " " in para[b + 1:]:
b = para.index(" ", b + 1)
else:
b = len(para)
break
bline = para[:b]
else:
# Lines may be split at any space character, or any character immediately following
# a space character.
b = a + 1 if para[a] == " " else para.index(" ", a + 1)
bline = para[:b]
if font.size(bline)[0] <= width:
a, line = b, bline
else:
lines.append((line, jpara))
para = para[a:].lstrip(" ") if strip else para[a:]
a = para.index(" ", 1) if " " in para[1:] else len(para)
line = para[:a]
if para:
lines.append((line, jpara))
return lines
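# Illustrative: wrap("one two three", fontsize=24, width=40) returns a list of
# (line, paragraph_index) tuples such as [("one two", 0), ("three", 0)]; the
# exact split points depend on the font metrics.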
_fit_cache = {}
def _fitsize(text, fontname, sysfontname, bold, italic, underline, width, height, lineheight, pspace, strip):
key = text, fontname, sysfontname, bold, italic, underline, width, height, lineheight, pspace, strip
if key in _fit_cache:
return _fit_cache[key]
def fits(fontsize):
texts = wrap(text, fontname, fontsize, sysfontname, bold, italic, underline, width, strip)
font = getfont(fontname, fontsize, sysfontname, bold, italic, underline)
w = max(font.size(line)[0] for line, jpara in texts)
linesize = font.get_linesize() * lineheight
paraspace = font.get_linesize() * pspace
h = int(round((len(texts) - 1) * linesize + texts[-1][1] * paraspace)) + font.get_height()
return w <= width and h <= height
a, b = 1, 256
if not fits(a):
fontsize = a
elif fits(b):
fontsize = b
else:
while b - a > 1:
c = (a + b) // 2
if fits(c):
a = c
else:
b = c
fontsize = a
_fit_cache[key] = fontsize
return fontsize
def _resolvecolor(color, default):
if color is None:
color = default
if color is None:
return None
try:
return tuple(pygame.Color(color))
except ValueError:
return tuple(color)
def _applyshade(color, shade):
f = exp(-0.4 * shade)
r, g, b = [
min(max(int(round((c + 50) * f - 50)), 0), 255)
for c in color[:3]
]
return (r, g, b) + tuple(color[3:])
def _resolvealpha(alpha):
if alpha >= 1:
return 1
return max(int(round(alpha * ALPHA_RESOLUTION)) / ALPHA_RESOLUTION, 0)
def _resolveangle(angle):
if not angle:
return 0
angle %= 360
return int(round(angle / ANGLE_RESOLUTION_DEGREES)) * ANGLE_RESOLUTION_DEGREES
# Return the set of points in the circle radius r, using Bresenham's circle algorithm
_circle_cache = {}
def _circlepoints(r):
r = int(round(r))
if r in _circle_cache:
return _circle_cache[r]
x, y, e = r, 0, 1 - r
_circle_cache[r] = points = []
while x >= y:
points.append((x, y))
y += 1
if e < 0:
e += 2 * y - 1
else:
x -= 1
e += 2 * (y - x) - 1
points += [(y, x) for x, y in points if x > y]
points += [(-x, y) for x, y in points if x]
points += [(x, -y) for x, y in points if y]
points.sort()
return points
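# e.g. _circlepoints(1) == [(-1, 0), (0, -1), (0, 1), (1, 0)]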
_surf_cache = {}
_surf_tick_usage = {}
_surf_size_total = 0
_unrotated_size = {}
_tick = 0
def getsurf(text, fontname=None, fontsize=None, sysfontname=None, bold=None, italic=None,
underline=None, width=None, widthem=None, strip=None, color=None,
background=None, antialias=True, ocolor=None, owidth=None, scolor=None, shadow=None,
gcolor=None, shade=None, alpha=1.0, align=None, lineheight=None, pspace=None, angle=0,
cache=True):
global _tick, _surf_size_total
if fontname is None:
fontname = DEFAULT_FONT_NAME
if fontsize is None:
fontsize = DEFAULT_FONT_SIZE
fontsize = int(round(fontsize))
if align is None:
align = DEFAULT_ALIGN
if align in ["left", "center", "right"]:
align = [0, 0.5, 1][["left", "center", "right"].index(align)]
if lineheight is None:
lineheight = DEFAULT_LINE_HEIGHT
if pspace is None:
pspace = DEFAULT_PARAGRAPH_SPACE
color = _resolvecolor(color, DEFAULT_COLOR)
background = _resolvecolor(background, DEFAULT_BACKGROUND)
gcolor = _resolvecolor(gcolor, None)
if shade is None:
shade = DEFAULT_SHADE
if shade:
gcolor = _applyshade(gcolor or color, shade)
shade = 0
ocolor = None if owidth is None else _resolvecolor(ocolor, DEFAULT_OUTLINE_COLOR)
scolor = None if shadow is None else _resolvecolor(scolor, DEFAULT_SHADOW_COLOR)
opx = None if owidth is None else ceil(owidth * fontsize * OUTLINE_UNIT)
spx = None if shadow is None else tuple(ceil(s * fontsize * SHADOW_UNIT) for s in shadow)
alpha = _resolvealpha(alpha)
angle = _resolveangle(angle)
strip = DEFAULT_STRIP if strip is None else strip
key = (text, fontname, fontsize, sysfontname, bold, italic, underline, width, widthem, strip,
color, background, antialias, ocolor, opx, scolor, spx, gcolor, alpha, align, lineheight,
pspace, angle)
if key in _surf_cache:
_surf_tick_usage[key] = _tick
_tick += 1
return _surf_cache[key]
texts = wrap(text, fontname, fontsize, sysfontname, bold, italic, underline,
width=width, widthem=widthem, strip=strip)
if angle:
surf0 = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline,
width, widthem, strip, color, background, antialias,
ocolor, owidth, scolor, shadow, gcolor, 0, alpha, align, lineheight, pspace,
cache=cache)
if angle in (90, 180, 270):
surf = pygame.transform.rotate(surf0, angle)
else:
surf = pygame.transform.rotozoom(surf0, angle, 1.0)
_unrotated_size[(surf.get_size(), angle, text)] = surf0.get_size()
elif alpha < 1.0:
surf0 = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline,
width, widthem, strip, color, background, antialias,
ocolor, owidth, scolor, shadow, gcolor=gcolor, shade=0, align=align,
lineheight=lineheight, pspace=pspace, cache=cache)
surf = surf0.copy()
_surf = surf0.copy()
_surf.fill((255, 255, 255, int(alpha * 255.0)))
surf.blit(_surf, (0, 0), None, pygame.BLEND_RGBA_MULT)
del _surf
# array = pygame.surfarray.pixels_alpha(surf)
# array[:, :] = (array[:, :] * alpha).astype(array.dtype)
# del array
elif spx is not None:
surf0 = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline,
width, widthem, strip, color=color, background=(0, 0, 0, 0), antialias=antialias,
gcolor=gcolor, shade=0, align=align, lineheight=lineheight, pspace=pspace, cache=cache)
ssurf = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline,
width, widthem, strip, color=scolor, background=(0, 0, 0, 0), antialias=antialias,
align=align, lineheight=lineheight, pspace=pspace, cache=cache)
w0, h0 = surf0.get_size()
sx, sy = spx
surf = pygame.Surface((w0 + abs(sx), h0 + abs(sy))).convert_alpha()
surf.fill(background or (0, 0, 0, 0))
dx, dy = max(sx, 0), max(sy, 0)
surf.blit(ssurf, (dx, dy))
x0, y0 = abs(sx) - dx, abs(sy) - dy
if len(color) > 3 and color[3] == 0:
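# The surfarray-based alpha subtraction appears to have been disabled in this
# port (see the commented code below), so a fully transparent text color
# combined with a shadow is unsupported here.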
raise Exception("spx, color[3]==0")
# array = pygame.surfarray.pixels_alpha(surf)
# array0 = pygame.surfarray.pixels_alpha(surf0)
# array[x0:x0 + w0, y0:y0 + h0] -= array0.clip(max=array[x0:x0 + w0, y0:y0 + h0])
# del array, array0
pass
else:
surf.blit(surf0, (x0, y0))
elif opx is not None:
surf0 = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline,
width, widthem, strip, color=color, background=(0, 0, 0, 0), antialias=antialias,
gcolor=gcolor, shade=0, align=align, lineheight=lineheight, pspace=pspace, cache=cache)
osurf = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline,
width, widthem, strip, color=ocolor, background=(0, 0, 0, 0), antialias=antialias,
align=align, lineheight=lineheight, pspace=pspace, cache=cache)
w0, h0 = surf0.get_size()
surf = pygame.Surface((w0 + 2 * opx, h0 + 2 * opx)).convert_alpha()
surf.fill(background or (0, 0, 0, 0))
for dx, dy in _circlepoints(opx):
surf.blit(osurf, (dx + opx, dy + opx))
if len(color) > 3 and color[3] == 0:
# array = pygame.surfarray.pixels_alpha(surf)
# array0 = pygame.surfarray.pixels_alpha(surf0)
# array[opx:-opx, opx:-opx] -= array0.clip(max=array[opx:-opx, opx:-opx])
# del array, array0
# raise Exception("opx, color[3] == 0")
# _surf = surf0.copy()
# _surf.fill((0, 0, 0, 0))
# _surf.blit(surf, (0, 0), None, pygame.BLEND_RGBA_MAX)
# surf0.blit(_surf, (0, 0), None, pygame.BLEND_RGBA_MULT)
_surf = surf0.copy()
_surf.fill((0, 0, 0, 255), None, pygame.BLEND_RGBA_MULT)
surf.blit(_surf, (opx, opx), None, pygame.BLEND_RGBA_SUB)
del _surf
else:
surf.blit(surf0, (opx, opx))
else:
font = getfont(fontname, fontsize, sysfontname, bold, italic, underline)
# pygame.Font.render does not allow passing None as an argument value for background.
if background is None or (len(background) > 3 and background[3] == 0) or gcolor is not None:
lsurfs = [font.render(text, antialias, color).convert_alpha() for text, jpara in texts]
else:
lsurfs = [font.render(text, antialias, color, background).convert_alpha() for text, jpara in texts]
if gcolor is not None:
# import numpy
# m = numpy.clip(numpy.arange(lsurfs[0].get_height()) * 2.0 / font.get_ascent() - 1.0, 0, 1)
# for lsurf in lsurfs:
# array = pygame.surfarray.pixels3d(lsurf)
# for j in (0, 1, 2):
# array[:, :, j] = ((1.0 - m) * array[:, :, j] + m * gcolor[j]).astype(array.dtype)
# del array
_surf_height = lsurfs[0].get_height()
m = (_x * 2.0 / font.get_ascent() - 1.0 for _x in range(_surf_height))
m = [0 if _x < 0 else (1 if _x > 1 else _x) for _x in m]
for lsurf in lsurfs:
grad1 = pygame.Surface((1, _surf_height))
grad2 = pygame.Surface((1, _surf_height))
for idx, _m_val in enumerate(m):
_inv_m_val = 1.0 - _m_val
_color = (int(round(_inv_m_val * 255)),
int(round(_inv_m_val * 255)),
int(round(_inv_m_val * 255)))
grad1.set_at((0, idx), _color)
_color = (int(round(_m_val * gcolor[0])),
int(round(_m_val * gcolor[1])),
int(round(_m_val * gcolor[2])))
grad2.set_at((0, idx), _color)
grad1 = pygame.transform.scale(grad1, lsurf.get_size())
grad2 = pygame.transform.scale(grad2, lsurf.get_size())
lsurf.blit(grad1, (0, 0), None, pygame.BLEND_RGB_MULT)
lsurf.blit(grad2, (0, 0), None, pygame.BLEND_RGB_ADD)
del grad1
del grad2
if len(lsurfs) == 1 and gcolor is None:
surf = lsurfs[0]
else:
w = max(lsurf.get_width() for lsurf in lsurfs)
linesize = font.get_linesize() * lineheight
parasize = font.get_linesize() * pspace
ys = [int(round(k * linesize + jpara * parasize)) for k, (text, jpara) in enumerate(texts)]
h = ys[-1] + font.get_height()
surf = pygame.Surface((w, h)).convert_alpha()
surf.fill(background or (0, 0, 0, 0))
for y, lsurf in zip(ys, lsurfs):
x = int(round(align * (w - lsurf.get_width())))
surf.blit(lsurf, (x, y))
if cache:
w, h = surf.get_size()
_surf_size_total += 4 * w * h
_surf_cache[key] = surf
_surf_tick_usage[key] = _tick
_tick += 1
return surf
_default_surf_sentinel = ()
def draw(text, pos=None,
fontname=None, fontsize=None, sysfontname=None,
antialias=True, bold=None, italic=None, underline=None,
color=None, background=None,
top=None, left=None, bottom=None, right=None,
topleft=None, bottomleft=None, topright=None, bottomright=None,
midtop=None, midleft=None, midbottom=None, midright=None,
center=None, centerx=None, centery=None,
width=None, widthem=None, lineheight=None, pspace=None, strip=None,
align=None,
owidth=None, ocolor=None,
shadow=None, scolor=None,
gcolor=None, shade=None,
alpha=1.0,
anchor=None,
angle=0,
surf=_default_surf_sentinel,
cache=True):
if topleft:
left, top = topleft
if bottomleft:
left, bottom = bottomleft
if topright:
right, top = topright
if bottomright:
right, bottom = bottomright
if midtop:
centerx, top = midtop
if midleft:
left, centery = midleft
if midbottom:
centerx, bottom = midbottom
if midright:
right, centery = midright
if center:
centerx, centery = center
x, y = pos or (None, None)
hanchor, vanchor = anchor or (None, None)
if left is not None:
x, hanchor = left, 0
if centerx is not None:
x, hanchor = centerx, 0.5
if right is not None:
x, hanchor = right, 1
if top is not None:
y, vanchor = top, 0
if centery is not None:
y, vanchor = centery, 0.5
if bottom is not None:
y, vanchor = bottom, 1
if x is None:
raise ValueError("Unable to determine horizontal position")
if y is None:
raise ValueError("Unable to determine vertical position")
if align is None:
align = hanchor
if hanchor is None:
hanchor = DEFAULT_ANCHOR[0]
if vanchor is None:
vanchor = DEFAULT_ANCHOR[1]
tsurf = getsurf(text, fontname, fontsize, sysfontname, bold, italic, underline, width, widthem,
strip, color, background, antialias, ocolor, owidth, scolor, shadow, gcolor, shade, alpha,
align, lineheight, pspace, angle, cache)
angle = _resolveangle(angle)
if angle:
w0, h0 = _unrotated_size[(tsurf.get_size(), angle, text)]
S, C = sin(radians(angle)), cos(radians(angle))
dx, dy = (0.5 - hanchor) * w0, (0.5 - vanchor) * h0
x += dx * C + dy * S - 0.5 * tsurf.get_width()
y += -dx * S + dy * C - 0.5 * tsurf.get_height()
else:
x -= hanchor * tsurf.get_width()
y -= vanchor * tsurf.get_height()
x = int(round(x))
y = int(round(y))
if surf is _default_surf_sentinel:
surf = pygame.display.get_surface()
if surf is not None:
surf.blit(tsurf, (x, y))
if AUTO_CLEAN:
clean()
return tsurf, (x, y)
def drawbox(text, rect, fontname=None, sysfontname=None, lineheight=None, pspace=None, anchor=None,
bold=None, italic=None, underline=None, strip=None, **kwargs):
if fontname is None:
fontname = DEFAULT_FONT_NAME
if lineheight is None:
lineheight = DEFAULT_LINE_HEIGHT
if pspace is None:
pspace = DEFAULT_PARAGRAPH_SPACE
hanchor, vanchor = anchor = anchor or (0.5, 0.5)
rect = pygame.Rect(rect)
x = rect.x + hanchor * rect.width
y = rect.y + vanchor * rect.height
fontsize = _fitsize(text, fontname, sysfontname, bold, italic, underline,
rect.width, rect.height, lineheight, pspace, strip)
return draw(text, (x, y), fontname=fontname, fontsize=fontsize, lineheight=lineheight,
pspace=pspace, width=rect.width, strip=strip, anchor=anchor, **kwargs)
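# Illustrative: drawbox("GAME OVER", (100, 80, 440, 200)) picks, via _fitsize's
# binary search, the largest font size whose wrapped text fits the rect, then
# delegates to draw() anchored at the rect's center by default.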
def clean():
global _surf_size_total
memory_limit = MEMORY_LIMIT_MB * (1 << 20)
if _surf_size_total < memory_limit:
return
memory_limit *= MEMORY_REDUCTION_FACTOR
keys = sorted(_surf_cache, key=_surf_tick_usage.get)
for key in keys:
w, h = _surf_cache[key].get_size()
del _surf_cache[key]
del _surf_tick_usage[key]
_surf_size_total -= 4 * w * h
if _surf_size_total < memory_limit:
break
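# Illustrative: with the defaults above (MEMORY_LIMIT_MB = 64,
# MEMORY_REDUCTION_FACTOR = 0.5), clean() is a no-op until the surface cache
# passes 64 MB, then evicts least-recently-used surfaces until it drops
# below ~32 MB.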
|
gentooza/Freedom-Fighters-of-Might-Magic
|
src/gamelib/gummworld2/pygametext.py
|
Python
|
gpl-3.0
| 21,402 | 0.00271 |
# -*- coding: utf-8 -*-
"""
equip.analysis.flow
~~~~~~~~~~~~~~~~~~~
Extract the control flow graphs from the bytecode.
:copyright: (c) 2014 by Romain Gaucher (@rgaucher)
:license: Apache 2, see LICENSE for more details.
"""
import opcode
from operator import itemgetter, attrgetter
from itertools import tee, izip
from ..utils.log import logger
from ..utils.structures import intervalmap
from ..bytecode.utils import show_bytecode
from .graph import DiGraph, Edge, Node, Walker, EdgeVisitor, Tree, TreeNode
from .graph import DominatorTree, ControlDependence
from .block import BasicBlock
from .ast import Statement, Expression
from .constraint import Constraint
from .python.effects import get_stack_effect
from .python.opcodes import *
class ControlFlow(object):
"""
Performs the control-flow analysis on a ``Declaration`` object. It iterates
over its bytecode and builds the basic block. The final representation
leverages the ``DiGraph`` structure, and contains an instance of the
``DominatorTree``.
"""
E_TRUE = 'TRUE'
E_FALSE = 'FALSE'
E_UNCOND = 'UNCOND'
E_COND = 'COND'
E_EXCEPT = 'EXCEPT'
E_FINALLY = 'FINALLY'
E_RETURN = 'RETURN'
E_RAISE = 'RAISE'
E_END_LOOP = 'END_LOOP'
N_ENTRY = 'ENTRY'
N_IMPLICIT_RETURN = 'IMPLICIT_RETURN'
N_UNKNOWN = 'UNKNOWN'
N_LOOP = 'LOOP'
N_IF = 'IF'
N_EXCEPT = 'EXCEPT'
N_CONDITION = 'CONDITION'
CFG_TMP_RETURN = -1
CFG_TMP_BREAK = -2
CFG_TMP_RAISE = -3
CFG_TMP_CONTINUE = -4
def __init__(self, decl):
self._decl = decl
self._blocks = None
self._block_idx_map = {}
self._block_nodes = {}
self._block_intervals = None
self._conds = None
self._frames = None
self._graph = None
self._entry = None
self._exit = None
self._entry_node = None
self._exit_node = None
self._dom = None
self._cdg = None
self.analyze()
@property
def decl(self):
return self._decl
@decl.setter
def decl(self, value):
self._decl = value
@property
def entry(self):
return self._entry
@entry.setter
def entry(self, value):
self._entry = value
@property
def entry_node(self):
return self._entry_node
@entry_node.setter
def entry_node(self, value):
self._entry_node = value
@property
def exit(self):
return self._exit
@exit.setter
def exit(self, value):
self._exit = value
@property
def exit_node(self):
return self._exit_node
@exit_node.setter
def exit_node(self, value):
self._exit_node = value
@property
def blocks(self):
"""
Returns the basic blocks created during the control flow analysis.
"""
return self._blocks
@property
def block_indices_dict(self):
"""
Returns the mapping from bytecode indices to basic blocks.
"""
return self._block_idx_map
@property
def block_nodes_dict(self):
"""
Returns the mapping from basic blocks to CFG nodes.
"""
return self._block_nodes
@property
def blocks_intervals(self):
if self._block_intervals is None:
self._block_intervals = intervalmap()
for block in self.blocks:
self._block_intervals[block.index:block.index + block.length] = block
return self._block_intervals
@property
def block_constraints(self):
"""
Returns the constraints associated with each ``N_CONDITION`` node
in the CFG. This is lazily computed.
"""
if self._conds is None:
self.compute_conditions()
return self._conds
@property
def frames(self):
return self._frames
@property
def graph(self):
"""
Returns the underlying graph that holds the CFG.
"""
return self._graph
@property
def dominators(self):
"""
Returns the ``DominatorTree`` that contains:
- Dominator/Post-dominator tree (dict of IDom/PIDom)
- Dominance/Post-dominance frontier (dict of CFG node -> set CFG nodes)
This is lazily computed.
"""
if self._dom is None:
self._dom = DominatorTree(self)
return self._dom
@property
def control_dependence(self):
"""
Returns the ``ControlDependence`` graph. This is lazily computed.
"""
if self._cdg is None:
self._cdg = ControlDependence(self)
return self._cdg
def analyze(self):
"""
Performs the CFA and stores the resulting CFG.
"""
bytecode = self.decl.bytecode
self.entry = BasicBlock(BasicBlock.ENTRY, self.decl, -1)
self.exit = BasicBlock(BasicBlock.IMPLICIT_RETURN, self.decl, -1)
self._blocks = ControlFlow.make_blocks(self.decl, bytecode)
self.__build_flowgraph(bytecode)
# logger.debug("CFG(%s) :=\n%s", self.decl, self.graph.to_dot())
def __build_flowgraph(self, bytecode):
g = DiGraph(multiple_edges=False)
self.entry_node = g.make_add_node(kind=ControlFlow.N_ENTRY, data=self._entry)
self.exit_node = g.make_add_node(kind=ControlFlow.N_IMPLICIT_RETURN, data=self._exit)
self._block_idx_map = {}
self._block_nodes = {}
# Connect entry/implicit return blocks
last_block_index, last_block = -1, None
for block in self.blocks:
self._block_idx_map[block.index] = block
node_kind = ControlFlow.get_kind_from_block(block)
block_node = g.make_add_node(kind=node_kind, data=block)
self._block_nodes[block] = block_node
if block.index == 0:
g.make_add_edge(self.entry_node, self._block_nodes[block], kind=ControlFlow.E_UNCOND)
if block.index >= last_block_index:
last_block = block
last_block_index = block.index
g.make_add_edge(self._block_nodes[last_block], self.exit_node, kind=ControlFlow.E_UNCOND)
sorted_blocks = sorted(self.blocks, key=attrgetter('_index'))
i, length = 0, len(sorted_blocks)
while i < length:
cur_block = sorted_blocks[i]
if cur_block.jumps:
# Connect the current block to its jump targets
for (jump_index, branch_kind) in cur_block.jumps:
if jump_index <= ControlFlow.CFG_TMP_RETURN:
continue
target_block = self._block_idx_map[jump_index]
g.make_add_edge(
self._block_nodes[cur_block], self._block_nodes[target_block], kind=branch_kind)
i += 1
self._graph = g
self.__finalize()
self._graph.freeze()
logger.debug("CFG :=\n%s", self._graph.to_dot())
def __finalize(self):
def has_true_false_branches(list_edges):
has_true, has_false = False, False
for edge in list_edges:
if edge.kind == ControlFlow.E_TRUE: has_true = True
elif edge.kind == ControlFlow.E_FALSE: has_false = True
return has_true and has_false
def get_cfg_tmp_values(node):
values = set()
for (jump_index, branch_kind) in node.data.jumps:
if jump_index <= ControlFlow.CFG_TMP_RETURN:
values.add(jump_index)
return values
def get_parent_loop(node):
class BwdEdges(EdgeVisitor):
def __init__(self):
EdgeVisitor.__init__(self)
self.edges = []
def visit(self, edge):
self.edges.append(edge)
visitor = BwdEdges()
walker = Walker(self.graph, visitor, backwards=True)
walker.traverse(node)
parents = visitor.edges
node_bc_index = node.data.index
for parent_edge in parents:
parent = parent_edge.source
if parent.kind != ControlFlow.N_LOOP:
continue
# Find the loop in which the break/current node is nested in
if parent.data.index < node_bc_index and parent.data.end_target > node_bc_index:
return parent
return None
# Burn N_CONDITION nodes
for node in self.graph.nodes:
out_edges = self.graph.out_edges(node)
if len(out_edges) < 2 or not has_true_false_branches(out_edges):
continue
node.kind = ControlFlow.N_CONDITION
# Handle continue/return/break statements:
# - blocks with continue are simply connected to the parent loop
# - blocks with returns are simply connected to the IMPLICIT_RETURN
# and previous out edges removed
# - blocks with breaks are connected to the end of the current loop
# and previous out edges removed
for node in self.graph.nodes:
cfg_tmp_values = get_cfg_tmp_values(node)
if not cfg_tmp_values:
continue
if ControlFlow.CFG_TMP_BREAK in cfg_tmp_values:
parent_loop = get_parent_loop(node)
if not parent_loop:
logger.error("Cannot find parent loop for %s", node)
continue
target_block = self._block_idx_map[parent_loop.data.end_target]
out_edges = self.graph.out_edges(node)
for edge in out_edges:
self.graph.remove_edge(edge)
self.graph.make_add_edge(
node, self.block_nodes_dict[target_block], kind=ControlFlow.E_UNCOND)
if ControlFlow.CFG_TMP_RETURN in cfg_tmp_values:
# Remove existing out edges and add a RETURN edge to the IMPLICIT_RETURN
out_edges = self.graph.out_edges(node)
for edge in out_edges:
self.graph.remove_edge(edge)
self.graph.make_add_edge(node, self._exit_node, kind=ControlFlow.E_RETURN)
if ControlFlow.CFG_TMP_CONTINUE in cfg_tmp_values:
parent_loop = get_parent_loop(node)
if not parent_loop:
logger.error("Cannot find parent loop for %s", node)
continue
out_edges = self.graph.out_edges(node)
for edge in out_edges:
self.graph.remove_edge(edge)
self.graph.make_add_edge(node, parent_loop, kind=ControlFlow.E_UNCOND)
# Handle optimizations that left unreachable JUMPS
for node in self.graph.roots():
if node.kind == ControlFlow.N_ENTRY:
continue
index, lineno, op, arg, cflow_in, code_object = node.data.bytecode[0]
if op in JUMP_OPCODES:
self.graph.remove_node(node)
def compute_conditions(self):
"""
Force the computation of condition constraints on the entire CFG.
"""
self._conds = {}
for node in self.graph.nodes:
if node.kind != ControlFlow.N_CONDITION:
continue
self.__record_condition(node)
# Parses the conditions and converts them into symbolic conditions for
# use in path-sensitive analysis. This essentially builds a simple
# AST for the conditional only.
def __record_condition(self, node):
block = node.data
bytecode = node.data.bytecode
length = len(bytecode)
if bytecode[length - 1][2] in NO_FALL_THROUGH:
return
i = length - 2
cond_stack_size = 1 # Current operator takes one from the stack
condition_bytecode = []
k = i
while cond_stack_size != 0:
condition_bytecode.insert(0, bytecode[k])
try:
pop, push = get_stack_effect(bytecode[k][2], bytecode[k][3])
except Exception:
# Skip entirely the creation of conditionals
return
cond_stack_size += (pop - push)
k -= 1
if not condition_bytecode:
return
def process_children(parent, j):
if j < 0:
return j - 1, None
index, lineno, op, arg, cflow_in, code_object = condition_bytecode[j]
pop, push = get_stack_effect(op, arg)
current_node = TreeNode(kind=opcode.opname[op], data=(op, arg))
if pop < 1:
return j - 1, current_node
current_node.reserve_children(pop)
prev_offset = new_offset = j - 1
while pop > 0:
offset, child = process_children(current_node, new_offset)
if child is None:
break
current_node.insert_child(pop - 1, child)
prev_offset = new_offset
new_offset = offset
pop -= 1
return new_offset, current_node
cstr = Constraint()
i = len(condition_bytecode) - 1
_, root = process_children(None, i)
cstr.root = root
# Associate the out-constraint with the block
self._conds[block] = cstr
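# e.g. for "if x > 0:", the slice LOAD_NAME x / LOAD_CONST 0 / COMPARE_OP >
# becomes a three-node tree rooted at COMPARE_OP, stored as the block's
# out-constraint (illustrative; the exact opcodes depend on the Python version).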
BLOCK_NODE_KIND = {
BasicBlock.UNKNOWN: N_UNKNOWN,
BasicBlock.ENTRY: N_ENTRY,
BasicBlock.IMPLICIT_RETURN: N_IMPLICIT_RETURN,
BasicBlock.LOOP: N_LOOP,
BasicBlock.IF: N_IF,
BasicBlock.EXCEPT: N_EXCEPT,
}
@staticmethod
def get_kind_from_block(block):
return ControlFlow.BLOCK_NODE_KIND[block.kind]
@staticmethod
def get_pairs(iterable):
a, b = tee(iterable)
next(b, None)
return izip(a, b)
@staticmethod
def make_blocks(decl, bytecode):
"""
Returns the set of ``BasicBlock`` that are encountered in the current bytecode.
Each block is annotated with its qualified jump targets (if any).
:param decl: The current declaration object.
:param bytecode: The bytecode associated with the declaration object.
"""
blocks = set()
block_map = {} # bytecode index -> block
logger.debug("CFG:\n%s", show_bytecode(bytecode))
i, length = 0, len(bytecode)
start_index = [j for j in range(length) if bytecode[j][0] == 0][0]
prev_co = bytecode[start_index][5]
prev_op = None
slice_bytecode = [tpl for tpl in bytecode[start_index:] if tpl[5] == prev_co]
slice_length = len(slice_bytecode)
known_targets = ControlFlow.find_targets(slice_bytecode)
known_targets.add(0)
known_targets.add(1 + max([tpl[0] for tpl in slice_bytecode]))
known_targets = list(known_targets)
known_targets.sort()
slice_bytecode_indexed = {}
idx = 0
for l in slice_bytecode:
index = l[0]
slice_bytecode_indexed[index] = (l, idx)
idx += 1
for start_index, end_index in ControlFlow.get_pairs(known_targets):
index, lineno, op, arg, cflow_in, code_object = slice_bytecode_indexed[start_index][0]
block_kind = ControlFlow.block_kind_from_op(op)
cur_block = BasicBlock(block_kind, decl, start_index)
cur_block.length = end_index - start_index - 1
i = slice_bytecode_indexed[start_index][1]
try:
length = slice_bytecode_indexed[end_index][1]
if length >= slice_length:
length = slice_length
except KeyError:
length = slice_length
while i < length:
index, lineno, op, arg, cflow_in, code_object = slice_bytecode[i]
if op in JUMP_OPCODES:
jump_address = arg
if op in opcode.hasjrel:
jump_address = arg + index + 3
if op in (SETUP_FINALLY, SETUP_EXCEPT, SETUP_WITH):
kind = ControlFlow.E_UNCOND
if op == SETUP_FINALLY: kind = ControlFlow.E_FINALLY
if op in (SETUP_EXCEPT, SETUP_WITH): kind = ControlFlow.E_EXCEPT
cur_block.end_target = jump_address
cur_block.add_jump(jump_address, kind)
elif op in (JUMP_ABSOLUTE, JUMP_FORWARD):
cur_block.add_jump(jump_address, ControlFlow.E_UNCOND)
elif op in (POP_JUMP_IF_FALSE, JUMP_IF_FALSE_OR_POP, FOR_ITER):
cur_block.add_jump(jump_address, ControlFlow.E_FALSE)
elif op in (POP_JUMP_IF_TRUE, JUMP_IF_TRUE_OR_POP):
cur_block.add_jump(jump_address, ControlFlow.E_TRUE)
elif op == SETUP_LOOP:
cur_block.kind = BasicBlock.LOOP
cur_block.end_target = jump_address
elif op == RETURN_VALUE:
cur_block.has_return_path = True
cur_block.add_jump(ControlFlow.CFG_TMP_RETURN, ControlFlow.E_RETURN)
elif op == BREAK_LOOP:
cur_block.has_return_path = True
cur_block.add_jump(ControlFlow.CFG_TMP_BREAK, ControlFlow.E_UNCOND)
elif op == CONTINUE_LOOP:
cur_block.has_return_path = False
cur_block.add_jump(ControlFlow.CFG_TMP_CONTINUE, ControlFlow.E_UNCOND)
elif op == RAISE_VARARGS:
cur_block.has_return_path = False
cur_block.add_jump(ControlFlow.CFG_TMP_RAISE, ControlFlow.E_UNCOND)
prev_op = op
i += 1
# If the last opcode is not in NO_FALL_THROUGH, connect the fall-through edge
if not cur_block.has_return_path and op not in NO_FALL_THROUGH and i < slice_length:
kind = ControlFlow.E_UNCOND
if op in (POP_JUMP_IF_FALSE, JUMP_IF_FALSE_OR_POP, FOR_ITER):
kind = ControlFlow.E_TRUE
if op in (POP_JUMP_IF_TRUE, JUMP_IF_TRUE_OR_POP):
kind = ControlFlow.E_FALSE
cur_block.fallthrough = True
fallthrough_address = slice_bytecode[i][0]
cur_block.add_jump(fallthrough_address, kind)
else:
cur_block.fallthrough = False
block_map[start_index] = cur_block
blocks.add(cur_block)
logger.debug("Blocks := %s", blocks)
return blocks
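# Illustrative: a "while" loop compiles to a SETUP_LOOP block (kind LOOP, with
# end_target at the loop exit) followed by condition/body blocks whose jumps
# carry E_TRUE/E_FALSE/E_UNCOND edge kinds, as annotated above.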
@staticmethod
def create_statements(block, bytecode, container):
"""
Creates a set of statements out of the basic block.
:param block: The parent basic block.
:param bytecode: The bytecode of the basic block.
:param container: The list that will contain all the statements in this basic block.
"""
i = len(bytecode) - 1
while i >= 0:
stack_effect = 0
j = i
is_first = True
while j >= 0:
try:
pop, push = get_stack_effect(bytecode[j][2], bytecode[j][3])
stack_effect += (pop - push) if not is_first else pop
is_first = False
if stack_effect == 0:
stmt = Statement(block, j, i)
container.insert(0, stmt)
break
j -= 1
except Exception:
stmt = Statement(block, i, i)
container.insert(0, stmt)
break
i = j
i -= 1
@staticmethod
def block_kind_from_op(op):
if op in (FOR_ITER,):
return BasicBlock.LOOP
# Cannot make the decision at this point, need to await the finalization
# of the CFG
return BasicBlock.UNKNOWN
@staticmethod
def find_targets(bytecode):
targets = set()
i, length = 0, len(bytecode)
while i < length:
index, lineno, op, arg, cflow_in, code_object = bytecode[i]
if op in JUMP_OPCODES:
jump_address = arg
if op in opcode.hasjrel:
jump_address = arg + index + 3
targets.add(jump_address)
if op not in NO_FALL_THROUGH:
targets.add(bytecode[i + 1][0])
i += 1
return targets
def __repr__(self):
return 'ControlFlow(decl=%s, blocks=%d)' % (self.decl, len(self.blocks))
|
neuroo/equip
|
equip/analysis/flow.py
|
Python
|
apache-2.0
| 18,228 | 0.012783 |
import os
import time
import hashlib
import logging
from base64 import b64encode
from collections import OrderedDict, defaultdict
from twisted.internet.task import LoopingCall
from twisted.internet.defer import Deferred
from dispersy.authentication import MemberAuthentication
from dispersy.candidate import Candidate
from dispersy.community import Community
from dispersy.conversion import DefaultConversion
from dispersy.destination import CandidateDestination
from dispersy.distribution import DirectDistribution
from dispersy.message import Message
from dispersy.resolution import PublicResolution
from dispersy.requestcache import RandomNumberCache
from market.community.blockchain.conversion import BlockchainConversion
from market.community.payload import ProtobufPayload
from market.database.datamanager import BlockchainDataManager
from market.models.block import Block
from market.models.block_index import BlockIndex
from market.models.contract import Contract
from market.util.misc import median
from market.util.uint256 import full_to_uint256, compact_to_uint256, uint256_to_compact
from market.models import ObjectType
COMMIT_INTERVAL = 60
BLOCK_CREATION_INTERNAL = 1
BLOCK_TARGET_SPACING = 30 # 10 * 60
BLOCK_TARGET_TIMESPAN = 300 # 14 * 24 * 60 * 60
BLOCK_TARGET_BLOCKSPAN = BLOCK_TARGET_TIMESPAN / BLOCK_TARGET_SPACING
BLOCK_DIFFICULTY_INIT = 0x05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
BLOCK_DIFFICULTY_MIN = 0x05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
BLOCK_GENESIS_HASH = '\00' * 32
MAX_CLOCK_DRIFT = 15 * 60
MAX_PACKET_SIZE = 1500
class SignatureRequestCache(RandomNumberCache):
def __init__(self, community):
super(SignatureRequestCache, self).__init__(community.request_cache, u'signature-request')
def on_timeout(self):
pass
class BlockRequestCache(RandomNumberCache):
def __init__(self, community, block_id):
super(BlockRequestCache, self).__init__(community.request_cache, u'block-request')
self.community = community
self.block_id = block_id
def on_timeout(self):
# Retry the block download
self.community.send_block_request(self.block_id)
class TraversalRequestCache(RandomNumberCache):
def __init__(self, community, contract_id, contract_type, deferred, min_responses, max_responses):
super(TraversalRequestCache, self).__init__(community.request_cache, u'traversal-request')
self.logger = community.logger
self.contract_id = contract_id
self.contract_type = contract_type
self.deferred = deferred
self.min_responses = min_responses
self.max_responses = max_responses
self.responses = {}
self.public_keys = []
def callback(self):
responses_sorted = sorted(self.responses.items(), key=lambda item: item[1])
if responses_sorted and responses_sorted[-1][1] >= self.min_responses:
self.deferred.callback(responses_sorted[-1][0])
else:
self.logger.warning('Not enough similar responses to traversal-request')
self.deferred.errback()
def add_response(self, public_key, response_tuple):
# Only allow 1 response per peer
if public_key in self.public_keys:
return False
self.public_keys.append(public_key)
self.responses[response_tuple] = self.responses.get(response_tuple, 0) + 1
# If we already have all responses there is no need to wait for the timeout
if sum(self.responses.values()) >= self.max_responses:
self.callback()
return True
return False
def on_timeout(self):
self.callback()
class BlockchainCommunity(Community):
def __init__(self, dispersy, master, my_member):
super(BlockchainCommunity, self).__init__(dispersy, master, my_member)
self.logger = logging.getLogger('BlockchainLogger')
self.incoming_contracts = OrderedDict()
self.incoming_blocks = {}
self.data_manager = None
def initialize(self, verifier=True, **db_kwargs):
super(BlockchainCommunity, self).initialize()
self.initialize_database(**db_kwargs)
if verifier:
self.register_task('create_block', LoopingCall(self.create_block)).start(BLOCK_CREATION_INTERNAL)
self.register_task('commit', LoopingCall(self.data_manager.commit)).start(COMMIT_INTERVAL)
self.logger.info('BlockchainCommunity initialized')
def initialize_database(self, database_fn=''):
if database_fn:
database_fn = os.path.join(self.dispersy.working_directory, database_fn)
self.data_manager = BlockchainDataManager(database_fn)
self.data_manager.initialize()
@classmethod
def get_master_members(cls, dispersy):
# generated: Fri Feb 24 11:22:22 2017
# curve: None
# len: 571 bits ~ 144 bytes signature
# pub: 170 3081a7301006072a8648ce3d020106052b81040027038192000407b
# acf5ae4d3fe94d49a7f94b7239e9c2d878b29f0fbdb7374d5b6a09d9d6fba80d
# 3807affd0ba45ba1ac1c278ca59bec422d8a44b5fefaabcdd62c2778414c01da
# 4578b304b104b00eec74de98dcda803b79fd1783d76cc1bd7aab75cfd8fff982
# 7a9647ae3c59423c2a9a984700e7cb43b881a6455574032cc11dba806dba9699
# f54f2d30b10eed5c7c0381a0915a5
# pub-sha1 56553661e30b342b2fc39f1a425eb612ef8b8c33
# -----BEGIN PUBLIC KEY-----
# MIGnMBAGByqGSM49AgEGBSuBBAAnA4GSAAQHus9a5NP+lNSaf5S3I56cLYeLKfD7
# 23N01bagnZ1vuoDTgHr/0LpFuhrBwnjKWb7EItikS1/vqrzdYsJ3hBTAHaRXizBL
# EEsA7sdN6Y3NqAO3n9F4PXbMG9eqt1z9j/+YJ6lkeuPFlCPCqamEcA58tDuIGmRV
# V0AyzBHbqAbbqWmfVPLTCxDu1cfAOBoJFaU=
# -----END PUBLIC KEY-----
master_key = '3081a7301006072a8648ce3d020106052b81040027038192000407bacf5ae4d3fe94d49a7f94b7239e9c2d878b29' + \
'f0fbdb7374d5b6a09d9d6fba80d3807affd0ba45ba1ac1c278ca59bec422d8a44b5fefaabcdd62c2778414c01da4' + \
'578b304b104b00eec74de98dcda803b79fd1783d76cc1bd7aab75cfd8fff9827a9647ae3c59423c2a9a984700e7c' + \
'b43b881a6455574032cc11dba806dba9699f54f2d30b10eed5c7c0381a0915a5'
master = dispersy.get_member(public_key=master_key.decode('hex'))
return [master]
def initiate_meta_messages(self):
meta_messages = super(BlockchainCommunity, self).initiate_meta_messages()
return meta_messages + [
Message(self, u"signature-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_signature_request),
Message(self, u"signature-response",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_signature_response),
Message(self, u"contract",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_contract),
Message(self, u"block-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_block_request),
Message(self, u"block",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_block),
Message(self, u"traversal-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_traversal_request),
Message(self, u"traversal-response",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_traversal_response)
]
def initiate_conversions(self):
return [DefaultConversion(self), BlockchainConversion(self)]
def get_verifiers(self):
return list(self.dispersy_yield_verified_candidates())
def send_message(self, msg_type, candidates, payload_dict):
self.logger.debug('Sending %s message to %d candidate(s)', msg_type, len(candidates))
meta = self.get_meta_message(msg_type)
message = meta.impl(authentication=(self.my_member,),
distribution=(self.claim_global_time(),),
destination=candidates,
payload=(payload_dict,))
return self.dispersy.store_update_forward([message], False, False, True)
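# store=False, update=False, forward=True: the message is only pushed to the
# destination candidates, not persisted or timeline-updated locally.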
def multicast_message(self, msg_type, payload_dict, exclude=None):
candidates = self.get_verifiers()
if exclude in candidates:
candidates.remove(exclude)
return self.send_message(msg_type, tuple(candidates), payload_dict)
def send_signature_request(self, contract, candidate):
cache = self.request_cache.add(SignatureRequestCache(self))
return self.send_message(u'signature-request', (candidate,), {'identifier': cache.number,
'contract': contract.to_dict()})
def on_signature_request(self, messages):
for message in messages:
contract = Contract.from_dict(message.payload.dictionary['contract'])
if contract is None:
self.logger.warning('Dropping invalid signature-request from %s', message.candidate.sock_addr)
continue
elif not contract.verify(message.candidate.get_member()):
self.logger.warning('Dropping signature-request with incorrect signature')
continue
self.logger.debug('Got signature-request from %s', message.candidate.sock_addr)
if self.finalize_contract(contract, sign=True):
self.send_signature_response(message.candidate, contract, message.payload.dictionary['identifier'])
self.incoming_contracts[contract.id] = contract
self.multicast_message(u'contract', {'contract': contract.to_dict()})
def send_signature_response(self, candidate, contract, identifier):
return self.send_message(u'signature-response', (candidate,), {'identifier': identifier,
'contract': contract.to_dict()})
def on_signature_response(self, messages):
for message in messages:
cache = self.request_cache.get(u'signature-request', message.payload.dictionary['identifier'])
if not cache:
self.logger.warning("Dropping unexpected signature-response from %s", message.candidate.sock_addr)
continue
contract = Contract.from_dict(message.payload.dictionary['contract'])
if contract is None:
self.logger.warning('Dropping invalid signature-response from %s', message.candidate.sock_addr)
continue
elif not contract.verify(message.candidate.get_member()):
self.logger.warning('Dropping signature-response with incorrect signature')
continue
self.logger.debug('Got signature-response from %s', message.candidate.sock_addr)
if self.finalize_contract(contract):
self.incoming_contracts[contract.id] = contract
self.multicast_message(u'contract', {'contract': contract.to_dict()})
def on_contract(self, messages):
for message in messages:
contract = Contract.from_dict(message.payload.dictionary['contract'])
if contract is None:
self.logger.warning('Dropping invalid contract from %s', message.candidate.sock_addr)
continue
elif self.incoming_contracts.get(contract.id) or self.data_manager.get_contract(contract.id):
self.logger.debug('Dropping contract %s (duplicate)', b64encode(contract.id))
continue
# Preliminary check to see if contract is allowed. A final check will be performed in check_block.
if not self.check_contract(contract, fail_without_parent=False):
self.logger.warning('Dropping contract %s (check failed)', b64encode(contract.id))
continue
self.logger.debug('Got contract %s', b64encode(contract.id))
# Forward if needed
if contract.id not in self.incoming_contracts:
self.incoming_contracts[contract.id] = contract
self.multicast_message(u'contract', {'contract': contract.to_dict()}, exclude=message.candidate)
def send_block_request(self, block_id):
self.request_cache.add(BlockRequestCache(self, block_id))
verifiers = self.get_verifiers()
if verifiers:
self.send_message(u'block-request', (verifiers[0],), {'block_id': block_id})
def on_block_request(self, messages):
for message in messages:
block_id = message.payload.dictionary['block_id']
self.logger.debug('Got block-request for id %s', b64encode(block_id))
block = self.data_manager.get_block(block_id)
if block is not None:
self.send_message(u'block', (message.candidate,), {'block': block.to_dict()})
def on_block(self, messages):
for message in messages:
block = Block.from_dict(message.payload.dictionary['block'])
if not block:
self.logger.warning('Dropping invalid block from %s', message.candidate.sock_addr)
continue
# If we're trying to download this block, stop it. This needs to happen before any additional checks.
# TODO: fix this
for cache in self.request_cache._identifiers.values():
if isinstance(cache, BlockRequestCache) and cache.block_id == block.id:
self.request_cache.pop(cache.prefix, cache.number)
if not self.check_block(block):
self.logger.warning('Dropping illegal block from %s', message.candidate.sock_addr)
continue
self.logger.debug('Got block %s', b64encode(block.id))
# Are we dealing with an orphan block?
if block.previous_hash != BLOCK_GENESIS_HASH and not self.data_manager.get_block(block.previous_hash):
# Postpone processing the current block and request missing blocks
self.incoming_blocks[block.id] = block
# TODO: address issues with memory filling up
self.send_block_request(block.previous_hash)
self.logger.debug('Postpone block %s', b64encode(block.id))
continue
if self.process_block(block):
self.logger.debug('Added received block with %s contract(s)', len(block.contracts))
self.process_blocks_after(block)
def process_blocks_after(self, block):
# Process any orphan blocks that depend on the current block
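        # Note: dict.values() returns a list copy under Python 2, so deleting
        # entries from incoming_blocks while looping over it is safe here.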
for orphan in self.incoming_blocks.values():
if orphan.previous_hash == block.id:
del self.incoming_blocks[orphan.id]
if self.process_block(orphan):
self.logger.debug('Added postponed block with %s contract(s)', len(orphan.contracts))
self.process_blocks_after(orphan)
def process_block(self, block):
# We have already checked the proof of this block, but not whether the target_difficulty itself is as expected.
        # Note that we can't do this in check_block, because at that time the previous block may not be known yet.
prev_block = self.data_manager.get_block(block.previous_hash)
if block.target_difficulty != self.get_next_difficulty(prev_block):
self.logger.debug('Block processing failed (unexpected target difficulty)')
return False
# Save block
self.data_manager.add_block(block)
# Get best chain
latest_index = self.data_manager.get_block_indexes(limit=1)[0]
# Calculate height of the chain this block is the head of
block_ids = []
from_height = 0
cur_block = block
while cur_block:
block_ids.append(cur_block.id)
block_index = self.data_manager.get_block_index(cur_block.previous_hash)
if block_index is not None:
# We can connect to the best chain
from_height = block_index.height
break
cur_block = self.data_manager.get_block(cur_block.previous_hash)
# Make sure that we are not dealing with a chain of orphan blocks
if cur_block is None and block_ids[-1] != BLOCK_GENESIS_HASH:
self.logger.error('Block processing failed (chain of orphan blocks)')
return False
# For now, the longest chain wins
if len(block_ids) + from_height > latest_index.height:
self.data_manager.remove_block_indexes(from_height + 1)
for index, block_id in enumerate(reversed(block_ids)):
self.data_manager.add_block_index(BlockIndex(block_id, from_height + 1 + index))
# Make sure we stop trying to create blocks with the contracts in this block
for contract in block.contracts:
if contract.id in self.incoming_contracts:
del self.incoming_contracts[contract.id]
return True
def check_block(self, block):
if self.get_block_packet_size(block) > MAX_PACKET_SIZE:
self.logger.debug('Block failed check (block too large)')
return False
if not self.check_proof(block):
# Don't log message when we created the block
if block.creator != self.my_member.public_key:
self.logger.debug('Block failed check (incorrect proof)')
return False
if not block.verify():
self.logger.debug('Block failed check (invalid signature)')
return False
if self.data_manager.get_block(block.id):
self.logger.debug('Block failed check (duplicate block)')
return False
if block.time > int(time.time()) + MAX_CLOCK_DRIFT:
self.logger.debug('Block failed check (max clock drift exceeded)')
return False
for contract in block.contracts:
if block.time < contract.time:
self.logger.debug('Block failed check (block created before contract)')
return False
if not self.check_contract(contract):
self.logger.warning('Block check failed (contract check failed)')
self.incoming_contracts.pop(contract.id, None)
return False
if len(block.contracts) != len(set([contract.id for contract in block.contracts])):
self.logger.debug('Block failed check (duplicate contracts)')
return False
if block.merkle_root_hash != block.merkle_tree.build():
self.logger.debug('Block failed check (incorrect merkle root hash)')
return False
past_blocks = self.get_past_blocks(block, 11)
if past_blocks and block.time < median([b.time for b in past_blocks]):
self.logger.debug('Block failed check (block time smaller than median time of past 11 blocks)')
return False
return True
def check_proof(self, block):
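        # Interpret the SHA-256 digest of the serialized block as a 256-bit
        # integer; the proof of work is valid when it falls below the target.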
proof = hashlib.sha256(str(block)).digest()
return full_to_uint256(proof) < block.target_difficulty
def create_block(self):
latest_index = self.data_manager.get_block_indexes(limit=1)[0]
prev_block = self.data_manager.get_block(latest_index.block_id) if latest_index is not None else None
block = Block()
block.previous_hash = prev_block.id if prev_block is not None else BLOCK_GENESIS_HASH
block.target_difficulty = self.get_next_difficulty(prev_block)
block.time = int(time.time())
# Placeholder information (for calculating packet size)
block.merkle_root_hash = block.merkle_tree.build()
block.sign(self.my_member)
# Find dependencies
contracts = []
dependencies = defaultdict(list)
for contract in self.incoming_contracts.itervalues():
if contract.previous_hash:
# Get the previous contract from memory or the database
prev_contract = self.incoming_contracts.get(contract.previous_hash) or \
self.data_manager.get_contract(contract.previous_hash)
on_blockchain = self.data_manager.contract_on_blockchain(prev_contract.id) if prev_contract else False
# We need to wait until the previous contract is received and on the blockchain
if not on_blockchain:
dependencies[contract.id].append(prev_contract)
continue
contracts.append(contract)
# Add contracts to block
while contracts:
contract = contracts.pop(0)
block.contracts.append(contract)
if self.get_block_packet_size(block) > MAX_PACKET_SIZE:
block.contracts.pop()
break
if contract.id in dependencies:
# Put dependencies at the front of the list, so they will be processed in the next iterations
for index, dependency in enumerate(dependencies[contract.id]):
contracts.insert(index, dependency)
# Calculate final merkle root hash + sign block
block.merkle_root_hash = block.merkle_tree.build()
block.sign(self.my_member)
if self.check_block(block):
self.logger.debug('Created block with target difficulty 0x%064x', block.target_difficulty)
if self.process_block(block):
self.logger.debug('Added created block with %s contract(s)', len(block.contracts))
self.multicast_message(u'block', {'block': block.to_dict()})
return block
def get_next_difficulty(self, block):
# Determine difficulty for the next block
if block is not None:
target_difficulty = block.target_difficulty
# Go back BLOCK_TARGET_BLOCKSPAN
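            # Retargeting: scale the target by the ratio of the time the last
            # BLOCK_TARGET_BLOCKSPAN blocks actually took to the expected
            # BLOCK_TARGET_TIMESPAN. Blocks arriving too fast shrink the
            # target (harder); blocks arriving too slow grow it (easier).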
past_blocks = self.get_past_blocks(block, BLOCK_TARGET_BLOCKSPAN)
if past_blocks:
target_difficulty *= float(block.time - past_blocks[-1].time) / BLOCK_TARGET_TIMESPAN
else:
target_difficulty = BLOCK_DIFFICULTY_INIT
target_difficulty = min(target_difficulty, BLOCK_DIFFICULTY_MIN)
return compact_to_uint256(uint256_to_compact(target_difficulty))
def get_past_blocks(self, block, num_past):
result = []
current = block
for _ in range(num_past):
current = self.data_manager.get_block(current.previous_hash)
if current is None:
return None
result.append(current)
return result
def get_block_packet_size(self, block):
meta = self.get_meta_message(u'block')
message = meta.impl(authentication=(self.my_member,),
distribution=(self.claim_global_time(),),
destination=(Candidate(('1.1.1.1', 1), False),),
payload=({'block': block.to_dict()},))
return len(message.packet)
def check_contract(self, contract, fail_without_parent=True):
if not contract.verify():
self.logger.debug('Contract failed check (invalid signature)')
return False
if contract.previous_hash and fail_without_parent:
prev_contract = self.incoming_contracts.get(contract.previous_hash) or \
self.data_manager.get_contract(contract.previous_hash)
if prev_contract is None:
self.logger.error('Contract failed check (parent is unknown)')
return False
return True
def begin_contract(self, candidate, document, contract_type, from_public_key, to_public_key, previous_hash=''):
assert to_public_key == self.my_member.public_key or from_public_key == self.my_member.public_key
contract = Contract()
contract.from_public_key = from_public_key
contract.to_public_key = to_public_key
contract.document = document
contract.type = contract_type
contract.previous_hash = previous_hash
contract.time = int(time.time())
contract.sign(self.my_member)
return self.send_signature_request(contract, candidate)
def finalize_contract(self, contract, sign=False):
# Final checks?
if sign:
contract.sign(self.my_member)
# Add contract to database
self.data_manager.add_contract(contract)
return True
def send_traversal_request(self, contract_id, contract_type=None, max_requests=5, min_responses=1):
# Send a message to a limited number of verifiers
verifiers = self.get_verifiers()[:max_requests]
if len(verifiers) < min_responses:
self.logger.warning('Not enough verifiers to send traversal-request')
return
# Use a request cache to keep track of the responses. We require a minimum number of (equal) responses
deferred = Deferred()
cache = self.request_cache.add(TraversalRequestCache(self, contract_id, contract_type,
deferred, min_responses, len(verifiers)))
msg_dict = {'identifier': cache.number,
'contract_id': contract_id}
        if contract_type is not None:
msg_dict['contract_type'] = contract_type
self.send_message(u'traversal-request', tuple(verifiers), msg_dict)
return deferred
def on_traversal_request(self, messages):
for message in messages:
msg_dict = {'identifier': message.payload.dictionary['identifier']}
try:
contract_type = ObjectType(message.payload.dictionary['contract_type'])
except (ValueError, KeyError):
contract_type = None
contract = self.traverse_contracts(message.payload.dictionary['contract_id'],
contract_type)
if contract is not None:
msg_dict['contract'] = contract.to_dict()
# Add the number of confirmations this contract has
confirmations = self.find_confirmation_count(message.payload.dictionary['contract_id'])
if confirmations is not None:
msg_dict['confirmations'] = confirmations
self.send_message(u'traversal-response', (message.candidate,), msg_dict)
def on_traversal_response(self, messages):
for message in messages:
cache = self.request_cache.get(u'traversal-request', message.payload.dictionary['identifier'])
if not cache:
self.logger.warning("Dropping unexpected traversal-response from %s", message.candidate.sock_addr)
continue
self.logger.debug('Got traversal-response from %s', message.candidate.sock_addr)
contract = Contract.from_dict(message.payload.dictionary['contract']) \
if 'contract' in message.payload.dictionary else None
confirmations = message.payload.dictionary.get('confirmations', None)
if cache.add_response(message.candidate.get_member().public_key, (contract, confirmations)):
# If all responses are received remove the cache
self.request_cache.pop(u'traversal-request', message.payload.dictionary['identifier'])
def traverse_contracts(self, contract_id, contract_type):
contract_of_type = None
contract = self.data_manager.get_contract(contract_id) \
if self.data_manager.contract_on_blockchain(contract_id) else None
# Traverse contract chain
while contract:
if contract.type == contract_type:
contract_of_type = contract
contracts = self.data_manager.find_contracts(Contract.previous_hash == contract.id)
            # Use a fresh comprehension variable: under Python 2 a list
            # comprehension leaks its loop variable and would rebind 'contract'.
            contracts = [c for c in list(contracts) if self.data_manager.contract_on_blockchain(c.id)]
if len(contracts) == 1:
# Keep traversing the contract chain
contract = contracts[0]
continue
elif len(contracts) == 0:
# Found end of contract chain
return contract if contract_type is None else contract_of_type
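            # More than one successor already on the blockchain means the
            # contract chain has forked, so give up without a result.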
break
def find_confirmation_count(self, contract_id):
# Find the number of confirmations this contract has
block_id = self.data_manager.get_blockchain_block_id(contract_id)
block = self.data_manager.get_block(block_id)
if block:
first_index = self.data_manager.get_block_index(block.id)
last_index = self.data_manager.get_block_indexes(limit=1)[0]
if first_index and last_index:
return last_index.height - first_index.height
|
Tribler/decentralized-mortgage-market
|
market/community/blockchain/community.py
|
Python
|
gpl-3.0
| 30,409 | 0.00342 |
from __future__ import absolute_import
import os.path
from dpark.util import get_logger
logger = get_logger(__name__)
# workdir used in slaves for internal files
#
DPARK_WORK_DIR = '/tmp/dpark'
if os.path.exists('/dev/shm'):
DPARK_WORK_DIR = '/dev/shm,/tmp/dpark'
# uri of mesos master, host[:5050] or zk://...
MESOS_MASTER = 'localhost'
# mount points of MooseFS, must be available on all slaves
# for example: '/mfs' : 'mfsmaster',
MOOSEFS_MOUNT_POINTS = {
}
# consistent dir cache in client, needs a patched mfsmaster
MOOSEFS_DIR_CACHE = False
# memory used per task, like -M (--m) option in context.
MEM_PER_TASK = 200.0
def load_conf(path):
if not os.path.exists(path):
logger.error("conf %s do not exists", path)
raise Exception("conf %s do not exists" % path)
try:
data = open(path).read()
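        # The conf file is plain Python; executing it in this module's
        # globals lets it override any of the defaults defined above.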
exec(data, globals(), globals())
except Exception as e:
logger.error("error while load conf from %s: %s", path, e)
raise
|
windreamer/dpark
|
dpark/conf.py
|
Python
|
bsd-3-clause
| 991 | 0.001009 |
import cPickle as pickle
import numpy as np
import re
from math import log
from dic import Info
from config.config import config
from tools import tokenlize, comp_tuple, weights
class query_voc(object):
def __init__(self, tokens, dic):
self.tokens = tokens
self.dic = dic
class query_entry(object):
def __init__(self, doc_id, tokens, voc):
self.docID = doc_id
self.tokens = tokens
self.vector = np.zeros([1, len(voc.tokens)])
for token in self.tokens:
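            # Weight each vocabulary dimension: 'wf-idf' uses sublinear
            # (1 + log tf) term scaling, 'tf-idf' uses the raw term frequency;
            # both are multiplied by the token's inverse document frequency.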
if config.WEIGHT_TYPE == 'wf-idf':
self.vector[0, voc.dic[token]['index']] = (1 + log(self.tokens[token])) * voc.dic[token]['idf']
elif config.WEIGHT_TYPE == 'tf-idf':
self.vector[0, voc.dic[token]['index']] = self.tokens[token] * voc.dic[token]['idf']
class query_index(object):
def __init__(self, tiered_index):
self.tiered_index = tiered_index
def load_and_calc(info):
# assert isinstance(pkl_path, str), "pkl_path is not the instance of string.\n"
#
# pkl_file = open(pkl_path, 'r')
# info = pickle.load(pkl_file)
voc = query_voc(info.voc_tokens, info.voc_dic)
tiered_index = query_index(info.tiered_index)
entries = {}
for item in info.entry_tokens:
entries[item] = query_entry(item, info.entry_tokens[item], voc)
return tiered_index, voc, entries
def construct_query_vector(tokens, voc):
query_vector = np.zeros([1, len(voc.tokens)])
print tokens
for token in tokens:
if token in voc.tokens:
query_vector[0, voc.dic[token]['index']] = tokens[token]
return query_vector
def topK_get_result(index, voc, entries, tokens):
result = []
query_vector = construct_query_vector(tokens, voc)
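    # Scan the index tiers in order of importance; once PARA_TOP_K candidates
    # have been collected, the lower tiers are never touched.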
for level in index.tiered_index:
for token in tokens:
if token not in voc.tokens:
continue
docs = level[voc.dic[token]['index']]
for doc_id in docs:
if doc_id not in result:
weight = weights(query_vector, entries[doc_id].vector)
result.append((doc_id, weight))
if len(result) >= config.PARA_TOP_K:
return result[:config.PARA_TOP_K]
# if config.DEBUG:
# print '----------------query result--------------------'
# print result
# print '------------------------------------------------'
return result
def topK_query(index, voc, entries, query, index_type='tiered'):
result = []
if index_type == 'tiered':
result = topK_get_result(index, voc, entries, tokenlize(query))
result.sort(comp_tuple)
return result
def wildcard_query(index, voc, entries, query, index_type='tiered'):
tokens = tokenlize(query)
query_match = [[]]
for token in tokens:
match_tokens = []
        if '*' in token:
            # Translate the glob-style '*' into a real regex quantifier and
            # anchor the pattern, so 'ab*c' matches whole tokens such as
            # 'abzc' instead of being read as the regex 'ab*c'.
            regex = re.compile('^' + token.replace('*', '.*') + '$')
            match_tokens = [string for string in voc.tokens if re.match(regex, string)]
else:
match_tokens.append(token)
tmp = []
if len(match_tokens) > 0:
for t1 in match_tokens:
for t2 in query_match:
tmp.append(t2 + [t1])
query_match = tmp
tmp = []
for item in query_match:
q = {}
for token in item:
if token in q:
q[token] += 1
else:
q[token] = 1
tmp.append(q)
query_match = tmp
result = []
if index_type == 'tiered':
for match in query_match:
result += topK_get_result(index, voc, entries, match)
result.sort(comp_tuple)
match = []
match_id = []
for doc in result:
if doc[0] in match_id:
continue
else:
match_id.append(doc[0])
match.append(doc)
if len(match_id) > config.PARA_TOP_K:
return match
return match
|
Impavidity/SearchEngine
|
WebSite/engine/query.py
|
Python
|
mit
| 3,939 | 0.001269 |
'''
Created on 21 Dec 2013
@author: huw
'''
from ConfigParser import ConfigParser
class TftpudSettings:
'''
A class to hold the settings for the TftpudServerGui application.
'''
def __init__(self):
'''
Constructor
'''
self.saveLastUsed = False
self.defaultDirectory = ''
self.defaultIpAddress = ''
self.defaultPort = 69
self.ephemeralPorts = [2048, 65535]
self.tftpTimeout = 6.0
self.tftpRetries = 3
def write(self, f):
'''Write these TFTPUD settings to the given file handle.'''
cfg = ConfigParser()
netSection = 'Network'
cfg.add_section(netSection)
cfg.set(netSection, 'defaultIpAddress', self.defaultIpAddress)
cfg.set(netSection, 'defaultPort', self.defaultPort)
cfg.set(netSection, 'ephemeralPortsFrom', self.ephemeralPorts[0])
cfg.set(netSection, 'ephemeralPortsTo', self.ephemeralPorts[1])
tftpSection = 'TFTP'
cfg.add_section(tftpSection)
cfg.set(tftpSection, 'timeout', self.tftpTimeout)
cfg.set(tftpSection, 'retries', self.tftpRetries)
serverSection = 'Server'
cfg.add_section(serverSection)
cfg.set(serverSection, 'defaultDirectory', self.defaultDirectory)
cfg.set(serverSection, 'saveLastUsed', self.saveLastUsed)
cfg.write(f)
def read(self, f):
'''Read the settings from the given file handle.'''
cfg = ConfigParser()
cfg.readfp(f)
netSection = 'Network'
if cfg.has_section(netSection):
if cfg.has_option(netSection, 'defaultIpAddress'):
self.defaultIpAddress = cfg.get(netSection, 'defaultIpAddress')
if cfg.has_option(netSection, 'defaultPort'):
self.defaultPort = cfg.getint(netSection, 'defaultPort')
if cfg.has_option(netSection, 'ephemeralPortsFrom'):
self.ephemeralPorts[0] = cfg.getint(netSection, 'ephemeralPortsFrom')
if cfg.has_option(netSection, 'ephemeralPortsTo'):
self.ephemeralPorts[1] = cfg.getint(netSection, 'ephemeralPortsTo')
tftpSection = 'TFTP'
if cfg.has_section(tftpSection):
if cfg.has_option(tftpSection, 'timeout'):
self.tftpTimeout = cfg.getfloat(tftpSection, 'timeout')
if cfg.has_option(tftpSection, 'retries'):
self.tftpRetries = cfg.getint(tftpSection, 'retries')
serverSection = 'Server'
if cfg.has_section(serverSection):
if cfg.has_option(serverSection, 'defaultDirectory'):
self.defaultDirectory = cfg.get(serverSection, 'defaultDirectory')
if cfg.has_option(serverSection, 'saveLastUsed'):
self.saveLastUsed = cfg.getboolean(serverSection, 'saveLastUsed')
|
javert/tftpudGui
|
src/tftpudgui/qt4/TftpudSettings.py
|
Python
|
mit
| 2,975 | 0.005714 |
# -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
from PyQt4.Qsci import QsciLexerCPP
from PyQt4.QtGui import QColor
from src import editor_scheme
from src.core import settings
class Lexer(QsciLexerCPP):
""" Lexer class """
def __init__(self, *args, **kwargs):
super(Lexer, self).__init__(*args, **kwargs)
        # Configuration
self.setStylePreprocessor(True)
self.setFoldComments(True)
self.setFoldPreprocessor(True)
self.setFoldCompact(False)
self.load_highlighter()
def load_highlighter(self):
        ''' Public method: loads the syntax highlighting '''
scheme = editor_scheme.get_scheme(
settings.get_setting('editor/scheme'))
self.setDefaultPaper(QColor(scheme['BackgroundEditor']))
self.setPaper(self.defaultPaper(0))
self.setColor(QColor(scheme['Color']))
types = dir(self)
for _type in types:
if _type in scheme:
atr = getattr(self, _type)
self.setColor(QColor(scheme[_type]), atr)
def keywords(self, kset):
if kset == 1:
            # Reserved words
return ('auto break case const continue default do else enum '
'extern for goto if register return short sizeof static '
'struct switch typedef union unsigned void volatile while '
'char float int long double')
elif kset == 2:
            # Functions defined in stdio.h and stdlib.h
return ('fprintf fscanf printf scanf sprintf sscanf vfprintf '
'vprintf vsprintf fclose fflush fopen freopen remove '
'rename setbuf tmpfile tmpnam fgetc fgets fputc fputs '
'getc getchar gets putc putchar puts ungetc fread fseek '
'fsetpos ftell rewind clearerr feof ferror perror '
'abort atexit exit getenv system abs div labs ldiv '
'rand srand atof atoi atol strtod strtod strtoll '
'strtoul bsearch qsort calloc realloc malloc free '
'mblen mbtowc wctomb mbstowcs wcstombs')
        # Fall back to the default keyword sets for any other set number
        return super(Lexer, self).keywords(kset)
|
centaurialpha/edis
|
src/ui/editor/lexer.py
|
Python
|
gpl-3.0
| 2,435 | 0.000411 |
import pytest
from unittest.mock import patch
from case import mock
from kombu import Connection
class test_get_manager:
@mock.mask_modules('pyrabbit')
def test_without_pyrabbit(self):
with pytest.raises(ImportError):
Connection('amqp://').get_manager()
@mock.module_exists('pyrabbit')
def test_with_pyrabbit(self):
with patch('pyrabbit.Client', create=True) as Client:
manager = Connection('amqp://').get_manager()
assert manager is not None
Client.assert_called_with(
'localhost:15672', 'guest', 'guest',
)
@mock.module_exists('pyrabbit')
def test_transport_options(self):
with patch('pyrabbit.Client', create=True) as Client:
manager = Connection('amqp://', transport_options={
'manager_hostname': 'admin.mq.vandelay.com',
'manager_port': 808,
'manager_userid': 'george',
'manager_password': 'bosco',
}).get_manager()
assert manager is not None
Client.assert_called_with(
'admin.mq.vandelay.com:808', 'george', 'bosco',
)
|
ZoranPavlovic/kombu
|
t/unit/utils/test_amq_manager.py
|
Python
|
bsd-3-clause
| 1,196 | 0 |
# Script Version: 1.0
# Author: Te Chen
# Project: AMA3D
# Task Step: 1
import sys
import urllib2
import time
VERSION = '4.0.0'
def prepare_cath():
ver = VERSION.replace('.', '_')
download_file(ver, 'CathDomainList')
download_file(ver, 'CathNames')
download_file(ver, 'CathDomainDescriptionFile')
def download_file(ver, file_name):
url = "ftp://ftp.biochem.ucl.ac.uk/pub/cath/v%s/%s" % (ver, file_name)
file_name = url.split('/')[-1]
u = urllib2.urlopen(url)
f = open('C:/AMA3D/Nh3D/' + file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
f.close()
print "Downloaded file" + file_name
if __name__ == '__main__':
# Download necessary files when start
prepare_cath()
# This part triggers all the tasks afterwards.
print "trigger\t%s\t%d\t%d"%('', 2, 1)
sys.stdout.flush()
print "trigger\t%s\t%d\t%d"%('', 3, 1)
sys.stdout.flush()
# Write result to a file as well just for testing
with open("Domain_Result", "w") as f:
f.write("Topology\tPDB ID\tR\tResolution\tChain Length\tScore\n")
|
teheavy/AMA3D
|
Nh3D/1_AMA3D_start.py
|
Python
|
gpl-2.0
| 1,285 | 0.035798 |
'''
This script helps you scrape stock data available on Bloomberg Finance
and store them locally.
Please obey applicable local and federal laws and applicable API terms of use
when using this script. I, the creator of this script, will not be responsible
for any legal issues resulting from the use of this script.
@author Gan Tu
@version python 2 or python 3
[HOW TO CHANGE PYTHON VERSION]
This script by default should be run by Python 2.
To use this in Python 3, change the following:
1) change ALL occurrences of "urllib" to "urllib.request".
'''
import urllib
import re
import json
import os
# Stock Symbols Initialization
# Feel free to modify the file source to contain stock symbols you plan to scrape from
stocks = open("nasdaq_symbols.txt", "r").read().split("\n")
# URL Initialization
urlPrefix = "http://www.bloomberg.com/markets/api/bulk-time-series/price/"
urlAffix = "%3AUS?timeFrame="
# Only four of these are valid options for now
# 1_DAY will scrape minute-by-minute data for one day, while the others give daily close prices
# Feel free to modify them for your own needs
options = ["1_DAY", "1_MONTH", "1_YEAR", "5_YEAR"]
def setup():
try:
os.mkdir("data")
except Exception as e:
pass
for option in options:
try:
os.mkdir("data/" + option + "/")
except Exception as e:
pass
def scrap():
i = 0
while i < len(stocks):
for option in options:
file = open("data/" + option + "/" + stocks[i] + ".txt", "w")
file.close()
htmltext = urllib.urlopen(urlPrefix + stocks[i] + urlAffix + option)
try:
data = json.load(htmltext)[0]["price"]
key = "date"
if option == "1_DAY":
key = "dateTime"
file = open("data/" + option + "/" + stocks[i] + ".txt", "a")
for price in data:
file.write(stocks[i] + "," + price[key] + "," + str(price["value"]) + "\n")
file.close()
except Exception as e:
pass
i += 1
if __name__ == "__main__":
setup()
scrap()
|
Michael-Tu/tools
|
stock_scraping/stock_price_scraping_to_local.py
|
Python
|
mit
| 2,169 | 0.003688 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Stefan Mauerberger"
__copyright__ = "Copyright (C) 2017 Stefan Mauerberger"
__license__ = "GPLv3"
import numpy as np
from sys import stdout
from matplotlib import pyplot as plt
from matplotlib import animation
from plotting import prepare_map, lllat, lllon, urlat, urlon, cmap_mu, cmap_sd
from reference import c_act, dt_latlon
import h5py
dpi=150
fh = h5py.File('../dat/example.hdf5', 'r')
points = fh['points']
stations = fh['stations']
mu_C = fh['mu']
sd_C = fh['sd']
# Ratio 16:9
fig = plt.figure(figsize=(8,4.5))
fig.subplots_adjust(left=0.06, right=0.97, top=0.95, wspace=0.02, bottom=0.05)
ax_mu = fig.add_subplot(121)
ax_sd = fig.add_subplot(122)
# Subplot on the left
mu_delta = max(c_act._v0 - c_act.min, c_act.max - c_act._v0)
mu_vmax = (c_act._v0 + mu_delta).round(0)
mu_vmin = (c_act._v0 - mu_delta).round(0)
m = prepare_map(ax_mu)
x, y = m(points['lon'], points['lat'])
tpc_mu = ax_mu.tripcolor(x, y, mu_C[0,:], \
vmin=mu_vmin, vmax=mu_vmax, cmap=cmap_mu, shading='gouraud')
cbar = m.colorbar(tpc_mu, location='bottom')
cbar.set_ticks( range(mu_vmin.astype(np.int), mu_vmax.astype(np.int), 40)[1:])
#cbar.set_label('mean')
m.scatter(stations['lon'], stations['lat'], latlon=True, lw=0, color='g')
# Make a lat, lon grid with extent of the map
N = 60j
grid = np.rec.fromarrays(np.mgrid[lllat:urlat:N, lllon:urlon:N], dtype=dt_latlon)
c = c_act(grid) # Actual velocity model
# Contour lines
cnt = m.contour(grid['lon'], grid['lat'], c, levels=c_act.levels(20), latlon=True, colors='k', linewidths=0.5)
# Subplot right
m = prepare_map(ax_sd, pls=[0,0,0,0])
tpc_sd = ax_sd.tripcolor(x, y, sd_C[0,:], \
vmin=np.min(sd_C), vmax=np.max(sd_C), cmap=cmap_sd, shading='gouraud')
cbar = m.colorbar(tpc_sd, location='bottom')
vmin_sd = np.min(sd_C).round().astype(np.integer)
vmax_sd = np.max(sd_C).round().astype(np.integer)
cbar.set_ticks(range(vmin_sd, vmax_sd, 5))
#cbar.set_label('standard deviation')
m.scatter(stations['lon'], stations['lat'], latlon=True, lw=0, color='g')
# First frame; Necessary for LaTeX beamer
plt.savefig('../animation_pri.png', dpi=dpi)
def animate(i):
    global mu_C, sd_C
tpc_mu.set_array(mu_C[i,:])
tpc_sd.set_array(sd_C[i,:])
# Screen output; a very basic progress bar
p = int(100.*(i+1)/mu_C.shape[0]) # Progress
stdout.write('\r[' + p*'#' + (100-p)*'-' + '] %3i' % p + '%' )
if (i+1) == mu_C.shape[0]:
stdout.write('\n')
return tpc_mu, tpc_sd
frames = mu_C.shape[0]
duration = 30. # s
interval = 1000.*duration/frames # ms
anim = animation.FuncAnimation(fig, animate, save_count=0, \
frames=frames, interval=interval, blit=False)
# Save video
anim.save('../animation.avi', dpi=dpi, extra_args=['-vcodec', 'msmpeg4v2'])
# Last frame; Necessary for LaTeX beamer
plt.savefig('../animation_pst.png', dpi=dpi)
#plt.close()
|
mauimuc/gptt
|
src/animation.py
|
Python
|
gpl-3.0
| 2,920 | 0.012671 |
class Solution(object):
def minPathSum(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid or not grid[0]:
return 0
m = len(grid)
n = len(grid[0])
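        # Top-down DP with memoization: dp[r][c] caches the minimum path sum
        # from cell (r, c) to the bottom-right corner, moving only right/down.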
dp = []
for _ in range(m):
dp.append([None] * (n))
dp[m-1][n-1] = grid[m-1][n-1]
def solve(row, col):
if dp[row][col] is not None:
return dp[row][col]
if row == m-1:
cost = grid[row][col] + solve(row, col+1)
elif col == n-1:
cost = grid[row][col] + solve(row+1, col)
else:
cost = grid[row][col] + min(solve(row, col+1), solve(row+1, col))
dp[row][col] = cost
            # print 'dp(%s,%s) is %s' % (row, col, cost)
return cost
return solve(0, 0)
|
daicang/Leetcode-solutions
|
064-minimum-path-sum.py
|
Python
|
mit
| 877 | 0.002281 |
from django.test import TestCase
from morelia.decorators import tags
from smarttest.decorators import no_db_testcase
from tasks.factories import TaskFactory, UserFactory
@no_db_testcase
@tags(['unit'])
class TaskGetAbsoluteUrlTest(TestCase):
''' :py:meth:`tasks.models.Task.get_absolute_url` '''
def test_should_return_task_absolute_url(self):
# Arrange
owner = UserFactory.build(pk=1)
task = TaskFactory.build(owner=owner, author=owner)
# Act
url = task.get_absolute_url()
# Assert
self.assertEqual(url, '/%s/' % owner.username)
|
dryobates/testing_django
|
todo/tasks/tests/test_models.py
|
Python
|
mit
| 601 | 0 |
"""Offers a simple XML-RPC dispatcher for django_xmlrpc
Author::
Graham Binns
Credit must go to Brendan W. McAdams <[email protected]>, who
posted the original SimpleXMLRPCDispatcher to the Django wiki:
http://code.djangoproject.com/wiki/XML-RPC
New BSD License
===============
Copyright (c) 2007, Graham Binns http://launchpad.net/~codedragon
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the <ORGANIZATION> nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# This file is needed to run XMLRPC
from inspect import getargspec
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
from django.conf import settings
# If we need to debug, now we know
DEBUG = hasattr(settings, 'XMLRPC_DEBUG') and settings.XMLRPC_DEBUG
class DjangoXMLRPCDispatcher(SimpleXMLRPCDispatcher):
"""A simple XML-RPC dispatcher for Django.
Subclassess SimpleXMLRPCServer.SimpleXMLRPCDispatcher for the purpose of
overriding certain built-in methods (it's nicer than monkey-patching them,
that's for sure).
"""
def system_methodSignature(self, method):
"""Returns the signature details for a specified method
method
The name of the XML-RPC method to get the details for
"""
# See if we can find the method in our funcs dict
# TODO: Handle this better: We really should return something more
# formal than an AttributeError
func = self.funcs[method]
try:
sig = func._xmlrpc_signature
        except AttributeError:
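            # No explicit signature was attached to the function: fall back to
            # assuming a string return value and one string argument per
            # positional parameter.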
sig = {
'returns': 'string',
'args': ['string' for arg in getargspec(func)[0]],
}
return [sig['returns']] + sig['args']
|
gnowgi/gnowsys-studio
|
gstudio/xmlrpc/dispatcher.py
|
Python
|
agpl-3.0
| 3,078 | 0.00065 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gmp(AutotoolsPackage):
"""GMP is a free library for arbitrary precision arithmetic, operating
on signed integers, rational numbers, and floating-point numbers."""
homepage = "https://gmplib.org"
url = "https://ftp.gnu.org/gnu/gmp/gmp-6.1.2.tar.bz2"
version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5')
version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0', '6ef5869ae735db9995619135bd856b84')
version('5.1.3', 'a082867cbca5e898371a97bb27b31fea')
# Old version needed for a binary package in ghc-bootstrap
version('4.3.2', 'dd60683d7057917e34630b4a787932e8')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
# gmp's configure script seems to be broken; it sometimes misdetects
# shared library support. Regenerating it fixes the issue.
force_autoreconf = True
def configure_args(self):
args = ['--enable-cxx']
# This flag is necessary for the Intel build to pass `make check`
if self.spec.compiler.name == 'intel':
args.append('CXXFLAGS=-no-ftz')
return args
|
tmerrick1/spack
|
var/spack/repos/builtin/packages/gmp/package.py
|
Python
|
lgpl-2.1
| 2,575 | 0.000388 |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from unittest import mock
import ironic_inspector_client
from oslo_concurrency import processutils
from oslo_utils import units
from tripleoclient import exceptions
from tripleoclient.tests import fakes
from tripleoclient.workflows import baremetal
class TestBaremetalWorkflows(fakes.FakePlaybookExecution):
def setUp(self):
super(TestBaremetalWorkflows, self).setUp()
self.app.client_manager.workflow_engine = self.workflow = mock.Mock()
self.glance = self.app.client_manager.image = mock.Mock()
self.tripleoclient = mock.Mock()
self.app.client_manager.tripleoclient = self.tripleoclient
self.mock_playbook = mock.patch(
'tripleoclient.utils.run_ansible_playbook',
autospec=True
)
self.mock_playbook.start()
self.addCleanup(self.mock_playbook.stop)
self.node_update = [{'op': 'add',
'path': '/properties/capabilities',
'value': 'boot_option:local'},
{'op': 'add',
'path': '/driver_info/deploy_ramdisk',
'value': None},
{'op': 'add',
'path': '/driver_info/deploy_kernel',
'value': None},
{'op': 'add',
'path': '/driver_info/rescue_ramdisk',
'value': None},
{'op': 'add',
'path': '/driver_info/rescue_kernel',
'value': None}]
# Mock data
self.disks = [
{'name': '/dev/sda', 'size': 11 * units.Gi},
{'name': '/dev/sdb', 'size': 2 * units.Gi},
{'name': '/dev/sdc', 'size': 5 * units.Gi},
{'name': '/dev/sdd', 'size': 21 * units.Gi},
{'name': '/dev/sde', 'size': 13 * units.Gi},
]
for i, disk in enumerate(self.disks):
disk['wwn'] = 'wwn%d' % i
disk['serial'] = 'serial%d' % i
self.baremetal.node.list.return_value = [
mock.Mock(uuid="ABCDEFGH"),
]
self.node = mock.Mock(uuid="ABCDEFGH", properties={})
self.baremetal.node.get.return_value = self.node
self.inspector.get_data.return_value = {
'inventory': {'disks': self.disks}
}
self.existing_nodes = [
{'uuid': '1', 'driver': 'ipmi',
'driver_info': {'ipmi_address': '10.0.0.1'}},
{'uuid': '2', 'driver': 'pxe_ipmitool',
'driver_info': {'ipmi_address': '10.0.0.1', 'ipmi_port': 6235}},
{'uuid': '3', 'driver': 'foobar', 'driver_info': {}},
{'uuid': '4', 'driver': 'fake',
'driver_info': {'fake_address': 42}},
{'uuid': '5', 'driver': 'ipmi', 'driver_info': {}},
{'uuid': '6', 'driver': 'pxe_drac',
'driver_info': {'drac_address': '10.0.0.2'}},
{'uuid': '7', 'driver': 'pxe_drac',
'driver_info': {'drac_address': '10.0.0.3', 'drac_port': 6230}},
]
def test_register_or_update_success(self):
self.assertEqual(baremetal.register_or_update(
self.app.client_manager,
nodes_json=[],
instance_boot_option='local'
), [mock.ANY])
def test_introspect_success(self):
baremetal.introspect(self.app.client_manager, node_uuids=[],
run_validations=True, concurrency=20,
node_timeout=1200, max_retries=1,
retry_timeout=120)
def test_introspect_manageable_nodes_success(self):
baremetal.introspect_manageable_nodes(
self.app.client_manager, run_validations=False, concurrency=20,
node_timeout=1200, max_retries=1, retry_timeout=120,
)
def test_run_instance_boot_option(self):
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID',
instance_boot_option='netboot')
self.assertIsNone(result)
self.node_update[0].update({'value': 'boot_option:netboot'})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_instance_boot_option_not_set(self):
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID')
self.assertIsNone(result)
self.node_update[0].update({'value': ''})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_instance_boot_option_already_set_no_overwrite(self):
node_mock = mock.MagicMock()
node_mock.properties.get.return_value = ({'boot_option': 'netboot'})
self.app.client_manager.baremetal.node.get.return_value = node_mock
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID')
self.assertIsNone(result)
self.node_update[0].update({'value': 'boot_option:netboot'})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_instance_boot_option_already_set_do_overwrite(self):
node_mock = mock.MagicMock()
node_mock.properties.get.return_value = ({'boot_option': 'netboot'})
self.app.client_manager.baremetal.node.get.return_value = node_mock
result = baremetal._configure_boot(
self.app.client_manager,
node_uuid='MOCK_UUID',
instance_boot_option='local')
self.assertIsNone(result)
self.node_update[0].update({'value': 'boot_option:local'})
self.baremetal.node.update.assert_called_once_with(
mock.ANY, self.node_update)
def test_run_exception_on_node_update(self):
self.baremetal.node.update.side_effect = Exception("Update error")
self.assertRaises(
Exception,
baremetal._configure_boot,
self.app.client_manager,
node_uuid='MOCK_UUID')
self.inspector.get_data.return_value = {
'inventory': {'disks': self.disks}
}
def test_smallest(self):
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn': 'wwn2'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 4}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_smallest_with_ext(self):
self.disks[2]['wwn_with_extension'] = 'wwnext'
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn_with_extension': 'wwnext'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 4}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_largest(self):
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='largest')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn': 'wwn3'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 20}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_largest_with_ext(self):
self.disks[3]['wwn_with_extension'] = 'wwnext'
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='largest')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn_with_extension': 'wwnext'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 20}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_no_overwrite(self):
self.node.properties['root_device'] = {'foo': 'bar'}
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_with_overwrite(self):
self.node.properties['root_device'] = {'foo': 'bar'}
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest',
overwrite=True)
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn': 'wwn2'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 4}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_minimum_size(self):
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest',
minimum_size=10)
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn': 'wwn0'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 10}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_bad_inventory(self):
self.inspector.get_data.return_value = {}
self.assertRaisesRegex(exceptions.RootDeviceDetectionError,
"Malformed introspection data",
baremetal._apply_root_device_strategy,
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_no_disks(self):
self.inspector.get_data.return_value = {
'inventory': {
'disks': [{'name': '/dev/sda', 'size': 1 * units.Gi}]
}
}
self.assertRaisesRegex(exceptions.RootDeviceDetectionError,
"No suitable disks",
baremetal._apply_root_device_strategy,
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_md_device_found(self):
self.inspector.get_data.return_value = {
'inventory': {
'disks': [{'name': '/dev/md0', 'size': 99 * units.Gi},
{'name': '/dev/sda', 'size': 100 * units.Gi}]
}
}
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy=None)
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_no_data(self):
self.inspector.get_data.side_effect = (
ironic_inspector_client.ClientError(mock.Mock()))
self.assertRaisesRegex(exceptions.RootDeviceDetectionError,
"No introspection data",
baremetal._apply_root_device_strategy,
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_no_wwn_and_serial(self):
self.inspector.get_data.return_value = {
'inventory': {
'disks': [{'name': '/dev/sda', 'size': 10 * units.Gi}]
}
}
self.assertRaisesRegex(exceptions.RootDeviceDetectionError,
"Neither WWN nor serial number are known",
baremetal._apply_root_device_strategy,
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='smallest')
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_device_list(self):
baremetal._apply_root_device_strategy(
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='hda,sda,sdb,sdc')
self.assertEqual(self.baremetal.node.update.call_count, 1)
root_device_args = self.baremetal.node.update.call_args_list[0]
expected_patch = [{'op': 'add', 'path': '/properties/root_device',
'value': {'wwn': 'wwn0'}},
{'op': 'add', 'path': '/properties/local_gb',
'value': 10}]
self.assertEqual(mock.call('ABCDEFGH', expected_patch),
root_device_args)
def test_device_list_not_found(self):
self.assertRaisesRegex(exceptions.RootDeviceDetectionError,
"Cannot find a disk",
baremetal._apply_root_device_strategy,
self.app.client_manager,
node_uuid='MOCK_UUID',
strategy='hda')
self.assertEqual(self.baremetal.node.update.call_count, 0)
def test_existing_ips(self):
result = baremetal._existing_ips(self.existing_nodes)
self.assertEqual({('10.0.0.1', 623), ('10.0.0.1', 6235),
('10.0.0.2', None), ('10.0.0.3', 6230)},
set(result))
def test_with_list(self):
result = baremetal._get_candidate_nodes(
['10.0.0.1', '10.0.0.2', '10.0.0.3'],
[623, 6230, 6235],
[['admin', 'password'], ['admin', 'admin']],
self.existing_nodes)
self.assertEqual([
{'ip': '10.0.0.3', 'port': 623,
'username': 'admin', 'password': 'password'},
{'ip': '10.0.0.1', 'port': 6230,
'username': 'admin', 'password': 'password'},
{'ip': '10.0.0.3', 'port': 6235,
'username': 'admin', 'password': 'password'},
{'ip': '10.0.0.3', 'port': 623,
'username': 'admin', 'password': 'admin'},
{'ip': '10.0.0.1', 'port': 6230,
'username': 'admin', 'password': 'admin'},
{'ip': '10.0.0.3', 'port': 6235,
'username': 'admin', 'password': 'admin'},
], result)
def test_with_subnet(self):
result = baremetal._get_candidate_nodes(
'10.0.0.0/30',
[623, 6230, 6235],
[['admin', 'password'], ['admin', 'admin']],
self.existing_nodes)
self.assertEqual([
{'ip': '10.0.0.1', 'port': 6230,
'username': 'admin', 'password': 'password'},
{'ip': '10.0.0.1', 'port': 6230,
'username': 'admin', 'password': 'admin'},
], result)
def test_invalid_subnet(self):
self.assertRaises(
netaddr.core.AddrFormatError,
baremetal._get_candidate_nodes,
'meow',
[623, 6230, 6235],
[['admin', 'password'], ['admin', 'admin']],
self.existing_nodes)
@mock.patch.object(processutils, 'execute', autospec=True)
def test_success(self, mock_execute):
result = baremetal._probe_node('10.0.0.42', 623,
'admin', 'password')
self.assertEqual({'pm_type': 'ipmi',
'pm_addr': '10.0.0.42',
'pm_user': 'admin',
'pm_password': 'password',
'pm_port': 623},
result)
mock_execute.assert_called_once_with('ipmitool', '-I', 'lanplus',
'-H', '10.0.0.42',
'-L', 'ADMINISTRATOR',
'-p', '623', '-U', 'admin',
'-f', mock.ANY, 'power', 'status',
attempts=2)
@mock.patch.object(processutils, 'execute', autospec=True)
def test_failure(self, mock_execute):
mock_execute.side_effect = processutils.ProcessExecutionError()
self.assertIsNone(baremetal._probe_node('10.0.0.42', 623,
'admin', 'password'))
mock_execute.assert_called_once_with('ipmitool', '-I', 'lanplus',
'-H', '10.0.0.42',
'-L', 'ADMINISTRATOR',
'-p', '623', '-U', 'admin',
'-f', mock.ANY, 'power', 'status',
attempts=2)
|
openstack/python-tripleoclient
|
tripleoclient/tests/workflows/test_baremetal.py
|
Python
|
apache-2.0
| 19,069 | 0 |
#!/usr/bin/env python
#
# This file is part of the SSM_LinearArray (Sound Sources Mapping
# using a Linear Microphone Array)
# developed by Daobilige Su <daobilige DOT su AT student DOT uts DOT edu DOT au>
#
# This file is under the GPLv3 licence.
#
import rospy
from std_msgs.msg import String
from std_msgs.msg import Int32MultiArray
#sudo apt-get install python-pyaudio
import pyaudio
from rospy.numpy_msg import numpy_msg
import numpy as np
import time
import signal
import os
import sys
CHUNK = 3200
FORMAT = pyaudio.paInt16
CHANNELS = 4
RATE = 16000
DEV_IDX = 5
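# With CHUNK frames per buffer at a 16 kHz sample rate, the stream callback
# fires every 0.2 s with CHUNK * CHANNELS interleaved int16 samples.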
p = pyaudio.PyAudio()
pub_mic_array = rospy.Publisher("/microphone_array_raw", numpy_msg(Int32MultiArray),queue_size=1)
def callback(in_data, frame_count, time_info, status):
global np,pub_mic_array
numpydata = np.fromstring(in_data, dtype=np.int16)
print('sending...')
numpydata_msg = Int32MultiArray()
numpydata_msg.data = numpydata
pub_mic_array.publish(numpydata_msg)
return (in_data, pyaudio.paContinue)
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK,
input_device_index=DEV_IDX,
stream_callback=callback)
def signal_handler(signal, frame):
print('---stopping---')
stream.close()
p.terminate()
sys.exit()
signal.signal(signal.SIGINT, signal_handler)
def talker():
rospy.init_node('microphone_array_driver', anonymous=True)
print("---recording---")
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.close()
p.terminate()
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
|
daobilige-su/SSM_LinearArray
|
ROS/SSM_LinearArray/scripts/ps3_driver.py
|
Python
|
gpl-3.0
| 1,741 | 0.013211 |
from __future__ import unicode_literals
from logging import getLogger
from django.conf import settings
from django.core.cache import cache
from django.views.generic import View
from django.http import JsonResponse, HttpResponseBadRequest
from django.template.defaultfilters import slugify
from ratelimit.mixins import RatelimitMixin
from ..metadata.models import Organization
from ..content.models import ContentType
logger = getLogger(__name__)
class BaseApiView(RatelimitMixin, View):
cache = False
cache_timeout = 60 * 60
# Rate-limiting
ratelimit_key = 'ip'
ratelimit_rate = settings.BROWSE_RATE_LIMIT
ratelimit_block = True
ratelimit_method = 'GET'
def get(self, request, *args, **kwargs):
"""
Respond the content of `self.get_data` as JSON. Cache it, if enabled.
"""
if self.cache:
data = cache.get(self.get_cache_key())
if data:
logger.debug('API response: cache hit :: {}'.format(
self.get_cache_key()))
return data
data = JsonResponse(self.get_data(), safe=False)
if self.cache:
logger.debug('API response: cache set :: {}'.format(
self.get_cache_key()))
cache.set(self.get_cache_key(), data, self.cache_timeout)
return data
class AutoCompleteView(BaseApiView):
def get(self, request, *args, **kwargs):
"""
Store the `q` keyword in the class namespace.
"""
if not self.request.GET.get('q'):
return HttpResponseBadRequest('No search term given')
self.q = self.request.GET['q']
if len(self.q) < self.min_keyword_length:
error_str = 'Search term must be at least {} characters long.'
return HttpResponseBadRequest(
error_str.format(self.min_keyword_length))
return super(AutoCompleteView, self).get(request, *args, **kwargs)
class OrganizationsApiView(AutoCompleteView):
"""
Returns a list of organizations matching a given `q` keyword.
"""
cache = True
# API view specific
max_num_results = 50
min_keyword_length = 2
def get_cache_key(self):
return 'api_organizations_{}'.format(slugify(self.q))
def get_data(self):
data = Organization.objects.values('pk', 'org_name', 'state')
data = data.filter(org_name__icontains=self.q)
data = data.order_by('org_name')
data = list(data)
for x in data:
x['org_name'] = '{}, {}'.format(x['org_name'], x['state'])
return data
class TagsApiView(AutoCompleteView):
"""
Returns a list of tags matching a given `q` keyword.
"""
cache = True
# API view specific
max_num_results = 50
min_keyword_length = 2
def get_cache_key(self):
return 'api_tags_{}'.format(slugify(self.q))
def get_data(self):
# @todo: should we limit this to only tags on published contenttypes?
# I think this will be too heavy a query... :(
qs = ContentType.keywords.tag_model.objects.values(
'pk', 'name', 'slug').distinct('name')
qs = qs.filter(name__icontains=self.q)
qs = qs.exclude(count=0)
return list(qs)
|
AASHE/hub
|
hub/apps/api/views.py
|
Python
|
mit
| 3,271 | 0 |
#
# LMirror is Copyright (C) 2010 Robert Collins <[email protected]>
#
# LMirror is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
# In the LMirror source tree the file COPYING.txt contains the GNU General Public
# License version 3.
#
"""Tests for logging support code."""
from StringIO import StringIO
import logging
import os.path
import time
from l_mirror import logging_support
from l_mirror.tests import ResourcedTestCase
from l_mirror.tests.logging_resource import LoggingResourceManager
from l_mirror.tests.stubpackage import TempDirResource
class TestLoggingSetup(ResourcedTestCase):
resources = [('logging', LoggingResourceManager())]
def test_configure_logging_sets_converter(self):
out = StringIO()
c_log, f_log, formatter = logging_support.configure_logging(out)
self.assertEqual(c_log, logging.root.handlers[0])
self.assertEqual(f_log, logging.root.handlers[1])
self.assertEqual(None, c_log.formatter)
self.assertEqual(formatter, f_log.formatter)
self.assertEqual(time.gmtime, formatter.converter)
self.assertEqual("%Y-%m-%d %H:%M:%SZ", formatter.datefmt)
self.assertEqual(logging.StreamHandler, c_log.__class__)
self.assertEqual(out, c_log.stream)
self.assertEqual(logging.FileHandler, f_log.__class__)
self.assertEqual(os.path.expanduser("~/.cache/lmirror/log"), f_log.baseFilename)
def test_can_supply_filename_None(self):
out = StringIO()
c_log, f_log, formatter = logging_support.configure_logging(out, None)
self.assertEqual(None, f_log)
|
rbtcollins/lmirror
|
l_mirror/tests/test_logging_support.py
|
Python
|
gpl-3.0
| 2,180 | 0.004128 |
# -*- coding: utf-8 -*-
# Copyright(C) 2012-2019 Budget Insight
# yapf-compatible
from weboob.browser import AbstractBrowser
class NetfincaBrowser(AbstractBrowser):
PARENT = 'netfinca'
BASEURL = 'https://www.cabourse.credit-agricole.fr'
|
laurentb/weboob
|
modules/cragr/netfinca_browser.py
|
Python
|
lgpl-3.0
| 251 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common.utils.data_utils import rand_name
import tempest.test
class BaseIdentityAdminTest(tempest.test.BaseTestCase):
@classmethod
def setUpClass(cls):
super(BaseIdentityAdminTest, cls).setUpClass()
os = clients.AdminManager(interface=cls._interface)
cls.client = os.identity_client
cls.token_client = os.token_client
cls.endpoints_client = os.endpoints_client
cls.v3_client = os.identity_v3_client
cls.service_client = os.service_client
cls.policy_client = os.policy_client
cls.v3_token = os.token_v3_client
cls.creds_client = os.credentials_client
if not cls.client.has_admin_extensions():
raise cls.skipException("Admin extensions disabled")
cls.data = DataGenerator(cls.client)
cls.v3data = DataGenerator(cls.v3_client)
os = clients.Manager(interface=cls._interface)
cls.non_admin_client = os.identity_client
cls.v3_non_admin_client = os.identity_v3_client
@classmethod
def tearDownClass(cls):
cls.data.teardown_all()
cls.v3data.teardown_all()
super(BaseIdentityAdminTest, cls).tearDownClass()
def disable_user(self, user_name):
user = self.get_user_by_name(user_name)
self.client.enable_disable_user(user['id'], False)
def disable_tenant(self, tenant_name):
tenant = self.get_tenant_by_name(tenant_name)
self.client.update_tenant(tenant['id'], enabled=False)
def get_user_by_name(self, name):
_, users = self.client.get_users()
user = [u for u in users if u['name'] == name]
if len(user) > 0:
return user[0]
def get_tenant_by_name(self, name):
_, tenants = self.client.list_tenants()
tenant = [t for t in tenants if t['name'] == name]
if len(tenant) > 0:
return tenant[0]
def get_role_by_name(self, name):
_, roles = self.client.list_roles()
role = [r for r in roles if r['name'] == name]
if len(role) > 0:
return role[0]
class DataGenerator(object):
def __init__(self, client):
self.client = client
self.users = []
self.tenants = []
self.roles = []
self.role_name = None
self.v3_users = []
self.projects = []
self.v3_roles = []
def setup_test_user(self):
"""Set up a test user."""
self.setup_test_tenant()
self.test_user = rand_name('test_user_')
self.test_password = rand_name('pass_')
self.test_email = self.test_user + '@testmail.tm'
resp, self.user = self.client.create_user(self.test_user,
self.test_password,
self.tenant['id'],
self.test_email)
self.users.append(self.user)
def setup_test_tenant(self):
"""Set up a test tenant."""
self.test_tenant = rand_name('test_tenant_')
self.test_description = rand_name('desc_')
resp, self.tenant = self.client.create_tenant(
name=self.test_tenant,
description=self.test_description)
self.tenants.append(self.tenant)
def setup_test_role(self):
"""Set up a test role."""
self.test_role = rand_name('role')
resp, self.role = self.client.create_role(self.test_role)
self.roles.append(self.role)
def setup_test_v3_user(self):
"""Set up a test v3 user."""
self.setup_test_project()
self.test_user = rand_name('test_user_')
self.test_password = rand_name('pass_')
self.test_email = self.test_user + '@testmail.tm'
resp, self.v3_user = self.client.create_user(self.test_user,
self.test_password,
self.project['id'],
self.test_email)
self.v3_users.append(self.v3_user)
def setup_test_project(self):
"""Set up a test project."""
self.test_project = rand_name('test_project_')
self.test_description = rand_name('desc_')
resp, self.project = self.client.create_project(
name=self.test_project,
description=self.test_description)
self.projects.append(self.project)
def setup_test_v3_role(self):
"""Set up a test v3 role."""
self.test_role = rand_name('role')
resp, self.v3_role = self.client.create_role(self.test_role)
self.v3_roles.append(self.v3_role)
def teardown_all(self):
for user in self.users:
self.client.delete_user(user['id'])
for tenant in self.tenants:
self.client.delete_tenant(tenant['id'])
for role in self.roles:
self.client.delete_role(role['id'])
for v3_user in self.v3_users:
self.client.delete_user(v3_user['id'])
for v3_project in self.projects:
self.client.delete_project(v3_project['id'])
for v3_role in self.v3_roles:
self.client.delete_role(v3_role['id'])
|
itskewpie/tempest
|
tempest/api/identity/base.py
|
Python
|
apache-2.0
| 6,216 | 0.000161 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder import test
class ExampleSkipTestCase(test.TestCase):
test_counter = 0
@test.skip_test("Example usage of @test.skip_test()")
def test_skip_test_example(self):
self.fail("skip_test failed to work properly.")
@test.skip_if(True, "Example usage of @test.skip_if()")
def test_skip_if_example(self):
self.fail("skip_if failed to work properly.")
@test.skip_unless(False, "Example usage of @test.skip_unless()")
def test_skip_unless_example(self):
self.fail("skip_unless failed to work properly.")
@test.skip_if(False, "This test case should never be skipped.")
def test_001_increase_test_counter(self):
ExampleSkipTestCase.test_counter += 1
@test.skip_unless(True, "This test case should never be skipped.")
def test_002_increase_test_counter(self):
ExampleSkipTestCase.test_counter += 1
def test_003_verify_test_counter(self):
self.assertEquals(ExampleSkipTestCase.test_counter, 2,
"Tests were not skipped appropriately")
|
tylertian/Openstack
|
openstack F/cinder/cinder/tests/test_skip_examples.py
|
Python
|
apache-2.0
| 1,837 | 0 |
from concurrent.futures import ThreadPoolExecutor
import os
import re
import gzip
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import logging
import mimetypes
from collections import defaultdict
from flask import url_for as flask_url_for
from flask import current_app, request
from boto.s3.connection import S3Connection
from boto.exception import S3CreateError, S3ResponseError
from boto.s3.key import Key
logger = logging.getLogger('flask_s3')
mimetypes.add_type('text/css', '.less')
def url_for(endpoint, **values):
"""
Generates a URL to the given endpoint.
If the endpoint is for a static resource then an Amazon S3 URL is
generated, otherwise the call is passed on to `flask.url_for`.
Because this function is set as a jinja environment variable when
`FlaskS3.init_app` is invoked, this function replaces
`flask.url_for` in templates automatically. It is unlikely that this
function will need to be directly called from within your
application code, unless you need to refer to static assets outside
of your templates.
"""
app = current_app
if 'S3_BUCKET_NAME' not in app.config:
raise ValueError("S3_BUCKET_NAME not found in app configuration.")
if app.debug and not app.config['USE_S3_DEBUG']:
return flask_url_for(endpoint, **values)
if endpoint == 'static' or endpoint.endswith('.static'):
        scheme = app.config['S3_URL_SCHEME'] or 'https'
bucket_path = '%s.%s' % (app.config['S3_BUCKET_NAME'],
app.config['S3_BUCKET_DOMAIN'])
if app.config['S3_CDN_DOMAIN']:
bucket_path = '%s' % app.config['S3_CDN_DOMAIN']
if app.config['S3_PREFIX']:
bucket_path = "/".join((bucket_path, app.config['S3_PREFIX']))
urls = app.url_map.bind(bucket_path, url_scheme=scheme)
try:
mimetype = mimetypes.guess_type(values['filename'])[0]
except KeyError:
mimetype = None
if app.config['USE_GZIP']:
accept_encoding = request.headers.get('Accept-Encoding', '')
if (mimetype in app.config['S3_GZIP_CONTENT_TYPES'] and
'gzip' in accept_encoding.lower()):
values['filename'] += '.gz'
url = urls.build(endpoint, values=values, force_external=True)
if app.config['S3_URL_SCHEME'] is None:
url = re.sub(r'^https://', '//', url)
return url
return flask_url_for(endpoint, **values)
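# A minimal usage sketch (bucket name and filename are hypothetical): inside
# a request context with S3_BUCKET_NAME = 'mybucket', a template call like
#   url_for('static', filename='css/app.less')
# would resolve to something like
#   https://mybucket.s3.amazonaws.com/static/css/app.less
# while any non-static endpoint falls through to flask.url_for unchanged.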
def _bp_static_url(blueprint):
""" builds the absolute url path for a blueprint's static folder """
u = u'%s%s' % (blueprint.url_prefix or '', blueprint.static_url_path or '')
return u
def _gather_files(app, hidden):
""" Gets all files in static folders and returns in dict."""
dirs = [(unicode(app.static_folder), app.static_url_path)]
if hasattr(app, 'blueprints'):
blueprints = app.blueprints.values()
bp_details = lambda x: (x.static_folder, _bp_static_url(x))
dirs.extend([bp_details(x) for x in blueprints if x.static_folder])
valid_files = defaultdict(list)
for static_folder, static_url_loc in dirs:
if not os.path.isdir(static_folder):
logger.warning("WARNING - [%s does not exist]" % static_folder)
else:
logger.debug("Checking static folder: %s" % static_folder)
for root, _, files in os.walk(static_folder):
files = [os.path.join(root, x) \
for x in files if hidden or x[0] != '.']
if files:
valid_files[(static_folder, static_url_loc)].extend(files)
return valid_files
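# Shape of the returned mapping (paths hypothetical): keys are
# (static_folder, static_url) pairs, values are absolute file paths, e.g.
#   {('/srv/app/static', '/static'): ['/srv/app/static/css/site.css', ...]}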
def _path_to_relative_url(path):
""" Converts a folder and filename into a ralative url path """
return os.path.splitdrive(path)[1].replace('\\', '/')
def _static_folder_path(static_url, static_folder, static_asset, prefix=''):
"""
Returns a path to a file based on the static folder, and not on the
filesystem holding the file.
Returns a path relative to static_url for static_asset
"""
# first get the asset path relative to the static folder.
# static_asset is not simply a filename because it could be
# sub-directory then file etc.
if not static_asset.startswith(static_folder):
raise ValueError("%s startic asset must be under %s static folder" %
(static_asset, static_folder))
rel_asset = static_asset[len(static_folder):]
# Now bolt the static url path and the relative asset location together
key = u'%s/%s' % (static_url.rstrip('/'), rel_asset.lstrip('/'))
if prefix:
key = u'%s/%s' % (prefix, key)
return key
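# Worked example (hypothetical paths): with static_url='/static',
# static_folder='/srv/app/static' and
# static_asset='/srv/app/static/css/site.css', rel_asset is '/css/site.css'
# and the returned key is '/static/css/site.css' (a configured S3_PREFIX,
# when present, is prepended).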
def _write_files(app, static_url_loc, static_folder, files, bucket,
ex_keys=None):
""" Writes all the files inside a static folder to S3. """
with ThreadPoolExecutor(app.config['S3_UPLOAD_COCURRENCY']) as executor:
for file_path in files:
asset_loc = _path_to_relative_url(file_path)
key_name = _static_folder_path(static_url_loc, static_folder,
asset_loc, app.config['S3_PREFIX'])
mimetype = mimetypes.guess_type(key_name)[0]
is_gzippable = mimetype in app.config['S3_GZIP_CONTENT_TYPES']
headers = app.config['S3_HEADERS']
msg = "Uploading %s to %s as %s" % (file_path, bucket, key_name)
logger.debug(msg)
if ex_keys and key_name in ex_keys:
logger.debug("%s excluded from upload" % key_name)
else:
do_gzip = app.config['USE_GZIP'] and is_gzippable
# upload origin file
executor.submit(_upload_file, file_path, bucket, key_name, headers)
# upload gzipped file (if enabled)
if do_gzip:
gzip_key_name = "%s.gz" % key_name
executor.submit(_upload_file, file_path, bucket, gzip_key_name, headers, True)
def _upload_file(file_path, bucket, key_name, headers=None, do_gzip=False):
    k = Key(bucket=bucket, name=key_name)
    # Avoid a mutable default argument; treat a missing headers dict as empty.
    for header, value in (headers or {}).items():
if (header, value) != ('Content-Encoding', 'gzip'):
k.set_metadata(header, value)
mimetype = mimetypes.guess_type(file_path)[0]
if mimetype:
k.set_metadata('Content-Type', mimetype)
with open(file_path) as f:
content = f.read()
if do_gzip:
k.set_metadata('Content-Encoding', 'gzip')
gzipped = StringIO()
with gzip.GzipFile(fileobj=gzipped, mode='w') as _gzip:
_gzip.write(content)
content = gzipped.getvalue()
try:
k.set_contents_from_string(content)
except S3ResponseError:
if not do_gzip:
k.set_contents_from_filename(file_path)
else:
raise
k.make_public()
return k
def _upload_files(app, files_, bucket):
for (static_folder, static_url), names in files_.iteritems():
_write_files(app, static_url, static_folder, names, bucket)
def create_all(app, user=None, password=None, bucket_name=None,
location='', include_hidden=False, force_refresh=False):
"""
    Uploads all of the static assets associated with a Flask application to
Amazon S3.
All static assets are identified on the local filesystem, including
any static assets associated with *registered* blueprints. In turn,
each asset is uploaded to the bucket described by `bucket_name`. If
the bucket does not exist then it is created.
Flask-S3 creates the same relative static asset folder structure on
S3 as can be found within your Flask application.
Many of the optional arguments to `create_all` can be specified
instead in your application's configuration using the Flask-S3
`configuration`_ variables.
:param app: a :class:`flask.Flask` application object.
:param user: an AWS Access Key ID. You can find this key in the
Security Credentials section of your AWS account.
:type user: `basestring` or None
:param password: an AWS Secret Access Key. You can find this key in
the Security Credentials section of your AWS
account.
:type password: `basestring` or None
    :param bucket_name: the name of the bucket you wish to serve your
static assets from. **Note**: while a valid
character, it is recommended that you do not
include periods in bucket_name if you wish to
serve over HTTPS. See Amazon's `bucket
restrictions`_ for more details.
:type bucket_name: `basestring` or None
:param location: the AWS region to host the bucket in; an empty
string indicates the default region should be used,
which is the US Standard region. Possible location
values include: `'DEFAULT'`, `'EU'`, `'USWest'`,
`'APSoutheast'`
:type location: `basestring` or None
:param include_hidden: by default Flask-S3 will not upload hidden
files. Set this to true to force the upload of hidden files.
:type include_hidden: `bool`
.. _bucket restrictions: http://docs.amazonwebservices.com/AmazonS3\
/latest/dev/BucketRestrictions.html
"""
if user is None and 'AWS_ACCESS_KEY_ID' in app.config:
user = app.config['AWS_ACCESS_KEY_ID']
if password is None and 'AWS_SECRET_ACCESS_KEY' in app.config:
password = app.config['AWS_SECRET_ACCESS_KEY']
if bucket_name is None and 'S3_BUCKET_NAME' in app.config:
bucket_name = app.config['S3_BUCKET_NAME']
if not bucket_name:
raise ValueError("No bucket name provided.")
# build list of static files
all_files = _gather_files(app, include_hidden)
logger.debug("All valid files: %s" % all_files)
conn = S3Connection(user, password) # connect to s3
# get_or_create bucket
    try:
        bucket = conn.create_bucket(bucket_name, location=location)
        bucket.make_public(recursive=True)
    except S3CreateError as e:
        if e.error_code == u'BucketAlreadyOwnedByYou':
            bucket = conn.get_bucket(bucket_name)
            if force_refresh:
                bucket.make_public(recursive=True)
        else:
            raise
_upload_files(app, all_files, bucket)
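# Typical invocation (a sketch; the bucket name is hypothetical and AWS
# credentials are assumed to be present in app.config):
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.config['S3_BUCKET_NAME'] = 'my-bucket'
#   create_all(app)   # gathers every static asset and uploads it to S3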
class FlaskS3(object):
"""
The FlaskS3 object allows your application to use Flask-S3.
When initialising a FlaskS3 object you may optionally provide your
:class:`flask.Flask` application object if it is ready. Otherwise,
you may provide it later by using the :meth:`init_app` method.
:param app: optional :class:`flask.Flask` application object
:type app: :class:`flask.Flask` or None
"""
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
"""
An alternative way to pass your :class:`flask.Flask` application
object to Flask-S3. :meth:`init_app` also takes care of some
default `settings`_.
:param app: the :class:`flask.Flask` application object.
"""
defaults = [('S3_URL_SCHEME', None),
('S3_USE_HTTPS', None),
('USE_S3', True),
('USE_GZIP', False),
('USE_S3_DEBUG', False),
('S3_BUCKET_DOMAIN', 's3.amazonaws.com'),
('S3_CDN_DOMAIN', ''),
('S3_USE_CACHE_CONTROL', False),
('S3_HEADERS', {}),
('S3_GZIP_CONTENT_TYPES', (
'text/css',
'application/javascript',
'application/x-javascript',
)),
('S3_PREFIX', None),
('S3_UPLOAD_COCURRENCY', 32)]
for k, v in defaults:
app.config.setdefault(k, v)
if app.config['USE_S3']:
app.jinja_env.globals['url_for'] = url_for
if app.config['S3_USE_CACHE_CONTROL'] and 'S3_CACHE_CONTROL' in app.config:
cache_control_header = app.config['S3_CACHE_CONTROL']
app.config['S3_HEADERS']['Cache-Control'] = cache_control_header
if app.config['S3_URL_SCHEME'] is None and \
app.config['S3_USE_HTTPS'] is not None:
scheme = 'https' if app.config['S3_USE_HTTPS'] else 'http'
app.config['S3_URL_SCHEME'] = scheme
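# Extension wiring in the usual Flask pattern (a sketch, not part of the
# library itself):
#
#   s3 = FlaskS3()
#   s3.init_app(app)      # or simply FlaskS3(app)
#
# Once initialised with USE_S3 enabled, url_for inside Jinja templates
# points static asset URLs at the configured bucket.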
|
spoqa/flask-s3
|
flask_s3.py
|
Python
|
mit
| 12,801 | 0.002734 |
# Generated by Django 2.1 on 2018-08-13 08:04
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ibms', '0006_auto_20180813_1603'),
]
operations = [
migrations.RenameField(
model_name='serviceprioritymappings',
old_name='costcentreName',
new_name='costCentreName',
),
]
|
parksandwildlife/ibms
|
ibms_project/ibms/migrations/0007_auto_20180813_1604.py
|
Python
|
apache-2.0
| 391 | 0 |
import numpy as np
from scipy.interpolate import interp1d
from astropy.io import ascii
from astropy import units as u
from newdust import constants as c
from newdust.graindist.composition import _find_cmfile
__all__ = ['CmSilicate']
RHO_SIL = 3.8 # g cm^-3
class CmSilicate(object):
"""
| **ATTRIBUTES**
| cmtype : 'Silicate'
| rho : grain material density (g cm^-3)
| citation : A string containing citation to the original work
| interps : A tuple containing scipy.interp1d objects (rp, ip)
|
| *functions*
| rp(lam, unit='kev') : Returns real part (unit='kev'|'angs')
| ip(lam, unit='kev') : Returns imaginary part (unit='kev'|'angs')
| cm(lam, unit='kev') : Complex index of refraction of dtype='complex'
| plot(lam=None, unit='kev') : Plots Re(m-1) and Im(m)
| if lam is *None*, plots the original interp objects
| otherwise, plots with user defined wavelength (lam)
"""
def __init__(self, rho=RHO_SIL):
self.cmtype = 'Silicate'
self.rho = rho
self.citation = "Using optical constants for astrosilicate,\nDraine, B. T. 2003, ApJ, 598, 1026\nhttp://adsabs.harvard.edu/abs/2003ApJ...598.1026D"
D03file = _find_cmfile('callindex.out_sil.D03')
D03dat = ascii.read(D03file, header_start=4, data_start=5)
wavel = D03dat['wave(um)'] * u.micron
rp = interp1d(wavel.to(u.cm).value, 1.0 + D03dat['Re(n)-1']) # wavelength (cm), rp
ip = interp1d(wavel.to(u.cm).value, D03dat['Im(n)']) # wavelength (cm), ip
self.interps = (rp, ip)
def _interp_helper(self, lam_cm, interp, rp=False):
# Returns zero for wavelengths not covered by the interpolation object
# If the real part is needed, returns 1 (consistent with vacuum)
result = np.zeros(np.size(lam_cm))
if rp: result += 1
if np.size(lam_cm) == 1:
if (lam_cm >= np.min(interp.x)) & (lam_cm <= np.max(interp.x)):
result = interp(lam_cm)
else:
ii = (lam_cm >= np.min(interp.x)) & (lam_cm <= np.max(interp.x))
result[ii] = interp(lam_cm[ii])
return result
def rp(self, lam, unit='kev'):
lam_cm = c._lam_cm(lam, unit)
return self._interp_helper(lam_cm, self.interps[0], rp=True)
def ip(self, lam, unit='kev'):
lam_cm = c._lam_cm(lam, unit)
return self._interp_helper(lam_cm, self.interps[1])
def cm(self, lam, unit='kev'):
return self.rp(lam, unit=unit) + 1j * self.ip(lam, unit=unit)
def plot(self, ax, lam=None, unit='kev', rppart=True, impart=True):
if lam is None:
rp_m1 = np.abs(self.interps[0].y - 1.0)
ip = self.interps[1].y
x = self.interps[0].x / c.micron2cm # cm / (cm/um)
xlabel = "Wavelength (um)"
else:
rp_m1 = np.abs(self.rp(lam, unit=unit)-1.0)
ip = self.ip(lam, unit)
x = lam
assert unit in c.ALLOWED_LAM_UNITS
if unit == 'kev': xlabel = "Energy (keV)"
if unit == 'angs': xlabel = "Wavelength (Angstroms)"
if rppart:
ax.plot(x, rp_m1, ls='-', label='|Re(m-1)|')
if impart:
ax.plot(x, ip, ls='--', label='Im(m)')
ax.set_xlabel(xlabel)
ax.legend()
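# Minimal usage sketch (assumes the bundled Draine 2003 optical-constants
# file is resolvable through _find_cmfile):
#   sil = CmSilicate()
#   m = sil.cm(1.0, unit='kev')            # complex index of refraction at 1 keV
#   re_part, im_part = sil.rp(1.0), sil.ip(1.0)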
|
eblur/newdust
|
newdust/graindist/composition/cmsilicate.py
|
Python
|
bsd-2-clause
| 3,341 | 0.00419 |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for generating various frontend pages."""
__author__ = 'Saifu Angto ([email protected])'
import json
from models import models
from models.config import ConfigProperty
from models.counters import PerfCounter
from utils import BaseHandler
from utils import BaseRESTHandler
from utils import XsrfTokenManager
from models.models import Student
# Whether to record events in a database.
CAN_PERSIST_ACTIVITY_EVENTS = ConfigProperty(
'gcb_can_persist_activity_events', bool, (
'Whether or not to record student activity interactions in a '
'datastore. Without event recording, you cannot analyze student '
'activity interactions. On the other hand, no event recording reduces '
'the number of datastore operations and minimizes the use of Google '
'App Engine quota. Turn event recording on if you want to analyze '
'this data.'),
False)
COURSE_EVENTS_RECEIVED = PerfCounter(
'gcb-course-events-received',
'A number of activity/assessment events received by the server.')
COURSE_EVENTS_RECORDED = PerfCounter(
'gcb-course-events-recorded',
'A number of activity/assessment events recorded in a datastore.')
def extract_unit_and_lesson_id(handler):
"""Extracts unit and lesson id from the request."""
c = handler.request.get('unit')
if not c:
unit_id = 1
else:
unit_id = int(c)
l = handler.request.get('lesson')
if not l:
lesson_id = 1
else:
lesson_id = int(l)
return unit_id, lesson_id
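# e.g. a request for 'unit?unit=2&lesson=3' yields (2, 3); either argument
# defaults to 1 when missing (illustrative values).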
class CourseHandler(BaseHandler):
"""Handler for generating course page."""
@classmethod
def get_child_routes(cls):
"""Add child handlers for REST."""
return [('/rest/events', EventsRESTHandler)]
def get(self):
"""Handles GET requests."""
user = self.personalize_page_and_get_user()
if not user:
self.redirect('/preview')
return None
student = Student.get_by_email(user.email())
playlist = student.playlist
playlist_urls = student.playlist_urls
if not self.personalize_page_and_get_enrolled():
return
self.template_value['units'] = self.get_units()
self.template_value['playlist'] = playlist
self.template_value['playlist_urls'] = playlist_urls
self.template_value['navbar'] = {'course': True}
self.render('course.html')
class PlaylistViewerHandler(BaseHandler):
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
user = self.personalize_page_and_get_user()
student = Student.get_by_email(user.email())
playlist = student.playlist
# Extract incoming args
unit_id, lesson_id = extract_unit_and_lesson_id(self)
self.template_value['unit_id'] = unit_id
self.template_value['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in self.get_units():
if unit.unit_id == str(unit_id):
self.template_value['units'] = unit
lessons = self.get_lessons(unit_id)
self.template_value['lessons'] = lessons
# Set template values for nav bar
self.template_value['navbar'] = {'course': True}
# Set template values for back and next nav buttons
if lesson_id == 1:
self.template_value['back_button_url'] = ''
elif lessons[lesson_id - 2].activity:
self.template_value['back_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
else:
self.template_value['back_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
if lessons[lesson_id - 1].activity:
self.template_value['playlist_button_url'] = (
                'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif playlist[0] != (str(unit_id) + '.' + str(lesson_id)):
self.template_value['playlist_button_url'] = ''
else:
self.template_value['playlist_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id + 1))
if lessons[lesson_id - 1].activity:
self.template_value['next_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif lesson_id == len(lessons):
self.template_value['next_button_url'] = ''
else:
self.template_value['next_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id + 1))
        self.response.out.write(str(unit_id))
        self.response.out.write(str(lesson_id))
#self.render('unit.html')
class UnitHandler(BaseHandler):
"""Handler for generating unit page."""
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
user = self.personalize_page_and_get_user()
student = Student.get_by_email(user.email())
playlist = filter(lambda x: x != "", student.playlist)
# Extract incoming args
unit_id, lesson_id = extract_unit_and_lesson_id(self)
self.template_value['unit_id'] = unit_id
self.template_value['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in self.get_units():
if unit.unit_id == str(unit_id):
self.template_value['units'] = unit
lessons = self.get_lessons(unit_id)
self.template_value['lessons'] = lessons
# Set template values for nav bar
self.template_value['navbar'] = {'course': True}
# Set template values for back and next nav buttons
if lesson_id == 1:
self.template_value['back_button_url'] = ''
elif lessons[lesson_id - 2].activity:
self.template_value['back_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
else:
self.template_value['back_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id - 1))
if lessons[lesson_id - 1].activity:
self.template_value['playlist_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif str(unit_id) + '.' + str(lesson_id) in playlist:
for i in range (len(playlist)):
if playlist[i] == str(unit_id) + '.' + str(lesson_id):
if i != len(playlist) - 1:
next_playlist = playlist[i + 1] #will go out of bounds if at last item in playlist
self.template_value['playlist_button_url'] = (
'unit?unit=%s&lesson=%s' % (next_playlist[0], next_playlist[2]))
break
# if lessons[lesson_id - 1].activity:
# self.template_value['playlist_button_url'] = (
# 'activity?unit=%s&lessons=%s' % (unit_id, lesson_id))
# else:
# self.template_value['playlist_button_url'] = (
# 'unit?unit=%s&lesson=%s' % (unit_id, lesson_id +
# 1))
if lessons[lesson_id - 1].activity:
self.template_value['next_button_url'] = (
'activity?unit=%s&lesson=%s' % (unit_id, lesson_id))
elif lesson_id == len(lessons):
self.template_value['next_button_url'] = ''
else:
self.template_value['next_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id + 1))
self.render('unit.html')
class ActivityHandler(BaseHandler):
"""Handler for generating activity page and receiving submissions."""
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
user = self.personalize_page_and_get_user()
student = Student.get_by_email(user.email())
playlist = filter(lambda x: x != "", student.playlist)
# Extract incoming args
unit_id, lesson_id = extract_unit_and_lesson_id(self)
self.template_value['unit_id'] = unit_id
self.template_value['lesson_id'] = lesson_id
# Set template values for a unit and its lesson entities
for unit in self.get_units():
if unit.unit_id == str(unit_id):
self.template_value['units'] = unit
lessons = self.get_lessons(unit_id)
self.template_value['lessons'] = lessons
# Set template values for nav bar
self.template_value['navbar'] = {'course': True}
# Set template values for back and next nav buttons
self.template_value['back_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id))
if str(unit_id) + '.' + str(lesson_id) in playlist:
for i in range (len(playlist)):
if playlist[i] == str(unit_id) + '.' + str(lesson_id) and i != len(playlist) - 1:
next_playlist = playlist[i + 1] #will go out of bounds if at last item in playlist
self.template_value['playlist_button_url'] = (
'unit?unit=%s&lesson=%s' % (next_playlist[0], next_playlist[2]))
break
if lesson_id == len(lessons):
self.template_value['next_button_url'] = ''
else:
self.template_value['next_button_url'] = (
'unit?unit=%s&lesson=%s' % (unit_id, lesson_id + 1))
self.template_value['record_events'] = CAN_PERSIST_ACTIVITY_EVENTS.value
self.template_value['event_xsrf_token'] = (
XsrfTokenManager.create_xsrf_token('event-post'))
self.render('activity.html')
class AssessmentHandler(BaseHandler):
"""Handler for generating assessment page."""
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
# Extract incoming args
n = self.request.get('name')
if not n:
n = 'Pre'
self.template_value['name'] = n
self.template_value['navbar'] = {'course': True}
self.template_value['record_events'] = CAN_PERSIST_ACTIVITY_EVENTS.value
self.template_value['assessment_xsrf_token'] = (
XsrfTokenManager.create_xsrf_token('assessment-post'))
self.template_value['event_xsrf_token'] = (
XsrfTokenManager.create_xsrf_token('event-post'))
self.render('assessment.html')
class EventsRESTHandler(BaseRESTHandler):
"""Provides REST API for an Event."""
def post(self):
"""Receives event and puts it into datastore."""
COURSE_EVENTS_RECEIVED.inc()
if not CAN_PERSIST_ACTIVITY_EVENTS.value:
return
request = json.loads(self.request.get('request'))
if not self.assert_xsrf_token_or_fail(request, 'event-post', {}):
return
user = self.get_user()
if not user:
return
student = models.Student.get_enrolled_student_by_email(user.email())
if not student:
return
models.EventEntity.record(
request.get('source'), user, request.get('payload'))
COURSE_EVENTS_RECORDED.inc()
|
supunkamburugamuve/mooc2
|
controllers/lessons.py
|
Python
|
apache-2.0
| 12,062 | 0.00257 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
group_l10n_in_reseller = fields.Boolean(implied_group='l10n_in.group_l10n_in_reseller', string="Manage Reseller(E-Commerce)")
|
ddico/odoo
|
addons/l10n_in/models/res_config_settings.py
|
Python
|
agpl-3.0
| 355 | 0.002817 |
from __future__ import print_function
import flask
import os
import threading
import time
import webbrowser
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
_basedir = os.path.join("..", os.path.dirname(__file__))
app = flask.Flask(__name__, static_path="/unused")
PORT=5009
http_server = HTTPServer(WSGIContainer(app))
"""this is a simple server to facilitate developing the docs. by
serving up static files from this server, we avoid the need to use a
symlink.
"""
@app.route('/')
def welcome():
return """
<h1>Welcome to the Bokeh documentation server</h1>
You probably want to go to <a href="/en/latest/index.html"> Index</a>
"""
@app.route('/en/latest/<path:filename>')
def send_pic(filename):
return flask.send_from_directory(
os.path.join(_basedir,"sphinx/_build/html/"), filename)
def open_browser():
# Child process
time.sleep(0.5)
    webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new=2)  # 2 = open in a new tab
def serve_http():
http_server.listen(PORT)
IOLoop.instance().start()
def shutdown_server():
ioloop = IOLoop.instance()
ioloop.add_callback(ioloop.stop)
print("Asked Server to shut down.")
def ui():
time.sleep(0.5)
input("Press <ENTER> to exit...\n")
if __name__ == "__main__":
print("\nStarting Bokeh plot server on port %d..." % PORT)
print("Visit http://localhost:%d/en/latest/index.html to see plots\n" % PORT)
t_server = threading.Thread(target=serve_http)
t_server.start()
t_browser = threading.Thread(target=open_browser)
t_browser.start()
ui()
shutdown_server()
t_server.join()
t_browser.join()
print("Server shut down.")
|
phobson/bokeh
|
sphinx/docserver.py
|
Python
|
bsd-3-clause
| 1,749 | 0.005146 |
"""
Regression tests for rescinding outstanding subscription requests.
"""
from twisted.words.protocols.jabber.client import IQ
from servicetest import (EventPattern, wrap_channel, assertLength,
assertEquals, call_async, sync_dbus)
from hazetest import exec_test
import constants as cs
import ns
jid = '[email protected]'
def test(q, bus, conn, stream, remove, local):
call_async(q, conn.Requests, 'EnsureChannel',{
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_CONTACT_LIST,
cs.TARGET_HANDLE_TYPE: cs.HT_LIST,
cs.TARGET_ID: 'subscribe',
})
e = q.expect('dbus-return', method='EnsureChannel')
subscribe = wrap_channel(bus.get_object(conn.bus_name, e.value[1]),
cs.CHANNEL_TYPE_CONTACT_LIST)
call_async(q, conn.Requests, 'EnsureChannel',{
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_CONTACT_LIST,
cs.TARGET_HANDLE_TYPE: cs.HT_LIST,
cs.TARGET_ID: 'stored',
})
e = q.expect('dbus-return', method='EnsureChannel')
stored = wrap_channel(bus.get_object(conn.bus_name, e.value[1]),
cs.CHANNEL_TYPE_CONTACT_LIST)
call_async(q, conn.Requests, 'EnsureChannel',{
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_CONTACT_LIST,
cs.TARGET_HANDLE_TYPE: cs.HT_LIST,
cs.TARGET_ID: 'publish',
})
e = q.expect('dbus-return', method='EnsureChannel')
publish = wrap_channel(bus.get_object(conn.bus_name, e.value[1]),
cs.CHANNEL_TYPE_CONTACT_LIST)
h = conn.RequestHandles(cs.HT_CONTACT, [jid])[0]
# Another client logged into our account (Gajim, say) wants to subscribe to
# Marco's presence. First, per RFC 3921 it 'SHOULD perform a "roster set"
# for the new roster item':
#
# <iq type='set'>
# <query xmlns='jabber:iq:roster'>
# <item jid='[email protected]'/>
# </query>
# </iq>
#
# 'As a result, the user's server (1) MUST initiate a roster push for the
# new roster item to all available resources associated with this user that
# have requested the roster, setting the 'subscription' attribute to a
# value of "none"':
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'none'
stream.send(iq)
# In response, Haze adds Marco to the roster, which we guess (wrongly,
# in this case) also means subscribe
q.expect_many(
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [h], [], [], [], h, 0], path=subscribe.object_path),
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [h], [], [], [], 0, 0], path=stored.object_path),
)
# Gajim sends a <presence type='subscribe'/> to Marco. 'As a result, the
# user's server MUST initiate a second roster push to all of the user's
# available resources that have requested the roster, setting [...]
# ask='subscribe' attribute in the roster item [for Marco]:
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'none'
item['ask'] = 'subscribe'
stream.send(iq)
# In response, Haze should add Marco to subscribe:remote-pending,
# but libpurple has no such concept, so nothing much happens.
# The user decides that they don't care what Marco's baking after all
# (maybe they read his blog instead?) and:
if remove:
# ...removes him from the roster...
if local:
# ...by telling Haze to remove him from stored
stored.Group.RemoveMembers([h], '')
event = q.expect('stream-iq', iq_type='set', query_ns=ns.ROSTER)
item = event.query.firstChildElement()
assertEquals(jid, item['jid'])
assertEquals('remove', item['subscription'])
else:
# ...using the other client.
pass
# The server must 'inform all of the user's available resources that
# have requested the roster of the roster item removal':
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'remove'
# When Marco found this bug, this roster update included:
item['ask'] = 'subscribe'
# which is a bit weird: I don't think the server should send that when
# the contact's being removed. I think CMs should ignore it, so I'm
# including it in the test.
stream.send(iq)
# In response, Haze should announce that Marco has been removed from
# subscribe:remote-pending and stored:members
q.expect_many(
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [], [h], [], [], 0, 0],
path=subscribe.object_path),
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [], [h], [], [], 0, 0],
path=stored.object_path),
)
else:
# ...rescinds the subscription request...
if local:
raise AssertionError("Haze can't do this ")
else:
# ...in the other client.
pass
# In response, the server sends a roster update:
iq = IQ(stream, "set")
item = iq.addElement((ns.ROSTER, 'query')).addElement('item')
item['jid'] = jid
item['subscription'] = 'none'
# no ask='subscribe' any more.
stream.send(iq)
# In response, Haze should announce that Marco has been removed from
# subscribe:remote-pending; but it can't know that, so nothing happens.
def test_remove_local(q, bus, conn, stream):
test(q, bus, conn, stream, remove=True, local=True)
def test_remove_remote(q, bus, conn, stream):
test(q, bus, conn, stream, remove=True, local=False)
def test_unsubscribe_remote(q, bus, conn, stream):
test(q, bus, conn, stream, remove=False, local=False)
if __name__ == '__main__':
exec_test(test_remove_local)
exec_test(test_remove_remote)
exec_test(test_unsubscribe_remote)
|
fcrozat/telepathy-haze
|
tests/twisted/roster/removed-from-rp-subscribe.py
|
Python
|
gpl-2.0
| 6,175 | 0.002591 |
from django.core import formfields, validators
from django.core.mail import mail_admins, mail_managers
from django.core.exceptions import Http404, ObjectDoesNotExist
from django.core.extensions import DjangoContext, render_to_response
from django.models.auth import users
from django.models.comments import comments, freecomments
from django.models.core import contenttypes
from django.parts.auth.formfields import AuthenticationForm
from django.utils.httpwrappers import HttpResponseRedirect
from django.utils.text import normalize_newlines
from django.conf.settings import BANNED_IPS, COMMENTS_ALLOW_PROFANITIES, COMMENTS_SKETCHY_USERS_GROUP, COMMENTS_FIRST_FEW, SITE_ID
import base64, datetime
COMMENTS_PER_PAGE = 20
class PublicCommentManipulator(AuthenticationForm):
"Manipulator that handles public registered comments"
def __init__(self, user, ratings_required, ratings_range, num_rating_choices):
AuthenticationForm.__init__(self)
self.ratings_range, self.num_rating_choices = ratings_range, num_rating_choices
choices = [(c, c) for c in ratings_range]
def get_validator_list(rating_num):
if rating_num <= num_rating_choices:
return [validators.RequiredIfOtherFieldsGiven(['rating%d' % i for i in range(1, 9) if i != rating_num], "This rating is required because you've entered at least one other rating.")]
else:
return []
self.fields.extend([
formfields.LargeTextField(field_name="comment", maxlength=3000, is_required=True,
validator_list=[self.hasNoProfanities]),
formfields.RadioSelectField(field_name="rating1", choices=choices,
is_required=ratings_required and num_rating_choices > 0,
validator_list=get_validator_list(1),
),
formfields.RadioSelectField(field_name="rating2", choices=choices,
is_required=ratings_required and num_rating_choices > 1,
validator_list=get_validator_list(2),
),
formfields.RadioSelectField(field_name="rating3", choices=choices,
is_required=ratings_required and num_rating_choices > 2,
validator_list=get_validator_list(3),
),
formfields.RadioSelectField(field_name="rating4", choices=choices,
is_required=ratings_required and num_rating_choices > 3,
validator_list=get_validator_list(4),
),
formfields.RadioSelectField(field_name="rating5", choices=choices,
is_required=ratings_required and num_rating_choices > 4,
validator_list=get_validator_list(5),
),
formfields.RadioSelectField(field_name="rating6", choices=choices,
is_required=ratings_required and num_rating_choices > 5,
validator_list=get_validator_list(6),
),
formfields.RadioSelectField(field_name="rating7", choices=choices,
is_required=ratings_required and num_rating_choices > 6,
validator_list=get_validator_list(7),
),
formfields.RadioSelectField(field_name="rating8", choices=choices,
is_required=ratings_required and num_rating_choices > 7,
validator_list=get_validator_list(8),
),
])
if not user.is_anonymous():
self["username"].is_required = False
self["username"].validator_list = []
self["password"].is_required = False
self["password"].validator_list = []
self.user_cache = user
def hasNoProfanities(self, field_data, all_data):
if COMMENTS_ALLOW_PROFANITIES:
return
return validators.hasNoProfanities(field_data, all_data)
def get_comment(self, new_data):
"Helper function"
return comments.Comment(None, self.get_user_id(), new_data["content_type_id"],
new_data["object_id"], new_data.get("headline", "").strip(),
new_data["comment"].strip(), new_data.get("rating1", None),
new_data.get("rating2", None), new_data.get("rating3", None),
new_data.get("rating4", None), new_data.get("rating5", None),
new_data.get("rating6", None), new_data.get("rating7", None),
new_data.get("rating8", None), new_data.get("rating1", None) is not None,
datetime.datetime.now(), new_data["is_public"], new_data["ip_address"], False, SITE_ID)
def save(self, new_data):
today = datetime.date.today()
c = self.get_comment(new_data)
for old in comments.get_list(content_type__id__exact=new_data["content_type_id"],
object_id__exact=new_data["object_id"], user__id__exact=self.get_user_id()):
# Check that this comment isn't duplicate. (Sometimes people post
# comments twice by mistake.) If it is, fail silently by pretending
# the comment was posted successfully.
if old.submit_date.date() == today and old.comment == c.comment \
and old.rating1 == c.rating1 and old.rating2 == c.rating2 \
and old.rating3 == c.rating3 and old.rating4 == c.rating4 \
and old.rating5 == c.rating5 and old.rating6 == c.rating6 \
and old.rating7 == c.rating7 and old.rating8 == c.rating8:
return old
# If the user is leaving a rating, invalidate all old ratings.
if c.rating1 is not None:
old.valid_rating = False
old.save()
c.save()
# If the commentor has posted fewer than COMMENTS_FIRST_FEW comments,
# send the comment to the managers.
if self.user_cache.get_comments_comment_count() <= COMMENTS_FIRST_FEW:
message = 'This comment was posted by a user who has posted fewer than %s comments:\n\n%s' % \
(COMMENTS_FIRST_FEW, c.get_as_text())
mail_managers("Comment posted by rookie user", message)
if COMMENTS_SKETCHY_USERS_GROUP and COMMENTS_SKETCHY_USERS_GROUP in [g.id for g in self.user_cache.get_group_list()]:
message = 'This comment was posted by a sketchy user:\n\n%s' % c.get_as_text()
mail_managers("Comment posted by sketchy user (%s)" % self.user_cache.username, c.get_as_text())
return c
class PublicFreeCommentManipulator(formfields.Manipulator):
"Manipulator that handles public free (unregistered) comments"
def __init__(self):
self.fields = (
formfields.TextField(field_name="person_name", maxlength=50, is_required=True,
validator_list=[self.hasNoProfanities]),
formfields.LargeTextField(field_name="comment", maxlength=3000, is_required=True,
validator_list=[self.hasNoProfanities]),
)
def hasNoProfanities(self, field_data, all_data):
if COMMENTS_ALLOW_PROFANITIES:
return
return validators.hasNoProfanities(field_data, all_data)
def get_comment(self, new_data):
"Helper function"
return freecomments.FreeComment(None, new_data["content_type_id"],
new_data["object_id"], new_data["comment"].strip(),
new_data["person_name"].strip(), datetime.datetime.now(), new_data["is_public"],
new_data["ip_address"], False, SITE_ID)
def save(self, new_data):
today = datetime.date.today()
c = self.get_comment(new_data)
# Check that this comment isn't duplicate. (Sometimes people post
# comments twice by mistake.) If it is, fail silently by pretending
# the comment was posted successfully.
for old_comment in freecomments.get_list(content_type__id__exact=new_data["content_type_id"],
object_id__exact=new_data["object_id"], person_name__exact=new_data["person_name"],
submit_date__year=today.year, submit_date__month=today.month,
submit_date__day=today.day):
if old_comment.comment == c.comment:
return old_comment
c.save()
return c
def post_comment(request):
"""
Post a comment
Redirects to the `comments.comments.comment_was_posted` view upon success.
Templates: `comment_preview`
Context:
comment
the comment being posted
comment_form
the comment form
options
comment options
target
comment target
hash
            security hash (must be included in a posted form to successfully
post a comment).
rating_options
comment ratings options
ratings_optional
are ratings optional?
ratings_required
are ratings required?
rating_range
range of ratings
rating_choices
choice of ratings
"""
if not request.POST:
raise Http404, "Only POSTs are allowed"
try:
options, target, security_hash = request.POST['options'], request.POST['target'], request.POST['gonzo']
except KeyError:
raise Http404, "One or more of the required fields wasn't submitted"
photo_options = request.POST.get('photo_options', '')
rating_options = normalize_newlines(request.POST.get('rating_options', ''))
if comments.get_security_hash(options, photo_options, rating_options, target) != security_hash:
raise Http404, "Somebody tampered with the comment form (security violation)"
# Now we can be assured the data is valid.
if rating_options:
rating_range, rating_choices = comments.get_rating_options(base64.decodestring(rating_options))
else:
rating_range, rating_choices = [], []
content_type_id, object_id = target.split(':') # target is something like '52:5157'
try:
obj = contenttypes.get_object(pk=content_type_id).get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
raise Http404, "The comment form had an invalid 'target' parameter -- the object ID was invalid"
option_list = options.split(',') # options is something like 'pa,ra'
new_data = request.POST.copy()
new_data['content_type_id'] = content_type_id
new_data['object_id'] = object_id
new_data['ip_address'] = request.META.get('REMOTE_ADDR')
new_data['is_public'] = comments.IS_PUBLIC in option_list
manipulator = PublicCommentManipulator(request.user,
ratings_required=comments.RATINGS_REQUIRED in option_list,
ratings_range=rating_range,
num_rating_choices=len(rating_choices))
errors = manipulator.get_validation_errors(new_data)
# If user gave correct username/password and wasn't already logged in, log them in
# so they don't have to enter a username/password again.
if manipulator.get_user() and new_data.has_key('password') and manipulator.get_user().check_password(new_data['password']):
request.session[users.SESSION_KEY] = manipulator.get_user_id()
if errors or request.POST.has_key('preview'):
class CommentFormWrapper(formfields.FormWrapper):
def __init__(self, manipulator, new_data, errors, rating_choices):
formfields.FormWrapper.__init__(self, manipulator, new_data, errors)
self.rating_choices = rating_choices
def ratings(self):
field_list = [self['rating%d' % (i+1)] for i in range(len(rating_choices))]
for i, f in enumerate(field_list):
f.choice = rating_choices[i]
return field_list
comment = errors and '' or manipulator.get_comment(new_data)
comment_form = CommentFormWrapper(manipulator, new_data, errors, rating_choices)
return render_to_response('comments/preview', {
'comment': comment,
'comment_form': comment_form,
'options': options,
'target': target,
'hash': security_hash,
'rating_options': rating_options,
'ratings_optional': comments.RATINGS_OPTIONAL in option_list,
'ratings_required': comments.RATINGS_REQUIRED in option_list,
'rating_range': rating_range,
'rating_choices': rating_choices,
}, context_instance=DjangoContext(request))
elif request.POST.has_key('post'):
# If the IP is banned, mail the admins, do NOT save the comment, and
# serve up the "Thanks for posting" page as if the comment WAS posted.
if request.META['REMOTE_ADDR'] in BANNED_IPS:
mail_admins("Banned IP attempted to post comment", str(request.POST) + "\n\n" + str(request.META))
else:
manipulator.do_html2python(new_data)
comment = manipulator.save(new_data)
return HttpResponseRedirect("/comments/posted/?c=%s:%s" % (content_type_id, object_id))
else:
raise Http404, "The comment form didn't provide either 'preview' or 'post'"
def post_free_comment(request):
"""
Post a free comment (not requiring a log in)
Redirects to `comments.comments.comment_was_posted` view on success.
Templates: `comment_free_preview`
Context:
comment
comment being posted
comment_form
comment form object
options
comment options
target
comment target
hash
            security hash (must be included in a posted form to successfully
post a comment).
"""
if not request.POST:
raise Http404, "Only POSTs are allowed"
try:
options, target, security_hash = request.POST['options'], request.POST['target'], request.POST['gonzo']
except KeyError:
raise Http404, "One or more of the required fields wasn't submitted"
if comments.get_security_hash(options, '', '', target) != security_hash:
raise Http404, "Somebody tampered with the comment form (security violation)"
content_type_id, object_id = target.split(':') # target is something like '52:5157'
content_type = contenttypes.get_object(pk=content_type_id)
try:
obj = content_type.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
raise Http404, "The comment form had an invalid 'target' parameter -- the object ID was invalid"
option_list = options.split(',')
new_data = request.POST.copy()
new_data['content_type_id'] = content_type_id
new_data['object_id'] = object_id
new_data['ip_address'] = request.META['REMOTE_ADDR']
new_data['is_public'] = comments.IS_PUBLIC in option_list
manipulator = PublicFreeCommentManipulator()
errors = manipulator.get_validation_errors(new_data)
if errors or request.POST.has_key('preview'):
comment = errors and '' or manipulator.get_comment(new_data)
return render_to_response('comments/free_preview', {
'comment': comment,
'comment_form': formfields.FormWrapper(manipulator, new_data, errors),
'options': options,
'target': target,
'hash': security_hash,
}, context_instance=DjangoContext(request))
elif request.POST.has_key('post'):
# If the IP is banned, mail the admins, do NOT save the comment, and
# serve up the "Thanks for posting" page as if the comment WAS posted.
if request.META['REMOTE_ADDR'] in BANNED_IPS:
from django.core.mail import mail_admins
mail_admins("Practical joker", str(request.POST) + "\n\n" + str(request.META))
else:
manipulator.do_html2python(new_data)
comment = manipulator.save(new_data)
return HttpResponseRedirect("/comments/posted/?c=%s:%s" % (content_type_id, object_id))
else:
raise Http404, "The comment form didn't provide either 'preview' or 'post'"
def comment_was_posted(request):
"""
Display "comment was posted" success page
Templates: `comment_posted`
Context:
object
The object the comment was posted on
"""
obj = None
if request.GET.has_key('c'):
content_type_id, object_id = request.GET['c'].split(':')
try:
content_type = contenttypes.get_object(pk=content_type_id)
obj = content_type.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
pass
return render_to_response('comments/posted', {'object': obj}, context_instance=DjangoContext(request))
|
tungvx/deploy
|
Django-0.90/django/contrib/comments/views/comments.py
|
Python
|
apache-2.0
| 16,510 | 0.006481 |
import unittest
from unittest.mock import patch, MagicMock
from twitchcancer.api.pubsubmanager import PubSubManager
# PubSubManager.instance()
class TestPubSubManagerInstance(unittest.TestCase):
# check that we only store one instance of any topic
@patch('twitchcancer.api.pubsubmanager.PubSubManager.__new__', side_effect=PubSubManager.__new__)
def test_all(self, new):
PubSubManager.instance()
PubSubManager.instance()
self.assertEqual(new.call_count, 1)
# PubSubManager.subscribe()
class TestPubSubManagerSubscribe(unittest.TestCase):
# subscribe to a new topic
def test_subscribe_new(self):
p = PubSubManager()
p.subscribe("client", "topic")
# check that the topic was created
self.assertEqual(len(p.subscriptions.keys()), 1)
# check that we are subbed
self.assertTrue("client" in p.subscriptions["topic"])
        self.assertEqual(len(p.subscriptions["topic"]), 1)
# subscribe to an existing topic
def test_subscribe_existing(self):
p = PubSubManager()
p.subscriptions["topic"] = {"other client"}
p.subscribe("client", "topic")
# check that the topic was reused
self.assertEqual(len(p.subscriptions.keys()), 1)
# check that we are subbed
self.assertTrue("client" in p.subscriptions["topic"])
        self.assertEqual(len(p.subscriptions["topic"]), 2)
# PubSubManager.unsubscribe()
class TestPubSubManagerUnsubscribe(unittest.TestCase):
# unsubscribe from an existing topic
def test_unsubscribe_existing(self):
p = PubSubManager()
p.subscriptions["topic"] = {"client", "other client"}
p.unsubscribe("client", "topic")
# check that we are not subbed anymore
self.assertTrue("client" not in p.subscriptions["topic"])
# unsubscribe from an existing topic as the last client
def test_unsubscribe_existing_last(self):
p = PubSubManager()
p.subscriptions["topic"] = {"client"}
p.unsubscribe("client", "topic")
# check that the topic was garbage collected
self.assertTrue("topic" not in p.subscriptions)
# unsubscribe from an unknown topic
def test_unsubscribe_not_existing(self):
p = PubSubManager()
p.unsubscribe("client", "topic")
# check that the topic wasn't created
self.assertTrue("topic" not in p.subscriptions)
# PubSubManager.unsubscribe_all()
class TestPubSubManagerUnsubscribeAll(unittest.TestCase):
# check that unsubcribe is called for all topics
@patch('twitchcancer.api.pubsubmanager.PubSubManager.unsubscribe')
def test_unsubscribe_all(self, unsubscribe):
p = PubSubManager()
p.subscriptions["topic"] = {"client"}
p.subscriptions["topic 2"] = {"client"}
p.unsubscribe_all("client")
# check the number of calls
# TODO: check the actual arguments of each call
self.assertEqual(unsubscribe.call_count, 2)
# PubSubManager.publish()
class TestPubSubManagerPublish(unittest.TestCase):
# check that a client subscribed to a topic gets data on publish()
def test_publish_subscribed(self):
# subscribe a client to a topic
client = MagicMock()
p = PubSubManager()
p.subscriptions["topic"] = {client}
# publish data for that topic
topic = MagicMock()
topic.payload = MagicMock(return_value="payload")
p.publish(topic)
# make sure the client got data
client.send.assert_called_once_with("topic", "payload")
# check that a client not subscribed to a topic doesn't get data on publish()
def test_publish_not_subscribed(self):
# subscribe a client to a topic
client = MagicMock()
p = PubSubManager()
p.subscriptions["topic"] = {client}
# publish data for another topic
topic = MagicMock()
topic.match = MagicMock(return_value=False)
p.publish(topic)
# make sure the client didn't get called
self.assertFalse(client.send.called)
# PubSubManager.publish_one()
class TestPubSubManagerPublishOne(unittest.TestCase):
def test_publish_one_existing(self):
client = MagicMock()
topic = MagicMock()
topic.payload = MagicMock(return_value="payload")
with patch('twitchcancer.api.pubsubtopic.PubSubTopic.find', return_value=topic):
PubSubManager().publish_one(client, "topic")
# make sure the client got data
client.send.assert_called_once_with("topic", "payload")
@patch('twitchcancer.api.pubsubtopic.PubSubTopic.find', return_value=None)
def test_publish_one_not_existing(self, find):
client = MagicMock()
PubSubManager().publish_one(client, "topic")
# make sure the client didn't get called
self.assertFalse(client.send.called)
|
Benzhaomin/TwitchCancer
|
twitchcancer/api/tests/test_pubsubmanager.py
|
Python
|
gpl-3.0
| 4,912 | 0.000611 |