repo_name
stringlengths 5
92
| path
stringlengths 4
232
| copies
stringclasses 19
values | size
stringlengths 4
7
| content
stringlengths 721
1.04M
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 15
997
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|
shruthiag96/ns3-dev-vns | src/lr-wpan/bindings/modulegen__gcc_LP64.py | 1 | 418364 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Pybindgen error handler that downgrades wrapper failures to warnings.

    Auto-generated bindings routinely contain a few wrappers that pybindgen
    cannot generate; rather than aborting the whole module build, each
    failure is reported as a Python warning and generation continues.
    """
    def handle_error(self, wrapper, exception, traceback_):
        # Keep the exact message format pybindgen users expect.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        # Returning True tells pybindgen the error was handled: skip this
        # wrapper and keep generating the rest of the module.
        return True
# Install the lenient handler globally so binding-generation errors in this
# (auto-generated) module are reported as warnings instead of hard failures.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen Module for ns-3's lr-wpan bindings.

    The module is named ``ns.lr_wpan`` and wraps symbols from the C++
    ``::ns3`` namespace.
    """
    return Module('ns.lr_wpan', cpp_namespace='::ns3')
def register_types(module):
root_module = module.get_root()
## lr-wpan-mac.h (module 'lr-wpan'): ns3::LrWpanTxOption [enumeration]
module.add_enum('LrWpanTxOption', ['TX_OPTION_NONE', 'TX_OPTION_ACK', 'TX_OPTION_GTS', 'TX_OPTION_INDIRECT'])
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhyOption [enumeration]
module.add_enum('LrWpanPhyOption', ['IEEE_802_15_4_868MHZ_BPSK', 'IEEE_802_15_4_915MHZ_BPSK', 'IEEE_802_15_4_868MHZ_ASK', 'IEEE_802_15_4_915MHZ_ASK', 'IEEE_802_15_4_868MHZ_OQPSK', 'IEEE_802_15_4_915MHZ_OQPSK', 'IEEE_802_15_4_2_4GHZ_OQPSK', 'IEEE_802_15_4_INVALID_PHY_OPTION'])
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhyEnumeration [enumeration]
module.add_enum('LrWpanPhyEnumeration', ['IEEE_802_15_4_PHY_BUSY', 'IEEE_802_15_4_PHY_BUSY_RX', 'IEEE_802_15_4_PHY_BUSY_TX', 'IEEE_802_15_4_PHY_FORCE_TRX_OFF', 'IEEE_802_15_4_PHY_IDLE', 'IEEE_802_15_4_PHY_INVALID_PARAMETER', 'IEEE_802_15_4_PHY_RX_ON', 'IEEE_802_15_4_PHY_SUCCESS', 'IEEE_802_15_4_PHY_TRX_OFF', 'IEEE_802_15_4_PHY_TX_ON', 'IEEE_802_15_4_PHY_UNSUPPORTED_ATTRIBUTE', 'IEEE_802_15_4_PHY_READ_ONLY', 'IEEE_802_15_4_PHY_UNSPECIFIED'])
## lr-wpan-mac.h (module 'lr-wpan'): ns3::LrWpanMcpsDataConfirmStatus [enumeration]
module.add_enum('LrWpanMcpsDataConfirmStatus', ['IEEE_802_15_4_SUCCESS', 'IEEE_802_15_4_TRANSACTION_OVERFLOW', 'IEEE_802_15_4_TRANSACTION_EXPIRED', 'IEEE_802_15_4_CHANNEL_ACCESS_FAILURE', 'IEEE_802_15_4_INVALID_ADDRESS', 'IEEE_802_15_4_INVALID_GTS', 'IEEE_802_15_4_NO_ACK', 'IEEE_802_15_4_COUNTER_ERROR', 'IEEE_802_15_4_FRAME_TOO_LONG', 'IEEE_802_15_4_UNAVAILABLE_KEY', 'IEEE_802_15_4_UNSUPPORTED_SECURITY', 'IEEE_802_15_4_INVALID_PARAMETER'])
## lr-wpan-mac.h (module 'lr-wpan'): ns3::LrWpanAssociationStatus [enumeration]
module.add_enum('LrWpanAssociationStatus', ['ASSOCIATED', 'PAN_AT_CAPACITY', 'PAN_ACCESS_DENIED', 'ASSOCIATED_WITHOUT_ADDRESS', 'DISASSOCIATED'])
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPibAttributeIdentifier [enumeration]
module.add_enum('LrWpanPibAttributeIdentifier', ['phyCurrentChannel', 'phyChannelsSupported', 'phyTransmitPower', 'phyCCAMode', 'phyCurrentPage', 'phyMaxFrameDuration', 'phySHRDuration', 'phySymbolsPerOctet'])
## lr-wpan-mac.h (module 'lr-wpan'): ns3::LrWpanMacState [enumeration]
module.add_enum('LrWpanMacState', ['MAC_IDLE', 'MAC_CSMA', 'MAC_SENDING', 'MAC_ACK_PENDING', 'CHANNEL_ACCESS_FAILURE', 'CHANNEL_IDLE', 'SET_PHY_TX_ON'])
## lr-wpan-mac.h (module 'lr-wpan'): ns3::LrWpanAddressMode [enumeration]
module.add_enum('LrWpanAddressMode', ['NO_PANID_ADDR', 'ADDR_MODE_RESERVED', 'SHORT_ADDR', 'EXT_ADDR'])
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper [class]
module.add_class('AsciiTraceHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice [class]
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanEdPower [struct]
module.add_class('LrWpanEdPower')
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhyDataAndSymbolRates [struct]
module.add_class('LrWpanPhyDataAndSymbolRates')
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhyPibAttributes [struct]
module.add_class('LrWpanPhyPibAttributes')
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhyPpduHeaderSymbolNumber [struct]
module.add_class('LrWpanPhyPpduHeaderSymbolNumber')
## lr-wpan-spectrum-value-helper.h (module 'lr-wpan'): ns3::LrWpanSpectrumValueHelper [class]
module.add_class('LrWpanSpectrumValueHelper')
## mac16-address.h (module 'network'): ns3::Mac16Address [class]
module.add_class('Mac16Address', import_from_module='ns.network')
## mac16-address.h (module 'network'): ns3::Mac16Address [class]
root_module['ns3::Mac16Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac64-address.h (module 'network'): ns3::Mac64Address [class]
module.add_class('Mac64Address', import_from_module='ns.network')
## mac64-address.h (module 'network'): ns3::Mac64Address [class]
root_module['ns3::Mac64Address'].implicitly_converts_to(root_module['ns3::Address'])
## lr-wpan-mac.h (module 'lr-wpan'): ns3::McpsDataConfirmParams [struct]
module.add_class('McpsDataConfirmParams')
## lr-wpan-mac.h (module 'lr-wpan'): ns3::McpsDataIndicationParams [struct]
module.add_class('McpsDataIndicationParams')
## lr-wpan-mac.h (module 'lr-wpan'): ns3::McpsDataRequestParams [struct]
module.add_class('McpsDataRequestParams')
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer', import_from_module='ns.network')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
## pcap-file.h (module 'network'): ns3::PcapFile [class]
module.add_class('PcapFile', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [class]
module.add_class('PcapHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [enumeration]
module.add_enum('', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_LINUX_SSL', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO', 'DLT_IEEE802_15_4', 'DLT_NETLINK'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice [class]
module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned char, signed char> [class]
module.add_class('SequenceNumber8', import_from_module='ns.network')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## nstime.h (module 'core'): ns3::TimeWithUnit [class]
module.add_class('TimeWithUnit', import_from_module='ns.core')
## traced-value.h (module 'core'): ns3::TracedValue<ns3::LrWpanMacState> [class]
module.add_class('TracedValue', template_parameters=['ns3::LrWpanMacState'])
## traced-value.h (module 'core'): ns3::TracedValue<ns3::LrWpanPhyEnumeration> [class]
module.add_class('TracedValue', template_parameters=['ns3::LrWpanPhyEnumeration'])
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## lr-wpan-helper.h (module 'lr-wpan'): ns3::LrWpanHelper [class]
module.add_class('LrWpanHelper', parent=[root_module['ns3::PcapHelperForDevice'], root_module['ns3::AsciiTraceHelperForDevice']])
## lr-wpan-lqi-tag.h (module 'lr-wpan'): ns3::LrWpanLqiTag [class]
module.add_class('LrWpanLqiTag', parent=root_module['ns3::Tag'])
## lr-wpan-mac-header.h (module 'lr-wpan'): ns3::LrWpanMacHeader [class]
module.add_class('LrWpanMacHeader', parent=root_module['ns3::Header'])
## lr-wpan-mac-header.h (module 'lr-wpan'): ns3::LrWpanMacHeader::LrWpanMacType [enumeration]
module.add_enum('LrWpanMacType', ['LRWPAN_MAC_BEACON', 'LRWPAN_MAC_DATA', 'LRWPAN_MAC_ACKNOWLEDGMENT', 'LRWPAN_MAC_COMMAND', 'LRWPAN_MAC_RESERVED'], outer_class=root_module['ns3::LrWpanMacHeader'])
## lr-wpan-mac-header.h (module 'lr-wpan'): ns3::LrWpanMacHeader::AddrModeType [enumeration]
module.add_enum('AddrModeType', ['NOADDR', 'RESADDR', 'SHORTADDR', 'EXTADDR'], outer_class=root_module['ns3::LrWpanMacHeader'])
## lr-wpan-mac-header.h (module 'lr-wpan'): ns3::LrWpanMacHeader::KeyIdModeType [enumeration]
module.add_enum('KeyIdModeType', ['IMPLICIT', 'NOKEYSOURCE', 'SHORTKEYSOURCE', 'LONGKEYSOURCE'], outer_class=root_module['ns3::LrWpanMacHeader'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper [class]
module.add_class('PcapFileWrapper', import_from_module='ns.network', parent=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::LrWpanInterferenceHelper, ns3::empty, ns3::DefaultDeleter<ns3::LrWpanInterferenceHelper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::LrWpanInterferenceHelper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::LrWpanInterferenceHelper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SpectrumSignalParameters, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumSignalParameters> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::SpectrumSignalParameters', 'ns3::empty', 'ns3::DefaultDeleter<ns3::SpectrumSignalParameters>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## spectrum-phy.h (module 'spectrum'): ns3::SpectrumPhy [class]
module.add_class('SpectrumPhy', import_from_module='ns.spectrum', parent=root_module['ns3::Object'])
## spectrum-signal-parameters.h (module 'spectrum'): ns3::SpectrumSignalParameters [struct]
module.add_class('SpectrumSignalParameters', import_from_module='ns.spectrum', parent=root_module['ns3::SimpleRefCount< ns3::SpectrumSignalParameters, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumSignalParameters> >'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## boolean.h (module 'core'): ns3::BooleanChecker [class]
module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## boolean.h (module 'core'): ns3::BooleanValue [class]
module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## double.h (module 'core'): ns3::DoubleValue [class]
module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## enum.h (module 'core'): ns3::EnumChecker [class]
module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## enum.h (module 'core'): ns3::EnumValue [class]
module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## integer.h (module 'core'): ns3::IntegerValue [class]
module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## lr-wpan-csmaca.h (module 'lr-wpan'): ns3::LrWpanCsmaCa [class]
module.add_class('LrWpanCsmaCa', parent=root_module['ns3::Object'])
## lr-wpan-error-model.h (module 'lr-wpan'): ns3::LrWpanErrorModel [class]
module.add_class('LrWpanErrorModel', parent=root_module['ns3::Object'])
## lr-wpan-interference-helper.h (module 'lr-wpan'): ns3::LrWpanInterferenceHelper [class]
module.add_class('LrWpanInterferenceHelper', parent=root_module['ns3::SimpleRefCount< ns3::LrWpanInterferenceHelper, ns3::empty, ns3::DefaultDeleter<ns3::LrWpanInterferenceHelper> >'])
## lr-wpan-mac.h (module 'lr-wpan'): ns3::LrWpanMac [class]
module.add_class('LrWpanMac', parent=root_module['ns3::Object'])
## lr-wpan-mac-trailer.h (module 'lr-wpan'): ns3::LrWpanMacTrailer [class]
module.add_class('LrWpanMacTrailer', parent=root_module['ns3::Trailer'])
## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhy [class]
module.add_class('LrWpanPhy', parent=root_module['ns3::SpectrumPhy'])
## lr-wpan-spectrum-signal-parameters.h (module 'lr-wpan'): ns3::LrWpanSpectrumSignalParameters [struct]
module.add_class('LrWpanSpectrumSignalParameters', parent=root_module['ns3::SpectrumSignalParameters'])
## mac16-address.h (module 'network'): ns3::Mac16AddressChecker [class]
module.add_class('Mac16AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac16-address.h (module 'network'): ns3::Mac16AddressValue [class]
module.add_class('Mac16AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac64-address.h (module 'network'): ns3::Mac64AddressChecker [class]
module.add_class('Mac64AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac64-address.h (module 'network'): ns3::Mac64AddressValue [class]
module.add_class('Mac64AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## uinteger.h (module 'core'): ns3::UintegerValue [class]
module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## lr-wpan-net-device.h (module 'lr-wpan'): ns3::LrWpanNetDevice [class]
module.add_class('LrWpanNetDevice', parent=root_module['ns3::NetDevice'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PlmeCcaConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PlmeCcaConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PlmeCcaConfirmCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PlmeEdConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PlmeEdConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PlmeEdConfirmCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, unsigned int, ns3::Ptr< ns3::Packet >, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PdDataIndicationCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, unsigned int, ns3::Ptr< ns3::Packet >, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PdDataIndicationCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, unsigned int, ns3::Ptr< ns3::Packet >, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PdDataIndicationCallback&')
typehandlers.add_type_alias(u'ns3::SequenceNumber< short unsigned int, short int >', u'ns3::SequenceNumber16')
typehandlers.add_type_alias(u'ns3::SequenceNumber< short unsigned int, short int >*', u'ns3::SequenceNumber16*')
typehandlers.add_type_alias(u'ns3::SequenceNumber< short unsigned int, short int >&', u'ns3::SequenceNumber16&')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned int, int >', u'ns3::SequenceNumber32')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned int, int >*', u'ns3::SequenceNumber32*')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned int, int >&', u'ns3::SequenceNumber32&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanMacState, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::LrWpanMacStateCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanMacState, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::LrWpanMacStateCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanMacState, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::LrWpanMacStateCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::LrWpanPibAttributeIdentifier, ns3::LrWpanPhyPibAttributes *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PlmeGetAttributeConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::LrWpanPibAttributeIdentifier, ns3::LrWpanPhyPibAttributes *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PlmeGetAttributeConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::LrWpanPibAttributeIdentifier, ns3::LrWpanPhyPibAttributes *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PlmeGetAttributeConfirmCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PdDataConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PdDataConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PdDataConfirmCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::McpsDataIndicationParams, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::McpsDataIndicationCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::McpsDataIndicationParams, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::McpsDataIndicationCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::McpsDataIndicationParams, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::McpsDataIndicationCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::McpsDataConfirmParams, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::McpsDataConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::McpsDataConfirmParams, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::McpsDataConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::McpsDataConfirmParams, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::McpsDataConfirmCallback&')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned char, signed char >', u'ns3::SequenceNumber8')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned char, signed char >*', u'ns3::SequenceNumber8*')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned char, signed char >&', u'ns3::SequenceNumber8&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::LrWpanPibAttributeIdentifier, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PlmeSetAttributeConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::LrWpanPibAttributeIdentifier, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PlmeSetAttributeConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::LrWpanPibAttributeIdentifier, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PlmeSetAttributeConfirmCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::PlmeSetTRXStateConfirmCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::PlmeSetTRXStateConfirmCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::LrWpanPhyEnumeration, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::PlmeSetTRXStateConfirmCallback&')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
## Register a nested module for the namespace TracedValueCallback
nested_module = module.add_cpp_namespace('TracedValueCallback')
register_types_ns3_TracedValueCallback(nested_module)
## Register a nested module for the namespace internal
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the ns3::FatalImpl namespace (no types to register here)."""
    root_module = module.get_root()  # fetched by the generator even though unused in this namespace
def register_types_ns3_Hash(module):
    """Register types for the ns3::Hash namespace.

    Registers the Hash::Implementation base class and the Hash32/Hash64
    function-pointer type aliases, then recurses into ns3::Hash::Function.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Aliases for the raw hash-function pointer types (32- and 64-bit variants).
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register types for the ns3::Hash::Function namespace.

    All four classes derive from the ns3::Hash::Implementation base class
    registered previously in register_types_ns3_Hash.
    """
    root_module = module.get_root()
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_types_ns3_TracedValueCallback(module):
    """Register types for the ns3::TracedValueCallback namespace.

    Adds type aliases mapping raw function-pointer signatures
    ``void (*)(T, T)`` (old value, new value) to the corresponding
    ns3::TracedValueCallback::X names, for plain scalars as well as the
    lr-wpan specific LrWpanPhyEnumeration and LrWpanMacState enums.
    """
    root_module = module.get_root()
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *', u'ns3::TracedValueCallback::Bool')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) **', u'ns3::TracedValueCallback::Bool*')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *&', u'ns3::TracedValueCallback::Bool&')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) *', u'ns3::TracedValueCallback::Double')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) **', u'ns3::TracedValueCallback::Double*')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) *&', u'ns3::TracedValueCallback::Double&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::SequenceNumber32, ns3::SequenceNumber32 ) *', u'ns3::TracedValueCallback::SequenceNumber32')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::SequenceNumber32, ns3::SequenceNumber32 ) **', u'ns3::TracedValueCallback::SequenceNumber32*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::SequenceNumber32, ns3::SequenceNumber32 ) *&', u'ns3::TracedValueCallback::SequenceNumber32&')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *', u'ns3::TracedValueCallback::Uint8')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) **', u'ns3::TracedValueCallback::Uint8*')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *&', u'ns3::TracedValueCallback::Uint8&')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *', u'ns3::TracedValueCallback::Uint32')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) **', u'ns3::TracedValueCallback::Uint32*')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *&', u'ns3::TracedValueCallback::Uint32&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *', u'ns3::TracedValueCallback::Time')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) **', u'ns3::TracedValueCallback::Time*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *&', u'ns3::TracedValueCallback::Time&')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *', u'ns3::TracedValueCallback::Int16')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) **', u'ns3::TracedValueCallback::Int16*')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *&', u'ns3::TracedValueCallback::Int16&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::LrWpanPhyEnumeration, ns3::LrWpanPhyEnumeration ) *', u'ns3::TracedValueCallback::LrWpanPhyEnumeration')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::LrWpanPhyEnumeration, ns3::LrWpanPhyEnumeration ) **', u'ns3::TracedValueCallback::LrWpanPhyEnumeration*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::LrWpanPhyEnumeration, ns3::LrWpanPhyEnumeration ) *&', u'ns3::TracedValueCallback::LrWpanPhyEnumeration&')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *', u'ns3::TracedValueCallback::Int32')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) **', u'ns3::TracedValueCallback::Int32*')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *&', u'ns3::TracedValueCallback::Int32&')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *', u'ns3::TracedValueCallback::Int8')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) **', u'ns3::TracedValueCallback::Int8*')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *&', u'ns3::TracedValueCallback::Int8&')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *', u'ns3::TracedValueCallback::Uint16')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) **', u'ns3::TracedValueCallback::Uint16*')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *&', u'ns3::TracedValueCallback::Uint16&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::LrWpanMacState, ns3::LrWpanMacState ) *', u'ns3::TracedValueCallback::LrWpanMacState')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::LrWpanMacState, ns3::LrWpanMacState ) **', u'ns3::TracedValueCallback::LrWpanMacState*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::LrWpanMacState, ns3::LrWpanMacState ) *&', u'ns3::TracedValueCallback::LrWpanMacState&')
def register_types_ns3_internal(module):
    """Register types for the ns3::internal namespace (no types to register here)."""
    root_module = module.get_root()  # fetched by the generator even though unused in this namespace
def register_methods(root_module):
    """Register the methods of every previously registered class.

    Dispatches to one register_Ns3X_methods helper per class, passing the
    root module and the class wrapper looked up by its fully qualified C++
    name. The call order mirrors the registration order of the classes
    themselves (base classes such as ns3::NetDevice appear before derived
    classes such as ns3::LrWpanNetDevice).
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AsciiTraceHelper_methods(root_module, root_module['ns3::AsciiTraceHelper'])
    register_Ns3AsciiTraceHelperForDevice_methods(root_module, root_module['ns3::AsciiTraceHelperForDevice'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3LrWpanEdPower_methods(root_module, root_module['ns3::LrWpanEdPower'])
    register_Ns3LrWpanPhyDataAndSymbolRates_methods(root_module, root_module['ns3::LrWpanPhyDataAndSymbolRates'])
    register_Ns3LrWpanPhyPibAttributes_methods(root_module, root_module['ns3::LrWpanPhyPibAttributes'])
    register_Ns3LrWpanPhyPpduHeaderSymbolNumber_methods(root_module, root_module['ns3::LrWpanPhyPpduHeaderSymbolNumber'])
    register_Ns3LrWpanSpectrumValueHelper_methods(root_module, root_module['ns3::LrWpanSpectrumValueHelper'])
    register_Ns3Mac16Address_methods(root_module, root_module['ns3::Mac16Address'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3Mac64Address_methods(root_module, root_module['ns3::Mac64Address'])
    register_Ns3McpsDataConfirmParams_methods(root_module, root_module['ns3::McpsDataConfirmParams'])
    register_Ns3McpsDataIndicationParams_methods(root_module, root_module['ns3::McpsDataIndicationParams'])
    register_Ns3McpsDataRequestParams_methods(root_module, root_module['ns3::McpsDataRequestParams'])
    register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3PcapFile_methods(root_module, root_module['ns3::PcapFile'])
    register_Ns3PcapHelper_methods(root_module, root_module['ns3::PcapHelper'])
    register_Ns3PcapHelperForDevice_methods(root_module, root_module['ns3::PcapHelperForDevice'])
    register_Ns3SequenceNumber8_methods(root_module, root_module['ns3::SequenceNumber8'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TracedValue__Ns3LrWpanMacState_methods(root_module, root_module['ns3::TracedValue< ns3::LrWpanMacState >'])
    register_Ns3TracedValue__Ns3LrWpanPhyEnumeration_methods(root_module, root_module['ns3::TracedValue< ns3::LrWpanPhyEnumeration >'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3LrWpanHelper_methods(root_module, root_module['ns3::LrWpanHelper'])
    register_Ns3LrWpanLqiTag_methods(root_module, root_module['ns3::LrWpanLqiTag'])
    register_Ns3LrWpanMacHeader_methods(root_module, root_module['ns3::LrWpanMacHeader'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3PcapFileWrapper_methods(root_module, root_module['ns3::PcapFileWrapper'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3LrWpanInterferenceHelper_Ns3Empty_Ns3DefaultDeleter__lt__ns3LrWpanInterferenceHelper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::LrWpanInterferenceHelper, ns3::empty, ns3::DefaultDeleter<ns3::LrWpanInterferenceHelper> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3SpectrumSignalParameters_Ns3Empty_Ns3DefaultDeleter__lt__ns3SpectrumSignalParameters__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::SpectrumSignalParameters, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumSignalParameters> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3SpectrumPhy_methods(root_module, root_module['ns3::SpectrumPhy'])
    register_Ns3SpectrumSignalParameters_methods(root_module, root_module['ns3::SpectrumSignalParameters'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
    register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker'])
    register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3LrWpanCsmaCa_methods(root_module, root_module['ns3::LrWpanCsmaCa'])
    register_Ns3LrWpanErrorModel_methods(root_module, root_module['ns3::LrWpanErrorModel'])
    register_Ns3LrWpanInterferenceHelper_methods(root_module, root_module['ns3::LrWpanInterferenceHelper'])
    register_Ns3LrWpanMac_methods(root_module, root_module['ns3::LrWpanMac'])
    register_Ns3LrWpanMacTrailer_methods(root_module, root_module['ns3::LrWpanMacTrailer'])
    register_Ns3LrWpanPhy_methods(root_module, root_module['ns3::LrWpanPhy'])
    register_Ns3LrWpanSpectrumSignalParameters_methods(root_module, root_module['ns3::LrWpanSpectrumSignalParameters'])
    register_Ns3Mac16AddressChecker_methods(root_module, root_module['ns3::Mac16AddressChecker'])
    register_Ns3Mac16AddressValue_methods(root_module, root_module['ns3::Mac16AddressValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3Mac64AddressChecker_methods(root_module, root_module['ns3::Mac64AddressChecker'])
    register_Ns3Mac64AddressValue_methods(root_module, root_module['ns3::Mac64AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3LrWpanNetDevice_methods(root_module, root_module['ns3::LrWpanNetDevice'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register the methods, constructors and operators of ns3::Address.

    Adds the comparison/output-stream operators, the three constructors,
    and the serialization/inspection member functions of the polymorphic
    ns3::Address wrapper class.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AsciiTraceHelper_methods(root_module, cls):
    """Register Python bindings for ns3::AsciiTraceHelper (trace-helper.h, module 'network').

    Adds the copy/default constructors, the CreateFileStream factory, the
    static Default*Sink trace callbacks (with and without context), and the
    filename-generation helpers to the pybindgen class wrapper ``cls``.
    ``root_module`` is the root pybindgen Module (unused here but kept for
    the generator's uniform registration signature).
    """
    ## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper(ns3::AsciiTraceHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AsciiTraceHelper const &', 'arg0')])
    ## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper() [constructor]
    cls.add_constructor([])
    ## trace-helper.h (module 'network'): ns3::Ptr<ns3::OutputStreamWrapper> ns3::AsciiTraceHelper::CreateFileStream(std::string filename, std::_Ios_Openmode filemode=std::ios_base::out) [member function]
    cls.add_method('CreateFileStream',
                   'ns3::Ptr< ns3::OutputStreamWrapper >',
                   [param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode', default_value='std::ios_base::out')])
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultDequeueSinkWithContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultDequeueSinkWithoutContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultDropSinkWithContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultDropSinkWithoutContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultEnqueueSinkWithContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultEnqueueSinkWithoutContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultReceiveSinkWithContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('DefaultReceiveSinkWithoutContext',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
                   is_static=True)
    ## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
    cls.add_method('GetFilenameFromDevice',
                   'std::string',
                   [param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
    ## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
    cls.add_method('GetFilenameFromInterfacePair',
                   'std::string',
                   [param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
    return
def register_Ns3AsciiTraceHelperForDevice_methods(root_module, cls):
    """Register Python bindings for ns3::AsciiTraceHelperForDevice (trace-helper.h, module 'network').

    Adds the constructors, all EnableAscii overloads (per-device, by name,
    per-container, per node/device id, with either a filename prefix or an
    OutputStreamWrapper), EnableAsciiAll, and the pure-virtual
    EnableAsciiInternal hook to the pybindgen class wrapper ``cls``.
    ``root_module`` is the root pybindgen Module (unused here but kept for
    the generator's uniform registration signature).
    """
    ## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice(ns3::AsciiTraceHelperForDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AsciiTraceHelperForDevice const &', 'arg0')])
    ## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice() [constructor]
    cls.add_constructor([])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename=false) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::Ptr<ns3::NetDevice> nd) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::Ptr< ns3::NetDevice >', 'nd')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, std::string ndName, bool explicitFilename=false) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'explicitFilename', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string ndName) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'ndName')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NetDeviceContainer d) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NetDeviceContainer d) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NetDeviceContainer', 'd')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NodeContainer n) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::NodeContainer', 'n')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NodeContainer n) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NodeContainer', 'n')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool explicitFilename) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'explicitFilename')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, uint32_t nodeid, uint32_t deviceid) [member function]
    cls.add_method('EnableAscii',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(std::string prefix) [member function]
    cls.add_method('EnableAsciiAll',
                   'void',
                   [param('std::string', 'prefix')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(ns3::Ptr<ns3::OutputStreamWrapper> stream) [member function]
    cls.add_method('EnableAsciiAll',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')])
    ## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiInternal(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename) [member function]
    cls.add_method('EnableAsciiInternal',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register Python bindings for ns3::AttributeConstructionList (attribute-construction-list.h, module 'core').

    Adds the constructors plus the Add, Begin/End iterator accessors and the
    checker-based Find lookup to the pybindgen class wrapper ``cls``.
    ``root_module`` is the root pybindgen Module (unused here but kept for
    the generator's uniform registration signature).
    """
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register Python bindings for the nested ns3::AttributeConstructionList::Item struct.

    Adds the constructors and exposes the ``checker``, ``name`` and ``value``
    fields as writable instance attributes on the pybindgen class wrapper
    ``cls``.  ``root_module`` is the root pybindgen Module (unused here but
    kept for the generator's uniform registration signature).
    """
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Register Python bindings for ns3::Buffer (buffer.h, module 'network').

    Adds the constructors, byte-range manipulation (AddAtEnd/AddAtStart/
    RemoveAtEnd/RemoveAtStart), iterator accessors (Begin/End), data copy and
    (de)serialization helpers, and size queries to the pybindgen class
    wrapper ``cls``.  ``root_module`` is the root pybindgen Module (unused
    here but kept for the generator's uniform registration signature).
    """
    ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(uint32_t end) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtStart(uint32_t start) [member function]
    cls.add_method('AddAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
    cls.add_method('Begin',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Buffer',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
    cls.add_method('End',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
    cls.add_method('PeekData',
                   'uint8_t const *',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register Python bindings for the nested ns3::Buffer::Iterator class.

    Adds the constructors, cursor movement (Next/Prev), position/size
    queries, IP checksum helpers, and the full family of Read*/Write*
    accessors (host order, network/big-endian "Ntoh"/"Hton", and
    little-endian "Lsbtoh"/"Htolsb" variants for 16/32/64-bit integers) to
    the pybindgen class wrapper ``cls``.  ``root_module`` is the root
    pybindgen Module (unused here but kept for the generator's uniform
    registration signature).
    """
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
    cls.add_method('GetDistanceFrom',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator const &', 'o')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
    cls.add_method('IsEnd',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
    cls.add_method('IsStart',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
    cls.add_method('Next',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
    cls.add_method('Next',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::PeekU8() [member function]
    cls.add_method('PeekU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
    cls.add_method('Prev',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
    cls.add_method('Prev',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(ns3::Buffer::Iterator start, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
    cls.add_method('ReadLsbtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
    cls.add_method('ReadLsbtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
    cls.add_method('ReadLsbtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
    cls.add_method('ReadNtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
    cls.add_method('ReadNtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
    cls.add_method('ReadNtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Write',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
    cls.add_method('WriteHtolsbU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
    cls.add_method('WriteHtolsbU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
    cls.add_method('WriteHtolsbU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
    cls.add_method('WriteHtonU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
    cls.add_method('WriteHtonU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
    cls.add_method('WriteHtonU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagIterator (packet.h, module 'network').

    Adds the copy constructor and the HasNext/Next iteration protocol to the
    pybindgen class wrapper ``cls``.  ``root_module`` is the root pybindgen
    Module (unused here but kept for the generator's uniform registration
    signature).
    """
    ## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    ## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::ByteTagIterator::Item',
                   [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register Python bindings for the nested ns3::ByteTagIterator::Item class.

    Adds the copy constructor and the GetStart/GetEnd byte-offset queries,
    GetTag extraction, and GetTypeId accessor to the pybindgen class wrapper
    ``cls``.  ``root_module`` is the root pybindgen Module (unused here but
    kept for the generator's uniform registration signature).
    """
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
    cls.add_method('GetEnd',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
    cls.add_method('GetStart',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
    cls.add_method('GetTag',
                   'void',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList (byte-tag-list.h, module 'network').

    Adds the constructors, the Add overloads (per-tag and list merge),
    offset maintenance (AddAtEnd/AddAtStart/Adjust), the range-limited Begin
    iterator, and RemoveAll to the pybindgen class wrapper ``cls``.
    ``root_module`` is the root pybindgen Module (unused here but kept for
    the generator's uniform registration signature).
    """
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
    cls.add_constructor([])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
    cls.add_method('Add',
                   'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
    cls.add_method('Add',
                   'void',
                   [param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t appendOffset) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('int32_t', 'appendOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t prependOffset) [member function]
    cls.add_method('AddAtStart',
                   'void',
                   [param('int32_t', 'prependOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Adjust(int32_t adjustment) [member function]
    cls.add_method('Adjust',
                   'void',
                   [param('int32_t', 'adjustment')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
    cls.add_method('Begin',
                   'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
    cls.add_method('RemoveAll',
                   'void',
                   [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register Python bindings for the nested ns3::ByteTagList::Iterator class.

    Adds the copy constructor, the GetOffsetStart query, and the
    HasNext/Next iteration protocol to the pybindgen class wrapper ``cls``.
    ``root_module`` is the root pybindgen Module (unused here but kept for
    the generator's uniform registration signature).
    """
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    ## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
    cls.add_method('GetOffsetStart',
                   'uint32_t',
                   [],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::ByteTagList::Iterator::Item',
                   [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register Python bindings for the nested ns3::ByteTagList::Iterator::Item struct.

    Adds the constructors and exposes the ``buf``, ``end``, ``size``,
    ``start`` and ``tid`` fields as writable instance attributes on the
    pybindgen class wrapper ``cls``.  ``root_module`` is the root pybindgen
    Module (unused here but kept for the generator's uniform registration
    signature).
    """
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackBase (callback.h, module 'core').

    Adds the public constructors, the GetImpl accessor, and the protected
    implementation-pointer constructor to the pybindgen class wrapper
    ``cls``.  ``root_module`` is the root pybindgen Module (unused here but
    kept for the generator's uniform registration signature).
    """
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl',
                   'ns3::Ptr< ns3::CallbackImplBase >',
                   [],
                   is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register Python bindings for ns3::EventId (event-id.h, module 'core').

    Adds the ``!=``/``==`` comparison operators, the constructors, Cancel,
    the context/timestamp/uid accessors, the IsExpired/IsRunning state
    queries, and PeekEventImpl to the pybindgen class wrapper ``cls``.
    ``root_module`` is the root pybindgen Module (unused here but kept for
    the generator's uniform registration signature).
    """
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
    cls.add_constructor([])
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
    cls.add_method('GetTs',
                   'uint64_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
    cls.add_method('IsRunning',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
    cls.add_method('PeekEventImpl',
                   'ns3::EventImpl *',
                   [],
                   is_const=True)
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register Python bindings for ns3::Hasher (hash.h, module 'core').

    Adds the constructors (default, copy, and from a Hash::Implementation
    pointer), the GetHash32/GetHash64 overloads for raw buffers and
    std::string, and the ``clear`` reset method to the pybindgen class
    wrapper ``cls``.  ``root_module`` is the root pybindgen Module (unused
    here but kept for the generator's uniform registration signature).
    """
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear',
                   'ns3::Hasher &',
                   [])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Ipv4Address (network module, ipv4-address.h).

    Operators, constructors, and member functions are registered in the
    same order the pybindgen scanner originally emitted them.
    """
    for op in ('<', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from raw uint32_t, from dotted-quad string.
    for ctor_args in ([param('ns3::Ipv4Address const &', 'arg0')],
                      [],
                      [param('uint32_t', 'address')],
                      [param('char const *', 'address')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('CombineMask', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], CONST),
        ('ConvertFrom', 'ns3::Ipv4Address', [param('ns3::Address const &', 'address')], STATIC),
        ('Deserialize', 'ns3::Ipv4Address', [param('uint8_t const *', 'buf')], STATIC),
        ('Get', 'uint32_t', [], CONST),
        ('GetAny', 'ns3::Ipv4Address', [], STATIC),
        ('GetBroadcast', 'ns3::Ipv4Address', [], STATIC),
        ('GetLoopback', 'ns3::Ipv4Address', [], STATIC),
        ('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], CONST),
        ('GetZero', 'ns3::Ipv4Address', [], STATIC),
        ('IsBroadcast', 'bool', [], CONST),
        ('IsEqual', 'bool', [param('ns3::Ipv4Address const &', 'other')], CONST),
        ('IsLocalMulticast', 'bool', [], CONST),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], STATIC),
        ('IsMulticast', 'bool', [], CONST),
        ('IsSubnetDirectedBroadcast', 'bool', [param('ns3::Ipv4Mask const &', 'mask')], CONST),
        ('Print', 'void', [param('std::ostream &', 'os')], CONST),
        ('Serialize', 'void', [param('uint8_t *', 'buf')], CONST),
        ('Set', 'void', [param('uint32_t', 'address')], {}),
        ('Set', 'void', [param('char const *', 'address')], {}),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Ipv4Mask (network module, ipv4-address.h)."""
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from raw uint32_t, from "255.255.0.0"-style string.
    for ctor_args in ([param('ns3::Ipv4Mask const &', 'arg0')],
                      [],
                      [param('uint32_t', 'mask')],
                      [param('char const *', 'mask')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('Get', 'uint32_t', [], CONST),
        ('GetInverse', 'uint32_t', [], CONST),
        ('GetLoopback', 'ns3::Ipv4Mask', [], STATIC),
        ('GetOnes', 'ns3::Ipv4Mask', [], STATIC),
        ('GetPrefixLength', 'uint16_t', [], CONST),
        ('GetZero', 'ns3::Ipv4Mask', [], STATIC),
        ('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], CONST),
        ('IsMatch', 'bool', [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')], CONST),
        ('Print', 'void', [param('std::ostream &', 'os')], CONST),
        ('Set', 'void', [param('uint32_t', 'mask')], {}),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Ipv6Address (network module, ipv6-address.h).

    Operators, constructors, and member functions are registered in the
    same order the pybindgen scanner originally emitted them.
    """
    for op in ('<', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, from string, from raw bytes, copy, from pointer.
    for ctor_args in ([],
                      [param('char const *', 'address')],
                      [param('uint8_t *', 'address')],
                      [param('ns3::Ipv6Address const &', 'addr')],
                      [param('ns3::Ipv6Address const *', 'addr')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')], {}),
        ('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], STATIC),
        ('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], STATIC),
        ('GetAllHostsMulticast', 'ns3::Ipv6Address', [], STATIC),
        ('GetAllNodesMulticast', 'ns3::Ipv6Address', [], STATIC),
        ('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], STATIC),
        ('GetAny', 'ns3::Ipv6Address', [], STATIC),
        ('GetBytes', 'void', [param('uint8_t *', 'buf')], CONST),
        ('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], CONST),
        ('GetLoopback', 'ns3::Ipv6Address', [], STATIC),
        ('GetOnes', 'ns3::Ipv6Address', [], STATIC),
        ('GetZero', 'ns3::Ipv6Address', [], STATIC),
        ('IsAllHostsMulticast', 'bool', [], CONST),
        ('IsAllNodesMulticast', 'bool', [], CONST),
        ('IsAllRoutersMulticast', 'bool', [], CONST),
        ('IsAny', 'bool', [], CONST),
        ('IsDocumentation', 'bool', [], CONST),
        ('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], CONST),
        ('IsIpv4MappedAddress', 'bool', [], CONST),
        ('IsLinkLocal', 'bool', [], CONST),
        ('IsLinkLocalMulticast', 'bool', [], CONST),
        ('IsLocalhost', 'bool', [], CONST),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], STATIC),
        ('IsMulticast', 'bool', [], CONST),
        ('IsSolicitedMulticast', 'bool', [], CONST),
        # Autoconfiguration helpers, overloaded on the MAC address width.
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], STATIC),
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], STATIC),
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], STATIC),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac16Address', 'mac')], STATIC),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac48Address', 'mac')], STATIC),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
         [param('ns3::Mac64Address', 'mac')], STATIC),
        ('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], STATIC),
        ('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], STATIC),
        ('Print', 'void', [param('std::ostream &', 'os')], CONST),
        ('Serialize', 'void', [param('uint8_t *', 'buf')], CONST),
        ('Set', 'void', [param('char const *', 'address')], {}),
        ('Set', 'void', [param('uint8_t *', 'address')], {}),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Ipv6Prefix (network module, ipv6-address.h)."""
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, raw bytes, string, numeric length, copy, pointer.
    for ctor_args in ([],
                      [param('uint8_t *', 'prefix')],
                      [param('char const *', 'prefix')],
                      [param('uint8_t', 'prefix')],
                      [param('ns3::Ipv6Prefix const &', 'prefix')],
                      [param('ns3::Ipv6Prefix const *', 'prefix')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('GetBytes', 'void', [param('uint8_t *', 'buf')], CONST),
        ('GetLoopback', 'ns3::Ipv6Prefix', [], STATIC),
        ('GetOnes', 'ns3::Ipv6Prefix', [], STATIC),
        ('GetPrefixLength', 'uint8_t', [], CONST),
        ('GetZero', 'ns3::Ipv6Prefix', [], STATIC),
        ('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], CONST),
        ('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], CONST),
        ('Print', 'void', [param('std::ostream &', 'os')], CONST),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3LrWpanEdPower_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanEdPower (lr-wpan-phy.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LrWpanEdPower const &', 'arg0')])
    # Public data members exposed as writable instance attributes.
    for field_name, cpp_type in (('averagePower', 'double'),
                                 ('lastUpdate', 'ns3::Time'),
                                 ('measurementLength', 'ns3::Time')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3LrWpanPhyDataAndSymbolRates_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanPhyDataAndSymbolRates (lr-wpan-phy.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LrWpanPhyDataAndSymbolRates const &', 'arg0')])
    # Public data members exposed as writable instance attributes.
    for field_name in ('bitRate', 'symbolRate'):
        cls.add_instance_attribute(field_name, 'double', is_const=False)
    return
def register_Ns3LrWpanPhyPibAttributes_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanPhyPibAttributes (lr-wpan-phy.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LrWpanPhyPibAttributes const &', 'arg0')])
    # PHY PIB fields exposed as writable instance attributes.
    for field_name, cpp_type in (('phyCCAMode', 'uint8_t'),
                                 ('phyChannelsSupported', 'uint32_t [ 32 ]'),
                                 ('phyCurrentChannel', 'uint8_t'),
                                 ('phyCurrentPage', 'uint32_t'),
                                 ('phyMaxFrameDuration', 'uint32_t'),
                                 ('phySHRDuration', 'uint32_t'),
                                 ('phySymbolsPerOctet', 'double'),
                                 ('phyTransmitPower', 'uint8_t')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3LrWpanPhyPpduHeaderSymbolNumber_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanPhyPpduHeaderSymbolNumber (lr-wpan-phy.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LrWpanPhyPpduHeaderSymbolNumber const &', 'arg0')])
    # Header symbol counts exposed as writable double attributes.
    for field_name in ('phr', 'shrPreamble', 'shrSfd'):
        cls.add_instance_attribute(field_name, 'double', is_const=False)
    return
def register_Ns3LrWpanSpectrumValueHelper_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanSpectrumValueHelper (lr-wpan-spectrum-value-helper.h)."""
    # Copy constructor first (matching the generated registration order), then default.
    cls.add_constructor([param('ns3::LrWpanSpectrumValueHelper const &', 'arg0')])
    cls.add_constructor([])
    # PSD factory methods keyed by the 802.15.4 channel number.
    spectrum_ptr = 'ns3::Ptr< ns3::SpectrumValue >'
    cls.add_method('CreateNoisePowerSpectralDensity',
                   spectrum_ptr,
                   [param('uint32_t', 'channel')])
    cls.add_method('CreateTxPowerSpectralDensity',
                   spectrum_ptr,
                   [param('double', 'txPower'), param('uint32_t', 'channel')])
    # Static helper: total average power of a PSD for the given channel.
    cls.add_method('TotalAvgPower',
                   'double',
                   [param('ns3::Ptr< ns3::SpectrumValue const >', 'psd'), param('uint32_t', 'channel')],
                   is_static=True)
    return
def register_Ns3Mac16Address_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac16Address (network module, mac16-address.h)."""
    for op in ('<', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from "xx:xx" string.
    for ctor_args in ([param('ns3::Mac16Address const &', 'arg0')],
                      [],
                      [param('char const *', 'str')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('Allocate', 'ns3::Mac16Address', [], STATIC),
        ('ConvertFrom', 'ns3::Mac16Address', [param('ns3::Address const &', 'address')], STATIC),
        ('CopyFrom', 'void', [param('uint8_t const *', 'buffer')], {}),
        ('CopyTo', 'void', [param('uint8_t *', 'buffer')], CONST),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], STATIC),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac48Address (network module, mac48-address.h)."""
    for op in ('<', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from canonical "xx:xx:xx:xx:xx:xx" string.
    for ctor_args in ([param('ns3::Mac48Address const &', 'arg0')],
                      [],
                      [param('char const *', 'str')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('Allocate', 'ns3::Mac48Address', [], STATIC),
        ('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], STATIC),
        ('CopyFrom', 'void', [param('uint8_t const *', 'buffer')], {}),
        ('CopyTo', 'void', [param('uint8_t *', 'buffer')], CONST),
        ('GetBroadcast', 'ns3::Mac48Address', [], STATIC),
        # Multicast mapping, overloaded on IPv4 and IPv6 addresses.
        ('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], STATIC),
        ('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], STATIC),
        ('GetMulticast6Prefix', 'ns3::Mac48Address', [], STATIC),
        ('GetMulticastPrefix', 'ns3::Mac48Address', [], STATIC),
        ('IsBroadcast', 'bool', [], CONST),
        ('IsGroup', 'bool', [], CONST),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], STATIC),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3Mac64Address_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac64Address (network module, mac64-address.h)."""
    for op in ('<', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from string.
    for ctor_args in ([param('ns3::Mac64Address const &', 'arg0')],
                      [],
                      [param('char const *', 'str')]):
        cls.add_constructor(ctor_args)
    # Shared flag dictionaries for the method table below.
    CONST = dict(is_const=True)
    STATIC = dict(is_static=True)
    methods = (
        ('Allocate', 'ns3::Mac64Address', [], STATIC),
        ('ConvertFrom', 'ns3::Mac64Address', [param('ns3::Address const &', 'address')], STATIC),
        ('CopyFrom', 'void', [param('uint8_t const *', 'buffer')], {}),
        ('CopyTo', 'void', [param('uint8_t *', 'buffer')], CONST),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], STATIC),
    )
    for method_name, return_type, args, flags in methods:
        cls.add_method(method_name, return_type, args, **flags)
    return
def register_Ns3McpsDataConfirmParams_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::McpsDataConfirmParams (lr-wpan-mac.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::McpsDataConfirmParams const &', 'arg0')])
    # MCPS-DATA.confirm fields exposed as writable instance attributes.
    for field_name, cpp_type in (('m_msduHandle', 'uint8_t'),
                                 ('m_status', 'ns3::LrWpanMcpsDataConfirmStatus')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3McpsDataIndicationParams_methods(root_module, cls):
    """Register Python bindings for ns3::McpsDataIndicationParams (lr-wpan-mac.h)."""
    # Default constructor, then the copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::McpsDataIndicationParams const &', 'arg0')])
    # Public data members, exposed as writable instance attributes in the
    # same order the generator emitted them.
    for field_name, cpp_type in (('m_dsn', 'uint8_t'),
                                 ('m_dstAddr', 'ns3::Mac16Address'),
                                 ('m_dstAddrMode', 'uint8_t'),
                                 ('m_dstPanId', 'uint16_t'),
                                 ('m_mpduLinkQuality', 'uint8_t'),
                                 ('m_srcAddr', 'ns3::Mac16Address'),
                                 ('m_srcAddrMode', 'uint8_t'),
                                 ('m_srcPanId', 'uint16_t')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3McpsDataRequestParams_methods(root_module, cls):
    """Register Python bindings for ns3::McpsDataRequestParams (lr-wpan-mac.h)."""
    # Copy constructor first, then the default constructor (generator order).
    cls.add_constructor([param('ns3::McpsDataRequestParams const &', 'arg0')])
    cls.add_constructor([])
    # Public data members, exposed as writable instance attributes.
    for field_name, cpp_type in (('m_dstAddr', 'ns3::Mac16Address'),
                                 ('m_dstAddrMode', 'ns3::LrWpanAddressMode'),
                                 ('m_dstPanId', 'uint16_t'),
                                 ('m_msduHandle', 'uint8_t'),
                                 ('m_srcAddrMode', 'ns3::LrWpanAddressMode'),
                                 ('m_txOptions', 'uint8_t')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
    """Register Python bindings for ns3::NetDeviceContainer (net-device-container.h)."""
    # C++ iterator type returned by Begin()/End(); hoisted to avoid repeating
    # the long spelling.
    dev_iterator = '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >'
    # Constructors: copy, default, single device, named device, and the
    # two-container concatenation form.
    cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
    cls.add_constructor([param('std::string', 'devName')])
    cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'),
                         param('ns3::NetDeviceContainer const &', 'b')])
    # Add() overloads: another container, a device pointer, or a device name.
    cls.add_method('Add', 'void', [param('ns3::NetDeviceContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    cls.add_method('Add', 'void', [param('std::string', 'deviceName')])
    # Const iteration and element access.
    cls.add_method('Begin', dev_iterator, [], is_const=True)
    cls.add_method('End', dev_iterator, [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::NetDevice >', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    """Register Python bindings for ns3::NodeContainer (node-container.h)."""
    # C++ iterator type returned by Begin()/End().
    node_iterator = '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >'
    # Constructors: copy, default, single node, named node, and the
    # concatenating forms taking two up to five containers.
    cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_constructor([param('std::string', 'nodeName')])
    container = 'ns3::NodeContainer const &'
    cls.add_constructor([param(container, 'a'), param(container, 'b')])
    cls.add_constructor([param(container, 'a'), param(container, 'b'),
                         param(container, 'c')])
    cls.add_constructor([param(container, 'a'), param(container, 'b'),
                         param(container, 'c'), param(container, 'd')])
    cls.add_constructor([param(container, 'a'), param(container, 'b'),
                         param(container, 'c'), param(container, 'd'),
                         param(container, 'e')])
    # Add() overloads: another container, a node pointer, or a node name.
    cls.add_method('Add', 'void', [param('ns3::NodeContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('Add', 'void', [param('std::string', 'nodeName')])
    # Iteration, node creation, and element access.
    cls.add_method('Begin', node_iterator, [], is_const=True)
    cls.add_method('Create', 'void', [param('uint32_t', 'n')])
    cls.add_method('Create', 'void', [param('uint32_t', 'n'), param('uint32_t', 'systemId')])
    cls.add_method('End', node_iterator, [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::Node >', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetGlobal', 'ns3::NodeContainer', [], is_static=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectBase (object-base.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute access, in both throwing and fail-safe flavours.
    cls.add_method('GetAttribute', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    # Run-time and static type identification.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetAttribute', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Trace source connection / disconnection helpers.
    cls.add_method('TraceConnect', 'bool',
                   [param('std::string', 'name'), param('std::string', 'context'),
                    param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceConnectWithoutContext', 'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnect', 'bool',
                   [param('std::string', 'name'), param('std::string', 'context'),
                    param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnectWithoutContext', 'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks.
    cls.add_method('ConstructSelf', 'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectDeleter (object.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # static void Delete(ns3::Object *)
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')],
                   is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectFactory (object-factory.h)."""
    # operator<< support for printing a factory.
    cls.add_output_stream_operator()
    # Copy constructor, default constructor, and construction from a TypeId name.
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('std::string', 'typeId')])
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    cls.add_method('Set', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # SetTypeId() overloads: TypeId object, C string, std::string.
    cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')])
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata (packet-metadata.h)."""
    # Constructors: (uid, size) and copy.
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Mutators that record header/trailer/padding operations.
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void',
                   [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void',
                   [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    # Iteration and fragment creation (const).
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator',
                   [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata',
                   [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Global switches enabling metadata tracking / checking.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Removal counterparts of the Add* operations above.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void',
                   [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void',
                   [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('Serialize', 'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata::Item (packet-metadata.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members, exposed as writable instance attributes.
    for field_name, cpp_type in (('current', 'ns3::Buffer::Iterator'),
                                 ('currentSize', 'uint32_t'),
                                 ('currentTrimedFromEnd', 'uint32_t'),
                                 ('currentTrimedFromStart', 'uint32_t'),
                                 ('isFragment', 'bool'),
                                 ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata::ItemIterator (packet-metadata.h)."""
    # Copy constructor, then construction from (metadata, buffer).
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'),
                         param('ns3::Buffer', 'buffer')])
    # Iteration protocol: HasNext() / Next().
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagIterator (packet.h)."""
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # Iteration protocol: HasNext() / Next().
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagIterator::Item (packet.h)."""
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagList (packet-tag-list.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # Note: Add() is declared const in the C++ header.
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagList::TagData (packet-tag-list.h)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members, exposed as writable instance attributes.
    for field_name, cpp_type in (('count', 'uint32_t'),
                                 ('data', 'uint8_t [ 21 ]'),
                                 ('next', 'ns3::PacketTagList::TagData *'),
                                 ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3PcapFile_methods(root_module, cls):
    """Register Python bindings for ns3::PcapFile (pcap-file.h)."""
    cls.add_constructor([])
    cls.add_method('Clear', 'void', [])
    cls.add_method('Close', 'void', [])
    # Static helper comparing two pcap files.
    cls.add_method('Diff', 'bool',
                   [param('std::string const &', 'f1'), param('std::string const &', 'f2'),
                    param('uint32_t &', 'sec'), param('uint32_t &', 'usec'),
                    param('uint32_t &', 'packets'),
                    param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT')],
                   is_static=True)
    # Stream-state predicates.
    cls.add_method('Eof', 'bool', [], is_const=True)
    cls.add_method('Fail', 'bool', [], is_const=True)
    # Accessors for the pcap global-header fields.
    cls.add_method('GetDataLinkType', 'uint32_t', [])
    cls.add_method('GetMagic', 'uint32_t', [])
    cls.add_method('GetSigFigs', 'uint32_t', [])
    cls.add_method('GetSnapLen', 'uint32_t', [])
    cls.add_method('GetSwapMode', 'bool', [])
    cls.add_method('GetTimeZoneOffset', 'int32_t', [])
    cls.add_method('GetVersionMajor', 'uint16_t', [])
    cls.add_method('GetVersionMinor', 'uint16_t', [])
    cls.add_method('Init', 'void',
                   [param('uint32_t', 'dataLinkType'),
                    param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT'),
                    param('int32_t', 'timeZoneCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT'),
                    param('bool', 'swapMode', default_value='false')])
    cls.add_method('Open', 'void',
                   [param('std::string const &', 'filename'),
                    param('std::_Ios_Openmode', 'mode')])
    cls.add_method('Read', 'void',
                   [param('uint8_t * const', 'data'), param('uint32_t', 'maxBytes'),
                    param('uint32_t &', 'tsSec'), param('uint32_t &', 'tsUsec'),
                    param('uint32_t &', 'inclLen'), param('uint32_t &', 'origLen'),
                    param('uint32_t &', 'readLen')])
    # Write() overloads: raw buffer, packet, and header + packet.
    cls.add_method('Write', 'void',
                   [param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'),
                    param('uint8_t const * const', 'data'), param('uint32_t', 'totalLen')])
    cls.add_method('Write', 'void',
                   [param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'),
                    param('ns3::Ptr< ns3::Packet const >', 'p')])
    cls.add_method('Write', 'void',
                   [param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'),
                    param('ns3::Header const &', 'header'),
                    param('ns3::Ptr< ns3::Packet const >', 'p')])
    # Static class constants.
    cls.add_static_attribute('SNAPLEN_DEFAULT', 'uint32_t const', is_const=True)
    cls.add_static_attribute('ZONE_DEFAULT', 'int32_t const', is_const=True)
    return
def register_Ns3PcapHelper_methods(root_module, cls):
    """Register Python bindings for ns3::PcapHelper (trace-helper.h)."""
    # Copy constructor, then the default constructor (generator order).
    cls.add_constructor([param('ns3::PcapHelper const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('CreateFile', 'ns3::Ptr< ns3::PcapFileWrapper >',
                   [param('std::string', 'filename'),
                    param('std::_Ios_Openmode', 'filemode'),
                    param('uint32_t', 'dataLinkType'),
                    param('uint32_t', 'snapLen', default_value='std::numeric_limits<unsigned int>::max()'),
                    param('int32_t', 'tzCorrection', default_value='0')])
    # Filename construction helpers for per-device / per-interface traces.
    cls.add_method('GetFilenameFromDevice', 'std::string',
                   [param('std::string', 'prefix'),
                    param('ns3::Ptr< ns3::NetDevice >', 'device'),
                    param('bool', 'useObjectNames', default_value='true')])
    cls.add_method('GetFilenameFromInterfacePair', 'std::string',
                   [param('std::string', 'prefix'),
                    param('ns3::Ptr< ns3::Object >', 'object'),
                    param('uint32_t', 'interface'),
                    param('bool', 'useObjectNames', default_value='true')])
    return
def register_Ns3PcapHelperForDevice_methods(root_module, cls):
    """Register Python bindings for ns3::PcapHelperForDevice (trace-helper.h).

    Registers the family of EnablePcap overloads (by device pointer, by
    name, by container, by node/device id), EnablePcapAll, and the
    pure-virtual EnablePcapInternal hook that concrete helpers implement.
    """
    ## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice(ns3::PcapHelperForDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PcapHelperForDevice const &', 'arg0')])
    ## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice() [constructor]
    cls.add_constructor([])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous=false, bool explicitFilename=false) [member function]
    cls.add_method('EnablePcap',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, std::string ndName, bool promiscuous=false, bool explicitFilename=false) [member function]
    cls.add_method('EnablePcap',
                   'void',
                   [param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NetDeviceContainer d, bool promiscuous=false) [member function]
    cls.add_method('EnablePcap',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd'), param('bool', 'promiscuous', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NodeContainer n, bool promiscuous=false) [member function]
    cls.add_method('EnablePcap',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::NodeContainer', 'n'), param('bool', 'promiscuous', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool promiscuous=false) [member function]
    cls.add_method('EnablePcap',
                   'void',
                   [param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'promiscuous', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapAll(std::string prefix, bool promiscuous=false) [member function]
    cls.add_method('EnablePcapAll',
                   'void',
                   [param('std::string', 'prefix'), param('bool', 'promiscuous', default_value='false')])
    ## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapInternal(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous, bool explicitFilename) [member function]
    cls.add_method('EnablePcapInternal',
                   'void',
                   [param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous'), param('bool', 'explicitFilename')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3SequenceNumber8_methods(root_module, cls):
    """Register Python bindings for ns3::SequenceNumber8.

    SequenceNumber8 is SequenceNumber<unsigned char, signed char>: an
    8-bit wrap-around sequence number with full comparison operators and
    +/- arithmetic against both another sequence number and a raw offset.
    """
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber8'], root_module['ns3::SequenceNumber8'], param('ns3::SequenceNumber< unsigned char, signed char > const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber8'], root_module['ns3::SequenceNumber8'], param('signed char', u'right'))
    cls.add_inplace_numeric_operator('+=', param('signed char', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::SequenceNumber8'], root_module['ns3::SequenceNumber8'], param('signed char', u'right'))
    cls.add_inplace_numeric_operator('-=', param('signed char', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('>=')
    ## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned char, signed char>::SequenceNumber() [constructor]
    cls.add_constructor([])
    ## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned char, signed char>::SequenceNumber(unsigned char value) [constructor]
    cls.add_constructor([param('unsigned char', 'value')])
    ## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned char, signed char>::SequenceNumber(ns3::SequenceNumber<unsigned char, signed char> const & value) [copy constructor]
    cls.add_constructor([param('ns3::SequenceNumber< unsigned char, signed char > const &', 'value')])
    ## sequence-number.h (module 'network'): unsigned char ns3::SequenceNumber<unsigned char, signed char>::GetValue() const [member function]
    cls.add_method('GetValue',
                   'unsigned char',
                   [],
                   is_const=True)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register bindings for the SimpleRefCount<Object, ObjectBase, ObjectDeleter>
    instantiation: constructors plus the static Cleanup() helper.
    """
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register Python bindings for the static ns3::Simulator facade.

    Everything except the copy constructor is a static member: event
    cancellation/removal, simulation time queries (Now, GetDelayLeft,
    GetMaximumSimulationTime), implementation/scheduler selection, and
    the Stop overloads. Note Run/Schedule are not registered here; they
    are handled by custom wrappers elsewhere in the generated bindings.
    """
    ## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    ## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
    cls.add_method('Destroy',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
    cls.add_method('GetDelayLeft',
                   'ns3::Time',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
    cls.add_method('GetImplementation',
                   'ns3::Ptr< ns3::SimulatorImpl >',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
    cls.add_method('GetMaximumSimulationTime',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
    cls.add_method('IsFinished',
                   'bool',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
    cls.add_method('Now',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
    cls.add_method('SetImplementation',
                   'void',
                   [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler',
                   'void',
                   [param('ns3::ObjectFactory', 'schedulerFactory')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
    cls.add_method('Stop',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'delay')],
                   is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Tag base class.

    Deserialize/Serialize/Print/GetSerializedSize are pure virtual, so
    Python subclasses must override them; GetTypeId is static.
    """
    ## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
    cls.add_constructor([])
    ## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    ## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_virtual=True)
    ## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register Python bindings for ns3::TagBuffer (tag-buffer.h, network module).

    Behaviour-identical restyle of the generated registration code: the
    repetitive fixed-width ReadU*/WriteU* accessors are emitted from
    small tables, preserving the original registration order exactly.
    """
    # Copy constructor and the raw (start, end) byte-range constructor.
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    # void CopyFrom(ns3::TagBuffer o)
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    # void Read(uint8_t * buffer, uint32_t size)
    cls.add_method('Read', 'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    # double ReadDouble()
    cls.add_method('ReadDouble', 'double', [])
    # uint16_t ReadU16() / uint32_t ReadU32() / uint64_t ReadU64()
    for width in (16, 32, 64):
        cls.add_method('ReadU%d' % width, 'uint%d_t' % width, [])
    # uint8_t ReadU8()
    cls.add_method('ReadU8', 'uint8_t', [])
    # void TrimAtEnd(uint32_t trim)
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # void Write(uint8_t const * buffer, uint32_t size)
    cls.add_method('Write', 'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # void WriteDouble(double v)
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    # void WriteU16(uint16_t data) / WriteU32(uint32_t data) / WriteU64(uint64_t v)
    # (the generator named the 64-bit parameter 'v'; keep that asymmetry).
    for width, arg_name in ((16, 'data'), (32, 'data'), (64, 'v')):
        cls.add_method('WriteU%d' % width, 'void',
                       [param('uint%d_t' % width, arg_name)])
    # void WriteU8(uint8_t v)
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')])
    return
def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register bindings for ns3::TimeWithUnit: stream output operator,
    copy constructor, and the (time, unit) value constructor.
    """
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return
def register_Ns3TracedValue__Ns3LrWpanMacState_methods(root_module, cls):
    """Register bindings for ns3::TracedValue<ns3::LrWpanMacState>.

    Behaviour-identical restyle: the symmetric Connect/Disconnect method
    pairs are generated from a loop while keeping the original
    registration order (Connect, ConnectWithoutContext, Disconnect,
    DisconnectWithoutContext).
    """
    # Default, copy, and value constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TracedValue< ns3::LrWpanMacState > const &', 'o')])
    cls.add_constructor([param('ns3::LrWpanMacState const &', 'v')])
    # Trace hookup/teardown: the plain variants take a context path,
    # the *WithoutContext variants only the callback.
    for verb in ('Connect', 'Disconnect'):
        cls.add_method(verb,
                       'void',
                       [param('ns3::CallbackBase const &', 'cb'), param('std::string', 'path')])
        cls.add_method(verb + 'WithoutContext',
                       'void',
                       [param('ns3::CallbackBase const &', 'cb')])
    # ns3::LrWpanMacState Get() const
    cls.add_method('Get', 'ns3::LrWpanMacState', [], is_const=True)
    # void Set(ns3::LrWpanMacState const & v)
    cls.add_method('Set', 'void', [param('ns3::LrWpanMacState const &', 'v')])
    return
def register_Ns3TracedValue__Ns3LrWpanPhyEnumeration_methods(root_module, cls):
    """Register bindings for ns3::TracedValue<ns3::LrWpanPhyEnumeration>.

    Behaviour-identical restyle: the symmetric Connect/Disconnect method
    pairs are generated from a loop while keeping the original
    registration order (Connect, ConnectWithoutContext, Disconnect,
    DisconnectWithoutContext).
    """
    # Default, copy, and value constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TracedValue< ns3::LrWpanPhyEnumeration > const &', 'o')])
    cls.add_constructor([param('ns3::LrWpanPhyEnumeration const &', 'v')])
    # Trace hookup/teardown: the plain variants take a context path,
    # the *WithoutContext variants only the callback.
    for verb in ('Connect', 'Disconnect'):
        cls.add_method(verb,
                       'void',
                       [param('ns3::CallbackBase const &', 'cb'), param('std::string', 'path')])
        cls.add_method(verb + 'WithoutContext',
                       'void',
                       [param('ns3::CallbackBase const &', 'cb')])
    # ns3::LrWpanPhyEnumeration Get() const
    cls.add_method('Get', 'ns3::LrWpanPhyEnumeration', [], is_const=True)
    # void Set(ns3::LrWpanPhyEnumeration const & v)
    cls.add_method('Set', 'void', [param('ns3::LrWpanPhyEnumeration const &', 'v')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId (core run-time type system).

    Covers comparison/stream operators, constructors, the AddAttribute /
    AddTraceSource builders (including the deprecated three-argument
    AddTraceSource overload), per-index attribute and trace-source
    introspection, static registry lookups (by name and by hash), and the
    Set* configuration methods.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
                   deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute',
                   'ns3::TypeId::AttributeInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName',
                   'std::string',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor',
                   'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered',
                   'ns3::TypeId',
                   [param('uint32_t', 'i')],
                   is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN',
                   'uint32_t',
                   [],
                   is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource',
                   'ns3::TypeId::TraceSourceInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint16_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation',
                   'ns3::TypeId',
                   [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf',
                   'bool',
                   [param('ns3::TypeId', 'other')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName',
                   'bool',
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash',
                   'ns3::TypeId',
                   [param('uint32_t', 'hash')],
                   is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe',
                   'bool',
                   [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
                   is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName',
                   'ns3::TypeId',
                   [param('std::string', 'name')],
                   is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName',
                   'ns3::Ptr< ns3::TraceSourceAccessor const >',
                   [param('std::string', 'name')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName',
                   'ns3::TypeId',
                   [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent',
                   'ns3::TypeId',
                   [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize',
                   'ns3::TypeId',
                   [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
    cls.add_method('SetUid',
                   'void',
                   [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register bindings for the ns3::TypeId::AttributeInformation struct.

    Behaviour-identical restyle: the public data members are registered
    from a (name, C++ type) table in the same order the generator
    originally emitted them.
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Public, mutable data members of the struct.
    for member, cpp_type in (
            ('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >'),
            ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
            ('flags', 'uint32_t'),
            ('help', 'std::string'),
            ('initialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
            ('name', 'std::string'),
            ('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
    ):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register bindings for the ns3::TypeId::TraceSourceInformation struct.

    Behaviour-identical restyle: the public data members are registered
    from a (name, C++ type) table in the same order the generator
    originally emitted them.
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public, mutable data members of the struct.
    for member, cpp_type in (
            ('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >'),
            ('callback', 'std::string'),
            ('help', 'std::string'),
            ('name', 'std::string'),
    ):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::empty, the placeholder type used to pad
    unused ns3::Callback template slots; only constructors are exposed.
    """
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register Python bindings for ns3::int64x64_t (Q64.64 fixed-point number).

    Behaviour-identical restyle: the operator and converting-constructor
    batteries are driven by small tables while keeping the generator's
    exact registration order (including the split comparison batches
    around the in-place operators and stream operator).
    """
    i64 = root_module['ns3::int64x64_t']
    # Binary arithmetic *, +, -, then unary negation, then /.
    for op in ('*', '+', '-'):
        cls.add_binary_numeric_operator(op, i64, i64, param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', i64, i64, param('ns3::int64x64_t const &', u'right'))
    # First batch of comparisons.
    for op in ('<', '>', '!='):
        cls.add_binary_comparison_operator(op)
    # Compound-assignment forms.
    for op in ('*=', '+=', '-=', '/='):
        cls.add_inplace_numeric_operator(op, param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    # Remaining comparisons.
    for op in ('<=', '==', '>='):
        cls.add_binary_comparison_operator(op)
    # Default constructor, then one converting constructor per scalar type.
    cls.add_constructor([])
    for scalar in ('double', 'long double', 'int', 'long int', 'long long int',
                   'unsigned int', 'long unsigned int', 'long long unsigned int'):
        cls.add_constructor([param(scalar, 'v')])
    # (hi, lo) pair constructor and the copy constructor.
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    # Const accessors for the stored value.
    cls.add_method('GetDouble', 'double', [], is_const=True)
    cls.add_method('GetHigh', 'int64_t', [], is_const=True)
    cls.add_method('GetLow', 'uint64_t', [], is_const=True)
    # static int64x64_t Invert(uint64_t v)
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')],
                   is_static=True)
    # void MulByInvert(int64x64_t const & o)
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    # Static const 'implementation' attribute identifying the backing impl.
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const',
                             is_const=True)
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register ns3::Chunk (chunk.h, module 'network') on the wrapper *cls*."""
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    # uint32_t Deserialize(ns3::Buffer::Iterator start) -- pure virtual.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream & os) const -- pure virtual.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register ns3::Header (header.h, module 'network') on the wrapper *cls*."""
    # Support operator<< so headers can be streamed for printing.
    cls.add_output_stream_operator()
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    # uint32_t Deserialize(ns3::Buffer::Iterator start) -- pure virtual.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    # uint32_t GetSerializedSize() const -- pure virtual.
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream & os) const -- pure virtual.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # void Serialize(ns3::Buffer::Iterator start) const -- pure virtual.
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3LrWpanHelper_methods(root_module, cls):
    """Register ns3::LrWpanHelper (lr-wpan-helper.h, module 'lr-wpan') on *cls*."""
    # Constructors: default, and one selecting a MultiModelSpectrumChannel.
    cls.add_constructor([])
    cls.add_constructor([param('bool', 'useMultiModelSpectrumChannel')])
    # Channel access: getter plus two SetChannel overloads (by pointer, by name).
    cls.add_method('GetChannel', 'ns3::Ptr< ns3::SpectrumChannel >', [])
    cls.add_method('SetChannel', 'void',
                   [param('ns3::Ptr< ns3::SpectrumChannel >', 'channel')])
    cls.add_method('SetChannel', 'void',
                   [param('std::string', 'channelName')])
    # void AddMobility(Ptr<LrWpanPhy> phy, Ptr<MobilityModel> m)
    cls.add_method('AddMobility', 'void',
                   [param('ns3::Ptr< ns3::LrWpanPhy >', 'phy'),
                    param('ns3::Ptr< ns3::MobilityModel >', 'm')])
    # NetDeviceContainer Install(NodeContainer c)
    cls.add_method('Install', 'ns3::NetDeviceContainer',
                   [param('ns3::NodeContainer', 'c')])
    # void AssociateToPan(NetDeviceContainer c, uint16_t panId)
    cls.add_method('AssociateToPan', 'void',
                   [param('ns3::NetDeviceContainer', 'c'),
                    param('uint16_t', 'panId')])
    # void EnableLogComponents()
    cls.add_method('EnableLogComponents', 'void', [])
    # Static pretty-printers for the PHY and MAC state enumerations.
    cls.add_method('LrWpanPhyEnumerationPrinter', 'std::string',
                   [param('ns3::LrWpanPhyEnumeration', 'e')],
                   is_static=True)
    cls.add_method('LrWpanMacStatePrinter', 'std::string',
                   [param('ns3::LrWpanMacState', 'e')],
                   is_static=True)
    # int64_t AssignStreams(NetDeviceContainer c, int64_t stream)
    cls.add_method('AssignStreams', 'int64_t',
                   [param('ns3::NetDeviceContainer', 'c'),
                    param('int64_t', 'stream')])
    # Private virtual tracing hooks from the pcap/ascii trace-helper bases.
    cls.add_method('EnablePcapInternal', 'void',
                   [param('std::string', 'prefix'),
                    param('ns3::Ptr< ns3::NetDevice >', 'nd'),
                    param('bool', 'promiscuous'),
                    param('bool', 'explicitFilename')],
                   visibility='private', is_virtual=True)
    cls.add_method('EnableAsciiInternal', 'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'),
                    param('std::string', 'prefix'),
                    param('ns3::Ptr< ns3::NetDevice >', 'nd'),
                    param('bool', 'explicitFilename')],
                   visibility='private', is_virtual=True)
    return
def register_Ns3LrWpanLqiTag_methods(root_module, cls):
    """Register ns3::LrWpanLqiTag (lr-wpan-lqi-tag.h, module 'lr-wpan') on *cls*."""
    # Constructors: copy, default, and from an explicit LQI value.
    cls.add_constructor([param('ns3::LrWpanLqiTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'lqi')])
    # void Deserialize(ns3::TagBuffer i) -- ns3::Tag override.
    cls.add_method('Deserialize', 'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    # uint8_t Get() const -- read the stored LQI.
    cls.add_method('Get', 'uint8_t', [], is_const=True)
    # ns3::TypeId GetInstanceTypeId() const -- ns3::Tag override.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    # uint32_t GetSerializedSize() const -- ns3::Tag override.
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream & os) const -- ns3::Tag override.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    # void Serialize(ns3::TagBuffer i) const -- ns3::Tag override.
    cls.add_method('Serialize', 'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    # void Set(uint8_t lqi) -- store a new LQI.
    cls.add_method('Set', 'void', [param('uint8_t', 'lqi')])
    return
def register_Ns3LrWpanMacHeader_methods(root_module, cls):
    """Register ns3::LrWpanMacHeader (lr-wpan-mac-header.h, module 'lr-wpan') on *cls*.

    The repetitive accessor/mutator registrations are driven from tables.
    The registration order is kept identical to the original so that
    pybindgen's overload-resolution order in the generated wrappers is
    unchanged.
    """
    mac_type = 'ns3::LrWpanMacHeader::LrWpanMacType'
    # Constructors: copy, default, and (macType, seqNum).
    cls.add_constructor([param('ns3::LrWpanMacHeader const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param(mac_type, 'wpanMacType'),
                         param('uint8_t', 'seqNum')])
    # uint32_t Deserialize(ns3::Buffer::Iterator start) -- Header override.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    # Const getters: (method name, return type, is a ns3::Header virtual).
    for getter, rtype, virt in [
            ('GetDstAddrMode', 'uint8_t', False),
            ('GetDstPanId', 'uint16_t', False),
            ('GetExtDstAddr', 'ns3::Mac64Address', False),
            ('GetExtSrcAddr', 'ns3::Mac64Address', False),
            ('GetFrameControl', 'uint16_t', False),
            ('GetFrameVer', 'uint8_t', False),
            ('GetFrmCounter', 'uint32_t', False),
            ('GetFrmCtrlRes', 'uint8_t', False),
            ('GetInstanceTypeId', 'ns3::TypeId', True),
            ('GetKeyIdIndex', 'uint8_t', False),
            ('GetKeyIdMode', 'uint8_t', False),
            ('GetKeyIdSrc32', 'uint32_t', False),
            ('GetKeyIdSrc64', 'uint64_t', False),
            ('GetSecControl', 'uint8_t', False),
            ('GetSecCtrlReserved', 'uint8_t', False),
            ('GetSecLevel', 'uint8_t', False),
            ('GetSeqNum', 'uint8_t', False),
            ('GetSerializedSize', 'uint32_t', True),
            ('GetShortDstAddr', 'ns3::Mac16Address', False),
            ('GetShortSrcAddr', 'ns3::Mac16Address', False),
            ('GetSrcAddrMode', 'uint8_t', False),
            ('GetSrcPanId', 'uint16_t', False),
            ('GetType', mac_type, False),
            ]:
        cls.add_method(getter, rtype, [], is_const=True, is_virtual=virt)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Frame-control predicates: bool <name>() const.
    for predicate in ('IsAckReq', 'IsAcknowledgment', 'IsBeacon', 'IsCommand',
                      'IsData', 'IsFrmPend', 'IsPanIdComp', 'IsSecEnable'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    # void Print(std::ostream &) const / void Serialize(Buffer::Iterator) const
    # -- ns3::Header overrides.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    # Setters: void <name>(<args>); overload groups keep their original order.
    for setter, args in [
            ('SetAckReq', []),
            ('SetDstAddrFields', [('uint16_t', 'panId'), ('ns3::Mac16Address', 'addr')]),
            ('SetDstAddrFields', [('uint16_t', 'panId'), ('ns3::Mac64Address', 'addr')]),
            ('SetDstAddrMode', [('uint8_t', 'addrMode')]),
            ('SetFrameControl', [('uint16_t', 'frameControl')]),
            ('SetFrameVer', [('uint8_t', 'ver')]),
            ('SetFrmCounter', [('uint32_t', 'frmCntr')]),
            ('SetFrmCtrlRes', [('uint8_t', 'res')]),
            ('SetFrmPend', []),
            ('SetKeyId', [('uint8_t', 'keyIndex')]),
            ('SetKeyId', [('uint32_t', 'keySrc'), ('uint8_t', 'keyIndex')]),
            ('SetKeyId', [('uint64_t', 'keySrc'), ('uint8_t', 'keyIndex')]),
            ('SetKeyIdMode', [('uint8_t', 'keyIdMode')]),
            ('SetNoAckReq', []),
            ('SetNoFrmPend', []),
            ('SetNoPanIdComp', []),
            ('SetPanIdComp', []),
            ('SetSecControl', [('uint8_t', 'secLevel')]),
            ('SetSecCtrlReserved', [('uint8_t', 'res')]),
            ('SetSecDisable', []),
            ('SetSecEnable', []),
            ('SetSecLevel', [('uint8_t', 'secLevel')]),
            ('SetSeqNum', [('uint8_t', 'seqNum')]),
            ('SetSrcAddrFields', [('uint16_t', 'panId'), ('ns3::Mac16Address', 'addr')]),
            ('SetSrcAddrFields', [('uint16_t', 'panId'), ('ns3::Mac64Address', 'addr')]),
            ('SetSrcAddrMode', [('uint8_t', 'addrMode')]),
            ('SetType', [(mac_type, 'wpanMacType')]),
            ]:
        cls.add_method(setter, 'void',
                       [param(ctype, pname) for ctype, pname in args])
    return
def register_Ns3Object_methods(root_module, cls):
    """Register ns3::Object (object.h, module 'core') on the wrapper *cls*."""
    # Default constructor.
    cls.add_constructor([])
    # void AggregateObject(Ptr<Object> other)
    cls.add_method('AggregateObject', 'void',
                   [param('ns3::Ptr< ns3::Object >', 'other')])
    # void Dispose()
    cls.add_method('Dispose', 'void', [])
    # AggregateIterator GetAggregateIterator() const
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [],
                   is_const=True)
    # ns3::TypeId GetInstanceTypeId() const -- virtual.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Initialize()
    cls.add_method('Initialize', 'void', [])
    # Copy constructor is protected in ns3::Object.
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    # Protected virtual lifecycle hooks, registered in the original order.
    for hook in ('DoDispose', 'DoInitialize', 'NotifyNewAggregate'):
        cls.add_method(hook, 'void', [],
                       visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register ns3::Object::AggregateIterator (object.h, module 'core') on *cls*."""
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Ptr<const Object> Next()
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3PcapFileWrapper_methods(root_module, cls):
    """Register ns3::PcapFileWrapper (pcap-file-wrapper.h, module 'network') on *cls*."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # Stream-state queries.
    cls.add_method('Fail', 'bool', [], is_const=True)
    cls.add_method('Eof', 'bool', [], is_const=True)
    cls.add_method('Clear', 'void', [])
    # File management: Open / Close / Init (with defaulted snapLen and
    # time-zone correction arguments).
    cls.add_method('Open', 'void',
                   [param('std::string const &', 'filename'),
                    param('std::_Ios_Openmode', 'mode')])
    cls.add_method('Close', 'void', [])
    cls.add_method('Init', 'void',
                   [param('uint32_t', 'dataLinkType'),
                    param('uint32_t', 'snapLen', default_value='std::numeric_limits<unsigned int>::max()'),
                    param('int32_t', 'tzCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT')])
    # Write overloads: packet only, header + packet, raw buffer.
    cls.add_method('Write', 'void',
                   [param('ns3::Time', 't'),
                    param('ns3::Ptr< ns3::Packet const >', 'p')])
    cls.add_method('Write', 'void',
                   [param('ns3::Time', 't'),
                    param('ns3::Header const &', 'header'),
                    param('ns3::Ptr< ns3::Packet const >', 'p')])
    cls.add_method('Write', 'void',
                   [param('ns3::Time', 't'),
                    param('uint8_t const *', 'buffer'),
                    param('uint32_t', 'length')])
    # Pcap file-header field getters: all niladic, registered in order.
    for getter, rtype in [('GetMagic', 'uint32_t'),
                          ('GetVersionMajor', 'uint16_t'),
                          ('GetVersionMinor', 'uint16_t'),
                          ('GetTimeZoneOffset', 'int32_t'),
                          ('GetSigFigs', 'uint32_t'),
                          ('GetSnapLen', 'uint32_t'),
                          ('GetDataLinkType', 'uint32_t')]:
        cls.add_method(getter, rtype, [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeAccessor, ...> base instantiation."""
    ref_t = 'ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &'
    cls.add_constructor([])                   # default constructor
    cls.add_constructor([param(ref_t, 'o')])  # copy constructor
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeChecker, ...> base instantiation."""
    ref_t = 'ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &'
    cls.add_constructor([])                   # default constructor
    cls.add_constructor([param(ref_t, 'o')])  # copy constructor
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeValue, ...> base instantiation."""
    ref_t = 'ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &'
    cls.add_constructor([])                   # default constructor
    cls.add_constructor([param(ref_t, 'o')])  # copy constructor
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::CallbackImplBase, ...> base instantiation."""
    ref_t = 'ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &'
    cls.add_constructor([])                   # default constructor
    cls.add_constructor([param(ref_t, 'o')])  # copy constructor
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::EventImpl, ...> base instantiation."""
    ref_t = 'ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &'
    cls.add_constructor([])                   # default constructor
    cls.add_constructor([param(ref_t, 'o')])  # copy constructor
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::Hash::Implementation, ...> base instantiation."""
    ref_t = 'ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &'
    cls.add_constructor([])                   # default constructor
    cls.add_constructor([param(ref_t, 'o')])  # copy constructor
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3LrWpanInterferenceHelper_Ns3Empty_Ns3DefaultDeleter__lt__ns3LrWpanInterferenceHelper__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::LrWpanInterferenceHelper, ns3::empty, ns3::DefaultDeleter<ns3::LrWpanInterferenceHelper> > (simple-ref-count.h)."""
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::LrWpanInterferenceHelper, ns3::empty, ns3::DefaultDeleter< ns3::LrWpanInterferenceHelper > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > (simple-ref-count.h)."""
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > (simple-ref-count.h)."""
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > (simple-ref-count.h)."""
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3SpectrumSignalParameters_Ns3Empty_Ns3DefaultDeleter__lt__ns3SpectrumSignalParameters__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::SpectrumSignalParameters, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumSignalParameters> > (simple-ref-count.h)."""
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::SpectrumSignalParameters, ns3::empty, ns3::DefaultDeleter< ns3::SpectrumSignalParameters > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > (simple-ref-count.h)."""
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SpectrumPhy_methods(root_module, cls):
    """Register bindings for the abstract ns3::SpectrumPhy interface (spectrum-phy.h).

    All accessors/mutators below are pure virtual in C++; GetTypeId is static.
    """
    cls.add_constructor([])
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'd')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetDevice', 'ns3::Ptr< ns3::NetDevice >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('SetMobility', 'void', [param('ns3::Ptr< ns3::MobilityModel >', 'm')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetMobility', 'ns3::Ptr< ns3::MobilityModel >', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetChannel', 'void', [param('ns3::Ptr< ns3::SpectrumChannel >', 'c')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetRxSpectrumModel', 'ns3::Ptr< ns3::SpectrumModel const >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetRxAntenna', 'ns3::Ptr< ns3::AntennaModel >', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('StartRx', 'void', [param('ns3::Ptr< ns3::SpectrumSignalParameters >', 'params')], is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3SpectrumSignalParameters_methods(root_module, cls):
    """Register bindings for ns3::SpectrumSignalParameters (spectrum-signal-parameters.h)."""
    # Default and copy constructors, plus the virtual Copy() clone method.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SpectrumSignalParameters const &', 'p')])
    cls.add_method('Copy', 'ns3::Ptr< ns3::SpectrumSignalParameters >', [], is_virtual=True)
    # Public data members exposed as writable instance attributes.
    cls.add_instance_attribute('duration', 'ns3::Time', is_const=False)
    cls.add_instance_attribute('psd', 'ns3::Ptr< ns3::SpectrumValue >', is_const=False)
    cls.add_instance_attribute('txAntenna', 'ns3::Ptr< ns3::AntennaModel >', is_const=False)
    cls.add_instance_attribute('txPhy', 'ns3::Ptr< ns3::SpectrumPhy >', is_const=False)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register bindings for ns3::Time (nstime.h): operators, constructors and converters."""
    # Binary arithmetic, comparison, in-place and stream operators.
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    # Constructors from the various numeric, int64x64_t and string forms.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    cls.add_constructor([param('double', 'v')])
    cls.add_constructor([param('int', 'v')])
    cls.add_constructor([param('long int', 'v')])
    cls.add_constructor([param('long long int', 'v')])
    cls.add_constructor([param('unsigned int', 'v')])
    cls.add_constructor([param('long unsigned int', 'v')])
    cls.add_constructor([param('long long unsigned int', 'v')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    # Conversion helpers, static factories and unit getters.
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('GetDays', 'double', [], is_const=True)
    cls.add_method('GetDouble', 'double', [], is_const=True)
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetHours', 'double', [], is_const=True)
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    cls.add_method('GetYears', 'double', [], is_const=True)
    # Sign / zero predicates.
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    cls.add_method('IsZero', 'bool', [], is_const=True)
    # Static extrema, resolution control and unit conversions.
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register bindings for ns3::TraceSourceAccessor (trace-source-accessor.h).

    All four (dis)connect operations are pure virtual const methods in C++.
    """
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register bindings for the abstract ns3::Trailer base class (trailer.h)."""
    cls.add_output_stream_operator()
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    # Serialization interface; all pure virtual except the static GetTypeId.
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'end')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register bindings for the abstract ns3::AttributeAccessor (attribute.h)."""
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register bindings for the abstract ns3::AttributeChecker (attribute.h)."""
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # CreateValidValue is the only non-virtual member registered here.
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True)
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register bindings for the abstract ns3::AttributeValue (attribute.h)."""
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3BooleanChecker_methods(root_module, cls):
    """Register bindings for ns3::BooleanChecker (boolean.h): constructors only."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::BooleanChecker const &', 'arg0')])
    return
def register_Ns3BooleanValue_methods(root_module, cls):
    """Register bindings for ns3::BooleanValue (boolean.h)."""
    cls.add_output_stream_operator()
    # Copy, default and value constructors.
    cls.add_constructor([param('ns3::BooleanValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('bool', 'value')])
    # AttributeValue interface plus the bool accessor pair.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'bool', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('bool', 'value')])
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register bindings for ns3::CallbackChecker (callback.h): constructors only."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register bindings for ns3::CallbackImplBase (callback.h)."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True)
    # Demangle is a protected static helper in C++.
    cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected')
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register bindings for ns3::CallbackValue (callback.h)."""
    # Copy, default and CallbackBase-based constructors.
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    # AttributeValue interface plus the Set mutator.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3DoubleValue_methods(root_module, cls):
    """Register bindings for ns3::DoubleValue (double.h)."""
    # Default, copy and value constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DoubleValue const &', 'arg0')])
    cls.add_constructor([param('double const &', 'value')])
    # AttributeValue interface plus the double accessor pair.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'double', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('double const &', 'value')])
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Bind the methods of ns3::EmptyAttributeValue (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    # The AttributeValue interface is registered with private visibility for this class.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3EnumChecker_methods(root_module, cls):
    """Bind the methods of ns3::EnumChecker (enum.h, module 'core')."""
    cls.add_constructor([param('ns3::EnumChecker const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    # Population of the (value, name) mapping.
    cls.add_method('Add', 'void', [param('int', 'value'), param('std::string', 'name')])
    cls.add_method('AddDefault', 'void', [param('int', 'value'), param('std::string', 'name')])
    # AttributeChecker interface.
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'src'), param('ns3::AttributeValue &', 'dst')], is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True)
    cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True)
    return
def register_Ns3EnumValue_methods(root_module, cls):
    """Bind the methods of ns3::EnumValue (enum.h, module 'core')."""
    cls.add_constructor([param('ns3::EnumValue const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('int', 'value')])  # construct from an int
    # AttributeValue interface plus value accessors, in the original registration order.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'int', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('int', 'value')])
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Bind the methods of ns3::EventImpl (event-impl.h, module 'core')."""
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    cls.add_method('Cancel', 'void', [])
    cls.add_method('Invoke', 'void', [])
    cls.add_method('IsCancelled', 'bool', [])
    # Notify() is registered as a protected pure-virtual hook.
    cls.add_method('Notify', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return
def register_Ns3IntegerValue_methods(root_module, cls):
    """Bind the methods of ns3::IntegerValue (integer.h, module 'core')."""
    # Constructors: default, copy, and from an int64_t.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntegerValue const &', 'arg0')])
    cls.add_constructor([param('int64_t const &', 'value')])
    # AttributeValue interface plus value accessors, in the original registration order.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'int64_t', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('int64_t const &', 'value')])
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Bind the constructors of ns3::Ipv4AddressChecker (ipv4-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Bind the methods of ns3::Ipv4AddressValue (ipv4-address.h, module 'network')."""
    # Constructors: default, copy, and from an Ipv4Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    # AttributeValue interface plus value accessors, in the original registration order.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Bind the constructors of ns3::Ipv4MaskChecker (ipv4-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Bind the methods of ns3::Ipv4MaskValue (ipv4-address.h, module 'network')."""
    # Constructors: default, copy, and from an Ipv4Mask.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    # AttributeValue interface plus value accessors, in the original registration order.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Bind the constructors of ns3::Ipv6AddressChecker (ipv6-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Bind the methods of ns3::Ipv6AddressValue (ipv6-address.h, module 'network')."""
    # Constructors: default, copy, and from an Ipv6Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    # AttributeValue interface plus value accessors, in the original registration order.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Bind the constructors of ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Bind the methods of ns3::Ipv6PrefixValue (ipv6-address.h, module 'network')."""
    # Constructors: default, copy, and from an Ipv6Prefix.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    # AttributeValue interface plus value accessors, in the original registration order.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3LrWpanCsmaCa_methods(root_module, cls):
    """Bind the methods of ns3::LrWpanCsmaCa (lr-wpan-csmaca.h, module 'lr-wpan')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])  # default constructor
    # MAC association.
    cls.add_method('SetMac', 'void', [param('ns3::Ptr< ns3::LrWpanMac >', 'mac')])
    cls.add_method('GetMac', 'ns3::Ptr< ns3::LrWpanMac >', [], is_const=True)
    # Slotted / unslotted mode selection and queries.
    cls.add_method('SetSlottedCsmaCa', 'void', [])
    cls.add_method('SetUnSlottedCsmaCa', 'void', [])
    cls.add_method('IsSlottedCsmaCa', 'bool', [], is_const=True)
    cls.add_method('IsUnSlottedCsmaCa', 'bool', [], is_const=True)
    # Backoff-exponent and backoff-period parameters.
    cls.add_method('SetMacMinBE', 'void', [param('uint8_t', 'macMinBE')])
    cls.add_method('GetMacMinBE', 'uint8_t', [], is_const=True)
    cls.add_method('SetMacMaxBE', 'void', [param('uint8_t', 'macMaxBE')])
    cls.add_method('GetMacMaxBE', 'uint8_t', [], is_const=True)
    cls.add_method('SetMacMaxCSMABackoffs', 'void', [param('uint8_t', 'macMaxCSMABackoffs')])
    cls.add_method('GetMacMaxCSMABackoffs', 'uint8_t', [], is_const=True)
    cls.add_method('SetUnitBackoffPeriod', 'void', [param('uint64_t', 'unitBackoffPeriod')])
    cls.add_method('GetUnitBackoffPeriod', 'uint64_t', [], is_const=True)
    cls.add_method('GetTimeToNextSlot', 'ns3::Time', [], is_const=True)
    # CSMA-CA operation entry points.
    cls.add_method('Start', 'void', [])
    cls.add_method('Cancel', 'void', [])
    cls.add_method('RandomBackoffDelay', 'void', [])
    cls.add_method('CanProceed', 'void', [])
    cls.add_method('RequestCCA', 'void', [])
    cls.add_method('PlmeCcaConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status')])
    cls.add_method('SetLrWpanMacStateCallback', 'void', [param('ns3::LrWpanMacStateCallback', 'macState')])
    cls.add_method('AssignStreams', 'int64_t', [param('int64_t', 'stream')])
    cls.add_method('GetNB', 'uint8_t', [])
    # Private virtual disposal hook.
    cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True)
    return
def register_Ns3LrWpanErrorModel_methods(root_module, cls):
    """Bind the methods of ns3::LrWpanErrorModel (lr-wpan-error-model.h, module 'lr-wpan')."""
    cls.add_constructor([param('ns3::LrWpanErrorModel const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    cls.add_method('GetChunkSuccessRate', 'double', [param('double', 'snr'), param('uint32_t', 'nbits')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    return
def register_Ns3LrWpanInterferenceHelper_methods(root_module, cls):
    """Bind the methods of ns3::LrWpanInterferenceHelper (lr-wpan-interference-helper.h, module 'lr-wpan')."""
    cls.add_constructor([param('ns3::Ptr< ns3::SpectrumModel const >', 'spectrumModel')])
    # Signal bookkeeping.
    cls.add_method('AddSignal', 'bool', [param('ns3::Ptr< ns3::SpectrumValue const >', 'signal')])
    cls.add_method('RemoveSignal', 'bool', [param('ns3::Ptr< ns3::SpectrumValue const >', 'signal')])
    cls.add_method('ClearSignals', 'void', [])
    # Accessors.
    cls.add_method('GetSignalPsd', 'ns3::Ptr< ns3::SpectrumValue >', [], is_const=True)
    cls.add_method('GetSpectrumModel', 'ns3::Ptr< ns3::SpectrumModel const >', [], is_const=True)
    return
def register_Ns3LrWpanMac_methods(root_module, cls):
    """Bind the methods and attributes of ns3::LrWpanMac (lr-wpan-mac.h, module 'lr-wpan')."""
    cls.add_constructor([param('ns3::LrWpanMac const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    # Getters.
    cls.add_method('GetAssociationStatus', 'ns3::LrWpanAssociationStatus', [], is_const=True)
    cls.add_method('GetExtendedAddress', 'ns3::Mac64Address', [], is_const=True)
    cls.add_method('GetMacAckWaitDuration', 'uint64_t', [], is_const=True)
    cls.add_method('GetMacMaxFrameRetries', 'uint8_t', [], is_const=True)
    cls.add_method('GetPanId', 'uint16_t', [], is_const=True)
    cls.add_method('GetPhy', 'ns3::Ptr< ns3::LrWpanPhy >', [])
    cls.add_method('GetRxOnWhenIdle', 'bool', [])
    cls.add_method('GetShortAddress', 'ns3::Mac16Address', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Data request and PHY confirm/indication entry points.
    cls.add_method('McpsDataRequest', 'void', [param('ns3::McpsDataRequestParams', 'params'), param('ns3::Ptr< ns3::Packet >', 'p')])
    cls.add_method('PdDataConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status')])
    cls.add_method('PdDataIndication', 'void', [param('uint32_t', 'psduLength'), param('ns3::Ptr< ns3::Packet >', 'p'), param('uint8_t', 'lqi')])
    cls.add_method('PlmeCcaConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status')])
    cls.add_method('PlmeEdConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status'), param('uint8_t', 'energyLevel')])
    cls.add_method('PlmeGetAttributeConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status'), param('ns3::LrWpanPibAttributeIdentifier', 'id'), param('ns3::LrWpanPhyPibAttributes *', 'attribute')])
    cls.add_method('PlmeSetAttributeConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status'), param('ns3::LrWpanPibAttributeIdentifier', 'id')])
    cls.add_method('PlmeSetTRXStateConfirm', 'void', [param('ns3::LrWpanPhyEnumeration', 'status')])
    # Setters.
    cls.add_method('SetAssociationStatus', 'void', [param('ns3::LrWpanAssociationStatus', 'status')])
    cls.add_method('SetCsmaCa', 'void', [param('ns3::Ptr< ns3::LrWpanCsmaCa >', 'csmaCa')])
    cls.add_method('SetExtendedAddress', 'void', [param('ns3::Mac64Address', 'address')])
    cls.add_method('SetLrWpanMacState', 'void', [param('ns3::LrWpanMacState', 'macState')])
    cls.add_method('SetMacMaxFrameRetries', 'void', [param('uint8_t', 'retries')])
    cls.add_method('SetMcpsDataConfirmCallback', 'void', [param('ns3::McpsDataConfirmCallback', 'c')])
    cls.add_method('SetMcpsDataIndicationCallback', 'void', [param('ns3::McpsDataIndicationCallback', 'c')])
    cls.add_method('SetPanId', 'void', [param('uint16_t', 'panId')])
    cls.add_method('SetPhy', 'void', [param('ns3::Ptr< ns3::LrWpanPhy >', 'phy')])
    cls.add_method('SetRxOnWhenIdle', 'void', [param('bool', 'rxOnWhenIdle')])
    cls.add_method('SetShortAddress', 'void', [param('ns3::Mac16Address', 'address')])
    # Public data members exposed as Python attributes.
    cls.add_static_attribute('aMinMPDUOverhead', 'uint32_t const', is_const=True)
    cls.add_instance_attribute('m_aBaseSlotDuration', 'uint64_t', is_const=False)
    cls.add_instance_attribute('m_aBaseSuperframeDuration', 'uint64_t', is_const=False)
    cls.add_instance_attribute('m_aNumSuperframeSlots', 'uint64_t', is_const=False)
    cls.add_instance_attribute('m_macBeaconOrder', 'uint64_t', is_const=False)
    cls.add_instance_attribute('m_macBeaconTxTime', 'uint64_t', is_const=False)
    cls.add_instance_attribute('m_macDsn', 'ns3::SequenceNumber8', is_const=False)
    cls.add_instance_attribute('m_macMaxFrameRetries', 'uint8_t', is_const=False)
    cls.add_instance_attribute('m_macPanId', 'uint16_t', is_const=False)
    cls.add_instance_attribute('m_macPromiscuousMode', 'bool', is_const=False)
    cls.add_instance_attribute('m_macRxOnWhenIdle', 'bool', is_const=False)
    cls.add_instance_attribute('m_macSuperframeOrder', 'uint64_t', is_const=False)
    cls.add_instance_attribute('m_macSyncSymbolOffset', 'uint64_t', is_const=False)
    # Protected virtual lifecycle hooks.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3LrWpanMacTrailer_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanMacTrailer (the FCS trailer).

    Auto-generated by pybindgen's module scanner; registration order is
    significant for overload resolution, so do not reorder calls by hand.
    """
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): ns3::LrWpanMacTrailer::LrWpanMacTrailer(ns3::LrWpanMacTrailer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::LrWpanMacTrailer const &', 'arg0')])
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): ns3::LrWpanMacTrailer::LrWpanMacTrailer() [constructor]
    cls.add_constructor([])
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): bool ns3::LrWpanMacTrailer::CheckFcs(ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('CheckFcs',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p')])
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): uint32_t ns3::LrWpanMacTrailer::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): void ns3::LrWpanMacTrailer::EnableFcs(bool enable) [member function]
    cls.add_method('EnableFcs',
                   'void',
                   [param('bool', 'enable')])
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): uint16_t ns3::LrWpanMacTrailer::GetFcs() const [member function]
    cls.add_method('GetFcs',
                   'uint16_t',
                   [],
                   is_const=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): ns3::TypeId ns3::LrWpanMacTrailer::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): uint32_t ns3::LrWpanMacTrailer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): static ns3::TypeId ns3::LrWpanMacTrailer::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): bool ns3::LrWpanMacTrailer::IsFcsEnabled() [member function]
    cls.add_method('IsFcsEnabled',
                   'bool',
                   [])
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): void ns3::LrWpanMacTrailer::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): void ns3::LrWpanMacTrailer::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): void ns3::LrWpanMacTrailer::SetFcs(ns3::Ptr<const ns3::Packet> p) [member function]
    cls.add_method('SetFcs',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'p')])
    ## lr-wpan-mac-trailer.h (module 'lr-wpan'): ns3::LrWpanMacTrailer::LR_WPAN_MAC_FCS_LENGTH [variable]
    cls.add_static_attribute('LR_WPAN_MAC_FCS_LENGTH', 'uint16_t const', is_const=True)
    return
def register_Ns3LrWpanPhy_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanPhy (802.15.4 PHY layer).

    Covers PD-SAP / PLME-SAP requests, confirm/indication callback setters,
    spectrum model accessors, and the error model. Auto-generated by
    pybindgen's module scanner; do not reorder calls by hand.
    """
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhy::aMaxPhyPacketSize [variable]
    cls.add_static_attribute('aMaxPhyPacketSize', 'uint32_t const', is_const=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhy::aTurnaroundTime [variable]
    cls.add_static_attribute('aTurnaroundTime', 'uint32_t const', is_const=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): static ns3::TypeId ns3::LrWpanPhy::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::LrWpanPhy::LrWpanPhy() [constructor]
    cls.add_constructor([])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetMobility(ns3::Ptr<ns3::MobilityModel> m) [member function]
    cls.add_method('SetMobility',
                   'void',
                   [param('ns3::Ptr< ns3::MobilityModel >', 'm')],
                   is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::MobilityModel> ns3::LrWpanPhy::GetMobility() [member function]
    cls.add_method('GetMobility',
                   'ns3::Ptr< ns3::MobilityModel >',
                   [],
                   is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetChannel(ns3::Ptr<ns3::SpectrumChannel> c) [member function]
    cls.add_method('SetChannel',
                   'void',
                   [param('ns3::Ptr< ns3::SpectrumChannel >', 'c')],
                   is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::SpectrumChannel> ns3::LrWpanPhy::GetChannel() [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::SpectrumChannel >',
                   [])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetDevice(ns3::Ptr<ns3::NetDevice> d) [member function]
    cls.add_method('SetDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'd')],
                   is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::NetDevice> ns3::LrWpanPhy::GetDevice() const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetAntenna(ns3::Ptr<ns3::AntennaModel> a) [member function]
    cls.add_method('SetAntenna',
                   'void',
                   [param('ns3::Ptr< ns3::AntennaModel >', 'a')])
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::AntennaModel> ns3::LrWpanPhy::GetRxAntenna() [member function]
    cls.add_method('GetRxAntenna',
                   'ns3::Ptr< ns3::AntennaModel >',
                   [],
                   is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::SpectrumModel const> ns3::LrWpanPhy::GetRxSpectrumModel() const [member function]
    cls.add_method('GetRxSpectrumModel',
                   'ns3::Ptr< ns3::SpectrumModel const >',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetTxPowerSpectralDensity(ns3::Ptr<ns3::SpectrumValue> txPsd) [member function]
    cls.add_method('SetTxPowerSpectralDensity',
                   'void',
                   [param('ns3::Ptr< ns3::SpectrumValue >', 'txPsd')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetNoisePowerSpectralDensity(ns3::Ptr<ns3::SpectrumValue const> noisePsd) [member function]
    cls.add_method('SetNoisePowerSpectralDensity',
                   'void',
                   [param('ns3::Ptr< ns3::SpectrumValue const >', 'noisePsd')])
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::SpectrumValue const> ns3::LrWpanPhy::GetNoisePowerSpectralDensity() [member function]
    cls.add_method('GetNoisePowerSpectralDensity',
                   'ns3::Ptr< ns3::SpectrumValue const >',
                   [])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::StartRx(ns3::Ptr<ns3::SpectrumSignalParameters> params) [member function]
    cls.add_method('StartRx',
                   'void',
                   [param('ns3::Ptr< ns3::SpectrumSignalParameters >', 'params')],
                   is_virtual=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::PdDataRequest(uint32_t const psduLength, ns3::Ptr<ns3::Packet> p) [member function]
    cls.add_method('PdDataRequest',
                   'void',
                   [param('uint32_t const', 'psduLength'), param('ns3::Ptr< ns3::Packet >', 'p')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::PlmeCcaRequest() [member function]
    cls.add_method('PlmeCcaRequest',
                   'void',
                   [])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::PlmeEdRequest() [member function]
    cls.add_method('PlmeEdRequest',
                   'void',
                   [])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::PlmeGetAttributeRequest(ns3::LrWpanPibAttributeIdentifier id) [member function]
    cls.add_method('PlmeGetAttributeRequest',
                   'void',
                   [param('ns3::LrWpanPibAttributeIdentifier', 'id')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::PlmeSetTRXStateRequest(ns3::LrWpanPhyEnumeration state) [member function]
    cls.add_method('PlmeSetTRXStateRequest',
                   'void',
                   [param('ns3::LrWpanPhyEnumeration', 'state')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::PlmeSetAttributeRequest(ns3::LrWpanPibAttributeIdentifier id, ns3::LrWpanPhyPibAttributes * attribute) [member function]
    cls.add_method('PlmeSetAttributeRequest',
                   'void',
                   [param('ns3::LrWpanPibAttributeIdentifier', 'id'), param('ns3::LrWpanPhyPibAttributes *', 'attribute')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPdDataIndicationCallback(ns3::PdDataIndicationCallback c) [member function]
    cls.add_method('SetPdDataIndicationCallback',
                   'void',
                   [param('ns3::PdDataIndicationCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPdDataConfirmCallback(ns3::PdDataConfirmCallback c) [member function]
    cls.add_method('SetPdDataConfirmCallback',
                   'void',
                   [param('ns3::PdDataConfirmCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPlmeCcaConfirmCallback(ns3::PlmeCcaConfirmCallback c) [member function]
    cls.add_method('SetPlmeCcaConfirmCallback',
                   'void',
                   [param('ns3::PlmeCcaConfirmCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPlmeEdConfirmCallback(ns3::PlmeEdConfirmCallback c) [member function]
    cls.add_method('SetPlmeEdConfirmCallback',
                   'void',
                   [param('ns3::PlmeEdConfirmCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPlmeGetAttributeConfirmCallback(ns3::PlmeGetAttributeConfirmCallback c) [member function]
    cls.add_method('SetPlmeGetAttributeConfirmCallback',
                   'void',
                   [param('ns3::PlmeGetAttributeConfirmCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPlmeSetTRXStateConfirmCallback(ns3::PlmeSetTRXStateConfirmCallback c) [member function]
    cls.add_method('SetPlmeSetTRXStateConfirmCallback',
                   'void',
                   [param('ns3::PlmeSetTRXStateConfirmCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetPlmeSetAttributeConfirmCallback(ns3::PlmeSetAttributeConfirmCallback c) [member function]
    cls.add_method('SetPlmeSetAttributeConfirmCallback',
                   'void',
                   [param('ns3::PlmeSetAttributeConfirmCallback', 'c')])
    ## lr-wpan-phy.h (module 'lr-wpan'): double ns3::LrWpanPhy::GetDataOrSymbolRate(bool isData) [member function]
    cls.add_method('GetDataOrSymbolRate',
                   'double',
                   [param('bool', 'isData')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::SetErrorModel(ns3::Ptr<ns3::LrWpanErrorModel> e) [member function]
    cls.add_method('SetErrorModel',
                   'void',
                   [param('ns3::Ptr< ns3::LrWpanErrorModel >', 'e')])
    ## lr-wpan-phy.h (module 'lr-wpan'): ns3::Ptr<ns3::LrWpanErrorModel> ns3::LrWpanPhy::GetErrorModel() const [member function]
    cls.add_method('GetErrorModel',
                   'ns3::Ptr< ns3::LrWpanErrorModel >',
                   [],
                   is_const=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): uint64_t ns3::LrWpanPhy::GetPhySHRDuration() const [member function]
    cls.add_method('GetPhySHRDuration',
                   'uint64_t',
                   [],
                   is_const=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): double ns3::LrWpanPhy::GetPhySymbolsPerOctet() const [member function]
    cls.add_method('GetPhySymbolsPerOctet',
                   'double',
                   [],
                   is_const=True)
    ## lr-wpan-phy.h (module 'lr-wpan'): int64_t ns3::LrWpanPhy::AssignStreams(int64_t stream) [member function]
    cls.add_method('AssignStreams',
                   'int64_t',
                   [param('int64_t', 'stream')])
    ## lr-wpan-phy.h (module 'lr-wpan'): void ns3::LrWpanPhy::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3LrWpanSpectrumSignalParameters_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::LrWpanSpectrumSignalParameters.

    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    ## lr-wpan-spectrum-signal-parameters.h (module 'lr-wpan'): ns3::LrWpanSpectrumSignalParameters::LrWpanSpectrumSignalParameters() [constructor]
    cls.add_constructor([])
    ## lr-wpan-spectrum-signal-parameters.h (module 'lr-wpan'): ns3::LrWpanSpectrumSignalParameters::LrWpanSpectrumSignalParameters(ns3::LrWpanSpectrumSignalParameters const & p) [copy constructor]
    cls.add_constructor([param('ns3::LrWpanSpectrumSignalParameters const &', 'p')])
    ## lr-wpan-spectrum-signal-parameters.h (module 'lr-wpan'): ns3::Ptr<ns3::SpectrumSignalParameters> ns3::LrWpanSpectrumSignalParameters::Copy() [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::SpectrumSignalParameters >',
                   [],
                   is_virtual=True)
    ## lr-wpan-spectrum-signal-parameters.h (module 'lr-wpan'): ns3::LrWpanSpectrumSignalParameters::packetBurst [variable]
    cls.add_instance_attribute('packetBurst', 'ns3::Ptr< ns3::PacketBurst >', is_const=False)
    return
def register_Ns3Mac16AddressChecker_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac16AddressChecker (attribute checker).

    Auto-generated by pybindgen's module scanner.
    """
    ## mac16-address.h (module 'network'): ns3::Mac16AddressChecker::Mac16AddressChecker() [constructor]
    cls.add_constructor([])
    ## mac16-address.h (module 'network'): ns3::Mac16AddressChecker::Mac16AddressChecker(ns3::Mac16AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac16AddressChecker const &', 'arg0')])
    return
def register_Ns3Mac16AddressValue_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac16AddressValue (attribute value).

    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    ## mac16-address.h (module 'network'): ns3::Mac16AddressValue::Mac16AddressValue() [constructor]
    cls.add_constructor([])
    ## mac16-address.h (module 'network'): ns3::Mac16AddressValue::Mac16AddressValue(ns3::Mac16AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac16AddressValue const &', 'arg0')])
    ## mac16-address.h (module 'network'): ns3::Mac16AddressValue::Mac16AddressValue(ns3::Mac16Address const & value) [constructor]
    cls.add_constructor([param('ns3::Mac16Address const &', 'value')])
    ## mac16-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac16AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## mac16-address.h (module 'network'): bool ns3::Mac16AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## mac16-address.h (module 'network'): ns3::Mac16Address ns3::Mac16AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Mac16Address',
                   [],
                   is_const=True)
    ## mac16-address.h (module 'network'): std::string ns3::Mac16AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## mac16-address.h (module 'network'): void ns3::Mac16AddressValue::Set(ns3::Mac16Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Mac16Address const &', 'value')])
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac48AddressChecker (attribute checker).

    Auto-generated by pybindgen's module scanner.
    """
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac48AddressValue (attribute value).

    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    ## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Mac48Address',
                   [],
                   is_const=True)
    ## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3Mac64AddressChecker_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac64AddressChecker (attribute checker).

    Auto-generated by pybindgen's module scanner.
    """
    ## mac64-address.h (module 'network'): ns3::Mac64AddressChecker::Mac64AddressChecker() [constructor]
    cls.add_constructor([])
    ## mac64-address.h (module 'network'): ns3::Mac64AddressChecker::Mac64AddressChecker(ns3::Mac64AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac64AddressChecker const &', 'arg0')])
    return
def register_Ns3Mac64AddressValue_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Mac64AddressValue (attribute value).

    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    ## mac64-address.h (module 'network'): ns3::Mac64AddressValue::Mac64AddressValue() [constructor]
    cls.add_constructor([])
    ## mac64-address.h (module 'network'): ns3::Mac64AddressValue::Mac64AddressValue(ns3::Mac64AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac64AddressValue const &', 'arg0')])
    ## mac64-address.h (module 'network'): ns3::Mac64AddressValue::Mac64AddressValue(ns3::Mac64Address const & value) [constructor]
    cls.add_constructor([param('ns3::Mac64Address const &', 'value')])
    ## mac64-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac64AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## mac64-address.h (module 'network'): bool ns3::Mac64AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## mac64-address.h (module 'network'): ns3::Mac64Address ns3::Mac64AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Mac64Address',
                   [],
                   is_const=True)
    ## mac64-address.h (module 'network'): std::string ns3::Mac64AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## mac64-address.h (module 'network'): void ns3::Mac64AddressValue::Set(ns3::Mac64Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Mac64Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register pybindgen wrappers for the abstract base ns3::NetDevice.

    Nearly all methods are pure virtual (is_pure_virtual=True), so the
    wrapper class cannot be instantiated directly from Python.
    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::NixVector (nix-vector routing bits).

    Also exposes operator<< via add_output_stream_operator().
    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex',
                   'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount',
                   'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex',
                   'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits',
                   'uint32_t',
                   [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::Node (simulation node container).

    Auto-generated by pybindgen's module scanner; do not reorder calls by hand.
    """
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): ns3::Time ns3::Node::GetLocalTime() const [member function]
    cls.add_method('GetLocalTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register ns3::ObjectFactoryChecker (object-factory.h, module 'core').

    Only the default and copy constructors are exposed.
    """
    # Registration order matters for overload resolution: default ctor first.
    for ctor_args in ([], [param('ns3::ObjectFactoryChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register ns3::ObjectFactoryValue (object-factory.h, module 'core').

    Binds three constructors, the AttributeValue serialization interface
    (Copy / DeserializeFromString / SerializeToString), and the typed
    Get/Set accessors for the wrapped ns3::ObjectFactory.
    """
    def checker_param():
        # Build a fresh pybindgen param object for each registration call.
        return param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')
    # Constructors: default, copy, and from an ns3::ObjectFactory.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<AttributeChecker const>)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), checker_param()],
                   is_virtual=True)
    # ns3::ObjectFactory Get() const
    cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True)
    # std::string SerializeToString(Ptr<AttributeChecker const>) const
    cls.add_method('SerializeToString', 'std::string', [checker_param()],
                   is_const=True, is_virtual=True)
    # void Set(ns3::ObjectFactory const &)
    cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
    """Register ns3::OutputStreamWrapper (output-stream-wrapper.h, module 'network')."""
    # Constructors: copy, (filename, open-mode) pair, and wrap an existing ostream.
    cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
    cls.add_constructor([param('std::string', 'filename'),
                         param('std::_Ios_Openmode', 'filemode')])
    cls.add_constructor([param('std::ostream *', 'os')])
    # std::ostream * GetStream()
    cls.add_method('GetStream', 'std::ostream *', [])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register ns3::Packet (packet.h, module 'network') on the pybindgen
    class wrapper: the output-stream operator, five constructors, and the
    full public member-function surface (tag/header/trailer manipulation,
    fragmentation, serialization, and printing helpers)."""
    cls.add_output_stream_operator()
    ## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
    cls.add_constructor([])
    ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
    cls.add_constructor([param('uint32_t', 'size')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddByteTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header')])
    ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddPacketTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer')])
    ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
    cls.add_method('EnablePrinting',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
    cls.add_method('FindFirstMatchingByteTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
    cls.add_method('GetByteTagIterator',
                   'ns3::ByteTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
    cls.add_method('GetNixVector',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
    cls.add_method('GetPacketTagIterator',
                   'ns3::PacketTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')],
                   is_const=True)
    ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
    cls.add_method('PeekPacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('PeekTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
    cls.add_method('PrintByteTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
    cls.add_method('PrintPacketTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
    cls.add_method('RemoveAllByteTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
    cls.add_method('RemoveAllPacketTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')])
    ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('RemovePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('RemoveTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('ReplacePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
    cls.add_method('SetNixVector',
                   'void',
                   [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
    cls.add_method('ToString',
                   'std::string',
                   [],
                   is_const=True)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register ns3::TimeValue (nstime.h, module 'core').

    Binds three constructors, the AttributeValue serialization interface,
    and the typed Get/Set accessors for the wrapped ns3::Time.
    """
    def checker_param():
        # Build a fresh pybindgen param object for each registration call.
        return param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')
    # Constructors: default, copy, and from an ns3::Time.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Time const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<AttributeChecker const>)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), checker_param()],
                   is_virtual=True)
    # ns3::Time Get() const
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    # std::string SerializeToString(Ptr<AttributeChecker const>) const
    cls.add_method('SerializeToString', 'std::string', [checker_param()],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Time const &)
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register ns3::TypeIdChecker (type-id.h, module 'core').

    Only the default and copy constructors are exposed.
    """
    # Registration order matters for overload resolution: default ctor first.
    for ctor_args in ([], [param('ns3::TypeIdChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register ns3::TypeIdValue (type-id.h, module 'core').

    Binds three constructors, the AttributeValue serialization interface,
    and the typed Get/Set accessors for the wrapped ns3::TypeId.
    """
    def checker_param():
        # Build a fresh pybindgen param object for each registration call.
        return param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')
    # Constructors: default, copy, and from an ns3::TypeId.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<AttributeChecker const>)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), checker_param()],
                   is_virtual=True)
    # ns3::TypeId Get() const
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    # std::string SerializeToString(Ptr<AttributeChecker const>) const
    cls.add_method('SerializeToString', 'std::string', [checker_param()],
                   is_const=True, is_virtual=True)
    # void Set(ns3::TypeId const &)
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3UintegerValue_methods(root_module, cls):
    """Register ns3::UintegerValue (uinteger.h, module 'core').

    Binds three constructors, the AttributeValue serialization interface,
    and the typed Get/Set accessors for the wrapped uint64_t.
    """
    def checker_param():
        # Build a fresh pybindgen param object for each registration call.
        return param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')
    # Constructors: default, copy, and from a uint64_t.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')])
    cls.add_constructor([param('uint64_t const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<AttributeChecker const>)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), checker_param()],
                   is_virtual=True)
    # uint64_t Get() const
    cls.add_method('Get', 'uint64_t', [], is_const=True)
    # std::string SerializeToString(Ptr<AttributeChecker const>) const
    cls.add_method('SerializeToString', 'std::string', [checker_param()],
                   is_const=True, is_virtual=True)
    # void Set(uint64_t const &)
    cls.add_method('Set', 'void', [param('uint64_t const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register ns3::AddressChecker (address.h, module 'network').

    Only the default and copy constructors are exposed.
    """
    # Registration order matters for overload resolution: default ctor first.
    for ctor_args in ([], [param('ns3::AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register ns3::AddressValue (address.h, module 'network').

    Binds three constructors, the AttributeValue serialization interface,
    and the typed Get/Set accessors for the wrapped ns3::Address.
    """
    def checker_param():
        # Build a fresh pybindgen param object for each registration call.
        return param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')
    # Constructors: default, copy, and from an ns3::Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Address const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<AttributeChecker const>)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), checker_param()],
                   is_virtual=True)
    # ns3::Address Get() const
    cls.add_method('Get', 'ns3::Address', [], is_const=True)
    # std::string SerializeToString(Ptr<AttributeChecker const>) const
    cls.add_method('SerializeToString', 'std::string', [checker_param()],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Address const &)
    cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')])
    return
def register_Ns3LrWpanNetDevice_methods(root_module, cls):
    """Register ns3::LrWpanNetDevice (lr-wpan-net-device.h, module 'lr-wpan'):
    constructors, the ns3::NetDevice virtual interface overrides, and the
    LR-WPAN-specific accessors (GetMac/GetPhy/GetCsmaCa and their setters)."""
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::LrWpanNetDevice::LrWpanNetDevice(ns3::LrWpanNetDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::LrWpanNetDevice const &', 'arg0')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::LrWpanNetDevice::LrWpanNetDevice() [constructor]
    cls.add_constructor([])
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): int64_t ns3::LrWpanNetDevice::AssignStreams(int64_t stream) [member function]
    cls.add_method('AssignStreams',
                   'int64_t',
                   [param('int64_t', 'stream')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Address ns3::LrWpanNetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Address ns3::LrWpanNetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Ptr<ns3::Channel> ns3::LrWpanNetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Ptr<ns3::LrWpanCsmaCa> ns3::LrWpanNetDevice::GetCsmaCa() const [member function]
    cls.add_method('GetCsmaCa',
                   'ns3::Ptr< ns3::LrWpanCsmaCa >',
                   [],
                   is_const=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): uint32_t ns3::LrWpanNetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Ptr<ns3::LrWpanMac> ns3::LrWpanNetDevice::GetMac() const [member function]
    cls.add_method('GetMac',
                   'ns3::Ptr< ns3::LrWpanMac >',
                   [],
                   is_const=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): uint16_t ns3::LrWpanNetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Address ns3::LrWpanNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Address ns3::LrWpanNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Ptr<ns3::Node> ns3::LrWpanNetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): ns3::Ptr<ns3::LrWpanPhy> ns3::LrWpanNetDevice::GetPhy() const [member function]
    cls.add_method('GetPhy',
                   'ns3::Ptr< ns3::LrWpanPhy >',
                   [],
                   is_const=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): static ns3::TypeId ns3::LrWpanNetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::McpsDataIndication(ns3::McpsDataIndicationParams params, ns3::Ptr<ns3::Packet> pkt) [member function]
    cls.add_method('McpsDataIndication',
                   'void',
                   [param('ns3::McpsDataIndicationParams', 'params'), param('ns3::Ptr< ns3::Packet >', 'pkt')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetChannel(ns3::Ptr<ns3::SpectrumChannel> channel) [member function]
    cls.add_method('SetChannel',
                   'void',
                   [param('ns3::Ptr< ns3::SpectrumChannel >', 'channel')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetCsmaCa(ns3::Ptr<ns3::LrWpanCsmaCa> csmaca) [member function]
    cls.add_method('SetCsmaCa',
                   'void',
                   [param('ns3::Ptr< ns3::LrWpanCsmaCa >', 'csmaca')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetMac(ns3::Ptr<ns3::LrWpanMac> mac) [member function]
    cls.add_method('SetMac',
                   'void',
                   [param('ns3::Ptr< ns3::LrWpanMac >', 'mac')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetPhy(ns3::Ptr<ns3::LrWpanPhy> phy) [member function]
    cls.add_method('SetPhy',
                   'void',
                   [param('ns3::Ptr< ns3::LrWpanPhy >', 'phy')])
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): bool ns3::LrWpanNetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    ## lr-wpan-net-device.h (module 'lr-wpan'): void ns3::LrWpanNetDevice::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register the ns3::Hash::Implementation base class (hash-function.h, module 'core')."""
    def buffer_params():
        # Common (buffer, size) parameter pair for the hash methods.
        return [param('char const *', 'buffer'), param('size_t const', 'size')]
    # Constructors: copy first, then default (matches the generated order).
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    cls.add_constructor([])
    # GetHash32 and clear are pure virtual; GetHash64 is a plain virtual.
    cls.add_method('GetHash32', 'uint32_t', buffer_params(),
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t', buffer_params(),
                   is_virtual=True)
    cls.add_method('clear', 'void', [],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register ns3::Hash::Function::Fnv1a (hash-fnv.h, module 'core')."""
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    cls.add_constructor([])
    # Both hash widths are registered as virtual overrides.
    for method_name, ret_type in (('GetHash32', 'uint32_t'),
                                  ('GetHash64', 'uint64_t')):
        cls.add_method(method_name, ret_type,
                       [param('char const *', 'buffer'),
                        param('size_t const', 'size')],
                       is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash32 (hash-function.h, module 'core')."""
    # Constructors: copy, and from a Hash32Function_ptr function pointer.
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    # Only the 32-bit hash and clear() are registered for this class.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'),
                    param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash64 (hash-function.h, module 'core')."""
    # Constructors: copy, and from a Hash64Function_ptr function pointer.
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    # Both hash widths are registered as virtual overrides.
    for method_name, ret_type in (('GetHash32', 'uint32_t'),
                                  ('GetHash64', 'uint64_t')):
        cls.add_method(method_name, ret_type,
                       [param('char const *', 'buffer'),
                        param('size_t const', 'size')],
                       is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
cls.add_constructor([])
## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_Hash(module, root_module):
register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
return
def register_functions_ns3_Hash_Function(module, root_module):
return
def register_functions_ns3_TracedValueCallback(module, root_module):
return
def register_functions_ns3_internal(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
| gpl-2.0 | 7,679,634,221,064,839,000 | 64.780503 | 448 | 0.616444 | false |
feist/pcs | pcs/lib/commands/test/test_resource_agent.py | 1 | 15967 | # coding=utf-8
import logging
from unittest import mock, TestCase
from lxml import etree
from pcs.test.tools.assertions import (
assert_raise_library_error,
start_tag_error_text,
)
from pcs.test.tools.command_env import get_env_tools
from pcs.test.tools.custom_mock import MockLibraryReportProcessor
from pcs.common import report_codes
from pcs.lib import resource_agent as lib_ra
from pcs.lib.env import LibraryEnvironment
from pcs.lib.errors import ReportItemSeverity as severity
from pcs.lib.commands import resource_agent as lib
@mock.patch("pcs.lib.resource_agent.list_resource_agents_standards")
@mock.patch.object(
LibraryEnvironment,
"cmd_runner",
lambda self: "mock_runner"
)
class TestListStandards(TestCase):
def setUp(self):
self.mock_logger = mock.MagicMock(logging.Logger)
self.mock_reporter = MockLibraryReportProcessor()
self.lib_env = LibraryEnvironment(self.mock_logger, self.mock_reporter)
def test_success(self, mock_list_standards):
standards = [
"lsb",
"nagios",
"ocf",
"service",
"systemd",
]
mock_list_standards.return_value = standards
self.assertEqual(
lib.list_standards(self.lib_env),
standards
)
mock_list_standards.assert_called_once_with("mock_runner")
@mock.patch("pcs.lib.resource_agent.list_resource_agents_ocf_providers")
@mock.patch.object(
LibraryEnvironment,
"cmd_runner",
lambda self: "mock_runner"
)
class TestListOcfProviders(TestCase):
def setUp(self):
self.mock_logger = mock.MagicMock(logging.Logger)
self.mock_reporter = MockLibraryReportProcessor()
self.lib_env = LibraryEnvironment(self.mock_logger, self.mock_reporter)
def test_success(self, mock_list_providers):
providers = [
"booth",
"heartbeat",
"openstack",
"pacemaker",
]
mock_list_providers.return_value = providers
self.assertEqual(
lib.list_ocf_providers(self.lib_env),
providers
)
mock_list_providers.assert_called_once_with("mock_runner")
@mock.patch("pcs.lib.resource_agent.list_resource_agents_standards")
@mock.patch("pcs.lib.resource_agent.list_resource_agents")
@mock.patch.object(
LibraryEnvironment,
"cmd_runner",
lambda self: "mock_runner"
)
class TestListAgentsForStandardAndProvider(TestCase):
def setUp(self):
self.mock_logger = mock.MagicMock(logging.Logger)
self.mock_reporter = MockLibraryReportProcessor()
self.lib_env = LibraryEnvironment(self.mock_logger, self.mock_reporter)
def test_standard_specified(self, mock_list_agents, mock_list_standards):
agents = [
"Delay",
"Dummy",
"Stateful",
]
mock_list_agents.return_value = agents
self.assertEqual(
lib.list_agents_for_standard_and_provider(self.lib_env, "ocf:test"),
agents
)
mock_list_agents.assert_called_once_with("mock_runner", "ocf:test")
mock_list_standards.assert_not_called()
def test_standard_not_specified(
self, mock_list_agents, mock_list_standards
):
agents_ocf = [
"Delay",
"Dummy",
"Stateful",
]
agents_service = [
"corosync",
"pacemaker",
"pcsd",
]
mock_list_standards.return_value = ["ocf:test", "service"]
mock_list_agents.side_effect = [agents_ocf, agents_service]
self.assertEqual(
lib.list_agents_for_standard_and_provider(self.lib_env),
sorted(agents_ocf + agents_service, key=lambda x: x.lower())
)
mock_list_standards.assert_called_once_with("mock_runner")
self.assertEqual(2, len(mock_list_agents.mock_calls))
mock_list_agents.assert_has_calls([
mock.call("mock_runner", "ocf:test"),
mock.call("mock_runner", "service"),
])
@mock.patch(
"pcs.lib.resource_agent.list_resource_agents_standards_and_providers",
lambda runner: ["service", "ocf:test"]
)
@mock.patch(
"pcs.lib.resource_agent.list_resource_agents",
lambda runner, standard: {
"ocf:test": [
"Stateful",
"Delay",
],
"service": [
"corosync",
"pacemaker_remote",
],
}.get(standard, [])
)
@mock.patch.object(
LibraryEnvironment,
"cmd_runner",
lambda self: "mock_runner"
)
class TestListAgents(TestCase):
def setUp(self):
self.mock_logger = mock.MagicMock(logging.Logger)
self.mock_reporter = MockLibraryReportProcessor()
self.lib_env = LibraryEnvironment(self.mock_logger, self.mock_reporter)
def test_list_all(self):
self.assertEqual(
lib.list_agents(self.lib_env, False, None),
[
{
"name": "ocf:test:Delay",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
{
"name": "ocf:test:Stateful",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
{
"name": "service:corosync",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
{
"name": "service:pacemaker_remote",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
]
)
def test_search(self):
self.assertEqual(
lib.list_agents(self.lib_env, False, "te"),
[
{
"name": "ocf:test:Delay",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
{
"name": "ocf:test:Stateful",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
{
"name": "service:pacemaker_remote",
"shortdesc": "",
"longdesc": "",
"parameters": [],
"actions": [],
},
]
)
@mock.patch.object(lib_ra.Agent, "_get_metadata", autospec=True)
def test_describe(self, mock_metadata):
def mock_metadata_func(self):
if self.get_name() == "ocf:test:Stateful":
raise lib_ra.UnableToGetAgentMetadata(
self.get_name(),
"test exception"
)
return etree.XML("""
<resource-agent>
<shortdesc>short {name}</shortdesc>
<longdesc>long {name}</longdesc>
<parameters>
</parameters>
<actions>
</actions>
</resource-agent>
""".format(name=self.get_name()))
mock_metadata.side_effect = mock_metadata_func
# Stateful is missing as it does not provide valid metadata - see above
self.assertEqual(
lib.list_agents(self.lib_env, True, None),
[
{
"name": "ocf:test:Delay",
"shortdesc": "short ocf:test:Delay",
"longdesc": "long ocf:test:Delay",
"parameters": [],
"actions": [],
},
{
"name": "service:corosync",
"shortdesc": "short service:corosync",
"longdesc": "long service:corosync",
"parameters": [],
"actions": [],
},
{
"name": "service:pacemaker_remote",
"shortdesc": "short service:pacemaker_remote",
"longdesc": "long service:pacemaker_remote",
"parameters": [],
"actions": [],
},
]
)
class CompleteAgentList(TestCase):
def test_skip_agent_name_when_invalid_resource_agent_ame_raised(self):
# pylint: disable=too-few-public-methods, unused-argument, protected-access
invalid_agent_name = "systemd:lvm2-pvscan@252:2"#suppose it is invalid
class Agent():
def __init__(self, runner, name):
if name == invalid_agent_name:
raise lib_ra.InvalidResourceAgentName(name)
self.name = name
def get_name_info(self):
return self.name
self.assertEqual(["ocf:heartbeat:Dummy"], lib._complete_agent_list(
mock.MagicMock(),
["ocf:heartbeat:Dummy", invalid_agent_name],
describe=False,
search=False,
metadata_class=Agent,
))
@mock.patch.object(lib_ra.ResourceAgent, "_load_metadata", autospec=True)
@mock.patch("pcs.lib.resource_agent.guess_exactly_one_resource_agent_full_name")
@mock.patch.object(
LibraryEnvironment,
"cmd_runner",
lambda self: "mock_runner"
)
class TestDescribeAgent(TestCase):
def setUp(self):
self.mock_logger = mock.MagicMock(logging.Logger)
self.mock_reporter = MockLibraryReportProcessor()
self.lib_env = LibraryEnvironment(self.mock_logger, self.mock_reporter)
self.metadata = """
<resource-agent>
<shortdesc>short desc</shortdesc>
<longdesc>long desc</longdesc>
<parameters>
</parameters>
<actions>
</actions>
</resource-agent>
"""
self.description = {
"name": "ocf:test:Dummy",
"shortdesc": "short desc",
"longdesc": "long desc",
"parameters": [],
"actions": [],
"default_actions": [{"interval": "60s", "name": "monitor"}],
}
def test_full_name_success(self, mock_guess, mock_metadata):
mock_metadata.return_value = self.metadata
self.assertEqual(
lib.describe_agent(self.lib_env, "ocf:test:Dummy"),
self.description
)
self.assertEqual(len(mock_metadata.mock_calls), 1)
mock_guess.assert_not_called()
def test_guess_success(self, mock_guess, mock_metadata):
mock_metadata.return_value = self.metadata
mock_guess.return_value = lib_ra.ResourceAgent(
self.lib_env.cmd_runner(),
"ocf:test:Dummy"
)
self.assertEqual(
lib.describe_agent(self.lib_env, "dummy"),
self.description
)
self.assertEqual(len(mock_metadata.mock_calls), 1)
mock_guess.assert_called_once_with("mock_runner", "dummy")
def test_full_name_fail(self, mock_guess, mock_metadata):
mock_metadata.return_value = "invalid xml"
assert_raise_library_error(
lambda: lib.describe_agent(self.lib_env, "ocf:test:Dummy"),
(
severity.ERROR,
report_codes.UNABLE_TO_GET_AGENT_METADATA,
{
"agent": "ocf:test:Dummy",
"reason": start_tag_error_text(),
}
)
)
self.assertEqual(len(mock_metadata.mock_calls), 1)
mock_guess.assert_not_called()
class DescribeAgentUtf8(TestCase):
def setUp(self):
self.env_assist, self.config = get_env_tools(test_case=self)
self.config.runner.pcmk.load_agent(
agent_filename="resource_agent_ocf_heartbeat_dummy_utf8.xml"
)
def test_describe(self):
name = "ocf:heartbeat:Dummy"
self.assertEqual(
lib.describe_agent(self.env_assist.get_env(), name),
{
"name": name,
"shortdesc": u"Example stateless resource agent: ®",
"longdesc": u"This is a Dummy Resource Agent for testing utf-8"
u" in metadata: ®"
,
"parameters": [
{
"advanced": False,
"default": u"/var/run/resource-agents/Dummy-®.state",
"deprecated": False,
"deprecated_by": [],
"longdesc":
u"Location to store the resource state in: ®",
"name": u"state-®",
"obsoletes": None,
"pcs_deprecated_warning": "",
"required": False,
"shortdesc": u"State file: ®",
"type": "string",
"unique": True,
},
{
"advanced": True,
"default": 0,
"deprecated": False,
"deprecated_by": [],
"longdesc": "Set to 1 to turn on resource agent tracing"
" (expect large output) The trace output will be "
"saved to trace_file, if set, or by default to "
"$HA_VARRUN/ra_trace/<type>/<id>.<action>."
"<timestamp> e.g. $HA_VARRUN/ra_trace/oracle/db."
"start.2012-11-27.08:37:08",
"name": "trace_ra",
"obsoletes": None,
"pcs_deprecated_warning": "",
"required": False,
"shortdesc": "Set to 1 to turn on resource agent "
"tracing (expect large output)",
"type": "integer",
"unique": False,
},
{
"advanced": True,
"default": "",
"deprecated": False,
"deprecated_by": [],
"longdesc": "Path to a file to store resource agent "
"tracing log",
"name": "trace_file",
"obsoletes": None,
"pcs_deprecated_warning": "",
"required": False,
"shortdesc": "Path to a file to store resource agent "
"tracing log",
"type": "string",
"unique": False,
}
],
"actions": [
{"name": "start", "timeout": "20"},
{"name": "stop", "timeout": "20"},
{"name": "monitor", "interval": "10", "timeout": "20"},
{"name": "meta-data", "timeout": "5"},
{"name": "validate-all", "timeout": "20"},
{"name": u"custom-®", "timeout": "20"},
],
"default_actions": [
{"name": "start", "interval": "0s", "timeout": "20"},
{"name": "stop", "interval": "0s", "timeout": "20"},
{"name": "monitor", "interval": "10", "timeout": "20"},
{"name": u"custom-®", "interval": "0s", "timeout": "20"},
],
}
)
| gpl-2.0 | 2,561,513,103,280,016,000 | 32.955319 | 83 | 0.479604 | false |
panholt/sparkpy | sparkpy/models/room.py | 1 | 4745 | from .base import SparkBase, SparkProperty
from .time import SparkTime
from .message import SparkMessage
from .membership import SparkMembership
from .container import SparkContainer
class SparkRoom(SparkBase):
# | Start of class attributes |-------------------------------------------|
API_BASE = 'https://api.ciscospark.com/v1/rooms/'
PROPERTIES = {'id': SparkProperty('id'),
'title': SparkProperty('title', mutable=True),
'type': SparkProperty('type'),
'isLocked': SparkProperty('islocked',
optional=True),
'lastActivity': SparkProperty('lastActivity',
optional=True),
'created': SparkProperty('created'),
'creatorId': SparkProperty('creatorId'),
'sipAddress': SparkProperty('sipAddress', optional=True),
'teamId': SparkProperty('teamId', optional=True)}
# | Start of instance attributes |----------------------------------------|
def __init__(self, *args, **kwargs):
super().__init__(*args, path='rooms', **kwargs)
def update(self, key, value):
if key == 'title' and len(value):
self.parent.session.put(self.url, json={key: value})
elif key == 'isLocked':
raise NotImplemented('isLocked is not implemnted')
return
@property
def members(self):
''' Members of the Cisco Spark Room
:getter: a generator like object of members of the room
:type: `SparkContainer` of `SparkPeople` items
'''
return SparkContainer(SparkMembership,
params={'roomId': self.id},
parent=self)
@property
def messages(self):
''' Messages in the Cisco Spark Room
:getter: a generator like object of members of the room
:type: `SparkContainer` of `SparkPeople` items
'''
return SparkContainer(SparkMessage,
params=self.message_params,
parent=self)
@property
def link(self):
return f'https://web.ciscospark.com/rooms/{self.uuid}/chat'
@property
def message_params(self):
''' Retuns URL paramaters for /messages/
Sets the `roomId` filter and if the session owner is a bot,
the `mentionedPeople` filter is set to `me`
:getter: url paramaters
:type: dict
'''
data = {'roomId': self.id}
if self.parent.is_bot and self.type == 'group':
data['mentionedPeople'] = 'me'
return data
def send_message(self, text, file=None):
''' Send a message to the room
:param text: Markdown formatted text to send in message
:type title: str
:return: None
'''
self.parent.send_message(text, room_id=self.id, file=file)
return
def add_member(self, *args, email='', moderator=False):
''' Add a person to the room
:param email: email address of person to add
:type email: str
:param moderator: Default: False, Make person a moderator of room
:type moderator: bool
:return: None
'''
data = {'roomId': self.id}
if args:
# TODO Type checking
data['personId'] = args[0]
if '@' in email:
data['personEmail'] = email
if moderator:
data['isModerator'] = moderator
self.parent.session.post(SparkMembership.API_BASE, json=data)
return
def remove_member(self, *args, email=''):
''' Add a person to the room
:param email: email address of person to add
:type email: str
:param moderator: Default: False, Make person a moderator of room
:type moderator: bool
:return: None
'''
if args:
for member in self.members.filtered(lambda
x: x.personId == args[0]):
member.delete()
elif '@' in email:
for member in self.members.filtered(lambda
x: x.personEmail == email):
member.delete()
return
def remove_all_members(self):
''' Remove all people from the room leaving this account
:return: None
'''
for member in self.members.filtered(lambda x: x != self.parent.me.id):
member.delete()
return
def __repr__(self):
return f"SparkRoom('{self.id}')"
| mit | 6,291,617,384,162,153,000 | 33.136691 | 79 | 0.525395 | false |
prechelt/unread-decorator | setup.py | 1 | 3307 | # based on https://github.com/pypa/sampleproject/blob/master/setup.py
# see http://packaging.python.org/en/latest/tutorial.html#creating-your-own-project
from setuptools import setup, find_packages
from setuptools.command.install import install as stdinstall
import codecs
import os
import re
import sys
def find_version(*file_paths):
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, *file_paths), 'r', 'latin1') as f:
version_file = f.read()
# The version line must have the form
# __version__ = 'ver'
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
def get_file_contents(filename):
with codecs.open(filename, encoding='utf-8') as f:
contents = f.read()
return contents
package_name = "unread-decorator"
setup(
# basic information:
name=package_name,
version=find_version('unread_decorator.py'),
description="unread() for streams, unnext() for iterators",
long_description=get_file_contents("README.rst"),
# The project URL:
url='http://github.com/prechelt/' + package_name,
# Author details:
author='Lutz Prechelt',
author_email='[email protected]',
# Classification:
license='BSD License',
classifiers=[
'License :: OSI Approved :: BSD License',
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='undo, I/O, iterator',
py_modules=['unread_decorator'],
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages.
packages=find_packages(exclude=["contrib", "docs", "tests*"]),
# List run-time dependencies here. These will be installed by pip when your
# project is installed.
install_requires = [],
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
# 'mypackage': ['package_data.dat'],
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages.
# see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
###data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
### entry_points={
# 'console_scripts': [
# 'sample=sample:main',
# ],
# },
) | bsd-2-clause | 5,754,088,370,807,579,000 | 32.755102 | 91 | 0.643786 | false |
fpliger/statz | setup.py | 1 | 1151 | from __future__ import print_function
import sys
import os.path
from setuptools import setup, find_packages
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
long_description = open(readme).read()
setup(
name='statz',
version='0.0.1',
author='Fabio Pliger',
author_email='[email protected]',
url='',
license='MIT',
platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
description='Statistics and auto-document pyramid, pytest and few other tools...',
long_description=long_description,
keywords='statz statistics',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Environment :: Web Environment",
"Programming Language :: Python",
"Intended Audience :: Developers",
"Framework :: Pyramid",
"Topic :: Utilities",
"Topic :: Documentation",
],
install_requires=[
'pyramid',
'pyramid_mako',
'webhelpers',
'py',
'pygments',
'pytest',
'mock',
],
extras_require = {
'testing': 'pytest',
},
) | mit | 5,771,319,466,270,742,000 | 25.181818 | 86 | 0.582103 | false |
askalbania/piernik | problems/mcrwind/piernik_problem.py | 1 | 1992 | #!/usr/bin/python
import sys
import numpy as np
import matplotlib
matplotlib.use('cairo')
from yt.mods import load as yt_load
from pylab import *
THRESHOLD = 1e-9
FIELD = "cr1"
def _myplot(diff, fname, ext, clbl):
v = abs(diff).max()
figure(1, (6, 8))
imshow(diff, vmin=-v, vmax=v, extent=ext, cmap='RdBu')
bar = colorbar()
bar.ax.set_xlabel(clbl)
draw()
xlabel('y [pc]')
ylabel('z [pc]')
savefig(fname)
clf()
def plot_diff(pf1, pf2, data1, data2, field):
wd = pf1.domain_width
n_d = pf1.domain_dimensions
ext = np.array([pf1.domain_left_edge[1], pf1.domain_right_edge[1],
pf1.domain_left_edge[2], pf1.domain_right_edge[2]])
ext *= pf1['pc']
img1 = data1.to_frb(wd[1], (n_d[2] * 10, n_d[1] * 10),
center=np.array([0, 0, 0]), height=wd[2])
img2 = data2.to_frb(wd[1], (n_d[2] * 10, n_d[1] * 10),
center=np.array([0, 0, 0]), height=wd[2])
diff = (img2[field] - img1[field])
clbl = \
r"$\rm{%s}^{\rm{new}} - \rm{%s}^{\rm{old}}$" % (field, field)
_myplot(diff, 'diff_bare.png', ext, clbl)
clbl = \
r"$\frac{\rm{%s}^{\rm{new}} - \rm{%s}^{\rm{old}}}{\rm{%s}^{\rm{old}}}$" % (field, field, field)
_myplot(diff / (img1[field] + THRESHOLD), 'diff.png', ext, clbl)
if len(sys.argv) != 3:
print("Wrong number of arguments!")
sys.exit(-1)
PF1 = yt_load(sys.argv[1])
PF2 = yt_load(sys.argv[2])
axis = np.where(PF1.h.grids[0].ActiveDimensions == 1)[0][0]
DATA1 = PF1.h.slice(axis, 0.0, fields=[FIELD])
DATA2 = PF2.h.slice(axis, 0.0, fields=[FIELD])
if not PF1.h.field_list == PF2.h.field_list:
print("Fields in files differ!")
sys.exit(-1)
for field in PF1.h.field_list:
if abs(DATA1[field] - DATA2[field]).max() >= THRESHOLD:
print("Field %s differs" % field)
plot_diff(PF1, PF2, DATA1, DATA2, field)
sys.exit(-1)
figure(1, (8,6))
draw()
savefig('diff.png')
savefig('diff_bare.png')
| gpl-3.0 | -1,219,925,660,328,792,000 | 28.294118 | 103 | 0.566767 | false |
ccoakley/dbcbet | dbcbet/test/dbcbet_test.py | 1 | 7562 | """Test dbcbet"""
from dbcbet.dbcbet import pre, post, inv, throws, dbc, bet, finitize, finitize_method, ContractViolation, ThrowsViolation
from dbcbet.helpers import state, argument_types
#
# These methods are the various preconditions, postconditions, and invariants used by tests
#
# a precondition
def both_numbers_positive(self, arg1, arg2):
return arg1 > 0 and arg2 > 0
# a (necessary) precondition
def first_greater_than_second(self, arg1, arg2):
return arg1 > arg2
# a postcondition
def returned_the_sum(self, old, ret, arg1, arg2):
return ret == arg1+arg2
# another postcondition
def set_x(self, old, ret, arg1, arg2):
return self.x == arg1-arg2
# an invariant
def x_non_negative(self):
return self.x >= 0
# a finitization
def finitize_example_class():
return {'x':[-1,0,1,2]}
# Pete: this seems like a typical case. Maybe the finitization should just be the returned hash, and not a function.
#
# showing off the syntax
#
# applying invariant to class, precondition and postconditions to the method
@inv(x_non_negative)
@finitize(finitize_example_class)
class ExampleClass:
def __init__(self):
self.x = 0
@finitize_method([-1,0,1,2,3],range(-1,3))
@pre(both_numbers_positive)
@pre(first_greater_than_second)
@post(set_x)
@post(returned_the_sum)
def do_something(self, a1, a2):
self.x = a1-a2
return a1+a2
# Tests
def test_bet():
bet(ExampleClass).run()
#
# A more complicated test with inheritance
#
def base_class_inv(self):
if hasattr(self, "x"):
return self.x != 1
else:
return True
def sub_class_inv(self):
if hasattr(self, "x"):
return self.x != 2
else:
return True
def base_class_method_pre(self, a):
return a != 3
def sub_class_method_pre(self, a):
return a != 4
def base_class_method_post(self, old, ret, a):
return a != 5
def sub_class_method_post(self, old, ret, a):
return a != 6
def sub_class_method_pre2(self, a):
return a != 7
def sub_class_method_post2(self, old, ret, a):
return a != 8
@inv(base_class_inv)
class TestBaseClass(object):
@pre(base_class_method_pre)
@post(base_class_method_post)
def a_method(self, a):
self.x = a
@inv(sub_class_inv)
class TestSubClass(TestBaseClass):
@pre(sub_class_method_pre)
@pre(sub_class_method_pre2)
@post(sub_class_method_post)
@post(sub_class_method_post2)
@finitize_method(range(-1,10))
def a_method(self, a):
self.x = a+1
def test_inheritance():
bet(TestSubClass).run()
print "Individual Tests"
explicit_success(TestSubClass, -1)
explicit_fail(TestSubClass, 0)
explicit_fail(TestSubClass, 1)
explicit_success(TestSubClass, 2)
explicit_success(TestSubClass, 3)
explicit_success(TestSubClass, 4)
explicit_fail(TestSubClass, 5)
explicit_fail(TestSubClass, 6)
explicit_success(TestSubClass, 7)
explicit_fail(TestSubClass, 8)
explicit_success(TestSubClass, 9)
def test_solo_composition():
test_only_pre()
test_only_post()
test_only_inv()
class TestOnlyPre(object):
@pre(sub_class_method_pre)
@pre(sub_class_method_pre2)
def a_method(self, a):
self.x = a+1
def test_only_pre():
explicit_fail(TestOnlyPre, 4)
explicit_success(TestOnlyPre, 5)
explicit_fail(TestOnlyPre, 7)
class TestOnlyPost(object):
@post(sub_class_method_post)
@post(sub_class_method_post2)
def a_method(self, a):
self.x = a+1
def test_only_post():
explicit_fail(TestOnlyPost, 6)
explicit_success(TestOnlyPost, 7)
explicit_fail(TestOnlyPost, 8)
@inv(base_class_inv)
@inv(sub_class_inv)
class TestOnlyInv(object):
def a_method(self, a):
self.x = a+1
def test_only_inv():
explicit_success(TestOnlyInv, -1)
explicit_fail(TestOnlyInv, 0)
explicit_fail(TestOnlyInv, 1)
explicit_success(TestOnlyInv, 2)
def explicit_fail(class_, val):
t = class_()
try:
t.a_method(val)
assert False, str(val) + " worked, should have failed"
except ContractViolation as cv:
assert True
def explicit_success(class_, val):
t = class_()
try:
t.a_method(val)
assert True
except ContractViolation as cv:
assert False, str(val) + " failed, should have worked: " + str(cv)
class GoodException(Exception):
pass
class BadException(Exception):
pass
class AnotherGoodException(GoodException):
pass
class ADifferentGoodException(Exception):
pass
class ThrowsTestClass(object):
@throws(ADifferentGoodException)
@throws(GoodException)
def do_something(self, x):
if x==1:
# allowed
raise GoodException()
if x==2:
# allowed
raise AnotherGoodException()
if x==3:
# allowed
raise ADifferentGoodException()
# not allowed
raise BadException()
@dbc
class ThrowsTestSubClass(ThrowsTestClass):
@throws(AnotherGoodException)
def do_something(self, x):
if x==1:
# not allowed
raise GoodException()
if x==2:
# allowed
raise AnotherGoodException()
if x==3:
# not allowed
raise ADifferentGoodException()
# not allowed
raise BadException()
@dbc
class ThrowsTestSubSubClass(ThrowsTestSubClass):
def do_something(self, x):
if x==1:
# not allowed
raise GoodException()
if x==2:
# allowed
raise AnotherGoodException()
if x==3:
# not allowed
raise ADifferentGoodException()
# not allowed
raise BadException()
def test_throws():
try:
ThrowsTestClass().do_something(1)
except GoodException:
print "GoodException worked"
try:
ThrowsTestClass().do_something(2)
except GoodException:
print "GoodException worked"
try:
ThrowsTestClass().do_something(3)
except ADifferentGoodException:
print "ADifferentGoodException worked"
try:
ThrowsTestClass().do_something(4)
except ThrowsViolation:
print "Translating BadException to ThrowsViolation worked"
try:
ThrowsTestSubClass().do_something(1)
except ThrowsViolation:
print "Translating GoodException to ThrowsViolation on subclass worked"
try:
ThrowsTestSubClass().do_something(2)
except GoodException:
print "GoodException worked"
try:
ThrowsTestSubClass().do_something(3)
except ThrowsViolation:
print "Translating ADifferentGoodException worked"
try:
ThrowsTestSubClass().do_something(4)
except ThrowsViolation:
print "Translating BadException to ThrowsViolation worked"
try:
ThrowsTestSubSubClass().do_something(1)
except ThrowsViolation:
print "Translating GoodException to ThrowsViolation on subsubclass worked"
try:
ThrowsTestSubSubClass().do_something(2)
except GoodException:
print "GoodException worked"
try:
ThrowsTestSubSubClass().do_something(3)
except ThrowsViolation:
print "Translating ADifferentGoodException worked"
try:
ThrowsTestSubSubClass().do_something(4)
except ThrowsViolation:
print "Translating BadException to ThrowsViolation worked"
if __name__ == "__main__":
test_inheritance()
test_throws()
test_bet()
test_solo_composition()
| mit | 8,137,722,210,013,781,000 | 24.633898 | 121 | 0.644406 | false |
espdev/readthedocs.org | readthedocs/restapi/views/search_views.py | 1 | 4951 | import logging
from rest_framework import decorators, permissions, status
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project, ProjectRelationship
from readthedocs.search.lib import search_file, search_project, search_section
from readthedocs.restapi import utils
log = logging.getLogger(__name__)
@decorators.api_view(['POST'])
@decorators.permission_classes((permissions.IsAdminUser,))
@decorators.renderer_classes((JSONRenderer,))
def index_search(request):
"""Add things to the search index"""
data = request.DATA['data']
version_pk = data['version_pk']
commit = data.get('commit')
version = Version.objects.get(pk=version_pk)
project_scale = 1
page_scale = 1
utils.index_search_request(
version=version, page_list=data['page_list'], commit=commit,
project_scale=project_scale, page_scale=page_scale)
return Response({'indexed': True})
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer,))
def search(request):
"""Perform search, supplement links by resolving project domains"""
project_slug = request.GET.get('project', None)
version_slug = request.GET.get('version', LATEST)
query = request.GET.get('q', None)
if project_slug is None or query is None:
return Response({'error': 'Need project and q'},
status=status.HTTP_400_BAD_REQUEST)
try:
project = Project.objects.get(slug=project_slug)
except Project.DoesNotExist:
return Response({'error': 'Project not found'},
status=status.HTTP_404_NOT_FOUND)
log.debug("(API Search) %s", query)
results = search_file(request=request, project_slug=project_slug,
version_slug=version_slug, query=query)
# Supplement result paths with domain information on project
hits = results.get('hits', {}).get('hits', [])
for (n, hit) in enumerate(hits):
fields = hit.get('fields', {})
search_project = fields.get('project')[0]
search_version = fields.get('version')[0]
path = fields.get('path')[0]
canonical_url = project.get_docs_url(version_slug=version_slug)
if search_project != project_slug:
try:
subproject = project.subprojects.get(child__slug=search_project)
canonical_url = subproject.child.get_docs_url(
version_slug=search_version
)
except ProjectRelationship.DoesNotExist:
pass
results['hits']['hits'][n]['fields']['link'] = (
canonical_url + path
)
return Response({'results': results})
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer,))
def project_search(request):
query = request.GET.get('q', None)
if query is None:
return Response({'error': 'Need project and q'}, status=status.HTTP_400_BAD_REQUEST)
log.debug("(API Project Search) %s", (query))
results = search_project(request=request, query=query)
return Response({'results': results})
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer,))
def section_search(request):
    """Section search

    Queries with query ``q`` across all documents and projects. Queries can be
    limited to a single project or version by using the ``project`` and
    ``version`` GET arguments in your request.

    When you search, you will have a ``project`` facet, which includes the
    number of matching sections per project. When you search inside a project,
    the ``path`` facet will show the number of matching sections per page.

    Possible GET args
    -----------------

    q **(required)**
        The query string **Required**

    project
        A project slug

    version
        A version slug

    path
        A file path slug


    Example::

        GET /api/v2/search/section/?q=virtualenv&project=django

    """
    query = request.GET.get('q', None)
    if not query:
        # Bail out early: a query string is the only hard requirement.
        return Response(
            {'error': 'Search term required. Use the "q" GET arg to search. '},
            status=status.HTTP_400_BAD_REQUEST)
    # Optional narrowing filters; version defaults to the latest build.
    project_slug = request.GET.get('project', None)
    version_slug = request.GET.get('version', LATEST)
    path = request.GET.get('path', None)
    log.debug("(API Section Search) [%s:%s] %s", project_slug, version_slug,
              query)
    results = search_section(
        request=request,
        query=query,
        project_slug=project_slug,
        version_slug=version_slug,
        path=path,
    )
    return Response({'results': results})
| mit | 4,044,174,051,605,192,000 | 33.381944 | 92 | 0.654211 | false |
zerothi/sids | sisl/utils/ranges.py | 1 | 8369 | import re
from itertools import groupby
from numpy import zeros, ones, cumsum, take, int32, int64
from numpy import asarray
__all__ = ["strmap", "strseq", "lstranges", "erange", "list2str", "fileindex"]
__all__ += ["array_arange"]
# Function to change a string to a range of integers
def strmap(func, s, start=None, end=None, sep="b"):
    """ Parse a string as though it was a slice and map all entries using ``func``.

    Parameters
    ----------
    func : function
       function to parse every match with
    s    : str
       the string that should be parsed
    start : optional
       the replacement in case the LHS of the delimiter is not present
    end : optional
       the replacement in case the RHS of the delimiter is not present
    sep  : {"b", "c"}
       separator used, ``"b"`` is square brackets, ``"c"``, curly braces

    Examples
    --------
    >>> strmap(int, "1")
    [1]
    >>> strmap(int, "1-2")
    [(1, 2)]
    >>> strmap(int, "1-")
    [(1, None)]
    >>> strmap(int, "1-", end=4)
    [(1, 4)]
    >>> strmap(int, "1-10[2-3]")
    [((1, 10), [(2, 3)])]
    """
    # Three regex alternatives (tried in order): "[..][..]", "..[..]", "..".
    if sep == "b":
        segment = re.compile(r"\[(.+)\]\[(.+)\]|(.+)\[(.+)\]|(.+)")
        sep1, sep2 = "[", "]"
    elif sep == "c":
        segment = re.compile(r"\{(.+)\}\{(.+)\}|(.+)\{(.+)\}|(.+)")
        sep1, sep2 = "{", "}"
    else:
        raise ValueError("strmap: unknown separator for the sequence")

    # Create list
    s = s.replace(" ", "")
    if len(s) == 0:
        return [None]
    elif s in ["-", ":"]:
        return [(start, end)]

    commas = s.split(",")

    # Collect all the comma separated quantities that
    # may be selected by [..,..]
    # Re-join segments whose brackets were split apart by the comma split,
    # so each element of `commas` is bracket-balanced on its own.
    i = 0
    while i < len(commas) - 1:
        if commas[i].count(sep1) == commas[i].count(sep2):
            i = i + 1
        else:
            # there must be more [ than ]
            commas[i] = commas[i] + "," + commas[i+1]
            del commas[i+1]

    # Check the last input...
    i = len(commas) - 1
    if commas[i].count(sep1) != commas[i].count(sep2):
        raise ValueError(f"Unbalanced string: not enough {sep1} and {sep2}")

    # Now we have a comma-separated list
    # with collected brackets.
    l = []
    for seg in commas:

        # Split it in groups of reg-exps
        m = segment.findall(seg)[0]

        if len(m[0]) > 0:
            # this is: [..][..]
            # Both sides recurse; every LHS value is paired with the full RHS.
            rhs = strmap(func, m[1], start, end, sep)
            for el in strmap(func, m[0], start, end, sep):
                l.append((el, rhs))

        elif len(m[2]) > 0:
            # this is: ..[..]
            l.append((strseq(func, m[2], start, end),
                      strmap(func, m[3], start, end, sep)))

        elif len(m[4]) > 0:
            # plain entry without sub-selection
            l.append(strseq(func, m[4], start, end))

    return l
def strseq(cast, s, start=None, end=None):
    """ Accept a string and return the casted tuples of content based on ranges.

    A plain value such as ``"3"`` is returned as ``cast("3")``.  A value
    containing ``":"`` (preferred) or ``"-"`` is split on that separator and
    returned as a tuple of casted parts; an empty leading/trailing part falls
    back to *start*/*end* respectively (``None`` parts stay ``None``).

    Parameters
    ----------
    cast : function
       parser of the individual elements
    s : str
       string with content

    Examples
    --------
    >>> strseq(int, "3")
    3
    >>> strseq(int, "3-6")
    (3, 6)
    >>> strseq(int, "3:2:7")
    (3, 2, 7)
    """
    if ":" in s:
        parts = [part.strip() for part in s.split(":")]
    elif "-" in s:
        parts = [part.strip() for part in s.split("-")]
    else:
        return cast(s)
    if len(parts[0]) == 0:
        parts[0] = start
    if len(parts[-1]) == 0:
        parts[-1] = end
    return tuple(None if part is None else cast(part) for part in parts)
def erange(start, step, end=None):
    """ Return a `range` including both endpoints.

    With two arguments, ``erange(start, end)`` equals ``range(start, end + 1)``;
    with three, ``erange(start, step, end)`` equals ``range(start, end + 1, step)``.
    (Also fixes the docstring typo "includede".)
    """
    if end is None:
        return range(start, step + 1)
    return range(start, end + 1, step)
def lstranges(lst, cast=erange, end=None):
    """ Convert a `strmap` list into expanded ranges.

    Recursively walks the nested tuples/lists produced by `strmap` and turns
    every range tuple into concrete values via *cast* (default `erange`).
    A bare ``None`` entry expands to ``cast(0, end)`` when *end* is given.
    """
    l = []
    # If an entry is a tuple, it means it is either
    #  a range 0-1 == tuple(0, 1), or
    #  a sub-range
    #    0[0-1], 0-1[0-1]
    if isinstance(lst, tuple):
        if len(lst) == 3:
            # (start, step, stop) triple — expand directly.
            l.extend(cast(*lst))
        else:
            # 2-tuple: either (range, sub-selection) or a plain (start, stop).
            head = lstranges(lst[0], cast, end)
            bot = lstranges(lst[1], cast, end)
            if isinstance(head, list):
                # Pair every expanded head value with the sub-selection.
                for el in head:
                    l.append([el, bot])
            elif isinstance(bot, list):
                l.append([head, bot])
            else:
                # Both scalars: this was a plain (start, stop) range.
                l.extend(cast(head, bot))
    elif isinstance(lst, list):
        # Flatten a list of entries, expanding each recursively.
        for lt in lst:
            ls = lstranges(lt, cast, end)
            if isinstance(ls, list):
                l.extend(ls)
            else:
                l.append(ls)
    else:
        # Scalar leaf; None means "everything up to end" when end is known.
        if lst is None and end is not None:
            return cast(0, end)
        return lst
    return l
def list2str(lst):
    """ Convert a list of integers into a compact string of ranges.

    Consecutive runs are collapsed to ``"a-b"``; isolated values stay as-is.
    The input is not modified.

    Examples
    --------
    >>> list2str([2, 4, 5, 6])
    "2, 4-6"
    >>> list2str([2, 4, 5, 6, 8, 9])
    "2, 4-6, 8-9"
    """
    ordered = sorted(lst)
    parts = []
    i = 0
    n = len(ordered)
    while i < n:
        # Extend j to the end of the consecutive run starting at i.
        j = i
        while j + 1 < n and ordered[j + 1] == ordered[j] + 1:
            j += 1
        if i == j:
            parts.append(str(ordered[i]))
        else:
            parts.append("{}-{}".format(ordered[i], ordered[j]))
        i = j + 1
    return ", ".join(parts)
# Function to retrieve an optional index from the
# filename
# file[0] returns:
# file, 0
# file returns:
# file, None
# file[0-1] returns
# file, [0,1]
def fileindex(f, cast=int):
    """ Split a filename with an optional trailing bracketed range.

    ``"file[1,2,3-6]"`` yields ``("file", [1, 2, 3, 4, 5, 6])``, while a
    plain ``"file"`` yields ``("file", None)``.  A single index collapses to
    a scalar instead of a one-element list.

    Parameters
    ----------
    f : str
       filename, optionally suffixed with ``[...]``
    cast : function
       the function to cast each bracketed element

    Examples
    --------
    >>> fileindex("Hello[0]")
    ("Hello", 0)
    >>> fileindex("Hello[0-2]")
    ("Hello", [0, 1, 2])
    """
    if "[" not in f:
        return f, None

    # Everything before the first "[" is the filename; the rest is the spec.
    fname, _, spec = f.partition("[")
    if spec[-1] == "]":
        spec = spec[:-1]

    expanded = lstranges(strmap(cast, spec))
    if len(expanded) == 1:
        return fname, expanded[0]
    return fname, expanded
def array_arange(start, end=None, n=None, dtype=int64):
    """ Create a single array concatenating a sequence of `numpy.arange` calls.

    Parameters
    ----------
    start : array_like
       a list of start elements for `numpy.arange`
    end : array_like
       a list of end elements (exclusive) for `numpy.arange`.
       This argument is not used if `n` is passed.
    n : array_like
       a list of counts of elements for `numpy.arange`.
       This is equivalent to ``end=start + n``.
    dtype : numpy.dtype
       the returned lists data-type

    Examples
    --------
    >>> array_arange([1, 5], [3, 6])
    array([1, 2, 5], dtype=int64)
    >>> array_arange([1, 6], n=[2, 2])
    array([1, 2, 6, 7], dtype=int64)
    """
    # Loop-free construction: build an array of ones and patch the first
    # element of every sub-range so that a cumulative sum reproduces each
    # arange back-to-back.
    if n is None:
        counts = asarray(end) - asarray(start)
    else:
        counts = asarray(n)

    # Empty sub-ranges would break the patching below, so drop them.
    nonzero = counts.nonzero()[0]
    if len(nonzero) == 0:
        return zeros(0, dtype=dtype)
    first = take(start, nonzero)
    counts = take(counts, nonzero)

    # All steps are 1 ...
    steps = ones(counts.sum(), dtype=dtype)
    # ... except at each sub-range boundary, where we jump to the new start
    # (correcting for the accumulated sum of the previous sub-range).
    offsets = cumsum(counts[:-1])
    steps[0] = first[0]
    steps[offsets] = first[1:] - first[:-1] - counts[:-1] + 1
    return cumsum(steps, dtype=dtype)
| lgpl-3.0 | -7,897,043,921,294,780,000 | 25.400631 | 83 | 0.50699 | false |
wowref/wowref.com | wowref/wotlk/dbc/lib/dbcfile.py | 1 | 3843 | #!/usr/bin/env python
import os
from struct import Struct
from .dtypes import *
UNICODE_BLANK = ''
class DBCRecord(object):
    """A simple object exposing one DBC row both as a mapping and as attributes.

    Values stored as ``bytes`` are decoded to UTF-8 strings on attribute
    access.  Fix: removed a stray debug ``print('hi')`` left in
    ``__getitem__``.
    """

    def __init__(self, d=None):
        # Raw field-name -> value mapping for this record.
        self.data = d

    def __repr__(self):
        return "<DBCRecord %r>" % self.data

    def __getitem__(self, item):
        # Mapping-style access; returns the raw (undecoded) value.
        return self.data[item]

    def __getattr__(self, item):
        # Only called for names not found normally (i.e. not 'data').
        item = self.data[item]
        if isinstance(item, bytes):
            item = item.decode('utf-8')
        return item
class DBCFile(object):
    """Base representation of a DBC file.

    Iterating an instance streams `DBCRecord` objects parsed from the file
    according to ``self.skeleton`` (a sequence of field descriptors; when
    absent, a default all-Int32 layout is derived from the header).
    """
    # WDBC header: 4-byte signature + 4 int32s
    # (records, fields, record_size, string_block_size).
    header_struct = Struct('4s4i')

    def __init__(self, filename, skele=None, verbose=False):
        self.filename = filename
        # A subclass may define `skeleton` at class level; only fall back
        # to the constructor argument when it does not.
        if not hasattr(self, 'skeleton'):
            self.skeleton = skele
        self.__create_struct()

    def __iter__(self):
        """Iterated based approach to the dbc reading."""
        if not os.path.exists(self.filename):
            raise Exception("File '%s' not found" % (self.filename,))

        f = open(self.filename, 'rb')
        f_read = f.read
        # Read in header
        sig, records, fields, record_size, string_block_size = \
            self.header_struct.unpack(f_read(20))

        # Check signature
        if sig != b'WDBC':
            f.close()
            raise Exception('Invalid file type')

        self.records = records
        self.fields = fields
        self.record_size = record_size
        self.string_block_size = string_block_size

        if not self.struct:
            # If the struct doesn't exist, create a default one
            self.skeleton = Array('data', Int32, fields)
            self.__create_struct()

        # Ensure that struct and record_size is the same
        if self.struct.size != record_size:
            f.close()
            raise Exception('Struct size mismatch (%d != %d)' %
                            (self.struct.size, record_size))

        struct_unpack = self.struct.unpack

        # Read in string block (stored after all fixed-size records).
        f.seek(20 + records * record_size)
        self.string_block = f_read(string_block_size)
        f.seek(20)

        try:
            for i in range(records):
                yield self.__process_record(struct_unpack(f_read(record_size)))
        finally:
            # Always close, even if the consumer abandons the generator.
            f.close()

    def __create_struct(self):
        """Creates a Struct from the Skeleton."""
        if self.skeleton:
            s = ['<']
            for item in self.skeleton:
                if isinstance(item, Array):
                    s.extend(x.c for x in item.items)
                else:
                    s.append(item.c)
            self.struct = Struct(''.join(s))
        else:
            self.struct = None

    def __process_record(self, data):
        """Processes a record (row of data)."""
        output = {}
        data_iter = iter(data)
        for field in self.skeleton:
            if isinstance(field, Array):
                output[field.name] = [
                    self.__process_field(item, next(data_iter)) for item in field.items
                    if not isinstance(item, PadByte)
                ]
            elif not isinstance(field, PadByte):
                output[field.name] = self.__process_field(field, next(data_iter))
        return DBCRecord(output)

    def __process_field(self, _type, data):
        # String fields hold an offset into the shared string block;
        # everything else passes through unchanged.
        output = data
        if isinstance(_type, String):
            if data == 0:
                output = UNICODE_BLANK
            else:
                # Offset must be in range and preceded by a NUL terminator.
                if data > self.string_block_size or self.string_block[data - 1] != 0:
                    raise Exception('Invalid string')
                output = self.string_block[data:self.string_block.find(0, data)]
                if isinstance(output, bytes):
                    output = output.decode('utf-8')
        return output
| mit | 2,913,251,282,306,833,000 | 30.243902 | 87 | 0.53604 | false |
socialwifi/jsonapi-requests | tests/test_auth.py | 1 | 1174 | from unittest import mock
import pytest
from flask import Flask
from jsonapi_requests import configuration
from jsonapi_requests import auth
from jsonapi_requests import request_factory
@pytest.fixture
def api_configuration():
    # jsonapi-requests configuration wired to forward the Flask request's auth.
    return configuration.Factory({'API_ROOT': 'http://testing', 'AUTH': auth.FlaskForwardAuth()}).create()
@pytest.fixture
def flask_app():
    # Bare Flask app used only to provide a request context in tests.
    app = Flask(__name__)
    yield app
@pytest.fixture
def valid_response():
    # Minimal successful HTTP response stub with an empty JSON body.
    response = mock.Mock(status_code=200)
    response.json.return_value = {}
    return response
@pytest.fixture
def request_send_mock(valid_response):
    # Intercept all outgoing requests so tests can inspect what was sent.
    with mock.patch('requests.sessions.Session.send') as mocked:
        mocked.return_value = valid_response
        yield mocked
def test_flask_auth_forward(api_configuration, request_send_mock, flask_app):
    # The Authorization header of the incoming Flask request must be
    # forwarded verbatim on the outgoing API request.
    with flask_app.test_request_context(headers={'Authorization': 'Bearer 11111111-1111-1111-1111-111111111111'}):
        request_factory.ApiRequestFactory(api_configuration).get('endpoint')
    args, kwargs = request_send_mock.call_args
    headers = args[0].headers
    assert 'Bearer 11111111-1111-1111-1111-111111111111' in headers['Authorization']
| bsd-3-clause | -3,956,231,088,293,769,000 | 26.952381 | 114 | 0.735945 | false |
OCA/event | event_project/tests/test_event_project.py | 1 | 2779 | # Copyright 2016 Pedro M. Baeza <[email protected]>
# Copyright 2017 David Vidal <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields
from odoo.tests import common
from datetime import timedelta, date
class TestEventProject(common.SavepointCase):
    """Tests for the event <-> project link of the event_project module."""

    @classmethod
    def setUpClass(cls):
        # Shared fixture: two projects, one event bound to the first project,
        # and a task on the second project (used by the project-change test).
        super(TestEventProject, cls).setUpClass()
        cls.date = {
            'begin': fields.Date.to_string(date.today()),
            'end': fields.Date.to_string(date.today() + timedelta(days=7)),
            'begin2': fields.Date.to_string(date.today() + timedelta(days=1)),
            'end2': fields.Date.to_string(date.today() + timedelta(days=9)),
        }
        cls.project = cls.env['project.project'].create({
            'name': 'Test project',
        })
        cls.project_2 = cls.env['project.project'].create({
            'name': 'Test project 2',
        })
        cls.event = cls.env['event.event'].create({
            'name': 'Test event with project',
            'date_begin': cls.date['begin'],
            'date_end': cls.date['end'],
            'project_id': cls.project.id,
        })
        cls.task = cls.env['project.task'].create({
            'name': 'Task in project 2',
            'project_id': cls.project_2.id,
        })

    def test_01_defaults(self):
        """Linked project inherits calculation type, date and name."""
        self.assertEqual(self.event.project_id.calculation_type, 'date_end')
        self.assertEqual(self.event.project_id.date,
                         self.event.date_begin.date())
        self.assertEqual(self.event.display_name, self.event.project_id.name)

    def test_02_project_recalculation(self):
        """Project date/name follow changes on the event."""
        self.event.date_begin = self.date['begin2']
        self.event.date_end = self.date['end2']
        self.event.name = 'Event name changed'
        self.assertEqual(self.event.project_id.date,
                         self.event.date_begin.date())
        self.assertEqual(self.event.display_name, self.event.project_id.name)

    def test_03_project_change(self):
        """Reassigning the project copies it instead of linking directly."""
        self.event.project_id = self.project_2
        self.event.refresh()
        self.assertTrue(self.event.project_id)
        self.assertNotEqual(self.event.project_id, self.project_2)
        self.assertEqual(self.event.project_id.calculation_type, 'date_end')
        self.assertEqual(self.event.project_id.date,
                         self.event.date_begin.date())
        self.assertEqual(self.event.display_name, self.event.project_id.name)
        self.assertEqual(self.event.count_tasks, 1)

    def test_04_cancel_and_draft_event(self):
        """Cancelling archives the project; redrafting restores it."""
        self.event.button_cancel()
        self.assertFalse(self.event.project_id.active)
        self.event.button_draft()
        self.assertTrue(self.event.project_id.active)
| agpl-3.0 | 5,265,019,467,606,680,000 | 40.477612 | 78 | 0.616409 | false |
wmayner/pyphi | test/test_labels.py | 1 | 1069 | import pytest
from pyphi.labels import NodeLabels
@pytest.fixture
def nl():
    # Standard 3-node labelling used by most tests below.
    return NodeLabels(("A", "B", "C"), (0, 1, 2))
def test_defaults():
    """Passing None generates default "n<i>" labels."""
    nd = NodeLabels(None, (0, 1, 2))
    assert nd.labels == ("n0", "n1", "n2")
def test_labels2indices(nl):
    """Labels map back to their node indices."""
    assert nl.labels2indices(("A", "B")) == (0, 1)
    assert nl.labels2indices(("A", "C")) == (0, 2)
def test_indices2labels(nl):
    """Indices map to their labels."""
    assert nl.indices2labels((0, 1)) == ("A", "B")
    assert nl.indices2labels((0, 2)) == ("A", "C")
def test_coerce_to_indices(nl):
    """Coercion accepts labels or indices, sorts, and rejects mixed input."""
    assert nl.coerce_to_indices(("B", "A")) == (0, 1)
    assert nl.coerce_to_indices((0, 2, 1)) == (0, 1, 2)
    assert nl.coerce_to_indices(()) == ()

    with pytest.raises(ValueError):
        nl.coerce_to_indices((0, "A"))
def test_iterable(nl):
    # NodeLabels iterates over its labels.
    assert [l for l in nl] == ["A", "B", "C"]
def test_len(nl):
    # len() is the number of labels.
    assert len(nl) == 3
def test_contains(nl):
    # Membership tests against the label set.
    assert "B" in nl
    assert "D" not in nl
def test_instantiation_from_other_node_labels_object(nl):
    # Constructing from an existing NodeLabels yields an equal object.
    copied = NodeLabels(nl, (0, 1, 2))
    assert copied == nl
| gpl-3.0 | 318,098,530,878,710,140 | 20.38 | 57 | 0.57811 | false |
keith-lewis100/pont-workbench | main/data_models.py | 1 | 7503 | #_*_ coding: UTF-8 _*_
import logging
from google.appengine.api import users
from google.appengine.ext import ndb
import db
import mailer
import renderers
import urls
from role_types import RoleType
logger = logging.getLogger('model')
workbench = db.WorkBench.get_or_insert('main')
committee_labels=[
('AMB', 'Ambulance'),
('PHC', 'PrimaryHealth'),
('SEC', 'SecondaryHealth'),
('LIV', 'Livelihoods'),
('ENG', 'Engineering'),
('EDU', 'Education'),
('CHU', 'Churches'),
('WEC', 'Wildlife Centre'),
('GEN', 'General')]
# Bootstrap: on a fresh datastore, seed an initial user with the
# USER_ADMIN role so someone can administer the system.
if db.User.query().count() == 0:
    user = db.User();
    user.name = 'Keith'
    user.email = '[email protected]'
    key = user.put()
    role = db.Role(parent=key)
    role.type_index = RoleType.USER_ADMIN
    role.committee = ''
    role.put()
class Committee:
    """In-memory stand-in for a committee "entity".

    Committees come from the fixed ``committee_labels`` list rather than the
    datastore, so this class mimics just enough of the ndb entity/key API
    (``kind``/``urlsafe``/``parent`` plus a ``key`` attribute) for the rest
    of the code to treat committees like stored entities.
    """
    def __init__(self, id, name):
        self.id = id
        self.name = name
        # The committee acts as its own key.
        self.key = self

    def kind(self):
        # Mirrors ndb.Key.kind().
        return 'Committee'

    def urlsafe(self):
        # Mirrors ndb.Key.urlsafe(); the short code doubles as the id.
        return self.id

    def parent(self):
        # Committees are top-level: no parent key.
        return None
def get_committee_list():
    """Build a Committee wrapper for every entry in committee_labels."""
    committees = []
    for code, label in committee_labels:
        committees.append(Committee(code, label))
    return committees
def lookup_committee(c_id):
    """Return the Committee matching c_id, or None when unknown."""
    return next((Committee(code, label) for code, label in committee_labels
                 if code == c_id), None)
def get_next_ref():
    """Allocate the next sequential reference number from the workbench.

    NOTE(review): read-modify-write on the shared WorkBench entity without a
    transaction; concurrent requests could mint duplicate refs — verify.
    """
    ref = workbench.last_ref_id + 1
    workbench.last_ref_id = ref
    workbench.put()
    return ref
def lookup_entity(db_id):
    """Fetch the datastore entity identified by a urlsafe key string.

    Returns None when db_id is None/empty (create_key yields None then);
    previously this crashed with AttributeError on ``None.get()``.
    """
    key = create_key(db_id)
    if key is None:
        return None
    return key.get()
def create_key(db_id):
    """Decode a urlsafe datastore id into an ndb.Key (None for missing/empty)."""
    if not db_id:
        return None
    return ndb.Key(urlsafe=db_id)
def get_parent(entity):
    """Return the logical parent of an entity.

    Datastore parents are resolved via the key hierarchy; Funds are
    top-level in the datastore but logically belong to a Committee.
    """
    parent_key = entity.key.parent()
    if parent_key is not None:
        return parent_key.get()
    if entity.key.kind() == 'Fund':
        return lookup_committee(entity.committee)
    return None
def lookup_user_by_email(email):
    """Find the User with the given email, or return an unsaved placeholder.

    The placeholder is NOT stored; its key is None, which callers use to
    detect an unregistered user.
    """
    user = db.User.query(db.User.email == email).get()
    if user is None:
        user = db.User()
        user.name = email
    return user
def lookup_current_user():
    """Map the signed-in Google account to our User record (or placeholder)."""
    email = users.get_current_user().email()
    return lookup_user_by_email(email)
def logout_url():
    """URL that signs the user out and returns them to the site root."""
    return users.create_logout_url('/')
def calculate_transferred_amount(payment):
    """Render a payment's transferred amount as "£X/Y Ush".

    Returns "" when the payment, its transfer, or the transfer's exchange
    rate is missing, or when the currency is not recognised (previously an
    unknown currency raised NameError on the undefined locals).
    """
    if payment is None or payment.transfer is None:
        return ""
    transfer = payment.transfer.get()
    if transfer.exchange_rate is None:
        return ""
    requested_amount = payment.amount.value
    if payment.amount.currency == 'sterling':
        sterling = requested_amount
        shillings = int(requested_amount * transfer.exchange_rate)
    elif payment.amount.currency == 'ugx':
        sterling = int(requested_amount / transfer.exchange_rate)
        shillings = requested_amount
    else:
        # Unknown currency: nothing sensible to render.
        return ""
    return u"£{:,}".format(sterling) + "/" + u"{:,}".format(shillings) + ' Ush'
STATE_CLOSED = 0
def email_entity_creator(entity, user, message):
    """Email the entity's creator about a change made by another user.

    No-ops when the entity has no creator or when the actor is the creator
    themselves.  Consistency fix: use the module-level ``logger`` instead of
    the root ``logging`` module.
    """
    if not hasattr(entity, 'creator'):
        return
    if user.key == entity.creator:
        # Don't notify people about their own actions.
        logger.info('not sending email same user %s', user.name)
        return
    creator = entity.creator.get()
    entity_type = entity.key.kind()
    entity_ref = renderers.render_link(entity.name, urls.url_for_entity(entity, external=True))
    content = renderers.render_single_column((entity_type, entity_ref, message, user.name),
                ('EntityType', 'Entity', 'Message', 'User'))
    mailer.send_email('Workbench Entity State Change', content, [creator.email])
class Model(object):
    """Controller/helper wrapping one datastore entity for the UI layer.

    Bundles the entity, the acting user, per-action WTForms, validation
    errors and state handling, and records audit trail entries (optionally
    emailing the entity's creator) for each performed action.
    """
    def __init__(self, entity, committee=None, table=None):
        self.entity = entity
        self.committee = committee  # committee code used for role scoping
        self.table = table          # ndb model class backing the entity
        self.user = lookup_current_user()
        self.forms = {}             # action_name -> form instance
        self.errors=[]
        self.next_entity = None
        self.entity_deleted = False
        self.show_closed = False    # list closed instead of open entities

    def get_state(self):
        """Current workflow state; 0 (closed) for stateless entities."""
        return getattr(self.entity, 'state_index', 0)

    def user_has_role(self, role_type):
        """True if the current user holds role_type (committee-scoped for
        COMMITTEE_ADMIN).  Unregistered users (key is None) have no roles."""
        if self.user.key is None:
            return False
        query = db.Role.query(ancestor=self.user.key).filter(db.Role.type_index==role_type)
        if role_type == RoleType.COMMITTEE_ADMIN:
            query = query.filter(db.Role.committee==self.committee)
        return query.iter().has_next()

    def lookup_users_with_role(self, role_type):
        """All users holding role_type (committee-scoped for COMMITTEE_ADMIN)."""
        query = db.Role.query(db.Role.type_index==role_type)
        if role_type == RoleType.COMMITTEE_ADMIN:
            query = query.filter(db.Role.committee==self.committee)
        return query.map(lambda r: r.key.parent().get())

    def add_form(self, action_name, form):
        """Register the form used by the named action."""
        self.forms[action_name] = form

    def get_form(self, action_name):
        """Form previously registered for action_name, or None."""
        return self.forms.get(action_name)

    def is_stateful(self):
        """True when the backing table tracks a workflow state."""
        return hasattr(self.table, 'state_index')

    def apply_query(self, entity_query):
        """Fetch entities, filtering open/closed by the show_closed flag
        when the table is stateful (state_index 0 means closed)."""
        if not hasattr(self.table, 'state_index'):
            return entity_query.fetch()
        if self.show_closed:
            return entity_query.filter(self.table.state_index == 0).fetch()
        else:
            return entity_query.filter(self.table.state_index > 0).fetch()

    def perform_create(self, action_name):
        """Validate the action's form, store the new entity (stamping its
        creator) and audit it.  Returns True on success."""
        form = self.get_form(action_name)
        if not form.validate():
            return False
        if not self.check_uniqueness(form):
            return False
        entity = self.entity
        form.populate_obj(entity)
        if hasattr(entity, 'creator'):
            entity.creator = self.user.key
        entity.put()
        self.audit(action_name, "Create performed")
        return True

    def check_uniqueness(self, form):
        """Reject a form whose new name collides with a sibling entity.

        Records the error on the form's name field and returns False on a
        collision; True otherwise (including for forms without a name)."""
        if not hasattr(form, 'name'):
            return True
        name = form.name.data
        if name == self.entity.name:
            return True
        parent_key = None
        if self.entity.key:
            parent_key = self.entity.key.parent()
        existing = self.table.query(self.table.name == name, ancestor=parent_key).count(1)
        if existing > 0:
            form.name.errors = [ 'Entity named %s already exists' % name ]
            return False
        return True

    def perform_update(self, action_name):
        """Validate the action's form, save changes and audit.  True on success."""
        form = self.get_form(action_name)
        if not form.validate():
            return False
        if not self.check_uniqueness(form):
            return False
        form.populate_obj(self.entity)
        self.entity.put()
        self.audit(action_name, "Update performed")
        return True

    def perform_close(self, action_name):
        """Move the entity to the closed state, audit and notify the creator."""
        self.entity.state_index = STATE_CLOSED
        self.entity.put()
        return self.email_and_audit(action_name, "%s performed" % action_name.title())

    def add_error(self, error_text):
        """Queue an error message for display."""
        self.errors.append(error_text)

    def audit(self, action_name, message, entity=None, parent_key=None):
        """Write an AuditRecord for the given (default: wrapped) entity."""
        if not entity:
            entity = self.entity
        audit = db.AuditRecord()
        audit.entity = entity.key
        audit.parent = parent_key
        audit.user = self.user.key
        audit.action = action_name
        audit.message = message
        audit.put()
        return audit

    def email_and_audit(self, action_name, message):
        """Audit the action and email the entity's creator about it."""
        audit = self.audit(action_name, message)
        email_entity_creator(self.entity, self.user, message)
        return audit

    def __repr__(self):
        return 'Model(%s, %s)' % (repr(self.entity), self.committee)
| mit | 7,102,527,674,163,779,000 | 30.128631 | 95 | 0.603439 | false |
josdejong/mathjs | test/benchmark/matrix_operations_python.py | 1 | 4102 | # install numpy
#
# sudo apt install python-pip
# pip install --user --upgrade pip
# pip install --user numpy scipy matplotlib ipython jupyter pandas sympy nose
#
import sys
import timeit
import numpy as np
print (sys.version_info)
# fiedler matrix 25 x 25
A = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
[ 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],
[ 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22],
[ 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21],
[ 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20],
[ 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
[ 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18],
[ 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17],
[ 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
[ 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
[10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14],
[11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13],
[12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
[13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
[14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
[15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
[16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8],
[17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7],
[18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6],
[19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5],
[20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4],
[21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3],
[22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2],
[23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1],
[24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
]
iterations = 10000
ms = 1000000
def add():
    # Element-wise sum of the module-level Fiedler matrix with itself.
    return np.add(A, A)
def multiply():
    # Matrix product A @ A of the module-level Fiedler matrix.
    return np.matmul(A, A)
def transpose():
    # Transpose of the module-level Fiedler matrix.
    return np.transpose(A)
def det():
    # Determinant of the module-level Fiedler matrix.
    return np.linalg.det(A)
# Benchmark each operation and report the mean duration per call in
# microseconds (timeit returns total seconds for `number` calls).
print('Add duration {} microseconds'.format(timeit.timeit(add, number=iterations) * ms / (iterations)))
print('Multiply duration {} microseconds'.format(timeit.timeit(multiply, number=iterations) * ms / iterations))
print('Transpose duration {} microseconds'.format(timeit.timeit(transpose, number=iterations) * ms / iterations))
print('Det duration {} microseconds'.format(timeit.timeit(det, number=iterations) * ms / iterations))

# run again with more iterations to see whether we get the same sort of durations
iterations2 = iterations * 10

print('')
print('second round...')
print('Add duration {} microseconds'.format(timeit.timeit(add, number=iterations2) * ms / iterations2))
print('Multiply duration {} microseconds'.format(timeit.timeit(multiply, number=iterations2) * ms / iterations2))
print('Transpose duration {} microseconds'.format(timeit.timeit(transpose, number=iterations2) * ms / iterations2))
print('Det duration {} microseconds'.format(timeit.timeit(det, number=iterations2) * ms / iterations2))
| apache-2.0 | -5,856,528,032,766,062,000 | 57.6 | 115 | 0.478303 | false |
bmng-dev/PyBitmessage | src/debug.py | 1 | 2373 | # -*- coding: utf-8 -*-
'''
Levels:
DEBUG Detailed information, typically of interest only when diagnosing problems.
INFO Confirmation that things are working as expected.
WARNING An indication that something unexpected happened, or indicative of some problem in the
near future (e.g. ‘disk space low’). The software is still working as expected.
ERROR Due to a more serious problem, the software has not been able to perform some function.
CRITICAL A serious error, indicating that the program itself may be unable to continue running.
'''
import logging
import logging.config
import os
import sys
import state
log_level = 'WARNING'
def log_uncaught_exceptions(ex_cls, ex, tb):
    # sys.excepthook replacement: route uncaught exceptions into the log.
    logging.getLogger(__name__).error('Unhandled exception', exc_info=(ex_cls, ex, tb))
def configure_logging():
    # Prefer a user-supplied logging.dat (fileConfig format) from appdata;
    # fall back to a built-in rotating-file configuration otherwise.
    have_logging = False
    try:
        logging.config.fileConfig(os.path.join (state.appdata, 'logging.dat'))
        have_logging = True
        print "Loaded logger configuration from %s" % (os.path.join(state.appdata, 'logging.dat'))
    except:
        if os.path.isfile(os.path.join(state.appdata, 'logging.dat')):
            print "Failed to load logger configuration from %s, using default logging config" % (os.path.join(state.appdata, 'logging.dat'))
            print sys.exc_info()
        else:
            # no need to confuse the user if the logger config is missing entirely
            print "Using default logger configuration"

    # Always capture uncaught exceptions in the log.
    sys.excepthook = log_uncaught_exceptions

    if have_logging:
        return

    # Default config: everything at module `log_level` or above goes to a
    # 2 MiB rotating debug.log in the application data directory.
    logging.config.dictConfig({
        'version': 1,
        'formatters': {
            'default': {
                'format': '%(asctime)s.%(msecs)03d - %(levelname)s - %(name)s - %(threadName)s - %(message)s',
                'datefmt': '%Y-%m-%d %H:%M:%S',
            },
        },
        'handlers': {
            'file': {
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'default',
                'level': log_level,
                'filename': state.appdata + 'debug.log',
                'maxBytes': 2097152, # 2 MiB
                'backupCount': 1,
                'encoding': 'UTF-8',
            },
        },
        'root': {
            'level': 'NOTSET',
            'handlers': ['file'],
        },
    })
| mit | -3,289,994,757,642,380,300 | 34.893939 | 140 | 0.580414 | false |
Krissbro/LondonGaymers | cleverbot/cleverbot.py | 1 | 2825 | try:
from cleverbot import Cleverbot as _Cleverbot
if 'API_URL' in _Cleverbot.__dict__:
_Cleverbot = False
except:
_Cleverbot = False
from discord.ext import commands
from cogs.utils import checks
from .utils.dataIO import dataIO
import os
import discord
import asyncio
class Cleverbot():
    """Cleverbot

    Discord cog bridging the `cleverbot` library: responds to the
    [p]cleverbot command and (optionally) to direct mentions of the bot.
    """
    def __init__(self, bot):
        self.bot = bot
        self.clv = _Cleverbot('Red-DiscordBot')
        # Persisted settings; currently only the mention-reply toggle.
        self.settings = dataIO.load_json("data/cleverbot/settings.json")

    @commands.group(no_pm=True, invoke_without_command=True)
    async def cleverbot(self, *, message):
        """Talk with cleverbot"""
        result = await self.get_response(message)
        await self.bot.say(result)

    @cleverbot.command()
    @checks.is_owner()
    async def toggle(self):
        """Toggles reply on mention"""
        self.settings["TOGGLE"] = not self.settings["TOGGLE"]
        if self.settings["TOGGLE"]:
            await self.bot.say("I will reply on mention.")
        else:
            await self.bot.say("I won't reply on mention anymore.")
        dataIO.save_json("data/cleverbot/settings.json", self.settings)

    async def get_response(self, msg):
        """Ask cleverbot in a thread; fall back to a stock reply on timeout."""
        question = self.bot.loop.run_in_executor(None, self.clv.ask, msg)
        try:
            answer = await asyncio.wait_for(question, timeout=10)
        except asyncio.TimeoutError:
            answer = "We'll talk later..."
        return answer

    async def on_message(self, message):
        # Reply when the bot is mentioned at the start of a server message,
        # unless the toggle is off, the channel is private, the author is
        # blocked, or the message is the bot's own.
        if not self.settings["TOGGLE"] or message.channel.is_private:
            return

        if not self.bot.user_allowed(message):
            return

        if message.author.id != self.bot.user.id:
            mention = message.server.me.mention

            if message.content.startswith(mention):
                content = message.content.replace(mention, "").strip()
                await self.bot.send_typing(message.channel)
                response = await self.get_response(content)
                await self.bot.send_message(message.channel, response)
def check_folders():
    # Ensure the cog's data directory exists.
    if not os.path.exists("data/cleverbot"):
        print("Creating data/cleverbot folder...")
        os.makedirs("data/cleverbot")
def check_files():
    # Seed the settings file with defaults when missing or corrupt.
    f = "data/cleverbot/settings.json"
    data = {"TOGGLE" : True}
    if not dataIO.is_valid_json(f):
        dataIO.save_json(f, data)
dataIO.save_json(f, data)
def setup(bot):
if _Cleverbot is False:
raise RuntimeError("Your cleverbot library is either missing or not "
"up to date. Please do\n"
"[p]debug bot.pip_install('cleverbot')\n"
"and restart Red once you get a response.\n"
"Then [p]load cleverbot")
check_folders()
check_files()
n = Cleverbot(bot)
bot.add_cog(n)
| gpl-3.0 | 1,294,411,271,893,057,800 | 33.036145 | 77 | 0.603894 | false |
Koodous/androguard-yara | download_androguard_report.py | 1 | 2918 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2015. The Koodous Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import requests
import argparse
import json
__author__ = 'A.Sánchez <[email protected]> && xgusix'
def download_report(sha256, auth, dst):
    """
    Function to download and save the Androguard report from Koodous.

    Returns True when the report was fetched and written to `dst`, False on
    failure, and None when no auth token was supplied.
    """
    if not auth:
        print("Please, provide your token!")
        return
    url = 'https://api.koodous.com/apks/{}/analysis'.format(sha256)
    data = dict()
    response = requests.get(url=url, headers={"Authorization": "Token {}".format(auth)})
    #Check if the APK is in the database
    # 405: APK known but not yet analysed; 404: APK unknown to Koodous.
    if response.status_code == 405:
        print ("Sorry, this APK does not have a report yet, you can request it "
               "via the Koodous website.")
    elif response.status_code == 404:
        print ("Sorry, we don\'t have this APK in Koodous. You can share with "
               "the community through our website.")
    rt = False
    if response.status_code == 200:
        rt = True
        data = response.json()
        try:
            # Only the 'androguard' section of the analysis is saved.
            json.dump(data.get('androguard', None), open(dst, 'w'))
            print "Report created in {}".format(dst)
        except Exception, e:
            print "There was an error writing the report: {}".format(e)
            rt = False
    return rt
def main():
    # Command-line front end: parse arguments and fetch one report.
    parser = argparse.ArgumentParser(
        description="Tool to download reports from Koodous")
    parser.add_argument('-s', '--sha256', action='store',
                        dest='sha256')
    parser.add_argument('-o', '--output', action='store', dest='filename',
                        help=("File to dump the downloaded report, by default: "
                              "<sha256>-report.json"))
    parser.add_argument('-a', '--auth', action='store', dest='auth',
                        help=("Authorization token for Koodous API"))
    args = parser.parse_args()
    # Both the hash and the API token are mandatory.
    if not args.sha256 or not args.auth:
        print "I need at least a SHA256 hash and your Koodous API token!"
        parser.print_help()
        return
    # Default output name is derived from the hash unless -o overrides it.
    report_name = "{}-report.json".format(args.sha256)
    if args.filename:
        report_name = args.filename
    success = download_report(sha256=args.sha256, auth=args.auth, dst=report_name)
    if success:
        print "Androguard report saved in {}".format(report_name)
if __name__ == '__main__':
    main()
| apache-2.0 | -8,956,185,248,688,753,000 | 31.411111 | 88 | 0.63387 | false |
TUBvision/hrl | lib/graphics/graphics.py | 1 | 12520 | """
This is the HRL submodule for handling graphics devices and OpenGL. Graphics
devices in HRL instantiate the 'Graphics' abstract class, which defines the
common functions required for displaying greyscale images.
Image presentation in HRL can be understood as a multi step process as follows:
Bitmap (The image written in an 8 bit, 4 channel format)
-> Greyscale Array (A numpy array of doubles between 0 and 1)
-> Processed Greyscale Array (A Gresycale Array remapped with a lookup table)
-> Display List (An index to a stored texture in graphical memory)
-> Texture (A python class instance which can be drawn)
i) The conversion of Bitmaps to Greyscale arrays is handled by functions in
'hrl.extra' Where possible, it is recommended to bypass this step and work
directly with numpy arrays.
ii) The conversion of Greyscale Arrays to Processed Greyscale Arrays is handled by
the base 'hrl' class, and consists primarily of gamma correction and contrast
range selection.
iii) Saving a Processed Greyscale Array into graphics memory and interacting
with it as a Texture object is handled in this module.
The 'Texture' class is a wrapper for certain OpenGL functions designed to
simplify the display of individual 2d images. The sole method of the Texture
class is 'draw'.
Texture objects are not meant to be created on their own, but are instead
created via the 'newTexture' method of Graphics. Graphics.newTexture will take
a given Processed Greyscale Array (with other optional arguments as well), and
return it as Texture object designed to be shown on the particular Graphics
object.
The openGL code was based largely on a great tutorial by a mysterious tutor
here: http://disruption.ca/gutil/introduction.html
"""
import OpenGL.GL as gl
import pygame as pg
import numpy as np
import abc
### Classes ###
## Graphics Class ##
class Graphics(object):
    """
    The Graphics abstract base class. New graphics hardware must instantiate
    this class. The key method is 'greyToChannels', which defines how to
    represent a greyscale value between 0 and 1 as a 4-tuple (r,g,b,a), so that
    the given grey value is correctly on the Graphics backend.
    """
    __metaclass__ = abc.ABCMeta

    # Abstract Methods #

    # NOTE(review): documented as abstract but not decorated with
    # @abc.abstractmethod, so subclasses are not actually forced to override
    # it; the base implementation just returns None.
    def greyToChannels(self,gry):
        """
        Converts a single greyscale value into a 4 colour channel representation
        specific to self (the graphics backend).

        Parameters
        ----------
        gry : The grey value

        Returns
        -------
        (r,g,b,a) the grey represented as a corresponding 4-tuple
        """
        return

    # Concrete Methods #

    def __init__(self,w,h,bg,fs=False,db=True,lut=None):
        """
        The Graphics constructor predefines the basic OpenGL initializations
        that must be performed regardless of the specific backends.

        Parameters
        ----------
        w : The width (in pixels) of the openGL window
        h : The height (in pixels) of the openGL window
        bg : The default background grey value (between 0 and 1)
        fs : Enable fullscreen display (Boolean) Default: False
        db : Enable double buffering (Boolean) Default: True
        lut : Optional path of a gamma lookup-table file. Default: None

        Returns
        -------
        Graphics object
        """
        # Here we can add other options like fullscreen
        dbit = pg.OPENGL
        if db: dbit = dbit | pg.DOUBLEBUF
        if fs: dbit = dbit | pg.FULLSCREEN
        screen = pg.display.set_mode((w,h), dbit)
        pg.mouse.set_visible(False)
        # Disables this thing
        gl.glDisable(gl.GL_DEPTH_TEST)
        # Set Matrix style coordinate system.
        # (origin top-left, y grows downwards, matching image row order)
        gl.glMatrixMode(gl.GL_PROJECTION)
        gl.glLoadIdentity();
        gl.glOrtho(0,w,h,0,-1,1)
        gl.glMatrixMode(gl.GL_MODELVIEW)
        # Enable texturing
        gl.glEnable(gl.GL_TEXTURE_2D)
        # Enable blending
        gl.glEnable(gl.GL_BLEND)
        # Blend settings. Blending is unrelated to e.g. magnification.
        # Blending is how the colours from transluscent objects are
        # combined, and is therefore largely irrelevant.
        gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
        # Gamma Function Correction
        # _gammainv maps a requested grey level through the inverse gamma
        # lookup table; it is the identity when no LUT file is supplied.
        self._lut = None
        self._gammainv = lambda x: x
        if lut != None:
            print "..using look-up table: %s" % lut
            self._lut = np.genfromtxt(lut,skip_header=1)
            # LUT column 0 = desired intensity, column 1 = value to emit;
            # np.interp linearly interpolates between the table rows.
            self._gammainv = lambda x: np.interp(x,self._lut[:,0],self._lut[:,1])
        # Here we change the default color
        self.changeBackground(bg)
        self.flip()

    def newTexture(self,grys0,shape='square'):
        """
        Given a numpy array of values between 0 and 1, returns a new
        Texture object. The texture object comes equipped with the draw
        method for obvious purposes.

        NB: Images in HRL are represented in matrix style coordinates. i.e. the
        origin is in the upper left corner, and increases to the right and
        downwards.

        Parameters
        ----------
        grys : The greyscale numpy array
        shape : The shape to 'cut out' of the given greyscale array. A square
            will render the entire array. Available: 'square', 'circle'
            Default: 'square'

        Returns
        -------
        Texture object
        """
        grys = np.flipud(grys0) # flipping up-down necessary
        grys = self._gammainv(grys) # added gamma correction
        # NOTE(review): grys[::-1,] reverses the rows again, undoing the
        # flipud above before the channel conversion - confirm intended.
        byts = channelsToInt(self.greyToChannels(grys[::-1,])).tostring()
        wdth = len(grys[0])
        hght = len(grys[:,0])
        return Texture(byts,wdth,hght,shape)

    def flip(self,clr=True):
        """
        Flips in the image backbuffer. In general, one will want to draw
        a set of Textures and then call flip to display them all at once.

        Takes a clr argument which causes the back buffer to clear after
        the flip. When off, textures will be drawn on top of the current back
        buffer. By default the back buffer will be cleared automatically, but in
        performance sensitive scenarios it may be worth turning this off.

        Parameters
        ----------
        clr : Whether to clear the back buffer after flip. Default: True
        """
        pg.display.flip()
        if clr: gl.glClear(gl.GL_COLOR_BUFFER_BIT)

    def changeBackground(self,bg):
        """
        Changes the current background grey value.

        Parameters
        ----------
        bg : The new gray value (between 0 and 1)
        """
        # glClearColor expects floats in [0,1]; channels are 8-bit values.
        mx = float(2**8-1)
        (r,g,b,a) = self.greyToChannels(self._gammainv(bg))
        gl.glClearColor(r/mx,g/mx,b/mx,a/mx)
        gl.glClear(gl.GL_COLOR_BUFFER_BIT)
## Texture Class ##
class Texture:
    """
    The Texture class is a wrapper object for a compiled texture in
    OpenGL. It's only method is the draw method.
    """
    def __init__(self,byts,wdth,hght,shape):
        """
        The internal constructor for Textures. Users should use
        Graphics.newTexture to create textures rather than this constructor.

        Parameters
        ----------
        byts : A bytestring representation of the greyscale array
        wdth : The width of the array
        hght : The height of the array
        shape : The shape to 'cut out' of the given greyscale array. A square
            will render the entire array. Available: 'square', 'circle'

        Returns
        -------
        Texture object
        """
        self._txid, self.wdth, self.hght = loadTexture(byts,wdth,hght)
        # Precompile the draw commands into a display list for speed.
        if shape == 'square':
            self._dlid = createSquareDL(self._txid,self.wdth,self.hght)
        elif shape == 'circle':
            self._dlid = createCircleDL(self._txid,self.wdth,self.hght)
        else:
            raise NameError('Invalid Shape')

    # Explicit GL cleanup is intentionally disabled; see deleteTexture and
    # deleteTextureDL for the functions it would call.
    # def __del__(self):
    #     if self._txid != None:
    #         deleteTexture(self._txid)
    #         self._txid = None
    #     if self._dlid != None:
    #         deleteTextureDL(self._dlid)
    #         self._dlid = None

    def draw(self,pos=None,sz=None,rot=0,rotc=None):
        """
        This method loads the Texture into the back buffer. Calling
        Graphics.flip will cause it to be drawn to the screen. It also allows a
        number of transformation to be performed on the image before it is
        loaded (e.g. translation, rotation)

        Parameters
        ----------
        pos : A pair (rows,columns) representing the the position in pixels in
            the Graphics window of the upper left corner (origin) of the Texture
        sz : A tuple (width,height) representing the size of the image in
            pixels. None causes the natural width and height of the image to be
            used, which prevents an blending of the image.
        rot : Rotation applied to the image. May result in scaling/interpolation.
        rotc : Defines the centre of the rotation.

        Returns
        -------
        None
        """
        if pos:
            gl.glLoadIdentity()
            gl.glTranslate(pos[0],pos[1],0)
        if rot != 0:
            if rotc == None:
                rotc = (self.wdth / 2, self.hght / 2)
            (w,h) = rotc  # NOTE(review): w and h are never used below
            # rotate about rotc: move there, spin, move back
            gl.glTranslate(rotc[0],rotc[1],0)
            gl.glRotate(rot,0,0,-1)
            gl.glTranslate(-rotc[0],-rotc[1],0)
        if sz:
            (wdth,hght) = sz
            gl.glScalef(wdth/(self.wdth*1.0), hght/(self.hght*1.0),1.0)
        gl.glCallList(self._dlid)
### Internal Functions ###
## OpenGL Texture Functions ##
def channelsToInt(channels):
    """
    Takes a channel representation and returns a corresponding unsigned 32 bit
    int. Running the tostring method on a 2d array which has had this function
    applied to it will produce a bytestring appropriate for use as a texture
    with openGL.

    The original signature unpacked the tuple in the parameter list
    (``def channelsToInt((r,g,b,a))``), which is Python-2-only syntax;
    unpacking in the body keeps the caller interface identical while also
    being valid on Python 3.

    Parameters
    ----------
    channels : an (r, g, b, a) tuple of 8-bit channel values

    Returns
    -------
    The channels packed into one integer (r in the lowest byte).
    """
    (r, g, b, a) = channels
    R = 2**0
    G = 2**8
    B = 2**16
    A = 2**24
    return r*R + g*G + b*B + a*A
def loadTexture(byts,wdth,hght):
    """
    LoadTexture takes a bytestring representation of a Processed Greyscale array
    and loads it into OpenGL texture memory.

    In this function we also define our texture minification and
    magnification functions, of which there are many options. Take great
    care when shrinking, blowing up, or rotating an image. The resulting
    interpolations can effect experimental results.

    Returns a (texture id, width, height) triple so callers can unpack
    everything in one assignment.
    """
    txid = gl.glGenTextures(1)
    gl.glBindTexture(gl.GL_TEXTURE_2D, txid)
    # Linear interpolation for both magnification and minification.
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR)
    # Upload the RGBA bytestring into the bound texture.
    gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, wdth, hght, 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, byts)
    return txid,wdth,hght
def deleteTexture(txid):
    """
    deleteTexture removes the texture from the OpenGL texture memory.

    Parameters
    ----------
    txid : the texture id returned by loadTexture
    """
    gl.glDeleteTextures(txid)
## OpenGL Display List Functions ##
def createSquareDL(txid,wdth,hght):
    """
    createSquareDL takes a texture id with width and height and
    generates a display list - an precompiled set of instructions for
    rendering the image. This speeds up image display. The instructions
    compiled are essentially creating a square and binding the texture
    to it.

    Returns the display list id usable with glCallList.
    """
    dlid = gl.glGenLists(1)
    gl.glNewList(dlid,gl.GL_COMPILE)

    gl.glBindTexture(gl.GL_TEXTURE_2D, txid)

    # One quad covering (0,0)-(wdth,hght), texture mapped corner to corner.
    gl.glBegin(gl.GL_QUADS)
    gl.glTexCoord2f(0, 0); gl.glVertex2f(0, 0)
    gl.glTexCoord2f(0, 1); gl.glVertex2f(0, hght)
    gl.glTexCoord2f(1, 1); gl.glVertex2f(wdth, hght)
    gl.glTexCoord2f(1, 0); gl.glVertex2f(wdth, 0)
    gl.glEnd()
    gl.glFinish()

    gl.glEndList()

    return dlid
def createCircleDL(txid,wdth,hght):
    """
    createCircleDL takes a texture id with width and height and
    generates a display list - an precompiled set of instructions for
    rendering the image. This speeds up image display. The instructions
    compiled are essentially creating a circle and binding the texture
    to it.

    Returns the display list id usable with glCallList.
    """
    dlid = gl.glGenLists(1)
    gl.glNewList(dlid,gl.GL_COMPILE)

    gl.glBindTexture(gl.GL_TEXTURE_2D, txid)

    # Triangle fan around the origin, radius 0.5 scaled by wdth/hght.
    # NOTE(review): the texture coordinates span -0.5..0.5 rather than the
    # usual 0..1 - confirm this sampling (with wrap behaviour) is intended.
    gl.glBegin(gl.GL_TRIANGLE_FAN)
    for ang in np.linspace(0,2*np.pi,360):
        (x,y) = ((np.cos(ang))/2,(np.sin(ang))/2)
        gl.glTexCoord2f(x, y); gl.glVertex2f(x*wdth,y*hght)
    gl.glEnd()
    gl.glFinish()

    gl.glEndList()

    return dlid
def deleteTextureDL(dlid):
    """
    deleteTextureDL removes the given display list from memory.

    Parameters
    ----------
    dlid : the display list id returned by createSquareDL/createCircleDL
    """
    gl.glDeleteLists(dlid,1)
| lgpl-2.1 | 7,395,943,921,647,267,000 | 32.655914 | 106 | 0.640575 | false |
PeterDing/iScript | xiami.py | 1 | 55901 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import re
import sys
from getpass import getpass
import os
import copy
import random
import time
import datetime
import json
import argparse
import requests
import urllib
import hashlib
import select
from mutagen.id3 import ID3,TRCK,TIT2,TALB,TPE1,APIC,TDRC,COMM,TPOS,USLT
from HTMLParser import HTMLParser
url_song = "http://www.xiami.com/song/%s"
url_album = "http://www.xiami.com/album/%s"
url_collect = "http://www.xiami.com/collect/ajax-get-list"
url_artist_albums = "http://www.xiami.com/artist/album/id/%s/page/%s"
url_artist_top_song = "http://www.xiami.com/artist/top-%s"
url_lib_songs = "http://www.xiami.com/space/lib-song/u/%s/page/%s"
url_recent = "http://www.xiami.com/space/charts-recent/u/%s/page/%s"
# 电台来源:来源于"收藏的歌曲","收藏的专辑","喜欢的艺人","我收藏的精选集"
url_radio_my = "http://www.xiami.com/radio/xml/type/4/id/%s"
# 虾米猜, 基于你的虾米试听行为所建立的个性电台
url_radio_c = "http://www.xiami.com/radio/xml/type/8/id/%s"
############################################################
# wget exit status
wget_es = {
0:"No problems occurred.",
2:"User interference.",
1<<8:"Generic error code.",
2<<8:"Parse error - for instance, when parsing command-line ' \
'optio.wgetrc or .netrc...",
3<<8:"File I/O error.",
4<<8:"Network failure.",
5<<8:"SSL verification failure.",
6<<8:"Username/password authentication failure.",
7<<8:"Protocol errors.",
8<<8:"Server issued an error response."
}
############################################################
parser = HTMLParser()
s = '\x1b[%d;%dm%s\x1b[0m' # terminual color template
cookie_file = os.path.join(os.path.expanduser('~'), '.Xiami.cookies')
headers = {
"Accept":"text/html,application/xhtml+xml,application/xml; " \
"q=0.9,image/webp,*/*;q=0.8",
"Accept-Encoding":"text/html",
"Accept-Language":"en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,zh-TW;q=0.2",
"Content-Type":"application/x-www-form-urlencoded",
"Referer":"http://www.xiami.com/",
"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36"\
}
HEADERS2 = {
'pragma': 'no-cache',
'accept-encoding': 'gzip, deflate, br',
'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
'accept': 'text/javascript, application/javascript, application/ecmascript, application/x-ecmascript, */*; q=0.01',
'cache-control': 'no-cache',
'authority': 'www.xiami.com',
'x-requested-with': 'XMLHttpRequest',
'referer': 'https://www.xiami.com/play?ids=/song/playlist/id/',
}
ss = requests.session()
ss.headers.update(headers)
############################################################
# Regular Expression Templates
re_disc_description = r'disc (\d+) \[(.+?)\]'
############################################################
def decry(row, encryed_url):
    """
    Decode xiami's scrambled "location" string into a plain URL.

    The payload is a column-scrambled, percent-encoded URL: the text is laid
    out row by row into a matrix of ``row`` rows (the first ``len % row``
    rows get one extra column) and the real URL is read back column by
    column, percent-decoded, with '^' standing in for '0'.

    The original relied on Python-2-only pieces (``xrange``,
    ``urllib.unquote`` and integer ``/``); this version behaves identically
    on Python 2 and 3.

    Parameters:
        row         - number of rows, as an int or numeric string.
        encryed_url - the scrambled, percent-encoded payload.

    Returns the decoded URL string.
    """
    # urllib.unquote moved to urllib.parse in Python 3.
    try:
        from urllib import unquote
    except ImportError:
        from urllib.parse import unquote

    url = encryed_url
    urllen = len(url)
    rows = int(row)

    cols_base = urllen // rows  # basic column count
    rows_ex = urllen % rows     # count of rows that have 1 more column

    matrix = []
    for r in range(rows):
        length = cols_base + 1 if r < rows_ex else cols_base
        matrix.append(url[:length])
        url = url[length:]

    # Read the matrix back column-major to recover the original order.
    url = ''
    for i in range(urllen):
        url += matrix[i % rows][i // rows]

    return unquote(url).replace('^', '0')
def modificate_text(text):
    """Sanitise a title/tag string: HTML-unescape it, turn slashes and
    backslashes into dashes, collapse whitespace runs and trim the ends."""
    cleaned = parser.unescape(text)
    cleaned = re.sub(r'//*', '-', cleaned)
    for separator in ('/', '\\'):
        cleaned = cleaned.replace(separator, '-')
    cleaned = re.sub(r'\s\s+', ' ', cleaned)
    return cleaned.strip()
def modificate_file_name_for_wget(file_name):
    """Adapt *file_name* so it is valid on FAT filesystems and safe inside a
    shell command line (the '$' case is issue #7)."""
    file_name = re.sub(r'\s*:\s*', u' - ', file_name)  # ':' is invalid on FAT
    replacements = (
        ('?', ''),     # '?' is invalid on FAT
        ('"', '\''),   # '"' is invalid on FAT
        ('$', '\\$'),  # stop the shell from expanding '$'
    )
    for old, new in replacements:
        file_name = file_name.replace(old, new)
    return file_name
def z_index(song_infos):
    """Return the zero-padding width for track numbers of *song_infos*,
    i.e. the number of decimal digits in its length."""
    return len(str(len(song_infos)))
########################################################
class Song(object):
    """
    Loose record type for one track.

    Every attribute set on an instance is stored in a private '__keys'
    dict (see __setattr__), and reading a never-set attribute returns
    None instead of raising AttributeError, so callers can probe for
    optional metadata freely.  Item access (song['x']) is an alias for
    attribute access.
    """
    def __init__(self):
        self.__sure()
        # sensible defaults so tagging code never sees None for these
        self.track = 0
        self.year = 0
        self.cd_serial = 0
        self.disc_description = ''
        # z = len(str(album_size))
        self.z = 1
    def __sure(self):
        # Lazily create the backing store; writes directly into __dict__
        # to avoid recursing through our own __setattr__.
        __dict__ = self.__dict__
        if '__keys' not in __dict__:
            __dict__['__keys'] = {}
    def __getattr__(self, name):
        # Only called when normal lookup fails; missing keys yield None.
        __dict__ = self.__dict__
        return __dict__['__keys'].get(name)
    def __setattr__(self, name, value):
        # All attribute writes are redirected into the '__keys' store.
        __dict__ = self.__dict__
        __dict__['__keys'][name] = value
    def __getitem__(self, key):
        return getattr(self, key)
    def __setitem__(self, key, value):
        return setattr(self, key, value)
    def feed(self, **kwargs):
        # Bulk-assign metadata fields from keyword arguments.
        for name, value in kwargs.items():
            setattr(self, name, value)
class XiamiH5API(object):
    """
    Client for xiami's mobile ("H5") JSON API at api.xiami.com.

    Public methods return Song objects (or generators of them) carrying the
    metadata needed for tagging and downloading.  This rewrite fixes a bug
    in album(): trailing commas turned album_id/album_name into 1-tuples,
    and it uses Python-3-compatible except/print forms (output unchanged).
    """

    URL = 'http://api.xiami.com/web'
    PARAMS = {
        'v': '2.0',
        'app_key': '1',
    }

    def __init__(self):
        # A freshly minted _xiamitoken is enough for the API's CSRF check.
        self.cookies = {
            'user_from': '2',
            'XMPLAYER_addSongsToggler': '0',
            'XMPLAYER_isOpen': '0',
            '_xiamitoken': hashlib.md5(str(time.time())).hexdigest()
        }
        self.sess = requests.session()
        self.sess.cookies.update(self.cookies)

    def _request(self, url, method='GET', **kwargs):
        """Issue one HTTP request; abort the whole program on failure."""
        try:
            resp = self.sess.request(method, url, **kwargs)
        except Exception as err:
            print('Error: %s' % err)
            sys.exit()
        return resp

    def _make_params(self, **kwargs):
        """Return a copy of the common query parameters extended by kwargs."""
        params = copy.deepcopy(self.PARAMS)
        params.update(kwargs)
        return params

    def song(self, song_id):
        """Fetch full metadata of a single song and return it as a Song."""
        params = self._make_params(id=song_id, r='song/detail')
        url = self.URL
        resp = self._request(url, params=params, headers=headers)
        info = resp.json()['data']['song']
        # strip the size suffix to get the full-resolution cover image
        pic_url = re.sub('_\d+\.', '.', info['logo'])
        song = Song()
        song.feed(
            song_id=info['song_id'],
            song_name=info['song_name'],
            album_id=info['album_id'],
            album_name=info['album_name'],
            artist_id=info['artist_id'],
            artist_name=info['artist_name'],
            singers=info['singers'],
            album_pic_url=pic_url,
            comment='http://www.xiami.com/song/' + str(info['song_id'])
        )
        return song

    def album(self, album_id):
        """Fetch an album and return its tracks as a list of Songs."""
        url = self.URL
        params = self._make_params(id=album_id, r='album/detail')
        resp = self._request(url, params=params, headers=headers)
        info = resp.json()['data']
        songs = []
        # BUGFIX: these assignments previously ended with commas, which made
        # album_id and album_name 1-tuples on every emitted Song.
        album_id = info['album_id']
        album_name = info['album_name']
        artist_id = info['artist_id']
        artist_name = info['artist_name']
        pic_url = re.sub('_\d+\.', '.', info['album_logo'])
        for track, info_n in enumerate(info['songs'], 1):
            song = Song()
            song.feed(
                song_id=info_n['song_id'],
                song_name=info_n['song_name'],
                album_id=album_id,
                album_name=album_name,
                artist_id=artist_id,
                artist_name=artist_name,
                singers=info_n['singers'],
                album_pic_url=pic_url,
                track=track,
                comment='http://www.xiami.com/song/' + str(info_n['song_id'])
            )
            songs.append(song)
        return songs

    def collect(self, collect_id):
        """Fetch a collect (playlist); returns (id, name, [Song, ...])."""
        url = self.URL
        params = self._make_params(id=collect_id, r='collect/detail')
        resp = self._request(url, params=params, headers=headers)
        info = resp.json()['data']
        collect_name = info['collect_name']
        collect_id = info['list_id']
        songs = []
        for info_n in info['songs']:
            # NOTE(review): uses the collect-level 'album_logo' for every
            # track; per-song covers may be wrong - confirm the API shape.
            pic_url = re.sub('_\d+\.', '.', info['album_logo'])
            song = Song()
            song.feed(
                song_id=info_n['song_id'],
                song_name=info_n['song_name'],
                album_id=info_n['album_id'],
                album_name=info_n['album_name'],
                artist_id=info_n['artist_id'],
                artist_name=info_n['artist_name'],
                singers=info_n['singers'],
                album_pic_url=pic_url,
                comment='http://www.xiami.com/song/' + str(info_n['song_id'])
            )
            songs.append(song)
        return collect_id, collect_name, songs

    def artist_top_songs(self, artist_id, page=1, limit=20):
        """Yield an artist's hottest songs, one full Song per API hit."""
        url = self.URL
        params = self._make_params(id=artist_id, page=page, limit=limit, r='artist/hot-songs')
        resp = self._request(url, params=params, headers=headers)
        info = resp.json()['data']
        for info_n in info['songs']:
            song_id = info_n['song_id']
            yield self.song(song_id)

    def search_songs(self, keywords, page=1, limit=20):
        """Yield Songs matching *keywords* (paged search)."""
        url = self.URL
        params = self._make_params(key=keywords, page=page, limit=limit, r='search/songs')
        resp = self._request(url, params=params, headers=headers)
        info = resp.json()['data']
        for info_n in info['songs']:
            # NOTE(review): as in collect(), 'album_logo' is read from the
            # result-level dict, not the individual song - confirm intended.
            pic_url = re.sub('_\d+\.', '.', info['album_logo'])
            song = Song()
            song.feed(
                song_id=info_n['song_id'],
                song_name=info_n['song_name'],
                album_id=info_n['album_id'],
                album_name=info_n['album_name'],
                artist_id=info_n['artist_id'],
                artist_name=info_n['artist_name'],
                singers=info_n['singer'],
                album_pic_url=pic_url,
                comment='http://www.xiami.com/song/' + str(info_n['song_id'])
            )
            yield song

    def get_song_id(self, *song_sids):
        """Map string song ids (e.g. 'mQ0lYRp7a17') to numeric ids.

        Numeric inputs pass through unchanged; non-numeric short ids are
        resolved through the playlist endpoint.
        """
        song_ids = []
        for song_sid in song_sids:
            if isinstance(song_sid, int) or song_sid.isdigit():
                song_ids.append(int(song_sid))
            else:
                url = 'https://www.xiami.com/song/playlist/id/{}/cat/json'.format(song_sid)
                resp = self._request(url, headers=headers)
                info = resp.json()
                song_id = int(str(info['data']['trackList'][0]['song_id']))
                song_ids.append(song_id)
        return song_ids
class XiamiWebAPI(object):
    """
    Client for xiami's desktop-web playlist API (song/playlist/...), whose
    responses also contain the scrambled download 'location' for each track.
    """

    URL = 'https://www.xiami.com/song/playlist/'

    def __init__(self):
        self.sess = requests.session()

    def _request(self, url, method='GET', **kwargs):
        # One-shot request; any failure aborts the whole program.
        try:
            resp = self.sess.request(method, url, **kwargs)
        except Exception, err:
            print 'Error:', err
            sys.exit()
        return resp

    def _make_song(self, info):
        # Build a Song from one trackList entry, decoding the scrambled
        # 'location' field (first char = row count) into a direct URL.
        song = Song()
        location=info['location']
        row = location[0]
        encryed_url = location[1:]
        durl = decry(row, encryed_url)
        song.feed(
            song_id=info['song_id'],
            song_sub_title=info['song_sub_title'],
            songwriters=info['songwriters'],
            singers=info['singers'],
            song_name=parser.unescape(info['name']),
            album_id=info['album_id'],
            album_name=info['album_name'],
            artist_id=info['artist_id'],
            artist_name=info['artist_name'],
            composer=info['composer'],
            lyric_url='http:' + info['lyric_url'],
            track=info['track'],
            cd_serial=info['cd_serial'],
            album_pic_url='http:' + info['album_pic'],
            comment='http://www.xiami.com/song/' + str(info['song_id']),
            length=info['length'],
            play_count=info['playCount'],
            location=info['location'],
            location_url=durl
        )
        return song

    def _find_z(self, album):
        # Work out, per disc, how many digits are needed to zero-pad track
        # numbers, then stamp that width on every song (song.z).
        zs = []
        song = album[0]  # seed for the single-track case (loop may not run)
        for i, song in enumerate(album[:-1]):
            next_song = album[i+1]
            cd_serial = song.cd_serial
            next_cd_serial = next_song.cd_serial
            if cd_serial != next_cd_serial:
                # last track of a disc: its track number has the max width
                z = len(str(song.track))
                zs.append(z)
        # NOTE(review): for multi-track albums this uses the second-to-last
        # track's width for the final disc (e.g. track 9 vs 10) - confirm.
        z = len(str(song.track))
        zs.append(z)
        for song in album:
            song.z = zs[song.cd_serial - 1]

    def song(self, song_id):
        # Fetch one track (with download location); None when unknown.
        url = self.URL + 'id/%s/cat/json' % song_id
        resp = self._request(url, headers=HEADERS2)
        # there is no song
        if not resp.json().get('data'):
            return None
        info = resp.json()['data']['trackList'][0]
        song = self._make_song(info)
        return song

    def songs(self, *song_ids):
        # Fetch several tracks at once; ids are joined with an URL-encoded
        # comma (%2C), so they must be strings.  None when nothing found.
        url = self.URL + 'id/%s/cat/json' % '%2C'.join(song_ids)
        resp = self._request(url, headers=HEADERS2)
        # there is no song
        if not resp.json().get('data'):
            return None
        info = resp.json()['data']
        songs = []
        for info_n in info['trackList']:
            song = self._make_song(info_n)
            songs.append(song)
        return songs

    def album(self, album_id):
        # Fetch an album's full track list (type/1) and compute per-disc
        # zero-padding widths; None when the album does not exist.
        url = self.URL + 'id/%s/type/1/cat/json' % album_id
        resp = self._request(url, headers=HEADERS2)
        # there is no album
        if not resp.json().get('data'):
            return None
        info = resp.json()['data']
        songs = []
        for info_n in info['trackList']:
            song = self._make_song(info_n)
            songs.append(song)
        self._find_z(songs)
        return songs

    def collect(self, collect_id):
        # Fetch a collect/playlist (type/3).  NOTE(review): unlike song()
        # and album(), there is no missing-'data' guard here.
        url = self.URL + 'id/%s/type/3/cat/json' % collect_id
        resp = self._request(url, headers=HEADERS2)
        info = resp.json()['data']
        songs = []
        for info_n in info['trackList']:
            song = self._make_song(info_n)
            songs.append(song)
        return songs

    def search_songs(self, keywords):
        # Scrape song ids out of the HTML search results, then resolve
        # them through songs().
        url = 'https://www.xiami.com/search?key=%s&_=%s' % (
            urllib.quote(keywords), int(time.time() * 1000))
        resp = self._request(url, headers=headers)
        html = resp.content
        song_ids = re.findall(r'song/(\w+)"', html)
        songs = self.songs(*song_ids)
        return songs
class xiami(object):
    def __init__(self):
        # Downloads are saved under the current working directory.
        self.dir_ = os.getcwdu()
        # Template of the play-count beacon (see record(); currently unused).
        self.template_record = 'https://www.xiami.com/count/playrecord?sid={song_id}&ishq=1&t={time}&object_id={song_id}&object_name=default&start_point=120&_xiamitoken={token}'
        # ids of the objects currently being processed
        self.collect_id = ''
        self.album_id = ''
        self.artist_id = ''
        self.song_id = ''
        self.user_id = ''
        # one-album cover cache: cover_id keys cover_data (see get_cover)
        self.cover_id = ''
        self.cover_data = ''
        # cached album page HTML for disc descriptions
        self.html = ''
        self.disc_description_archives = {}
        # In --play mode "downloading" means piping to the player instead.
        self.download = self.play if args.play else self.download
        self._is_play = bool(args.play)
        self._api = XiamiWebAPI()
    def init(self):
        """Load saved cookies and verify they still authenticate; exit the
        program with a hint when they are missing or invalid."""
        if os.path.exists(cookie_file):
            try:
                cookies = json.load(open(cookie_file))
                # older cookie files nested the dict under a 'cookies' key
                ss.cookies.update(cookies.get('cookies', cookies))
                if not self.check_login():
                    print s % (1, 91, ' !! cookie is invalid, please login\n')
                    sys.exit(1)
            # NOTE(review): this bare except also catches the SystemExit
            # raised just above, so the invalid-cookie path falls through
            # to the truncate-and-"please login" branch - confirm intended.
            except:
                open(cookie_file, 'w').close()
                print s % (1, 97, ' please login')
                sys.exit(1)
        else:
            print s % (1, 91, ' !! cookie_file is missing, please login')
            sys.exit(1)
    def check_login(self):
        """Probe the signin endpoint; a non-empty body means the session's
        cookies still authenticate us.  Returns True/False."""
        #print s % (1, 97, '\n -- check_login')
        url = 'http://www.xiami.com/task/signin'
        r = self._request(url)
        if r.content:
            #print s % (1, 92, ' -- check_login success\n')
            # self.save_cookies()
            return True
        else:
            print s % (1, 91, ' -- login fail, please check email and password\n')
            return False
    def _request(self, url, headers=None, params=None, data=None, method='GET', timeout=30, retry=2):
        """Issue an HTTP request through the shared session, retrying
        transport errors up to *retry* times, and persist cookies after every
        successful response.  Raises on a bad HTTP status or, after all
        retries, re-raises the last transport error."""
        for _ in range(retry):
            try:
                headers = headers or ss.headers
                resp = ss.request(method, url, headers=headers, params=params, data=data, timeout=timeout)
            except Exception, err:
                continue
            if not resp.ok:
                # NOTE(review): a bad status code raises immediately rather
                # than being retried like a transport error - confirm intended.
                raise Exception("response is not ok, status_code = %s" % resp.status_code)
            # save cookies
            self.save_cookies()
            return resp
        # every attempt failed with a transport error: re-raise the last one
        raise err
# manually, add cookies
# you must know how to get the cookie
def add_cookies(self, cookies):
_cookies = {}
for item in cookies.strip('; ').split('; '):
k, v = item.split('=', 1)
_cookies[k] = v
self.save_cookies(_cookies)
ss.cookies.update(_cookies)
    def login(self, email, password):
        """Log in to xiami with email/password; returns True on success.
        Retries once with a user-typed captcha when the site demands one."""
        print s % (1, 97, '\n -- login')
        #validate = self.get_validate()
        data = {
            'email': email,
            'password': password,
            #'validate': validate,
            'remember': 1,
            'LoginButton': '登录'
        }
        hds = {
            'Origin': 'http://www.xiami.com',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'en-US,en;q=0.8',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Cache-Control': 'max-age=1',
            'Referer': 'http://www.xiami.com/web/login',
            'Connection': 'keep-alive',
            '_xiamitoken': hashlib.md5(str(time.time())).hexdigest()
        }
        url = 'https://login.xiami.com/web/login'
        # Two attempts: first plain, second with a captcha if demanded.
        # NOTE(review): _request defaults to method='GET' even though form
        # data is supplied - confirm the endpoint accepts that.
        for i in xrange(2):
            res = self._request(url, headers=hds, data=data)
            # a member_auth cookie is the success marker
            if ss.cookies.get('member_auth'):
                return True
            else:
                if 'checkcode' not in res.content:
                    return False
                validate = self.get_validate(res.content)
                data['validate'] = validate
        return False
    # {{{ code from https://github.com/ly0/xiami-tools/blob/master/xiami.py
    def login_taobao(self, username, password):
        """Log in via the Taobao/Alipay SSO flow: fetch the mini-login form,
        scrape its hidden tokens, post the credentials (looping on captcha
        challenges) and finally redeem the returned 'st' ticket at xiami."""
        print s % (1, 97, '\n -- login taobao')
        # Query parameters for the mini login form.  NOTE(review): several
        # keys are repeated in this literal; the later values win.
        p = {
            "lang": "zh_cn",
            "appName": "xiami",
            "appEntrance": "taobao",
            "cssLink": "",
            "styleType": "vertical",
            "bizParams": "",
            "notLoadSsoView": "",
            "notKeepLogin": "",
            "appName": "xiami",
            "appEntrance": "taobao",
            "cssLink": "https://h.alipayobjects.com/static/applogin/" \
                "assets/login/mini-login-form-min.css",
            "styleType": "vertical",
            "bizParams": "",
            "notLoadSsoView": "true",
            "notKeepLogin": "true",
            "rnd": str(random.random()),
        }
        url = 'https://passport.alipay.com/mini_login.htm'
        r = ss.get(url, params=p, verify=True)
        cm = r.content
        # Hidden anti-CSRF tokens scraped out of the form HTML.
        data = {
            "loginId": username,
            "password": password,
            "appName": "xiami",
            "appEntrance": "taobao",
            "hsid": re.search(r'"hsid" value="(.+?)"', cm).group(1),
            "cid": re.search(r'"cid" value="(.+?)"', cm).group(1),
            "rdsToken": re.search(r'"rdsToken" value="(.+?)"', cm).group(1),
            "umidToken": re.search(r'"umidToken" value="(.+?)"', cm).group(1),
            "_csrf_token": re.search(r'"_csrf_token" value="(.+?)"', cm).group(1),
            "checkCode": "",
        }
        url = 'https://passport.alipay.com/newlogin/login.do?fromSite=0'
        # NOTE(review): theaders aliases the module-level headers dict, so
        # the Referer mutation below leaks into every later request.
        theaders = headers
        theaders['Referer'] = 'https://passport.alipay.com/mini_login.htm'
        while True:
            r = ss.post(url, data=data, headers=theaders, verify=True)
            j = r.json()
            if j['content']['status'] == -1:
                if 'titleMsg' not in j['content']['data']: continue
                err_msg = j['content']['data']['titleMsg']
                # Captcha demanded (or mistyped): fetch it, save it to
                # ~/vcode.jpg, ask the user and retry.
                if err_msg == u'请输入验证码' or err_msg == u'验证码错误,请重新输入':
                    captcha_url = 'http://pin.aliyun.com/get_img?' \
                        'identity=passport.alipay.com&sessionID=%s' % data['cid']
                    tr = self._request(captcha_url, headers=theaders)
                    path = os.path.join(os.path.expanduser('~'), 'vcode.jpg')
                    with open(path, 'w') as g:
                        img = tr.content
                        g.write(img)
                    print " ++ 验证码已经保存至", s % (2, 91, path)
                    captcha = raw_input(
                        (s % (2, 92, ' ++ %s: ' % err_msg)).encode('utf8'))
                    data['checkCode'] = captcha
                    continue
            # no service ticket means the credentials were wrong
            if not j['content']['data'].get('st'):
                print s % (2, 91, " !! 输入的 username 或 password 有误.")
                sys.exit(1)
            # redeem the ticket on xiami to obtain the session cookies
            url = 'http://www.xiami.com/accounts/back?st=%s' \
                % j['content']['data']['st']
            self._request(url, headers=theaders)
            self.save_cookies()
            return
    # }}}
    def get_validate(self, cn):
        """Download the login captcha referenced in the page HTML *cn*,
        save it to ~/vcode.png and prompt the user to type it in."""
        #url = 'https://login.xiami.com/coop/checkcode?forlogin=1&%s' \
        #% int(time.time())
        url = re.search(r'src="(http.+checkcode.+?)"', cn).group(1)
        path = os.path.join(os.path.expanduser('~'), 'vcode.png')
        with open(path, 'w') as g:
            data = self._request(url).content
            g.write(data)
        print " ++ 验证码已经保存至", s % (2, 91, path)
        validate = raw_input(s % (2, 92, ' 请输入验证码: '))
        return validate
    def save_cookies(self, cookies=None):
        """Persist *cookies* (default: the live session's cookies) to the
        JSON cookie file for the next run."""
        if not cookies:
            cookies = ss.cookies.get_dict()
        with open(cookie_file, 'w') as g:
            json.dump(cookies, g)
    def get_durl(self, id_):
        """Resolve the direct download URL for song *id_*.

        Uses the HQ endpoint unless --low was given, decodes the scrambled
        location with decry(), and retries forever (5 s apart) on errors.
        Returns None only when the response carries no location at all.
        """
        while True:
            try:
                if not args.low:
                    url = 'http://www.xiami.com/song/gethqsong/sid/%s'
                    j = self._request(url % id_).json()
                    t = j['location']
                else:
                    url = 'http://www.xiami.com/song/playlist/id/%s'
                    cn = self._request(url % id_).text
                    t = re.search(r'location>(.+?)</location', cn).group(1)
                if not t: return None
                # first char = row count, rest = scrambled payload
                row = t[0]
                encryed_url = t[1:]
                durl = decry(row, encryed_url)
                return durl
            except Exception, e:
                print s % (1, 91, ' |-- Error, get_durl --'), e
                time.sleep(5)
    # FIXME, this request alway returns 405
    def record(self, song_id, album_id):
        """Report a play of *song_id* to xiami.  Disabled (a no-op): the
        endpoint currently rejects the request with HTTP 405, so the real
        implementation below stays commented out."""
        return
        # token = ss.cookies.get('_xiamitoken', '')
        # t = int(time.time() * 1000)
        # self._request(self.template_record.format(
        #     song_id=song_id, album_id=album_id, token=token, time=t))
    def get_cover(self, info):
        """Return the album-cover image bytes for *info*.

        Caches one album's cover at a time (keyed by album_name) so every
        track of an album reuses the same download; retries forever on
        errors, 5 s apart."""
        if info['album_name'] == self.cover_id:
            return self.cover_data
        else:
            self.cover_id = info['album_name']
            while True:
                url = info['album_pic_url']
                try:
                    self.cover_data = self._request(url).content
                    # a body starting with '<?xml' is an error document
                    # from the CDN, not an image - try again
                    if self.cover_data[:5] != '<?xml':
                        return self.cover_data
                except Exception, e:
                    print s % (1, 91, ' \\\n \\-- Error, get_cover --'), e
                    time.sleep(5)
    def get_lyric(self, info):
        """Fetch and normalise the lyric for *info*.

        Returns the lyric as unicode text, or None when the song has no
        (usable) lyric.  Timed LRC files are flattened: timestamps are
        stripped, duplicate time-tagged lines de-duplicated, and a small
        title/album/artist header is prepended."""
        def lyric_parser(data):
            # get ' ' from http://img.xiami.net/lyric/1_13772259457649.lrc
            # (near-empty placeholder lyrics are rejected outright)
            if len(data) < 10:
                return None
            if re.search(r'\[\d\d:\d\d', data):
                title = ' title: %s\n' % info['song_name'].encode('utf8')
                album = ' album: %s\n' % info['album_name'].encode('utf8')
                artist = 'artist: %s\n' % info['artist_name'].encode('utf8')
                # map each [mm:ss.xx] tag to its lyric line, then emit the
                # lines in (string-sorted) tag order
                tdict = {}
                for line in data.split('\n'):
                    if re.search(r'^\[\d\d:', line):
                        cn = re.sub(r'\[\d{2}:\d{2}\.\d{2}\]', '', line)
                        time_tags = re.findall(r'\[\d{2}:\d{2}\.\d{2}\]', line)
                        for tag in time_tags: tdict[tag] = cn + '\n'
                time_tags = tdict.keys()
                time_tags.sort()
                data = ''.join([title, album, artist,
                                '\n------------------\n\n'] + \
                               [tdict[tag] for tag in time_tags])
                return data
            else:
                # for http://img.xiami.net/lyric/upload/19/1770983119_1356864643.lrc
                return data

        # the playlist XML carries the lyric file's URL, if any
        url = 'http://www.xiami.com/song/playlist/id/%s' % info['song_id']
        xml = self._request(url).content
        t = re.search('<lyric>(http.+?)</lyric>', xml)
        if not t: return None
        lyric_url = t.group(1)
        data = self._request(lyric_url).content.replace('\r\n', '\n')
        data = lyric_parser(data)
        if data:
            return data.decode('utf8', 'ignore')
        else:
            return None
def get_disc_description(self, album_url, info):
if not self.html:
self.html = self._request(album_url).text
t = re.findall(re_disc_description, self.html)
t = dict([(a, modificate_text(parser.unescape(b))) \
for a, b in t])
self.disc_description_archives = dict(t)
if self.disc_description_archives.has_key(info['cd_serial']):
disc_description = self.disc_description_archives[info['cd_serial']]
return u'(%s)' % disc_description
else:
return u''
    def modified_id3(self, file_name, info):
        """Write ID3 tags (track, year, titles, lyrics, cover art) to *file_name*."""
        id3 = ID3()
        id3.add(TRCK(encoding=3, text=str(info['track'])))
        id3.add(TDRC(encoding=3, text=str(info['year'])))
        id3.add(TIT2(encoding=3, text=info['song_name']))
        id3.add(TALB(encoding=3, text=info['album_name']))
        id3.add(TPE1(encoding=3, text=info['artist_name']))
        id3.add(TPOS(encoding=3, text=str(info['cd_serial'])))
        lyric_data = self.get_lyric(info)
        # Only attach a USLT (lyrics) frame when lyrics were actually found.
        id3.add(USLT(encoding=3, text=lyric_data)) if lyric_data else None
        #id3.add(TCOM(encoding=3, text=info['composer']))
        #id3.add(WXXX(encoding=3, desc=u'xiami_song_url', text=info['song_url']))
        #id3.add(TCON(encoding=3, text=u'genre'))
        #id3.add(TSST(encoding=3, text=info['sub_title']))
        #id3.add(TSRC(encoding=3, text=info['disc_code']))
        id3.add(COMM(encoding=3, desc=u'Comment', \
            text=info['comment']))
        id3.add(APIC(encoding=3, mime=u'image/jpeg', type=3, \
            desc=u'Front Cover', data=self.get_cover(info)))
        id3.save(file_name)
    def url_parser(self, urls):
        """Dispatch each supported url to the matching download routine.

        Handles xiami collect/album/artist/song/user/chart/genre pages,
        luoo.net playlist pages, and raw 'sid=...' song-id lists.  Artist
        and user urls prompt interactively for the desired action.
        """
        for url in urls:
            if '/collect/' in url:
                self.collect_id = re.search(r'/collect/(\w+)', url).group(1)
                #print(s % (2, 92, u'\n -- 正在分析精选集信息 ...'))
                self.download_collect()
            elif '/album/' in url:
                self.album_id = re.search(r'/album/(\w+)', url).group(1)
                #print(s % (2, 92, u'\n -- 正在分析专辑信息 ...'))
                self.download_album()
            elif '/artist/' in url or 'i.xiami.com' in url:
                # Vanity pages (i.xiami.com) embed the artist id in the html.
                def get_artist_id(url):
                    html = self._request(url).text
                    artist_id = re.search(r'artist_id = \'(\w+)\'', html).group(1)
                    return artist_id
                self.artist_id = re.search(r'/artist/(\w+)', url).group(1) \
                    if '/artist/' in url else get_artist_id(url)
                code = raw_input(' >> a # 艺术家所有专辑.\n' \
                    ' >> r # 艺术家 radio\n' \
                    ' >> t # 艺术家top 20歌曲.\n >> ')
                if code == 'a':
                    #print(s % (2, 92, u'\n -- 正在分析艺术家专辑信息 ...'))
                    self.download_artist_albums()
                elif code == 't':
                    #print(s % (2, 92, u'\n -- 正在分析艺术家top20信息 ...'))
                    self.download_artist_top_20_songs()
                elif code == 'r':
                    self.download_artist_radio()
                else:
                    print(s % (1, 92, u' --> Over'))
            elif '/song/' in url:
                self.song_id = re.search(r'/song/(\w+)', url).group(1)
                #print(s % (2, 92, u'\n -- 正在分析歌曲信息 ...'))
                self.download_song()
            elif '/u/' in url:
                self.user_id = re.search(r'/u/(\w+)', url).group(1)
                code = raw_input(
                    ' >> m # 该用户歌曲库.\n'
                    ' >> c # 最近在听\n'
                    ' >> s # 分享的音乐\n'
                    ' >> r # 歌曲试听排行 - 一周\n'
                    ' >> rt # 歌曲试听排行 - 全部 \n'
                    ' >> rm # 私人电台:来源于"收藏的歌曲","收藏的专辑",'
                    ' "喜欢的艺人","收藏的精选集"\n'
                    ' >> rc # 虾米猜:基于试听行为所建立的个性电台\n >> ')
                if code == 'm':
                    #print(s % (2, 92, u'\n -- 正在分析用户歌曲库信息 ...'))
                    self.download_user_songs(url_lib_songs, u'收藏的歌曲')
                elif code == 'c':
                    self.download_user_songs(url_recent, u'最近在听的歌曲')
                elif code == 's':
                    url_shares = 'http://www.xiami.com' \
                        '/space/feed/u/%s/type/3/page/%s' % (self.user_id, '%s')
                    self.download_user_shares(url_shares)
                elif code == 'r':
                    url = 'http://www.xiami.com/space/charts/u/%s/c/song/t/week' % self.user_id
                    self.download_ranking_songs(url, 'week')
                elif code == 'rt':
                    url = 'http://www.xiami.com/space/charts/u/%s/c/song/t/all' % self.user_id
                    self.download_ranking_songs(url, 'all')
                elif code == 'rm':
                    #print(s % (2, 92, u'\n -- 正在分析该用户的虾米推荐 ...'))
                    url_rndsongs = url_radio_my
                    self.download_user_radio(url_rndsongs)
                elif code == 'rc':
                    url_rndsongs = url_radio_c
                    self.download_user_radio(url_rndsongs)
                else:
                    print(s % (1, 92, u' --> Over'))
            elif '/chart/' in url:
                # Chart id defaults to 101 when the url carries none.
                self.chart_id = re.search(r'/c/(\d+)', url).group(1) \
                    if '/c/' in url else 101
                type_ = re.search(r'/type/(\d+)', url).group(1) \
                    if '/type/' in url else 0
                self.download_chart(type_)
            elif '/genre/' in url:
                if '/gid/' in url:
                    self.genre_id = re.search(r'/gid/(\d+)', url).group(1)
                    url_genre = 'http://www.xiami.com' \
                        '/genre/songs/gid/%s/page/%s'
                elif '/sid/' in url:
                    self.genre_id = re.search(r'/sid/(\d+)', url).group(1)
                    url_genre = 'http://www.xiami.com' \
                        '/genre/songs/sid/%s/page/%s'
                else:
                    print s % (1, 91, ' !! Error: missing genre id at url')
                    sys.exit(1)
                code = raw_input(' >> t # 风格推荐\n' \
                    ' >> r # 风格radio\n >> ')
                if code == 't':
                    self.download_genre(url_genre)
                elif code == 'r':
                    self.download_genre_radio(url_genre)
            elif 'luoo.net' in url:
                self.hack_luoo(url)
            elif 'sid=' in url:
                # Comma-separated list of raw song ids.
                _mod = re.search(r'sid=([\w+,]+\w)', url)
                if _mod:
                    song_ids = _mod.group(1).split(',')
                    self.download_songs(song_ids)
            else:
                print s % (2, 91, u' 请正确输入虾米网址.')
def make_file_name(self, song, cd_serial_auth=False):
z = song['z']
file_name = str(song['track']).zfill(z) + '.' \
+ song['song_name'] \
+ ' - ' + song['artist_name'] + '.mp3'
if cd_serial_auth:
song['file_name'] = ''.join([
'[Disc-',
str(song['cd_serial']),
' # ' + song['disc_description'] \
if song['disc_description'] else '', '] ',
file_name])
else:
song['file_name'] = file_name
def get_songs(self, album_id, song_id=None):
songs = self._api.album(album_id)
if not songs:
return []
cd_serial_auth = int(songs[-1]['cd_serial']) > 1
for song in songs:
self.make_file_name(song, cd_serial_auth=cd_serial_auth)
songs = [i for i in songs if i['song_id'] == song_id] \
if song_id else songs
return songs
def get_song(self, song_id):
song = self._api.song(song_id)
if not song:
return []
self.make_file_name(song)
return [song]
    def download_song(self):
        """Download (or play) the single song pointed to by self.song_id."""
        songs = self.get_song(self.song_id)
        print(s % (2, 97, u'\n >> ' + u'1 首歌曲将要下载.')) \
            if not args.play else ''
        #self.song_infos = [song_info]
        self.download(songs)
def download_songs(self, song_ids):
for song_id in song_ids:
self.song_id = song_id
songs = self.get_song(self.song_id)
self.download(songs)
    def download_album(self):
        """Download all songs of self.album_id into '<album> - <artist>'."""
        songs = self.get_songs(self.album_id)
        if not songs:
            return
        song = songs[0]
        d = song['album_name'] + ' - ' + song['artist_name']
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        amount_songs = unicode(len(songs))
        # -f/--from_ lets the user resume from the Nth track.
        songs = songs[args.from_ - 1:]
        print(s % (2, 97, u'\n >> ' + amount_songs + u' 首歌曲将要下载.')) \
            if not args.play else ''
        self.download(songs, amount_songs, args.from_)
    def download_collect(self):
        """Download every song of the collect (playlist) self.collect_id.

        Song ids are gathered from the JSON API 50 per page; the collect
        title is scraped from the html page to name the output directory.
        """
        page = 1
        song_ids = []
        while True:
            params = {
                'id': self.collect_id,
                'p': page,
                'limit': 50,
            }
            infos = self._request(url_collect, params=params).json()
            for info in infos['result']['data']:
                song_ids.append(str(info['song_id']))
            if infos['result']['total_page'] == page:
                break
            page += 1
        html = self._request('http://www.xiami.com/collect/%s' % self.collect_id).text
        html = html.split('<div id="wall"')[0]
        collect_name = re.search(r'<title>(.+?)<', html).group(1)
        d = collect_name
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        amount_songs = unicode(len(song_ids))
        song_ids = song_ids[args.from_ - 1:]
        print(s % (2, 97, u'\n >> ' + amount_songs + u' 首歌曲将要下载.')) \
            if not args.play else ''
        n = args.from_
        for i in song_ids:
            songs = self.get_song(i)
            self.download(songs, amount_songs, n)
            # Reset per-album caches between songs from different albums.
            self.html = ''
            self.disc_description_archives = {}
            n += 1
    def download_artist_albums(self):
        """Download every album of self.artist_id, page by page.

        The listing repeats its last page for out-of-range page numbers,
        so a page identical to the previous one terminates the loop.
        """
        ii = 1
        album_ids = []
        while True:
            html = self._request(
                    url_artist_albums % (self.artist_id, str(ii))).text
            t = re.findall(r'/album/(\w+)"', html)
            if album_ids == t: break
            album_ids = t
            if album_ids:
                for i in album_ids:
                    print ' ++ http://www.xiami.com/album/%s' % i
                    self.album_id = i
                    self.download_album()
                    # Reset per-album caches before the next album.
                    self.html = ''
                    self.disc_description_archives = {}
            else:
                break
            ii += 1
    def download_artist_top_20_songs(self):
        """Download the artist's top-20 songs into '<artist> - top 20'."""
        html = self._request(url_artist_top_song % self.artist_id).text
        song_ids = re.findall(r'/music/send/id/(\d+)', html)
        artist_name = re.search(
                r'<p><a href="/artist/\w+">(.+?)<', html).group(1)
        d = modificate_text(artist_name + u' - top 20')
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        amount_songs = unicode(len(song_ids))
        print(s % (2, 97, u'\n >> ' + amount_songs + u' 首歌曲将要下载.')) \
            if not args.play else ''
        n = 1
        for i in song_ids:
            songs = self.get_song(i)
            self.download(songs, amount_songs, n)
            self.html = ''
            self.disc_description_archives = {}
            n += 1
    def download_artist_radio(self):
        """Endlessly download/play the artist radio stream for self.artist_id.

        Radio has no natural end: each loop iteration requests a fresh
        batch of song ids from the radio XML feed.
        """
        html = self._request(url_artist_top_song % self.artist_id).text
        artist_name = re.search(
                r'<p><a href="/artist/\w+">(.+?)<', html).group(1)
        d = modificate_text(artist_name + u' - radio')
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        url_artist_radio = "http://www.xiami.com/radio/xml/type/5/id/%s" \
            % self.artist_id
        n = 1
        while True:
            xml = self._request(url_artist_radio).text
            song_ids = re.findall(r'<song_id>(\d+)', xml)
            for i in song_ids:
                songs = self.get_song(i)
                self.download(songs, n=n)
                self.html = ''
                self.disc_description_archives = {}
                n += 1
def download_user_songs(self, url, desc):
dir_ = os.path.join(os.getcwdu(),
u'虾米用户 %s %s' % (self.user_id, desc))
self.dir_ = modificate_file_name_for_wget(dir_)
ii = 1
n = 1
while True:
html = self._request(url % (self.user_id, str(ii))).text
song_ids = re.findall(r'/song/(.+?)"', html)
if song_ids:
for i in song_ids:
songs = self.get_song(i)
self.download(songs, n)
self.html = ''
self.disc_description_archives = {}
n += 1
else:
break
ii += 1
    def download_user_shares(self, url_shares):
        """Download the songs/albums a user shared, walking the feed pages."""
        d = modificate_text(u'%s 的分享' % self.user_id)
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        page = 1
        while True:
            html = self._request(url_shares % page).text
            shares = re.findall(r'play.*\(\'\d+\'\)', html)
            for share in shares:
                # Shares may point at a whole album or a single song.
                if 'album' in share:
                    self.album_id = re.search(r'\d+', share).group()
                    self.download_album()
                else:
                    self.song_id = re.search(r'\d+', share).group()
                    self.download_song()
            if not shares: break
            page += 1
    def download_ranking_songs(self, url, tp):
        """Download a user's most-played songs; *tp* is 'week' or 'all'."""
        d = modificate_text(u'%s 的试听排行 - %s' % (self.user_id, tp))
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        page = 1
        n = 1
        while True:
            html = self._request(url + '/page/' + str(page)).text
            song_ids = re.findall(r"play\('(\d+)'", html)
            if not song_ids:
                break
            for song_id in song_ids:
                songs = self.get_song(song_id)
                self.download(songs, n=n)
                self.html = ''
                self.disc_description_archives = {}
                n += 1
            page += 1
    def download_user_radio(self, url_rndsongs):
        """Endlessly download/play a user's personalized radio stream."""
        d = modificate_text(u'%s 的虾米推荐' % self.user_id)
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        n = 1
        # Radio never ends: keep requesting fresh batches of song ids.
        while True:
            xml = self._request(url_rndsongs % self.user_id).text
            song_ids = re.findall(r'<song_id>(\d+)', xml)
            for i in song_ids:
                songs = self.get_song(i)
                self.download(songs, n=n)
                self.html = ''
                self.disc_description_archives = {}
                n += 1
    def download_chart(self, type_):
        """Download up to 200 songs of chart self.chart_id (variant *type_*)."""
        html = self._request('http://www.xiami.com/chart/index/c/%s' \
            % self.chart_id).text
        # The page <title> names the output directory.
        title = re.search(r'<title>(.+?)</title>', html).group(1)
        d = modificate_text(title)
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        html = self._request(
                'http://www.xiami.com/chart/data?c=%s&limit=200&type=%s' \
            % (self.chart_id, type_)).text
        song_ids = re.findall(r'/song/(\d+)', html)
        n = 1
        for i in song_ids:
            songs = self.get_song(i)
            self.download(songs, n=n)
            self.html = ''
            self.disc_description_archives = {}
            n += 1
def download_genre(self, url_genre):
html = self._request(url_genre % (self.genre_id, 1)).text
if '/gid/' in url_genre:
t = re.search(
r'/genre/detail/gid/%s".+?title="(.+?)"' \
% self.genre_id, html).group(1)
elif '/sid/' in url_genre:
t = re.search(
r'/genre/detail/sid/%s" title="(.+?)"' \
% self.genre_id, html).group(1)
d = modificate_text(u'%s - 代表曲目 - xiami' % t)
dir_ = os.path.join(os.getcwdu(), d)
self.dir_ = modificate_file_name_for_wget(dir_)
n = 1
page = 2
while True:
song_ids = re.findall(r'/song/(\d+)', html)
if not song_ids: break
for i in song_ids:
songs = self.get_song(i)
self.download(songs, n=n)
self.html = ''
self.disc_description_archives = {}
n += 1
html = self._request(url_genre % (self.chart_id, page)).text
page += 1
    def download_genre_radio(self, url_genre):
        """Endlessly download/play the radio stream of a genre.

        Radio feed type 12 is used for top-level genres (gid urls) and
        type 13 for sub-genres (sid urls).
        """
        html = self._request(url_genre % (self.genre_id, 1)).text
        if '/gid/' in url_genre:
            t = re.search(
                    r'/genre/detail/gid/%s".+?title="(.+?)"' \
                        % self.genre_id, html).group(1)
            url_genre_radio = "http://www.xiami.com/radio/xml/type/12/id/%s" \
                % self.genre_id
        elif '/sid/' in url_genre:
            t = re.search(
                    r'/genre/detail/sid/%s" title="(.+?)"' \
                        % self.genre_id, html).group(1)
            url_genre_radio = "http://www.xiami.com/radio/xml/type/13/id/%s" \
                % self.genre_id
        d = modificate_text(u'%s - radio - xiami' % t)
        dir_ = os.path.join(os.getcwdu(), d)
        self.dir_ = modificate_file_name_for_wget(dir_)
        n = 1
        while True:
            xml = self._request(url_genre_radio).text
            song_ids = re.findall(r'<song_id>(\d+)', xml)
            for i in song_ids:
                songs = self.get_song(i)
                self.download(songs, n=n)
                self.html = ''
                self.disc_description_archives = {}
                n += 1
def hack_luoo(self, url):
# parse luoo.net
theaders = headers
theaders.pop('Referer')
r = requests.get(url)
if not r.ok:
return None
cn = r.content
songs_info = re.findall(r'<p class="name">(.+?)</p>\s+'
r'<p class="artist">(?:Artist:|艺人:)(.+?)</p>\s+'
r'<p class="album">(?:Album:|专辑:)(.+?)</p>', cn)
# search song at xiami
for name, artist, album in songs_info:
name = name.strip()
artist = artist.strip()
album = album.strip()
songs = self._api.search_songs(name + ' ' + artist)
if not songs:
print s % (1, 93, ' !! no find:'), ' - '.join([name, artist, album])
continue
self.make_file_name(songs[0])
self.download(songs[:1], n=1)
    def display_infos(self, i, nn, n, durl):
        """Pretty-print one song's metadata (index n of nn) before playback."""
        length = datetime.datetime.fromtimestamp(i['length']).strftime('%M:%S')
        print n, '/', nn
        print s % (2, 94, i['file_name'])
        print s % (2, 95, i['album_name'])
        print s % (2, 93, length)
        print 'http://www.xiami.com/song/%s' % i['song_id']
        print 'http://www.xiami.com/album/%s' % i['album_id']
        print durl
        if i['durl_is_H'] == 'h':
            print s % (1, 97, 'MP3-Quality:'), s % (1, 92, 'High')
        else:
            print s % (1, 97, 'MP3-Quality:'), s % (1, 91, 'Low')
        # Horizontal rule as wide as the terminal.
        print '—' * int(os.popen('tput cols').read())
def get_mp3_quality(self, durl):
if 'm3.file.xiami.com' in durl \
or 'm6.file.xiami.com' in durl \
or '_h.mp3' in durl \
or 'm320.xiami.net' in durl:
return 'h'
else:
return 'l'
    def play(self, songs, nn=u'1', n=1):
        """Stream *songs* with mpv, one after another.

        With args.play == 2 the most-played songs go first.  After each
        track the user gets 1 second to press a key (stdin becomes
        readable) to quit.
        """
        if args.play == 2:
            songs = sorted(songs, key=lambda k: k['play_count'], reverse=True)
        for i in songs:
            self.record(i['song_id'], i['album_id'])
            durl = self.get_durl(i['song_id'])
            if not durl:
                print s % (2, 91, ' !! Error: can\'t get durl'), i['song_name']
                continue
            cookies = '; '.join(['%s=%s' % (k, v) for k, v in ss.cookies.items()])
            mp3_quality = self.get_mp3_quality(durl)
            i['durl_is_H'] = mp3_quality
            self.display_infos(i, nn, n, durl)
            n = int(n) + 1
            # mpv must present the flash-player Referer and session cookie
            # or the CDN refuses the stream.
            cmd = 'mpv --really-quiet ' \
                '--cache 8146 ' \
                '--user-agent "%s" ' \
                '--http-header-fields "Referer: http://img.xiami.com' \
                '/static/swf/seiya/1.4/player.swf?v=%s",' \
                '"Cookie: %s" ' \
                '"%s"' \
                % (headers['User-Agent'], int(time.time()*1000), cookies, durl)
            os.system(cmd)
            # Give the user 1 s to hit Enter to stop the whole playlist.
            timeout = 1
            ii, _, _ = select.select([sys.stdin], [], [], timeout)
            if ii:
                sys.exit(0)
            else:
                pass
    def download(self, songs, amount_songs=u'1', n=1):
        """Fetch each song in *songs* with wget and ID3-tag the result.

        *amount_songs* is the display total, *n* the running index.
        Existing files are skipped (re-tagged only with --undownload).
        Any wget failure aborts the whole process with its exit status.
        """
        dir_ = modificate_file_name_for_wget(self.dir_)
        cwd = os.getcwd()
        if dir_ != cwd:
            if not os.path.exists(dir_):
                os.mkdir(dir_)
        ii = 1
        for i in songs:
            # Random ANSI color for the progress line.
            num = random.randint(0, 100) % 8
            col = s % (2, num + 90, i['file_name'])
            t = modificate_file_name_for_wget(i['file_name'])
            file_name = os.path.join(dir_, t)
            if os.path.exists(file_name): ## if file exists, no get_durl
                if args.undownload:
                    self.modified_id3(file_name, i)
                    ii += 1
                    n += 1
                    continue
                else:
                    ii += 1
                    n += 1
                    continue
            if not args.undownload:
                if n == None:
                    print(u'\n ++ download: #%s/%s# %s' \
                        % (ii, amount_songs, col))
                else:
                    print(u'\n ++ download: #%s/%s# %s' \
                        % (n, amount_songs, col))
                n += 1
            durl = self.get_durl(i['song_id'])
            if not durl:
                print s % (2, 91, ' |-- Error: can\'t get durl')
                continue
            mp3_quality = self.get_mp3_quality(durl)
            if mp3_quality == 'h':
                print ' |--', s % (1, 97, 'MP3-Quality:'), s % (1, 91, 'High')
            else:
                print ' |--', s % (1, 97, 'MP3-Quality:'), s % (1, 91, 'Low')
            cookies = '; '.join(['%s=%s' % (k, v) for k, v in ss.cookies.items()])
            file_name_for_wget = file_name.replace('`', '\`')
            quiet = ' -q' if args.quiet else ' -nv'
            # Download to '<name>.tmp' first; rename only after success so
            # interrupted downloads are never mistaken for complete files.
            cmd = 'wget -c%s ' \
                '-U "%s" ' \
                '--header "Referer:http://img.xiami.com' \
                '/static/swf/seiya/1.4/player.swf?v=%s" ' \
                '--header "Cookie: member_auth=%s" ' \
                '-O "%s.tmp" %s' \
                % (quiet, headers['User-Agent'], int(time.time()*1000), cookies, file_name_for_wget, durl)
            cmd = cmd.encode('utf8')
            status = os.system(cmd)
            if status != 0: # other http-errors, such as 302.
                wget_exit_status_info = wget_es[status]
                print('\n\n ----### \x1b[1;91mERROR\x1b[0m ==> \x1b[1;91m%d ' \
                    '(%s)\x1b[0m ###--- \n\n' % (status, wget_exit_status_info))
                print s % (1, 91, ' ===> '), cmd
                sys.exit(1)
            else:
                os.rename('%s.tmp' % file_name, file_name)
            self.modified_id3(file_name, i)
            ii += 1
            # Throttle to avoid hammering the server.
            time.sleep(5)
def _save_do(self, id_, type, tags):
data = {
"tags": tags,
"type": type,
"id": id_,
"desc": "",
"grade": "",
"share": 0,
"shareTo": "all",
"_xiamitoken": ss.cookies['_xiamitoken'],
}
url = 'https://www.xiami.com/ajax/addtag'
r = self._request(url, data=data, method='POST')
j = r.json()
if j['status'] == 'ok':
return 0
else:
return j['status']
    def save(self, urls):
        """Mark each url's item on xiami as saved, tagged with args.tags.

        The numeric type codes passed to _save_do are: 1 user, 3 song,
        4 collect, 5 album, 6 artist.
        """
        tags = args.tags
        for url in urls:
            if '/collect/' in url:
                collect_id = re.search(r'/collect/(\w+)', url).group(1)
                print s % (1, 97, u'\n ++ save collect:'), \
                    'http://www.xiami.com/song/collect/' + collect_id
                result = self._save_do(collect_id, 4, tags)
            elif '/album/' in url:
                album_id = re.search(r'/album/(\w+)', url).group(1)
                # Resolve a possibly alphanumeric slug to the numeric id.
                album = self._api.album(album_id)
                album_id = album[0].album_id
                print s % (1, 97, u'\n ++ save album:'), \
                    'http://www.xiami.com/album/' + str(album_id)
                result = self._save_do(album_id, 5, tags)
            elif '/artist/' in url:
                artist_id = re.search(r'/artist/(\w+)', url).group(1)
                print s % (1, 97, u'\n ++ save artist:'), \
                    'http://www.xiami.com/artist/' + artist_id
                result = self._save_do(artist_id, 6, tags)
            elif '/song/' in url:
                song_id = re.search(r'/song/(\w+)', url).group(1)
                song = self._api.song(song_id)
                song_id = song.song_id
                print s % (1, 97, u'\n ++ save song:'), \
                    'http://www.xiami.com/song/' + str(song_id)
                result = self._save_do(song_id, 3, tags)
            elif '/u/' in url:
                user_id = re.search(r'/u/(\d+)', url).group(1)
                print s % (1, 97, u'\n ++ save user:'), \
                    'http://www.xiami.com/u/' + user_id
                result = self._save_do(user_id, 1, tags)
            else:
                result = -1
                print(s % (2, 91, u' 请正确输入虾米网址.'))
            if result == 0:
                print s % (1, 92, ' ++ success.\n')
            else:
                print s % (1, 91, ' !! Error at _save_do.'), result, '\n'
def main(argv):
    """Entry point: dispatch argv[1] to login/signout/download/play/save.

    Remaining arguments are parsed with argparse into the module-global
    `args` that the downloader methods read.
    """
    if len(argv) < 2:
        sys.exit()
    ######################################################
    # for argparse
    p = argparse.ArgumentParser(description='downloading any xiami.com')
    p.add_argument('xxx', type=str, nargs='*', \
        help='命令对象.')
    p.add_argument('-p', '--play', action='count', \
        help='play with mpv')
    p.add_argument('-l', '--low', action='store_true', \
        help='low mp3')
    p.add_argument('-q', '--quiet', action='store_true', \
        help='quiet for download')
    p.add_argument('-f', '--from_', action='store', \
        default=1, type=int, \
        help='从第几个开始下载,eg: -f 42')
    p.add_argument('-d', '--undescription', action='store_true', \
        help='no add disk\'s distribution')
    p.add_argument('-t', '--tags', action='store', \
        type=str, default='', help='tags. eg: piano,cello')
    p.add_argument('-n', '--undownload', action='store_true', \
        help='no download, using to renew id3 tags')
    global args
    args = p.parse_args(argv[2:])
    comd = argv[1]
    xxx = args.xxx
    if comd == 'login' or comd == 'g':
        # or comd == 'logintaobao' or comd == 'gt':
        # taobao has updated login algorithms which is hard to hack
        # so remove it.
        # NOTE(review): `email` is collected here but never used below.
        if len(xxx) < 1:
            email = raw_input(s % (1, 97, ' username: ') \
                if comd == 'logintaobao' or comd == 'gt' \
                else s % (1, 97, ' email: '))
            cookies = getpass(s % (1, 97, ' cookies: '))
        elif len(xxx) == 1:
            # for add_member_auth
            if '; ' in xxx[0]:
                email = None
                cookies = xxx[0]
            else:
                email = xxx[0]
                cookies = getpass(s % (1, 97, ' cookies: '))
        elif len(xxx) == 2:
            email = xxx[0]
            cookies = xxx[1]
        else:
            msg = ('login: \n'
                   'login cookies')
            print s % (1, 91, msg)
            return
        x = xiami()
        x.add_cookies(cookies)
        is_signin = x.check_login()
        if is_signin:
            print s % (1, 92, ' ++ login succeeds.')
        else:
            print s % (1, 91, ' login failes')
    elif comd == 'signout':
        # Truncate the cookie file to drop the session.
        g = open(cookie_file, 'w')
        g.close()
    elif comd == 'd' or comd == 'download':
        urls = xxx
        x = xiami()
        x.init()
        x.url_parser(urls)
    elif comd == 'p' or comd == 'play':
        if not args.play: args.play = 1
        urls = xxx
        x = xiami()
        x.init()
        x.url_parser(urls)
    elif comd == 's' or comd == 'save':
        urls = xxx
        x = xiami()
        x.init()
        x.save(urls)
    else:
        print s % (2, 91, u' !! 命令错误\n')
if __name__ == '__main__':
    # Forward the full command line straight to the dispatcher.
    main(sys.argv)
| mit | 8,859,938,585,418,477,000 | 35.344532 | 177 | 0.472513 | false |
ctsit/redcap_deployment | utility_redcap.py | 1 | 4631 | from fabric.api import *
from tempfile import mkstemp
import os
import utility
try:
import configparser
except:
from six.moves import configparser
__all__ = []
def get_current_redcap_version():
    """
    gets the current redcap version from database

    Runs a silent mysql query as the deploy user and returns the value of
    the 'redcap_version' row of redcap_config as a string.
    """
    with settings(user=env.deploy_user):
        with hide('output'):
            current_version = run('mysql -s -N -e "SELECT value from redcap_config WHERE field_name=\'redcap_version\'"')
    return current_version
def make_upload_target():
    """
    Make the directory from which new software will be deployed,
    e.g., /var/www.backup/redcap-20160117T1543/
    """
    # Remember the target path for the later deployment steps
    # (upload_package_and_extract, move_software_to_live).
    env.upload_target_backup_dir = '/'.join([env.upload_project_full_path, env.remote_project_name])
    with settings(user=env.deploy_user):
        run("mkdir -p %(upload_target_backup_dir)s" % env)
def upload_package_and_extract(name, upgrade=False):
    """
    Upload the redcap package and extract it into the directory from which new
    software will be deployed, e.g., /var/www.backup/redcap-20160117T1543/

    *name* is the local tarball path.  With upgrade=True, site-specific
    files (database.php, hook_functions.php) already on the server are
    preserved.
    """
    # NOTE: run as $ fab <env> package make_upload_target upe ...necessary env
    # variables are set by package and make_upload_target functions
    with settings(user=env.deploy_user):
        # Make a temp folder to upload the tar to
        temp1 = run('mktemp -d')
        put(name, temp1)
        # Test where temp/'receiving' is
        temp2 = run('mktemp -d')
        # Extract in temp ... -C specifies what directory to extract to
        # Extract to temp2 so the tar is not included in the contents
        run('tar -xzf %s/%s -C %s' % (temp1, name, temp2))
        # Transfer contents from temp2/redcap to ultimate destination
        with settings(warn_only=True):
            # The bundled unifont files can be read-only; loosen them first.
            if run('test -d %s/webtools2/pdf/font/unifont' % env.upload_target_backup_dir).succeeded:
                run('chmod ug+w %s/webtools2/pdf/font/unifont/*' % env.upload_target_backup_dir)
        # Write the new code on top of the existing code
        if upgrade == False:
            run('rsync -rc %s/redcap/* %s' % (temp2, env.upload_target_backup_dir))
        else:
            # exclude some files during upgrades
            exclusions = "--exclude=database.php --exclude=hook_functions.php"
            run('rsync -rc %s %s/redcap/* %s' % (exclusions, temp2, env.upload_target_backup_dir))
        # make sure the temp file directory in redcap web space will be writeable
        run('chmod -R g+w %s/temp' % env.upload_target_backup_dir)
        # Remove the temp directories
        run('rm -rf %s %s' % (temp1, temp2))
def move_software_to_live():
    """
    Replace the symbolic link to the old code with symbolic link to new code.

    The current live tree is first copied to '<target>-previous' as an
    on-the-fly backup before being removed and re-linked.
    """
    with settings(user=env.deploy_user):
        with settings(warn_only=True):
            if run("test -d %(live_project_full_path)s" % env).succeeded:
                # we need to back this directory up on the fly, destroy it and then symlink it back into existence
                with settings(warn_only=False):
                    new_backup_dir = env.upload_target_backup_dir + "-previous"
                    run("mkdir -p %s" % new_backup_dir)
                    # -P preserves symlinks instead of following them.
                    run("cp -rf -P %s/* %s" % (env.live_project_full_path, new_backup_dir))
                    run("rm -rf %s" % env.live_project_full_path)
        # now switch the new code to live
        run('ln -s %s %s' % (env.upload_target_backup_dir,env.live_project_full_path))
        # update directory permissions
        run('chmod 775 %s/modules' %env.upload_target_backup_dir)
def set_redcap_base_url():
    """
    Set the REDCap base url

    Writes env.url_of_deployed_app into the redcap_config table.
    """
    set_redcap_config('redcap_base_url', env.url_of_deployed_app)
def set_redcap_config(field_name="", value=""):
    """
    Update a single values in the redcap config table

    NOTE(review): field_name and value are interpolated directly into the
    SQL string with no escaping; callers must pass trusted values only.
    """
    with settings(user=env.deploy_user):
        run('echo "update redcap_config set value=\'%s\' where field_name = \'%s\';" | mysql' % (value, field_name))
def test(warn_only=False):
    """
    Run all tests against a running REDCap instance

    Returns True when the suite passes.  On failure either warns and
    returns False (warn_only=True) or aborts the fab run (warn_only=False).
    """
    utility.write_remote_my_cnf()
    version = get_current_redcap_version()
    utility.delete_remote_my_cnf()
    # Run the suite once, inside warn_only, so a failure reaches the
    # handling below.  (Previously the suite was also run unconditionally
    # first: on failure fabric aborted before the warn_only logic could
    # run, and on success the tests executed twice.)
    with settings(warn_only=True):
        if local("python tests/test.py %s/ redcap_v%s/" % (env.url_of_deployed_app,version)).failed:
            if warn_only:
                warn("One or more tests failed.")
                return(False)
            else:
                abort("One or more tests failed.")
        else:
            return(True)
| bsd-3-clause | -1,521,481,576,971,571,200 | 37.915966 | 121 | 0.616929 | false |
taxipp/ipp-macro-series-parser | ipp_macro_series_parser/denombrements_fiscaux/denombrements_parsers.py | 1 | 29920 |
import logging
import numpy
import os
import pandas
import pkg_resources
import re
from ipp_macro_series_parser.config import Config
# Directory paths for the raw XLS inputs and HDF outputs, read from the
# project-level configuration file.
config_parser = Config()
xls_directory = config_parser.get('data', 'denombrements_fiscaux_xls')
hdf_directory = config_parser.get('data', 'denombrements_fiscaux_hdf')
# Module-level logger named after this module.
log = logging.getLogger(__name__)
def parse_ipp_denombrements():
file_path = os.path.join(xls_directory, u'Agrégats IPP - Données fiscales.xls')
    def parse_bloc(name = None, sheetname = '2042-montant', skiprows = 0, parse_cols = None, slice_start = None,
            slice_end = None, prefix = ''):
        """Read one labelled block of the 2042 workbook into a DataFrame.

        Returns (name, df) where df's first column is 'year' and the other
        column names are lower-cased and prefixed (e.g. 'f1aj').  When the
        numeric conversion fails, (name, df) is returned with the columns
        left unconverted.
        """
        assert name is not None
        df = pandas.read_excel(
            file_path,
            na_values = '-',
            sheetname = sheetname,
            skiprows = skiprows,
            parse_cols = parse_cols).iloc[slice_start:slice_end]
        df.columns = ['year'] + (prefix + df.columns[1:].str.lower()).tolist()
        try:
            # NOTE(review): convert_objects is deprecated in modern pandas
            # (use to_numeric/astype); kept here for the pinned version.
            df = df.convert_objects(convert_numeric=True)
            df = df.astype(float)
            df.year = df.year.astype(int)
        except Exception as e:
            print(e)
            return name, df
        return name, df
# Fiche principale
# 1 - Traitements, salaire, prime pour l'emploi, pensions et rentes
traitements_salaires = dict(
name = 'traitements_salaires',
sheetname = '2042-montant',
skiprows = 4,
parse_cols = 'A:AB',
slice_start = 1,
slice_end = 18,
prefix = 'f1',
)
prime_emploi = dict(
name = 'prime_emploi',
sheetname = '2042-montant',
skiprows = 25,
parse_cols = 'A:K',
slice_start = 1,
slice_end = 17,
prefix = 'f1',
)
pension_retraite = dict(
name = 'pension_retraite',
sheetname = '2042-montant',
skiprows = 46,
parse_cols = 'A:M',
slice_start = 1,
slice_end = 18,
prefix = 'f1',
)
rentes_viageres_titre_onereux = dict(
name = 'rentes_viageres_titre_onereux',
sheetname = '2042-montant',
skiprows = 68,
parse_cols = 'A:E',
slice_start = 1,
slice_end = 17,
prefix = 'f1',
)
# 2 - Revenus des valeurs et capitaux mobiliers
prelevement_forfaitaire_liberatoire = dict(
name = 'prelevement_forfaitaire_liberatoire',
sheetname = '2042-montant',
skiprows = 89,
parse_cols = 'A:D',
slice_start = 1,
slice_end = 18,
prefix = 'f2',
)
revenus_avec_abattement = dict(
name = 'revenus_avec_abattement',
sheetname = '2042-montant',
skiprows = 111,
parse_cols = 'A:E',
slice_start = 1,
slice_end = 18,
prefix = 'f2',
)
revenus_sans_abattement = dict(
name = 'revenus_sans_abattement',
sheetname = '2042-montant',
skiprows = 133,
parse_cols = 'A:D',
slice_start = 1,
slice_end = 18,
prefix = 'f2',
)
autres_revenus_financiers = dict(
name = 'autres_revenus_financiers',
sheetname = '2042-montant',
skiprows = 154,
parse_cols = 'A:I',
slice_start = 1,
slice_end = 18,
prefix = 'f2',
)
# 3 - Plus values et gains taxables à 16% (18% à partir de 2008)
plus_values = dict(
name = 'plus_values',
sheetname = '2042-montant',
skiprows = 199,
parse_cols = 'A:C',
slice_start = 1,
slice_end = 19,
prefix = 'f3',
)
# 4 - Revenus fonciers
# TODO: copier coller d'une note
# Pour les dénombrements de 96 à 2001, on ne connait plus le détail des différents déficits mais seulement total
# agrégé (case total def)
# Comme les parts des différents déficits sur le déficit total est pratiquement constant dans le temps, on assume
# donc que la répartition du déficit total entre les différents déficits est constant entre 96 et 2001 et égal à son
# niveau de 2003
# TODO: virer 2012 à 2014 ?
revenus_fonciers = dict(
name = 'revenus_foncier',
sheetname = '2042-montant',
skiprows = 222,
parse_cols = 'A:H',
slice_start = 1,
slice_end = 20,
prefix = 'f3',
)
contribution_revenus_locatifs = dict(
name = 'contribution_revenus_locatifs',
sheetname = '2042-montant',
skiprows = 246,
parse_cols = 'A:C',
slice_start = 1,
slice_end = 18,
prefix = 'f4',
)
# 5- Revenus exceptionnels ou différés
revenus_exceptionnels = dict(
name = 'revenus_exceptionnels',
sheetname = '2042-montant',
skiprows = 268,
parse_cols = 'A:B',
slice_start = 1,
slice_end = 19,
prefix = 'f5',
)
# 6- Charges déductibles et imputations diverses
charges_deductibles = dict(
name = 'charges_deductibles',
sheetname = '2042-montant',
skiprows = 316,
parse_cols = 'A:I',
slice_start = 1,
slice_end = 19,
prefix = 'f6',
)
epargne_retraite = dict(
name = 'epargne_retraite',
sheetname = '2042-montant',
skiprows = 338,
parse_cols = 'A:O',
slice_start = 1,
slice_end = 18,
prefix = 'f6',
)
# 7- Charges ouvrant droit à réduction ou à crédit d'impôt
reductions_credits_impot = dict(
name = 'reductions_credits_impot',
sheetname = '2042-montant',
skiprows = 360,
parse_cols = 'A:BH',
slice_start = 1,
slice_end = 18,
prefix = 'f7',
)
# 8- Autres imputations, reprises de réductions d'impôt, conventions internationales, divers
autres_imputations = dict(
name = 'autres_imputations',
sheetname = '2042-montant',
skiprows = 383,
parse_cols = 'A:L',
slice_start = 1,
slice_end = 18,
prefix = 'f7',
)
# Fiche complémentaire
# 1- Gains de levée d'options
options = dict(
name = 'options',
sheetname = '2042C - montant',
skiprows = 5,
parse_cols = 'A:I',
slice_start = 0,
slice_end = 17,
prefix = 'f1',
)
name, df = parse_bloc(**options)
df.dtypes
df.year
# salaires exonérés
salaires_exoneres = dict(
name = 'salaires_exoneres',
sheetname = '2042C - montant',
skiprows = 26,
parse_cols = 'A:I',
slice_start = 0,
slice_end = 17,
prefix = 'f1',
)
# crédit d'impôt mobilité
# TODO; nothing in agrégats IPP
# 3- Plus-values et gains divers
plus_values_complementaire = dict(
name = 'plus_values_complementaire',
sheetname = '2042C - montant',
skiprows = 67,
parse_cols = 'A:T',
slice_start = 0,
slice_end = 17,
prefix = 'f3',
)
# 4- Revenus fonciers
revenus_fonciers_complementaire = dict(
name = 'revenus_fonciers_complementaire',
sheetname = '2042C - montant',
skiprows = 88,
parse_cols = 'A:B',
slice_start = 0,
slice_end = 17,
prefix = 'f4',
)
# 5- Revenus et plus-values des professions non salariées
prime_emploi_complementaire = dict(
name = 'prime_emploi_complementaire',
sheetname = '2042C - montant',
skiprows = 111,
parse_cols = 'A:G',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
revenus_agricoles_forfait = dict(
name = 'revenus_agricoles_forfait',
sheetname = '2042C - montant',
skiprows = 167,
parse_cols = 'A:Q',
slice_start = 0,
slice_end = 18,
prefix = 'f5',
)
revenus_agricoles_reel = dict(
name = 'revenus_agricoles_reel',
sheetname = '2042C - montant',
skiprows = 190,
parse_cols = 'A:Y',
slice_start = 0,
slice_end = 18,
prefix = 'f5',
)
revenus_agricoles_deficits = dict(
name = 'revenus_agricoles_deficits',
sheetname = '2042C - montant',
skiprows = 212,
parse_cols = 'A:M',
slice_start = 1,
slice_end = 18,
prefix = 'f5',
)
# TODO: *Avant 2007, les cases HE, IE, JE étaient séparé en deux (cases HE et HK,…,JE et JK) en fonction de
# l'appartenance ou non à un CGA
# Revenus industriels et commerciaux professionnels
bic_pro_micro_entreprise = dict(
name = 'bic_pro_micro_entreprise',
sheetname = '2042C - montant',
skiprows = 237,
parse_cols = 'A:U',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
bic_pro_reel = dict(
name = 'bic_pro_reel',
sheetname = '2042C - montant',
skiprows = 282,
parse_cols = 'A:AE',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
# TODO
# Pour les revenus de 1997, il n'y a pas de distinction entre les BIC professionnels et les BIC non professionnels.
# On choisit de mettre les "BIC exonérés" dans cette case (et de ne rien mettre dans la case NB associée aux BIC
# non professionnels exonérés).
bic_pro_cga = dict(
name = 'bic_pro_cga',
sheetname = '2042C - montant',
skiprows = 304,
parse_cols = 'A:G',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
bic_non_pro_micro_entreprise = dict(
name = 'bic_non_pro_micro_entreprise',
sheetname = '2042C - montant',
skiprows = 328,
parse_cols = 'A:T',
slice_start = 0,
slice_end = 18,
prefix = 'f5',
)
bic_non_pro_reel = dict(
name = 'bic_non_pro_reel',
sheetname = '2042C - montant',
skiprows = 351,
parse_cols = 'A:AH',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
# Pour l'année 1997, on dispose d'un montant agrégé pour les BIC non professionneles et les BNC non professionnels,
# sans distinction non plus du régime d'imposition (simplifié, réel). Pour cette année, on met le montant agrégé
# dans la case NC pour les revenus et dans la case NF pour les déficits. Il s'agit des cases relatives aux BIC non
# professionnels imposés au régime réel.
bic_non_pro_deficit_anterieur = dict(
name = 'bic_non_pro_deficit_anterieur',
sheetname = '2042C - montant',
skiprows = 373,
parse_cols = 'A:G',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
# Revenus non commerciaux professionnels
bnc_pro_micro_vous = dict(
name = 'bnc_pro_micro_vous',
sheetname = '2042C - montant',
skiprows = 396,
parse_cols = 'A:P',
slice_start = 0,
slice_end = 18,
prefix = 'f5',
)
# *Avant 2007, la cases QD était séparé en deux (cases QD et QJ) en fonction de l'appartenance ou non à un AA
bnc_pro_micro_conj = dict(
name = 'bnc_pro_micro_conj',
sheetname = '2042C - montant',
skiprows = 417,
parse_cols = 'A:O',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
# *Avant 2007, la cases RD était séparé en deux (cases RD et RJ) en fonction de l'appartenance ou non à un AA
bnc_pro_micro_pac = dict(
name = 'bnc_pro_micro_pac',
sheetname = '2042C - montant',
skiprows = 437,
parse_cols = 'A:N',
slice_start = 0,
slice_end = 17,
prefix = 'f5',
)
# *Avant 2007, la cases SD était séparé en deux (cases SD et SJ) en fonction de l'appartenance ou non à un AA
# Revenus non commerciaux non professionnels
bnc_non_pro_vous = dict(
name = 'bnc_non_pro_vous',
sheetname = '2042C - montant',
skiprows = 482,
parse_cols = 'A:T',
slice_start = 1,
slice_end = 18,
prefix = 'f5',
)
# * Avant 2006, l'ensemble des variables de JG à MT ne concerne plus seulement le contribuable mais l'ensemble du
# foyer. Les cases JK à SW et LK à SX sont donc supprimées.
bnc_non_pro_conj = dict(
name = 'bnc_non_pro_conj',
sheetname = '2042C - montant',
skiprows = 502,
parse_cols = 'A:M',
slice_start = 1,
slice_end = 18,
prefix = 'f5',
)
bnc_non_pro_pac = dict(
name = 'bnc_non_pro_pac',
sheetname = '2042C - montant',
skiprows = 521,
parse_cols = 'A:M',
slice_start = 1,
slice_end = 18,
prefix = 'f5',
)
# Revenus accessoires
# TODO
# Revenus a imposer aux prelevements sociaux
revenus_prelevements_sociaux = dict(
name = 'revenus_prelevements_sociaux',
sheetname = '2042C - montant',
skiprows = 567,
parse_cols = 'A:I',
slice_start = 0,
prefix = 'f5',
slice_end = 17,
)
# 6- Charges et imputations diverses = charges à déduire du revenu
charges_imputations_diverses = dict(
name = 'charges_imputations_diverses',
sheetname = '2042C - montant',
skiprows = 587,
parse_cols = 'A:R',
slice_start = 2,
prefix = 'f5',
slice_end = 19,
)
# 3 Cette case EH (investissemencompte épargne co-developpement) n'a rien à voir avec la case EH colonne O
# (investissement DOM-TOM)
# 4 : Cette case était dans la déclaration 2042 avant 2007 (case somme à ajouter au revenu imposable)
# 7- Charges ouvrant droit à réduction ou à crédit d'impôt
reductions_credits_impot_complementaire = dict(
name = 'reductions_credits_impot_complementaire',
sheetname = '2042C - montant',
skiprows = 613,
parse_cols = 'A:BA',
slice_start = 2,
prefix = 'f5',
slice_end = 20,
)
    # 3 : les données brutes sont aberrantes pour l'année 2007, on voit par exemple 113 863 3, il manque donc deux zéros
    # derrière le 3. Et pour UA et UJ, j'ai rajouté 3 zéros derrière les nombres bruts pour avoir le bon rapport de
    # grandeur.
# * UI = Total réduction d'impôt Outre-mer Avant 2008 : la déclaration détaille les composantes des Ivt Outremer par
# secteur d'activité
# 8- Autres imputations, conventions internationales, crédits d'impôt entreprise
autres_imputations_complementaire = dict(
name = 'autres_imputations_complementaire',
sheetname = '2042C - montant',
skiprows = 639,
parse_cols = 'A:Z',
slice_start = 1,
prefix = 'f5',
slice_end = 20,
)
# name, df = parse_bloc(**autres_imputations_complementaire)
# print df.dtypes
# df.year
# 8- Autres imputations, conventions internationales, crédits d'impôt entreprise
blocs = [
traitements_salaires,
prime_emploi,
pension_retraite,
rentes_viageres_titre_onereux,
prelevement_forfaitaire_liberatoire,
revenus_avec_abattement,
revenus_sans_abattement,
autres_revenus_financiers,
plus_values,
revenus_fonciers,
contribution_revenus_locatifs,
revenus_exceptionnels,
charges_deductibles,
epargne_retraite,
reductions_credits_impot,
autres_imputations,
options,
salaires_exoneres,
plus_values_complementaire,
revenus_fonciers_complementaire,
prime_emploi_complementaire,
revenus_agricoles_forfait,
revenus_agricoles_reel,
revenus_agricoles_deficits,
bic_pro_micro_entreprise,
bic_pro_reel,
bic_pro_cga,
bic_non_pro_micro_entreprise,
bic_non_pro_reel,
bic_non_pro_deficit_anterieur,
bnc_pro_micro_vous,
bnc_pro_micro_conj,
bnc_pro_micro_pac,
bnc_non_pro_vous,
bnc_non_pro_conj,
bnc_non_pro_pac,
revenus_prelevements_sociaux,
charges_imputations_diverses,
reductions_credits_impot_complementaire,
autres_imputations_complementaire
]
data_frame_by_bloc_name = dict(parse_bloc(**bloc) for bloc in blocs)
correct_errors(data_frame_by_bloc_name, show_only = False)
ipp_denombrements = pandas.DataFrame()
for data_frame in data_frame_by_bloc_name.values():
ipp_denombrements = pandas.concat((
ipp_denombrements,
pandas.melt(data_frame, id_vars=['year'], var_name = 'code')
))
ipp_denombrements.dropna(inplace = True)
return ipp_denombrements
def correct_errors(data_frame_by_bloc_name, show_only = False):
    """Normalize the column names of every bloc DataFrame, in place.

    Valid columns are the 'year' column (checked to be integer years between
    1990 and 2014) and fiscal cell codes matching ``f[1-8][a-z][a-z]``.
    Any other column is recorded as problematic and, unless *show_only* is
    True, repaired when possible:

    - trailing whitespace is stripped;
    - a trailing '*' (footnote marker) is removed when the rest is a valid code;
    - columns containing 'unnamed', '-' or 'total' are dropped;
    - a trailing footnote digit 1-4 is removed (e.g. 'f1aa1' -> 'f1aa').

    The columns that could not be repaired are printed at the end.
    """
    import re
    valid_code = re.compile("^f[1-8][a-z][a-z]$")
    noted_code = re.compile("^f[1-8][a-z][a-z][1-4]$")
    fixed = set()
    suspects = set()
    for data_frame in data_frame_by_bloc_name.values():
        renames = {}
        to_drop = []
        for name in data_frame.columns:
            if name == 'year':
                # Sanity-check the year column instead of renaming it.
                assert numpy.issubdtype(data_frame[name].dtype, numpy.integer)
                assert data_frame[name].isin(range(1990, 2015)).all()
            elif not valid_code.match(name):
                suspects.add(name)
                stripped = name.strip()
                if stripped != name:
                    renames[name] = stripped
                if name.endswith('*') and valid_code.match(name[:-1]):
                    renames[name] = name[:-1]
                if 'unnamed' in name or '-' in name or 'total' in name:
                    to_drop.append(name)
                if noted_code.match(name):
                    renames[name] = name[:-1]
        fixed.update(renames)
        fixed.update(to_drop)
        if not show_only:
            data_frame.drop(labels = to_drop, axis = 1, inplace = True)
            data_frame.rename(columns = renames, inplace = True)
    print('Remaining problematic columns')
    print(suspects.difference(fixed))
def parse_openfisca_denombrements():
    """Load the OpenFisca 2042 national amounts and return them in long format.

    Reads the 'montant' sheet of ``2042_national.xls`` (one row per cell code,
    one column per year, as shown by the level_0/level_1 renaming below) and
    unpivots it into a DataFrame with columns 'code', 'year' (int) and 'value'.
    """
    openfisca_denombrements = pandas.read_excel(os.path.join(xls_directory, '2042_national.xls'), sheetname = 'montant')
    # Fix: numpy.float was a deprecated alias of the builtin float and was
    # removed in numpy 1.24; numpy.floating is the correct abstract scalar
    # type to test float dtypes against.
    assert openfisca_denombrements.dtypes.apply(lambda x: numpy.issubdtype(x, numpy.floating)).all(), \
        openfisca_denombrements.dtypes
    # Unpivot the (code x year) matrix into one (code, year, value) row per cell.
    openfisca_denombrements = openfisca_denombrements.stack().reset_index()
    openfisca_denombrements.rename(columns = {'level_0': 'code', 'level_1': 'year', 0: 'value'}, inplace = True)
    openfisca_denombrements[['year']] = openfisca_denombrements[['year']].astype(int)
    return openfisca_denombrements
def parse_dgfip_denombrements(years = None):
    """Parse the raw DGFiP 'D2042Nat' workbooks and return a long-format DataFrame.

    Parameters
    ----------
    years : list of int (mandatory despite the ``None`` default)
        Years of declaration to parse; must all lie in [2001, 2013].

    Returns
    -------
    pandas.DataFrame
        Contains at least 'code' (normalized to 'f' + lowercase cell code),
        'value' and 'year'; rows with missing or zero 'value' are excluded.

    NOTE(review): parsing for years 2005-2008 is unfinished and aborts on
    purpose (see the ``boum`` line below), and 2002 has no handler at all —
    confirm callers never request those years.
    """
    assert years is not None
    assert min(years) >= 2001
    assert max(years) <= 2013
    dgfip_directory = os.path.join(xls_directory, 'D2042Nat')
    files = os.listdir(dgfip_directory)
    result = pandas.DataFrame()
    for year in years:
        # Workbooks are named 'R20YY...'; pick the first file matching the year.
        # NOTE(review): if no file matches, `filename` silently keeps the value
        # from the previous iteration (or is unbound on the very first one) —
        # TODO confirm a matching file always exists for every requested year.
        file_regex = re.compile("^R20{}".format(str(year)[2:4]))
        for filename in files:
            if file_regex.match(filename):
                log.info("Using file {} for year {}".format(filename, year))
                break
        print(year)
        if year in [2001, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013]:
            dgfip_denombrements = pandas.read_excel(os.path.join(dgfip_directory, filename))
        if year == 2003:
            # The 2003 workbook has a 4-line header before the data.
            dgfip_denombrements = pandas.read_excel(os.path.join(dgfip_directory, filename), skiprows = 4)
        if year in [2001, 2003]:
            # Rows are keyed by a 'code' column such as '1AJ': keep only codes
            # matching digit + two capital letters, then rename them to the
            # OpenFisca convention ('1AJ' -> 'f1aj').
            regex = re.compile("^[0-9][A-Z]{2}")
            dgfip_denombrements.code.fillna("", inplace = True)
            dgfip_denombrements = dgfip_denombrements.set_index('code').filter(regex = regex, axis = 0)
            new_variable_name_by_old = dict(
                (x, "f{}".format(x.lower())) for x in dgfip_denombrements.index)
            dgfip_denombrements = dgfip_denombrements.rename(index = new_variable_name_by_old)
            dgfip_denombrements['year'] = year
            dgfip_denombrements.rename(columns = {'montant': 'value', 'Nombre': 'nombre'}, inplace = True)
            del dgfip_denombrements['nombre']
        # TODO:
        if year in [2005, 2006, 2007, 2008]:
            # continue
            # For these years the 'nom' column lacks the section digit ('AJ'
            # instead of '1AJ'), so the 'f' + lowercase renaming below produces
            # ambiguous codes; this branch is unfinished (see the commented
            # attempts just after the crash marker).
            regex = re.compile("[A-Z]{2}")
            dgfip_denombrements = dgfip_denombrements.set_index('nom').filter(regex = regex, axis = 0)
            dgfip_denombrements.index.name = 'code'
            new_variable_name_by_old = dict(
                (x, "f{}".format(x.lower())) for x in dgfip_denombrements.index)
            dgfip_denombrements = dgfip_denombrements.rename(index = new_variable_name_by_old)
            print(dgfip_denombrements)
            # Deliberate crash marker: `boum` is an undefined name, so reaching
            # this branch raises NameError and aborts the run.
            boum
            # find a way to rename the codes so that they carry the section number
            # dgfip_denombrements.rename(columns = {'nom': 'code'}, inplace = True)
            # for ind in dgfip_denombrements.index:
            #     if re.match("[A-Z][I, J, K, O, P, Q, S, V, W, X]", dgfip_denombrements.ix[ind]['code']):
            #         print dgfip_denombrements.ix[ind]['code']
            #         dgfip_denombrements.rename(
            #             {dgfip_denombrements.ix[ind]['code']: "1{}".format(dgfip_denombrements.ix[ind]['code'])})  # ,inplace = True
            #
            # or
            # dgfip_denombrements = dgfip_denombrements.filter(items = ['nom'], regex = regex)
            #
            # dgfip_denombrements['code'] = dgfip_denombrements['nom']
            # for ind in dgfip_denombrements.index:
            #     if re.match("[A-Z][I, J, K, O, P, Q, S, V, W, X]", dgfip_denombrements.ix[ind]['nom']):
            #         print dgfip_denombrements.ix[ind]['nom']
            #         dgfip_denombrements.ix[ind]['code'] = "1{}".format(dgfip_denombrements.ix[ind]['nom'])
            #
            # dgfip_denombrements = dgfip_denombrements.set_index('code').filter(regex = regex, axis = 0)
        if year == 2004:
            # In 2004 the codes live in a 'case' column prefixed with 'Z' plus
            # the section digit ('Z1AJ'); the leading 'Z' is stripped when
            # renaming ('Z1AJ' -> 'f1aj').
            regex = re.compile("^Z[0-9][A-Z]{2}")
            dgfip_denombrements.case.fillna("", inplace = True)
            dgfip_denombrements.drop_duplicates(['case'], inplace = True)
            dgfip_denombrements = dgfip_denombrements.set_index('case').filter(regex = regex, axis = 0)
            dgfip_denombrements.index.name = 'code'
            new_variable_name_by_old = dict(
                (x, "f{}".format(x[1:].lower())) for x in dgfip_denombrements.index)
            dgfip_denombrements = dgfip_denombrements.rename(index = new_variable_name_by_old)
            dgfip_denombrements.reset_index(inplace = True)
            dgfip_denombrements['year'] = year
            dgfip_denombrements.rename(columns = {'Montant': 'value'}, inplace = True)
            del dgfip_denombrements['Nombre'], dgfip_denombrements[u'libellé'], dgfip_denombrements['nom']
        if year in [2009, 2010, 2011, 2012]:
            # From 2009 on, the 'Z'-prefixed codes live in the 'nom' column;
            # the amount column header varies per year (handled below).
            regex = re.compile("^Z[0-9][A-Z]{2}")
            dgfip_denombrements = dgfip_denombrements.set_index('nom').filter(regex = regex, axis = 0)
            dgfip_denombrements.index.name = 'code'
            new_variable_name_by_old = dict(
                (x, "f{}".format(x[1:].lower())) for x in dgfip_denombrements.index)
            dgfip_denombrements = dgfip_denombrements.rename(index = new_variable_name_by_old)
            dgfip_denombrements.reset_index(inplace = True)
            dgfip_denombrements['year'] = year
            if year == 2009:
                dgfip_denombrements.rename(columns = {'Montants': 'value', 'Nombre': 'nombre'}, inplace = True)
            else:
                dgfip_denombrements.rename(columns = {'montants': 'value'}, inplace = True)
            del dgfip_denombrements['maximal'], dgfip_denombrements['nombre']
        if year == 2013:
            # Same layout as 2009-2012 but the amount column is named 'ano'.
            regex = re.compile("^Z[0-9][A-Z]{2}")
            dgfip_denombrements = dgfip_denombrements.set_index('nom').filter(regex = regex, axis = 0)
            dgfip_denombrements.index.name = 'code'
            new_variable_name_by_old = dict(
                (x, "f{}".format(x[1:].lower())) for x in dgfip_denombrements.index)
            dgfip_denombrements = dgfip_denombrements.rename(index = new_variable_name_by_old)
            dgfip_denombrements.reset_index(inplace = True)
            dgfip_denombrements['year'] = year
            dgfip_denombrements.rename(columns = {'ano': 'value'}, inplace = True)
            del dgfip_denombrements['pas_ano']
        result = pandas.concat((result, dgfip_denombrements))
    result.dropna(subset = ['value'], inplace = True)  # dropping NA's
    return result.loc[result.value != 0].copy()  # excluding 0 values
def create_denombrements_fiscaux_data_frame(year = None, years = None, overwrite = False):
    """
    Generates the table with all the data from Dénombrements Fiscaux.

    Parameters
    ----------
    year : int
        year of DGFIP data (coincides with year of declaration)
    years : list of integers
        list of years of interest. Optional.
    overwrite : bool
        if True, save the result to the 'denombrements_fiscaux.h5' HDF store.

    Returns
    -------
    (result, errors) : tuple of pandas.DataFrame
        `result` holds the dénombrements (currently DGFiP data only, see the
        final override below), `errors` the problematic duplicates found when
        merging with DGFiP data.

    Example
    --------
    >>> table_2013, errors = create_denombrements_fiscaux_data_frame(year = 2013)

    Returns the main table of dénombrements fiscaux for the year 2013.
    """
    # Fix: the docstring example used to call a nonexistent
    # `denombrements_fiscaux_df_generator` and did not document the tuple return.
    if year is not None and years is None:
        years = [year]
    log.info('Parsing dénombrements fiscaux raw data for the following years: {}'.format(years))
    # Data coming for openfisca xls file
    openfisca_denombrements = parse_openfisca_denombrements()
    openfisca_denombrements['origin'] = 'OF'
    # Data coming from IPP
    ipp_denombrements = parse_ipp_denombrements()
    ipp_denombrements['origin'] = 'IPP'
    df = pandas.concat([ipp_denombrements, openfisca_denombrements])
    # Drop real duplicates
    df = df.drop_duplicates(subset = ['year', 'code', 'value'])
    df = df.reset_index(drop=True)
    # Problematic duplicates: same (year, code) with different values
    dups = df.duplicated(['year', 'code']) | df.duplicated(['year', 'code'], keep = 'last')
    z = df.loc[dups].copy()
    # sum of two columns in IPP for year < 2007
    wrong_before_2007 = ['f5ne', 'f5oe', 'f5rd', 'f5ke', 'f5le', 'f5he', 'f5ie', 'f5qd']
    df = df.loc[~(df.code.isin(wrong_before_2007) & (df.year < 2007))]
    # Fix: log message typo ("roblematic" -> "problematic")
    log.info('Remaining problematic duplicates when merging IPP and OF \n {}'.format(
        z.loc[~(z.code.isin(wrong_before_2007) & (z.year < 2007))]
        ))
    df = df.loc[df.year.isin(years)].copy() if years is not None else df.copy()
    # Data coming from DGFiP
    dgfip_denombrements = parse_dgfip_denombrements(years)
    dgfip_denombrements['origin'] = 'DGFiP'
    df2 = pandas.concat([dgfip_denombrements, df])
    # Drop real duplicates
    df2 = df2.drop_duplicates(subset = ['year', 'code', 'value'])
    df2 = df2.reset_index(drop=True)
    dups2 = df2.duplicated(['year', 'code']) | df2.duplicated(['year', 'code'], keep = 'last')
    errors = df2.loc[dups2].copy()
    # Codes/years with known inconsistencies between the three sources
    wrong_codes = ['f5ne', 'f5oe', 'f5rd', 'f5ke', 'f5le', 'f4tq', 'f5hd',
        'f5id', 'f5he', 'f5ie', 'f5qd', 'f3ve', 'f3vf', 'f3ve', 'f3vf', 'f7tf', 'f7tf', 'f2gr', 'f2ch', 'f2bg', 'f6el',
        'f6st', 'f2bg', 'f7cd', 'f2gr', 'f2ch', 'f7cd', 'f6st', 'f6el']
    wrong_years = [2006, 2005, 2004, 2003]
    log.info('Remaining problematic duplicates when merging with DGFiP data \n {}'.format(
        errors.loc[~(errors.code.isin(wrong_codes) | errors.year.isin(wrong_years))]
        ))
    df2 = df2.loc[~(df2.code.isin(wrong_codes) | (df2.year.isin(wrong_years)))]
    result = df2.loc[df2.year.isin(years)].copy() if years is not None else df2.copy()
    log.info('For now, we keep only DGFiP data')
    result = dgfip_denombrements.copy()  # TODO: recoupement avec data OpenFisca & IPP
    if overwrite:
        save_df_to_hdf(result, 'denombrements_fiscaux.h5', 'montants')
    return result, errors
def build_section_code():
    """Return the array of unique cell codes found in the OpenFisca and IPP data."""
    openfisca_denombrements = parse_openfisca_denombrements()
    ipp_denombrements = parse_ipp_denombrements()
    # Bug fix: the concat used to take openfisca_denombrements.code twice, so
    # ipp_denombrements was parsed but its codes were never included.
    df = pandas.concat([openfisca_denombrements.code, ipp_denombrements.code])
    return df.unique()
def get_denombrements_fiscaux_data_frame(year = None, years = None, rebuild = False, overwrite = False,
        fill_value = numpy.nan):
    """Return the dénombrements fiscaux for the requested year(s).

    When *rebuild* is True, the table is recomputed from the raw sources (and
    persisted if *overwrite* is True); otherwise it is read back from the
    'denombrements_fiscaux.h5' store and filtered to the requested years.
    """
    if years is None and year is not None:
        years = [year]
    if rebuild:
        return create_denombrements_fiscaux_data_frame(years = years, overwrite = overwrite)
    data_frame = import_from_hdf('denombrements_fiscaux.h5', 'montants')
    return data_frame.loc[data_frame.year.isin(years)].copy()
def save_df_to_hdf(df, hdf_filename, key):
    """Persist *df* under *key* in an HDF5 store located in `hdf_directory`."""
    df.to_hdf(os.path.join(hdf_directory, hdf_filename), key)
def import_from_hdf(hdf_filename, key):
    """Read and return the DataFrame stored under *key* in `hdf_directory`/*hdf_filename*.

    Fix: the HDFStore is now used as a context manager so the file handle is
    closed before returning; the previous implementation left the store open
    (resource leak, and the file could stay locked).
    """
    file_path = os.path.join(hdf_directory, hdf_filename)
    with pandas.HDFStore(file_path) as store:
        return store[key]
if __name__ == '__main__':
    # Ad-hoc smoke test: build the list of known codes (return value is
    # discarded), then parse the DGFiP dénombrements for the single year 2008
    # (range(2008, 2009) == [2008]).
    # NOTE(review): the 2008 branch of parse_dgfip_denombrements is unfinished
    # and aborts on purpose — confirm this run is expected to stop there.
    build_section_code()
    dgfip = parse_dgfip_denombrements(years = range(2008, 2009))
    print(dgfip)
    # Full rebuild over 2009-2013, kept for reference:
    # denomb_fisc_all, errors = create_denombrements_fiscaux_data_frame(
    #     years = range(2009, 2014),
    #     overwrite = True,
    #     )
| gpl-3.0 | -7,274,436,793,316,057,000 | 33.142039 | 132 | 0.580588 | false |
johnnoone/zbx | zbx/io/defaults.py | 1 | 3539 | """
zbx.io.defaults
~~~~~~~~~~~~~~~
Defines all zabbix defaults
"""
__all__ = ['rules', 'RuleSet']
from abc import ABCMeta
from itertools import chain
from six import add_metaclass
@add_metaclass(ABCMeta)
class RuleSet(object):
def __init__(self, path, rules):
self.path = path
self.rules = rules
def __iter__(self):
try:
rules = self.rules.items()
except AttributeError:
rules = self.rules
for rule in rules:
try:
if isinstance(rule, RuleSet):
for endpath, value in rule:
yield self.format_path(endpath), value
else:
endpath, value = rule
yield self.format_path(endpath), value
except Exception:
raise
def format_path(self, path):
if self.path:
return '{}/{}'.format(self.path, path)
return path
def __add__(self, other):
return RuleSet(None, chain(self, other))
RuleSet.register(list)
def scope(path, rules):
return RuleSet(path, rules)
rules = scope('host', [
('ipmi_authtype', -1),
('ipmi_available', 0),
('ipmi_privilege', 2),
('ipmi_username', ''),
('ipmi_password', ''),
('maintenance_status', 0),
('snmp_available', 0),
('status', 0),
scope('inventory', [
('inventory_mode', 0)
]),
])
rules += scope('host_prototype', [
('ipmi_authtype', -1),
('ipmi_available', 0),
('ipmi_privilege', 2),
('maintenance_status', 0),
('snmp_available', 0),
('status', 0),
scope('inventory', [
('inventory_mode', 0)
]),
])
rules += scope('item', [
('authtype', 0),
('data_type', 0),
('delta', 0),
('formula', 1),
('history', 90),
('inventory_link', 0),
('state', 0),
('status', 0),
('trends', 365),
('units', ''),
('snmpv3_authprotocol', 0),
('snmpv3_privprotocol', 0),
('multiplier', 0),
])
rules += scope('screen', [
# ('hsize', 1),
('vsize', 1)
])
rules += scope('screen_item', [
('dynamic', 0),
('elements', 25),
('halign', 0),
('height', 200),
('sort_triggers', 0),
('style', 0),
('valign', 0),
('width', 320),
# ('x', 0),
# ('y', 0),
('colspan', 1),
('rowspan', 1),
])
rules += scope('action', [
('recovery_msg', 0),
('status', 0),
scope('condition', [
('operator', 0)
]),
scope('operation', [
('esc_period', 0),
('esc_step_from', 1),
('esc_step_to', 1),
('evaltype', 0),
]),
scope('message', [
('default_msg', 0)
]),
scope('operation', [
('operator', 0)
])
])
rules += scope('graph', [
('type', 0),
('percent_left', 0.),
('percent_right', 0.),
('show_3d', 0),
('show_legend', 1),
('show_work_period', 1),
('show_triggers', 1),
('yaxismax', 100.0),
('yaxismin', 0.0),
('ymax_type', 0),
('ymin_type', 0),
('ymin_item_1', 0),
('ymax_item_1', 0)
])
rules += scope('graph_item', [
('calc_fnc', 2),
('drawtype', 0),
('sortorder', 0),
('type', 0),
('yaxisside', 0)
])
rules += scope('trigger', [
('priority', 0),
('state', 0),
('status', 0),
('type', 0),
('value', 0)
])
rules += scope('discovery_rule', [
('authtype', 0),
('lifetime', 30),
('snmpv3_authprotocol', 0),
('snmpv3_privprotocol', 0),
('state', 0),
('status', 0),
])
| bsd-3-clause | 6,384,678,967,305,997,000 | 19.107955 | 62 | 0.473298 | false |
th3sys/capsule | push_items.py | 1 | 1667 | from __future__ import print_function # Python 2/3 compatibility
import boto3
import json
import uuid
import time
import decimal
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
sec_table = dynamodb.Table('Securities')
quotes_table = dynamodb.Table('Quotes.EOD')
with open("quotes.json") as json_file:
quotes = json.load(json_file, parse_float = decimal.Decimal)
for quote in quotes:
Symbol = quote['Symbol']
Date = str(time.time())
Source = quote['Source']
Details = quote['Details']
print("Adding quote:", Symbol, Date)
quotes_table.put_item(
Item={
'Symbol': Symbol,
'Date': Date,
'Source': Source,
'Details': Details
}
)
with open("securities.json") as json_file:
securities = json.load(json_file, parse_float = decimal.Decimal)
for security in securities:
Symbol = security['Symbol']
Broker = security['Broker']
ProductType = security['ProductType']
SubscriptionEnabled = bool(security['SubscriptionEnabled'])
TradingEnabled = bool(security['TradingEnabled'])
Description = security['Description']
Risk = security['Risk']
print("Adding security:", Symbol)
sec_table.put_item(
Item={
'Symbol': Symbol,
'Broker' : Broker,
'ProductType': ProductType,
'SubscriptionEnabled': SubscriptionEnabled,
'TradingEnabled':TradingEnabled,
'Description':Description,
'Risk':Risk
}
)
| mit | 7,072,675,700,951,598,000 | 28.767857 | 68 | 0.577684 | false |
skosukhin/spack | var/spack/repos/builtin/packages/clamr/package.py | 1 | 3013 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Clamr(CMakePackage):
"""The CLAMR code is a cell-based adaptive mesh refinement (AMR)
mini-app developed as a testbed for hybrid algorithm development
using MPI and OpenCL GPU code.
"""
homepage = "https://github.com/lanl/CLAMR"
url = "https://github.com/lanl/CLAMR.git"
tags = ['proxy-app']
version('master', git='https://github.com/lanl/CLAMR.git')
variant(
'graphics', default='opengl',
values=('opengl', 'mpe', 'none'),
description='Build with specified graphics support')
variant(
'precision', default='mixed',
values=('single', 'mixed', 'full'),
description='single, mixed, or full double precision values')
depends_on('mpi')
depends_on('mpe', when='graphics=mpe')
def cmake_args(self):
spec = self.spec
cmake_args = []
if 'graphics=none' in spec:
cmake_args.append('-DGRAPHICS_TYPE=None')
elif 'graphics=mpe' in spec:
cmake_args.append('-DGRAPHICS_TYPE=MPE')
else:
cmake_args.append('-DGRAPHICS_TYPE=OpenGL')
if 'precision=full' in spec:
cmake_args.append('-DPRECISION_TYPE=full_precision')
elif 'precision=single' in spec:
cmake_args.append('-DPRECISION_TYPE=minimum_precision')
else:
cmake_args.append('-DPRECISION_TYPE=mixed_precision')
# if MIC, then -DMIC_NATIVE=yes
return cmake_args
def install(self, spec, prefix):
install('README', prefix)
install('LICENSE', prefix)
install_tree('docs', join_path(prefix, 'docs'))
install_tree('tests', join_path(prefix, 'tests'))
with working_dir(self.build_directory):
make('install')
| lgpl-2.1 | 4,936,657,519,546,583,000 | 37.628205 | 78 | 0.629605 | false |
gsnbng/erpnext | erpnext/hr/report/vehicle_expenses/vehicle_expenses.py | 2 | 3137 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import erpnext
from frappe import _
from frappe.utils import flt,cstr
from erpnext.accounts.report.financial_statements import get_period_list
def execute(filters=None):
columns, data, chart = [], [], []
if filters.get('fiscal_year'):
company = erpnext.get_default_company()
period_list = get_period_list(filters.get('fiscal_year'), filters.get('fiscal_year'),
'', '', 'Fiscal Year', 'Monthly', company=company)
columns=get_columns()
data=get_log_data(filters)
chart=get_chart_data(data,period_list)
return columns, data, None, chart
def get_columns():
columns = [_("License") + ":Link/Vehicle:100", _('Create') + ":data:50",
_("Model") + ":data:50", _("Location") + ":data:100",
_("Log") + ":Link/Vehicle Log:100", _("Odometer") + ":Int:80",
_("Date") + ":Date:100", _("Fuel Qty") + ":Float:80",
_("Fuel Price") + ":Float:100",_("Fuel Expense") + ":Float:100",
_("Service Expense") + ":Float:100"
]
return columns
def get_log_data(filters):
fy = frappe.db.get_value('Fiscal Year', filters.get('fiscal_year'), ['year_start_date', 'year_end_date'], as_dict=True)
data = frappe.db.sql("""select
vhcl.license_plate as "License", vhcl.make as "Make", vhcl.model as "Model",
vhcl.location as "Location", log.name as "Log", log.odometer as "Odometer",
log.date as "Date", log.fuel_qty as "Fuel Qty", log.price as "Fuel Price",
log.fuel_qty * log.price as "Fuel Expense"
from
`tabVehicle` vhcl,`tabVehicle Log` log
where
vhcl.license_plate = log.license_plate and log.docstatus = 1 and date between %s and %s
order by date""" ,(fy.year_start_date, fy.year_end_date), as_dict=1)
dl=list(data)
for row in dl:
row["Service Expense"]= get_service_expense(row["Log"])
return dl
def get_service_expense(logname):
expense_amount = frappe.db.sql("""select sum(expense_amount)
from `tabVehicle Log` log,`tabVehicle Service` ser
where ser.parent=log.name and log.name=%s""",logname)
return flt(expense_amount[0][0]) if expense_amount else 0
def get_chart_data(data,period_list):
fuel_exp_data,service_exp_data,fueldata,servicedata = [],[],[],[]
service_exp_data = []
fueldata = []
for period in period_list:
total_fuel_exp=0
total_ser_exp=0
for row in data:
if row["Date"] <= period.to_date and row["Date"] >= period.from_date:
total_fuel_exp+=flt(row["Fuel Expense"])
total_ser_exp+=flt(row["Service Expense"])
fueldata.append([period.key,total_fuel_exp])
servicedata.append([period.key,total_ser_exp])
labels = [period.key for period in period_list]
fuel_exp_data= [row[1] for row in fueldata]
service_exp_data= [row[1] for row in servicedata]
datasets = []
if fuel_exp_data:
datasets.append({
'name': 'Fuel Expenses',
'values': fuel_exp_data
})
if service_exp_data:
datasets.append({
'name': 'Service Expenses',
'values': service_exp_data
})
chart = {
"data": {
'labels': labels,
'datasets': datasets
}
}
chart["type"] = "line"
return chart
| agpl-3.0 | -586,803,672,256,891,600 | 33.855556 | 120 | 0.671023 | false |
droundy/deft | papers/histogram/figs/plot-convergence.py | 1 | 5111 | #!/usr/bin/python2
import matplotlib, sys
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy
matplotlib.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']})
matplotlib.rc('text', usetex=True)
import styles
if len(sys.argv) != 7:
print('useage: %s ww ff N min_T methods seed' % sys.argv[0])
exit(1)
ww = float(sys.argv[1])
#arg ww = [1.3, 1.5, 2.0, 3.0]
ff = float(sys.argv[2])
#arg ff = [0.3]
N = float(sys.argv[3])
#arg N = range(5,21)
min_T = eval(sys.argv[4])
#arg min_T = [0.1]
methods = eval(sys.argv[5])
#arg methods = [["wang_landau","simple_flat","tmmc","oetmmc"]]
seed = int(sys.argv[6])
#arg seed = [0]
# input: ["data/s%03d/periodic-ww%04.2f-ff%04.2f-N%i-%s-conv_T%g-%s.dat" % (seed, ww, ff, N, method, min_T, data) for method in methods for data in ["E","lnw"]]
max_T = 2
T_bins = 1e3
dT = max_T/T_bins
T_range = numpy.arange(dT, max_T, dT)
# make dictionaries which we can index by method name
U = {} # internal energy
CV = {} # heat capacity
S = {} # entropy
minlog = 0
for method in methods:
e_hist = numpy.loadtxt("data/s%03d/periodic-ww%04.2f-ff%04.2f-N%i-%s-conv_T%g-E.dat"
% (seed, ww, ff, N, method, min_T), ndmin=2)
lnw_hist = numpy.loadtxt("data/s%03d/periodic-ww%04.2f-ff%04.2f-N%i-%s-conv_T%g-lnw.dat"
% (seed, ww, ff, N, method, min_T), ndmin=2)
energy = -e_hist[:, 0] # array of energies
lnw = lnw_hist[e_hist[:, 0].astype(int), 1] # look up the lnw for each actual energy
ln_dos = numpy.log(e_hist[:, 1]) - lnw
log10w = lnw_hist[e_hist[:, 0].astype(int), 1]*numpy.log10(numpy.exp(1))
log10_dos = numpy.log10(e_hist[:, 1]) - log10w
log10_dos -= log10_dos.max()
if log10_dos.min() < minlog:
minlog = log10_dos.min()
plt.figure('dos')
plt.plot(energy, log10_dos, styles.dots(method), label=styles.title(method))
Z = numpy.zeros(len(T_range)) # partition function
U[method] = numpy.zeros(len(T_range)) # internal energy
CV[method] = numpy.zeros(len(T_range)) # heat capacity
S[method] = numpy.zeros(len(T_range)) # entropy
Z_inf = sum(numpy.exp(ln_dos - ln_dos.max()))
S_inf = sum(-numpy.exp(ln_dos - ln_dos.max())*(-ln_dos.max() - numpy.log(Z_inf))) / Z_inf
for i in range(len(T_range)):
ln_dos_boltz = ln_dos - energy/T_range[i]
dos_boltz = numpy.exp(ln_dos_boltz - ln_dos_boltz.max())
Z[i] = sum(dos_boltz)
U[method][i] = sum(energy*dos_boltz)/Z[i]
S[method][i] = sum(-dos_boltz*(-energy/T_range[i] - ln_dos_boltz.max() \
- numpy.log(Z[i])))/Z[i]
S[method][i] -= S_inf
CV[method][i] = sum((energy/T_range[i])**2*dos_boltz)/Z[i] - \
(sum(energy/T_range[i]*dos_boltz)/Z[i])**2
plt.figure('u')
plt.plot(T_range, U[method]/N, styles.plot(method), label=styles.title(method))
plt.figure('hc')
plt.plot(T_range, CV[method]/N, styles.plot(method), label=styles.title(method))
plt.figure('S')
plt.plot(T_range, S[method]/N, styles.plot(method), label=styles.title(method))
plt.figure('dos')
plt.ylim(minlog, 0)
locs, labels = plt.yticks()
def tentothe(n):
if n == 0:
return '1'
if n == 10:
return '10'
if int(n) == n:
return r'$10^{%d}$' % n
return r'$10^{%g}$' % n
newlabels = [tentothe(n) for n in locs]
plt.yticks(locs, newlabels)
plt.ylim(minlog, 0)
plt.xlabel('$U/N\epsilon$')
plt.ylabel('$DoS$')
plt.title('Density of states for $\lambda=%g$, $\eta=%g$, and $N=%i$'
' ($kT_{min}/\epsilon=%g$)' % (ww, ff, N, min_T))
plt.legend(loc='best')
plt.tight_layout(pad=0.2)
plt.savefig("figs/periodic-ww%02.0f-ff%02.0f-N%i-dos-conv-T%02.0f.pdf"
% (ww*100, ff*100, N, min_T*100))
plt.figure('u')
plt.title('Specific internal energy for $\lambda=%g$, $\eta=%g$, and $N=%i$'
' ($kT_{min}/\epsilon=%g$)' % (ww, ff, N, min_T))
plt.xlabel('$kT/\epsilon$')
plt.ylabel('$U/N\epsilon$')
plt.legend(loc='best')
plt.axvline(min_T, linewidth=1, color='k', linestyle=':')
plt.tight_layout(pad=0.2)
plt.savefig("figs/periodic-ww%02.0f-ff%02.0f-N%i-u-conv-T%02.0f.pdf"
% (ww*100, ff*100, N, min_T*100))
plt.figure('hc')
plt.title('Specific heat capacity for $\lambda=%g$, $\eta=%g$, and $N=%i$'
' ($kT_{min}/\epsilon=%g$)' % (ww, ff, N, min_T))
plt.ylim(0)
plt.xlabel('$kT/\epsilon$')
plt.ylabel('$C_V/Nk$')
plt.legend(loc='best')
plt.axvline(min_T, linewidth=1, color='k', linestyle=':')
plt.tight_layout(pad=0.2)
plt.savefig("figs/periodic-ww%02.0f-ff%02.0f-N%i-hc-conv-T%02.0f.pdf"
% (ww*100, ff*100, N, min_T*100))
plt.figure('S')
plt.title('Configurational entropy for $\lambda=%g$, $\eta=%g$, and $N=%i$'
' ($kT_{min}/\epsilon=%g$)' % (ww, ff, N, min_T))
plt.xlabel(r'$kT/\epsilon$')
plt.ylabel(r'$S_{\textit{config}}/Nk$')
plt.legend(loc='best')
plt.axvline(min_T, linewidth=1, color='k', linestyle=':')
plt.tight_layout(pad=0.2)
plt.savefig("figs/periodic-ww%02.0f-ff%02.0f-N%i-S-conv-T%02.0f.pdf"
% (ww*100, ff*100, N, min_T*100))
| gpl-2.0 | -6,103,449,264,001,766,000 | 32.847682 | 160 | 0.586774 | false |
eshijia/magnum | magnum/api/controllers/v1/baymodel.py | 1 | 14815 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import glanceclient.exc
import novaclient.exceptions as nova_exc
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
from magnum.api.controllers import base
from magnum.api.controllers import link
from magnum.api.controllers.v1 import collection
from magnum.api.controllers.v1 import types
from magnum.api.controllers.v1 import utils as api_utils
from magnum.api import expose
from magnum.api import validation
from magnum.common import clients
from magnum.common import exception
from magnum.common import policy
from magnum import objects
class BayModelPatchType(types.JsonPatchType):
    """JSON-patch type for BayModel resources; inherits default patch rules."""
    pass
class BayModel(base.APIBase):
    """API representation of a baymodel.
    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a baymodel.
    """
    # Backing storage for the ``coe`` wsme property defined below.
    _coe = None
    def _get_coe(self):
        return self._coe
    def _set_coe(self, value):
        # Store only real changes; ``wtypes.Unset`` marks "not provided".
        if value and self._coe != value:
            self._coe = value
        elif value == wtypes.Unset:
            self._coe = wtypes.Unset
    uuid = types.uuid
    """Unique UUID for this baymodel"""
    name = wtypes.StringType(min_length=1, max_length=255)
    """The name of the bay model"""
    coe = wsme.wsproperty(wtypes.text, _get_coe, _set_coe, mandatory=True)
    """The Container Orchestration Engine for this bay model"""
    image_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                           mandatory=True)
    """The image name or UUID to use as a base image for this baymodel"""
    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this bay model"""
    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this bay model"""
    dns_nameserver = wtypes.IPv4AddressType()
    """The DNS nameserver address"""
    keypair_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                             mandatory=True)
    """The name or id of the nova ssh keypair"""
    external_network_id = wtypes.StringType(min_length=1, max_length=255)
    """The external network to attach the Bay"""
    fixed_network = wtypes.StringType(min_length=1, max_length=255)
    """The fixed network name to attach the Bay"""
    network_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container networks"""
    apiserver_port = wtypes.IntegerType(minimum=1024, maximum=65535)
    """The API server port for k8s"""
    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""
    ssh_authorized_key = wtypes.StringType(min_length=1)
    """The SSH Authorized Key"""
    cluster_distro = wtypes.StringType(min_length=1, max_length=255)
    """The Cluster distro for the bay, ex - coreos, fedora-atomic."""
    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated baymodel links"""
    http_proxy = wtypes.StringType(min_length=1, max_length=255)
    """http_proxy for the bay """
    https_proxy = wtypes.StringType(min_length=1, max_length=255)
    """https_proxy for the bay """
    no_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Its comma separated list of ip for which proxies should not
    used in the bay"""
    registry_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the docker registry is enabled"""
    labels = wtypes.DictType(str, str)
    """One or more key/value pairs"""
    insecure = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the TLS should be disabled"""
    def __init__(self, **kwargs):
        # Expose only the intersection of the versioned object's fields and
        # the attributes declared on this API class; everything else on the
        # internal object is deliberately hidden from the API.
        self.fields = []
        for field in objects.BayModel.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))
    @staticmethod
    def _convert_with_links(baymodel, url, expand=True):
        # When not expanding, trim the representation down to the summary
        # fields used by collection listings.
        if not expand:
            baymodel.unset_fields_except(['uuid', 'name', 'image_id',
                                          'apiserver_port', 'coe'])
        baymodel.links = [link.Link.make_link('self', url,
                                              'baymodels', baymodel.uuid),
                          link.Link.make_link('bookmark', url,
                                              'baymodels', baymodel.uuid,
                                              bookmark=True)]
        return baymodel
    @classmethod
    def convert_with_links(cls, rpc_baymodel, expand=True):
        # Build the API object from the RPC/versioned object and attach links.
        baymodel = BayModel(**rpc_baymodel.as_dict())
        return cls._convert_with_links(baymodel, pecan.request.host_url,
                                       expand)
    @classmethod
    def sample(cls, expand=True):
        # Representative sample used for API documentation.
        sample = cls(
            uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
            name='example',
            image_id='Fedora-k8s',
            flavor_id='m1.small',
            master_flavor_id='m1.small',
            dns_nameserver='8.8.1.1',
            keypair_id='keypair1',
            external_network_id='ffc44e4a-2319-4062-bce0-9ae1c38b05ba',
            fixed_network='private',
            network_driver='libnetwork',
            apiserver_port=8080,
            docker_volume_size=25,
            cluster_distro='fedora-atomic',
            ssh_authorized_key='ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAB',
            coe='kubernetes',
            http_proxy='http://proxy.com:123',
            https_proxy='https://proxy.com:123',
            no_proxy='192.168.0.1,192.168.0.2,192.168.0.3',
            labels={'key1': 'val1', 'key2': 'val2'},
            created_at=datetime.datetime.utcnow(),
            updated_at=datetime.datetime.utcnow())
        return cls._convert_with_links(sample, 'http://localhost:9511', expand)
class BayModelCollection(collection.Collection):
    """A pageable collection of BayModel API objects."""
    baymodels = [BayModel]
    """A list containing baymodels objects"""
    def __init__(self, **kwargs):
        self._type = 'baymodels'
    @staticmethod
    def convert_with_links(rpc_baymodels, limit, url=None, expand=False,
                           **kwargs):
        # Convert each RPC object into its API form, then attach the
        # pagination "next" link for the collection.
        coll = BayModelCollection()
        converted = []
        for rpc_obj in rpc_baymodels:
            converted.append(BayModel.convert_with_links(rpc_obj, expand))
        coll.baymodels = converted
        coll.next = coll.get_next(limit, url=url, **kwargs)
        return coll
    @classmethod
    def sample(cls):
        # Documentation sample: a collection holding one summary baymodel.
        demo = cls()
        demo.baymodels = [BayModel.sample(expand=False)]
        return demo
class BayModelsController(rest.RestController):
    """REST controller for BayModels."""
    # Extra non-CRUD routes exposed by this controller.
    _custom_actions = {
        'detail': ['GET'],
    }
    def _get_baymodels_collection(self, marker, limit,
                                  sort_key, sort_dir, expand=False,
                                  resource_url=None):
        # Shared helper behind get_all()/detail(): validates the paging
        # parameters, resolves the pagination marker to an object and
        # builds the API collection with links.
        limit = api_utils.validate_limit(limit)
        sort_dir = api_utils.validate_sort_dir(sort_dir)
        marker_obj = None
        if marker:
            marker_obj = objects.BayModel.get_by_uuid(pecan.request.context,
                                                      marker)
        baymodels = objects.BayModel.list(pecan.request.context, limit,
                                          marker_obj, sort_key=sort_key,
                                          sort_dir=sort_dir)
        return BayModelCollection.convert_with_links(baymodels, limit,
                                                     url=resource_url,
                                                     expand=expand,
                                                     sort_key=sort_key,
                                                     sort_dir=sort_dir)
    def _get_image_data(self, context, image_ident):
        """Retrieves os_distro and other metadata from the Glance image.
        :param image_ident: image id or name of baymodel.
        :raises: ImageNotFound if the image does not exist.
        :raises: ImageNotAuthorized if access to the image is forbidden.
        """
        try:
            cli = clients.OpenStackClients(context)
            return api_utils.get_openstack_resource(cli.glance().images,
                                                    image_ident, 'images')
        except glanceclient.exc.NotFound:
            raise exception.ImageNotFound(image_id=image_ident)
        except glanceclient.exc.HTTPForbidden:
            raise exception.ImageNotAuthorized(image_id=image_ident)
    @policy.enforce_wsgi("baymodel")
    @expose.expose(BayModelCollection, types.uuid,
                   types.uuid, int, wtypes.text, wtypes.text)
    def get_all(self, baymodel_uuid=None, marker=None, limit=None,
                sort_key='id', sort_dir='asc'):
        """Retrieve a list of baymodels.
        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        """
        return self._get_baymodels_collection(marker, limit, sort_key,
                                              sort_dir)
    @policy.enforce_wsgi("baymodel")
    @expose.expose(BayModelCollection, types.uuid,
                   types.uuid, int, wtypes.text, wtypes.text)
    def detail(self, baymodel_uuid=None, marker=None, limit=None,
               sort_key='id', sort_dir='asc'):
        """Retrieve a list of baymodels with detail.
        :param baymodel_uuid: UUID of a baymodel, to get only baymodels for
               that baymodel.
        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        """
        # NOTE(lucasagomes): /detail should only work against collections
        parent = pecan.request.path.split('/')[:-1][-1]
        if parent != "baymodels":
            raise exception.HTTPNotFound
        expand = True
        resource_url = '/'.join(['baymodels', 'detail'])
        return self._get_baymodels_collection(marker, limit,
                                              sort_key, sort_dir, expand,
                                              resource_url)
    @policy.enforce_wsgi("baymodel", "get")
    @expose.expose(BayModel, types.uuid_or_name)
    def get_one(self, baymodel_ident):
        """Retrieve information about the given baymodel.
        :param baymodel_ident: UUID or logical name of a baymodel.
        """
        rpc_baymodel = api_utils.get_rpc_resource('BayModel', baymodel_ident)
        return BayModel.convert_with_links(rpc_baymodel)
    def check_keypair_exists(self, context, keypair):
        """Checks the existence of the keypair.
        :raises: KeyPairNotFound if nova does not know the keypair.
        """
        cli = clients.OpenStackClients(context)
        try:
            cli.nova().keypairs.get(keypair)
        except nova_exc.NotFound:
            raise exception.KeyPairNotFound(keypair=keypair)
    @policy.enforce_wsgi("baymodel", "create")
    @expose.expose(BayModel, body=BayModel, status_code=201)
    @validation.enforce_network_driver_types('flannel')
    def post(self, baymodel):
        """Create a new baymodel.
        :param baymodel: a baymodel within the request body.
        """
        baymodel_dict = baymodel.as_dict()
        context = pecan.request.context
        # Validate referenced external resources before persisting anything.
        self.check_keypair_exists(context, baymodel_dict['keypair_id'])
        baymodel_dict['project_id'] = context.project_id
        baymodel_dict['user_id'] = context.user_id
        # cluster_distro is derived from the Glance image's os_distro
        # property rather than supplied by the caller.
        image_data = self._get_image_data(context, baymodel_dict['image_id'])
        if image_data.get('os_distro'):
            baymodel_dict['cluster_distro'] = image_data['os_distro']
        else:
            raise exception.OSDistroFieldNotFound(
                image_id=baymodel_dict['image_id'])
        new_baymodel = objects.BayModel(context, **baymodel_dict)
        new_baymodel.create()
        # Set the HTTP Location Header
        pecan.response.location = link.build_url('baymodels',
                                                 new_baymodel.uuid)
        return BayModel.convert_with_links(new_baymodel)
    @policy.enforce_wsgi("baymodel", "update")
    @wsme.validate(types.uuid, [BayModelPatchType])
    @expose.expose(BayModel, types.uuid, body=[BayModelPatchType])
    @validation.enforce_network_driver_types('flannel')
    def patch(self, baymodel_uuid, patch):
        """Update an existing baymodel.
        :param baymodel_uuid: UUID of a baymodel.
        :param patch: a json PATCH document to apply to this baymodel.
        """
        rpc_baymodel = objects.BayModel.get_by_uuid(pecan.request.context,
                                                    baymodel_uuid)
        try:
            baymodel_dict = rpc_baymodel.as_dict()
            baymodel = BayModel(**api_utils.apply_jsonpatch(
                baymodel_dict,
                patch))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)
        # Update only the fields that have changed
        for field in objects.BayModel.fields:
            try:
                patch_val = getattr(baymodel, field)
            except AttributeError:
                # Ignore fields that aren't exposed in the API
                continue
            if patch_val == wtypes.Unset:
                patch_val = None
            if rpc_baymodel[field] != patch_val:
                rpc_baymodel[field] = patch_val
        rpc_baymodel.save()
        return BayModel.convert_with_links(rpc_baymodel)
    @policy.enforce_wsgi("baymodel")
    @expose.expose(None, types.uuid_or_name, status_code=204)
    def delete(self, baymodel_ident):
        """Delete a baymodel.
        :param baymodel_ident: UUID or logical name of a baymodel.
        """
        rpc_baymodel = api_utils.get_rpc_resource('BayModel', baymodel_ident)
        rpc_baymodel.destroy()
| apache-2.0 | -8,307,967,998,542,834,000 | 38.193122 | 79 | 0.60351 | false |
nitish-tripathi/Simplery | ANN/Odin_10/Network.py | 1 | 12776 |
"""
http://neuralnetworksanddeeplearning.com/chap1.html#implementing_our_network_to_classify_digits
http://numericinsight.com/uploads/A_Gentle_Introduction_to_Backpropagation.pdf
https://ayearofai.com/rohan-lenny-1-neural-networks-the-backpropagation-algorithm-explained-abf4609d4f9d
"""
#### Libraries
# Standard library
import sys
import random
import json
# Third-party libraries
import numpy as np
# Import inside Odin
from Cost import QuadraticCost, CrossEntropyCost
from Helpers import Helpers
class Network(object):
def __init__(self, model = None, sizes = None, eta = None, C = 0.0, cost = CrossEntropyCost, decrease_const = 0.0):
"""
Initializes artificial neural network classifier.
The biases and weights for the
network are initialized randomly, using a Gaussian
distribution with mean 0, and variance 1. Note that the first
layer is assumed to be an input layer, and by convention we
won't set any biases for those neurons, since biases are only
ever used in computing the outputs from later layers.
Parameters
---------
sizes: 1d array
Contains the number of neurons in the respective layers of
the network. For example, if the list was [2, 3, 1] then it
would be a three-layer network, with the first layer containing
2 neurons, the second layer 3 neurons, and the third layer 1 neuron.
eta: float
Learning rate
C: float
L2 parameterization. It is used to not allow the weights to become larger,
in order to avoid overfitting
cost: Cost class
Defines the cost calculation class, either CrossEntropyCost or
Quadratic cost
decrease_const : float (default: 0.0)
Decrease constant. Shrinks the learning rate
after each epoch via eta / (1 + epoch*decrease_const)
"""
if model != None:
self.load(model)
return
elif sizes == None:
raise NotImplementedError('Parameter sizes cannot be None')
return
np.random.seed()
self.sizes = sizes
self._num_layers = len(sizes)
self.biases = [np.random.randn(y, 1) for y in sizes[1:]]
self.weights = [np.random.randn(y, x)/np.sqrt(x)
for x, y in zip(sizes[:-1], sizes[1:])]
self._C = C
self._eta = eta
self._decrease_const = decrease_const
self.cost = cost
self.test_cost = []
def _feedforward(self, a):
"""Return the output of the network if ``a`` is input."""
for b, w in zip(self.biases, self.weights):
a = Helpers.sigmoid(np.dot(w, a)+b)
return a
def _feedforward2(self, a):
zs = []
activations = [a]
activation = a
for b, w in zip(self.biases, self.weights):
z = np.dot(w, activation) + b
zs.append(z)
activation = Helpers.sigmoid(z)
activations.append(activation)
return (zs, activations)
def fit(self, training_data, epochs, mini_batch_size=1,
test_data=None, calc_test_cost=False):
"""
Fit the model to the training data.
Train the neural network using mini-batch stochastic
gradient descent.
Parameters
---------
training_data: list of tuples (X, y)
X is input and y is desired output
epoch: int
Maximum number of iterations over the training dataset.
mini_batch_size: int (default: 1)
Divides training data into k minibatches for efficiency.
Normal gradient descent learning if k=1 (default).
test_data: list of tuples (X, y)
If provided then the network will be evaluated against the
test data after each epoch, and partial progress printed out.
This is useful for tracking progress, but slows things down
substantially.
"""
if test_data: n_test = len(test_data)
n = len(training_data)
self._nOut = training_data[0][1].shape[0]
for j in xrange(epochs):
# Adaptive learning rate
self._eta /= (1 + self._decrease_const*j)
# Randomly shuffling training data
random.shuffle(training_data)
# Partition training data into mini-batches of the appropriate size
mini_batches = [
training_data[k:k+mini_batch_size]
for k in xrange(0, n, mini_batch_size)]
# Then for each mini_batch we apply a single step of gradient descent
for mini_batch in mini_batches:
#self._update_mini_batch_old(mini_batch, eta)
self._update_mini_batch(mini_batch, n)
if test_data:
print "Epoch {0}: {1} / {2}".format(
j, self.evaluate(test_data), n_test, self._total_cost(test_data, True))
if calc_test_cost == True:
cost = self._total_cost(test_data, True)
self.test_cost.append(cost)
else:
#print "Epoch {0} complete".format(j)
sys.stderr.write('\rEpoch: %d/%d' % (j+1, epochs))
sys.stderr.flush()
print ""
def _update_mini_batch_old(self, mini_batch):
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
for x, y in mini_batch:
delta_nabla_b, delta_nabla_w = self._backpropagation(x, y)
nabla_b = [nb+dnb for nb, dnb in zip(nabla_b, delta_nabla_b)]
nabla_w = [nw+dnw for nw, dnw in zip(nabla_w, delta_nabla_w)]
self.weights = [w-(self._eta/len(mini_batch))*nw
for w, nw in zip(self.weights, nabla_w)]
self.biases = [b-(self._eta/len(mini_batch))*nb
for b, nb in zip(self.biases, nabla_b)]
def _update_mini_batch(self, mini_batch, n):
"""Update the network's weights and biases by applying
gradient descent using backpropagation to a single mini batch.
The ``mini_batch`` is a list of tuples ``(x, y)``, and ``eta``
is the learning rate."""
batch_size = len(mini_batch)
# transform to (input x batch_size) matrix
x = np.asarray([_x.ravel() for _x, _y in mini_batch]).transpose()
# transform to (output x batch_size) matrix
y = np.asarray([_y.ravel() for _x, _y in mini_batch]).transpose()
nabla_b, nabla_w = self._backpropagation(x, y)
self.weights = [(1-self._eta*(self._C/n))*w - (self._eta / batch_size) * nw for w, nw in zip(self.weights, nabla_w)]
self.biases = [b - (self._eta / batch_size) * nb for b, nb in zip(self.biases, nabla_b)]
return
def _backpropagation(self, x, y):
"""Return a tuple ``(nabla_b, nabla_w)`` representing the
gradient for the cost function C_x. ``nabla_b`` and
``nabla_w`` are layer-by-layer lists of numpy arrays, similar
to ``self.biases`` and ``self.weights``."""
nabla_b = [0 for i in self.biases]
nabla_w = [0 for i in self.weights]
# feedforward
zs, activations = self._feedforward2(x)
# backward pass
delta = self.cost.delta(zs[-1], activations[-1], y)
#delta = self._cost_derivative(activations[-1], y) * Helpers.sigmoid_prime(zs[-1])
nabla_b[-1] = delta.sum(1).reshape([len(delta), 1]) # reshape to (n x 1) matrix
nabla_w[-1] = np.dot(delta, activations[-2].transpose())
for l in xrange(2, self._num_layers):
z = zs[-l]
sp = Helpers.sigmoid_prime(z)
delta = np.dot(self.weights[-l + 1].transpose(), delta) * sp
nabla_b[-l] = delta.sum(1).reshape([len(delta), 1]) # reshape to (n x 1) matrix
nabla_w[-l] = np.dot(delta, activations[-l - 1].transpose())
return (nabla_b, nabla_w)
def _backpropagation_old(self, x, y):
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
# feedforward
activation = x
activations = [x] # list to store all the activations, layer by layer
zs = [] # list to store all the z vectors, layer by layer
for b, w in zip(self.biases, self.weights):
z = np.dot(w, activation)+b
zs.append(z)
activation = Helpers.sigmoid(z)
activations.append(activation)
# backward pass
delta = self._cost_derivative(activations[-1], y) * \
Helpers.sigmoid_prime(zs[-1])
nabla_b[-1] = delta
nabla_w[-1] = np.dot(delta, activations[-2].transpose())
# Note that the variable l in the loop below is used a little
# differently to the notation in Chapter 2 of the book. Here,
# l = 1 means the last layer of neurons, l = 2 is the
# second-last layer, and so on. It's a renumbering of the
# scheme in the book, used here to take advantage of the fact
# that Python can use negative indices in lists.
for l in xrange(2, self._num_layers):
z = zs[-l]
sp = Helpers.sigmoid_prime(z)
delta = np.dot(self.weights[-l+1].transpose(), delta) * sp
nabla_b[-l] = delta
nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
return (nabla_b, nabla_w)
def evaluate(self, test_data):
"""
Evaluate the test data.
Return the number of test inputs for which the neural
network outputs the correct result. Note that the neural
network's output is assumed to be the index of whichever
neuron in the final layer has the highest activation.
Parameters
---------
test_data: list of tuples (X, y)
X is input and y is desired output
"""
test_results = [(np.argmax(self._feedforward(x)), y)
for (x, y) in test_data]
return sum(int(x == y) for (x, y) in test_results)
def _cost_derivative(self, output_activations, y):
"""Return the vector of partial derivatives \partial C_x /
\partial a for the output activations."""
return (output_activations-y)
def _total_cost(self, data, convert=False):
"""Return the total cost for the data set ``data``. The flag
``convert`` should be set to False if the data set is the
training data (the usual case), and to True if the data set is
the validation or test data. See comments on the similar (but
reversed) convention for the ``accuracy`` method, above.
"""
cost = 0.0
for x, y in data:
a = self._feedforward(x)
if convert: y = self._vectorized_result(y)
cost += self.cost.fn(a, y)/len(data)
cost += 0.5*(self._C/len(data))*sum(
np.linalg.norm(w)**2 for w in self.weights)
return cost
def _vectorized_result(self, j):
"""Return a 10-dimensional unit vector with a 1.0 in the j'th position
and zeroes elsewhere. This is used to convert a digit (0...9)
into a corresponding desired output from the neural network.
"""
if j <= self._nOut:
e = np.zeros((self._nOut, 1))
e[j] = 1.0
return e
else:
return j
def save(self, filename='model'):
"""
Save the neural network to the file ``filename``.
"""
data = {"sizes": self.sizes,
"weights": [w.tolist() for w in self.weights],
"biases": [b.tolist() for b in self.biases],
"cost": str(self.cost.__name__),
"eta": self._eta,
"C": self._C}
f = open(filename, "w")
json.dump(data, f)
f.close()
def load(self, filename):
"""
Load a neural network from the file ``filename``.
Returns an instance of Network.
"""
f = open(filename, "r")
data = json.load(f)
f.close()
self.cost = getattr(sys.modules[__name__], data["cost"])
self.sizes = data["sizes"]
self.weights = [np.array(w) for w in data["weights"]]
self.biases = [np.array(b) for b in data["biases"]]
self._eta = data["eta"]
self._C = data["C"]
| mit | 369,818,851,461,477,000 | 37.251497 | 124 | 0.560269 | false |
kevinjqiu/jirafs | jirafs/cmdline.py | 1 | 4707 | import argparse
import codecs
import logging
import os
import sys
import time
from blessings import Terminal
import six
from verlib import NormalizedVersion
from . import utils
from .exceptions import (
GitCommandError,
JiraInteractionFailed,
JirafsError,
NotTicketFolderException
)
# Write data to stdout as UTF-8 bytes when there's no encoding specified
# (Python 2 only: when stdout is piped, sys.stdout.encoding is None and
# printing unicode would raise UnicodeEncodeError without this wrapper).
if sys.version_info < (3, ) and sys.stdout.encoding is None:
    sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# Module-level logger for this CLI entry point.
logger = logging.getLogger(__name__)
def main():
    """Entry point for the jirafs CLI.

    Validates the runtime environment, parses the command line, dispatches
    to the selected command plugin, and maps known failure modes to
    distinct exit codes (10: git failure, 20/21: not a ticket folder,
    80: JIRA refused the request, 90: other jirafs error).
    """
    term = Terminal()
    # Hard environment requirements: Python >= 2.7 and git >= 1.8.
    if sys.version_info < (2, 7):
        raise RuntimeError(
            "Jirafs requires minimally version 2.7 of Python 2, or "
            "any version of Python 3. Please upgrade your version of "
            "python before using Jirafs."
        )
    if utils.get_git_version() < NormalizedVersion('1.8'):
        raise RuntimeError(
            "Jirafs requires minimally version 1.8 of Git. Please "
            "upgrade your version of git before using Jirafs."
        )
    commands = utils.get_installed_commands()
    parser = argparse.ArgumentParser(
        description='Edit Jira issues locally from your filesystem',
        add_help=False,
    )
    parser.add_argument(
        'command',
        type=six.text_type,
        choices=commands.keys()
    )
    parser.add_argument(
        '--subtasks',
        action='store_true',
        default=False
    )
    parser.add_argument(
        '--log-level',
        default=None,
        dest='log_level',
    )
    # Unrecognized arguments in ``extra`` are passed through to the plugin.
    args, extra = parser.parse_known_args()
    if args.log_level is not None:
        logging.basicConfig(level=logging.getLevelName(args.log_level))
    command_name = args.command
    cmd_class = commands[command_name]
    # Subtasks
    if args.subtasks:
        cmd_class.RUN_FOR_SUBTASKS = True
    started = time.time()
    logger.debug(
        'Command %s(%s) started',
        command_name,
        extra
    )
    jira = utils.lazy_get_jira()
    try:
        cmd_class.execute_command(
            extra, jira=jira, path=os.getcwd(), command_name=command_name
        )
    except GitCommandError as e:
        # Show the failing git command and its captured output, then exit.
        print(
            u"{t.red}Error (code: {code}) while running git "
            u"command.{t.normal}".format(
                t=term,
                code=e.returncode
            )
        )
        print("")
        print(u"{t.red}Command:{t.normal}{t.red}{t.bold}".format(t=term))
        print(u"    {cmd}".format(cmd=e.command))
        print(u"{t.normal}".format(t=term))
        print(u"{t.red}Output:{t.normal}{t.red}{t.bold}".format(t=term))
        for line in e.output.decode('utf8').split('\n'):
            print(u"    %s" % line)
        print(u"{t.normal}".format(t=term))
        sys.exit(10)
    except NotTicketFolderException:
        # Some commands may be retried in each subdirectory of the CWD
        # (a folder of ticket folders) when the plugin opts in.
        if not getattr(cmd_class, 'TRY_SUBFOLDERS', False):
            print(
                u"{t.red}The command '{cmd}' must be ran from "
                u"within an issue folder.{t.normal}".format(
                    t=term,
                    cmd=command_name
                )
            )
            sys.exit(20)
        count_runs = 0
        for folder in os.listdir(os.getcwd()):
            try:
                cmd_class.execute_command(
                    extra,
                    jira=jira,
                    path=os.path.join(
                        os.getcwd(),
                        folder,
                    ),
                    command_name=command_name,
                )
                count_runs += 1
            except NotTicketFolderException:
                # Non-ticket subfolders are simply skipped.
                pass
        if count_runs == 0:
            print(
                u"{t.red}The command '{cmd}' must be ran from "
                u"within an issue folder or from within a folder containing "
                u"issue folders.{t.normal}".format(
                    t=term,
                    cmd=command_name
                )
            )
            sys.exit(21)
    except JiraInteractionFailed as e:
        print(
            u"{t.red}JIRA was unable to satisfy your "
            u"request: {t.normal}{t.red}{t.bold}{error}{t.normal}".format(
                t=term,
                error=str(e)
            )
        )
        sys.exit(80)
    except JirafsError as e:
        print(
            u"{t.red}Jirafs encountered an error processing your "
            u"request: {t.normal}{t.red}{t.bold}{error}{t.normal}".format(
                t=term,
                error=str(e)
            )
        )
        sys.exit(90)
    logger.debug(
        'Command %s(%s) finished in %s seconds',
        command_name,
        extra,
        (time.time() - started)
    )
| mit | 8,821,853,478,044,909,000 | 27.70122 | 77 | 0.5273 | false |
phamtrisi/metapp2 | manage.py | 1 | 2515 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import subprocess
from flask.ext.script import Manager, Shell, Server
from flask.ext.migrate import MigrateCommand
from metapp2.app import create_app
from metapp2.user.models import User
from metapp2.meeting.models import Meeting
from metapp2.meeting_user.models import Meeting_User
from metapp2.meeting_user_type.models import Meeting_User_Type
from metapp2.meeting_purpose.models import Meeting_Purpose
from metapp2.meeting_note.models import Meeting_Note
from metapp2.meeting_material.models import Meeting_Material
from metapp2.meeting_decision.models import Meeting_Decision
from metapp2.meeting_agenda.models import Meeting_Agenda
from metapp2.meeting_agenda_item.models import Meeting_Agenda_Item
from metapp2.meeting_agenda_item_user.models import Meeting_Agenda_Item_User
from metapp2.meeting_action_item.models import Meeting_Action_Item
from metapp2.meeting_action_item_user.models import Meeting_Action_Item_User
from metapp2.group.models import Group
from metapp2.group_user.models import Group_User
from metapp2.settings import DevConfig, ProdConfig
from metapp2.database import db
from metapp2 import app
# Paths used by the test runner and the Flask-Script manager instance.
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
manager = Manager(app)
def _make_context():
    """Build the namespace handed to a ``manage.py shell`` session.

    Exposes the app, the database handle, and every model class so they
    are available without manual imports.
    """
    context = {
        'app': app,
        'db': db,
        'User': User,
        'Meeting': Meeting,
        'Meeting_Purpose': Meeting_Purpose,
        'Meeting_Note': Meeting_Note,
        'Meeting_Material': Meeting_Material,
        'Meeting_Decision': Meeting_Decision,
        'Meeting_Action_Item': Meeting_Action_Item,
        'Group': Group,
        'Group_User': Group_User,
        'Meeting_User': Meeting_User,
        'Meeting_User_Type': Meeting_User_Type,
        'Meeting_Action_Item_User': Meeting_Action_Item_User,
        'Meeting_Agenda': Meeting_Agenda,
        'Meeting_Agenda_Item': Meeting_Agenda_Item,
        'Meeting_Agenda_Item_User': Meeting_Agenda_Item_User,
    }
    return context
@manager.command
def test():
    """Run the project test suite under pytest with verbose output."""
    import pytest
    return pytest.main([TEST_PATH, '--verbose'])
@manager.command
def create_db():
    """Create all database tables defined by the registered models."""
    db.create_all()
@manager.command
def drop_db():
    """Drop all database tables (destructive; all data is lost)."""
    db.drop_all()
@manager.command
def run():
    """Run the development server on 0.0.0.0, honoring the PORT env var."""
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
# Register the interactive shell (with the prepared context) and the
# Flask-Migrate database commands, then dispatch CLI arguments.
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
    manager.run()
| bsd-3-clause | -5,898,075,227,168,820,000 | 37.106061 | 550 | 0.754672 | false |
mogillc/theo | software/TheoTest/motor.py | 1 | 1505 | import curses
import mraa
import time
# Put the terminal into raw keyboard mode: no echo, unbuffered keys, and
# arrow keys delivered as curses KEY_* codes.
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.keypad(1)
stdscr.addstr(0,0,"Press 'q' to quit\n")
stdscr.refresh()
def set_motor(chan,en,phase):
	"""Drive one motor channel via its phase (direction) and enable pins.

	chan: motor channel, 0 or 1; any other value is silently ignored
	    (same behaviour as the original per-channel branches).
	en: value written to the channel's enable pin (0/1).
	phase: value written to the channel's phase pin (0/1).
	"""
	# GPIO pin numbers per channel: (phase_pin, enable_pin).
	# NOTE(review): mapping taken verbatim from the original code --
	# confirm against the board schematic.
	pins = {0: (21, 0), 1: (20, 14)}
	if chan not in pins:
		return
	phase_pin, en_pin = pins[chan]
	gpio = mraa.Gpio(phase_pin)
	gpio.dir(mraa.DIR_OUT)
	gpio.write(phase)
	gpio = mraa.Gpio(en_pin)
	gpio.dir(mraa.DIR_OUT)
	gpio.write(en)
def init_motor():
	"""Configure the motor driver and stop both channels."""
	# NOTE(review): pin 31 presumably selects the driver's control mode --
	# confirm against the board schematic.
	gpio = mraa.Gpio(31)
	gpio.dir(mraa.DIR_OUT)
	gpio.write(1)
	# setting motor to run in Enable/Phase mode
	set_motor(0,0,0)
	set_motor(1,0,0)
def move_forward():
	"""Enable channel 0 forward, channel 1 off (drive straight ahead)."""
	set_motor(0,1,0)
	set_motor(1,0,0)
def move_backward():
	"""Enable channel 0 reversed, channel 1 off (drive backwards)."""
	set_motor(0,1,1)
	set_motor(1,0,0)
def turn_left():
	"""Channel 0 off, channel 1 enabled (turn left)."""
	set_motor(0,0,0)
	set_motor(1,1,0)
def turn_right():
	"""Channel 0 off, channel 1 enabled with phase reversed (turn right)."""
	set_motor(0,0,0)
	set_motor(1,1,1)
def updateMotor(key):
	"""Map a pressed key to a motor command and show it on the status line.

	WASD or the arrow keys drive/turn; space stops both motors.  Any
	other key is ignored.  Labels are padded so a shorter word fully
	overwrites a longer previous one on screen.
	"""
	if(key==ord('w') or key==curses.KEY_UP):
		stdscr.addstr(1,0,"Forward ")
		move_forward()
	elif(key==ord('s') or key==curses.KEY_DOWN):
		stdscr.addstr(1,0,"Backward")
		move_backward()
	elif(key==ord('a') or key==curses.KEY_LEFT):
		stdscr.addstr(1,0,"Left    ")
		turn_left()
	elif(key==ord('d') or key==curses.KEY_RIGHT):
		# NOTE(review): "Righ" is missing the trailing "t" in the
		# on-screen label (kept as-is here).
		stdscr.addstr(1,0,"Righ    ")
		turn_right()
	elif(key==ord(' ')):
		stdscr.addstr(1,0,"Stop    ")
		init_motor()
# Main input loop: read keys until 'q' and forward them to the motors.
# The curses teardown is in a finally block so an exception (e.g. a GPIO
# failure) cannot leave the terminal stuck in cbreak/noecho mode.
init_motor()
key = ''
try:
	while key != ord('q'):
		key = stdscr.getch()
		updateMotor(key)
finally:
	curses.nocbreak()
	stdscr.keypad(0)
	curses.echo()
	curses.endwin()
| apache-2.0 | -8,672,312,592,866,148,000 | 17.13253 | 46 | 0.642525 | false |
Affirm/cabot | cabot/metricsapp/models/grafana.py | 1 | 5474 | import logging
import requests
import urlparse
from django.core.exceptions import ValidationError
from django.db import models
from cabot.metricsapp import defs
from cabot.metricsapp.api import get_series_ids, get_panel_url
logger = logging.getLogger(__name__)
class GrafanaInstance(models.Model):
    """A Grafana deployment reachable over its HTTP API.

    Holds the site URL and an API token, and caches one authenticated
    requests.Session per token.
    """
    class Meta:
        app_label = 'metricsapp'
    name = models.CharField(
        unique=True,
        max_length=30,
        help_text='Unique name for Grafana site.'
    )
    url = models.CharField(
        max_length=100,
        help_text='Url of Grafana site.'
    )
    api_key = models.CharField(
        max_length=100,
        help_text='Grafana API token for authentication (http://docs.grafana.org/http_api/auth/).'
    )
    sources = models.ManyToManyField(
        'MetricsSourceBase',
        through='GrafanaDataSource',
        help_text='Metrics sources used by this Grafana site.'
    )
    # Class-level cache of requests.Session objects keyed by API token,
    # shared across instances of this model.
    _sessions = dict()
    def __unicode__(self):
        return self.name
    def clean(self, *args, **kwargs):
        """Make sure the input url/api key work."""
        response = self.get_request('api/search')
        try:
            response.raise_for_status()
        # BUG FIX: the module is ``requests.exceptions`` (plural);
        # ``requests.exception`` does not exist and raised AttributeError
        # instead of producing the intended validation error.
        except requests.exceptions.HTTPError:
            raise ValidationError('Request to Grafana API failed.')
    @property
    def session(self):
        """A requests.session object with the correct authorization headers"""
        session = self._sessions.get(self.api_key)
        if session is None:
            session = requests.Session()
            session.headers.update({'Authorization': 'Bearer {}'.format(self.api_key)})
            self._sessions[self.api_key] = session
        return session
    def get_request(self, uri=''):
        """Make a GET request to the Grafana instance for ``uri``."""
        return self.session.get(urlparse.urljoin(self.url, uri), timeout=defs.GRAFANA_REQUEST_TIMEOUT_S)
class GrafanaDataSource(models.Model):
    """
    Intermediate model to match the name of a data source in a Grafana instance
    with the corresponding MetricsDataSource
    """
    class Meta:
        app_label = 'metricsapp'
    grafana_source_name = models.CharField(
        max_length=30,
        help_text='The name for a data source in grafana (e.g. metrics-stage")'
    )
    grafana_instance = models.ForeignKey('GrafanaInstance', on_delete=models.CASCADE)
    metrics_source_base = models.ForeignKey('MetricsSourceBase', on_delete=models.CASCADE)
    def __unicode__(self):
        return '{} ({}, {})'.format(self.grafana_source_name, self.metrics_source_base.name,
                                    self.grafana_instance.name)
class GrafanaPanel(models.Model):
    """
    Data about a Grafana panel.
    """
    class Meta:
        app_label = 'metricsapp'
    @property
    def modifiable_url(self):
        """Url with modifiable time range, dashboard link, etc"""
        # Swap the single-panel view for the full dashboard view and force
        # fullscreen so the panel is editable in context.
        if self.panel_url:
            return '{}&fullscreen'.format(self.panel_url.replace('dashboard-solo', 'dashboard'))
        return None
    def get_rendered_image(self):
        """Get a .png image of this panel, or None if rendering fails."""
        # GrafanaInstance.get_request only takes the path
        panel_url = self.panel_url.replace(urlparse.urljoin(self.grafana_instance.url, '/'), '')
        rendered_image_url = urlparse.urljoin('render/', panel_url)
        rendered_image_url = '{}&width={}&height={}'.format(rendered_image_url,
                                                            defs.GRAFANA_RENDERED_IMAGE_WIDTH,
                                                            defs.GRAFANA_RENDERED_IMAGE_HEIGHT)
        # Unfortunately "$__all" works for the normal image but not render
        rendered_image_url = rendered_image_url.replace('$__all', 'All')
        try:
            image_request = self.grafana_instance.get_request(rendered_image_url)
            image_request.raise_for_status()
            return image_request.content
        except requests.exceptions.RequestException:
            # Best-effort: rendering failures are logged, not raised.
            logger.error('Failed to get Grafana panel image')
            return None
    # NOTE: model fields are declared after the methods in this class;
    # Django resolves them the same way regardless of ordering.
    grafana_instance = models.ForeignKey('GrafanaInstance', on_delete=models.CASCADE)
    dashboard_uri = models.CharField(max_length=100)
    panel_id = models.IntegerField()
    series_ids = models.CharField(max_length=50)
    selected_series = models.CharField(max_length=50)
    panel_url = models.CharField(max_length=2500, null=True)
def build_grafana_panel_from_session(session):
    """Returns an (unsaved!) GrafanaPanel model instance for use with rendering or to save to the DB"""
    panel = GrafanaPanel()
    set_grafana_panel_from_session(panel, session)
    return panel
def set_grafana_panel_from_session(grafana_panel, session):
    """
    Update a GrafanaPanel model with data based on session vars.

    Does not touch the database - call grafana_panel.save() yourself if you
    want to persist the result.
    """
    instance = GrafanaInstance.objects.get(id=session['instance_id'])
    uri = session['dashboard_uri']
    url = get_panel_url(instance.url, uri, session['panel_id'],
                        session['templating_dict'])
    # Plain attribute assignments on the unsaved model instance.
    grafana_panel.panel_url = url
    grafana_panel.grafana_instance = instance
    grafana_panel.dashboard_uri = uri
    grafana_panel.selected_series = '_'.join(session['series'])
    grafana_panel.series_ids = get_series_ids(session['panel_info'])
    grafana_panel.panel_id = int(session['panel_id'])
| mit | -6,052,379,822,475,333,000 | 35.493333 | 107 | 0.647059 | false |
qdzzyb2014/flask-weibo | app/forms.py | 1 | 1550 | from flask.ext.wtf import Form
from wtforms import TextField, BooleanField, TextAreaField, PasswordField
from wtforms.validators import Required, Length, Email
class LoginForm(Form):
    """Sign-in form: username/password plus an optional "remember me" flag."""
    user_name = TextField('user_name', validators = [Required()])
    password = PasswordField('password', validators = [Required()])
    remember_me = BooleanField('remember_me', default = False)
class SignUpForm(Form):
    """Registration form; the email field is validated for address syntax."""
    user_name = TextField('user_name', validators = [Required()])
    password = PasswordField('password', validators = [Required()])
    user_email = TextField('user_email', validators = [Email(), Required()])
class EditForm(Form):
    """Profile-edit form; rejects nicknames already taken by another user."""
    nickname = TextField('nickname', validators = [Required()])
    about_me = TextAreaField('about_me', validators = [Length(min = 0, max = 140)])
    def __init__(self, original_nickname, *args, **kwargs):
        # Remember the user's current nickname so keeping it unchanged is
        # not rejected as a duplicate in validate().
        Form.__init__(self, *args, **kwargs)
        self.original_nickname = original_nickname
    def validate(self):
        # NOTE(review): `User` is not imported in this module - the duplicate
        # check below raises NameError as written; confirm the model is
        # imported (e.g. from the app's models module) before relying on it.
        if not Form.validate(self):
            return False
        if self.nickname.data == self.original_nickname:
            return True
        user = User.query.filter_by(nickname = self.nickname.data).first()
        if user != None:
            self.nickname.errors.append('This nickname is already in use. Please choose another one.')
            return False
        return True
class PostForm(Form):
    """Single-field form for submitting a new post."""
    post = TextField('post', validators = [Required()])
class SearchForm(Form):
    """Single-field form holding the full-text search query."""
    search = TextField('search', validators = [Required()])
| bsd-3-clause | -3,858,252,173,252,549,000 | 38.74359 | 102 | 0.652903 | false |
QEF/postqe | postqe/bands.py | 1 | 4666 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Functions to calculate the electronic band structure.
Note: no symmetry recognition is implemented yet.
"""
import numpy as np
from math import fabs, sqrt
from postqe.xmlfile import get_cell_data, get_calculation_data, get_band_strucure_data
from postqe.constants import ev_to_ry
def compute_bands(xmlfile, filebands='filebands', spin_component=''):
    """Extract the band structure from a QE XML file and write it to *filebands*.

    :param xmlfile: Quantum ESPRESSO XML output file
    :param filebands: name of the output file (plotband-style format)
    :param spin_component: for spin-polarized (lsda) calculations, 1 selects
        the spin-up bands (first half), anything else the spin-down bands
        (second half); ignored for non-magnetic calculations
    :return: (kpoints, bands) - the (nks, 3) k-point coordinates and the
        (nks, nbnd) band-energy matrix (entries outside the selected spin
        range stay 0)
    """
    ibrav, alat, a, b, nat, ntyp, atomic_positions, atomic_species = get_cell_data(xmlfile)
    prefix, outdir, ecutwfc, ecutrho, functional, lsda, noncolin, pseudodir, nr, nr_smooth = \
        get_calculation_data(xmlfile)
    nks, nbnd, ks_energies = get_band_strucure_data(xmlfile)

    # Pick the band index range and conversion factor once, instead of
    # duplicating the whole write loop for every spin case.
    if lsda:    # magnetic: spin up occupies the first half of the bands
        factor = 2 * nat / ev_to_ry
        if spin_component == 1:
            band_range = range(0, nbnd // 2)
        else:
            band_range = range(nbnd // 2, nbnd)
    else:       # non magnetic
        factor = nat / ev_to_ry
        band_range = range(0, nbnd)

    kpoints = np.zeros((nks, 3))
    bands = np.zeros((nks, nbnd))
    # Context manager guarantees the output file is flushed and closed
    # (the original left the handle open).
    with open(filebands, "w") as fout:
        fout.write("& plot nbnd = "+str(nbnd)+" nks = "+str(nks)+" /\n")
        for i in range(0, nks):
            kpoints[i] = ks_energies[i]['k_point']['$']
            fout.write(12 * ' ' + ' {:.6E}'.format(kpoints[i,0]) + ' {:.6E}'.format(kpoints[i,1]) + ' {:.6E}\n'.format(kpoints[i,2]))
            for j in band_range:
                bands[i, j] = ks_energies[i]['eigenvalues'][j] * factor  # eigenvalue at k-point i, band j
                fout.write(' {:.3E}'.format(bands[i, j]))
            fout.write('\n')
    return kpoints, bands
def set_high_symmetry_points(kpoints):
    """
    Determines which k-points have "high symmetry" and are at the boundaries
    of the Brillouin zone.

    :param kpoints: a matrix (nks, 3) with the k-points coordinates.
    :return: an array of nks booleans, True if the k-point is a high-symmetry one
    """
    nks = kpoints.shape[0]
    high_sym = np.full(nks, False, dtype=bool)
    # The first and last points of the path are always high-symmetry ones.
    high_sym[0] = True
    high_sym[nks - 1] = True
    for i in range(1, nks - 1):
        if np.dot(kpoints[i], kpoints[i]) < 1.e-9:
            # The Gamma point is always a high-symmetry one.
            high_sym[i] = True
            continue
        prev_seg = kpoints[i] - kpoints[i - 1]
        next_seg = kpoints[i + 1] - kpoints[i]
        # Cosine of the angle between the incoming and outgoing segments;
        # a kink (cosine != 1) marks a high-symmetry point.
        cos_angle = np.dot(prev_seg, next_seg) / sqrt(np.dot(prev_seg, prev_seg)) / sqrt(np.dot(next_seg, next_seg))
        if fabs(cos_angle - 1.0) > 1.0e-4:
            high_sym[i] = True
    return high_sym
def compute_kx(kpoints):
    """
    "Linearize" the path along the input k-points list and compute the
    linear x variable kx for band-structure plots.

    :param kpoints: a matrix (nks, 3) with the k-points coordinates.
    :return: kx, the cumulative path length at each k-point (big jumps
        between disjoint path segments are collapsed to zero width)
    """
    nks = kpoints.shape[0]
    kx = np.zeros(nks)
    seg = kpoints[2, :] - kpoints[1, :]
    prev_mod = sqrt(np.dot(seg, seg))
    for i in range(1, nks):
        seg = kpoints[i, :] - kpoints[i - 1, :]
        mod = sqrt(np.dot(seg, seg))
        if mod > 5 * prev_mod:
            # A big jump means the two points belong to different lines of
            # the path: put them on the same x position in the graph.
            kx[i] = kx[i - 1]
        elif mod > 1.e-5:
            # Usual case: consecutive points on the same path segment.
            kx[i] = kx[i - 1] + mod
            prev_mod = mod
        else:
            # Nearly coincident points: advance by the (tiny) distance but
            # do not remember it as the reference segment length.
            kx[i] = kx[i - 1] + mod
    return kx
| lgpl-2.1 | 3,263,816,706,830,989,300 | 39.224138 | 133 | 0.545649 | false |
tompecina/legal | legal/uds/views.py | 1 | 11475 | # -*- coding: utf-8 -*-
#
# uds/views.py
#
# Copyright (C) 2011-19 Tomáš Pecina <[email protected]>
#
# This file is part of legal.pecina.cz, a web-based toolbox for lawyers.
#
# This application is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from datetime import datetime
from csv import writer as csvwriter
from json import dump
from os.path import join
from django.shortcuts import redirect, HttpResponse
from django.views.decorators.http import require_http_methods
from django.views.decorators.gzip import gzip_page
from django.apps import apps
from django.urls import reverse
from django.http import QueryDict, Http404
from legal.common.glob import (
INERR, TEXT_OPTS_KEYS, REPO_URL, EXLIM_TITLE, FTLIM_TITLE, LOCAL_SUBDOMAIN, LOCAL_URL, DTF, ODP)
from legal.common.utils import Pager, new_xml, xml_decorate, LOGGER, render
from legal.uds.forms import MainForm
from legal.uds.models import Agenda, Document, DocumentIndex, File
APP = __package__.rpartition('.')[2]  # short app name, e.g. 'uds'
APPVERSION = apps.get_app_config(APP).version
BATCH = 50  # page size of the HTML result listing
REPO_PREFIX = join(REPO_URL, APP)  # base URL under which stored files live
EXLIM = 1000  # hard cap on rows in XML/CSV/JSON exports
FTLIM = 1000  # max pagination offset served from the full-text (sphinx) index
# Exports must never need deeper full-text paging than the listing allows.
assert FTLIM <= EXLIM
@require_http_methods(('GET', 'POST'))
def mainpage(request):
    """Main search page: show the form on GET, validate and redirect on POST."""
    LOGGER.debug('Main page accessed using method {}'.format(request.method), request, request.POST)
    page_title = apps.get_app_config(APP).verbose_name
    agendas = Agenda.objects.all().order_by('desc')
    if request.method == 'GET':
        return render(
            request,
            'uds_mainpage.xhtml',
            {'app': APP,
             'page_title': page_title,
             'err_message': '',
             'agendas': agendas,
             'form': MainForm()})
    form = MainForm(request.POST)
    if not form.is_valid():
        LOGGER.debug('Invalid form', request)
        return render(
            request,
            'uds_mainpage.xhtml',
            {'app': APP,
             'page_title': page_title,
             'err_message': INERR,
             'agendas': agendas,
             'form': form})
    # Valid submission: turn the cleaned data into a query string and
    # redirect to the list view matching the requested output format.
    cld = form.cleaned_data
    query = QueryDict(mutable=True)
    for key in cld:
        if cld[key]:
            query[key] = cld[key]
    query['start'] = 0
    del query['format']
    return redirect('{}?{}'.format(reverse('{}:{}list'.format(APP, cld['format'])), query.urlencode()))
def g2p(reqd):
    """Translate GET parameters into Django ORM filter kwargs.

    Integer parameters are parsed and checked against a per-field minimum;
    malformed values raise (ValueError/AssertionError), which the callers
    translate into HTTP 404.
    """
    par = {}
    if 'publisher' in reqd:
        par['publisher_id'] = reqd['publisher']
    # Minimum legal value for each integer parameter.
    minima = {
        'senate': 0,
        'number': 1,
        'year': 1970,
        'page': 1,
        'agenda': 1,
        'id': 1,
    }
    for field, minimum in minima.items():
        if field in reqd:
            value = int(reqd[field])
            assert value >= minimum
            par[field] = value
    if 'register' in reqd:
        par['register'] = reqd['register'].upper()
    if 'date_posted_from' in reqd:
        par['posted__gte'] = datetime.strptime(reqd['date_posted_from'], DTF).date()
    if 'date_posted_to' in reqd:
        # Upper bound is exclusive: day after the requested end date.
        par['posted__lt'] = datetime.strptime(reqd['date_posted_to'], DTF).date() + ODP
    if 'text' in reqd:
        par['text__search'] = reqd['text']
    return par
@require_http_methods(('GET',))
def htmllist(request):
    """Paginated HTML search results.

    The query first runs against the Sphinx full-text index. Past the FTLIM
    offset the index cannot paginate: with a full-text term the user gets an
    explanatory page, otherwise the query is redone against the main DB.
    """
    LOGGER.debug('HTML list accessed', request, request.GET)
    reqd = request.GET.copy()
    try:
        par = g2p(reqd)
        start = int(reqd['start']) if 'start' in reqd else 0
        assert start >= 0
        docins = DocumentIndex.objects.using('sphinx').filter(**par).order_by('-posted', 'id')
        total = docins.count()
        if total and start >= total:
            start = total - 1
        if start >= FTLIM:
            if 'text' in reqd:
                # Sphinx cannot page this deep into a full-text result;
                # refuse with an explanatory page instead of truncating.
                return render(
                    request,
                    'ftlim.xhtml',
                    {'app': APP,
                     'page_title': FTLIM_TITLE,
                     'limit': FTLIM,
                     'back': reverse('uds:mainpage')})
            # No full-text term: the main database can paginate arbitrarily
            # deep, so redo the query there.
            docs = Document.objects.filter(**par).order_by('-posted', 'id').distinct()
            total = docs.count()
            if total and start >= total:
                start = total - 1
            docs = docs[start:(start + BATCH)]
        else:
            # Fetch only the ids of the current page from Sphinx, then load
            # the full rows from the main database.
            docins = list(docins[start:(start + BATCH)].values_list('id', flat=True))
            docs = Document.objects.filter(id__in=docins).order_by('-posted', 'id').distinct()
        for doc in docs:
            doc.files = File.objects.filter(document=doc).order_by('fileid').distinct()
            idx = 1
            for file in doc.files:
                # Template hint: break the line after every 5th attachment.
                file.brk = idx % 5 == 0
                idx += 1
    except:
        # Any malformed parameter (bad int, bad date, ...) ends up here.
        raise Http404
    return render(
        request,
        'uds_list.xhtml',
        {'app': APP,
         'page_title': 'Výsledky vyhledávání',
         'rows': docs,
         'pager': Pager(start, total, reverse('uds:htmllist'), reqd, BATCH),
         'total': total,
         'noindex': True})
@gzip_page
@require_http_methods(('GET',))
def xmllist(request):
    """Export the search results as an XML attachment (Dokumenty.xml).

    Refuses result sets larger than EXLIM with an explanatory page.
    """
    LOGGER.debug('XML list accessed', request, request.GET)
    reqd = request.GET.copy()
    try:
        par = g2p(reqd)
        resins = DocumentIndex.objects.using('sphinx').filter(**par).order_by('posted', 'id')
    except:
        # Malformed parameters become a 404.
        raise Http404
    total = resins.count()
    if total > EXLIM:
        return render(
            request,
            'exlim.xhtml',
            {'app': APP,
             'page_title': EXLIM_TITLE,
             'limit': EXLIM,
             'total': total,
             'back': reverse('uds:mainpage')})
    # Ids come from the Sphinx index; full rows are loaded from the main DB.
    resins = list(resins.values_list('id', flat=True))
    res = Document.objects.filter(id__in=resins).order_by('posted', 'id').distinct()
    # Attributes for the root <documents> element, including the XSD location
    # matching this application version.
    doc = {
        'documents': {
            'xmlns': 'http://' + LOCAL_SUBDOMAIN,
            'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
            'xsi:schemaLocation': 'http://{} {}/static/{}-{}.xsd'.format(LOCAL_SUBDOMAIN, LOCAL_URL, APP, APPVERSION),
            'application': APP,
            'version': APPVERSION,
            'created': datetime.now().replace(microsecond=0).isoformat()
        }
    }
    xml = new_xml('')
    tag_documents = xml_decorate(xml.new_tag('documents'), doc)
    xml.append(tag_documents)
    # One <document> element per result row, with nested metadata and files.
    for item in res:
        tag_document = xml.new_tag('document')
        tag_documents.append(tag_document)
        tag_document['id'] = item.docid
        tag_publisher = xml.new_tag('publisher')
        tag_document.append(tag_publisher)
        tag_publisher['id'] = item.publisher.pubid
        tag_publisher.append(item.publisher.name)
        tag_ref = xml.new_tag('ref')
        tag_document.append(tag_ref)
        tag_ref.append(item.ref)
        tag_description = xml.new_tag('description')
        tag_document.append(tag_description)
        tag_description.append(item.desc)
        tag_agenda = xml.new_tag('agenda')
        tag_document.append(tag_agenda)
        tag_agenda.append(item.agenda.desc)
        tag_posted = xml.new_tag('posted')
        tag_document.append(tag_posted)
        tag_posted.append(item.posted.isoformat())
        tag_files = xml.new_tag('files')
        tag_document.append(tag_files)
        for fil in File.objects.filter(document=item).order_by('fileid').distinct():
            tag_file = xml.new_tag('file')
            tag_files.append(tag_file)
            tag_file['id'] = fil.fileid
            tag_name = xml.new_tag('name')
            tag_file.append(tag_name)
            tag_name.append(fil.name)
            tag_url = xml.new_tag('url')
            tag_file.append(tag_url)
            tag_url.append(join(REPO_PREFIX, str(fil.fileid), fil.name))
    response = HttpResponse(
        str(xml).encode('utf-8') + b'\n',
        content_type='text/xml; charset=utf-8')
    response['Content-Disposition'] = 'attachment; filename=Dokumenty.xml'
    return response
@gzip_page
@require_http_methods(('GET',))
def csvlist(request):
    """Export the search results as a CSV attachment (Dokumenty.csv).

    Refuses result sets larger than EXLIM with an explanatory page.
    """
    LOGGER.debug('CSV list accessed', request, request.GET)
    reqd = request.GET.copy()
    try:
        par = g2p(reqd)
        resins = DocumentIndex.objects.using('sphinx').filter(**par).order_by('posted', 'id')
    except:
        # Malformed parameters become a 404.
        raise Http404
    total = resins.count()
    if total > EXLIM:
        return render(
            request,
            'exlim.xhtml',
            {'app': APP,
             'page_title': EXLIM_TITLE,
             'limit': EXLIM,
             'total': total,
             'back': reverse('uds:mainpage')})
    # Ids come from the Sphinx index; full rows are loaded from the main DB.
    resins = list(resins.values_list('id', flat=True))
    res = Document.objects.filter(id__in=resins).order_by('posted', 'id').distinct()
    response = HttpResponse(content_type='text/csv; charset=utf-8')
    response['Content-Disposition'] = 'attachment; filename=Dokumenty.csv'
    writer = csvwriter(response)
    # Czech column headers: posting date, court/prosecutor's office,
    # description, reference number, agenda, files.
    hdr = (
        'Datum vyvěšení',
        'Soud/státní zastupitelství',
        'Popis dokumentu',
        'Spisová značka/číslo jednací',
        'Agenda',
        'Soubory',
    )
    writer.writerow(hdr)
    for item in res:
        files = File.objects.filter(document=item).order_by('fileid').distinct()
        dat = (
            '{:%d.%m.%Y}'.format(item.posted),
            item.publisher.name,
            item.desc,
            item.ref,
            item.agenda.desc,
            # All attachment URLs joined by semicolons into one cell.
            ';'.join([join(REPO_PREFIX, str(fil.fileid), fil.name) for fil in files]),
        )
        writer.writerow(dat)
    return response
@gzip_page
@require_http_methods(('GET',))
def jsonlist(request):
    """Export the search results as a JSON attachment (Dokumenty.json)."""
    LOGGER.debug('JSON list accessed', request, request.GET)
    reqd = request.GET.copy()
    try:
        par = g2p(reqd)
        resins = DocumentIndex.objects.using('sphinx').filter(**par).order_by('posted', 'id')
    except:
        raise Http404
    total = resins.count()
    if total > EXLIM:
        # Refuse over-large exports with an explanatory page.
        return render(
            request,
            'exlim.xhtml',
            {'app': APP,
             'page_title': EXLIM_TITLE,
             'limit': EXLIM,
             'total': total,
             'back': reverse('uds:mainpage')})
    matched_ids = list(resins.values_list('id', flat=True))
    documents = Document.objects.filter(id__in=matched_ids).order_by('posted', 'id').distinct()
    response = HttpResponse(content_type='application/json; charset=utf-8')
    response['Content-Disposition'] = 'attachment; filename=Dokumenty.json'
    out = []
    for document in documents:
        attachments = File.objects.filter(document=document).order_by('fileid').distinct()
        out.append({
            'posted': document.posted.isoformat(),
            'publisher': document.publisher.name,
            'desc': document.desc,
            'ref': document.ref,
            'agenda': document.agenda.desc,
            'files': [{
                'id': fil.fileid,
                'name': fil.name,
                'url': join(REPO_PREFIX, str(fil.fileid), fil.name)}
                for fil in attachments],
        })
    dump(out, response)
    return response
| gpl-3.0 | 4,609,566,359,525,702,000 | 32.601173 | 118 | 0.575929 | false |
axiros/transcrypt | make.py | 1 | 1049 | #!/usr/bin/env python
print '''
we require an index.html.tmpl next to us in this folder, into which we put the content
of pandoc generated raw show.html, then write index.html with the result.
'''
from time import sleep
import os
if __name__ == '__main__':
oldstat = 0
print 'looping, checking changes of show.markdown'
while True:
stat = os.stat('./show.markdown')
if stat == oldstat:
sleep(1)
continue
oldstat = stat
os.system('pandoc show.markdown -o show.html -s -V "theme:black" -t revealjs')
# now take a hammer:
t = open('./index.html.tmpl').read()
with open('./show.html') as fd:
s = fd.read()
title = s.split('<title>', 1)[1].split('</title')[0]
body = s.split('<body>', 1)[1].split('<script ')[0]
t = t.replace('_TITLE_', title).replace('_CONTENT_', body)
open('./index.html', 'w').write(t)
os.system('./safari_reload.sh')
#os.system('hg addremove * && hg commit -m "`date`" &')
| apache-2.0 | 3,437,461,641,889,378,300 | 32.83871 | 87 | 0.561487 | false |
gibbon-joel/metahive | bin/import-to-hive.py | 1 | 28520 | #!/usr/bin/python
import os, sys
import hashlib
import MySQLdb
import MySQLdb.cursors
from datetime import datetime
import time
import shutil
import magic
import argparse
import re
sys.path.append('%s/../lib' %(os.path.dirname(__file__)))
import metahivesettings.settings
#from metahive.scanners mport *
import metahive.scanners
regScan = {}             # scanner plugin name -> plugin module
scannersByMimetype = {}  # mimetype -> list of plugin names that handle it
# Register every scanner plugin that exposes a register() function; the
# function returns the list of mimetypes the plugin supports.
for name in metahive.scanners.__all__:
    plugin = getattr(metahive.scanners, name)
    try:
        register_plugin = plugin.register
    except AttributeError:
        print "Plugin %s does not have a register() function" %(name)
        pass
    else:
        supported_mimetypes = register_plugin()
        for mimetype in supported_mimetypes:
            if mimetype not in scannersByMimetype:
                scannersByMimetype[mimetype] = []
            scannersByMimetype[mimetype].append(name)
        regScan[name] = plugin
db_credentials = metahivesettings.settings.db_credentials()
repoDir = metahivesettings.settings.repo_dir()
#print registeredScanners
#print scannersByMimetype
# Command-line interface of the importer.
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--sourcedir", help="Top level directory to work on (e.g. /path/to/upload/folder", required=True)
parser.add_argument("-v", "--verbose", help="Be verbose (more debug output)", required=False, default=False, action='store_true')
parser.add_argument("-c", "--copy-to-repo", help="copy scanned supported files into the media repository", required=False, default=False, action='store_true', dest='copy_to_repo')
parser.add_argument("-d", "--delete-original", help="delete original/duplicate files if we have a copy in the media repository", required=False, default=False, action='store_true', dest='delete_original')
args = parser.parse_args()
# Copying into the repository requires a configured repository directory.
if args.copy_to_repo and not repoDir:
    print "repository directory is not set in config's [repository] section - cannot copy to repo'"
    sys.exit(2)
# Module-wide libmagic handle used by getMimeType().
m=magic.open(magic.MAGIC_MIME_TYPE)
m.load()
def hash_file(filename, hashtype='sha1'):
    """Return the hex digest of *filename* computed with *hashtype*.

    Generalized over the original: any algorithm known to hashlib is
    accepted (hashlib.new raises ValueError for unknown names, whereas the
    old code silently returned None / raised NameError for anything but
    'sha1'). The default 'sha1' behaves exactly as before.
    """
    BUF_SIZE = 1024*1024  # hash incrementally in 1MB blocks so huge files never have to fit in RAM
    hasher = hashlib.new(hashtype)
    with open(filename, 'rb') as f:
        while True:
            data = f.read(BUF_SIZE)
            if not data:
                break
            hasher.update(data)
    return hasher.hexdigest()
def makePathFromHash(hash):
    """Map a content hash to its three-level fan-out subdirectory.

    INPUT:  2ef94a0e9a4ef32fda6e10b83b1e698036b726f1
    OUTPUT: 2/e/f  (relative to the repository root)
    """
    return '/'.join(hash[:3])
def getMimeType(filename):
    """Best-effort MIME type of *filename* via libmagic; False on failure."""
    try:
        return m.file(filename)
    except Exception as e:
        # Report the problem but keep going - callers treat False as
        # "unknown type".
        print(repr(e))
        return False
def gatherBasicInfo(filenames_array):
    """
    Collect "basic" OS-level information (ctime, mtime, size) for files.

    Expects an array of filenames (with full path information); returns a
    dict keyed by the full filename. Files that cannot be stat()ed are
    reported and skipped.
    input:  [ '/path/to/file/1.jpg', '/path/to/file/2.jpg' ]
    output: { '/path/to/file/1.jpg' : { 'ctime': ..., 'mtime': ..., 'size': ... }, ... }
    """
    begin = time.time()
    fileInfo = {}
    for filename in filenames_array:
        try:
            info = os.stat(filename)
        except OSError:
            # Bug fix: the original bare `except:` also swallowed
            # KeyboardInterrupt and SystemExit.
            print("Could not stat file '%s'" % (filename))
        else:
            file_mtime = datetime.fromtimestamp(info.st_mtime)
            file_ctime = datetime.fromtimestamp(info.st_ctime)
            fileInfo[filename] = {
                'ctime': file_ctime,
                'mtime': file_mtime,
                'size': info.st_size,
            }
    finish = time.time()
    time_taken = finish - begin
    # Guard against a zero-duration measurement (possible with coarse clocks
    # or an empty input list) that would divide by zero.
    if time_taken > 0:
        files_per_second = len(filenames_array) / float(time_taken)
    else:
        files_per_second = float('inf')
    print("It took %0.2f seconds to gather basic info for %i files (%0.1f files per second)" % (time_taken, len(filenames_array), files_per_second))
    return fileInfo
def getRepoStateForFiles(filenames_dict):
    """
    Expects a dict of dicts (essentially, the output of "gatherBasicInfo"). Constructs SQL to check
    which of the files (if any) we have already in the database.
    Mutates filenames_dict in place (adds a 'hash.sha1' key per file) and
    returns [notKnown, known], both keyed by sha1 hash.
    """
    """
    As we do not want to hash everything again if it's known but not stored in the repo, we will
    rely on os.stat + filename as a rough initial check, only hashing if we do not find an exact match...
    """
    original_filenames_by_hash = {}
    # Phase 1: resolve a sha1 for every input file - reuse the stored hash
    # when exactly one DB row matches size+ctime+mtime+name, hash otherwise.
    for filename, filedata in filenames_dict.iteritems():
        sql = "SELECT id, sha1, file_size, original_ctime as ctime, original_mtime as mtime, is_in_repo FROM files WHERE file_size=%s and original_ctime=%s and original_mtime=%s and original_filename=%s"
        numHits = c.execute ( sql, [ filedata['size'], filedata['ctime'], filedata['mtime'], filename ] )
        if numHits > 0:
            if numHits > 1:
                #print "AAAARGH - file %s found more than once in the database - this should never happen" %(filename)
                # NOTE(review): this message contains an un-substituted %s
                # (no % arguments are applied); it prints the literal "%s".
                print "<5> More than one hit for %s found in DB, cannot use hash from db, hashing live..."
                filenames_dict[filename]['hash.sha1'] = hash_file(filename)
            else:
                row = c.fetchone()
                print "<6> Exactly one match for stat-params for %s found in DB, using hash %s from DB" %(filename, row['sha1'])
                filenames_dict[filename]['hash.sha1'] = row['sha1']
        else:
            print "<6> File %s not known yet by name/size/mtime - hash it" %(filename)
            myhash = hash_file(filename)
            filenames_dict[filename]['hash.sha1'] = myhash
            original_filenames_by_hash[myhash] = filename
    # Phase 2: look up all hashes at once. hash_lookup maps sha1 -> size/name.
    hash_lookup = {}
    #hash_lookup['463699b9bc849c94e0f45ff2f21b171d2d128bec'] = {'size': 0, 'name': 'undefined name'}
    for filename, filedata in filenames_dict.iteritems():
        #print filedata
        hash_lookup[filedata['hash.sha1']] = { 'size': filedata['size'], 'name': filename }
    # I want to create SQL of the form 'SELECT id, filesize FROM files WHERE hash IN ( hash1, hash2, hash3, ... )'
    # then compare hash & filesizes
    placeholders = ', '.join(['%s'] * len(hash_lookup))
    sql = 'SELECT * FROM files WHERE sha1 IN (%s)' %(placeholders)
    #print sql
    #print hash_lookup.keys()
    c.execute( sql, hash_lookup.keys() )
    rows = c.fetchall()
    # ({'sha1': '463699b9bc849c94e0f45ff2f21b171d2d128bec', 'id': 284L, 'file_size': None},)
    known = {}
    #print "******"
    #print "Original filenames by hash:"
    #print original_filenames_by_hash
    #print "******"
    # Phase 3: for every DB hit, record it as known and register any new
    # alternate original filename in the original_filenames table.
    for row in rows:
        if row['sha1'] in hash_lookup and 'name' in hash_lookup[row['sha1']]:
            print "%s: %s " %(row['sha1'], hash_lookup[row['sha1']])
            myhash = row['sha1']
            filename = hash_lookup[myhash]['name']
            print "%s: %s " %(myhash, filename)
            #if myhash in original_filenames_by_hash and filename != original_filenames_by_hash[myhash]:
            if filename != row['original_filename']:
                # file is known by a different original name in DB
                print "<5> Found new original name %s for the known file %s (%s)" %(row['original_filename'], myhash, filename)
                existing_original_name = c.execute('SELECT * FROM original_filenames WHERE file_id=%s AND original_filename=%s', [row['id'], filename])
                if existing_original_name < 1:
                    c.execute('INSERT INTO original_filenames (file_id, original_filename) VALUES (%s, %s)', [row['id'], filename])
                    print "<6> Alternate name %s for %s added to DB" %(filename, myhash)
                else:
                    print "<7> Alternate name %s already known for %s" %(filename, myhash)
        else:
            # NOTE(review): this fallback name is not a key of
            # filenames_dict, so the lookups just below would raise
            # KeyError if this branch is ever taken - confirm it is
            # unreachable in practice.
            filename = 'unknown filename'
        known[row['sha1']] = {
            'size': row['file_size'],
            'name': filename,
            'ctime': filenames_dict[filename]['ctime'],
            'mtime': filenames_dict[filename]['mtime'],
            'id': row['id'],
            'is_in_repo': row['is_in_repo']
        }
    db.commit() # for any original_filenames changes
    # Phase 4: everything hashed but not found in the DB is "not known".
    notKnown = {}
    for hashvalue, value in hash_lookup.iteritems():
        if hashvalue not in known:
            notKnown[hashvalue] = {
                'size': filenames_dict[value['name']]['size'],
                'name': value['name'],
                'ctime': filenames_dict[value['name']]['ctime'],
                'mtime': filenames_dict[value['name']]['mtime'],
                'id': None,
                'is_in_repo': False
            }
    #diffkeys = set(hash_lookup) - set(known)
    #print hash_lookup
    #print known
    #print diffkeys
    #print notKnown
    #print rows
    return [ notKnown, known ]
def addFileIntoDB ( filehash, mimetype, extraInfo ):
    """
    takes a hash and the "extraInfo" dict with ctime, mtime, size, name and is_in_repo values, then tries to add it into the db.
    Returns False on failure or the insert_id on success.
    NOTE(review): the INSERT is not committed here - confirm the caller
    issues db.commit().
    """
    # f7bef5ce2781d8667f2ed85eac4627d532d32222, {'is_in_repo': False, 'ctime': datetime.datetime(2015, 10, 14, 19, 1, 52, 418553), 'mtime': datetime.datetime(2015, 4, 26, 14, 24, 26), 'size': 2628630, 'id': None, 'name': '/treasure/media-throwaway/temp/upload/foobar/IMG_6344.JPG'}
    sql = """INSERT INTO files SET
                is_in_repo = %s,
                original_filename = %s,
                type = %s,
                sha1 = %s,
                file_size = %s,
                original_mtime = %s,
                original_ctime = %s
    """
    try:
        affected = c.execute(sql, [ extraInfo['is_in_repo'], extraInfo['name'], mimetype, filehash, extraInfo['size'], extraInfo['mtime'], extraInfo['ctime'] ] )
    except Exception as e:
        print "Cannot insert file %s into DB" %(filehash)
        print repr(e)
        return False
    print "Successfully INSERTed. Affected: %i" %(affected)
    # Primary key of the freshly inserted row.
    return c.lastrowid
def getExtension(filename):
    """Return the lower-cased text after the last dot of *filename*.

    NOTE(review): this definition is shadowed by an identical redefinition
    further down in this file; the later one is the active binding.
    Beware: for names without a dot, rfind() returns -1 and the whole
    lower-cased filename is returned as the "extension".
    """
    extensionPos = filename.rfind('.')
    return filename[extensionPos+1:].lower()
def getMetadataForFiles(files, scannersOnly = False):
    """
    retrieve metadata for a dict of files
    if scannersOnly = True, we're only interested in the actual scanners (to know which plugins to run again)
    Returns {file_id: {scanner_name: {column: value, ...}}}.
    """
    # NOTE(review): the placeholder count is taken from len(files), but the
    # parameters come from filesById, which can be shorter when an entry
    # lacks an 'id' - that mismatch would make the query fail; confirm all
    # callers pass entries with ids.
    placeholders = ', '.join(['%s'] * len(files))
    filesById = []
    #print files
    for filename, extraInfo in files.iteritems():
        if 'id' in extraInfo:
            filesById.append(extraInfo['id'])
    if scannersOnly:
        sql = 'SELECT DISTINCT file_id, scanner FROM metadata WHERE file_id IN (%s) GROUP BY file_id, scanner' %(placeholders)
    else:
        sql = 'SELECT * FROM metadata WHERE file_id IN (%s)' %(placeholders)
    #print sql
    #print hash_lookup.keys()
    c.execute( sql, filesById )
    rows = c.fetchall()
    metadata = {}
    for row in rows:
        fileId = row['file_id']
        if fileId not in metadata:
            metadata[fileId] = {}
        # NOTE(review): each new row for the same (file, scanner) pair
        # replaces the previous one here - only the last row's columns
        # survive per scanner; verify this is intended.
        metadata[fileId][row['scanner']] = {}
        for k, v in row.iteritems():
            metadata[fileId][row['scanner']][k] = v
    return metadata
def getFileIDByHash(filehash):
    """Resolve a sha1 hash to its files.id; False when unknown.

    Checks the in-memory `known` cache (a module-level dict populated by
    getRepoStateForFiles) before falling back to a DB lookup.
    """
    if filehash in known:
        if 'id' in known[filehash]:
            return known[filehash]['id']
    numrows = c.execute('SELECT id FROM files WHERE sha1=%s', [filehash])
    if numrows == 1:
        # Bug fix: the connection uses MySQLdb.cursors.DictCursor, so rows
        # are dicts keyed by column name - the old `fetchone()[0]` raised
        # KeyError instead of returning the id.
        return c.fetchone()['id']
    return False
def getMetadataFromDB(file_id, scanner = 'all' ):
    """Fetch stored metadata for one file, optionally limited to one scanner.

    Returns {scanner: {tagname: tagvalue}}, or False if file_id is falsy.
    NOTE(review): `scanner is 'all'` below compares identity, not equality -
    it only works because short string literals are interned; it should be
    `scanner == 'all'`.
    """
    #print "%s has id %s" %(filehash, file_id)
    if not file_id:
        return False
    # Schema of the metadata table for reference:
    #+----------+--------------+------+-----+---------+----------------+
    #| Field    | Type         | Null | Key | Default | Extra          |
    #+----------+--------------+------+-----+---------+----------------+
    #| id       | bigint(20)   | NO   | PRI | NULL    | auto_increment |
    #| file_id  | bigint(20)   | NO   |     | NULL    |                |
    #| scanner  | varchar(255) | YES  |     | NULL    |                |
    #| tagname  | varchar(255) | YES  |     | NULL    |                |
    #| tagvalue | varchar(255) | YES  |     | NULL    |                |
    #+----------+--------------+------+-----+---------+----------------+
    if scanner is 'all':
        numrows = c.execute("SELECT * FROM metadata WHERE file_id=%s", [file_id])
    else:
        numrows = c.execute("SELECT * FROM metadata WHERE file_id=%s AND scanner=%s", [file_id, scanner])
    #print "getMeta fetched %i rows" %(numrows)
    result = c.fetchall()
    metadata = {}
    for row in result:
        if row['scanner'] not in metadata:
            metadata[row['scanner']] = {}
        metadata[row['scanner']][row['tagname']] = row['tagvalue']
    return metadata
def compareMetadata(old, new):
    """Diff two {scanner: {tagname: value}} mappings.

    Returns [deleted, added]: tags missing from `new` (or whole scanners)
    end up in `deleted`; new tags (or whole scanners) in `added`; a tag
    whose value changed appears in both. Values are compared via str().
    """
    deleted = {}
    added = {}
    # Pass 1: what disappeared or changed, seen from the old side.
    for scanner, oldtags in old.items():
        if scanner not in new:
            deleted[scanner] = oldtags
            continue
        for tagname, oldvalue in oldtags.items():
            if tagname not in new[scanner]:
                deleted.setdefault(scanner, {})[tagname] = oldvalue
            elif str(oldvalue) != str(new[scanner][tagname]):
                print("value of tag %s differs: %s vs %s" % (tagname, repr(oldvalue), repr(new[scanner][tagname])))
                deleted.setdefault(scanner, {})[tagname] = oldvalue
                added.setdefault(scanner, {})[tagname] = new[scanner][tagname]
    # Pass 2: what is new, seen from the new side.
    for scanner, newtags in new.items():
        if scanner not in old:
            added[scanner] = newtags
            continue
        for tagname, newvalue in newtags.items():
            if tagname not in old[scanner]:
                added.setdefault(scanner, {})[tagname] = newvalue
    return [ deleted, added ]
def makeString(indict):
    """Coerce every value of *indict* to str, in place; returns the same dict."""
    for key in indict:
        indict[key] = str(indict[key])
    return indict
def putMetadataIntoDB(scanner, filehash, metaDict):
    """Replace the stored metadata of one (file, scanner) pair with metaDict.

    Diffs against the stored values (for logging), deletes all existing
    rows for the scanner, then bulk-INSERTs the new tags with derived
    float/date columns, and commits.
    """
    print "Put metadata from scanner %s for filehash %s into DB" %(scanner, filehash)
    file_id = getFileIDByHash(filehash)
    oldData = getMetadataFromDB(file_id, scanner=scanner)
    #print oldData
    if not oldData: oldData = { scanner: {} }
    newData = { scanner: makeString(metaDict) }
    # The diff is only used for the log output of compareMetadata.
    deleted, added = compareMetadata(oldData, newData)
    #print "diff:"
    #print deleted
    #print "--"
    #print added
    #print "++"
    #print "***"
    # Refresh strategy: delete everything for this scanner, re-insert all.
    deletedRows = c.execute('DELETE FROM metadata WHERE file_id=%s and scanner=%s', [file_id, scanner])
    # file_id and scanner are interpolated directly into the VALUES template
    # (%% escapes keep the real placeholders); safe only because both are
    # internal values, never user input.
    placeholders = ', '.join(["(%s, '%s', %%s, %%s, %%s, %%s)" %(file_id, scanner)] * len(newData[scanner]))
    sql = 'INSERT INTO metadata (file_id, scanner, tagname, tagvalue, tagvalue_float, tagvalue_date) VALUES %s' %(placeholders)
    #print sql
    #print hash_lookup.keys()
    sqlarray = []
    for tagname, tagvalue in newData[scanner].iteritems():
        sqlarray.append(tagname)
        sqlarray.append(tagvalue)
        # Derived numeric column: NULL when the value is not float-parseable.
        try:
            valFloat = float(tagvalue)
        except ValueError:
            valFloat = None
        sqlarray.append(valFloat)
        valDate = None
        # Derived date column: only attempted for tags that look date/time
        # related; handles the EXIF-style formats listed below.
        if 'date' in tagname.lower() or 'time' in tagname.lower():
            try:
                # 2015:08:22 19:09:58.241
                # 2015:09:14
                # 2015:08:22 19:09:58.241
                # 2015:08:22 19:09:58+02:00
                # 2015:08:22 19:09:58
                # 2015:08:22 19:09:58.241
                # 17:09:56.52
                # 2015:08:22 17:09:56.52Z
                # 2015:08:22
                m = re.search('^((19|20|21)[0-9][0-9])[-:._]((0[1-9]|1[0-2]))[-:._]([0-3][0-9])(.*)', tagvalue )
                if m:
                    valDate = "%s-%s-%s %s" %(m.group(1), m.group(3), m.group(5), m.group(6))
                    print "Matched %s in %s => %s" %(tagvalue, tagname, valDate)
                else:
                    # No date part: try a bare time-of-day, anchored to epoch day.
                    m = re.search('^([01][0-9]|2[0-3])[-:._]([0-5][0-9])[-:._]([0-5][0-9])(\.[0-9]+)?', tagvalue )
                    if m:
                        valDate = "1970-01-01 %s:%s:%s" %(m.group(1), m.group(2), m.group(3))
                        if m.group(4):
                            valDate = "%s%s" %(valDate, m.group(4))
                        print "Matched %s in %s => %s" %(tagvalue, tagname, valDate)
                    #else:
                    #print "Could not match %s in %s" %(tagvalue, tagname)
            except ValueError:
                valDate = None
        sqlarray.append(valDate)
    try:
        numrows = c.execute( sql, sqlarray )
    except Exception as e:
        print "error on INSERT metadata"
        print repr(e)
    else:
        print "<7> %i rows INSERTed for scanner %s on file %s" %(numrows, scanner, file_id)
    db.commit()
def getExtension(filename):
    """Return the lowercased suffix after the last '.' in *filename*.

    Mirrors the original str.rfind behaviour: when there is no dot at
    all the whole name is returned, lowercased.
    """
    _, dot, suffix = filename.rpartition('.')
    if not dot:
        return filename.lower()
    return suffix.lower()
def safelyImportFileIntoRepo ( filehash, extraInfo ):
extension = getExtension(extraInfo['name'])
targetFilename = '%s/%s/%s.%s' %(repoDir, makePathFromHash(filehash), filehash, extension)
print "<7> safely import %s to %s" %(extraInfo['name'], targetFilename)
try:
dirExists = os.stat(os.path.dirname(targetFilename))
except Exception as e:
if e.errno == 2:
# No such file or directory
try:
os.makedirs(os.path.dirname(targetFilename))
except Exception as e:
print "<4> Could not create repo directory: %s" %(os.path.dirname(targetFilename))
print repr(e)
return False
else:
print repr(e)
return False
if os.path.exists(targetFilename):
# file already exists in repo
destHash = hash_file(targetFilename)
if destHash != filehash:
print "<4> Hash collision - a file with the same hash %s already exists in the repo - this should never happen" %(destHash)
return False
else:
# file in repo is the same we want to import so don't do anything
print "<7> %s already exists in the repo, doing nothing" %(filehash)
return True
# only if target does not exist yet:
try:
shutil.copy2(extraInfo['name'], targetFilename) # copy2 preserves mtime/atime
except Exception as e:
print "<5> Could not copy '%s' to '%s'" %(filename, targetFilename)
print repr(e)
return False
destHash = hash_file(targetFilename)
if destHash != filehash:
print "<5> Newly copied file has non-matching hash: original = '%s', copy = '%s'" %(filehash, destHash)
return False
else:
print "<7> Successfully imported %s into the repo" %(filehash)
return True
# --- Script entry: a working DB connection is mandatory.  Distinct exit
# --- codes (1/2/3) tell callers apart which stage failed.
if not db_credentials:
    print "No database credentials, cannot run."
    sys.exit(1)
try:
    # DictCursor makes every fetched row a dict keyed by column name.
    db = MySQLdb.connect(user=db_credentials['db_username'], passwd=db_credentials['db_password'], db=db_credentials['db_name'], cursorclass=MySQLdb.cursors.DictCursor)
except Exception as e:
    print "Could not connect to SQL Server"
    print repr(e)
    sys.exit(2)
try:
    c = db.cursor()
except Exception as e:
    print "Could not acquire a DB cursor"
    print repr(e)
    sys.exit(3)
# Walk the source tree and bucket every file path by its detected MIME
# type; files whose type cannot be determined are skipped.
filesByMimetype = {}
debugcount = 0
for (dirpath, dirnames, filenames) in os.walk(args.sourcedir, topdown=True, onerror=None, followlinks=False):
    if filenames:
        print "Working on directory %s" %(dirpath)
    for filename in filenames:
        fullfilename = '%s/%s' %(dirpath, filename)
        try:
            mimetype = getMimeType(fullfilename)
        except Exception as e:
            print "Could not detect MIME type for %s" %(fullfilename)
            mimetype = None
            continue
        if mimetype not in filesByMimetype:
            filesByMimetype[mimetype] = []
        filesByMimetype[mimetype].append(fullfilename)
        debugcount += 1
    # Debug safety valve: stop walking after roughly 32 files.
    if debugcount > 32:
        print "*** DEBUG: breaking after %i files ***" %(debugcount)
        break
# Main processing loop.  For each MIME-type group: hash the files and
# register unknown ones in the DB, optionally import copies into the repo
# (and delete originals once verified), then run every registered metadata
# scanner over files that have no stored metadata yet.
for mimetype in filesByMimetype:
    if mimetype in scannersByMimetype:
        # supported file (we have at least one scanner that can give us metadata), so hash it...
        filesBasicInfo = gatherBasicInfo(filesByMimetype[mimetype])
        # check whether we have data already in SQL; figure out whether we need to import & delete... etc.
        notKnown, known = getRepoStateForFiles ( filesBasicInfo )
        hashByFilename = {}
        for filehash, extraInfo in notKnown.iteritems():
            # extraInfo is hash + ctime etc
            print "unknown %s file: %s, info: %s" %(mimetype, filehash, extraInfo)
            fileId = addFileIntoDB(filehash, mimetype, extraInfo)
            if fileId:
                # hmmm. When to commit the DB? After every file, or at some other point?
                try:
                    db.commit()
                except Exception as e:
                    print "Could not commit DB changes."
                    print repr(e)
                else:
                    # Promote the freshly registered file into 'known' so the
                    # import/scan passes below treat it like an existing row.
                    extraInfo['id'] = fileId
                    known[filehash] = extraInfo
                    hashByFilename[extraInfo['name']] = filehash
        for filehash, extraInfo in known.iteritems():
            # extraInfo is hash, ctime, db_id and the "lives_in_repo" field.
            print "known file: %s, info: %s" %(filehash, extraInfo)
            if args.copy_to_repo and not extraInfo['is_in_repo']:
                try:
                    importedIntoRepo = safelyImportFileIntoRepo(filehash, extraInfo)
                except Exception as e:
                    print repr(e)
                    print "Could not import file %s(%s) into repo" %(filehash, extraInfo['name'])
                else:
                    if not importedIntoRepo:
                        print "Could not import file %s(%s) into repo" %(filehash, extraInfo['name'])
                    else:
                        # Record the successful import in the DB.
                        try:
                            affected_rows = c.execute('UPDATE files SET is_in_repo=True WHERE id=%s', [extraInfo['id']])
                        except:
                            print "Could not update DB status for file %s (id %s)" %(filehash, extraInfo['id'])
                        else:
                            print "%i rows updated for file %i" %(affected_rows, extraInfo['id'])
                            extraInfo['is_in_repo'] = True
                            known[filehash]['is_in_repo'] = True
                            db.commit()
            if args.delete_original and extraInfo['is_in_repo']:
                extension = getExtension(extraInfo['name'])
                targetFilename = '%s/%s/%s.%s' %(repoDir, makePathFromHash(filehash), filehash, extension)
                # Only delete the original after re-verifying the repo copy's hash.
                if os.path.exists(targetFilename) and hash_file(targetFilename) == filehash:
                    print "<6> We have a valid copy of %s in the repo, going to delete %s" %(filehash, extraInfo['name'])
                    try:
                        os.unlink(extraInfo['name'])
                    except Exception as e:
                        print "Could not delete original %s" %(extraInfo['name'])
                        print repr(e)
                    else:
                        print "<6> Successfully deleted original of %s (%s)" %(filehash, extraInfo['name'])
                else:
                    # DB says the file is in the repo but the copy is missing/bad:
                    # try to re-import instead of deleting the only good copy.
                    print "<4> A file that we think is in the repo does not exist - NOT deleting original: %s" %(filehash)
                    try:
                        importedIntoRepo = safelyImportFileIntoRepo(filehash, extraInfo)
                    except Exception as e:
                        print repr(e)
                        print "Could not import file %s(%s) into repo" %(filehash, extraInfo['name'])
                    else:
                        if not importedIntoRepo:
                            print "Could not import file %s(%s) into repo" %(filehash, extraInfo['name'])
                        else:
                            print "<5> Re-imported file %s into the repo" %(filehash)
        #print "not found in Repo: %s" %("\n".join(notKnown))
        #print "already in Repo: %s" %("\n".join(known))
        knownMetaData = getMetadataForFiles(files = known, scannersOnly = True)
        print "=================="
        print "knownMetaData:"
        print knownMetaData
        print "=================="
        # Build reverse lookups: filename -> hash and DB id -> hash.
        hashById = {}
        for k, v in known.iteritems():
            #print "hbF: %s = %s" %(k, v)
            if v['name'] not in hashByFilename:
                hashByFilename[v['name']] = k
                if v['id'] not in hashById:
                    hashById[v['id']] = k
            else:
                print "Duplicate filename %s?! This should not happen" %(v['name'])
        #print "hbF:"
        #print hashByFilename
        #print "**"
        # iterate over registered metadata scanners for the current mimetype
        for plugin in scannersByMimetype[mimetype]:
            begin = time.time()
            list_of_files_to_scan = []
            for filename in filesByMimetype[mimetype]:
                filehash = None
                if filename in hashByFilename:
                    filehash = hashByFilename[filename]
                    if filehash in known:
                        if 'id' in known[filehash]:
                            fileId = known[filehash]['id']
                            if fileId in knownMetaData:
                                fmd = knownMetaData[fileId]
                                if plugin in fmd:
                                    # Metadata for this (file, scanner) pair already stored.
                                    print "Not scanning file %s with scanner %s, already have data in DB" %(filename, plugin)
                                    continue
                if filehash and filehash in known:
                    if known[filehash]['is_in_repo']:
                        # Prefer scanning the repo copy over the original path.
                        # NOTE(review): 'extraInfo' here is a stale leftover from the
                        # loop over known.iteritems() above — this probably should be
                        # known[filehash]['name']; verify.
                        extension = getExtension(extraInfo['name'])
                        targetFilename = '%s/%s/%s.%s' %(repoDir, makePathFromHash(filehash), filehash, extension)
                        list_of_files_to_scan.append(targetFilename)
                    else:
                        list_of_files_to_scan.append(filename)
            print "list of files to scan with %s: %s" %(plugin, list_of_files_to_scan)
            if list_of_files_to_scan:
                metadata = regScan[plugin].scanBulk(list_of_files_to_scan)
                finish = time.time()
                time_taken = finish - begin
                if time_taken <= 0:
                    files_per_second = -1 # avoid division by zero
                else:
                    files_per_second = len(filesByMimetype[mimetype]) / float(time_taken)
                print "plugin %s took %0.2f seconds to parse %i files (%0.1f files per second)" %(plugin, time_taken, len(filesByMimetype[mimetype]), files_per_second)
            else:
                metadata = False
            if metadata:
                for filename, metaDict in metadata.iteritems():
                    # Map the scanned path back to its hash: direct lookup for
                    # originals, basename parsing for repo paths.
                    if filename in hashByFilename:
                        filehash = hashByFilename[filename]
                    elif filename.startswith(repoDir):
                        # NOTE(review): the [1:...] slice drops the first character of
                        # the basename — looks like an off-by-one unless repo basenames
                        # carry a one-character prefix; verify against makePathFromHash.
                        filehash = os.path.basename(filename)[1:os.path.basename(filename).rfind('.')]
                    else:
                        print "file %s - no hash found, skip" %(filename)
                        continue
                    try:
                        putMetadataIntoDB(plugin, filehash, metaDict)
                    except Exception as e:
                        print "Could not put metadata into DB"
                        print repr(e)
                    else:
                        print "<7> successfully updated metadata for %s" %(filename)
                    print "%s: %s" %(filename, metaDict)
    else:
        if args.verbose:
            print "There is no plugin to handle mimetype %s." %(mimetype)
            print filesByMimetype[mimetype]
            print "--"
| gpl-3.0 | -1,959,386,333,837,326,000 | 41.127031 | 281 | 0.552735 | false |
TheProjecter/kassie | exemples/chat.py | 1 | 3905 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Cet exemple met en place un système de chat minimaliste gérant :
- les connexions multiples
- les déconnexions fortuites
- la réception ou l'envoie de messages aux clients
On utilise les fonctions de callback pour paramétrer comment doit
réagir le serveur lors d'une connexion, d'une déconnexion ou d'une
réception d'un message. Consultez le code pour plus d'informations.
"""
import os
import sys
sys.path.append(os.getcwd() + "/../src")
from reseau.connexions.serveur import *
fin_ligne = "\r\n"
# Fonctions de callback
def connexion(serveur, client):
    """Callback run when *client* connects: announce it to every other client."""
    print("Connexion du client {0}".format(client))
    for autre in serveur.clients.values():
        if autre is client:
            continue
        autre.envoyer("$$ {0} se connecte au serveur{1}".format(
                client, fin_ligne).encode())
def deconnexion(serveur, client):
    """Callback run when *client* disconnects: announce it to the remaining clients."""
    print("Déconnexion du client {0} : {1}".format(client, client.retour))
    for autre in serveur.clients.values():
        if autre is client:
            continue
        autre.envoyer("** {0} se déconnecte du serveur{1}".format(
                client, fin_ligne).encode())
def reception(serveur, client):
    """Callback run when *client* sends data: broadcast it to every client (sender included)."""
    msg = client.get_message()  # msg is of type bytes, not str
    print("J'ai réceptionné en bytes {0}".format(msg))
    for destinataire in serveur.clients.values():
        destinataire.envoyer("<{0}> {1}{2}".format(client.id, msg, fin_ligne).encode())
# Server creation and configuration
serveur = ConnexionServeur(4000) # testing on port 4000
# Callback configuration
# callback invoked on client connection
serveur.callbacks["connexion"].fonction = connexion
serveur.callbacks["connexion"].parametres = (serveur,)
# callback invoked on client disconnection
serveur.callbacks["deconnexion"].fonction = deconnexion
serveur.callbacks["deconnexion"].parametres = (serveur,)
# callback invoked when a message is received
serveur.callbacks["reception"].fonction = reception
serveur.callbacks["reception"].parametres = (serveur,)
# End of server configuration
serveur.init() # initialization, mandatory
# Main loop: poll for new connections, then for incoming data.
while True: # the server never stops by itself
    serveur.verifier_connexions()
    serveur.verifier_receptions()
| bsd-3-clause | -4,216,159,609,555,280,000 | 38.242424 | 79 | 0.732304 | false |
vpp-dev/vpp-lua-plugin | samples/polua-classifier-test.py | 1 | 16825 | # do this in the shell
# virtualenv/bin/pip install scapy
# sudo LD_LIBRARY_PATH=$LD_LIBRARY_PATH virtualenv/bin/scapy
# You also can do this from within scapy:
# execfile("polua-classifier-test.py")
# The rest is in the scapy CLI
import vpp_papi
v = vpp_papi
v.connect("pytest")
def cli(cmd):
    """Send *cmd* to the VPP debug CLI via the inband API, echoing command and reply."""
    print("Running " + cmd)
    result = vpp_papi.cli_inband(len(cmd), cmd)
    print("Reply: ", result)
    return result
import re
import binascii
def pad_to_vector(s):
    """NUL-pad *s* up to the next strictly greater multiple of 16 bytes.

    Note: a string whose length is already a multiple of 16 still gains
    a full extra 16-byte vector of zeroes.
    """
    target = (len(s) // 16 + 1) * 16
    return s + chr(0) * (target - len(s))
# Classifier masks and match values, written as space-separated hex dumps
# of packet header bytes (Ethernet header followed by IPv4/IPv6 [+ L4]).
# A mask selects which header bytes are compared; the matching value holds
# the bytes a packet must carry in those positions.
# Full mask for matching on ICMP protocol for IPv4
ipv4_proto_mask_spaces = "000000000000 000000000000 0000 00 00 0000 0000 0000 00 FF 0000 00000000 00000000 00 00 0000 00 00"
# Full value for this match
ipv4_proto_valu_spaces = "000000000000 000000000000 0000 00 00 0000 0000 0000 00 01 0000 00000000 00000000 00 00 0000 00 00"
# IPv4 proto + destination port (TCP/22 in the value below)
ipv4_proto_dport_mask_spaces = "000000000000 000000000000 0000 00 00 0000 0000 0000 00 FF 0000 00000000 00000000 0000 FFFF 00000000 00000000 0000 0000 0000 0000"
ipv4_proto_dport_valu_spaces = "000000000000 000000000000 0000 00 00 0000 0000 0000 00 06 0000 00000000 00000000 0000 0016 00000000 00000000 0000 0000 0000 0000"
# IPv6 next-header (ICMPv6 = 0x3A in the value below)
ipv6_proto_mask_spaces = "000000000000 000000000000 0000 0 00 00000 0000 FF 00 00000000000000000000000000000000 00000000000000000000000000000000 00 00 0000 0000"
ipv6_proto_valu_spaces = "000000000000 000000000000 0000 0 00 00000 0000 3A 00 00000000000000000000000000000000 00000000000000000000000000000000 00 00 0000 0000"
# IPv6 next-header + destination port (TCP/22 in the value below)
ipv6_proto_dport_mask_spaces = "000000000000 000000000000 0000 0 00 00000 0000 FF 00 00000000000000000000000000000000 00000000000000000000000000000000 0000 FFFF 00000000 00000000 0000 0000 0000 0000"
ipv6_proto_dport_valu_spaces = "000000000000 000000000000 0000 0 00 00000 0000 06 00 00000000000000000000000000000000 00000000000000000000000000000000 0000 0016 00000000 00000000 0000 0000 0000 0000"
# Array of unprocessed masks and values
masks_spaces = [ ipv4_proto_mask_spaces, ipv4_proto_dport_mask_spaces, ipv6_proto_mask_spaces, ipv6_proto_dport_mask_spaces ]
valus_spaces = [ ipv4_proto_valu_spaces, ipv4_proto_dport_valu_spaces, ipv6_proto_valu_spaces, ipv6_proto_dport_valu_spaces ]
# Remove spaces
masks = map(lambda obj: obj.replace(" ",""), masks_spaces)
# Convert from hex representation to binary
masks_bin = map(lambda obj: binascii.unhexlify(obj), masks)
# Pad the masks to overall vector length
masks_bin_padded = map(pad_to_vector, masks_bin)
# Create naive tables (kept for reference; superseded by the loop further down)
### tables = map(lambda s: v.classify_add_del_table(True, 0, 32, 20000, 0, len(s)/16, 4294967295, 4294967295, s), masks_bin_padded)
# check the result
cli("show classify tables")
## Now let's get rid of trying to match on the leading empty vectors - there is no point in doing that
# tell how many vectors are all-zero in the beginning of s
def n_leading_empty_vectors(s):
    """Count the complete 16-byte all-NUL vectors at the start of *s*.

    The regex deletes everything from the first non-NUL byte onward, so
    *hs* is just the leading run of NUL bytes; dividing its length by 16
    (Python 2 integer division) yields the number of whole zero vectors.
    This is used as the classifier table's 'skip' count.
    NOTE(review): the pattern requires at least one character after the
    first non-NUL byte; a lone non-NUL final byte is not stripped —
    harmless here since only whole 16-byte vectors are counted.
    """
    hs = re.sub("[^\x00].+$","", s)
    lhs = len(hs)
    vlhs = (lhs - (lhs%16))/16
    return vlhs
# return s without the nv starting vectors
def strip_leading_vectors(s, nv):
    """Drop the first *nv* 16-byte vectors from the front of *s*."""
    return s[nv * 16:]
### tables2 = map(lambda s : v.classify_add_del_table(True, 0, 32, 20000, n_leading_empty_vectors(s), len(s)/16 - n_leading_empty_vectors(s), 4294967295, 4294967295, strip_leading_vectors(s, n_leading_empty_vectors(s))), masks_bin_padded)
# Get rid of unnecessary zeroes in the end
def strip_trailing_zeroes(s):
    """Remove the trailing run of NUL bytes from *s*.

    Implemented with re.sub rather than str.rstrip so the original
    '$'-anchored matching semantics are preserved exactly.
    """
    return re.sub('\x00+$', '', s)
# create binary masks with no trailing zeroes
masks_bin_notrailzero = map(strip_trailing_zeroes, masks_bin)
# pad them to vector lengths
masks_bin_notrailzero_padded = map(pad_to_vector, masks_bin_notrailzero)
# Create the good tables (kept for reference; superseded by the loop below)
### tables3 = map(lambda s: v.classify_add_del_table(True, 0, 32, 20000, n_leading_empty_vectors(s), len(s)/16 - n_leading_empty_vectors(s), 4294967295, 4294967295, strip_leading_vectors(s, n_leading_empty_vectors(s))), masks_bin_notrailzero_padded)
# The tables above are good, but we need to link them and define the default
# action being deny.  Hit/miss "next node" indices refer to the successors of
# l2-input-classify in "show vlib graph": index [0] is error-drop, so a miss
# index of 0 drops the packet, while 4294967295 (~0) means "no explicit next".
action_drop = 0
action_cont = 4294967295
action_acl_match = 4294967295
# Chain table 1 after table 0 (IPv4: proto -> proto+dport) and table 3 after
# table 2 (same pair for IPv6); chained tables continue on miss.
link_with_prev = [ False, True, False, True ]
miss_idx = [ action_drop, action_cont, action_drop, action_cont ]
final_tables = []
for i in range(0, len(masks_bin_notrailzero_padded)):
    s = masks_bin_notrailzero_padded[i]
    arg_nbuckets = 32
    arg_mem = 20000
    # skip = leading all-zero 16-byte vectors, match = remaining vectors
    arg_skip = n_leading_empty_vectors(s)
    arg_match = len(s)/16 - n_leading_empty_vectors(s)
    arg_next = 4294967295
    if link_with_prev[i]:
        # the final_tables previous has been already added in the last iteration
        arg_next = final_tables[i-1].new_table_index
    arg_miss_idx = miss_idx[i]
    print("skip: ", arg_skip, " match: ", arg_match)
    final_tables.append(v.classify_add_del_table(True, 0, arg_nbuckets, arg_mem, arg_skip, arg_match, arg_next, arg_miss_idx, strip_leading_vectors(s, arg_skip)))
# Add one session (a concrete match value) to each table.
sessions = []
for i in range(0, len(masks_bin_notrailzero_padded)):
    a_valu_spaces = valus_spaces[i]
    a_valu = a_valu_spaces.replace(" ", "")
    a_valu_bin = binascii.unhexlify(a_valu)
    a_valu_bin_notrailzero = strip_trailing_zeroes(a_valu_bin)
    a_valu_bin_notrailzero_padded = pad_to_vector(a_valu_bin_notrailzero)
    a_skip = n_leading_empty_vectors(masks_bin_notrailzero_padded[i])
    # classify_add_del_session(is_add, table_index, hit_next_index, opaque_index, advance, match, async=False)
    ###
    ### WRONG !!!
    ### a_match = strip_leading_vectors(a_valu_bin_notrailzero_padded, a_skip)
    ### match needs to be packet contents "as is".
    ###
    a_match = a_valu_bin_notrailzero_padded
    # classify_add_del_session(is_add, table_index, hit_next_index, opaque_index, advance, match, async=False)
    sessions.append(v.classify_add_del_session(True, final_tables[i].new_table_index, action_acl_match, 42, 0, a_match))
# Let's now open some shell sessions to send the pings from
# First define a class for it
from subprocess import Popen, PIPE
import time
class ShellSession:
    """Drives an interactive bash shell wrapped inside a detached tmux session.

    Commands are injected with ``tmux send-keys``; all shell output is
    redirected to a per-session temp file which can be polled via read().
    """
    def __init__(self, name):
        self.description = "Interactive shell session"
        self.name = name
        # All shell output lands in this file: fw is the shell's
        # stdout/stderr sink, fr is our read handle on the same file.
        self.fname = "/tmp/session-" + name + "-output.txt"
        self.fw = open(self.fname, "wb")
        self.fr = open(self.fname, "r")
        self.p = Popen("/bin/bash", stdin = PIPE, stdout = self.fw, stderr = self.fw, bufsize = 1)
        # runtmux wraps the real tmux binary (assumed present in
        # /shared/runtmux — TODO confirm the environment provides it).
        self.raw_write("export REAL_TMUX=`which tmux`\n")
        self.raw_write("export PATH=/shared/runtmux:$PATH\n")
        self.raw_write("runtmux new-session -d -s " + self.name + "\n")
    def write(self, data):
        """Type *data* into the tmux session and press Enter."""
        self.raw_write("tmux send-keys -t " + self.name + " '" + data + "'\n")
        self.raw_write("tmux send-keys -t " + self.name + " Enter\n")
    def raw_write(self, data):
        """Send *data* straight to the controlling bash process's stdin."""
        self.p.stdin.write(data)
    def read(self):
        """Return whatever output accumulated since the previous read."""
        return self.fr.read()
    def close(self):
        """Close both handles onto the session's output file."""
        self.fr.close()
        self.fw.close()
    def connect_with(self, other):
        """Create a veth pair linking this session's netns with *other*'s.

        The local end is named <self>_<other>; the peer <other>_<self> is
        moved into the other session's network namespace (located through
        its shell PID) and both ends are brought up in promiscuous mode.
        """
        this_end = self.name + "_" + other.name
        other_end = other.name + "_" + self.name
        self.write("ip link del " + this_end + "\n")
        self.write("ip link add name " + this_end + " type veth peer name " + other_end + "\n")
        self.write("ip link set dev " + this_end + " up promisc on\n")
        other.write("echo $$ >/tmp/" + other.name + ".pid\n")
        time.sleep(0.5)
        self.write("ip link set dev " + other_end + " up promisc on netns /proc/`cat /tmp/" + other.name + ".pid`/ns/net\n")
        print("netns of " + other_end + " is /proc/`cat /tmp/"+other.name+".pid`/ns/net\n")
        time.sleep(0.3)
# Build the test topology: three shells (s0 shares VPP's namespace, s1 and
# s2 each unshare into their own netns), connected via veth pairs, with the
# VPP ends placed into an L2 bridge.
# Three sessions, first s0 in the same net namespace as VPP
s0 = ShellSession("s0")
# s1 in its separate namespace and s2 in yet another one
s1 = ShellSession("s1")
# jump into a separate network namespace.
s1.write("unshare -n /bin/bash\n")
# check we have no interfaces other than lo
s1.write("/sbin/ifconfig -a\n")
# wait a second or two here because otherwise there is no output
time.sleep(1)
s1.read()
s2 = ShellSession("s2")
# jump into a separate network namespace.
s2.write("unshare -n /bin/bash\n")
# check we have no interfaces other than lo
s2.write("/sbin/ifconfig -a\n")
# wait a second or two here because otherwise there is no output
time.sleep(1)
s2.read()
# Connect the sessions using the veth pairs
s0.connect_with(s1)
s0.connect_with(s2)
# we now should have lo and s1_s0 interfaces here
s1.write("/sbin/ifconfig -a\n")
# wait a second or two here because otherwise there is no output
time.sleep(1)
print(s1.read())
# we now should have s0_s1 and s0_s2 interfaces here
s0.write("/sbin/ifconfig -a\n")
# wait a second or two here because otherwise there is no output
time.sleep(1)
s0.read()
# Now let's go back to our two sessions, s1 and s2 and configure them
# the s1 gets the address x::1
s1.write("ip -6 addr add dev s1_s0 2001:db8:1::1/64\n")
s1.write("ip -4 addr add dev s1_s0 192.0.2.1/24\n")
s1.write("ip link set dev s1_s0 up promisc on\n")
# the s2 gets the x::2 and x::3
s2.write("ip -6 addr add dev s2_s0 2001:db8:1::2/64\n")
s2.write("ip -6 addr add dev s2_s0 2001:db8:1::3/64\n")
s2.write("ip -4 addr add dev s2_s0 192.0.2.2/24\n")
s2.write("ip link set dev s2_s0 up promisc on\n")
# check the addresses
s1.write("ip addr\n")
s2.write("ip addr\n")
time.sleep(1)
s1.read()
s2.read()
# create the VPP interfaces via CLI and add them to the bridge
cli("create host-interface name s0_s1")
cli("create host-interface name s0_s2")
cli("set interface state host-s0_s1 up")
cli("set interface state host-s0_s2 up")
cli("set interface l2 bridge host-s0_s1 42")
cli("set interface l2 bridge host-s0_s2 42")
# Disabled alternative: the same host-interface + bridge-domain setup as the
# CLI calls above, but done through the binary API.  Kept for reference.
if False:
    # create the interfaces on the VPP corresponding to the s0_s1 and s0_s2 interfaces
    vpp_if_to_s1 = v.af_packet_create("s0_s1", "AAAAAA", True)
    vpp_if_to_s2 = v.af_packet_create("s0_s2", "AAAAAA", True)
    ifaces = [ vpp_if_to_s1, vpp_if_to_s2 ]
    # bring the interfaces up
    for i in ifaces:
        up = True
        v.sw_interface_set_flags(i.sw_if_index, up, False, False)
    # Let's add the bridge
    bd_id = 42
    v.bridge_domain_add_del(bd_id, True, True, True, True, 0, True)
    # Now lets add the interfaces to the bridge
    for i in ifaces:
        sw_if_index = i.sw_if_index
        v.sw_interface_set_l2_bridge(sw_if_index, bd_id, False, False, True)
# Sanity check: with no policy applied yet, plain IPv4/IPv6 pings between
# the namespaces must succeed.
# ping!
s1.write("ping6 -c 3 2001:db8:1::2\n")
s1.write("ping -c 3 192.0.2.2\n")
# wait for a while here
time.sleep(10)
s1.read()
# the ping must succeed
# Now let's apply the policy outbound on s2
# classify_set_interface_l2_tables(sw_if_index, ip4_table_index, ip6_table_index, other_table_index, is_input, async=False)
# remember those tables ?
ip4_table_index = final_tables[1].new_table_index
ip6_table_index = final_tables[3].new_table_index
minus_one = 4294967295
# Historical debug-CLI / API invocations kept for reference:
# classify table mask l3 ip6 proto buckets 64
# vl_api_classify_add_del_session_t_handler
# LUA:
# call classify add del session is_add 1 table_index 0 hit_next_index 5 opaque_index 42 match \x00\x00\x00\x00\x3A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
# call classify add del session is_add 1 table_index 0 hit_next_index 5 opaque_index 43 match \x00\x00\x00\x00\x3A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
# call classify add del session table_index 0 hit_next_index 5 opaque_index 43 match \x3A\x00\x00\x00\x3A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
# classify session hit-next 5 table-index 0 match l3 ip6 proto 58 opaque-index 42
# classify session del hit-next 5 table-index 0 match l3 ip6 proto 58 opaque-index 42
# classify session hit-next 5 table-index 2 match l3 ip6 proto 58 opaque-index 42
# classify session hit-next 0 table-index 14 match l3 ip6 proto 58 opaque-index 123
# classify session hit-next -1 table-index 14 match l3 ip6 proto 58 opaque-index 123
# classify session hit-next 5 table-index 14 match l3 ip6 proto 58 opaque-index 123
# classify session del hit-next -1 table-index 14 match l3 ip6 proto 58 opaque-index 123
# classify session del hit-next 0 table-index 14 match l3 ip6 proto 58 opaque-index 123
# classify session del hit-next 5 table-index 14 match l3 ip6 proto 58 opaque-index 123
# v.classify_add_del_session(True, 14, 5, 123, 0, '\x00\x00\x00\x00:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
# v.classify_add_del_session(True, 1, 5, 123, 0, '\x00\x00\x00\x00:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
# v.classify_add_del_session(False, 14, 5, 123, 0, '\x00\x00\x00\x00:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
# Put egress policy onto target-facing interface
# v.classify_set_interface_l2_tables(vpp_if_to_s2.sw_if_index, ip4_table_index, ip6_table_index, minus_one, False)
# v.classify_set_interface_l2_tables(vpp_if_to_s1.sw_if_index, ip4_table_index, ip6_table_index, minus_one, True)
# classify session hit-next -1 table-index 2 match l3 ip6 proto 58 opaque-index 123
# Load the Lua policy plugin and attach policies to the interfaces.
cli("lua run plugins/lua-plugin/samples/polua.lua")
cli("lua polua host-s0_s1 in permit")
cli("lua polua host-s0_s2 out permit")
cli("lua polua host-s0_s2 in")
# Traffic scenarios: each one clears the packet trace, captures up to 100
# packets from af-packet-input, generates traffic, then shows one trace.
testIPv6 = False
testIPv4 = True
if testIPv6:
    print("IPv6 ping with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    s1.write("ping6 -c 3 2001:db8:1::2\n")
    time.sleep(10)
    cli("show trace max 1")
    time.sleep(1)
    print("IPv6 ping to another host with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    s1.write("ping6 -c 3 2001:db8:1::3\n")
    time.sleep(10)
    cli("show trace max 1")
    time.sleep(1)
if testIPv4:
    print("IPv4 ping with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    s1.write("ping -c 3 192.0.2.2\n")
    time.sleep(5)
    cli("show trace max 1")
    print("IPv4 udp with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    s1.write("perl -e \"print(chr(65) x 4000);\" >/tmp/test\n")
    s1.write("nc -u -w 1 192.0.2.2 4444 </tmp/test\n")
    time.sleep(5)
    cli("show trace max 1")
if True:
    print("IPv4 slow udp with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    s1.write("perl -e \"print(chr(65) x 100);\" >/tmp/test\n")
    s1.write("nc -u -w 1 -p 5554 192.0.2.2 4444 </tmp/test\n")
    time.sleep(1)
    s1.write("nc -u -w 1 -p 5554 192.0.2.2 4444 </tmp/test\n")
    time.sleep(5)
    cli("show trace max 1")
if True:
    print("IPv4 tcp port 3333 with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    # s2.write("nc -w 4 -l -p 3333\n")
    s1.write("nc -w 1 192.0.2.2 3333 </dev/zero\n")
    time.sleep(5)
    cli("show trace max 1")
if True:
    print("IPv4 tcp port 3333 with filter in the other direction")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    # s2.write("nc -w 4 -l -p 3333\n")
    s2.write("nc -w 1 192.0.2.1 3333 </dev/zero\n")
    time.sleep(5)
    cli("show trace max 1")
if True:
    print("IPv4 tcp port 22 with filter")
    cli("clear trace")
    cli("trace add af-packet-input 100")
    s1.write("nc -w 1 192.0.2.2 22 </dev/zero\n")
    time.sleep(5)
    cli("show trace max 1")
time.sleep(1)
# cancel the filters
# v.classify_set_interface_l2_tables(vpp_if_to_s2.sw_if_index, minus_one, minus_one, minus_one, False)
# v.classify_set_interface_l2_tables(vpp_if_to_s1.sw_if_index, minus_one, minus_one, minus_one, True)
#
# Not supported yet in python API...
# >>> v.classify_session_dump(15)
# Message decode failed 322 <function classify_session_details_decode at 0x7fd41b84e758>
# Traceback (most recent call last):
# File "/home/ubuntu/vpp/virtualenv/local/lib/python2.7/site-packages/vpp_papi-1.2-py2.7-linux-x86_64.egg/vpp_papi/vpp_papi.py", line 49, in msg_handler
# r = api_func_table[id[0]](msg)
# File "/home/ubuntu/vpp/virtualenv/local/lib/python2.7/site-packages/vpp_papi-1.2-py2.7-linux-x86_64.egg/vpp_papi/vpe.py", line 7653, in classify_session_details_decode
# tr = unpack_from('>' + str(c) + 's', msg[30:])
# struct.error: unpack_from requires a buffer of at least 48 bytes
# []
| apache-2.0 | 7,852,703,831,240,850,000 | 37.501144 | 249 | 0.676909 | false |
NoahDStein/NeuralNetSandbox | wavenet.py | 1 | 16175 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
import tensorflow as tf
import tensorflow.contrib.slim as slim
import time
from tfutil import restore_latest, modified_dynamic_shape, quantizer, dequantizer, crappy_plot, draw_on, \
queue_append_and_update, modified_static_shape
# TF1-style command-line flags: training length, optimizer step size, and
# the directories used for input data, TensorBoard summaries and checkpoints.
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('max_steps', 100000, 'Number of steps to run trainer.')
flags.DEFINE_float('learning_rate', 0.001, 'Initial learning rate.')
flags.DEFINE_string('data_dir', '/tmp/data', 'Directory for storing data')
flags.DEFINE_string('summaries_dir', 'data/wavenet/logs', 'Summaries directory')
flags.DEFINE_string('train_dir', 'data/wavenet/save', 'Saves directory')
# Dataset / model hyper-parameters.
TRAIN_SIZE = 60000       # number of simulated training signals
TEST_SIZE = 10000        # number of simulated test signals
SIG_LEN = 256            # samples per signal
NUM_COMPONENTS = 3       # Fourier components per simulated signal
BATCH_SIZE = 64
PRIOR_BATCH_SIZE = 5
RESTORE_BEFORE_TRAIN = False
TRAIN = True
HIDDEN_LAYER_SIZE = 32
DELAYS = [1, 2, 4, 8, 16] * 4   # dilation schedule for the causal stack
QUANT_LEVELS = 256              # output quantization levels
QUANT_LOWER = -10.0             # quantization range bounds
QUANT_UPPER = 10.0
def log(s):
    """Print *s* prefixed with the current wall-clock time."""
    timestamp = '[%s] ' % time.asctime()
    print(timestamp + s)
def rand_periodic(num_components, num_signals, signal_length):
    """Generate random periodic signals as truncated Fourier series.

    Each of the *num_signals* rows gets a random fundamental period drawn
    uniformly from [40, 120) samples, plus *num_components* harmonics with
    standard-normal sine/cosine weights.  Returns an array of shape
    (num_signals, signal_length).  The RNG is consumed in the fixed order
    rand(period) -> randn(sin) -> randn(cos).
    """
    t = numpy.arange(signal_length, dtype=numpy.float32).reshape(1, signal_length)
    periods = numpy.random.rand(num_signals, 1) * 80 + 40
    phase = 2 * numpy.pi * t / periods
    sin_weights = numpy.random.randn(num_components, num_signals)
    cos_weights = numpy.random.randn(num_components, num_signals)
    harmonics = numpy.arange(1, num_components + 1).reshape(num_components, 1, 1) * phase
    sin_part = numpy.einsum('ij,ijk->jk', sin_weights, numpy.sin(harmonics))
    cos_part = numpy.einsum('ij,ijk->jk', cos_weights, numpy.cos(harmonics))
    return sin_part + cos_part
def delay(tensor, steps):
    """Shift *tensor* along its time axis (axis 1) by *steps* samples.

    Positive *steps* delays the signal: *steps* zero frames are prepended
    and the tail is truncated.  Negative *steps* advances it: the first
    -steps frames are dropped and zero frames are appended.  The static
    shape is re-asserted at the end because slicing/concat loses it.
    Assumes a rank-3 layout (batch, time, channels) — the zero block is
    built with a (None, |steps|, None) shape spec; TODO confirm.
    """
    if steps == 0:
        return tensor
    static_shape = tensor.get_shape()
    zeros = tf.zeros(modified_dynamic_shape(tensor, [None, abs(steps), None]), dtype=tensor.dtype)
    if steps > 0:
        # Keep the first (len - steps) frames, prepend the zero block.
        # (axis-first tf.concat signature, i.e. TF <= 0.12 API — confirm.)
        shifted_tensor = tensor[:, :static_shape.as_list()[1]-steps, :]
        delayed_tensor = tf.concat(1, (zeros, shifted_tensor))
    else:
        # Drop the first -steps frames, append the zero block.
        shifted_tensor = tensor[:, -steps:, :]
        delayed_tensor = tf.concat(1, (shifted_tensor, zeros))
    delayed_tensor.set_shape(static_shape)
    return delayed_tensor
def log_std_act(log_std):
    """Clamp predicted log-standard-deviation values into [-4, 4]."""
    return tf.clip_by_value(log_std, clip_value_min=-4.0, clip_value_max=4.0)
def id_act(z):
    """Identity activation: pass *z* through unchanged."""
    return z
def double_relu(z):
    """Return the rectified positive and negative parts of *z* as a list."""
    positive_part = tf.nn.relu(z)
    negative_part = tf.nn.relu(-z)
    return [positive_part, negative_part]
# Default hidden-layer nonlinearity (double_relu is the alternative).
default_act = tf.nn.relu # double_relu
# Keyword shorthand: layers built with these kwargs get batch-norm enabled.
do_bn = dict(bn=True)
def train():
    """Build the WaveNet-style graph over simulated periodic signals, then
    either train it (TRAIN=True), periodically logging summaries, prior
    samples and test error, or restore the latest checkpoint and drop into
    an IPython shell for inspection.

    NOTE(review): written against TF 0.x APIs (tf.concat(dim, ...),
    TensorArray.pack, tf.scalar_summary/image_summary, tf.arg_max).
    """
    # Import data
    log('simulating data')
    numpy.random.seed(3737)
    test_data = rand_periodic(NUM_COMPONENTS, TEST_SIZE, SIG_LEN)
    train_data = rand_periodic(NUM_COMPONENTS, TRAIN_SIZE, SIG_LEN)
    log('done simulating')
    with tf.name_scope('input'):
        # Datasets live in non-trainable variables (collections=[] keeps them
        # out of the global initializer); queue runners draw batches from them.
        all_train_data_initializer = tf.placeholder(tf.float32, [TRAIN_SIZE, SIG_LEN])
        all_train_data = tf.Variable(all_train_data_initializer, trainable=False, collections=[])
        random_training_example = tf.train.slice_input_producer([all_train_data])
        training_batch = tf.train.batch([random_training_example], batch_size=BATCH_SIZE, enqueue_many=True)
        all_test_data_initializer = tf.placeholder(tf.float32, [TEST_SIZE, SIG_LEN])
        all_test_data = tf.Variable(all_test_data_initializer, trainable=False, collections=[])
        test_batch = tf.train.batch([all_test_data], batch_size=BATCH_SIZE, enqueue_many=True)
        # Accumulators used to average test error across evaluation batches.
        num_runs = tf.Variable(0.0, trainable=False, collections=[])
        running_error = tf.Variable(0.0, trainable=False, collections=[])
        fed_input_data = tf.placeholder(tf.float32, [None, SIG_LEN])
    # Core WaveNet stack.  queue_contents is None during training (queues are
    # zero-initialized); during autoregressive sampling it carries the FIFO
    # state of every layer between single-step invocations.
    def sub_predictor(input_val, queue_contents=None):
        queue_updates = []
        def next_queue(model_tensor, depth):
            # Prepend `depth` steps of history (zeros or carried state) along
            # the time axis and record the updated state for the next step.
            if queue_contents is None:
                new_shape = [None] * model_tensor.get_shape().ndims
                new_shape[1] = depth
                this_queue_contents = tf.zeros(shape=modified_static_shape(model_tensor, new_shape))
            else:
                this_queue_contents = queue_contents[len(queue_updates)]
            concatenated_contents, updated_contents = queue_append_and_update(1, this_queue_contents, model_tensor)
            queue_updates.append(updated_contents)
            return concatenated_contents
        all_res = []
        last = input_val
        # Causal convolution
        FILTER_SIZE = 16
        bn_params = dict(decay=0.95, scope='bn', updates_collections=None)
        with slim.arg_scope([slim.conv2d, slim.fully_connected], normalizer_fn=slim.batch_norm, normalizer_params=bn_params, num_outputs=HIDDEN_LAYER_SIZE):
            last = next_queue(last, FILTER_SIZE-1)
            last = tf.expand_dims(last, 1)
            last = slim.conv2d(last, kernel_size=(1, FILTER_SIZE), padding='VALID', activation_fn=None, scope='predictor/causalconv')
            last = tf.reshape(last, modified_static_shape(input_val, [None, None, HIDDEN_LAYER_SIZE]))
            res = last
            all_res.append(res)
            for res_layer, cur_delay in enumerate(DELAYS):
                total = next_queue(last, cur_delay)
                last = tf.concat(2, (total[:, cur_delay:, :], total[:, :-cur_delay, :]))
                # Dilated causal convolution
                tanh = slim.fully_connected(last, activation_fn=tf.nn.tanh, scope='predictor/res{}T'.format(res_layer))
                sigmoid = slim.fully_connected(last, activation_fn=tf.nn.sigmoid, scope='predictor/res{}S'.format(res_layer))
                last = slim.fully_connected(tanh*sigmoid, activation_fn=None, scope='predictor/res{}/hidden'.format(res_layer))
                # Gated unit feeds a residual/skip connection.
                res, last = last, last + res
                all_res.append(res)
            # last = tf.concat(3, [tf.expand_dims(r, 3) for r in all_res])
            # num_layers = len(all_res)
            # Need to keep these convolutions as not running over time or else add queues
            # last = lm.conv_transpose_layer(last, 1, 5, num_layers//2, 'output/conv0', act=tf.nn.relu, strides=[1, 1, 2, 1], padding='SAME', bias_dim=2, **do_bn)
            # last = lm.conv_transpose_layer(last, 1, 5, num_layers//4, 'output/conv1', act=tf.nn.relu, strides=[1, 1, 2, 1], padding='SAME', bias_dim=2, **do_bn)
            # last = lm.conv_transpose_layer(last, 1, 5, num_layers//8, 'output/conv2', act=tf.nn.relu, strides=[1, 1, 2, 1], padding='SAME', bias_dim=2, **do_bn)
            # last = lm.conv_layer(last, 1, 5, 1, 'output/conv3', act=id_act, padding='SAME', bias_dim=2, **do_bn)
            # last = last[:, :, :, 0]
            last = slim.fully_connected(tf.concat(2, all_res), activation_fn=tf.nn.relu, scope='output/hidden')
            last = slim.fully_connected(last, num_outputs=QUANT_LEVELS, activation_fn=None, normalizer_params=dict(bn_params, scale=True), scope='output/logits')
        return last, queue_updates
    # Adds an all-ones channel plus input noise before the WaveNet stack.
    def predictor(data):
        last = tf.expand_dims(data, 2)
        ones = tf.ones_like(last, dtype=last.dtype)
        noise = tf.random_normal(tf.shape(last))
        last = tf.concat(2, (last + 0.1*noise, ones))
        return sub_predictor(last)
    # Teacher-forced model: predicts sample t+1 from samples <= t, with
    # cross-entropy loss over the quantized targets and image summaries of
    # the posterior.
    def full_model(data):
        output_logits, queue_updates = predictor(data)
        output_logits = output_logits[:, :SIG_LEN-1, :]
        output_mean = tf.argmax(output_logits, dimension=2)
        targets = data[:, 1:]
        quantized_targets = quantizer(targets, QUANT_LOWER, QUANT_UPPER, QUANT_LEVELS)
        with tf.name_scope('error'):
            batch_error = tf.reduce_mean(tf.reduce_sum(tf.nn.sparse_softmax_cross_entropy_with_logits(output_logits, quantized_targets), reduction_indices=[1]))
            error_summary = tf.scalar_summary('training error', (running_error + batch_error)/(num_runs + 1.0))
        output_plot = crappy_plot(output_mean, QUANT_LEVELS)
        target_plot = crappy_plot(quantized_targets, QUANT_LEVELS)
        M = tf.reduce_max(output_logits)
        m = tf.reduce_min(output_logits)
        scaled_logits = (output_logits-m)/(M-m)
        # image = draw_on(tf.transpose(scaled_logits, perm=[0, 2, 1])[:, :, :, None], target_plot, [1.0, 0.0, 0.0])
        # Casting is to work around some stupid tf bug; shouldn't be necessary
        output_probs = tf.reshape(tf.cast(tf.nn.softmax(tf.reshape(tf.cast(output_logits, tf.float64), [-1, QUANT_LEVELS])), tf.float32), [-1, SIG_LEN-1, QUANT_LEVELS])
        image = draw_on(tf.transpose(output_probs, perm=[0, 2, 1])[:, :, :, None], target_plot, [1.0, 0.0, 0.0])
        # image = draw_on(1.0, target_plot, [1.0, 0.0, 0.0])    # The first 1.0 starts with a white canvas
        # image = draw_on(image, output_plot, [0.0, 0.0, 1.0])
        sample_summary = tf.image_summary('posterior_sample', image, 5)
        summaries = tf.merge_summary([error_summary, sample_summary])
        return output_mean, queue_updates, batch_error, batch_error, summaries #+ 0.1*weight_decay
    # Ancestral sampling loop: feed each sampled step back in through the
    # per-layer queues (Gumbel-max trick picks a quantization level).
    def prior_model(prior_queue_init, length=SIG_LEN):
        def cond(loop_counter, *_):
            return tf.less(loop_counter, length)
        def body(loop_counter, accumulated_output_array, accumulated_logits_array, next_input, *queue_contents):
            next_logit, queue_updates = sub_predictor(next_input, queue_contents)
            gumbeled = next_logit[:, 0, :] - tf.log(-tf.log(tf.random_uniform((tf.shape(next_logit)[0], QUANT_LEVELS))))
            sample_disc = tf.arg_max(gumbeled, 1)
            sample_cont = dequantizer(sample_disc, QUANT_LOWER, QUANT_UPPER, QUANT_LEVELS)
            accumulated_output_array = accumulated_output_array.write(loop_counter, sample_cont)
            accumulated_logits_array = accumulated_logits_array.write(loop_counter, next_logit[:, 0, :])
            sample_cont = tf.expand_dims(sample_cont, 1)
            sample_cont = tf.expand_dims(sample_cont, 1) # sic
            next_input = tf.concat(2, (sample_cont, tf.ones_like(sample_cont)))
            return [loop_counter+1, accumulated_output_array, accumulated_logits_array, next_input] + queue_updates
        accumulated_output_array = tf.TensorArray(tf.float32, size=SIG_LEN, clear_after_read=False)
        accumulated_logits_array = tf.TensorArray(tf.float32, size=SIG_LEN, clear_after_read=False)
        loop_var_init = [tf.constant(0, dtype=tf.int32), accumulated_output_array, accumulated_logits_array, tf.zeros((PRIOR_BATCH_SIZE, 1, 2))] + prior_queue_init
        accumulated_output_array, accumulated_logits_array = tf.while_loop(cond, body, loop_var_init, back_prop=False)[1:3]
        output = tf.transpose(accumulated_output_array.pack(), [1, 0])
        logits = tf.transpose(accumulated_logits_array.pack(), [1, 0, 2])
        output.set_shape((PRIOR_BATCH_SIZE, length))
        logits.set_shape((PRIOR_BATCH_SIZE, length, QUANT_LEVELS))
        return output, logits
    # Wraps prior_model with zeroed queue state and an image summary of the
    # sampled sequences.
    def prior_model_with_summary(queue_model):
        prior_queue_init = []
        for tensor in queue_model:
            new_shape = tensor.get_shape().as_list()
            new_shape[0] = PRIOR_BATCH_SIZE
            prior_queue_init.append(tf.zeros(new_shape, dtype=tf.float32))
        output_sample, output_logits = prior_model(prior_queue_init)
        M = tf.reduce_max(output_logits)
        m = tf.reduce_min(output_logits)
        scaled_logits = (output_logits-m)/(M-m)
        # Casting is to work around some stupid tf bug; shouldn't be necessary
        output_probs = tf.reshape(tf.cast(tf.nn.softmax(tf.reshape(tf.cast(output_logits, tf.float64), [-1, QUANT_LEVELS])), tf.float32), [-1, SIG_LEN, QUANT_LEVELS])
        image = draw_on(tf.transpose(output_probs, perm=[0, 2, 1])[:, :, :, None], crappy_plot(quantizer(output_sample, QUANT_LOWER, QUANT_UPPER, QUANT_LEVELS), QUANT_LEVELS), [0.0, 0.0, 1.0])
        sample_image = tf.image_summary('prior_sample', image, PRIOR_BATCH_SIZE)
        return output_sample, sample_image
    # Instantiate the three graph views; the prior/test views reuse the
    # training variables (reuse_variables + is_training=False for BN/dropout).
    with tf.name_scope('posterior'):
        posterior_mean, queue_updates, _, training_error, training_merged = full_model(training_batch)
    with slim.arg_scope([slim.batch_norm, slim.dropout], is_training=False):
        tf.get_variable_scope().reuse_variables()
        with tf.name_scope('prior'):
            prior_sample, prior_sample_summary = prior_model_with_summary(queue_updates)
        with tf.name_scope('test'):
            _, _, test_error, _, test_merged = full_model(test_batch)
            accum_test_error = [num_runs.assign(num_runs+1.0), running_error.assign(running_error+test_error)]
    saver = tf.train.Saver(tf.trainable_variables() + tf.get_collection('BatchNormInternal'))
    batch = tf.Variable(0)
    learning_rate = tf.train.exponential_decay(FLAGS.learning_rate, batch, 5000, 0.8, staircase=True)
    train_step = tf.train.AdamOptimizer(learning_rate).minimize(training_error, global_step=batch)
    with tf.Session() as sess:
        sess.run(tf.initialize_all_variables())
        sess.run(tf.initialize_variables(tf.get_collection('BatchNormInternal')))
        # Push the simulated datasets into their variables.
        sess.run(all_train_data.initializer, feed_dict={all_train_data_initializer: train_data})
        sess.run(all_test_data.initializer, feed_dict={all_test_data_initializer: test_data})
        sess.run([num_runs.initializer, running_error.initializer])
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        if TRAIN:
            train_writer = tf.train.SummaryWriter(FLAGS.summaries_dir + '/train', sess.graph)
            test_writer = tf.train.SummaryWriter(FLAGS.summaries_dir + '/test')
            if RESTORE_BEFORE_TRAIN:
                log('restoring')
                restore_latest(saver, sess, 'data/wavenet')
            try:
                log('starting training')
                for i in range(FLAGS.max_steps):
                    if i % 1000 == 999:
                        # Track training error
                        run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
                        run_metadata = tf.RunMetadata()
                        summary, _ = sess.run([training_merged, train_step],
                                              options=run_options,
                                              run_metadata=run_metadata)
                        train_writer.add_summary(summary, i)
                        train_writer.add_run_metadata(run_metadata, 'batch%d' % i)
                        # Plot prior samples
                        prior_sample_summary_val, = sess.run([prior_sample_summary])
                        train_writer.add_summary(prior_sample_summary_val, i)
                        # Track test error
                        for _ in range(TEST_SIZE//BATCH_SIZE - 1):
                            sess.run(accum_test_error)
                        summary, _, _ = sess.run([test_merged] + accum_test_error)
                        acc, = sess.run([running_error/num_runs])
                        sess.run([num_runs.initializer, running_error.initializer])
                        test_writer.add_summary(summary, i)
                        log('batch %s: Test error = %s' % (i, acc))
                    else:
                        sess.run([train_step])
            finally:
                # Always checkpoint, even if training was interrupted.
                log('saving')
                saver.save(sess, FLAGS.train_dir, global_step=batch)
                log('done')
        else:
            # Inspection mode: restore a checkpoint and open an IPython shell.
            log('restoring')
            restore_latest(saver, sess, 'data/wavenet')
            import matplotlib.pyplot as plt
            plt.ioff()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            logit, = sess.run([predictor(fed_input_data)[0]], feed_dict={fed_input_data: train_data[10:20, :]})
            def softmax(x, axis=None):
                x = x - x.max(axis=axis, keepdims=True)
                x = numpy.exp(x)
                return x/numpy.sum(x, axis=axis, keepdims=True)
            import IPython
            IPython.embed()
        coord.request_stop()
        coord.join(threads)
        sess.close()
def main(_):
    """Entry point for tf.app.run: reset the summary directory, then train."""
    summaries_dir = FLAGS.summaries_dir
    if tf.gfile.Exists(summaries_dir):
        tf.gfile.DeleteRecursively(summaries_dir)
    tf.gfile.MakeDirs(summaries_dir)
    train()
if __name__ == '__main__':
tf.app.run()
| apache-2.0 | -6,833,931,838,539,768,000 | 48.616564 | 192 | 0.617743 | false |
IsCoolEntertainment/debpkg_python-boto | boto/route53/connection.py | 1 | 16973 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2011 Blue Pines Technologies LLC, Brad Carleton
# www.bluepines.org
# Copyright (c) 2012 42 Lines Inc., Jim Browne
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import exception
import random
import urllib
import uuid
import xml.sax
import boto
from boto.connection import AWSAuthConnection
from boto import handler
import boto.jsonresponse
from boto.route53.record import ResourceRecordSets
from boto.route53.zone import Zone
HZXML = """<?xml version="1.0" encoding="UTF-8"?>
<CreateHostedZoneRequest xmlns="%(xmlns)s">
<Name>%(name)s</Name>
<CallerReference>%(caller_ref)s</CallerReference>
<HostedZoneConfig>
<Comment>%(comment)s</Comment>
</HostedZoneConfig>
</CreateHostedZoneRequest>"""
#boto.set_stream_logger('dns')
class Route53Connection(AWSAuthConnection):
DefaultHost = 'route53.amazonaws.com'
"""The default Route53 API endpoint to connect to."""
Version = '2013-04-01'
"""Route53 API version."""
XMLNameSpace = 'https://route53.amazonaws.com/doc/2013-04-01/'
"""XML schema for this Route53 API version."""
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
port=None, proxy=None, proxy_port=None,
host=DefaultHost, debug=0, security_token=None,
validate_certs=True, https_connection_factory=None,
profile_name=None):
super(Route53Connection, self).__init__(host,
aws_access_key_id, aws_secret_access_key,
True, port, proxy, proxy_port, debug=debug,
security_token=security_token,
validate_certs=validate_certs,
https_connection_factory=https_connection_factory,
profile_name=profile_name)
    def _required_auth_capability(self):
        """Tell AWSAuthConnection to sign requests with the Route53 scheme."""
        return ['route53']
def make_request(self, action, path, headers=None, data='', params=None):
if params:
pairs = []
for key, val in params.iteritems():
if val is None:
continue
pairs.append(key + '=' + urllib.quote(str(val)))
path += '?' + '&'.join(pairs)
return super(Route53Connection, self).make_request(action, path,
headers, data,
retry_handler=self._retry_handler)
# Hosted Zones
def get_all_hosted_zones(self, start_marker=None, zone_list=None):
"""
Returns a Python data structure with information about all
Hosted Zones defined for the AWS account.
:param int start_marker: start marker to pass when fetching additional
results after a truncated list
:param list zone_list: a HostedZones list to prepend to results
"""
params = {}
if start_marker:
params = {'marker': start_marker}
response = self.make_request('GET', '/%s/hostedzone' % self.Version,
params=params)
body = response.read()
boto.log.debug(body)
if response.status >= 300:
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element(list_marker='HostedZones',
item_marker=('HostedZone',))
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
if zone_list:
e['ListHostedZonesResponse']['HostedZones'].extend(zone_list)
while 'NextMarker' in e['ListHostedZonesResponse']:
next_marker = e['ListHostedZonesResponse']['NextMarker']
zone_list = e['ListHostedZonesResponse']['HostedZones']
e = self.get_all_hosted_zones(next_marker, zone_list)
return e
def get_hosted_zone(self, hosted_zone_id):
"""
Get detailed information about a particular Hosted Zone.
:type hosted_zone_id: str
:param hosted_zone_id: The unique identifier for the Hosted Zone
"""
uri = '/%s/hostedzone/%s' % (self.Version, hosted_zone_id)
response = self.make_request('GET', uri)
body = response.read()
boto.log.debug(body)
if response.status >= 300:
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element(list_marker='NameServers',
item_marker=('NameServer',))
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
def get_hosted_zone_by_name(self, hosted_zone_name):
"""
Get detailed information about a particular Hosted Zone.
:type hosted_zone_name: str
:param hosted_zone_name: The fully qualified domain name for the Hosted
Zone
"""
if hosted_zone_name[-1] != '.':
hosted_zone_name += '.'
all_hosted_zones = self.get_all_hosted_zones()
for zone in all_hosted_zones['ListHostedZonesResponse']['HostedZones']:
#check that they gave us the FQDN for their zone
if zone['Name'] == hosted_zone_name:
return self.get_hosted_zone(zone['Id'].split('/')[-1])
def create_hosted_zone(self, domain_name, caller_ref=None, comment=''):
"""
Create a new Hosted Zone. Returns a Python data structure with
information about the newly created Hosted Zone.
:type domain_name: str
:param domain_name: The name of the domain. This should be a
fully-specified domain, and should end with a final period
as the last label indication. If you omit the final period,
Amazon Route 53 assumes the domain is relative to the root.
This is the name you have registered with your DNS registrar.
It is also the name you will delegate from your registrar to
the Amazon Route 53 delegation servers returned in
response to this request.A list of strings with the image
IDs wanted.
:type caller_ref: str
:param caller_ref: A unique string that identifies the request
and that allows failed CreateHostedZone requests to be retried
without the risk of executing the operation twice. If you don't
provide a value for this, boto will generate a Type 4 UUID and
use that.
:type comment: str
:param comment: Any comments you want to include about the hosted
zone.
"""
if caller_ref is None:
caller_ref = str(uuid.uuid4())
params = {'name': domain_name,
'caller_ref': caller_ref,
'comment': comment,
'xmlns': self.XMLNameSpace}
xml_body = HZXML % params
uri = '/%s/hostedzone' % self.Version
response = self.make_request('POST', uri,
{'Content-Type': 'text/xml'}, xml_body)
body = response.read()
boto.log.debug(body)
if response.status == 201:
e = boto.jsonresponse.Element(list_marker='NameServers',
item_marker=('NameServer',))
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
else:
raise exception.DNSServerError(response.status,
response.reason,
body)
def delete_hosted_zone(self, hosted_zone_id):
uri = '/%s/hostedzone/%s' % (self.Version, hosted_zone_id)
response = self.make_request('DELETE', uri)
body = response.read()
boto.log.debug(body)
if response.status not in (200, 204):
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
# Resource Record Sets
def get_all_rrsets(self, hosted_zone_id, type=None,
name=None, identifier=None, maxitems=None):
"""
Retrieve the Resource Record Sets defined for this Hosted Zone.
Returns the raw XML data returned by the Route53 call.
:type hosted_zone_id: str
:param hosted_zone_id: The unique identifier for the Hosted Zone
:type type: str
:param type: The type of resource record set to begin the record
listing from. Valid choices are:
* A
* AAAA
* CNAME
* MX
* NS
* PTR
* SOA
* SPF
* SRV
* TXT
Valid values for weighted resource record sets:
* A
* AAAA
* CNAME
* TXT
Valid values for Zone Apex Aliases:
* A
* AAAA
:type name: str
:param name: The first name in the lexicographic ordering of domain
names to be retrieved
:type identifier: str
:param identifier: In a hosted zone that includes weighted resource
record sets (multiple resource record sets with the same DNS
name and type that are differentiated only by SetIdentifier),
if results were truncated for a given DNS name and type,
the value of SetIdentifier for the next resource record
set that has the current DNS name and type
:type maxitems: int
:param maxitems: The maximum number of records
"""
params = {'type': type, 'name': name,
'Identifier': identifier, 'maxitems': maxitems}
uri = '/%s/hostedzone/%s/rrset' % (self.Version, hosted_zone_id)
response = self.make_request('GET', uri, params=params)
body = response.read()
boto.log.debug(body)
if response.status >= 300:
raise exception.DNSServerError(response.status,
response.reason,
body)
rs = ResourceRecordSets(connection=self, hosted_zone_id=hosted_zone_id)
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body, h)
return rs
def change_rrsets(self, hosted_zone_id, xml_body):
"""
Create or change the authoritative DNS information for this
Hosted Zone.
Returns a Python data structure with information about the set of
changes, including the Change ID.
:type hosted_zone_id: str
:param hosted_zone_id: The unique identifier for the Hosted Zone
:type xml_body: str
:param xml_body: The list of changes to be made, defined in the
XML schema defined by the Route53 service.
"""
uri = '/%s/hostedzone/%s/rrset' % (self.Version, hosted_zone_id)
response = self.make_request('POST', uri,
{'Content-Type': 'text/xml'},
xml_body)
body = response.read()
boto.log.debug(body)
if response.status >= 300:
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
def get_change(self, change_id):
"""
Get information about a proposed set of changes, as submitted
by the change_rrsets method.
Returns a Python data structure with status information about the
changes.
:type change_id: str
:param change_id: The unique identifier for the set of changes.
This ID is returned in the response to the change_rrsets method.
"""
uri = '/%s/change/%s' % (self.Version, change_id)
response = self.make_request('GET', uri)
body = response.read()
boto.log.debug(body)
if response.status >= 300:
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
def create_zone(self, name):
"""
Create a new Hosted Zone. Returns a Zone object for the newly
created Hosted Zone.
:type name: str
:param name: The name of the domain. This should be a
fully-specified domain, and should end with a final period
as the last label indication. If you omit the final period,
Amazon Route 53 assumes the domain is relative to the root.
This is the name you have registered with your DNS registrar.
It is also the name you will delegate from your registrar to
the Amazon Route 53 delegation servers returned in
response to this request.
"""
zone = self.create_hosted_zone(name)
return Zone(self, zone['CreateHostedZoneResponse']['HostedZone'])
def get_zone(self, name):
"""
Returns a Zone object for the specified Hosted Zone.
:param name: The name of the domain. This should be a
fully-specified domain, and should end with a final period
as the last label indication.
"""
name = self._make_qualified(name)
for zone in self.get_zones():
if name == zone.name:
return zone
def get_zones(self):
"""
Returns a list of Zone objects, one for each of the Hosted
Zones defined for the AWS account.
"""
zones = self.get_all_hosted_zones()
return [Zone(self, zone) for zone in
zones['ListHostedZonesResponse']['HostedZones']]
def _make_qualified(self, value):
"""
Ensure passed domain names end in a period (.) character.
This will usually make a domain fully qualified.
"""
if type(value) in [list, tuple, set]:
new_list = []
for record in value:
if record and not record[-1] == '.':
new_list.append("%s." % record)
else:
new_list.append(record)
return new_list
else:
value = value.strip()
if value and not value[-1] == '.':
value = "%s." % value
return value
    def _retry_handler(self, response, i, next_sleep):
        """Decide whether a failed request should be retried.

        :param response: the HTTP response object from the failed attempt
        :param i: the retry attempt counter so far
        :param next_sleep: the delay proposed by the caller (unused unless
            we override it below)
        :return: None to let the default retry logic run, or a tuple
            (message, next_attempt_index, sleep_seconds) to force a retry
            with exponential backoff and jitter.
        """
        status = None
        boto.log.debug("Saw HTTP status: %s" % response.status)
        if response.status == 400:
            code = response.getheader('Code')
            if code and 'PriorRequestNotComplete' in code:
                # This is a case where we need to ignore a 400 error, as
                # Route53 returns this. See
                # http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/DNSLimitations.html
                msg = "%s, retry attempt %s" % (
                    'PriorRequestNotComplete',
                    i
                )
                # Randomized exponential backoff: up to 2**i seconds.
                next_sleep = random.random() * (2 ** i)
                i += 1
                status = (msg, i, next_sleep)
        return status
| mit | 573,871,493,404,455,400 | 38.38051 | 94 | 0.567548 | false |
CountZer0/PipelineConstructionSet | python/common/core/globalVariables.py | 1 | 1227 | '''
Author: Jason Parks
Created: Apr 22, 2012
Module: common.core.globalVariables
Purpose: to import globalVariables
'''
# Location of Toolset
#toolsLocation = '/Users/jasonparks/Documents/workspace/PipelineConstructionSet'
toolsLocation = 'C:/Users/jason/git/PipelineConstructionSet'
# NOTE!: It is necessary to manually add the above location's
# python directory, i.e-
#
# PYTHONPATH = 'C:/Users/jason/git/PipelineConstructionSet/python'
#
# to the PYTHONPATH environment variable on all user's
# machines whom want to use Pipeline Construction set
# optionally set to a space on the network for easy T.A. access
logLocation = 'C:/temp/pipelineConstructionSet'
# Name your games here:
teamA = 'GreatGameA'
teamB = 'GreatGameB'
teamC = 'GreatGameC'
teamD = 'GreatGameD'
# You need to change the name of the file
# ./PipelineConstructionSet/schemas/GreatGameA.xml
# and the xml header info in the file as well
# If you are making tools for more than one team,
# you'll need to make more GreatGame*.xml files
# manually update to date/time
build = '103012-20.33'
# This will show up in the PythonEditor or ScriptEditor
# when our DCC app first launches the toolMenu.
print "common.core.globalVariables imported" | bsd-3-clause | 4,143,952,701,070,569,500 | 27.55814 | 80 | 0.764466 | false |
vienin/vlaunch | src/updater.py | 1 | 9201 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# UFO-launcher - A multi-platform virtual machine launcher for the UFO OS
#
# Copyright (c) 2008-2009 Agorabox, Inc.
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import logging
import os, os.path as path
import urllib
import sys
from ConfigParser import ConfigParser
import gui
import tempfile
from conf import conf
import socket
import utils
def get_latest_version():
    """Query the update server and return (version, archive_size, unpacked_size).

    *version* is a list of ints (e.g. [1, 2, 3]); *archive_size* is the
    byte size of the downloadable tarball; *unpacked_size* is the size
    reported by the server for the installed update.
    """
    socket.setdefaulttimeout(5)
    raw = urllib.urlopen(conf.UPDATEURL + "/latest").read()
    latest_version = [int(part) for part in raw.split('.')]
    string_version = ".".join(map(str, latest_version))
    tarball_url = conf.UPDATEURL + "/launcher-" + string_version + ".tar.bz2"
    latest_size = int(urllib.urlopen(tarball_url).headers.get("content-length"))
    latest_real_size = int(urllib.urlopen(conf.UPDATEURL + "/size-" + string_version).read())
    logging.debug("Available version on the Net : " + str(string_version) + " (" + str(latest_size / 1024) + " k)")
    return latest_version, latest_size, latest_real_size
def check_update(backend):
    """Compare the local launcher version to the server's; if newer, ask the
    user, verify free space, then hand off to the updater executable via
    os.execv (this process is replaced and never returns in that case).

    :param backend: platform backend providing get_free_space(),
        checking_pyqt() and prepare_update()
    """
    logging.debug("Using launcher version : " + str(conf.VERSION))
    try:
        logging.debug("Checking updates")
        local_version = map(int, conf.VERSION.split('.'))
        latest_version, latest_size, latest_real_size = get_latest_version()
        # Lexicographic comparison of [major, minor, patch] int lists.
        if local_version < latest_version :
            logging.debug("Updating to new version. Asking to user...")
            input = gui.dialog_question(title=_("Update available"),
                                        msg=_("A more recent version of the %s launcher is available,"
                                              "do you want to install it ? (%s Mo to download) ?") % (conf.PRODUCTNAME, latest_size / (1024*1024),),
                                        button1=_("Yes"), button2=_("No"))
            logging.debug("Got : " + str(input))
            if input == _("Yes"):
                # Check available space
                # Files listed in the old filelist will be removed by the
                # update, so their size counts as reclaimable space.
                removed_space = 0
                for file in open(os.path.join(conf.DATA_DIR, "launcher.filelist")).readlines():
                    try:
                        size = os.stat(path.join(path.dirname(conf.DATA_DIR), file.strip())).st_size
                    except:
                        continue
                    removed_space = removed_space + size
                # Loop until enough space is free or the user gives up.
                while True:
                    available_space = backend.get_free_space(conf.DATA_DIR)
                    if available_space + removed_space < latest_real_size:
                        input = gui.dialog_error_report(_("Insufficient free space"),
                                                        _("The available space on your key is insufficient for the update.<br><br>"
                                                          "Please remove more than <b>%s Mo</b> in the <b>\"Public\"</b> directory and retry.") %
                                                        ((latest_real_size - (available_space + removed_space)) / (1024*1024),),
                                                        _("Retry"),
                                                        error=False)
                        if not input:
                            return
                    else:
                        break
                # Run Updater and close launcher
                backend.checking_pyqt()
                executable = backend.prepare_update()
                cmd = [ executable,
                        "--update",
                        path.dirname(conf.DATA_DIR), ".".join(map(str, latest_version)),
                        "--relaunch", conf.SCRIPT_PATH ]
                logging.debug("Launching updater : " + " ".join(cmd))
                logging.shutdown()
                # Replace the current process with the updater.
                os.execv(executable, cmd)
                sys.exit(0)
    except SystemExit:
        sys.exit(0)
    except:
        # Best-effort: any failure during the check is logged, not fatal.
        import traceback
        info = sys.exc_info()
        logging.debug("Unexpected error: " + str(info[1]))
        logging.debug("".join(traceback.format_tb(info[2])))
        logging.debug("Exception while updating")
def remove_deprecated_files(tar, old, dest):
    """Delete entries under *dest* that are listed in the previous release's
    filelist *old* but are absent from the update archive *tar*.

    Files and symlinks are unlinked first; directories are then removed
    deepest-first so that emptied parents can be removed too (the old
    unordered pass could leave nested deprecated directories behind).
    Filesystem errors are logged and skipped, as before.

    :param tar: an open tarfile whose member names form the new filelist
    :param old: path to the previous "launcher.filelist"
    :param dest: installation root the relative names are joined against
    """
    if not os.path.exists(old):
        return
    listing = open(old)
    try:
        old_names = set(line.strip() for line in listing)
    finally:
        listing.close()  # the original leaked this file handle
    deprecated = old_names - set(tar.getnames())
    paths = [os.path.join(dest, name) for name in deprecated]
    for f in paths:
        if os.path.islink(f) or os.path.isfile(f):
            try:
                os.unlink(f)
            # Only swallow filesystem errors (the old bare except also
            # caught KeyboardInterrupt/SystemExit).
            except OSError:
                logging.debug("Could not remove file " + f)
    # Deepest directories first so children go before their parents.
    for d in sorted(paths, key=lambda p: p.count(os.sep), reverse=True):
        if os.path.isdir(d):
            try:
                os.rmdir(d)
            except OSError:
                logging.debug("Could not remove directory " + d)
def self_update(ufo_dir, relaunch):
    """Download and install the latest launcher release into *ufo_dir*,
    update the recorded version in settings.conf, then restart the
    launcher by exec'ing *relaunch* (this function never returns).

    On OS X the key is remounted before and after installation and the
    archive is extracted with the system ``tar``; elsewhere a GUI-driven
    extraction is used.  Any failure shows an error dialog; the restart
    happens regardless.
    """
    try:
        latest_version, x, x = get_latest_version()
        latest_version = ".".join(map(str, latest_version))
        try:
            # Remount the key on OS X — presumably to flush/refresh its
            # filesystem state before writing; best-effort only.
            if sys.platform == "darwin":
                mount = utils.grep(utils.call([ "mount" ], output=True)[1], ufo_dir)
                if mount:
                    dev = mount.split()[0]
                    utils.call([ "diskutil", "unmount", dev ])
                    utils.call([ "diskutil", "mount", dev ])
        except:
            pass
        url = conf.UPDATEURL + "/launcher-" + latest_version + ".tar.bz2"
        filename = tempfile.mkstemp()[1]
        logging.debug("Downloading " + url + " to " + filename)
        retcode = gui.download_file(url, filename, title=_("Downloading update"),
                                    msg=_("Please wait while the update is being downloaded"),
                                    success_msg=_("Your key will now be updated.<br>"
                                                  "This operation can take a few minutes<br><br>\n"
                                                  "<b>The USB key absolutely must not be unplugged during this process.</b>"))
        if retcode:
            raise Exception("Download was canceled")
        logging.debug("Extracting update " + filename + " to " + ufo_dir)
        import tarfile
        tar = tarfile.open(filename)
        filelist = path.join(ufo_dir, ".data", "launcher.filelist")
        # Remove files from the previous release that the new archive no
        # longer ships before extracting the new contents.
        gui.wait_command(cmd=[remove_deprecated_files, tar, filelist, os.path.normcase(ufo_dir)],
                         title=_("Removing old files"),
                         msg=_("Please wait while the old files are being removed"))
        if sys.platform == "darwin":
            gui.wait_command(cmd=[ "tar", "-C", ufo_dir, "-xjf", filename ],
                             title=_("Installing update"),
                             msg=_("Please wait while the update is being installed.<br><br>"
                                   "<b>The USB key absolutely must not be unplugged.</b>"))
            mount = utils.grep(utils.call([ "mount" ], output=True)[1], ufo_dir)
            if mount:
                dev = mount.split()[0]
                utils.call([ "diskutil", "unmount", dev ])
                utils.call([ "diskutil", "mount", dev ])
            # At this point we consider that update terminated successfully,
            # as tar command return higher than 0 when all files has been copied.
            success = True
        else:
            success = gui.extract_tar(tgz=tar,
                                      dest=os.path.normcase(ufo_dir),
                                      title=_("Installing update"),
                                      msg=_("Please wait while the update is being installed.<br><br>"
                                            "<b>The USB key absolutely must not be unplugged.</b>"))
        tar.close()
        if not success:
            raise Exception("Installation has failed")
        logging.debug("Updating settings.conf")
        # Persist the new version number so check_update sees it next run.
        cp = ConfigParser()
        cp.read([ conf.conf_file ])
        cp.set("launcher", "VERSION", latest_version)
        cp.write(open(conf.conf_file, "w"))
        gui.dialog_info(title=_("Information"),
                        msg=_("Your %s launcher is up to date (v") % (conf.PRODUCTNAME,) + latest_version + ") !")
        try:
            os.remove(filename)
        except:
            pass
    except:
        gui.dialog_info(title=_("Error"),
                        msg=_("An error occurred. You key could not be updated."))
        import traceback
        info = sys.exc_info()
        logging.debug("Unexpected error: " + str(info[1]))
        logging.debug("".join(traceback.format_tb(info[2])))
        logging.debug("Exception while updating")
    # Always restart the launcher, whether or not the update succeeded.
    logging.debug("Restarting UFO launcher : " + relaunch)
    logging.shutdown()
    os.execv(relaunch, [ relaunch, "--respawn" ] )
| gpl-2.0 | -7,252,024,127,118,176,000 | 43.235577 | 145 | 0.536572 | false |
veter-team/mechspeak | src/imshow.py | 1 | 1438 | import os, sys
import io
import Tkinter
import Image, ImageTk
from time import sleep
import cStringIO
import paho.mqtt.client as mqtt
host = "test.mosquitto.org"
#host = "localhost"
qos = 0
sensors_topic = "/rtmsg/d25638bb-17c2-46ac-b26e-ce1f67268088/sensors/"
camera_topic = sensors_topic + "camera1"
imgcnt = 0
def on_message(client, userdata, message):
    """MQTT callback: decode the payload as an image and display it.

    Each frame replaces the previous Tk label; the window is resized to
    the frame and the title shows the running frame count.  Any payload
    that cannot be decoded ends the session by disconnecting the client.
    """
    global imgcnt
    global old_label_image
    global root
    try:
        frame = Image.open(cStringIO.StringIO(message.payload))
        width, height = frame.size
        root.geometry('%dx%d' % (width, height))
        photo = ImageTk.PhotoImage(frame)
        label = Tkinter.Label(root, image=photo)
        label.place(x=0, y=0, width=width, height=height)
        imgcnt += 1
        root.title(str(imgcnt))
        # Destroy the previous frame's label only after the new one exists.
        if old_label_image is not None:
            old_label_image.destroy()
        old_label_image = label
        root.update()  # process pending Tk events so the frame appears
    except Exception as e:
        # Skip anything that is not an image: Image.open raises when it
        # cannot decode the payload.  Disconnect so loop_forever() returns.
        print(e)
        mqttc.disconnect()
# Create an MQTT client with a per-process id, connect to the broker and
# subscribe to the camera topic; frames are handled by on_message above.
mqttc = mqtt.Client("zatoichi" + str(os.getpid()))
print('Connecting...')
mqttc.connect(host)
print('Connected')
mqttc.on_message = on_message
mqttc.subscribe(camera_topic)
# Tk window that displays incoming frames, placed at screen offset (128, 128).
root = Tkinter.Tk()
root.geometry('+%d+%d' % (128, 128))
# Previously displayed label; destroyed when a new frame arrives.
old_label_image = None
# Block forever dispatching MQTT callbacks (until on_message disconnects).
mqttc.loop_forever()
| mit | -6,768,945,433,732,105,000 | 25.62963 | 77 | 0.666898 | false |
radarsat1/siconos | io/swig/io/vview.py | 1 | 109726 | #!/usr/bin/env @PYTHON_EXECUTABLE@
"""
Description: Viewer and exporter for Siconos mechanics-IO HDF5 files based on VTK.
"""
# Lighter imports before command line parsing
from __future__ import print_function
import sys
import os
import json
import getopt
import math
import traceback
import vtk
from vtk.util.vtkAlgorithm import VTKPythonAlgorithmBase
from vtk.numpy_interface import dataset_adapter as dsa
# Exports from this module
__all__ = ['VView', 'VViewOptions', 'VExportOptions', 'VViewConfig']
if hasattr(math, 'inf'):
infinity = math.inf
else:
infinity = float('inf')
## Persistent configuration
class VViewConfig(dict):
    """Persistent vview configuration, stored as JSON in the user's home.

    Behaves as a plain dict of settings.  load_configuration() merges the
    on-disk file into the dict; save_configuration() writes it back.
    """

    def __init__(self, d=None, filename=None):
        """Initialize with settings *d* (defaults when None) and a JSON
        file path *filename* (defaults to ~/.config/siconos_vview.json).
        """
        # Build the defaults freshly per instance: a mutable default
        # argument would share the nested lists across all instances.
        if d is None:
            d = {'background_color': [0., 0., 0.],
                 'window_size': [600, 600]}
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this class is ever subclassed.
        super(VViewConfig, self).__init__(d)
        # Whether save_configuration() should write the file at exit.
        self.should_save_config = True
        if filename is not None:
            self.filename = filename
        else:
            self.filename = os.path.join(os.environ['HOME'], '.config',
                                         'siconos_vview.json')

    def load_configuration(self):
        """Merge the JSON configuration file into this dict, if it exists.

        On any load error the file is left alone and saving is disabled so
        a corrupt-but-maybe-recoverable file is not overwritten.
        """
        if not os.path.exists(self.filename):
            return
        try:
            with open(self.filename) as f:
                self.update(json.load(f))
            print('Loaded configuration from ', self.filename)
            for k in self:
                print('  ', k, ': ', self[k])
            self.should_save_config = True
        except Exception:
            # Never let a bad config file abort the viewer.
            self.should_save_config = False
            print("Warning: Error loading configuration `{}'".format(self.filename))

    def save_configuration(self, force=False):
        """Write the current settings to self.filename as JSON.

        Parameters
        ----------
        force : bool
            Write even when a previous load failure marked the
            configuration as not-to-be-saved.
        """
        if not force and not self.should_save_config:
            return
        try:
            # Create the parent directory of the actual target file (the
            # original code always created $HOME/.config, which failed
            # for custom filenames in other, missing directories).
            config_dir = os.path.dirname(self.filename)
            if config_dir and not os.path.exists(config_dir):
                os.mkdir(config_dir)
            with open(self.filename, 'w') as f:
                json.dump(self, f)
        except Exception:
            print("Error saving configuration `{}'".format(self.filename))
class VViewOptions(object):
    """Command-line options for the vview visualization tool.

    parse() reads sys.argv, configure() applies (option, value) pairs;
    the resulting settings are exposed as attributes consumed by VView.
    """

    def __init__(self):
        self.min_time = None
        self.max_time = None
        self.cf_scale_factor = 1
        self.normalcone_ratio = 1
        self.time_scale_factor = 1
        self.advance_by_time = None
        self.frames_per_second = 25
        self.cf_disable = False
        # Immediate-mode rendering: optional while VTK still exposes the
        # toggle, unconditional with VTK >= 8 where the method is gone.
        if hasattr(vtk.vtkPolyDataMapper(), 'ImmediateModeRenderingOff'):
            self.imr = False
        else:
            # vtk 8
            self.imr = True
        self.depth_peeling = True
        self.maximum_number_of_peels = 100
        self.occlusion_ratio = 0.1
        self.global_filter = False
        # [position, focal point, up vector, parallel scale, clipping range]
        self.initial_camera = [None] * 5
        self.visible_mode = 'all'
        self.export = False
        self.gen_para_script = False
        self.with_edges = False
        self.with_random_color = True
        self.with_charts = 0

    ## Print usage information
    def usage(self, long=False):
        """Print short usage, or the full option list when long is True."""
        print(__doc__); print()
        print('Usage: {0} [OPTION]... <HDF5>'
              .format(os.path.split(sys.argv[0])[1]))
        print()
        if not long:
            print("""[--help] [--tmin=<float value>] [--tmax=<float value>]
    [--cf-scale=<float value>] [--no-cf] [--imr] [--global-filter]
    [--no-depth-peeling] [--maximum-number-of-peels=<int value>]
    [--occlusion-ratio=<float value>]
    [--normalcone-ratio = <float value>]
    [--advance=<'fps' or float value>] [--fps=float value]
    [--camera=x,y,z] [--lookat=x,y,z] [--up=x,y,z] [--clipping=near,far] [--ortho=scale]
    [--with-charts=<int value>]
    [--visible=all,avatars,contactors] [--with-edges]
""")
        else:
            print("""Options:
     --help
       display this message
     --version
       display version information
     --tmin= value
       set the time lower bound for visualization
     --tmax= value
       set the time upper bound for visualization
     --cf-scale= value  (default : 1.0 )
       rescale the arrow representing the contact forces by the value.
       the normal cone and the contact points are also rescaled
     --no-cf
       do not display contact forces
     --imr
       immediate-mode-rendering, use less memory at the price of
       slower rendering
     --global-filter (default : off)
       With export mode, concatenates all blocks in a big vtkPolyData.
       This option is for when the number of objects is huge.
       With vview, the display is done with only one vtk
       actor. Note that global-filter use a vtkCompositeDataGeometryFilter
       which is slow.
     --no-depth-peeling (default : on)
       do not use vtk depth peeling
     --maximum-number-of-peels= value
       maximum number of peels when depth peeling is on
     --occlusion-ratio= value
       occlusion ratio when depth peeling is on
     --normalcone-ratio = value  (default : 1.0 )
       introduce a ratio between the representation of the contact
       forces arrows the normal cone and the contact points. useful
       when the contact forces are small with respect to the
       characteristic dimension
     --advance= value or 'fps'
       automatically advance time during recording (default : don't
       advance)
     --fps= value
       frames per second of generated video (default 25)
     --camera=x,y,z
       initial position of the camera (default=above looking down)
     --lookat=x,y,z
       initial direction to look (default=center of bounding box)
     --up=x,y,z
       initial up direction of the camera (default=y-axis)
     --ortho=scale
       start in ortho mode with given parallel scale
       (default=perspective)
     --with-charts=value
       display convergence charts
     --visible=all
       all: view all contactors and avatars
       avatars: view only avatar if an avatar is defined (for each
       object) contactors: ignore avatars, view only contactors where
       avatars are contactors with collision_group=-1
     --with-edges
       add edges in the rendering (experimental for primitives)
     --with-fixed-color
       use fixed color defined in the config file
    """)

    def parse(self):
        """Parse sys.argv; print usage and exit(2) on bad options."""
        try:
            opts, args = getopt.gnu_getopt(sys.argv[1:], '',
                                           ['help', 'version',
                                            'dat', 'tmin=', 'tmax=',
                                            'no-cf', 'imr', 'global-filter',
                                            'no-depth-peeling',
                                            'maximum-number-of-peels=',
                                            'occlusion-ratio=',
                                            'cf-scale=', 'normalcone-ratio=',
                                            'advance=', 'fps=',
                                            'camera=', 'lookat=', 'up=', 'clipping=', 'ortho=', 'visible=', 'with-edges', 'with-fixed-color', 'with-charts='])
            self.configure(opts, args)
        except getopt.GetoptError as err:
            sys.stderr.write('{0}\n'.format(str(err)))
            self.usage()
            exit(2)

    def configure(self, opts, args):
        """Apply parsed (option, value) pairs; args[0] is the HDF5 file."""
        for o, a in opts:
            if o == '--help':
                self.usage(long=True)
                exit(0)
            elif o == '--version':
                print('{0} @SICONOS_VERSION@'.format(
                    os.path.split(sys.argv[0])[1]))
                exit(0)
            elif o == '--tmin':
                self.min_time = float(a)
            elif o == '--tmax':
                self.max_time = float(a)
            elif o == '--cf-scale':
                self.cf_scale_factor = float(a)
            elif o == '--no-cf':
                self.cf_disable = True
            elif o == '--imr':
                self.imr = True
            elif o == '--no-depth-peeling':
                self.depth_peeling = False
            elif o == '--maximum-number-of-peels':
                self.maximum_number_of_peels = int(a)
            elif o == '--occlusion-ratio':
                self.occlusion_ratio = float(a)
            elif o == '--global-filter':
                self.global_filter = True
            elif o == '--normalcone-ratio':
                self.normalcone_ratio = float(a)
            elif o == '--advance':
                if 'fps' in a:
                    self.advance_by_time = \
                        eval(a, {'fps': 1.0 / self.frames_per_second})
                else:
                    self.advance_by_time = float(a)
            elif o == '--fps':
                self.frames_per_second = int(a)
            elif o == '--camera':
                # list() so the coordinates survive repeated indexing on
                # Python 3 (map objects are single-use iterators).
                self.initial_camera[0] = list(map(float, a.split(',')))
            elif o == '--lookat':
                self.initial_camera[1] = list(map(float, a.split(',')))
            elif o == '--up':
                self.initial_camera[2] = list(map(float, a.split(',')))
            elif o == '--clipping':
                self.initial_camera[4] = list(map(float, a.split(',')))
            elif o == '--ortho':
                self.initial_camera[3] = float(a)
            elif o == '--with-charts':
                # getopt strips the trailing '=' from long options, so the
                # comparison must not include it (this branch previously
                # compared against '--with-charts=' and never matched).
                self.with_charts = int(a)
            elif o == '--visible':
                self.visible_mode = a
            elif o == '--with-edges':
                self.with_edges = True
            elif o == '--with-fixed-color':
                self.with_random_color = False

        if self.frames_per_second == 0:
            self.frames_per_second = 25

        if len(args) > 0:
            self.io_filename = args[0]
        else:
            self.usage()
            exit(1)
class VExportOptions(VViewOptions):
    """Command-line options for exporting an HDF5 simulation to VTK files."""

    def __init__(self):
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this class is ever subclassed.
        super(VExportOptions, self).__init__()
        self.export = True
        self.ascii_mode = False
        self.start_step = 0
        self.end_step = None
        self.stride = 1
        self.nprocs = 1
        self.gen_para_script = False

    def usage(self, long=False):
        """Print short usage, or the full option list when long is True."""
        print(__doc__); print()
        print('Usage: {0} [--help] [--version] [--ascii] <HDF5>'
              .format(os.path.split(sys.argv[0])[1]))
        if long:
            print()
            print("""Options:
     --help                display this message
     --version             display version information
     --global-filter       one vtp file/time step
     --start-step=n        integer, set the first simulation time step
                           number (default: 0)
     --end-step=n          integer, set the last simulation time step
                           number (default: None)
     --stride=n            integer, set export time step/simulation time step
                           (default: 1)
     --ascii               export file in ascii format
     --gen-para-script=n   generation of a gnu parallel command for n processus
    """)

    def parse(self):
        """Parse sys.argv; print usage and exit(2) on bad options."""
        try:
            opts, args = getopt.gnu_getopt(sys.argv[1:], '',
                                           ['help', 'version', 'ascii',
                                            'start-step=', 'end-step=',
                                            'stride=', 'global-filter',
                                            'gen-para-script='])
            self.configure(opts, args)
        except getopt.GetoptError as err:
            sys.stderr.write('{0}\n'.format(str(err)))
            self.usage()
            exit(2)

    def configure(self, opts, args):
        """Apply parsed (option, value) pairs; args[0] is the HDF5 file."""
        for o, a in opts:
            if o == '--help':
                self.usage(long=True)
                exit(0)
            elif o == '--version':
                print('{0} @SICONOS_VERSION@'.format(
                    os.path.split(sys.argv[0])[1]))
                exit(0)
            elif o == '--global-filter':
                self.global_filter = True
            elif o == '--start-step':
                self.start_step = int(a)
            elif o == '--end-step':
                self.end_step = int(a)
            elif o == '--stride':
                self.stride = int(a)
            elif o == '--gen-para-script':
                self.gen_para_script = True
                self.nprocs = int(a)
            elif o == '--ascii':
                # equality test: the original `o in ('--ascii')` was a
                # substring check against a plain string, not tuple
                # membership.
                self.ascii_mode = True

        if len(args) > 0:
            self.io_filename = args[0]
        else:
            self.usage()
            exit(1)
class VRawDataExportOptions(VViewOptions):
    """Command-line options for exporting raw position/velocity/contact data."""

    def __init__(self, io_filename=None):
        # Name the class explicitly: super(self.__class__, ...) recurses
        # forever if this class is ever subclassed.
        super(VRawDataExportOptions, self).__init__()
        self.export = True
        self._export_position = True
        self._export_velocity = True
        self._export_cf = False
        self._export_velocity_in_absolute_frame = False
        self.start_step = 0
        self.end_step = None
        self.stride = 1
        self.io_filename = io_filename

    def usage(self, long=False):
        """Print short usage, or the full option list when long is True."""
        print(__doc__); print()
        print('Usage: {0} [--help] <HDF5>'
              .format(os.path.split(sys.argv[0])[1]))
        if long:
            print()
            print("""Options:
     --help       display this message
     --version    display version information
     --start-step=n  integer, set the first simulation time step
                     number (default: 0)
     --end-step=n    integer, set the last simulation time step
                     number (default: None)
     --stride=n      integer, set export time step/simulation time step
                     (default: 1)
     --no-export-position  do not export position
     --no-export-velocity  do not export velocity
     --export-cf  do export of contact friction data
     --export-velocity-in-absolute-frame  do export of velocities in the absolute frame
    """)

    def parse(self):
        """Parse sys.argv; print usage and exit(2) on bad options."""
        try:
            # NOTE(review): '--ascii' is accepted here but never handled in
            # configure(); kept registered for backward compatibility.
            opts, args = getopt.gnu_getopt(sys.argv[1:], '',
                                           ['help', 'version', 'ascii',
                                            'start-step=', 'end-step=',
                                            'stride=',
                                            'no-export-position',
                                            'no-export-velocity',
                                            'export-cf',
                                            'export-velocity-in-absolute-frame'])
            self.configure(opts, args)
        except getopt.GetoptError as err:
            sys.stderr.write('{0}\n'.format(str(err)))
            self.usage()
            exit(2)

    def configure(self, opts, args):
        """Apply parsed (option, value) pairs; args[0] is the HDF5 file."""
        for o, a in opts:
            if o == '--help':
                self.usage(long=True)
                exit(0)
            elif o == '--version':
                print('{0} @SICONOS_VERSION@'.format(
                    os.path.split(sys.argv[0])[1]))
                exit(0)
            elif o == '--start-step':
                self.start_step = int(a)
            elif o == '--end-step':
                self.end_step = int(a)
            elif o == '--stride':
                self.stride = int(a)
            elif o == '--no-export-position':
                self._export_position = False
            elif o == '--no-export-velocity':
                self._export_velocity = False
            elif o == '--export-cf':
                self._export_cf = True
            elif o == '--export-velocity-in-absolute-frame':
                self._export_velocity_in_absolute_frame = True

        if self.io_filename is None:
            if len(args) > 0:
                self.io_filename = args[0]
            else:
                self.usage()
                exit(1)
## Utilities
def add_compatiblity_methods(obj):
    """Alias old VTK method names to their modern counterparts.

    Older VTK releases expose ``SetInput``/``AddInput`` instead of
    ``SetInputData``/``AddInputData``; make both spellings usable on *obj*.
    """
    for old_name, new_name in (('SetInput', 'SetInputData'),
                               ('AddInput', 'AddInputData')):
        if hasattr(obj, old_name):
            setattr(obj, new_name, getattr(obj, old_name))
obj.AddInputData = obj.AddInput
def random_color():
    """Return an (r, g, b) color drawn uniformly from [0.1, 0.9]^3."""
    return (random.uniform(0.1, 0.9),
            random.uniform(0.1, 0.9),
            random.uniform(0.1, 0.9))
return r, g, b
class Quaternion():
    """Thin wrapper around vtkQuaternion with rotation helpers."""

    def __init__(self, *args):
        import vtk
        self._vtkmath = vtk.vtkMath()
        self._data = vtk.vtkQuaternion[float](*args)

    def __mul__(self, q):
        """Hamilton product self * q as a new Quaternion."""
        product = Quaternion()
        self._vtkmath.MultiplyQuaternion(self._data, q._data, product._data)
        return product

    def __getitem__(self, i):
        return self._data[i]

    def conjugate(self):
        """Return the conjugate (w, -x, -y, -z) as a new Quaternion."""
        result = Quaternion((self[0], self[1], self[2], self[3]))
        result._data.Conjugate()
        return result

    def rotate(self, v):
        """Rotate 3-vector *v* by this quaternion: q * (0, v) * conj(q)."""
        pure = Quaternion((0, v[0], v[1], v[2]))
        rotated = self * pure * self.conjugate()
        # assert(rotated[0] == 0)
        return [rotated[1], rotated[2], rotated[3]]

    def axisAngle(self):
        """Return (axis, angle) of the rotation this quaternion encodes."""
        axis = [0, 0, 0]
        angle = self._data.GetRotationAngleAndAxis(axis)
        return axis, angle
class InputObserver():
    """Event hub for the vview window.

    Handles keyboard shortcuts, the time slider, chart selections and the
    recording timer; keeps the current simulation time (self._time) and
    pushes it into the IO reader and actors before re-rendering.
    """

    def __init__(self, vview, times=None, slider_repres=None):
        # vview: owning VView (provides reader, actors, renderer).
        # times: sorted simulation times; None/empty disables time handling.
        # slider_repres: vtkSliderRepresentation kept in sync with the time.
        self.vview = vview
        self._opacity = 1.0
        self._opacity_static = 1.0
        self._opacity_contact = 0.4
        self._current_id = vtk.vtkIdTypeArray()
        self._renderer = vview.renderer
        self._renderer_window = vview.renderer_window
        self._image_counter = 0
        self._view_cycle = -1
        self._recording = False
        self._times = None
        if times is None or len(times)==0:
            return
        self._times = times
        self._stimes = set(times)
        # Average spacing between distinct times: keyboard stepping unit.
        self._time_step = (max(self._stimes) - min(self._stimes)) \
            / len(self._stimes)
        self._time = min(times)
        if slider_repres is None:
            return
        self._slider_repres = slider_repres

    def update(self):
        """Push self._time into the reader pipeline and redraw the scene."""
        self.vview.io_reader.SetTime(self._time)
        if self._times is None:
            self.vview.renderer_window.Render()
            return
        if not self.vview.opts.cf_disable:
            # Refresh the contact glyph pipelines for every friction
            # coefficient present in the data.
            self.vview.io_reader.Update()
            for mu in self.vview.io_reader._mu_coefs:
                self.vview.contact_posa[mu].Update()
                self.vview.contact_posb[mu].Update()
                self.vview.contact_pos_force[mu].Update()
                self.vview.contact_pos_norm[mu].Update()
        self.vview.set_dynamic_actors_visibility(self.vview.io_reader._time)
        pos_data = self.vview.io_reader.pos_data
        self.vview.set_position(pos_data)
        self._slider_repres.SetValue(self.vview.io_reader._time)
        # Highlight the current time step in the convergence charts.
        self._current_id.SetNumberOfValues(1)
        self._current_id.SetValue(0, self.vview.io_reader._index)
        if self.vview.opts.with_charts:
            self.vview.iter_plot.SetSelection(self._current_id)
            self.vview.prec_plot.SetSelection(self._current_id)
        self.vview.renderer_window.Render()

    def set_opacity(self):
        """Apply self._opacity to all dynamic body actors."""
        for instance, actors in self.vview.dynamic_actors.items():
            for actor,_,_ in actors:
                actor.GetProperty().SetOpacity(self._opacity)

    def set_opacity_static(self):
        """Apply self._opacity_static to all static body actors."""
        for instance, actors in self.vview.static_actors.items():
            for actor,_,_ in actors:
                actor.GetProperty().SetOpacity(self._opacity_static)

    def set_opacity_contact(self):
        """Apply self._opacity_contact to all contact-element actors."""
        for mu in self.vview.io_reader._mu_coefs:
            self.vview.cactor[mu].GetProperty().SetOpacity(self._opacity_contact)
            self.vview.gactor[mu].GetProperty().SetOpacity(self._opacity_contact)
            self.vview.clactor[mu].GetProperty().SetOpacity(self._opacity_contact)
            self.vview.sactora[mu].GetProperty().SetOpacity(self._opacity_contact)
            self.vview.sactorb[mu].GetProperty().SetOpacity(self._opacity_contact)

    def key(self, obj, event):
        """Keyboard dispatcher: time stepping, opacity, camera, recording."""
        key = obj.GetKeySym()
        print('key', key)
        # 'r': reload the data file and refresh the slider bounds.
        if key == 'r':
            self.vview.reload()
            self._slider_repres.SetMinimumValue(self.vview.min_time)
            self._slider_repres.SetMaximumValue(self.vview.max_time)
            self.update()
        # 'p': write a numbered screenshot.
        if key == 'p':
            self._image_counter += 1
            self.vview.image_maker.Update()
            self.vview.writer.SetFileName(
                'vview-{0}.png'.format(self._image_counter))
            self.vview.writer.Write()
        # Up/Down change the step size (and move); Left/Right step in time.
        if key == 'Up':
            self._time_step = self._time_step * 2.
            self._time += self._time_step
        if key == 'Down':
            self._time_step = self._time_step / 2.
            self._time -= self._time_step
        if key == 'Left':
            self._time -= self._time_step
        if key == 'Right':
            self._time += self._time_step
        # t/T, y/Y, u/U: opacity of dynamic, static and contact elements.
        if key == 't':
            print('Decrease the opacity of bodies')
            self._opacity -= .1
            self.set_opacity()
        if key == 'T':
            print('Increase the opacity of bodies')
            self._opacity += .1
            self.set_opacity()
        if key == 'y':
            print('Decrease the opacity of static bodies')
            self._opacity_static -= .1
            self.set_opacity_static()
        if key == 'Y':
            print('Increase the opacity of static bodies')
            self._opacity_static += .1
            self.set_opacity_static()
        if key == 'u':
            print('Decrease the opacity of contact elements')
            self._opacity_contact -= .1
            self.set_opacity_contact()
        if key == 'U':
            print('Increase the opacity of contact elements')
            self._opacity_contact += .1
            self.set_opacity_contact()
        # 'c': dump the camera parameters (reusable as --camera/... flags).
        if key == 'c':
            print('camera position:', self._renderer.GetActiveCamera().GetPosition())
            print('camera focal point', self._renderer.GetActiveCamera().GetFocalPoint())
            print('camera clipping range', self._renderer.GetActiveCamera().GetClippingRange())
            print('camera up vector', self._renderer.GetActiveCamera().GetViewUp())
            if self._renderer.GetActiveCamera().GetParallelProjection() != 0:
                print('camera parallel scale', self._renderer.GetActiveCamera().GetParallelScale())
        # 'o': toggle orthographic/perspective projection.
        if key == 'o':
            self._renderer.GetActiveCamera().SetParallelProjection(
                1 - self._renderer.GetActiveCamera().GetParallelProjection())
        if key == 'v':
            # Cycle through some useful views
            # NOTE(review): norm appears to come from a later import
            # (e.g. numpy.linalg) not visible in this chunk -- confirm.
            dist = norm(self._renderer.GetActiveCamera().GetPosition())
            # dist2 = norm([numpy.sqrt(dist**2)/3]*2)
            d3 = norm([numpy.sqrt(dist**2) / 3] * 3)
            self._view_cycle += 1
            if self._view_cycle == 0:
                print('Left')
                self._renderer.GetActiveCamera().SetPosition(
                    dist, 0, 0)
                self._renderer.GetActiveCamera().SetFocalPoint(0, 0, 0)
                self._renderer.GetActiveCamera().SetViewUp(0, 0, 1)
            elif self._view_cycle == 1:
                print('Right')
                self._renderer.GetActiveCamera().SetPosition(
                    0, dist, 0)
                self._renderer.GetActiveCamera().SetFocalPoint(0, 0, 0)
                self._renderer.GetActiveCamera().SetViewUp(0, 0, 1)
            elif self._view_cycle == 2:
                print('Top')
                self._renderer.GetActiveCamera().SetPosition(
                    0, 0, dist)
                self._renderer.GetActiveCamera().SetFocalPoint(0, 0, 0)
                self._renderer.GetActiveCamera().SetViewUp(1, 0, 0)
            else:  # Corner
                print('Corner')
                self._renderer.GetActiveCamera().SetPosition(
                    d3, d3, d3)
                self._renderer.GetActiveCamera().SetFocalPoint(0, 0, 0)
                self._renderer.GetActiveCamera().SetViewUp(
                    -1, -1, 1)
                self._view_cycle = -1
            self._renderer.ResetCameraClippingRange()
        if key == 'C':
            self._renderer.ResetCameraClippingRange()
        # 's'/'e': start/stop video recording.
        if key == 's':
            self.toggle_recording(True)
        if key == 'e':
            # Note 'e' has the effect to also "end" the program due to
            # default behaviour of vtkInteractorStyle, see class
            # documentation.
            self.toggle_recording(False)
        self.update()

    def time(self, obj, event):
        """Slider callback: adopt the slider's value as current time."""
        slider_repres = obj.GetRepresentation()
        self._time = slider_repres.GetValue()
        self.update()

    def toggle_recording(self, recording):
        """Start or stop video recording with a repeating render timer."""
        if recording and not self._recording:
            fps = 25
            self._timer_id = (self.vview.interactor_renderer
                              .CreateRepeatingTimer(1000//fps))
            self._recording = True
            self.vview.recorder.Start()
        elif self._recording and not recording:
            self.vview.interactor_renderer.DestroyTimer(self._timer_id)
            self._timer_id = None
            self._recording = False
            self.vview.recorder.End()

    # observer on 2D chart
    def iter_plot_observer(self, obj, event):
        """Chart-selection callback: jump to the selected time step."""
        if self.vview.iter_plot.GetSelection() is not None:
            # just one selection at the moment!
            if self.vview.iter_plot.GetSelection().GetMaxId() >= 0:
                self._time = self._times[
                    self.vview.iter_plot.GetSelection().GetValue(0)]
                # -> recompute index ...
                self.update()

    def prec_plot_observer(self, obj, event):
        """Chart-selection callback: jump to the selected time step."""
        if self.vview.prec_plot.GetSelection() is not None:
            # just one selection at the moment!
            if self.vview.prec_plot.GetSelection().GetMaxId() >= 0:
                self._time = self._times[
                    self.vview.prec_plot.GetSelection().GetValue(0)]
                # -> recompute index ...
                self.update()

    def recorder_observer(self, obj, event):
        """Timer callback while recording: advance time and write a frame."""
        if self._recording:
            if self.vview.opts.advance_by_time is not None:
                self._time += self.vview.opts.advance_by_time
                # Hide the widgets while the frame is captured.
                self.vview.slwsc.SetEnabled(False)  # Scale slider
                self.vview.xslwsc.SetEnabled(False)  # Time scale slider
                # slider_widget.SetEnabled(False) # Time slider (TODO video options)
                # widget.SetEnabled(False) # Axis widget
            self.update()
            self.vview.image_maker.Modified()
            self.vview.recorder.Write()
            if self.vview.opts.advance_by_time is not None:
                self.vview.slwsc.SetEnabled(True)
                self.vview.xslwsc.SetEnabled(True)
                # slider_widget.SetEnabled(True)
                # widget.SetEnabled(True) # Axis widget
            # End video if done
            if self._time >= max(self._times):
                self.toggle_recording(False)
class CellConnector(vtk.vtkProgrammableFilter):
    """Programmable filter that stamps named float arrays onto every cell.

    One numpy buffer per array name is kept in ``self._datas``; owners
    write values into those buffers in place before each execution.
    """

    def __init__(self, instance, data_names, data_sizes):
        vtk.vtkProgrammableFilter.__init__(self)

        self._instance = instance
        self._data_names = data_names
        self._data_sizes = data_sizes

        self.SetExecuteMethod(self.method)
        # Mutable per-array numpy buffers, updated externally.
        self._datas = [numpy.zeros(size) for size in data_sizes]
        self._index = list(enumerate(data_names))

        self._vtk_datas = []
        for rank, name in self._index:
            array = vtk.vtkFloatArray()
            array.SetName(name)
            array.SetNumberOfComponents(data_sizes[rank])
            self._vtk_datas.append(array)

    def method(self):
        """Shallow-copy the input and write the tuples onto each cell."""
        source = self.GetInput()
        sink = self.GetOutput()
        sink.ShallowCopy(source)

        ncells = sink.GetNumberOfCells()
        cell_data = sink.GetCellData()

        for rank, name in self._index:
            self._vtk_datas[rank].SetNumberOfTuples(ncells)
            if cell_data.GetArray(name) is None:
                cell_data.AddArray(self._vtk_datas[rank])

            values = self._datas[rank][0:self._data_sizes[rank]]
            target = cell_data.GetArray(name)
            for cell in range(ncells):
                target.SetTuple(cell, values)
def makeConvexSourceClass():
    """Build and return the ConvexSource class.

    Deferred class creation so the vtk templates are only instantiated
    when a convex source is actually needed.
    """

    class UnstructuredGridSource(vtk.vtkProgrammableSource):
        def GetOutputPort(self):
            # Port 3 of vtkProgrammableSource is the UnstructuredGridOutput.
            return vtk.vtkProgrammableSource.GetOutputPort(self, 3)

    class ConvexSource(UnstructuredGridSource):
        """Programmable source producing a single convex cell."""

        def __init__(self, convex, points):
            self._convex = convex
            self._points = points
            self.SetExecuteMethod(self.method)

        def method(self):
            grid = self.GetUnstructuredGridOutput()
            grid.Allocate(1, 1)
            grid.InsertNextCell(
                self._convex.GetCellType(), self._convex.GetPointIds())
            grid.SetPoints(self._points)

    return ConvexSource
# attempt for a vtk reader
# only half the way, the reading part is ok but the output is only used
# in vview and export from python members
class IOReader(VTKPythonAlgorithmBase):
    """VTK source algorithm over a siconos mechanics-IO HDF5 file.

    Exposes the body positions/velocities at the pipeline's requested
    time as a vtkPolyData, and optionally the contact-force records, both
    as VTK arrays and as numpy members read directly by VView/export.
    """

    def __init__(self):
        VTKPythonAlgorithmBase.__init__(self,
                                        nInputPorts=0,
                                        nOutputPorts=1,
                                        outputType='vtkPolyData')
        self._io = None                 # mechanics-IO handle, set by SetIO()
        self._with_contact_forces = False
        self.cf_data = None             # contact-force rows at current time
        self.time = 0                   # last time given to SetTime()
        self.timestep = 0               # |previous time - requested time|
        self.points = vtk.vtkPoints()

    def RequestInformation(self, request, inInfo, outInfo):
        """Advertise the discrete time steps and time range downstream."""
        info = outInfo.GetInformationObject(0)
        info.Set(vtk.vtkStreamingDemandDrivenPipeline.TIME_STEPS(),
                 self._times,
                 len(self._times))
        info.Set(vtk.vtkStreamingDemandDrivenPipeline.TIME_RANGE(),
                 [self._times[0], self._times[-1]], 2)
        return 1

    def RequestData(self, request, inInfo, outInfo):
        """Fill the output with the data rows of the requested time step."""
        info = outInfo.GetInformationObject(0)
        output = vtk.vtkPolyData.GetData(outInfo)
        output.SetPoints(self.points)
        # The time step requested
        t = info.Get(vtk.vtkStreamingDemandDrivenPipeline.UPDATE_TIME_STEP())
        # Index of the latest stored time <= t (clamped at 0).
        id_t = max(0, numpy.searchsorted(self._times, t, side='right') - 1)
        # Row range of this time step in the flat HDF5 tables.
        if id_t < len(self._indices)-1:
            self._id_t_m = range(self._indices[id_t],
                                 self._indices[id_t+1])
        else:
            self._id_t_m = [self._indices[id_t]]
        self._time = self._times[id_t]
        self._index = id_t
        self.pos_data = self._idpos_data[self._id_t_m, :]
        self.velo_data = self._ivelo_data[self._id_t_m, :]
        # NOTE(review): vtk_pos_data is built but never attached to the
        # output (only velo_data is) -- confirm whether intentional.
        vtk_pos_data = dsa.numpyTovtkDataArray(self.pos_data)
        vtk_pos_data.SetName('pos_data')
        vtk_velo_data = dsa.numpyTovtkDataArray(self.velo_data)
        vtk_velo_data.SetName('velo_data')
        # Columns 2:5 of pos_data hold the translation coordinates.
        vtk_points_data = dsa.numpyTovtkDataArray(self.pos_data[:, 2:5])
        self.points.SetData(vtk_points_data)
        output.GetPointData().AddArray(vtk_velo_data)
        try:
            if self._with_contact_forces:
                ncfindices = len(self._cf_indices)
                id_t_cf = min(numpy.searchsorted(self._cf_times, t,
                                                 side='right'),
                              ncfindices-1)
                # Check the duration between t and last impact.
                # If it is superior to current time step, we consider there
                # is no contact (rebound).
                # The current time step is the max between slider timestep
                # and simulation timestep
                ctimestep = max(self.timestep, self._avg_timestep)
                if (id_t_cf > 0 and abs(t-self._cf_times[id_t_cf-1])
                    <= ctimestep):
                    if id_t_cf < ncfindices-1:
                        self._id_t_m_cf = range(self._cf_indices[id_t_cf-1],
                                                self._cf_indices[id_t_cf])
                        self.cf_data = self._icf_data[self._id_t_m_cf, :]
                    else:
                        # Last stored step: take all remaining rows.
                        self.cf_data = self._icf_data[self._cf_indices[
                            id_t_cf]:, :]
                    self._cf_time = self._cf_times[id_t_cf]
                    vtk_cf_data = dsa.numpyTovtkDataArray(self.cf_data)
                    vtk_cf_data.SetName('cf_data')
                    output.GetFieldData().AddArray(vtk_cf_data)
                else:
                    # there is no contact forces at this time
                    self.cf_data = None
                    vtk_cf_data = dsa.numpyTovtkDataArray(numpy.array([]))
                    vtk_cf_data.SetName('cf_data')
                    output.GetFieldData().AddArray(vtk_cf_data)
                if self.cf_data is not None:
                    self.contact = True
                    data = self.cf_data
                    # Split the rows per friction coefficient (column 1).
                    for mu in self._mu_coefs:
                        imu = numpy.where(
                            abs(data[:, 1] - mu) < 1e-15)[0]
                        #dom_imu = None
                        #dom_imu = numpy.where(
                        #    self._dom_data[:,-1] == data[id_f[imu],-1]
                        #)[0]
                        if len(imu) > 0:
                            # Columns: 2:5 point A, 5:8 point B,
                            # 8:11 normal (negated), 11:14 force.
                            self.cpa_at_time[mu] = data[
                                imu, 2:5]
                            self.cpb_at_time[mu] = data[
                                imu, 5:8]
                            self.cn_at_time[mu] = - data[
                                imu, 8:11]
                            self.cf_at_time[mu] = data[
                                imu, 11:14]
                            # Wide format carries interaction/ds ids in 23:26.
                            if data[imu, :].shape[1] > 26:
                                self.ids_at_time[mu] = data[
                                    imu, 23:26].astype(int)
                            else:
                                self.ids_at_time[mu] = None
                else:
                    # No contact: publish NaN placeholders per coefficient.
                    for mu in self._mu_coefs:
                        self.cpa_at_time[mu] = [[nan, nan, nan]]
                        self.cpb_at_time[mu] = [[nan, nan, nan]]
                        self.cn_at_time[mu] = [[nan, nan, nan]]
                        self.cf_at_time[mu] = [[nan, nan, nan]]
                        self.ids_at_time[mu] = None
                # Mirror the per-coefficient numpy arrays into the VTK
                # point data of the per-coefficient poly data outputs.
                for mu in self._mu_coefs:
                    self.cpa[mu] = numpy_support.numpy_to_vtk(
                        self.cpa_at_time[mu])
                    self.cpa[mu].SetName('contact_positions_a')
                    self.cpb[mu] = numpy_support.numpy_to_vtk(
                        self.cpb_at_time[mu])
                    self.cpb[mu].SetName('contact_positions_b')
                    self.cn[mu] = numpy_support.numpy_to_vtk(
                        self.cn_at_time[mu])
                    self.cn[mu].SetName('contact_normals')
                    self.cf[mu] = numpy_support.numpy_to_vtk(
                        self.cf_at_time[mu])
                    self.cf[mu].SetName('contact_forces')
                    # field info for vview (should go in point data)
                    self._contact_field[mu].AddArray(self.cpa[mu])
                    self._contact_field[mu].AddArray(self.cpb[mu])
                    self._contact_field[mu].AddArray(self.cn[mu])
                    self._contact_field[mu].AddArray(self.cf[mu])
                    # contact points
                    self._points[mu].SetData(self.cpa[mu])
                    self._output[mu].GetPointData().AddArray(self.cpb[mu])
                    self._output[mu].GetPointData().AddArray(self.cn[mu])
                    self._output[mu].GetPointData().AddArray(self.cf[mu])
                    if self.ids_at_time[mu] is not None:
                        self.ids[mu] = numpy_support.numpy_to_vtk(
                            self.ids_at_time[mu])
                        self.ids[mu].SetName('ids')
                        self._contact_field[mu].AddArray(self.ids[mu])
                        self._output[mu].GetPointData().AddArray(self.ids[mu])
                        # Find the positions of the two bodies involved in
                        # each contact from their ds ids (columns 1 and 2).
                        dsa_ids = numpy.unique(self.ids_at_time[mu][:, 1])
                        dsb_ids = numpy.unique(self.ids_at_time[mu][:, 2])
                        _i, _i, dsa_pos_ids = numpy.intersect1d(
                            self.pos_data[:, 1],
                            dsa_ids, return_indices=True)
                        _i, _i, dsb_pos_ids = numpy.intersect1d(
                            self.pos_data[:, 1],
                            dsb_ids, return_indices=True)
                        # objects a & b translations
                        obj_pos_a = self.pos_data[dsa_pos_ids, 2:5]
                        obj_pos_b = self.pos_data[dsb_pos_ids, 2:5]
                        self._all_objs_pos[mu] = numpy.vstack((obj_pos_a,
                                                               obj_pos_b))
                        self._all_objs_pos_vtk[mu] = numpy_support.numpy_to_vtk(
                            self._all_objs_pos[mu])
                        self._objs_points[mu].SetData(self._all_objs_pos_vtk[mu])
                        self._objs_output[mu].GetPointData().AddArray(self.cn[mu])
                        self._objs_output[mu].GetPointData().AddArray(self.cf[mu])
                    #if dom_imu is not None:
                    #    self.dom_at_time[mu] = self._dom_data[
                    #        dom_imu, 1]
                    #    self.dom[mu] = numpy_support.numpy_to_vtk(
                    #        self.dom_at_time[mu])
                    #    self.dom[mu].SetName('domains')
                    #    self._contact_field[mu].AddArray(self.dom[mu])
        except Exception:
            # Contact decoration is best effort: report but do not fail
            # the whole pipeline request.
            traceback.print_exc()
        return 1

    def SetIO(self, io):
        """Attach a mechanics-IO object and index its time steps."""
        self._io = io
        self._ispos_data = self._io.static_data()
        self._idpos_data = self._io.dynamic_data()
        try:
            self._idom_data = self._io.domains_data()
        except ValueError:
            # Older files have no domain information.
            self._idom_data = None
        self._icf_data = self._io.contact_forces_data()
        self._isolv_data = self._io.solver_data()
        self._ivelo_data = self._io.velocities_data()
        self._spos_data = self._ispos_data[:, :]
        # all times as hdf5 slice
        self._raw_times = self._idpos_data[:, 0]
        # build times steps
        self._times, self._indices = numpy.unique(self._raw_times,
                                                  return_index=True)
        # Average/minimum spacing between stored times.
        dcf = self._times[1:]-self._times[:-1]
        self._avg_timestep = numpy.mean(dcf)
        self._min_timestep = numpy.min(dcf)
        # self._times.sort()
        # self._indices = ?
        # we assume times must be sorted
        # assert all(self._times[i] <= self._times[i+1]
        #           for i in range(len(self._times)-1))
        # if self._with_contact_forces:
        #     assert all(self._cf_times[i] <= self._cf_times[i+1]
        #               for i in range(len(self._cf_times)-1))
        self.Modified()
        return 1

    def SetTime(self, time):
        """Request the pipeline time *time* and update immediately."""
        self.GetOutputInformation(0).Set(
            vtk.vtkStreamingDemandDrivenPipeline.UPDATE_TIME_STEP(),
            time)
        self.timestep = abs(self.time-time)
        self.time = time
        # with a True pipeline: self.Modified()
        # but as the consumers (VView class, export function) are
        # not (yet) vtk filters, the Update is needed here
        self.Update()

    # contact forces provider
    def ContactForcesOn(self):
        """Enable contact-force extraction and build per-mu containers."""
        self._cf_raw_times = self._icf_data[:, 0]
        self._cf_times, self._cf_indices = numpy.unique(self._cf_raw_times,
                                                        return_index=True)
        # Distinct friction coefficients present in the data (column 1).
        self._mu_coefs = numpy.unique(self._icf_data[:, 1],
                                      return_index=False)
        self.cpa_at_time = dict()
        self.cpa = dict()
        self.cpb_at_time = dict()
        self.cpb = dict()
        self.cf_at_time = dict()
        self.cf = dict()
        self.cn_at_time = dict()
        self.cn = dict()
        self.ids_at_time = dict()
        self.ids = dict()
        self.dom_at_time = [dict(), None][self._idom_data is None]
        self.dom = dict()
        self._all_objs_pos = dict()
        self._all_objs_pos_vtk = dict()
        self._points = dict()
        self._contact_field = dict()
        self._output = dict()
        self._objs_points = dict()
        self._objs_output = dict()
        for mu in self._mu_coefs:
            # the contact points
            self._points[mu] = vtk.vtkPoints()
            self._contact_field[mu] = vtk.vtkPointData()
            self._output[mu] = vtk.vtkPolyData()
            self._output[mu].SetPoints(self._points[mu])
            self._output[mu].SetFieldData(self._contact_field[mu])
            # the objects translations
            self._objs_points[mu] = vtk.vtkPoints()
            self._objs_output[mu] = vtk.vtkPolyData()
            self._objs_output[mu].SetPoints(self._objs_points[mu])
        self._with_contact_forces = True
        self.Update()

    def ContactForcesOff(self):
        """Disable contact-force extraction."""
        self._with_contact_forces = False
        self.Update()

    def ExportOn(self):
        """Mark the reader as used by the export path."""
        self._export = True

    def ExportOff(self):
        """Mark the reader as used by the interactive viewer."""
        self._export = False
# Read file and open VTK interaction window
class VView(object):
def __init__(self, io, options, config=None):
self.opts = options
self.config = [config,VViewConfig()][config is None]
self.gui_initialized = False
self.io = io
self.refs = []
self.refs_attrs = []
self.shape = dict()
self.pos = dict()
self.mass = dict()
self.inertia = dict()
self.contact_posa = dict()
self.contact_posb = dict()
self.contact_pos_force = dict()
self.contact_pos_norm = dict()
self.cone = dict()
self.cone_glyph = dict()
self.cmapper = dict()
self.cLUT = dict()
self.cactor = dict()
self.arrow = dict()
self.cylinder = dict()
self.sphere = dict()
self.arrow_glyph = dict()
self.gmapper = dict()
self.gactor = dict()
self.ctransform = dict()
self.cylinder_glyph = dict()
self.clmapper = dict()
self.sphere_glypha = dict()
self.sphere_glyphb = dict()
self.smappera = dict()
self.smapperb = dict()
self.sactora = dict()
self.sactorb = dict()
self.clactor = dict()
self.cell_connectors = dict()
self.times_of_birth = dict()
self.times_of_death = dict()
self.min_time = self.opts.min_time
self.max_time = self.opts.max_time
self.transforms = dict()
self.transformers = dict()
self.offsets = dict()
self.io_reader = IOReader()
self.io_reader.SetIO(io=self.io)
if self.opts.cf_disable:
self.io_reader.ContactForcesOff()
else:
self.io_reader.ContactForcesOn()
if self.opts.export:
self.io_reader.ExportOn()
else:
self.io_reader.ExportOff()
def reload(self):
    """Re-synchronize the viewer with the (possibly regrown) HDF5 file.

    Re-applies the contact-force switch, rewinds the reader to the
    earliest recorded time, re-plugs the per-mu contact filters and
    refreshes the time range and actor visibility.
    """
    if self.opts.cf_disable:
        self.io_reader.ContactForcesOff()
    else:
        # NOTE(review): the symmetric branch in __init__ calls
        # ContactForcesOn() at this point -- confirm whether that call
        # was dropped here.
        # Rewind to the earliest recorded time.  (Fixed: this line
        # previously read ``min(times[:])`` with ``times`` undefined,
        # raising NameError; ``self.io_reader._times`` is the array
        # used everywhere else in this method.)
        self.io_reader._time = min(self.io_reader._times[:])
        for mu in self.io_reader._mu_coefs:
            self.contact_posa[mu].SetInputData(self.io_reader._output[mu])
            self.contact_posa[mu].Update()
            self.contact_posb[mu].SetInputData(self.io_reader._output[mu])
            self.contact_posb[mu].Update()
            self.contact_pos_force[mu].Update()
            self.contact_pos_norm[mu].Update()
        self.min_time = self.io_reader._times[0]
        self.max_time = self.io_reader._times[-1]
        self.set_dynamic_actors_visibility(self.time0)
def init_contact_pos(self, mu):
    """Build the filters that extract contact data for one friction
    coefficient ``mu``.

    Two vtkDataObjectToDataSetFilter instances turn the packed field
    data into point sets (contact side A and side B); two
    vtkFieldDataToAttributeDataFilter instances attach the contact
    forces and contact normals as point-data vectors on the side-A
    points.
    """
    posa = vtk.vtkDataObjectToDataSetFilter()
    posb = vtk.vtkDataObjectToDataSetFilter()
    add_compatiblity_methods(posa)
    add_compatiblity_methods(posb)
    force = vtk.vtkFieldDataToAttributeDataFilter()
    norm = vtk.vtkFieldDataToAttributeDataFilter()
    self.contact_posa[mu] = posa
    self.contact_posb[mu] = posb
    self.contact_pos_force[mu] = force
    self.contact_pos_norm[mu] = norm
    # Point extraction: one filter per contact side, three components each.
    for point_filter, field in ((posa, 'contact_positions_a'),
                                (posb, 'contact_positions_b')):
        point_filter.SetDataSetTypeToPolyData()
        for component in range(3):
            point_filter.SetPointComponent(component, field, component)
    # Vector attributes (forces / normals) are attached to the side-A points.
    for vector_filter, field in ((force, 'contact_forces'),
                                 (norm, 'contact_normals')):
        vector_filter.SetInputConnection(posa.GetOutputPort())
        vector_filter.SetInputFieldToDataObjectField()
        vector_filter.SetOutputAttributeDataToPointData()
        for component in range(3):
            vector_filter.SetVectorComponent(component, field, component)
    # if self.cf_prov.dom_at_time is not None:
    #     self.contact_pos_norm[mu].SetScalarComponent(0, "domains", 0)
def init_cf_sources(self, mu, transform):
    """Build the VTK pipeline rendering contact data for one friction
    coefficient ``mu``: friction cones, force arrows, normal cylinders
    and the two contact-point sphere clouds.

    ``transform`` orients the cone glyph source (supplied by the caller).
    Assumes init_contact_pos(mu) has already created the extraction
    filters used below.
    """
    # Feed the reader outputs for this mu into the global collectors.
    self.cf_collector.AddInputData(self.io_reader._output[mu])
    self.cf_collector.AddInputData(self.io_reader._objs_output[mu])
    # Wire the extraction filters to the reader output and update them.
    self.contact_posa[mu].SetInputData(self.io_reader._output[mu])
    self.contact_posa[mu].Update()
    self.contact_posb[mu].SetInputData(self.io_reader._output[mu])
    self.contact_posb[mu].Update()
    self.contact_pos_force[mu].Update()
    self.contact_pos_norm[mu].Update()
    # Friction cone glyph: the cone radius encodes mu itself.
    self.cone[mu] = vtk.vtkConeSource()
    self.cone[mu].SetResolution(40)
    self.cone[mu].SetRadius(mu)  # one coef!!
    self.cone_glyph[mu] = vtk.vtkGlyph3D()
    self.cone_glyph[mu].SetSourceTransform(transform)
    self.cone_glyph[mu].SetInputConnection(self.contact_pos_norm[mu].GetOutputPort())
    self.cone_glyph[mu].SetSourceConnection(self.cone[mu].GetOutputPort())
    # _scale_fact is remembered on the glyph so the scale-slider
    # callback (make_scale_observer) can rescale relative to it.
    self.cone_glyph[mu]._scale_fact = self.opts.normalcone_ratio
    self.cone_glyph[mu].SetScaleFactor(
        self.cone_glyph[mu]._scale_fact *self.opts.cf_scale_factor)
    self.cone_glyph[mu].SetVectorModeToUseVector()
    self.cone_glyph[mu].SetInputArrayToProcess(1, 0, 0, 0, 'contact_normals')
    self.cone_glyph[mu].OrientOn()
    # Don't allow scalar to affect size of glyph
    self.cone_glyph[mu].SetScaleModeToDataScalingOff()
    # Allow scalar to affect color of glyph
    self.cone_glyph[mu].SetColorModeToColorByScalar()
    self.cmapper[mu] = vtk.vtkPolyDataMapper()
    if not self.opts.imr:
        self.cmapper[mu].ImmediateModeRenderingOff()
    self.cmapper[mu].SetInputConnection(self.cone_glyph[mu].GetOutputPort())
    # Random color map, up to 256 domains
    self.cLUT[mu] = vtk.vtkLookupTable()
    self.cLUT[mu].SetNumberOfColors(256)
    self.cLUT[mu].Build()
    for i in range(256):
        self.cLUT[mu].SetTableValue(i, *random_color())
    self.cLUT[mu].SetTableRange(0, 255)
    # By default don't allow scalars to have an effect
    self.cmapper[mu].ScalarVisibilityOff()
    # If domain information is available, we turn on the color
    # table and turn on scalars
    if self.io_reader.dom_at_time is not None:
        self.cmapper[mu].SetLookupTable(self.cLUT[mu])
        self.cmapper[mu].SetColorModeToMapScalars()
        self.cmapper[mu].SetScalarModeToUsePointData()
        self.cmapper[mu].SetScalarRange(0,255)
        self.cmapper[mu].ScalarVisibilityOn()
    self.cactor[mu] = vtk.vtkActor()
    self.cactor[mu].GetProperty().SetOpacity(self.config.get('contact_opacity', 0.4))
    self.cactor[mu].GetProperty().SetColor(0, 0, 1)
    self.cactor[mu].SetMapper(self.cmapper[mu])
    # Sources for the force arrows, normal cylinders and contact spheres.
    self.arrow[mu] = vtk.vtkArrowSource()
    self.arrow[mu].SetTipResolution(40)
    self.arrow[mu].SetShaftResolution(40)
    self.cylinder[mu] = vtk.vtkCylinderSource()
    self.cylinder[mu].SetRadius(.01)
    self.cylinder[mu].SetHeight(1)
    self.sphere[mu] = vtk.vtkSphereSource()
    # vtkGlyph3D scaling recipe (for reference):
    # 1. scale = (scalar value of that particular data index);
    # 2. denominator = Range[1] - Range[0];
    # 3. scale = (scale < Range[0] ? Range[0] : (scale > Range[1] ? Range[1] : scale));
    # 4. scale = (scale - Range[0]) / denominator;
    # 5. scale *= scaleFactor;
    # Force arrows: oriented and scaled by the contact-force vectors.
    self.arrow_glyph[mu] = vtk.vtkGlyph3D()
    self.arrow_glyph[mu].SetInputConnection(
        self.contact_pos_force[mu].GetOutputPort())
    self.arrow_glyph[mu].SetSourceConnection(self.arrow[mu].GetOutputPort())
    self.arrow_glyph[mu].ScalingOn()
    self.arrow_glyph[mu].SetScaleModeToScaleByVector()
    self.arrow_glyph[mu].SetRange(0, .01)
    self.arrow_glyph[mu].ClampingOn()
    self.arrow_glyph[mu]._scale_fact = 5
    self.arrow_glyph[mu].SetScaleFactor(
        self.arrow_glyph[mu]._scale_fact * self.opts.cf_scale_factor)
    self.arrow_glyph[mu].SetVectorModeToUseVector()
    self.arrow_glyph[mu].SetInputArrayToProcess(1, 0, 0, 0, 'contact_forces')
    self.arrow_glyph[mu].SetInputArrayToProcess(3, 0, 0, 0, 'contact_forces')
    self.arrow_glyph[mu].OrientOn()
    self.gmapper[mu] = vtk.vtkPolyDataMapper()
    if not self.opts.imr:
        self.gmapper[mu].ImmediateModeRenderingOff()
    self.gmapper[mu].SetInputConnection(self.arrow_glyph[mu].GetOutputPort())
    self.gmapper[mu].SetScalarModeToUsePointFieldData()
    self.gmapper[mu].SetColorModeToMapScalars()
    self.gmapper[mu].ScalarVisibilityOn()
    self.gmapper[mu].SelectColorArray('contact_forces')
    # gmapper.SetScalarRange(contact_pos_force.GetOutput().GetPointData().GetArray('contact_forces').GetRange())
    self.gactor[mu] = vtk.vtkActor()
    self.gactor[mu].SetMapper(self.gmapper[mu])
    # Normal cylinders: the cylinder source points along +y, so shift and
    # rotate it to align with the glyph vector direction.
    self.ctransform[mu] = vtk.vtkTransform()
    self.ctransform[mu].Translate(-0.5, 0, 0)
    self.ctransform[mu].RotateWXYZ(90, 0, 0, 1)
    self.cylinder_glyph[mu] = vtk.vtkGlyph3D()
    self.cylinder_glyph[mu].SetSourceTransform(self.ctransform[mu])
    self.cylinder_glyph[mu].SetInputConnection(
        self.contact_pos_norm[mu].GetOutputPort())
    self.cylinder_glyph[mu].SetSourceConnection(self.cylinder[mu].GetOutputPort())
    self.cylinder_glyph[mu].SetVectorModeToUseVector()
    self.cylinder_glyph[mu].SetInputArrayToProcess(1, 0, 0, 0, 'contact_normals')
    self.cylinder_glyph[mu].OrientOn()
    self.cylinder_glyph[mu]._scale_fact = self.opts.normalcone_ratio
    self.cylinder_glyph[mu].SetScaleFactor(
        self.cylinder_glyph[mu]._scale_fact * self.opts.cf_scale_factor)
    self.clmapper[mu] = vtk.vtkPolyDataMapper()
    if not self.opts.imr:
        self.clmapper[mu].ImmediateModeRenderingOff()
    self.clmapper[mu].SetInputConnection(self.cylinder_glyph[mu].GetOutputPort())
    # Contact point clouds: one small sphere per contact point on each side.
    self.sphere_glypha[mu] = vtk.vtkGlyph3D()
    self.sphere_glypha[mu].SetInputConnection(self.contact_posa[mu].GetOutputPort())
    self.sphere_glypha[mu].SetSourceConnection(self.sphere[mu].GetOutputPort())
    self.sphere_glypha[mu].ScalingOn()
    # self.sphere_glypha[mu].SetScaleModeToScaleByVector()
    # self.sphere_glypha[mu].SetRange(-0.5, 2)
    # self.sphere_glypha[mu].ClampingOn()
    self.sphere_glypha[mu]._scale_fact = .1 * self.opts.normalcone_ratio
    self.sphere_glypha[mu].SetScaleFactor(
        self.sphere_glypha[mu]._scale_fact * self.opts.cf_scale_factor)
    # self.sphere_glypha[mu].SetVectorModeToUseVector()
    self.sphere_glyphb[mu] = vtk.vtkGlyph3D()
    self.sphere_glyphb[mu].SetInputConnection(self.contact_posb[mu].GetOutputPort())
    self.sphere_glyphb[mu].SetSourceConnection(self.sphere[mu].GetOutputPort())
    self.sphere_glyphb[mu].ScalingOn()
    # self.sphere_glyphb[mu].SetScaleModeToScaleByVector()
    # self.sphere_glyphb[mu].SetRange(-0.5, 2)
    # self.sphere_glyphb[mu].ClampingOn()
    self.sphere_glyphb[mu]._scale_fact = .1 * self.opts.normalcone_ratio
    self.sphere_glyphb[mu].SetScaleFactor(
        self.sphere_glyphb[mu]._scale_fact * self.opts.cf_scale_factor)
    # self.sphere_glyphb[mu].SetVectorModeToUseVector()
    # self.sphere_glyphb[mu].SetInputArrayToProcess(1, 0, 0, 0, 'contact_normals')
    # self.sphere_glyph.OrientOn()
    self.smappera[mu] = vtk.vtkPolyDataMapper()
    if not self.opts.imr:
        self.smappera[mu].ImmediateModeRenderingOff()
    self.smappera[mu].SetInputConnection(self.sphere_glypha[mu].GetOutputPort())
    self.smapperb[mu] = vtk.vtkPolyDataMapper()
    if not self.opts.imr:
        self.smapperb[mu].ImmediateModeRenderingOff()
    self.smapperb[mu].SetInputConnection(self.sphere_glyphb[mu].GetOutputPort())
    # self.cmapper.SetScalarModeToUsePointFieldData()
    # self.cmapper.SetColorModeToMapScalars()
    # self.cmapper.ScalarVisibilityOn()
    # self.cmapper.SelectColorArray('contact_normals')
    # self.gmapper.SetScalarRange(contact_pos_force.GetOutput().GetPointData().GetArray('contact_forces').GetRange())
    # Actors: normals in red, contact points side A red / side B green.
    self.clactor[mu] = vtk.vtkActor()
    # cactor.GetProperty().SetOpacity(0.4)
    self.clactor[mu].GetProperty().SetColor(1, 0, 0)
    self.clactor[mu].SetMapper(self.clmapper[mu])
    self.sactora[mu] = vtk.vtkActor()
    self.sactora[mu].GetProperty().SetColor(1, 0, 0)
    self.sactora[mu].SetMapper(self.smappera[mu])
    self.sactorb[mu] = vtk.vtkActor()
    self.sactorb[mu].GetProperty().SetColor(0, 1, 0)
    self.sactorb[mu].SetMapper(self.smapperb[mu])
def init_shape(self, shape_name):
    """Create the VTK reader/mapper pipeline for one shape of the file.

    Dispatches on the shape's 'type' attribute: mesh files (vtp/stl),
    CAD data (brep, stp/step/igs/iges), heightmaps, convex point sets
    and analytic primitives.  Results are recorded in ``self.readers``,
    ``self.datasets`` and ``self.mappers``; mappers are stored as
    one-shot generators and realized later in ``init_shapes``.
    """
    shape_type = (self.io.shapes()[shape_name].attrs['type'])
    try:
        # work-around h5py unicode bug
        # https://github.com/h5py/h5py/issues/379
        shape_type = shape_type.decode('utf-8')
    except AttributeError:
        pass
    # NOTE(review): `scale` is read here but never used in this method
    # (scaling is applied per contactor in init_contactor).
    scale = None
    if 'scale' in self.io.shapes()[shape_name].attrs:
        scale = self.io.shapes()[shape_name].attrs['scale']
    ConvexSource = makeConvexSourceClass()
    if shape_type in ['vtp', 'stl']:
        # Mesh file: dump the stored bytes to a temp file and read it back
        # with the matching VTK reader.
        with io_tmpfile() as tmpf:
            tmpf[0].write(str(self.io.shapes()[shape_name][:][0]))
            tmpf[0].flush()
            reader = self.vtk_reader[shape_type]()
            reader.SetFileName(tmpf[1])
            reader.Update()
            self.readers[shape_name] = reader
            # a try for smooth rendering but it does not work here
            normals = vtk.vtkPolyDataNormals()
            normals.SetInputConnection(reader.GetOutputPort())
            normals.SetFeatureAngle(60.0)
            mapper = vtk.vtkDataSetMapper()
            add_compatiblity_methods(mapper)
            mapper.SetInputConnection(normals.GetOutputPort())
            mapper.ScalarVisibilityOff()
            # delayed (see the one in brep)
            # note: "lambda : mapper" fails (dynamic scope)
            # and (x for x in [mapper]) is ok.
            self.mappers[shape_name] = (x for x in [mapper])
    elif shape_type in ['brep']:
        # try to find an associated shape
        if 'associated_shape' in self.io.shapes()[shape_name].attrs:
            associated_shape = \
                self.io.shapes()[shape_name].\
                attrs['associated_shape']
            # delayed
            # NOTE(review): `mappers` is not `self.mappers`; unless a
            # module-level `mappers` exists this raises NameError when
            # reached -- confirm against the module top level.
            self.mappers[shape_name] = (x for x in
                                        [mappers[associated_shape]()])
        else:
            if 'brep' in self.io.shapes()[shape_name].attrs:
                brep = self.io.shapes()[shape_name].attrs['brep']
            else:
                brep = shape_name
            reader = brep_reader(str(self.io.shapes()[brep][:][0]),
                                 self.io.shapes()[brep].attrs['occ_indx'])
            self.readers[shape_name] = reader
            mapper = vtk.vtkDataSetMapper()
            add_compatiblity_methods(mapper)
            mapper.SetInputConnection(reader.GetOutputPort())
            self.mappers[shape_name] = (x for x in [mapper])
    elif shape_type in ['stp', 'step', 'igs', 'iges']:
        # try to find an associated shape
        if 'associated_shape' in self.io.shapes()[shape_name].attrs:
            associated_shape = \
                self.io.shapes()[shape_name].\
                attrs['associated_shape']
            # delayed
            # NOTE(review): same bare `mappers` name as in the brep
            # branch above -- confirm it exists at module level.
            self.mappers[shape_name] = (
                x for x in [mappers[associated_shape]()])
        elif shape_type in ['stp', 'step', 'igs', 'iges']:
            # NOTE(review): this condition duplicates the enclosing elif
            # and is therefore always true here; a plain `else` would be
            # equivalent.
            with io_tmpfile(
                    debug=True,
                    suffix='.{0}'.format(shape_type),
                    contents=str(self.io.shapes()[shape_name][:][0])) as tmpf:
                shape = occ_load_file(tmpf[1])
                # whole shape
                reader = topods_shape_reader(shape)
                self.readers[shape_name] = reader
                mapper = vtk.vtkDataSetMapper()
                add_compatiblity_methods(mapper)
                mapper.SetInputConnection(reader.GetOutputPort())
                self.mappers[shape_name] = (x for x in [mapper])
                # subparts: one reader/mapper per face and per edge,
                # indexed by ('Face'|'Edge', shape_name, i).
                faces, edges = occ_topo_list(shape)
                for i, f in enumerate(faces):
                    shape_indx = ('Face', shape_name, i)
                    reader = topods_shape_reader(f)
                    self.readers[shape_indx] = reader
                    mapper = vtk.vtkDataSetMapper()
                    add_compatiblity_methods(mapper)
                    mapper.SetInputConnection(reader.GetOutputPort())
                    self.mappers[shape_indx] = (x for x in [mapper])
                for i, e in enumerate(edges):
                    shape_indx = ('Edge', shape_name, i)
                    reader = topods_shape_reader(e)
                    self.readers[shape_indx] = reader
                    mapper = vtk.vtkDataSetMapper()
                    add_compatiblity_methods(mapper)
                    mapper.SetInputConnection(reader.GetOutputPort())
                    self.mappers[shape_indx] = (x for x in [mapper])
    elif shape_type == 'heightmap':
        # Triangulated surface from a 2D array of heights; extents come
        # from the 'rect' attribute plus the data's vertical span.
        points = vtk.vtkPoints()
        shape = self.io.shapes()[shape_name]
        extents = list(shape.attrs['rect']) + [numpy.max(shape) - numpy.min(shape)]
        # Data points are adjusted to center tangentially, but
        # vertical position is left alone; i.e., supports
        # non-zero-centered data. User must use contactor
        # translation to compensate if desired, or simply adjust
        # data itself to desired origin.
        for x,d in enumerate(shape):
            for y,v in enumerate(d):
                points.InsertNextPoint(
                    float(x) / (shape.shape[0]-1) * extents[0] - extents[0]/2,
                    float(y) / (shape.shape[1]-1) * extents[1] - extents[1]/2,
                    v)
        polydata = vtk.vtkPolyData()
        polydata.SetPoints(points)
        delaunay = vtk.vtkDelaunay2D()
        delaunay.SetInputData(polydata)
        delaunay.Update()
        self.datasets[shape_name] = polydata
        mapper = vtk.vtkPolyDataMapper()
        if not self.opts.imr:
            mapper.ImmediateModeRenderingOff()
        mapper.SetInputConnection(delaunay.GetOutputPort())
        add_compatiblity_methods(mapper)
        # NOTE(review): the None assignment is immediately overwritten
        # by the generator on the next line.
        self.mappers[shape_name] = None
        self.mappers[shape_name] = (x for x in [mapper])
    elif shape_type == 'convex':
        # a convex shape
        points = vtk.vtkPoints()
        convex = vtk.vtkConvexPointSet()
        data = self.io.shapes()[shape_name][:]
        if self.io.dimension() == 3:
            convex.GetPointIds().SetNumberOfIds(data.shape[0])
            for id_, vertice in enumerate(data):
                points.InsertNextPoint(vertice[0], vertice[1], vertice[2])
                convex.GetPointIds().SetId(id_, id_)
        elif self.io.dimension() == 2:
            # in 2D the polygon is extruded into a thin prism: every
            # vertex is duplicated at z = -0.05 and z = +0.05.
            number_of_vertices = data.shape[0]
            convex.GetPointIds().SetNumberOfIds(data.shape[0]*2)
            for id_, vertice in enumerate(data):
                points.InsertNextPoint(vertice[0], vertice[1], -0.05)
                convex.GetPointIds().SetId(id_, id_)
                points.InsertNextPoint(vertice[0], vertice[1], 0.05)
                convex.GetPointIds().SetId(id_+number_of_vertices, id_+number_of_vertices)
        source = ConvexSource(convex, points)
        self.readers[shape_name] = source
        # not a source!
        self.datasets[shape_name] = source.GetUnstructuredGridOutput()
        mapper = vtk.vtkDataSetMapper()
        add_compatiblity_methods(mapper)
        mapper.SetInputData(source.GetUnstructuredGridOutput())
        self.mappers[shape_name] = (x for x in [mapper])
    else:
        assert shape_type == 'primitive'
        primitive = self.io.shapes()[shape_name].attrs['primitive']
        attrs = self.io.shapes()[shape_name][:][0]
        if primitive == 'Sphere':
            source = vtk.vtkSphereSource()
            source.SetRadius(attrs[0])
            source.SetThetaResolution(15)
            source.SetPhiResolution(15)
        elif primitive == 'Cone':
            source = vtk.vtkConeSource()
            source.SetRadius(attrs[0])
            source.SetHeight(attrs[1])
            source.SetResolution(15)
            source.SetDirection(0, 1, 0) # needed
        elif primitive == 'Cylinder':
            source = vtk.vtkCylinderSource()
            source.SetResolution(15)
            source.SetRadius(attrs[0])
            source.SetHeight(attrs[1])
            # source.SetDirection(0,1,0)
        elif primitive == 'Box':
            source = vtk.vtkCubeSource()
            source.SetXLength(attrs[0])
            source.SetYLength(attrs[1])
            source.SetZLength(attrs[2])
        elif primitive == 'Capsule':
            # capsule = two end spheres + a cylinder grouped in a
            # multiblock dataset
            sphere1 = vtk.vtkSphereSource()
            sphere1.SetRadius(attrs[0])
            sphere1.SetCenter(0, attrs[1] / 2, 0)
            sphere1.SetThetaResolution(15)
            sphere1.SetPhiResolution(15)
            sphere1.Update()
            sphere2 = vtk.vtkSphereSource()
            sphere2.SetRadius(attrs[0])
            sphere2.SetCenter(0, -attrs[1] / 2, 0)
            sphere2.SetThetaResolution(15)
            sphere2.SetPhiResolution(15)
            sphere2.Update()
            cylinder = vtk.vtkCylinderSource()
            cylinder.SetRadius(attrs[0])
            cylinder.SetHeight(attrs[1])
            cylinder.SetResolution(15)
            cylinder.Update()
            data = vtk.vtkMultiBlockDataSet()
            data.SetNumberOfBlocks(3)
            data.SetBlock(0, sphere1.GetOutput())
            data.SetBlock(1, sphere2.GetOutput())
            data.SetBlock(2, cylinder.GetOutput())
            source = vtk.vtkMultiBlockDataGroupFilter()
            add_compatiblity_methods(source)
            source.AddInputData(data)
        elif primitive == 'Disk':
            source = vtk.vtkCylinderSource()
            source.SetResolution(200)
            source.SetRadius(attrs[0])
            source.SetHeight(0.1)
        elif primitive == 'Box2d':
            source = vtk.vtkCubeSource()
            source.SetXLength(attrs[0])
            source.SetYLength(attrs[1])
            source.SetZLength(0.1)
        self.readers[shape_name] = source
        # composite mapper: handles the multiblock Capsule as well as
        # the simple poly-data primitives
        mapper = vtk.vtkCompositePolyDataMapper()
        if not self.opts.imr:
            mapper.ImmediateModeRenderingOff()
        mapper.SetInputConnection(source.GetOutputPort())
        self.mappers[shape_name] = (x for x in [mapper])
        if self.opts.with_edges:
            # a second mapper used for wireframe/edge rendering
            mapper_edge = vtk.vtkCompositePolyDataMapper()
            if not self.opts.imr:
                mapper_edge.ImmediateModeRenderingOff()
            mapper_edge.SetInputConnection(source.GetOutputPort())
            self.mappers_edges[shape_name] = (y for y in [mapper_edge])
def init_shapes(self):
for shape_name in self.io.shapes():
self.init_shape(shape_name)
for shape_name in self.mappers.keys():
if shape_name not in self.unfrozen_mappers:
self.unfrozen_mappers[shape_name] = next(self.mappers[shape_name])
if self.opts.with_edges:
for shape_name in self.mappers_edges.keys():
if shape_name not in self.unfrozen_mappers_edges:
self.unfrozen_mappers_edges[shape_name] = next(self.mappers_edges[shape_name])
def init_contactor(self, contactor_instance_name, instance, instid):
    """Create actor(s), transform and cell connector for one contactor
    of an instance.

    Parameters
    ----------
    contactor_instance_name : str
        Name of the contactor group inside ``instance``.
    instance : h5py-like group
        The owning instance (provides attrs and the contactor groups).
    instid : int
        The instance id; keys the per-instance tables
        (transforms, offsets, dynamic/static actors, cell connectors).
    """
    contactor = instance[contactor_instance_name]
    contact_shape_indx = None
    # Backward compatibility: older files stored the shape reference
    # under 'name' instead of 'shape_name'.
    if 'shape_name' not in contactor.attrs:
        print("Warning: old format: ctr.name must be ctr.shape_name for contact {0}".format(contactor_instance_name))
        shape_attr_name='name'
    else:
        shape_attr_name='shape_name'
    if 'group' in contactor.attrs:
        collision_group = contactor.attrs['group']
    else:
        collision_group = -1
    # The shape index is either a (type, shape_name, contact_index)
    # tuple (CAD subpart) or the plain shape name.
    if 'type' in contactor.attrs:
        contact_type = contactor.attrs['type']
        contact_index = contactor.attrs['contact_index']
        contact_shape_indx = (contact_type, contactor.attrs[shape_attr_name],
                              contact_index)
    else:
        contact_shape_indx = contactor.attrs[shape_attr_name]
        try:
            # work-around h5py unicode bug
            # https://github.com/h5py/h5py/issues/379
            contact_shape_indx = contact_shape_indx.decode('utf-8')
        except AttributeError:
            pass
    # Actors are only needed for interactive display; in export /
    # global-filter mode the geometry goes through the collectors instead.
    if not (self.opts.global_filter or self.opts.export):
        actor = vtk.vtkActor()
        if self.opts.with_edges:
            actor_edge = vtk.vtkActor()
        if instance.attrs.get('mass', 0) > 0:
            # objects that may move
            self.dynamic_actors[instid].append((actor, contact_shape_indx,
                                                collision_group))
            actor.GetProperty().SetOpacity(
                self.config.get('dynamic_opacity', 0.7))
            actor.GetProperty().SetColor(
                self.config.get('dynamic_bodies_color', [0.3,0.3,0.3]))
            if self.opts.with_edges:
                self.dynamic_actors[instid].append((actor_edge, contact_shape_indx,
                                                    collision_group))
                actor_edge.GetProperty().SetOpacity(
                    self.config.get('dynamic_opacity', 1.0))
                actor_edge.GetProperty().SetRepresentationToWireframe()
        else:
            # objects that are not supposed to move
            self.static_actors[instid].append((actor, contact_shape_indx,
                                               collision_group))
            actor.GetProperty().SetOpacity(
                self.config.get('static_opacity', 1.0))
            actor.GetProperty().SetColor(
                self.config.get('static_bodies_color', [0.5,0.5,0.5]))
        if self.opts.with_random_color :
            actor.GetProperty().SetColor(random_color())
            if self.opts.with_edges:
                actor_edge.GetProperty().SetColor(random_color())
        actor.SetMapper(self.unfrozen_mappers[contact_shape_indx])
        if self.opts.with_edges:
            actor_edge.SetMapper(self.unfrozen_mappers_edges[contact_shape_indx])
    if not (self.opts.global_filter or self.opts.export):
        self.renderer.AddActor(actor)
        if self.opts.with_edges:
            self.renderer.AddActor(actor_edge)
    # One transform per contactor; it is updated at each time step by
    # set_position_i, through self.transforms[instid].
    transform = vtk.vtkTransform()
    transformer = vtk.vtkTransformFilter()
    if contact_shape_indx in self.readers:
        transformer.SetInputConnection(
            self.readers[contact_shape_indx].GetOutputPort())
    else:
        transformer.SetInputData(self.datasets[contact_shape_indx])
    if isinstance(contact_shape_indx, tuple):
        contact_shape_name = contact_shape_indx[1]
    else:
        contact_shape_name = contact_shape_indx
    if 'scale' in self.io.shapes()[contact_shape_name].attrs:
        # chain a constant scale in front of the per-step transform
        scale = self.io.shapes()[contact_shape_name].attrs['scale']
        scale_transform = vtk.vtkTransform()
        scale_transform.Scale(scale, scale, scale)
        scale_transform.SetInput(transform)
        transformer.SetTransform(scale_transform)
        if not (self.opts.global_filter or self.opts.export):
            actor.SetUserTransform(scale_transform)
            if self.opts.with_edges:
                actor_edge.SetUserTransform(scale_transform)
    else:
        transformer.SetTransform(transform)
        if not (self.opts.global_filter or self.opts.export):
            actor.SetUserTransform(transform)
            if self.opts.with_edges:
                actor_edge.SetUserTransform(transform)
    self.transformers[contact_shape_indx] = transformer
    self.transforms[instid].append(transform)
    if 'center_of_mass' in instance.attrs:
        center_of_mass = instance.\
            attrs['center_of_mass'].astype(float)
    else:
        center_of_mass = [0., 0., 0.]
    offset_orientation= contactor.attrs['orientation'].astype(float)
    # for disk, we change the offset since cylinder source are directed along the y axis by default
    # since the disk shapemis invariant with respect to the rotation w.r.t to z-axis
    # we propose to erase it.
    try:
        if self.io.shapes()[contact_shape_name].attrs['primitive'] == 'Disk':
            offset_orientation = [math.cos(pi/4.0), math.sin(pi/4.0), 0., 0.]
    except:
        pass
    # offset = (translation relative to center of mass, orientation)
    self.offsets[instid].append(
        (numpy.subtract(contactor.attrs['translation'].astype(float),
                        center_of_mass),
         offset_orientation))
    # The cell connector decorates the transformed geometry with
    # per-instance data arrays (filled in by build_set_functions).
    self.cell_connectors[instid] = CellConnector(
        instid,
        data_names=['instance', 'translation',
                    'velocity', 'kinetic_energy'],
        data_sizes=[1, 3, 6, 1])
    self.cell_connectors[instid].SetInputConnection(
        transformer.GetOutputPort())
    self.objects_collector.AddInputConnection(
        self.cell_connectors[instid].GetOutputPort())
    self.cell_connectors[instid].Update()
def init_instance(self, instance_name):
instance = self.io.instances()[instance_name]
instid = int(instance.attrs['id'])
self.transforms[instid] = []
self.offsets[instid] = []
if 'time_of_birth' in instance.attrs:
self.times_of_birth[instid] = instance.attrs['time_of_birth']
if 'time_of_death' in instance.attrs:
self.times_of_death[instid] = instance.attrs['time_of_death']
if 'mass' in instance.attrs:
# a dynamic instance
self.mass[instid] = instance.attrs['id']
if 'inertia' in instance.attrs:
inertia = instance.attrs['inertia']
if self.io.dimension() ==3 :
if len(inertia.shape) > 1 and inertia.shape[0] == inertia.shape[1] == 3:
self.inertia[instid] = inertia
else:
self.inertia[instid] = numpy.zeros((3, 3))
self.inertia[instid][0, 0] = inertia[0]
self.inertia[instid][1, 1] = inertia[1]
self.inertia[instid][2, 2] = inertia[2]
elif self.io.dimension() ==2 :
self.inertia[instid] = inertia
else:
if self.io.dimension() ==3 :
self.inertia[instid] = numpy.eye(3)
elif self.io.dimension() ==2 :
self.inertia[instid] = 1.0
else:
pass
if instid >= 0:
self.dynamic_actors[instid] = list()
else:
self.static_actors[instid] = list()
for contactor_instance_name in instance:
self.init_contactor(contactor_instance_name, instance, instid)
def init_instances(self):
for instance_name in self.io.instances():
self.init_instance(instance_name)
# this sets the position for all transforms associated to an instance
def set_position_i(self, instance, q0, q1, q2, q3, q4, q5, q6):
    """Apply one position/orientation sample to every transform of an
    instance.

    (q0, q1, q2) is the translation, (q3, q4, q5, q6) the unit
    quaternion.  Each contactor offset (translation, orientation) is
    composed with the instance pose.  Samples containing NaN/inf (the
    startup placeholder) are reported and skipped.
    """
    coords = [q0, q1, q2, q3, q4, q5, q6]
    # All objects are set to a nan position at startup, so they are
    # invisible until real data arrives.
    if numpy.any(numpy.isnan(coords)) or numpy.any(numpy.isinf(coords)):
        print('Bad position for object number', int(instance),' :', q0, q1, q2, q3, q4, q5, q6)
        return
    orientation = Quaternion((q3, q4, q5, q6))
    for transform, offset in zip(self.transforms[instance],
                                 self.offsets[instance]):
        shift = orientation.rotate(offset[0])
        rotation = orientation * Quaternion(offset[1])
        transform.Identity()
        transform.Translate(q0 + shift[0], q1 + shift[1], q2 + shift[2])
        axis, angle = rotation.axisAngle()
        transform.RotateWXYZ(angle * 180. / pi,
                             axis[0],
                             axis[1],
                             axis[2])
def set_position(self, data):
self.set_position_v(data[:, 1],
data[:, 2],
data[:, 3],
data[:, 4],
data[:, 5],
data[:, 6],
data[:, 7],
data[:, 8])
def build_set_functions(self, cc=None):
    """Build the vectorized setter functions used at each time step.

    Parameters
    ----------
    cc : dict, optional
        instance id -> CellConnector map; defaults to
        ``self.cell_connectors``.  The closures below capture it, so the
        vectorized setters write into whatever map was supplied here.
    """
    if cc is None: cc = self.cell_connectors
    # the numpy vectorization is ok on column vectors for each args
    self.set_position_v = numpy.vectorize(self.set_position_i)
    # here the numpy vectorization is used with a column vector and a
    # scalar for the time arg
    self.set_visibility_v = numpy.vectorize(self.set_dynamic_instance_visibility)
    def set_velocity(instance, v0, v1, v2, v3, v4, v5):
        # Store the 6-dof velocity and derive the kinetic energy
        # 1/2 (m v.v + w . I w) from self.mass / self.inertia.
        if instance in cc:
            cc[instance]._datas[2][:] = [v0, v1, v2, v3, v4, v5]
            cc[instance]._datas[3][:] = \
                0.5*(self.mass[instance]*(v0*v0+v1*v1+v2*v2) +
                     numpy.dot([v3, v4, v5],
                               numpy.dot(self.inertia[instance],
                                         [v3, v4, v5])))
    self.set_velocity_v = numpy.vectorize(set_velocity)
    def set_translation(instance, x0, x1, x2 ):
        # Store the instance translation in the connector data.
        if instance in cc:
            cc[instance]._datas[1][:] = [x0, x1, x2]
    self.set_translation_v = numpy.vectorize(set_translation)
    def set_instance(instance):
        # Tag the connector cells with the instance id.
        if instance in cc:
            cc[instance]._datas[0][:] = [instance]
    self.set_instance_v = numpy.vectorize(set_instance)
# set visibility for all actors associated to a dynamic instance
def set_dynamic_instance_visibility(self, instance, time):
    """Show or hide the actors of one dynamic instance at ``time``.

    An instance is visible between its time of birth and time of death.
    When the visibility mode is 'avatars' (resp. 'contactors') and the
    instance owns an avatar (an actor with collision group -1), only
    the avatar (resp. only the contactors) is shown; mode 'all' shows
    everything.

    (Fixed: the mode comparisons below previously read a bare
    ``visible_mode`` name that was never defined locally, raising
    NameError for any instance with an avatar; the value lives in
    ``self.opts.visible_mode``, as the first test already used.)
    """
    tob = self.times_of_birth.get(instance, -1)
    tod = self.times_of_death.get(instance, infinity)
    visible_mode = self.opts.visible_mode
    has_avatar = False
    if visible_mode == 'avatars' or visible_mode == 'contactors':
        for actor, index, group in self.dynamic_actors[instance]:
            if group == -1:
                has_avatar = True
                break
    if tob <= time <= tod:
        for actor, index, group in self.dynamic_actors[instance]:
            if not has_avatar or visible_mode == 'all':
                actor.VisibilityOn()
            elif visible_mode == 'avatars' and group == -1 and has_avatar:
                actor.VisibilityOn()
            elif visible_mode == 'contactors' and group != -1 and has_avatar:
                actor.VisibilityOn()
            else:
                actor.VisibilityOff()
    else:
        # outside the instance's lifetime: hide everything
        for actor, index, group in self.dynamic_actors[instance]:
            actor.VisibilityOff()
def set_dynamic_actors_visibility(self, time):
self.set_visibility_v(list(self.dynamic_actors.keys()), time)
# callback maker for scale manipulation
def make_scale_observer(self, glyphs):
    """Return a slider callback rescaling every glyph in ``glyphs``.

    ``glyphs`` is a sequence of dicts (e.g. keyed by mu).  Each glyph's
    new scale factor is the slider value times the glyph's remembered
    base factor ``_scale_fact``.
    """
    def scale_observer(obj, event):
        factor = obj.GetRepresentation().GetValue()
        for glyph_dict in glyphs:
            for glyph in glyph_dict.values():
                glyph.SetScaleFactor(factor * glyph._scale_fact)
    return scale_observer
# callback maker for time scale manipulation
def make_time_scale_observer(self, time_slider_repres, time_observer):
delta_time = self.max_time - self.min_time
def time_scale_observer(obj, event):
slider_repres = obj.GetRepresentation()
time_scale_at_pos = 1. - slider_repres.GetValue()
current_time = time_observer._time
shift = (current_time - self.min_time) / delta_time
xmin_time = self.min_time + time_scale_at_pos / 2. * delta_time
xmax_time = self.max_time - time_scale_at_pos / 2. * delta_time
xdelta_time = xmax_time - xmin_time
new_mintime = max(self.min_time, current_time - xdelta_time)
new_maxtime = min(self.max_time, current_time + xdelta_time)
time_slider_repres.SetMinimumValue(new_mintime)
time_slider_repres.SetMaximumValue(new_maxtime)
return time_scale_observer
# make a slider widget and its representation
def make_slider(self, title, observer, interactor,
                startvalue, minvalue, maxvalue, cx1, cy1, cx2, cy2):
    """Create a 2D slider widget and its representation.

    Parameters
    ----------
    title : str
        Label displayed above the slider.
    observer : callable
        Called on 'InteractionEvent' with (obj, event).
    interactor : vtkRenderWindowInteractor
        Interactor the widget is attached to.
    startvalue, minvalue, maxvalue : float
        Initial value and value range.
    cx1, cy1, cx2, cy2 : float
        Slider end points in normalized display coordinates.

    Returns
    -------
    (vtkSliderWidget, vtkSliderRepresentation2D)
    """
    slider_repres = vtk.vtkSliderRepresentation2D()
    # pad the range by 1% on each side so the exact bounds stay reachable
    slider_repres.SetMinimumValue(
        minvalue - (maxvalue - minvalue) / 100)
    slider_repres.SetMaximumValue(
        maxvalue + (maxvalue - minvalue) / 100)
    slider_repres.SetValue(startvalue)
    slider_repres.SetTitleText(title)
    slider_repres.GetPoint1Coordinate().\
        SetCoordinateSystemToNormalizedDisplay()
    slider_repres.GetPoint1Coordinate().SetValue(cx1, cy1)
    slider_repres.GetPoint2Coordinate().\
        SetCoordinateSystemToNormalizedDisplay()
    slider_repres.GetPoint2Coordinate().SetValue(cx2, cy2)
    slider_repres.SetSliderLength(0.02)
    slider_repres.SetSliderWidth(0.03)
    slider_repres.SetEndCapLength(0.01)
    slider_repres.SetEndCapWidth(0.03)
    slider_repres.SetTubeWidth(0.005)
    slider_repres.SetLabelFormat('%f')
    slider_repres.SetTitleHeight(0.02)
    slider_repres.SetLabelHeight(0.02)
    # Draw the slider in the color complementary to the background; fall
    # back to white when the complement is too close to the background.
    background_color = self.config.get('background_color', [.0,.0,.0])
    reverse_background_color =numpy.ones(3) - background_color
    if (numpy.linalg.norm(background_color-reverse_background_color) < 0.2):
        reverse_background_color = numpy.ones(3)
    slider_repres.GetSliderProperty().SetColor(*reverse_background_color)
    slider_repres.GetTitleProperty().SetColor(*reverse_background_color);
    slider_repres.GetLabelProperty().SetColor(*reverse_background_color);
    slider_repres.GetTubeProperty().SetColor(*reverse_background_color);
    slider_repres.GetCapProperty().SetColor(*reverse_background_color);
    slider_widget = vtk.vtkSliderWidget()
    slider_widget.SetInteractor(interactor)
    slider_widget.SetRepresentation(slider_repres)
    slider_widget.KeyPressActivationOff()
    slider_widget.SetAnimationModeToAnimate()
    slider_widget.SetEnabled(True)
    slider_widget.AddObserver('InteractionEvent', observer)
    return slider_widget, slider_repres
def setup_initial_position(self):
    """Place every object at its first recorded position and set the
    initial visibility of the dynamic actors."""
    if self.opts.export:
        # For time_of_birth specifications with export mode:
        # a 0 scale for objects whose existence is deferred.
        # The correct transform will be set in set_position when
        # the objects appears in pos_data.
        # One have to disable vtkMath generic warnings in order to avoid
        # plenty of 'Unable to factor linear system'
        vtk.vtkMath.GlobalWarningDisplayOff()
        for instance_name in self.io.instances():
            instance = self.io.instances()[instance_name]
            instid = int(instance.attrs['id'])
            for transform in self.transforms[instid]:
                transform.Scale(0, 0, 0)
    self.time0 = None
    if len(self.io_reader._times) > 0:
        # Positions at first time step
        self.time0 = self.io_reader._times[0]
        self.io_reader.SetTime(self.time0)
        #self.pos_t0 = dsa.WrapDataObject(self.io_reader.GetOutputDataObject(0).GetFieldData().GetArray('pos_data'))
        self.pos_t0 = [self.io_reader.pos_data]
    else:
        # this is for the case simulation has not been ran and
        # time does not exists
        self.time0 = 0
        self.id_t0 = None
        # One pseudo pos_data row per dynamic instance, built from the
        # static translation/orientation attributes (time stamp 0).
        self.pos_t0 = numpy.array([
            numpy.hstack(([0.,
                           self.io.instances()[k].attrs['id']]
                          ,self.io.instances()[k].attrs['translation']
                          ,self.io.instances()[k].attrs['orientation']))
            for k in self.io.instances()
            if self.io.instances()[k].attrs['id'] >= 0])
    if numpy.shape(self.io_reader._spos_data)[0] > 0:
        self.set_position(self.io_reader._spos_data)
    # static objects are always visible
    for instance, actors in self.static_actors.items():
        for actor,_,_ in actors:
            actor.VisibilityOn()
    # NOTE(review): in the first branch pos_t0 is a one-element list, so
    # the star-unpack passes the 2D array as expected by set_position;
    # in the fallback branch pos_t0 is itself the 2D array, and the
    # star-unpack would pass one row per argument -- confirm this path.
    self.set_position(*self.pos_t0)
    self.set_dynamic_actors_visibility(self.time0)
    def setup_vtk_renderer(self):
        """Configure the render window, interactor, recorders and lights.

        Applies the depth-peeling and initial-camera options from
        self.opts, wires a window-to-image filter into both the video
        writer (Ogg Theora, written to an .avi file next to the input) and
        the PNG still-image writer, and installs two lights.
        """
        self.renderer_window.AddRenderer(self.renderer)
        self.interactor_renderer.SetRenderWindow(self.renderer_window)
        self.interactor_renderer.GetInteractorStyle().SetCurrentStyleToTrackballCamera()
        # http://www.itk.org/Wiki/VTK/Depth_Peeling
        if self.opts.depth_peeling:
            # Use a render window with alpha bits (as initial value is 0 (false) ):
            self.renderer_window.SetAlphaBitPlanes(1)
            # Force to not pick a framebuffer with a multisample buffer ( as initial
            # value is 8):
            self.renderer_window.SetMultiSamples(0)
            # Choose to use depth peeling (if supported) (initial value is 0
            # (false) )
            self.renderer.SetUseDepthPeeling(1)
            # Set depth peeling parameters.
            self.renderer.SetMaximumNumberOfPeels(
                self.opts.maximum_number_of_peels)
            # Set the occlusion ratio (initial value is 0.0, exact image)
            self.renderer.SetOcclusionRatio(self.opts.occlusion_ratio)
        # Set the initial camera position and orientation if specified.
        # initial_camera slots: [0]=position, [1]=focal point, [2]=view up,
        # [3]=parallel scale, [4]=clipping range.
        if self.opts.initial_camera[0] is not None:
            self.renderer.GetActiveCamera().SetPosition(*self.opts.initial_camera[0])
        if self.opts.initial_camera[1] is not None:
            self.renderer.GetActiveCamera().SetFocalPoint(*self.opts.initial_camera[1])
        if self.opts.initial_camera[2] is not None:
            self.renderer.GetActiveCamera().SetViewUp(*self.opts.initial_camera[2])
        if self.opts.initial_camera[4] is not None:
            self.renderer.GetActiveCamera().SetClippingRange(*self.opts.initial_camera[4])
        else:
            self.renderer.ResetCameraClippingRange()
        if self.opts.initial_camera[3] is not None:
            # A parallel scale was given: switch to orthographic projection.
            self.renderer.GetActiveCamera().ParallelProjectionOn()
            self.renderer.GetActiveCamera().SetParallelScale(
                self.opts.initial_camera[3])
        self.image_maker = vtk.vtkWindowToImageFilter()
        self.image_maker.SetInput(self.renderer_window)
        self.recorder = vtk.vtkOggTheoraWriter()
        self.recorder.SetQuality(2)
        self.recorder.SetRate(self.opts.frames_per_second)
        self.recorder.SetFileName(os.path.splitext(self.opts.io_filename)[0]+'.avi')
        self.recorder.SetInputConnection(self.image_maker.GetOutputPort())
        self.writer = vtk.vtkPNGWriter()
        self.writer.SetInputConnection(self.image_maker.GetOutputPort())
        # Create a vtkLight, and set the light parameters.
        light = vtk.vtkLight()
        light.SetFocalPoint(0, 0, 0)
        light.SetPosition(0, 0, 500)
        # light.SetLightTypeToHeadlight()
        self.renderer.AddLight(light)
        # Second light attached to the camera (headlight).
        hlight = vtk.vtkLight()
        hlight.SetFocalPoint(0, 0, 0)
        # hlight.SetPosition(0, 0, 500)
        hlight.SetLightTypeToHeadlight()
        self.renderer.AddLight(hlight)
        self.renderer.SetBackground(*self.config.get('background_color', [.0,.0,.0]))
    def setup_charts(self):
        """Create two chart windows plotting solver iterations and solver
        precisions against time, fed from the solver-info dataset
        (self.io_reader._isolv_data columns: time, iterations, precisions).
        """
        # Warning! numpy support offer a view on numpy array
        # the numpy array must not be garbage collected!
        nxtime = self.io_reader._isolv_data[:, 0]
        nxiters = self.io_reader._isolv_data[:, 1]
        nprecs = self.io_reader._isolv_data[:, 2]
        xtime = numpy_support.numpy_to_vtk(nxtime)
        xiters = numpy_support.numpy_to_vtk(nxiters)
        xprecs = numpy_support.numpy_to_vtk(nprecs)
        xtime.SetName('time')
        xiters.SetName('iterations')
        xprecs.SetName('precisions')
        table = vtk.vtkTable()
        table.AddColumn(xtime)
        table.AddColumn(xiters)
        table.AddColumn(xprecs)
        # table.Dump()
        tview_iter = vtk.vtkContextView()
        tview_prec = vtk.vtkContextView()
        chart_iter = vtk.vtkChartXY()
        chart_prec = vtk.vtkChartXY()
        tview_iter.GetScene().AddItem(chart_iter)
        tview_prec.GetScene().AddItem(chart_prec)
        self.iter_plot = chart_iter.AddPlot(vtk.vtkChart.LINE)
        self.iter_plot.SetLabel('Solver iterations')
        self.iter_plot.GetXAxis().SetTitle('time')
        self.iter_plot.GetYAxis().SetTitle('iterations')
        self.prec_plot = chart_prec.AddPlot(vtk.vtkChart.LINE)
        self.prec_plot.SetLabel('Solver precisions')
        self.prec_plot.GetXAxis().SetTitle('time')
        self.prec_plot.GetYAxis().SetTitle('precisions')
        add_compatiblity_methods(self.iter_plot)
        add_compatiblity_methods(self.prec_plot)
        self.iter_plot.SetInputData(table, 'time', 'iterations')
        self.prec_plot.SetInputData(table, 'time', 'precisions')
        self.iter_plot.SetWidth(5.0)
        self.prec_plot.SetWidth(5.0)
        self.iter_plot.SetColor(0, 255, 0, 255)
        self.prec_plot.SetColor(0, 255, 0, 255)
        # Right-click events on the charts are forwarded to the input
        # observer callbacks.
        tview_iter.GetInteractor().AddObserver('RightButtonReleaseEvent',
                                               self.input_observer.iter_plot_observer)
        tview_prec.GetInteractor().AddObserver('RightButtonReleaseEvent',
                                               self.input_observer.prec_plot_observer)
        # screen_size = self.renderer_window.GetScreenSize()
        self.renderer_window.SetSize(*self.config['window_size'])
        self.renderer_window.SetWindowName('vview: ' + self.opts.io_filename)
        tview_iter.GetRenderer().GetRenderWindow().SetSize(600, 200)
        tview_prec.GetRenderer().GetRenderWindow().SetSize(600, 200)
        tview_iter.GetInteractor().Initialize()
        # tview_iter.GetInteractor().Start()
        tview_iter.GetRenderer().SetBackground(.9, .9, .9)
        tview_iter.GetRenderer().Render()
        tview_prec.GetInteractor().Initialize()
        # tview_prec.GetInteractor().Start()
        tview_prec.GetRenderer().SetBackground(.9, .9, .9)
        tview_prec.GetRenderer().Render()
        # Keep references alive on self (views own the chart windows).
        self.tview_iter = tview_iter
        self.tview_prec = tview_prec
    def setup_sliders(self, times):
        """Install the time slider plus keyboard/timer observers.

        times -- recorded simulation times; when empty, no time slider is
        created and a bare InputObserver is used instead.  Also adds the
        contact-force scale slider (when contact forces exist) and the
        time-scale slider.
        """
        if len(times) > 0:
            slider_repres = vtk.vtkSliderRepresentation2D()
            if self.min_time is None:
                self.min_time = times[0]
            if self.max_time is None:
                self.max_time = times[len(times) - 1]
            slider_repres.SetMinimumValue(self.min_time)
            slider_repres.SetMaximumValue(self.max_time)
            slider_repres.SetValue(self.min_time)
            slider_repres.SetTitleText("time")
            # Slider end points in normalized display coordinates (top of
            # the window, right half).
            slider_repres.GetPoint1Coordinate(
            ).SetCoordinateSystemToNormalizedDisplay()
            slider_repres.GetPoint1Coordinate().SetValue(0.4, 0.9)
            slider_repres.GetPoint2Coordinate(
            ).SetCoordinateSystemToNormalizedDisplay()
            slider_repres.GetPoint2Coordinate().SetValue(0.9, 0.9)
            slider_repres.SetSliderLength(0.02)
            slider_repres.SetSliderWidth(0.03)
            slider_repres.SetEndCapLength(0.01)
            slider_repres.SetEndCapWidth(0.03)
            slider_repres.SetTubeWidth(0.005)
            slider_repres.SetLabelFormat("%3.4lf")
            slider_repres.SetTitleHeight(0.02)
            slider_repres.SetLabelHeight(0.02)
            # Pick a slider colour that contrasts with the background;
            # near mid-grey backgrounds fall back to plain white.
            background_color = self.config.get('background_color', [.0,.0,.0])
            reverse_background_color =numpy.ones(3) - background_color
            if (numpy.linalg.norm(background_color-reverse_background_color) < 0.2):
                reverse_background_color = numpy.ones(3)
            slider_repres.GetSliderProperty().SetColor(*reverse_background_color)
            slider_repres.GetTitleProperty().SetColor(*reverse_background_color);
            slider_repres.GetLabelProperty().SetColor(*reverse_background_color);
            slider_repres.GetTubeProperty().SetColor(*reverse_background_color);
            slider_repres.GetCapProperty().SetColor(*reverse_background_color);
            slider_widget = vtk.vtkSliderWidget()
            self.slider_widget = slider_widget
            slider_widget.SetInteractor(self.interactor_renderer)
            slider_widget.SetRepresentation(slider_repres)
            slider_widget.KeyPressActivationOff()
            slider_widget.SetAnimationModeToAnimate()
            slider_widget.SetEnabled(True)
            self.input_observer = InputObserver(self, times, slider_repres)
            slider_widget.AddObserver("InteractionEvent", self.input_observer.time)
        else:
            self.input_observer = InputObserver(self)
        self.interactor_renderer.AddObserver('KeyPressEvent', self.input_observer.key)
        self.interactor_renderer.AddObserver(
            'TimerEvent', self.input_observer.recorder_observer)
        if self.io.contact_forces_data().shape[0] > 0:
            # Slider scaling the contact-force glyphs.
            self.slwsc, self.slrepsc = self.make_slider(
                'CF scale',
                self.make_scale_observer([self.cone_glyph, self.cylinder_glyph,
                                          self.sphere_glypha, self.sphere_glyphb,
                                          self.arrow_glyph]),
                self.interactor_renderer,
                self.opts.cf_scale_factor, self.opts.cf_scale_factor -
                self.opts.cf_scale_factor / 2,
                self.opts.cf_scale_factor + self.opts.cf_scale_factor / 2,
                0.03, 0.03, 0.03, 0.7)
        if len(times) > 0:
            # Slider controlling the time-scale factor used for playback.
            self.xslwsc, self.xslrepsc = self.make_slider(
                'Time scale',
                self.make_time_scale_observer(slider_repres,
                                              self.input_observer),
                self.interactor_renderer,
                self.opts.time_scale_factor, self.opts.time_scale_factor -
                self.opts.time_scale_factor / 2,
                self.opts.time_scale_factor + self.opts.time_scale_factor / 2,
                0.1, 0.9, 0.3, 0.9)
    def setup_axes(self):
        """Show a small interactive XYZ orientation marker widget."""
        # display coordinates axes
        self.axes = vtk.vtkAxesActor()
        self.axes.SetTotalLength(1.0, 1.0, 1.0)
        self.widget = vtk.vtkOrientationMarkerWidget()
        # self.widget.SetOutlineColor( 0.9300, 0.5700, 0.1300 )
        self.widget.SetOrientationMarker(self.axes)
        self.widget.SetInteractor(self.interactor_renderer)
        # self.widget.SetViewport( 0.0, 0.0, 40.0, 40.0 );
        self.widget.SetEnabled(True)
        self.widget.InteractiveOn()
# this should be extracted from the VView class
    def export(self):
        """Batch export of the scene as VTK multiblock files.

        With --gen-para-script only a GNU parallel command line is printed
        (to split the work over self.opts.nprocs processes); otherwise one
        file is written per exported time step, named from the input file,
        the step counter and the writer's default extension.
        """
        times = self.io_reader._times[
            self.opts.start_step:self.opts.end_step:self.opts.stride]
        ntime = len(times)
        if self.opts.gen_para_script:
            # just the generation of a parallel command
            options_str = ''
            if self.opts.ascii_mode:
                options_str += '--ascii'
            if self.opts.global_filter:
                options_str += '--global-filter'
            ntimes_proc = int(ntime / self.opts.nprocs)
            s = ''
            for i in range(self.opts.nprocs):
                s += '{0}/{1} '.format(ntimes_proc*i, ntimes_proc*(i+1))
            print('#!/bin/sh')
            print('parallel --verbose', sys.argv[0], self.opts.io_filename,
                  options_str, '--start-step={//} --end-step={/} :::', s)
        else:
            # export
            big_data_writer = vtk.vtkXMLMultiBlockDataWriter()
            add_compatiblity_methods(big_data_writer)
            big_data_writer.SetInputConnection(self.big_data_collector.GetOutputPort())
            if self.opts.ascii_mode:
                big_data_writer.SetDataModeToAscii()
            k = self.opts.start_step
            # one progress dot roughly per percent of the time steps
            packet = int(ntime/100)+1
            # independent of time
            spos_data = self.io_reader._spos_data
            if spos_data.size > 0:
                self.set_position_v(spos_data[:, 1], spos_data[:, 2],
                                    spos_data[:, 3],
                                    spos_data[:, 4], spos_data[:, 5],
                                    spos_data[:, 6],
                                    spos_data[:, 7], spos_data[:, 8])
            for time in times:
                k = k + self.opts.stride
                if (k % packet == 0):
                    sys.stdout.write('.')
                self.io_reader.SetTime(time)
                pos_data = self.io_reader.pos_data
                velo_data = self.io_reader.velo_data
                self.set_position_v(
                    pos_data[:, 1], pos_data[:, 2], pos_data[:, 3],
                    pos_data[:, 4], pos_data[:, 5], pos_data[:, 6],
                    pos_data[:, 7], pos_data[:, 8])
                self.set_velocity_v(
                    velo_data[:, 1],
                    velo_data[:, 2],
                    velo_data[:, 3],
                    velo_data[:, 4],
                    velo_data[:, 5],
                    velo_data[:, 6],
                    velo_data[:, 7])
                self.set_translation_v(
                    pos_data[:, 1],
                    pos_data[:, 2],
                    pos_data[:, 3],
                    pos_data[:, 4],
                    )
                self.set_instance_v(pos_data[:, 1])
                big_data_writer.SetFileName('{0}-{1}.{2}'.format(
                    os.path.splitext(os.path.basename(self.opts.io_filename))[0],
                    k, big_data_writer.GetDefaultFileExtension()))
                if self.opts.global_filter:
                    self.big_data_geometry_filter.Update()
                else:
                    self.big_data_collector.Update()
                big_data_writer.Write()
            # NOTE(review): this final Write() re-writes the last step's
            # file; looks redundant with the in-loop write -- kept as-is.
            big_data_writer.Write()
def export_raw_data(self):
times = self.io_reader._times[
self.opts.start_step:self.opts.end_step:self.opts.stride]
ntime = len(times)
export_2d = False
if self.io.dimension() ==2 :
export_2d=True
print('We export raw data for 2D object')
# export
k = self.opts.start_step
packet = int(ntime/100)+1
# ######## position output ########
# nvalue = ndyna*7+1
# position_output = numpy.empty((ntime,nvalue))
# #print('position_output shape', numpy.shape(position_output))
# position_output[:,0] = times[:]
position_output = {}
velocity_output = {}
velocity_absolute_output = {}
for time in times:
k = k + self.opts.stride
if (k % packet == 0):
sys.stdout.write('.')
self.io_reader.SetTime(time)
pos_data = self.io_reader.pos_data
velo_data = self.io_reader.velo_data
ndyna=pos_data.shape[0]
for i in range(ndyna):
bdy_id = int(pos_data[i,1])
######## position output ########
if self.opts._export_position :
nvalue=pos_data.shape[1]
position_output_bdy = position_output.get(bdy_id)
if position_output_bdy is None:
position_output[bdy_id] = []
position_output_body = position_output[bdy_id]
position_output_body.append([])
position_output_body[-1].append(time)
if export_2d:
data_2d = [pos_data[i,2],pos_data[i,3],numpy.acos(pos_data[i,5]/2.0)]
position_output_body[-1].extend(data_2d)
#position_output_body[-1].extend(pos_data[i,2:nvalue])
else:
position_output_body[-1].extend(pos_data[i,2:nvalue])
position_output_body[-1].append(bdy_id)
######## velocity output ########
if self.opts._export_velocity :
nvalue=velo_data.shape[1]
velocity_output_bdy = velocity_output.get(bdy_id)
if velocity_output_bdy is None:
velocity_output[bdy_id] = []
velocity_output_body = velocity_output[bdy_id]
velocity_output_body.append([])
velocity_output_body[-1].append(time)
velocity_output_body[-1].extend(velo_data[i,2:nvalue])
velocity_output_body[-1].append(bdy_id)
######## velocity in absolute frame output ########
if self.opts._export_velocity_in_absolute_frame :
nvalue=velo_data.shape[1]
[q1,q2,q3,q4] = pos_data[i,5:9]
q = Quaternion((q1, q2, q3, q4))
velo = q.rotate(velo_data[i,5:8])
velocity_absolute_output_bdy = velocity_absolute_output.get(bdy_id)
if velocity_absolute_output_bdy is None:
velocity_absolute_output[bdy_id] = []
velocity_absolute_output_body = velocity_absolute_output[bdy_id]
velocity_absolute_output_body.append([])
velocity_absolute_output_body[-1].append(time)
velocity_absolute_output_body[-1].extend(velo_data[i,2:5])
velocity_absolute_output_body[-1].extend(velo[:])
velocity_absolute_output_body[-1].append(bdy_id)
for bdy_id in position_output.keys():
output = numpy.array(position_output[bdy_id])
filename_output = '{0}-position-body_{1}.dat'.format(
os.path.splitext(os.path.basename(self.opts.io_filename))[0],
bdy_id)
numpy.savetxt(filename_output, output)
for bdy_id in velocity_output.keys():
output = numpy.array(velocity_output[bdy_id])
filename_output = '{0}-velocity-body_{1}.dat'.format(
os.path.splitext(os.path.basename(self.opts.io_filename))[0],
bdy_id)
numpy.savetxt(filename_output, output)
for bdy_id in velocity_absolute_output.keys():
output = numpy.array(velocity_absolute_output[bdy_id])
filename_output = '{0}-velocity-absolute-body_{1}.dat'.format(
os.path.splitext(os.path.basename(self.opts.io_filename))[0],
bdy_id)
numpy.savetxt(filename_output, output)
cf_output = {}
for time in times:
#print('time', time)
k = k + self.opts.stride
if (k % packet == 0):
sys.stdout.write('.')
self.io_reader.SetTime(time)
cf_data = self.io_reader.cf_data
#print('cf_data', cf_data)
if cf_data is not None and self.opts._export_cf :
ncontact=cf_data.shape[0]
for i in range(ncontact):
contact_id = int(cf_data[i,23])
#print('contact_id', contact_id)
######## contact output ########
nvalue=cf_data.shape[1]
cf_output_contact = cf_output.get(contact_id)
if cf_output_contact is None:
cf_output[contact_id] = []
cf_output_contact = cf_output[contact_id]
cf_output_contact.append([])
cf_output_contact[-1].append(time)
cf_output_contact[-1].extend(cf_data[i,2:nvalue])
cf_output_contact[-1].append(contact_id)
for contact_id in cf_output.keys():
output = numpy.array(cf_output[contact_id])
filename_output = '{0}-cf-contact_{1}.dat'.format(
os.path.splitext(os.path.basename(self.opts.io_filename))[0],
contact_id)
numpy.savetxt(filename_output, output)
sys.stdout.write('\n')
    def initialize_vtk(self):
        """Build the VTK pipeline: collectors, caches, actors and sources.

        Creates the renderer/window/interactor, loads shapes and
        instances, and (unless --cf-disable) the contact-force glyph
        machinery.  run() calls this before initialize_gui().
        """
        if not self.opts.gen_para_script:
            # Collectors merging every object/contact dataset into one
            # multiblock used for export and optional global rendering.
            self.objects_collector = vtk.vtkMultiBlockDataGroupFilter()
            add_compatiblity_methods(self.objects_collector)
            self.cf_collector = vtk.vtkMultiBlockDataGroupFilter()
            add_compatiblity_methods(self.cf_collector)
            self.big_data_collector = vtk.vtkMultiBlockDataGroupFilter()
            add_compatiblity_methods(self.big_data_collector)
            self.big_data_collector.AddInputConnection(self.cf_collector.GetOutputPort())
            if self.opts.global_filter:
                self.big_data_geometry_filter = vtk.vtkCompositeDataGeometryFilter()
                add_compatiblity_methods(self.big_data_geometry_filter)
                self.big_data_geometry_filter.SetInputConnection(self.objects_collector.GetOutputPort())
                self.big_data_collector.AddInputConnection(self.big_data_geometry_filter.GetOutputPort())
            else:
                self.big_data_collector.AddInputConnection(self.objects_collector.GetOutputPort())
            if self.opts.global_filter and not self.opts.export:
                # Single mapper/actor rendering the whole multiblock.
                self.big_data_mapper = vtk.vtkCompositePolyDataMapper()
                add_compatiblity_methods(self.big_data_mapper)
                self.big_data_mapper.SetInputConnection(self.big_data_collector.GetOutputPort())
                if not self.opts.imr:
                    self.big_data_mapper.ImmediateModeRenderingOff()
                self.big_actor = vtk.vtkActor()
                self.big_actor.SetMapper(self.big_data_mapper)
        times = self.io_reader._times
        if (len(times) == 0):
            print('No dynamic data found! Empty simulation.')
        # Per-shape/per-instance caches filled by the init_* helpers below.
        self.readers = dict()
        self.datasets = dict()
        self.mappers = dict()
        self.mappers_edges = dict()
        self.dynamic_actors = dict()
        self.static_actors = dict()
        self.vtk_reader = {'vtp': vtk.vtkXMLPolyDataReader,
                           'stl': vtk.vtkSTLReader}
        self.unfrozen_mappers = dict()
        self.unfrozen_mappers_edges = dict()
        self.build_set_functions()
        self.renderer = vtk.vtkRenderer()
        self.renderer_window = vtk.vtkRenderWindow()
        self.interactor_renderer = vtk.vtkRenderWindowInteractor()
        self.init_shapes()
        self.init_instances()
        if self.opts.cf_disable:
            self.io_reader.ContactForcesOff()
            self.setup_initial_position()
        else:
            self.io_reader.ContactForcesOn()
            for mu in self.io_reader._mu_coefs:
                self.init_contact_pos(mu)
            self.setup_initial_position()
            # Offset transform handed to the contact-force glyph sources.
            transform = vtk.vtkTransform()
            transform.Translate(-0.5, 0., 0.)
            for mu in self.io_reader._mu_coefs:
                self.init_cf_sources(mu, transform)
        if not self.opts.export:
            if not self.opts.cf_disable and not self.opts.global_filter:
                # One set of glyph actors per friction coefficient.
                for mu in self.io_reader._mu_coefs:
                    self.renderer.AddActor(self.gactor[mu])
                    self.renderer.AddActor(self.cactor[mu])
                    self.renderer.AddActor(self.clactor[mu])
                    self.renderer.AddActor(self.sactora[mu])
                    self.renderer.AddActor(self.sactorb[mu])
            if self.opts.global_filter:
                self.renderer.AddActor(self.big_actor)
            self.renderer.ResetCamera()
    def initialize_gui(self):
        """Build the interactive GUI: renderer window, sliders, optional
        charts and the axes widget.  run() calls initialize_vtk() first;
        the setups here use objects created there.
        """
        self.setup_vtk_renderer()
        self.setup_sliders(self.io_reader._times)
        if self.opts.with_charts:
            self.setup_charts()
        self.setup_axes()
        self.gui_initialized = True
    def run(self):
        """Entry point: build the VTK scene and GUI, then start the
        interactor event loop."""
        self.initialize_vtk()
        self.initialize_gui()
        self.interactor_renderer.Start()
##
## Program starts
##
if __name__=='__main__':
    ## Persistent configuration
    config = VViewConfig()
    # Load it immediately
    config.load_configuration()
    # Parse command-line
    opts = VViewOptions()
    opts.parse()
# Heavier imports after command line parsing (deferring them keeps
# option parsing and error reporting cheap).
from vtk.util import numpy_support
from math import pi
import bisect
from numpy.linalg import norm
import numpy
import random
from siconos.io.mechanics_hdf5 import MechanicsHdf5
from siconos.io.mechanics_hdf5 import tmpfile as io_tmpfile
from siconos.io.mechanics_hdf5 import occ_topo_list, occ_load_file,\
    topods_shape_reader, brep_reader
nan = numpy.nan
if __name__=='__main__':
    ## Options and config already loaded above
    with MechanicsHdf5(io_filename=opts.io_filename, mode='r') as io:
        vview = VView(io, opts, config)
        vview.run()
        # Update configuration and save it
        config['window_size'] = vview.renderer_window.GetSize()
        config.save_configuration(force=False)
| apache-2.0 | -5,573,428,976,966,245,000 | 39.104532 | 158 | 0.544502 | false |
matus-stehlik/glowing-batman | roots/wsgi.py | 1 | 1132 | """
WSGI config for roots project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Select the settings module before Django is imported; setdefault keeps
# any DJANGO_SETTINGS_MODULE already present in the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "roots.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| mit | 8,697,966,878,818,311,000 | 39.428571 | 79 | 0.79947 | false |
hariseldon78/Teacup_gen3_customized | createTemperatureLookup.py | 1 | 6248 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Creates a C code lookup table for doing ADC to temperature conversion
# on a microcontroller
# based on: http://hydraraptor.blogspot.com/2007/10/measuring-temperature-easy-way.html
# Modified Thu 10 Feb 2011 02:02:28 PM MST jgilmore for 5D_on_arduino firmware
# temps are now in 14.2 fixed point notation (i.e. measured in quarter-degrees)
# temps are not permitted to be negative (BUG:may result in numtemps fewer than requested)
# bugfix: --num-temps command line option works.
"""Thermistor Value Lookup Table Generator
Generates lookup to temperature values for use in a microcontroller in C format based on:
http://hydraraptor.blogspot.com/2007/10/measuring-temperature-easy-way.html
The main use is for Arduino programs that read data from the circuit board described here:
http://make.rrrf.org/ts-1.0
Usage: python createTemperatureLookup.py [options]
Options:
-h, --help show this help
--r0=... thermistor rating where # is the ohm rating of the thermistor at t0 (eg: 10K = 10000)
--t0=... thermistor temp rating where # is the temperature in Celsius to get r0 (from your datasheet)
--beta=... thermistor beta rating. see http://reprap.org/bin/view/Main/MeasuringThermistorBeta
--r1=... R1 rating where # is the ohm rating of R1 (eg: 10K = 10000)
--r2=... R2 rating where # is the ohm rating of R2 (eg: 10K = 10000)
--num-temps=... the number of temperature points to calculate (default: 20)
--max-adc=... the max ADC reading to use. if you use R1, it limits the top value for the thermistor circuit, and thus the possible range of ADC values
It is suggested to generate more values than you need, and delete some of the ones in the ranges
that aren't interesting. This will improve accuracy in the temperature ranges that are important to you.
"""
from math import *
import sys
import getopt
class Thermistor:
    """Thermistor maths: convert between ADC readings and temperatures.

    Models an NTC thermistor of nominal resistance ``r0`` ohms at ``t0``
    degrees Celsius with the given ``beta``, read through a potential
    divider built from ``r1`` (optional parallel resistor, pass 0 to
    disable) and ``r2`` (series resistor), on a 5V supply and a 10-bit
    5V-referenced ADC.
    """

    def __init__(self, r0, t0, beta, r1, r2):
        self.r0 = r0                        # nominal resistance, e.g. 10K
        self.t0 = t0 + 273.15               # nominal temperature in Kelvin
        self.beta = beta                    # thermistor beta constant
        self.vadc = 5.0                     # ADC reference voltage
        self.vcc = 5.0                      # divider supply voltage
        self.k = r0 * exp(-beta / self.t0)  # constant part of calculation
        if r1 > 0:
            # Thevenin equivalent of the r1/r2 bias network.
            self.vs = r1 * self.vcc / (r1 + r2)  # effective bias voltage
            self.rs = r1 * r2 / (r1 + r2)        # effective bias impedance
        else:
            # No parallel resistor: plain series r2 to vcc.
            self.vs = self.vcc
            self.rs = r2

    def temp(self, adc):
        """Convert a 10-bit ADC reading into a temperature in Celsius."""
        volts = adc * self.vadc / 1024
        resistance = self.rs * volts / (self.vs - volts)
        return (self.beta / log(resistance / self.k)) - 273.15

    def setting(self, t):
        """Convert a temperature in Celsius into the nearest ADC reading."""
        resistance = self.r0 * exp(self.beta * (1 / (t + 273.15) - 1 / self.t0))
        volts = self.vs * resistance / (self.rs + resistance)
        return round(volts / self.vadc * 1024)
def main(argv):
    """Parse command-line options and print the C lookup table to stdout.

    Redirect stdout into a header file to use the table.  All prints use
    the parenthesised single-argument form so the script runs under both
    Python 2 and Python 3 (the original print statements were Py2-only).
    """
    # Defaults matching the stock RepRap thermistor circuit.
    r0 = 10000
    t0 = 25
    beta = 3947
    r1 = 680
    r2 = 1600
    num_temps = int(20)
    max_adc = int(1023)
    try:
        opts, args = getopt.getopt(argv, "h", ["help", "r0=", "t0=", "beta=", "r1=", "r2=", "max-adc=", "num-temps="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt == "--r0":
            r0 = int(arg)
        elif opt == "--t0":
            t0 = int(arg)
        elif opt == "--beta":
            beta = int(arg)
        elif opt == "--r1":
            r1 = int(arg)
        elif opt == "--r2":
            r2 = int(arg)
        elif opt == "--max-adc":
            max_adc = int(arg)
        elif opt == "--num-temps":
            num_temps = int(arg)
    increment = int(max_adc / (num_temps - 1))
    t = Thermistor(r0, t0, beta, r1, r2)
    # list() so the element assignment below also works on Python 3,
    # where range() returns an immutable range object.
    adcs = list(range(1, max_adc, increment))
    # Chop off negative temperatures: temps are stored as unsigned 14.2
    # fixed point (quarter degrees), so they must not go below 0C.
    # (BUG noted in the original header: this may yield fewer entries
    # than requested.)
    for i in range(0, len(adcs)):
        if int(t.temp(adcs[i]) * 4) < 0:
            adcs = adcs[0:i + 1]
            # Replace this entry with the ADC reading for 0C
            adcs[i] = int(t.setting(0))
            # If the closest ADC reading to 0C still maps below 0C, use
            # the next highest ADC reading
            if int(t.temp(adcs[i]) * 4) < 0:
                adcs[i] -= 1
            break
    print("// Thermistor lookup table")
    print("// default thermistor lookup table")
    print("// You may be able to improve the accuracy of this table in various ways.")
    print("//   1. Measure the actual resistance of the resistor. It's \"nominally\" 4.7K, but that's ± 5%.")
    print("//   2. Measure the actual beta of your thermistor:http://reprap.org/wiki/MeasuringThermistorBeta")
    print("//   3. Generate more table entries than you need, then trim down the ones in uninteresting ranges.")
    print("// In either case you'll have to regenerate this table, which requires python, which is difficult to install on windows.")
    print("// Since you'll have to do some testing to determine the correct temperature for your application anyway, you")
    print("// may decide that the effort isn't worth it. Who cares if it's reporting the \"right\" temperature as long as it's")
    print("// keeping the temperature steady enough to print, right?")
    print("// ./createTemperatureLookup.py --r0=%s --t0=%s --r1=%s --r2=%s --beta=%s --max-adc=%s" % (r0, t0, r1, r2, beta, max_adc))
    print("// r0: %s" % (r0))
    print("// t0: %s" % (t0))
    print("// r1: %s" % (r1))
    print("// r2: %s" % (r2))
    print("// beta: %s" % (beta))
    print("// max adc: %s" % (max_adc))
    print("#define NUMTEMPS %s" % (len(adcs)))
    print("// {ADC, temp*4 }, // temp")
    print("uint16_t temptable[NUMTEMPS][2] PROGMEM = {")
    counter = 0
    for adc in adcs:
        counter = counter + 1
        # Last row has no trailing comma.
        if counter == len(adcs):
            print("   {%s, %s} // %s C" % (adc, int(t.temp(adc) * 4), t.temp(adc)))
        else:
            print("   {%s, %s}, // %s C" % (adc, int(t.temp(adc) * 4), t.temp(adc)))
    print("};")
def usage():
    """Print the module usage text (the module docstring) to stdout.

    Uses the print() function form so it works under both Python 2 and
    Python 3 (the original `print __doc__` was a Py3 syntax error).
    """
    print(__doc__)
# Script entry point: hand everything after the program name to main().
if __name__ == "__main__":
    main(sys.argv[1:])
| gpl-2.0 | -2,905,846,211,853,608,000 | 39.044872 | 154 | 0.638547 | false |
jtraver/dev | python/graphics/circles8.py | 1 | 24535 | #!/usr/bin/python
# http://mcsp.wartburg.edu/zelle/python/graphics.py
# https://mcsp.wartburg.edu/zelle/python/graphics/graphics/index.html
import math
from graphics import *
# Window dimensions in pixels and the derived centre point; the circle
# positions below are computed relative to (XCENTER, YCENTER).
XSCALE = 2550
YSCALE = 1310
XCENTER = XSCALE / 2
YCENTER = YSCALE / 2
# https://en.wikipedia.org/wiki/Incircle_and_excircles_of_a_triangle#Trilinear_coordinates
# {\displaystyle \left({\frac {ax_{a}+bx_{b}+cx_{c}}{a+b+c}},{\frac {ay_{a}+by_{b}+cy_{c}}{a+b+c}}\right)={\frac {a\left(x_{a},y_{a}\right)+b\left(x_{b},y_{b}\right)+c\left(x_{c},y_{c}\right)}{a+b+c}}.}
# {ax_{a}+bx_{b}+cx_{c}}{a+b+c}},{{ay_{a}+by_{b}+cy_{c}}{a+b+c}}
def circles5(win, scale):
    """Draw a coloured "flower" based on the RGB colour wheel.

    Layout: a white disc in the middle, six primary/secondary discs on a
    hexagonal ring around it (red, magenta, blue, cyan, green, yellow),
    and six tertiary discs (rose, violet, azure, spring green,
    chartreuse, orange) between the ring discs.  Three tertiary discs
    form a "bottom" layer drawn first so the ring overlaps them.

    References:
      https://en.wikipedia.org/wiki/Color_wheel
      https://en.wikipedia.org/wiki/File:RBG_color_wheel.svg

    win   -- graphics.GraphWin to draw into
    scale -- size multiplier; every disc has radius 10 * scale
    """
    white1 = color_rgb(255, 255, 255)
    black1 = color_rgb(0, 0, 0)
    win.setBackground(black1)
    # Primary colours of the RGB wheel.
    red1 = color_rgb(255, 0, 0)
    green1 = color_rgb(0, 255, 0)
    blue1 = color_rgb(0, 0, 255)
    # print() form works under both Python 2 and Python 3 (the original
    # print statements were Py2-only).
    print("red1 = %s" % str(red1))
    print("green1 = %s" % str(green1))
    print("blue1 = %s" % str(blue1))
    # Secondary colours.
    rb_magenta1 = color_rgb(255, 0, 255)
    gb_cyan1 = color_rgb(0, 255, 255)
    rg_yellow1 = color_rgb(255, 255, 0)
    # Tertiary colours: rose, violet, azure, spring green, chartreuse,
    # orange.
    rm_rose1 = color_rgb(255, 0, 127)
    bm_violet1 = color_rgb(127, 0, 255)
    bc_azure1 = color_rgb(0, 127, 255)
    gc_green1 = color_rgb(0, 255, 127)
    gy_chart1 = color_rgb(127, 255, 0)
    ry_orange1 = color_rgb(255, 127, 0)
    radius1 = 10 * scale
    diameter1 = radius1 * 2
    npoints = 6
    inc1 = (math.pi * 2) / npoints
    theta1 = 0
    xs = []
    ys = []
    # Scaled centre coordinates used by the incentre computations below.
    xa = XCENTER * diameter1
    ya = YCENTER * diameter1
    # Centres of the six ring discs, evenly spaced around the middle.
    for i1 in range(npoints):
        x1 = (math.sin(theta1) * diameter1) + XCENTER
        xs.append(x1)
        y1 = (math.cos(theta1) * diameter1) + YCENTER
        ys.append(y1)
        theta1 += inc1
    # Draw the "bottom" layer first: orange, violet and spring green.
    # Each tertiary disc sits at the equal-weight incentre (= centroid)
    # of the triangle (centre, ring[i], ring[j]).
    xb4 = xs[5] * diameter1
    yb4 = ys[5] * diameter1
    xc4 = xs[0] * diameter1
    yc4 = ys[0] * diameter1
    x4 = (xa + xb4 + xc4) / (3 * diameter1)
    y4 = (ya + yb4 + yc4) / (3 * diameter1)
    c4 = Circle(Point(x4, y4), 10 * scale)
    c4.setOutline(ry_orange1)
    c4.setFill(ry_orange1)
    c4.setWidth(4)
    c4.draw(win)
    xb5 = xs[1] * diameter1
    yb5 = ys[1] * diameter1
    xc5 = xs[2] * diameter1
    yc5 = ys[2] * diameter1
    x5 = (xa + xb5 + xc5) / (3 * diameter1)
    y5 = (ya + yb5 + yc5) / (3 * diameter1)
    c5 = Circle(Point(x5, y5), 10 * scale)
    c5.setOutline(bm_violet1)
    c5.setFill(bm_violet1)
    c5.setWidth(4)
    c5.draw(win)
    xb6 = xs[3] * diameter1
    yb6 = ys[3] * diameter1
    xc6 = xs[4] * diameter1
    yc6 = ys[4] * diameter1
    x6 = (xa + xb6 + xc6) / (3 * diameter1)
    y6 = (ya + yb6 + yc6) / (3 * diameter1)
    c6 = Circle(Point(x6, y6), 10 * scale)
    c6.setOutline(gc_green1)
    c6.setFill(gc_green1)
    c6.setWidth(4)
    c6.draw(win)
    # Top layer: the six wheel discs, one colour per ring position.
    theta1 = 0
    xs = []
    ys = []
    color1 = [red1, rb_magenta1, blue1, gb_cyan1, green1, rg_yellow1]
    for i1 in range(npoints):
        x1 = (math.sin(theta1) * diameter1) + XCENTER
        xs.append(x1)
        y1 = (math.cos(theta1) * diameter1) + YCENTER
        ys.append(y1)
        c1 = Circle(Point(x1, y1), 10 * scale)
        c1.setOutline(color1[i1])
        c1.setFill(color1[i1])
        c1.setWidth(4)
        c1.draw(win)
        theta1 += inc1
    # White disc in the middle, drawn over the ring.
    c0 = Circle(Point(XCENTER, YCENTER), 10 * scale)
    c0.setWidth(4)
    c0.setOutline(white1)
    c0.setFill(white1)
    c0.draw(win)
    # Remaining tertiary discs of the top layer: rose, azure, chartreuse.
    xb1 = xs[0] * diameter1
    yb1 = ys[0] * diameter1
    xc1 = xs[1] * diameter1
    yc1 = ys[1] * diameter1
    x1 = (xa + xb1 + xc1) / (3 * diameter1)
    y1 = (ya + yb1 + yc1) / (3 * diameter1)
    c1 = Circle(Point(x1, y1), 10 * scale)
    c1.setOutline(rm_rose1)
    c1.setFill(rm_rose1)
    c1.setWidth(4)
    c1.draw(win)
    xb2 = xs[2] * diameter1
    yb2 = ys[2] * diameter1
    xc2 = xs[3] * diameter1
    yc2 = ys[3] * diameter1
    x2 = (xa + xb2 + xc2) / (3 * diameter1)
    y2 = (ya + yb2 + yc2) / (3 * diameter1)
    c2 = Circle(Point(x2, y2), 10 * scale)
    c2.setOutline(bc_azure1)
    c2.setFill(bc_azure1)
    c2.setWidth(4)
    c2.draw(win)
    xb3 = xs[4] * diameter1
    yb3 = ys[4] * diameter1
    xc3 = xs[5] * diameter1
    yc3 = ys[5] * diameter1
    x3 = (xa + xb3 + xc3) / (3 * diameter1)
    y3 = (ya + yb3 + yc3) / (3 * diameter1)
    c3 = Circle(Point(x3, y3), 10 * scale)
    c3.setOutline(gy_chart1)
    c3.setFill(gy_chart1)
    c3.setWidth(4)
    c3.draw(win)
def circles4(win, scale):
    """Draw the full circle flower: a centre circle, six ring circles one
    diameter away, and six mid-point circles at the centroid of the centre
    and each adjacent pair of ring circles.
    """
    radius = 10 * scale
    diameter = radius * 2
    # Centre of the flower.
    Circle(Point(XCENTER, YCENTER), radius).draw(win)
    # Six circles evenly spaced on a ring around the centre, collecting
    # their coordinates for the mid-point pass below.
    step = (math.pi * 2) / 6
    angle = 0
    ring_x = []
    ring_y = []
    for _ in range(6):
        cx = (math.sin(angle) * diameter) + XCENTER
        cy = (math.cos(angle) * diameter) + YCENTER
        ring_x.append(cx)
        ring_y.append(cy)
        Circle(Point(cx, cy), radius).draw(win)
        angle += step
    # Mid-point circles, visited in the same pair order as before:
    # (0,1), (2,3), (4,5), (5,0), (1,2), (3,4).
    for i, j in ((0, 1), (2, 3), (4, 5), (5, 0), (1, 2), (3, 4)):
        mx = (XCENTER * diameter + ring_x[i] * diameter
              + ring_x[j] * diameter) / (3 * diameter)
        my = (YCENTER * diameter + ring_y[i] * diameter
              + ring_y[j] * diameter) / (3 * diameter)
        Circle(Point(mx, my), radius).draw(win)
def circles3(win, scale):
    """Draw a centre circle, six ring circles one diameter away, and three
    mid-point circles at the centroid of the centre and ring pairs
    (0,1), (2,3) and (4,5).
    """
    radius = 10 * scale
    diameter = radius * 2
    # Centre of the figure.
    Circle(Point(XCENTER, YCENTER), radius).draw(win)
    # Six circles evenly spaced on the surrounding ring.
    step = (math.pi * 2) / 6
    angle = 0
    ring_x = []
    ring_y = []
    for _ in range(6):
        cx = (math.sin(angle) * diameter) + XCENTER
        cy = (math.cos(angle) * diameter) + YCENTER
        ring_x.append(cx)
        ring_y.append(cy)
        Circle(Point(cx, cy), radius).draw(win)
        angle += step
    # Three mid-point circles, one per alternating ring pair.
    for i, j in ((0, 1), (2, 3), (4, 5)):
        mx = (XCENTER * diameter + ring_x[i] * diameter
              + ring_x[j] * diameter) / (3 * diameter)
        my = (YCENTER * diameter + ring_y[i] * diameter
              + ring_y[j] * diameter) / (3 * diameter)
        Circle(Point(mx, my), radius).draw(win)
def circles2(win, scale):
    """Draw a centre circle ringed by six circles, each one circle-diameter
    away, evenly spaced at 60-degree increments.
    """
    radius = 10 * scale
    diameter = radius * 2
    # Centre circle first.
    Circle(Point(XCENTER, YCENTER), radius).draw(win)
    # Then the six surrounding circles on the ring.
    step = (math.pi * 2) / 6
    angle = 0
    for _ in range(6):
        px = (math.sin(angle) * diameter) + XCENTER
        py = (math.cos(angle) * diameter) + YCENTER
        Circle(Point(px, py), radius).draw(win)
        angle += step
def circles1(win, xoffset, yoffset, scale = 1.0):
    """Draw a centre circle at (XCENTER, YCENTER) plus six circles placed
    on the vertex offsets of the hex1() hexagon, translated by
    (xoffset, yoffset) in scaled units.
    """
    ox = xoffset * scale + XCENTER
    oy = yoffset * scale + YCENTER
    radius = 10 * scale
    # Centre circle (not shifted by the offsets, matching the original).
    Circle(Point(XCENTER, YCENTER), radius).draw(win)
    # Six circles on the hexagon's vertex offsets, clockwise from the
    # upper-left vertex.
    for dx, dy in ((-4, -7), (4, -7), (8, 0), (4, 7), (-4, 7), (-8, 0)):
        Circle(Point(dx * scale + ox, dy * scale + oy), radius).draw(win)
def main():
    """Open the drawing window, render the coloured circle flower, and wait
    for a mouse click before closing.

    Cleanup: the previous body assigned ``radius = 500.0`` (never used) and
    ``scale = 0.5`` (immediately overwritten by 10.0); both dead
    assignments are removed. Behaviour is unchanged.
    """
    scale = 10.0
    win = GraphWin("circle1", XSCALE, YSCALE)
    win.setCoords(0, 0, XSCALE, YSCALE)
    # circles1..circles4 and circle1 are earlier experiments; only the
    # latest figure is drawn.
    circles5(win, scale)
    win.getMouse()  # block until the user clicks
    win.close()
# https://math.stackexchange.com/questions/260096/find-the-coordinates-of-a-point-on-a-circle
# x = rsin(theta), y = rcos(theta)
def circle1(win, xoffset, yoffset, scale = 1.0, radius = 10.0):
    """Draw one hex1 hexagon at (xoffset, yoffset) and npoints more spaced
    evenly on the circle x = r*sin(theta), y = r*cos(theta) around it.

    Cleanup: ``npoints`` was assigned 10, then 1, then 100 — only the last
    assignment ever took effect, so the dead ones are removed.
    """
    hex1(win, xoffset, yoffset, scale)
    npoints = 100
    inc1 = (math.pi * 2) / npoints
    theta1 = 0.0
    for _ in range(npoints):
        x1 = (math.sin(theta1) * radius) + xoffset
        y1 = (math.cos(theta1) * radius) + yoffset
        hex1(win, x1, y1, scale)
        theta1 += inc1
# math = <module 'math' from '/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/lib-dynload/math.so'>
# acos acos(x) Return the arc cosine (measured in radians) of x.
# acosh acosh(x) Return the inverse hyperbolic cosine of x.
# asin asin(x) Return the arc sine (measured in radians) of x.
# asinh asinh(x) Return the inverse hyperbolic sine of x.
# atan atan(x) Return the arc tangent (measured in radians) of x.
# atan2 atan2(y, x) Return the arc tangent (measured in radians) of y/x. Unlike atan(y/x), the signs of both x and y are considered.
# atanh atanh(x) Return the inverse hyperbolic tangent of x.
# ceil ceil(x) Return the ceiling of x as a float. This is the smallest integral value >= x.
# copysign copysign(x, y) Return x with the sign of y.
# cos cos(x) Return the cosine of x (measured in radians).
# cosh cosh(x) Return the hyperbolic cosine of x.
# degrees degrees(x) Convert angle x from radians to degrees.
# erf erf(x) Error function at x.
# erfc erfc(x) Complementary error function at x.
# exp exp(x) Return e raised to the power of x.
# expm1 expm1(x) Return exp(x)-1. This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x.
# fabs fabs(x) Return the absolute value of the float x.
# factorial factorial(x) -> Integral Find x!. Raise a ValueError if x is negative or non-integral.
# floor floor(x) Return the floor of x as a float. This is the largest integral value <= x.
# fmod fmod(x, y) Return fmod(x, y), according to platform C. x % y may differ.
# frexp frexp(x) Return the mantissa and exponent of x, as pair (m, e). m is a float and e is an int, such that x = m * 2.**e. If x is 0, m and e are both 0. Else 0.5 <= abs(m) < 1.0.
# fsum fsum(iterable) Return an accurate floating point sum of values in the iterable. Assumes IEEE-754 floating point arithmetic.
# gamma gamma(x) Gamma function at x.
# hypot hypot(x, y) Return the Euclidean distance, sqrt(x*x + y*y).
# isinf isinf(x) -> bool Check if float x is infinite (positive or negative).
# isnan isnan(x) -> bool Check if float x is not a number (NaN).
# ldexp ldexp(x, i) Return x * (2**i).
# lgamma lgamma(x) Natural logarithm of absolute value of Gamma function at x.
# log log(x[, base]) Return the logarithm of x to the given base. If the base not specified, returns the natural logarithm (base e) of x.
# log10 log10(x) Return the base 10 logarithm of x.
# log1p log1p(x) Return the natural logarithm of 1+x (base e). The result is computed in a way which is accurate for x near zero.
# modf modf(x) Return the fractional and integer parts of x. Both results carry the sign of x and are floats.
# pow pow(x, y) Return x**y (x to the power of y).
# radians radians(x) Convert angle x from degrees to radians.
# sin sin(x) Return the sine of x (measured in radians).
# sinh sinh(x) Return the hyperbolic sine of x.
# sqrt sqrt(x) Return the square root of x.
# tan tan(x) Return the tangent of x (measured in radians).
# tanh tanh(x) Return the hyperbolic tangent of x.
# trunc trunc(x:Real) -> Integral Truncates x to the nearest Integral toward 0. Uses the __trunc__ magic method.
# math.pi = 3.14159265359
# math.e = 2.71828182846
# phi = 1.61803398875
def hex1(win, xoffset, yoffset, scale = 1.0):
    """Draw one hexagon centred at the scaled (xoffset, yoffset).

    The side length is 8 units, the vertical extent 14, and the left/right
    bulge 4, all multiplied by ``scale``.
    """
    ox = xoffset * scale + XCENTER
    oy = yoffset * scale + YCENTER
    vertices = [Point(dx * scale + ox, dy * scale + oy)
                for dx, dy in ((-4, -7), (4, -7), (8, 0),
                               (4, 7), (-4, 7), (-8, 0))]
    Polygon(*vertices).draw(win)
def old_main():
    """Draw a hand-laid honeycomb of hex1() hexagons, layer by layer,
    with a few numbered labels and a red guide line, then wait for a
    mouse click. Each hexagon's position is enumerated explicitly; the
    layer comments record the running cell counts (e.g. "12 -> 19").
    """
    scale = 7.7
    win = GraphWin("hex2", XSCALE, YSCALE)
    win.setCoords(0,0, XSCALE , YSCALE)
    # Hexagon geometry (in hex1's unscaled units):
    # one side is 8 units long
    # height of vertical rectangle is 14
    # bulge to either side is 4
    # 1 -> 1
    # layer 0
    # center
    hex1(win, 0, 0, scale)
    # 6 -> 7
    # layer 1
    # 1.1 upper right -> lastx + 12, lasty + 7
    hex1(win, 12, 7, scale)
    # 1.2 lower right -> lastx + 12, lasty - 7
    hex1(win, 12, -7, scale)
    # 1.3 bottom -> lastx , lasty - 14
    hex1(win, 0, -14, scale)
    # 1.4 lower left -> lastx - 12, lasty - 7
    hex1(win, -12, -7, scale)
    # 1.5 upper left -> lastx - 12, lasty + 7
    hex1(win, -12, 7, scale)
    # 1.6 top -> lastx , lasty + 14
    hex1(win, 0, 14, scale)
    # 12 -> 19
    # layer 2
    # 2.1 one o'clock
    hex1(win, 12, 21, scale)
    # 2.2 two o'clock
    hex1(win, 24, 14, scale)
    # 2.3 three o'clock
    hex1(win, 24, 0, scale)
    # 2.4 four o'clock
    hex1(win, 24, -14, scale)
    # 2.5 five o'clock
    hex1(win, 12, -21, scale)
    # 2.6 six o'clock
    hex1(win, 0, -28, scale)
    # 2.7 seven o'clock
    hex1(win, -12, -21, scale)
    # 2.8 eight o'clock
    hex1(win, -24, -14, scale)
    # 2.9 nine o'clock
    hex1(win, -24, 0, scale)
    # 2.10 ten o'clock
    hex1(win, -24, 14, scale)
    # 2.11 eleven o'clock
    hex1(win, -12, 21, scale)
    # 2.12 twelve o'clock
    hex1(win, 0, 28, scale)
    # 18 -> 37
    # layer 3
    # 3.1 above one o'clock
    hex1(win, 12, 35, scale)
    # 3.2 above two o'clock
    hex1(win, 24, 28, scale)
    # 3.3 shift one o'clock
    hex1(win, 36, 21, scale)
    # 3.4 down from 3
    hex1(win, 36, 7, scale)
    # 3.5 down from 4
    hex1(win, 36, -7, scale)
    # 3.6 down from 5
    hex1(win, 36, -21, scale)
    # 3.7 down from four o'clock
    hex1(win, 24, -28, scale)
    # 3.8 down from five o'clock
    hex1(win, 12, -35, scale)
    # 3.9 bottom
    hex1(win, 0, -42, scale)
    # 3.10 down from seven o'clock
    hex1(win, -12, -35, scale)
    # 3.11 down from eight o'clock
    hex1(win, -24, -28, scale)
    # 3.12
    hex1(win, -36, -21, scale)
    # 3.13 up from 12
    hex1(win, -36, -7, scale)
    # 3.14 up from 13
    hex1(win, -36, 7, scale)
    # 3.15 up from 14
    hex1(win, -36, 21, scale)
    # 3.16 up from ten o'clock
    hex1(win, -24, 28, scale)
    # 3.17 up from eleven o'clock
    hex1(win, -12, 35, scale)
    # 3.18 top
    hex1(win, 0, 42, scale)
    # 24 -> 61
    # layer 4
    # 4.1 above 3.1 must be 40 to 63
    hex1(win, 12, 49, scale)
    # 4.2 above 3.2 must be 40 to 63
    hex1(win, 24, 42, scale)
    # 4.3 above 3.3 must be 40 to 63
    hex1(win, 36, 35, scale)
    # 4.4 must be 44, 45, 46, 47, 60, 61, 62, 63
    hex1(win, 48, 28, scale)
    # 4.5 down from 4.4
    hex1(win, 48, 14, scale)
    # 4.6 down from 5
    hex1(win, 48, 0, scale)
    # 4.7 down from 6
    hex1(win, 48, -14, scale)
    # 4.8 down from 7 must be 9, 11, 25, 27, 41, 43, 57 or 59
    hex1(win, 48, -28, scale)
    # 4.9
    hex1(win, 36, -35, scale)
    # 4.10
    hex1(win, 24, -42, scale)
    # 4.11
    hex1(win, 12, -49, scale)
    # 4.12 bottom
    hex1(win, 0, -56, scale)
    # 4.13
    hex1(win, -12, -49, scale)
    # 4.14
    hex1(win, -24, -42, scale)
    # 4.15 must be 17, 21, 25, 29, 49, 53, 57 or 61
    hex1(win, -36, -35, scale)
    # 4.16
    hex1(win, -48, -28, scale)
    # 4.17
    hex1(win, -48, -14, scale)
    # 4.18
    hex1(win, -48, 0, scale)
    # 4.19
    hex1(win, -48, 14, scale)
    # 4.20
    hex1(win, -48, 28, scale)
    # 4.21
    hex1(win, -36, 35, scale)
    # 4.22
    hex1(win, -24, 42, scale)
    # 4.23
    hex1(win, -12, 49, scale)
    # 4.24 top must be 24 to 31
    hex1(win, 0, 56, scale)
    # Three labelled layer-5 probes follow (cell-number bookkeeping):
    # 5.10 top must be 63 - 1 = 62
    hex1(win, 0, 70, scale)
    t = Text(Point(XCENTER,YCENTER + 70 * scale), "62")
    t.draw(win)
    # 5.20 lower right axis must be 63 - 16 = 47
    hex1(win, 60, -35, scale)
    t = Text(Point(XCENTER + 60 * scale,YCENTER - 35 * scale), "47")
    t.draw(win)
    # 5.30 lower left axis must be 63 - 8 = 55
    hex1(win, -60, -35, scale)
    t = Text(Point(XCENTER - 60 * scale,YCENTER - 35 * scale), "55")
    t.draw(win)
    # Projected cell counts for further layers (not drawn):
    # 30 -> 91
    # layer 5
    # 36 -> 127
    # layer 6
    # 42 -> 169  64, 128, 192, 256, 320
    # layer 6
    # 7 48 -> 217
    # 8 54 -> 261
    # Red guide marker at the centre and a line to the 1.1 hexagon.
    p0 = Point(XCENTER, YCENTER)
    p0.setFill("red")
    p0.setOutline("red")
    p0.draw(win)
    p1 = Point(XCENTER + 12 * scale, YCENTER + 7 * scale)
    l1 = Line(p0, p1)
    l1.setFill("red")
    l1.draw(win)
    t = Text(Point(XCENTER,YCENTER), "0")
    t.draw(win)
    win.getMouse()
    win.close()
if __name__ == "__main__":
    # Run only when executed as a script, not when imported as a module.
    main()
#
#
# __
#/ \
#\__/
#
# ____
# / \
#/ \
#\ /
# \____/
#
# 5
# __ __
# / \
# 4 3
# / 0 \ 000000
# \ /
# 1 2
# \__ __/
# 0
#
# 5
# __ __
# / \
# 4 3
# / 1 \ 000001
# \ /
# 1 2
# \______/
# 0
#
# 5
# __ __
# / \
# 4 3
# / 2 \ 000010
# \ /
# 1 \ 2
# \__ __/
# 0
#
# 5
# __ __
# / \
# 4 3
# / 3 \ 000011
# \ /
# 1 \ 2
# \______/
# 0
#
# 5
# __ __
# / \
# 4 3
# / 4 \ 000100
# \ /
# 1 / 2
# \__ __/
# 0
#
#
# 5
# ______
# / \
# 4 / \ 3
# / 61 \ 111101
# \ /
# 1 / 2
# \______/
# 0
#
# 5
# ______
# / \
# 4 / \ 3
# / 62 \ 111110
# \ /
# 1 \ / 2
# \__ __/
# 0
#
# 5
# ______
# / \
# 4 / \ 3
# / 63 \ 111111
# \ /
# 1 \ / 2
# \______/
# 0
| mit | 1,088,411,825,872,629,100 | 30.33461 | 202 | 0.539352 | false |
ucloud/uai-sdk | uaitrain/operation/pack_docker_image/pytorch_pack_op.py | 1 | 1534 | # Copyright 2017 The UAI-SDK Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from uaitrain.operation.pack_docker_image.base_pack_op import BaseUAITrainDockerImagePackOp
class PytorchUAITrainDockerImagePackOp(BaseUAITrainDockerImagePackOp):
    """Docker-image packing operation specialised for PyTorch training jobs.

    Only sets the architecture tag and overrides the GPU smoke-test
    command; everything else comes from the base pack operation.
    """

    def __init__(self, parser):
        """Register CLI arguments via the base class, then tag the arch."""
        super(PytorchUAITrainDockerImagePackOp, self).__init__(parser)
        self.ai_arch = "pytorch"

    def _gen_gpu_docker_cmd(self, pycmd):
        """Return the nvidia-docker command that runs ``pycmd`` inside the
        user's GPU image with the local test data/output dirs mounted at
        /data/data and /data/output.

        NOTE(review): assumes self.test_data_path / self.test_output_path /
        self.user_gpu_image are set by the base class — confirm there.
        """
        gpu_docker_cmd = "sudo nvidia-docker run -it " + \
                         "-v " + self.test_data_path + ":" + "/data/data " + \
                         "-v " + self.test_output_path + ":" + "/data/output " + \
                         self.user_gpu_image + " " + "/bin/bash -c " + \
                         "\"cd /data && /usr/bin/python " + pycmd + " " + "--num_gpus=1 --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\""
        return gpu_docker_cmd
| apache-2.0 | 1,090,517,671,928,776,800 | 50.133333 | 178 | 0.614081 | false |
Alex9029/awesomeTHz-Python-webapp | www/models.py | 1 | 1470 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Models for user, blog and comment.
'''
__author__ = 'Chalex'
import time, uuid
from transwarp.db import next_id
from transwarp.orm import Model, StringField, BooleanField, FloatField, TextField
class User(Model):
    """User account, mapped to the `users` table."""
    __table__ = 'users'

    # 50-char primary key generated by next_id() at insert time.
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    # Immutable after creation (updatable=False).
    email = StringField(updatable=False, ddl='varchar(50)')
    password = StringField(ddl='varchar(50)')
    admin = BooleanField()
    name = StringField(ddl='varchar(50)')
    image = StringField(ddl='varchar(500)')
    # Unix timestamp set at creation; never updated.
    created_at = FloatField(updatable=False, default=time.time)
class Blog(Model):
    """Blog post, mapped to the `blogs` table.

    Author name/image are denormalised onto the row so posts render
    without an extra user lookup.
    """
    __table__ = 'blogs'

    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    user_id = StringField(updatable=False, ddl='varchar(50)')
    user_name = StringField(ddl='varchar(50)')
    user_image = StringField(ddl='varchar(500)')
    name = StringField(ddl='varchar(50)')
    summary = StringField(ddl='varchar(200)')
    content = TextField()
    created_at = FloatField(updatable=False, default=time.time)
class Comment(Model):
    """Comment on a blog post, mapped to the `comments` table.

    Commenter name/image are denormalised onto the row, matching the
    Blog model. (Cleanup: removed a stray trailing semicolon on the
    user_name field.)
    """
    __table__ = 'comments'

    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    blog_id = StringField(updatable=False, ddl='varchar(50)')
    user_id = StringField(updatable=False, ddl='varchar(50)')
    user_name = StringField(ddl='varchar(50)')
    user_image = StringField(ddl='varchar(500)')
    content = TextField()
    created_at = FloatField(updatable=False, default=time.time)
| gpl-2.0 | -2,106,225,807,252,490,500 | 29.645833 | 81 | 0.710884 | false |
eubr-bigsea/tahiti | migrations/versions/c6x2kllv52os_sklearn_operations.py | 1 | 5027 | # -*- coding: utf-8 -*-
"""Adding Scikit-learn Operations
Revision ID: c6x2kllv52os
Revises: bca9291ljsj5
Create Date: 2018-06-14 10:42:09.555626
"""
from alembic import context
from alembic import op
from sqlalchemy import String, Integer, Text
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import table, column,text
# revision identifiers, used by Alembic.
revision = 'c6x2kllv52os'
down_revision = 'bca9291ljsj5'
branch_labels = None
depends_on = None
def _insert_platform():
    """Insert the scikit-learn platform row (id 4, enabled)."""
    platform_table = table(
        'platform',
        column('id', Integer),
        column('slug', String),
        column('enabled', Integer),
        column('icon', String))
    rows = [
        {'id': 4, 'slug': 'scikit-learn', 'enabled': 1, 'icon': ''},
    ]
    op.bulk_insert(platform_table, rows)
def _insert_platform_translation():
    """Insert EN and PT translation rows for the scikit-learn platform."""
    translation_table = table(
        'platform_translation',
        column('id', Integer),
        column('locale', String),
        column('name', String),
        column('description', String))
    rows = [
        {'id': 4, 'locale': 'en', 'name': 'Scikit-learn',
         'description': 'Scikit-learn 0.19.1'},
        {'id': 4, 'locale': 'pt', 'name': 'Scikit-learn',
         'description': 'Scikit-learn 0.19.1'},
    ]
    op.bulk_insert(translation_table, rows)
def _add_operations_platform_from_spark():
    """Associate the existing Spark operations with the scikit-learn
    platform (id 4) by bulk-inserting (operation_id, 4) pairs into
    operation_platform. Operation ids reference rows inserted by
    earlier migrations.
    """
    tb = table(
        'operation_platform',
        column('operation_id', Integer),
        column('platform_id', Integer))

    columns = ('operation_id', 'platform_id')
    data = [
        (3001, 4),  # data-reader
        (30, 4),  # data-writer
        (24, 4),  # add-columns
        (12, 4),  # add-rows
        (15, 4),  # aggregation
        (21, 4),  # clean-missing
        (37, 4),  # difference
        (3014, 4),  # drop
        (5, 4),  # filter
        (16, 4),  # join
        (6, 4),  # projection
        (23, 4),  # remove-duplicated-rows
        (27, 4),  # replace-value
        (28, 4),  # sample
        (13, 4),  # set-intersection
        (32, 4),  # sort
        (17, 4),  # split
        (7, 4),  # transformation
        (25, 4),  # comment
        (3031, 4),  # read-shapefile
        (55, 4),  # within

        (41, 4),  # Feature indexer
        (92, 4),  # Max-abs scaler
        (91, 4),  # Min-max scaler
        (90, 4),  # Standard scaler
        (75, 4),  # One Hot Encoder
        (41, 4),  # Feature Assembler (duplicate of 41 above, kept as-is)
        (95, 4),  # PCA

        (3026, 4),  # Load model
        (3027, 4),  # Save model
        (42, 4),  # Apply model

        (73, 4),  # Regression Model
        (78, 4),  # Random Forest Regressor
        (8, 4),  # Linear Regression
        (74, 4),  # IsotonicRegression

        (49, 4),  # tokenizer
        (50, 4),  # remove-stop-words
        (51, 4),  # generate-n-grams
        (52, 4),  # word-to-vector

        (10, 4),  # clustering-model
        (56, 4),  # gaussian-mixture-clustering
        (29, 4),  # k-means-clustering
        (48, 4),  # lda-clustering

        (1, 4),  # classification-model
        (4, 4),  # naive-bayes-classifier
        (9, 4),  # svm-classification
        (3005, 4),  # knn-classifier
        (3008, 4),  # logistic-regression

        (3, 4),  # fp-growth
        (85, 4),  # association rule
        (86, 4),  # sequence mining

        (26, 4),  # publish-as-visualization
        (35, 4),  # table-visualization
        (68, 4),  # line-chart
        (69, 4),  # bar-chart
        (70, 4),  # pie-chart
        (71, 4),  # area-chart
        (80, 4),  # scatter-plot
        (81, 4),  # summary-statistics
        (88, 4),  # map-chart
        (89, 4),  # donut-chart
    ]
    rows = [dict(list(zip(columns, row))) for row in data]
    op.bulk_insert(tb, rows)
# (apply, revert) pairs: upgrade() runs element 0 of each pair in order;
# downgrade() runs element 1 of each pair in reverse order. Each element
# may be a callable, a raw SQL string, or a list of raw SQL strings.
all_commands = [
    (_insert_platform, 'DELETE FROM platform WHERE id = 4' ),
    (_insert_platform_translation,
        'DELETE FROM platform_translation WHERE id = 4'),
    (_add_operations_platform_from_spark,
        'DELETE FROM operation_platform WHERE platform_id = 4'),
]
def upgrade():
    """Run every upgrade step in all_commands inside one session,
    rolling back and re-raising on any failure.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()
    try:
        for cmd in all_commands:
            # Step may be raw SQL, a list of raw SQL, or a callable.
            if isinstance(cmd[0], str):
                connection.execute(cmd[0])
            elif isinstance(cmd[0], list):
                for row in cmd[0]:
                    connection.execute(row)
            else:
                cmd[0]()
    except BaseException:
        # Explicit equivalent of the previous bare `except:` (still
        # covers KeyboardInterrupt/SystemExit); error is re-raised.
        session.rollback()
        raise
    session.commit()
def downgrade():
    """Run every revert step in all_commands, in reverse order, inside one
    session; roll back and re-raise on any failure.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()
    try:
        for cmd in reversed(all_commands):
            # Revert step may be raw SQL, a list of raw SQL, or a callable.
            if isinstance(cmd[1], str):
                connection.execute(cmd[1])
            elif isinstance(cmd[1], list):
                for row in cmd[1]:
                    connection.execute(row)
            else:
                cmd[1]()
    except BaseException:
        # Explicit equivalent of the previous bare `except:`; re-raised.
        session.rollback()
        raise
    session.commit()
| apache-2.0 | -6,946,335,271,519,121,000 | 25.046632 | 64 | 0.527153 | false |
oVirt/imgbased | tests/testStorage.py | 1 | 1632 | #!/usr/bin/env python
# vim: et ts=4 sw=4 sts=4
import unittest
import logging
import glob
log = logging.info
def dd(N, count=100):
    """Create /var/tmp/<N>.bin filled with `count` MiB of zeros via dd(1).

    Fix: the inner ``from sh import dd`` shadowed this function's own
    name inside its body; the import is now aliased.
    """
    from sh import dd as sh_dd
    sh_dd("if=/dev/zero", "of=/var/tmp/%s.bin" % N, "bs=1M",
          "count=%d" % count)
def trial(num_bins=1, size_bin=500, after_rm=None, max_delta=0.05):
    """Fill /var/tmp with `num_bins` files of `size_bin` MiB, delete them,
    run the `after_rm` callback (e.g. fstrim or a sleep), and assert that
    imgbase free space returned to within `max_delta` of its initial value.

    NOTE(review): calling without `after_rm` raises TypeError (None is
    called) — the default is never used by the tests below.
    """
    from sh import imgbase, rm, ls

    def img_free():
        # Free space as reported by `imgbase layout --free-space`.
        return float(imgbase("layout", "--free-space"))

    # Rebind to a debug-enabled command; img_free() picks this up via the
    # enclosing scope.
    imgbase = imgbase.bake("--debug")

    a = img_free()  # free space before any files exist
    [dd(B, size_bin) for B in iter(range(0, num_bins))]
    print("Files which were created")
    print(ls("-shal", *glob.glob("/var/tmp/*.bin")))
    b = img_free()  # free space while the files exist
    print("Files are getting removed")
    rm("-f", *glob.glob("/var/tmp/*.bin"))
    after_rm()
    c = img_free()  # free space after removal and after_rm()
    ratio = a / c
    print(a, b, c, ratio)
    delta = 1 - ratio
    assert delta < max_delta, \
        "Delta %s is larger than %s" % (delta, max_delta)
@unittest.skip("Needs refactoring")
class TestFS(unittest.TestCase):
def test_fstrim(self, count=1, size=100):
# FIXME improve by splitting into several test cases
def after_rm():
from sh import fstrim
fstrim("-v", "/")
trial(count, size, after_rm)
def test_fstrim_many(self):
self.test_fstrim(100, 1)
def test_discard(self, count=1, size=100):
def after_rm():
from sh import sleep
print("We are testing the auto-discard " +
"functionality of the fs")
sleep("10")
trial(count, size, after_rm)
def test_discard_many(self):
self.test_discard(100, 1)
| gpl-2.0 | 6,680,292,225,265,847,000 | 22.652174 | 67 | 0.571691 | false |
shiroyuki/passerine | test/ft/db/test_uow_association_many_to_many.py | 1 | 5858 | from ft.db.dbtestcase import DbTestCase
from passerine.db.session import Session
from passerine.db.common import ProxyObject
from passerine.db.uow import Record
from passerine.db.entity import entity
from passerine.db.manager import Manager
from passerine.db.mapper import link, CascadingType, AssociationType
from passerine.db.metadata.helper import EntityMetadataHelper
@entity('members')
class Member(object):
    """Entity persisted in the `members` collection; holds only a name."""
    def __init__(self, name):
        self.name = name
@link(
    mapped_by='members',
    target=Member,
    association=AssociationType.MANY_TO_MANY,
    cascading=[CascadingType.DELETE, CascadingType.PERSIST]
)
@entity('groups')
class Group(object):
    """Entity in the `groups` collection with a many-to-many link to Member.

    Bug fix: the previous ``members=[]`` default was a shared mutable
    default argument — every Group created without an explicit member
    list aliased the same list object.
    """
    def __init__(self, name, members=None):
        self.name = name
        # Fresh list per instance; behaves like the old [] default
        # without sharing state across instances.
        self.members = members if members is not None else []
class TestFunctional(DbTestCase):
    """Functional tests for unit-of-work handling of the Group<->Member
    many-to-many association: loading, cascading persist/delete, and the
    bookkeeping rows in the `groups_members` association collection.
    """
    def setUp(self):
        # Fresh DB with three groups, four members and six association
        # rows (see __data_provider / __reset_associations).
        self._setUp()
        self._reset_db(self.__data_provider())
        self.__reset_associations()
    def test_load(self):
        """Loading a group lazily resolves its member proxies in order."""
        groups = self.session.collection(Group)
        members = self.session.collection(Member)
        group_a = groups.filter_one({'name': 'group a'})
        member_d = members.filter_one({'name': 'member d'})
        self.assertTrue(group_a.members._loaded, 'The IDs should be loaded by UOW.')
        self.assertEqual(2, len(group_a.members))
        self.assertTrue(group_a.members._loaded)
        self.assertEqual('member a', group_a.members[0].name)
        self.assertEqual('member b', group_a.members[1].name)
    def test_with_new_entites(self):
        """Posting a new group cascades persistence to its new members
        and creates one association row per member."""
        groups = self.session.collection(Group)
        members = self.session.collection(Member)
        associations = self.__association_to_members()
        group = groups.new(name='From Up On Poppy Hill')
        umi = members.new(name='Umi')
        shun = members.new(name='Shun')
        shiro = members.new(name='Shiro')
        group.members.extend([umi, shun, shiro])
        groups.post(group)
        self.assertEqual(4, len(groups))
        self.assertEqual(7, len(members))
        self.assertEqual(9, len(associations))
    def test_commit(self):
        """Deleting a group and editing member lists updates the
        association rows on flush (6 - 2 for group c, -1 pop, +2 appends
        minus the duplicate bookkeeping = 5)."""
        groups = self.session.collection(Group)
        members = self.session.collection(Member)
        associations = self.__association_to_members()
        group_a = groups.filter_one({'name': 'group a'})
        group_b = groups.filter_one({'name': 'group b'})
        group_c = groups.filter_one({'name': 'group c'})
        assert group_c, 'Group C should not be null.'
        member_d = members.filter_one({'name': 'member d'})
        groups.delete(group_c)
        group_a.members.append(member_d)
        group_b.members.append(member_d)
        group_a.members.pop(0)
        self.session.persist(group_a, group_b)
        self.session.flush()
        self.assertEqual(2, len(groups))
        self.assertEqual(5, len(associations))
    def test_commit_with_new_element_on_explicit_persistence_and_repository(self):
        """Same scenario via repository put(): renamed member, brand-new
        member, group deletion — association count still ends at 5."""
        groups = self.session.collection(Group)
        members = self.session.collection(Member)
        associations = self.__association_to_members()
        group_a = groups.filter_one({'name': 'group a'})
        group_b = groups.filter_one({'name': 'group b'})
        group_c = groups.filter_one({'name': 'group c'})
        member_d = members.filter_one({'name': 'member d'})
        member_d.name = 'extra member'
        member_e = members.new(name='member e')
        groups.delete(group_c)
        group_a.members.append(member_d)
        group_a.members.pop(0)
        groups.put(group_a)
        group_b.members.append(member_e)
        groups.put(group_b)
        self.assertEqual(2, len(groups))
        self.assertEqual(5, len(associations))
    def test_commit_with_new_element_on_explicit_persistence_and_session(self):
        """Same scenario via session.persist() + flush()."""
        groups = self.session.collection(Group)
        members = self.session.collection(Member)
        associations = self.__association_to_members()
        group_a = groups.filter_one({'name': 'group a'})
        group_b = groups.filter_one({'name': 'group b'})
        group_c = groups.filter_one({'name': 'group c'})
        member_d = members.filter_one({'name': 'member d'})
        member_d.name = 'extra member'
        member_e = members.new(name='member e')
        groups.delete(group_c)
        group_a.members.append(member_d)
        group_b.members.append(member_e)
        group_a.members.pop(0)
        self.session.persist(group_a)
        self.session.persist(group_b)
        self.session.flush()
        self.assertEqual(2, len(groups))
        self.assertEqual(5, len(associations))
    def __association_to_members(self):
        # Collection backing the Group->members association class.
        return self.session.collection(EntityMetadataHelper.extract(Group).relational_map['members'].association_class.cls)
    def __data_provider(self):
        # Fixture rows loaded by DbTestCase._reset_db().
        return [
            {
                'class': Member,
                'fixtures': [
                    {'_id': 1, 'name': 'member a'},
                    {'_id': 2, 'name': 'member b'},
                    {'_id': 3, 'name': 'member c'},
                    {'_id': 4, 'name': 'member d'}
                ]
            },
            {
                'class': Group,
                'fixtures': [
                    {'_id': 1, 'name': 'group a'},
                    {'_id': 2, 'name': 'group b'},
                    {'_id': 3, 'name': 'group c'}
                ]
            }
        ]
    def __reset_associations(self):
        # Seed the groups_members join collection directly: group 1 has
        # members 1,2; group 2 has 2,3; group 3 has 1,3.
        associations = [
            (1, 1),
            (1, 2),
            (2, 2),
            (2, 3),
            (3, 1),
            (3, 3)
        ]
        api = self.driver.collection('groups_members')
        api.remove()
        for origin, destination in associations:
            api.insert({
                'origin': origin,
                'destination': destination
            })
| mit | -367,212,113,049,477,570 | 29.670157 | 123 | 0.573233 | false |
travcunn/sic_assembler | tests.py | 1 | 8705 | import unittest
import sic_assembler.assembler as assembler
import sic_assembler.instructions as instructions
from sic_assembler.assembler import Assembler, SourceLine
from sic_assembler.instructions import Format
from sic_assembler.instructions import Format1, Format2, Format3, Format4
import sic_assembler.records as records
class TestFieldTypes(unittest.TestCase):
    """
    Test each of the methods for parsing the source program.

    Each predicate gets one positive and one negative fixture taken from
    typical SIC/XE assembly syntax.
    """
    def test_comment(self):
        # Comment lines start with a '.'.
        self.assertTrue(assembler.comment(". SUBROUTINE TO WRITE RECORD FROM BUFFER"))
        self.assertFalse(assembler.comment("WLOOP CLEAR X"))
    def test_blank_line(self):
        self.assertTrue(assembler.blank_line(" "))
        self.assertFalse(assembler.blank_line("COPY START 0"))
    def test_indexed_addressing(self):
        # Indexed operands are ",X" with no intervening space.
        self.assertTrue(instructions.indexed("BUFFER,X"))
        self.assertFalse(instructions.indexed("BUFFER, X"))
    def test_indirect_addressing(self):
        # Indirect operands are prefixed with '@'.
        self.assertTrue(instructions.indirect("@RETADR"))
        self.assertFalse(instructions.indirect("RETADR"))
    def test_immediate_operand(self):
        # Immediate operands are prefixed with '#'.
        self.assertTrue(instructions.immediate("#3355"))
        self.assertFalse(instructions.immediate("ZERO"))
    def test_extended(self):
        # Format-4 (extended) mnemonics are prefixed with '+'.
        self.assertTrue(instructions.extended("+LDT"))
        self.assertFalse(instructions.extended("LDT"))
    def test_literal(self):
        # Literals are prefixed with '='.
        self.assertTrue(instructions.literal("=X'05'"))
        self.assertFalse(instructions.literal("X"))
class TestInstructionGeneration(unittest.TestCase):
    """
    Test instruction generation for each instruction format.

    Each test parses a single assembly line, builds the matching Format
    object, and checks the hex object code in generate()'s third element.
    Expected values follow the SIC/XE examples in Beck's textbook.
    """
    def test_format_1(self):
        line = "TIO"
        source_line = SourceLine.parse(line, 1)
        instruction = Format1(mnemonic=source_line.mnemonic)
        results = instruction.generate()
        self.assertTrue(results[2] == "F8")
    def test_format_2_one_register(self):
        line = "TIXR T"
        source_line = SourceLine.parse(line, 1)
        instruction = Format2(mnemonic=source_line.mnemonic,
                              r1=source_line.operand, r2=None)
        results = instruction.generate()
        self.assertTrue(results[2] == "B850")
    def test_format_2_two_registers(self):
        line = "COMPR A,S"
        source_line = SourceLine.parse(line, 1)
        r1, r2 = source_line.operand.split(',')
        instruction = Format2(mnemonic=source_line.mnemonic,
                              r1=r1, r2=r2)
        results = instruction.generate()
        self.assertTrue(results[2] == "A004")
    def test_format_3_simple(self):
        symtab = dict()
        # add a symbol to the symbol table for lookup
        symtab['RETADR'] = '30'
        line = "FIRST STL RETADR"
        source_line = SourceLine.parse(line, 1)
        source_line.location = int('0000', 16)
        instruction = Format3(base=None, symtab=symtab,
                              source_line=source_line)
        results = instruction.generate()
        self.assertTrue(results[2] == "17202D")
    def test_format_3_immediate(self):
        symtab = dict()
        # add a symbol to the symbol table for lookup
        symtab['LENGTH'] = '33'
        line = "LDB #LENGTH"
        source_line = SourceLine.parse(line, 2)
        source_line.location = int('0003', 16)
        instruction = Format3(base=None, symtab=symtab,
                              source_line=source_line)
        results = instruction.generate()
        self.assertTrue(results[2] == "69202D")
    def test_format_3_base_relative_with_indexing(self):
        symtab = dict()
        # add a symbol to the symbol table for lookup
        symtab['BUFFER'] = '36'
        line = "STCH BUFFER,X"
        source_line = SourceLine.parse(line, 1)
        source_line.location = int('104E', 16)
        # base register contents for base-relative displacement
        base = hex(51)
        instruction = Format3(base=base, symtab=symtab,
                              source_line=source_line)
        results = instruction.generate()
        self.assertTrue(results[2] == "57C003")
    def test_format_4_simple(self):
        symtab = dict()
        # add a symbol to the symbol table for lookup
        symtab['RDREC'] = '1036'
        line = "+JSUB RDREC"
        source_line = SourceLine.parse(line, 4)
        source_line.location = int('0006', 16)
        instruction = Format4(symtab=symtab, source_line=source_line)
        results = instruction.generate()
        self.assertTrue(results[2] == "4B101036")
    def test_format_4_immediate_value(self):
        symtab = dict()
        line = "+LDT #4096"
        source_line = SourceLine.parse(line, 1)
        source_line.location = int('103C', 16)
        instruction = Format4(symtab=symtab, source_line=source_line)
        results = instruction.generate()
        self.assertTrue(results[2] == "75101000")
    def test_format_4_immediate_lookup_value(self):
        symtab = dict()
        # add a symbol to the symbol table for lookup
        symtab['MAXLEN'] = '1000'
        line = "+LDT #MAXLEN"
        source_line = SourceLine.parse(line, 1)
        source_line.location = int('103C', 16)
        instruction = Format4(symtab=symtab, source_line=source_line)
        results = instruction.generate()
        self.assertTrue(results[2] == "75101000")
class TestAssemblyFile(unittest.TestCase):
    """
    Test simple programs and check the generated objects and records.
    """
    def setUp(self):
        # Assemble the example program from page 58 in the book; tests
        # inspect the assembler's generated_objects afterwards.
        with open('test-programs/page58.asm', 'r') as f:
            self.a = Assembler(f)
        self.a.first_pass()
        self.a.second_pass()
    def test_output_objects(self):
        """Every expected object-code word appears in the generated output."""
        generated_code = []
        for x in self.a.generated_objects:
            # Format instances still need rendering; plain tuples already
            # carry their object code at index 2.
            if isinstance(x[1], Format):
                generated_code.append(x[1].generate()[2].upper())
            else:
                generated_code.append(x[1][2].upper())
        # Fix: '454F46' was a bytes literal (b'454F46') in an otherwise
        # all-str list.  Under Python 2 that is equivalent, but it was
        # inconsistent and would silently fail the membership test under
        # Python 3, so it is normalized to a str.
        expected_code = ['17202D', '69202D', '4B101036', '032026', '290000',
                         '332007', '4B10105D', '3F2FEC', '032010', '0F2016',
                         '010003', '0F200D', '4B10105D', '3E2003', '454F46',
                         'B410', 'B400', 'B440', '75101000', 'E32019',
                         '332FFA', 'DB2013', 'A004', '332008', '57C003',
                         'B850', '3B2FEA', '134000', '4F0000', 'F1', 'B410',
                         '774000', 'E32011', '332FFA', '53C003', 'DF2008',
                         'B850', '3B2FEF', '4F0000', '05']
        # all() short-circuits exactly like the old manual break loop.
        matches = all(output in generated_code for output in expected_code)
        self.assertTrue(matches)
class TestSimpleAssemblySyntaxChanges(TestAssemblyFile):
    """
    Test a simple program with syntax changes and check the output.

    Inherits test_output_objects from TestAssemblyFile; only the input
    program (and therefore setUp) differs.
    """
    def setUp(self):
        from sic_assembler.errors import OpcodeLookupError
        # test the object code generation from page 58 in the book
        with open('test-programs/page58-syntax-changes.asm', 'r') as f:
            self.a = Assembler(f)
        try:
            self.a.first_pass()
        except OpcodeLookupError as e:
            # Report the failed opcode lookup but continue; second_pass
            # below will surface any resulting problems.  (Python 2
            # print statement -- this module is Python 2 only.)
            print e.details
        self.a.second_pass()
class TestRecordGeneration(unittest.TestCase):
    """Checks for the H/T/E object-program record generators."""
    def test_header_record(self):
        """gen_header produces a correctly padded H record."""
        generated = records.gen_header('COPY', 4096, 4218)
        self.assertTrue(generated == "HCOPY 00100000107A")
    def test_text_record(self):
        """gen_text splits the page-58 program into the expected T records."""
        with open('test-programs/page58.asm', 'r') as f:
            assembler = Assembler(f)
        assembler.first_pass()
        assembler.second_pass()
        generated = records.gen_text(assembler.generated_objects)
        expected = ['T0000001D17202D69202D4B1010360320262900003320074B10105D3F2FEC032010',
                    'T00001D1D0F20160100030F200D4B10105D3E2003454F46B410B400B44075101000',
                    'T0010401FE32019332FFADB2013A00433200857C003B8503B2FEA1340004F0000F1B410',
                    'T00105F18774000E32011332FFA53C003DF2008B8503B2FEF4F000005']
        # Compare the four records positionally, as the original did.
        records_match = all(generated[i] == expected[i] for i in range(4))
        self.assertTrue(records_match)
    def test_end_record(self):
        """gen_end encodes the program start address into an E record."""
        generated = records.gen_end(4096)
        self.assertTrue(generated == "E001000")
if __name__ == '__main__':
    # Run the whole suite when this module is executed directly.
    unittest.main()
| mit | 2,111,154,333,369,349,000 | 31.360595 | 96 | 0.585411 | false |
MizzouCMMSGroup1/ChromosomeModel | src/conjugate_gradient_plot.py | 1 | 4967 | # some imports we use
import numpy
import random
import math
from scipy import optimize
# matplot lib
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
# our data dimension (*xyz), we are using megabase resolution
NUMBER_CONTACTS = 157
NUMBER_CONTACTS_POINTS = NUMBER_CONTACTS * 3
# bring in our cleaned data
# NOTE: loaded at import time; importing this module requires the CSV
# to be present in the working directory.
IF_FILENAME = "if_data_stripped.csv"
if_data_raw = numpy.loadtxt(IF_FILENAME, delimiter=',')
# used to normalize our IF weights
IF_TOTAL = numpy.sum(if_data_raw)
# chromosome 7 weighting scores from paper, megabase
W1 = 1.0
W2 = 1.5
W3 = 1.5
W4 = 1.5
# micrometers -- distance thresholds used by the scoring terms below
d_sq_min = 0.2
da_sq_max = 1.8
d_max = 4.5
d_sq_c = 7.0
d_sq_max = d_max * d_max # not defined in paper?
# some globals: flat [x0, y0, z0, x1, y1, z1, ...] coordinate vector
coordinate_data = numpy.zeros(NUMBER_CONTACTS_POINTS)
def setup_testing(number_contacts=157):
    """Shrink the problem to the first `number_contacts` contacts by
    rebinding the module globals used by the scoring functions."""
    # 157 is our default/actual size of c7
    global if_data, IF_TOTAL
    global NUMBER_CONTACTS, NUMBER_CONTACTS_POINTS
    NUMBER_CONTACTS = number_contacts
    NUMBER_CONTACTS_POINTS = NUMBER_CONTACTS * 3
    if_data = if_data_raw[0:number_contacts,0:number_contacts]
    IF_TOTAL = numpy.sum(if_data)
    # NOTE(review): this assignment creates a *local* (coordinate_data is
    # not in the global statements above), so it has no effect and is
    # discarded; presumably it was meant to reset the module-level array
    # (and arguably to NUMBER_CONTACTS_POINTS zeros) -- confirm intent.
    coordinate_data = numpy.zeros(number_contacts)
def init_model(bounding_box=0.5):
    """Fill the global coordinate vector with uniform noise in
    (-bounding_box/2, bounding_box/2] and return it."""
    global coordinate_data
    for idx in range(NUMBER_CONTACTS_POINTS):
        offset = 0.5 - random.random()
        coordinate_data[idx] = bounding_box * offset
    return coordinate_data
def print_model():
    """Debug helper: dump the current coordinate vector to stdout."""
    global coordinate_data
    print coordinate_data
def max_if(i, j):
    """Larger of the two (possibly asymmetric) IF matrix entries for pair (i, j)."""
    forward = if_data[i, j]
    backward = if_data[j, i]
    return forward if forward >= backward else backward
def distance_sq(i,j):
    """Squared Euclidean distance between two xyz triples read from the
    flat coordinate vector at offsets i and j."""
    # NOTE(review): contacts are packed three values per contact, so
    # contact k's coordinates presumably live at [3*k, 3*k+1, 3*k+2];
    # indexing with [i, i+1, i+2] reads overlapping windows instead.
    # main() uses the same convention, so confirm intent before changing.
    a = [coordinate_data[i], coordinate_data[i+1], coordinate_data[i+2]]
    b = [coordinate_data[j], coordinate_data[j+1], coordinate_data[j+2]]
    return (a[0] - b[0])**2 + (a[1] - b[1])**2 + (a[2] - b[2])**2
def contact_score():
    '''
    minimize the distance (but keep above min_threshold) between non-sequential pairs that have affinity
    '''
    global IF_TOTAL
    score = 0
    for i in range(0, NUMBER_CONTACTS):
        for j in range(0, NUMBER_CONTACTS):
            # Skip self-pairs and sequential neighbours (|i-j| == 1);
            # neighbours are scored by pair_smoothing() instead.
            # Bug fix: the original condition used `or`, which is true
            # for *every* (i, j) pair (when i == j, abs(i-j) != 1 also
            # holds), so the filter never excluded anything.
            if i != j and abs(i - j) != 1:
                d_sq_ij = distance_sq(i, j)
                # First term rewards being inside the contact radius
                # d_sq_c, weighted by interaction frequency; second term
                # penalises collapsing below d_sq_min.
                score += W1 * math.tanh(d_sq_c - d_sq_ij) * max_if(i, j) + W2 * math.tanh(d_sq_ij - d_sq_min) / IF_TOTAL
    return score
def noncontact_score():
    '''
    maximize the distance (but keep below max_threshold) between non-sequential pairs that don't have affinity
    '''
    global IF_TOTAL
    score = 0
    for i in range(0, NUMBER_CONTACTS):
        for j in range(0, NUMBER_CONTACTS):
            # Skip self-pairs and sequential neighbours, mirroring
            # contact_score().  Bug fix: was `or`, a tautology that
            # never filtered any pair.
            if i != j and abs(i - j) != 1:
                d_sq_ij = distance_sq(i, j)
                # Keep non-contacting pairs below d_sq_max but pushed
                # beyond the contact radius d_sq_c.
                score += W3 * math.tanh(d_sq_max - d_sq_ij) / IF_TOTAL + W4 * math.tanh(d_sq_ij - d_sq_c) / IF_TOTAL
    return score
def pair_smoothing():
    """Score term for sequential neighbours (|i - j| == 1): hold them
    inside da_sq_max but above d_sq_min, weighted by their IF value so
    the optimizer prioritises the chain backbone."""
    global IF_TOTAL
    total = 0
    for i in range(NUMBER_CONTACTS):
        for j in range(NUMBER_CONTACTS):
            if abs(i - j) != 1:
                continue
            d_sq = distance_sq(i, j)
            upper = W1 * max_if(i, j) * math.tanh(da_sq_max - d_sq)
            lower = W2 * math.tanh(d_sq - d_sq_min) / IF_TOTAL
            total += upper + lower
    return total
def model_score():
    """Total objective: contact + non-contact + neighbour-smoothing terms."""
    parts = (contact_score(), noncontact_score(), pair_smoothing())
    return sum(parts)
# shim between skeleton and cg code
iter_tracker = 0
old_score = 0
def f(x, *args):
    """Objective wrapper for scipy.optimize: copy the flat parameter
    vector into the module-level coordinate array, then return the
    model score.  Call count and last score are tracked in globals."""
    global iter_tracker, old_score
    global coordinate_data
    iter_tracker += 1
    for idx in range(NUMBER_CONTACTS_POINTS):
        coordinate_data[idx] = x[idx]
    score = model_score()
    old_score = score
    return score
def main():
    """Run the optimisation on a reduced 50-contact problem and plot the
    resulting 3D structure with matplotlib."""
    global iter_tracker
    setup_testing(50)
    # Toggle between conjugate gradient and simulated annealing.
    TESTING_CONGUGATE_GRADIENT = True
    #TESTING_CONGUGATE_GRADIENT = False
    random_start = init_model().copy()
    args = []
    opts = {'maxiter' : 100, 'disp' : True }
    results = 0
    if (TESTING_CONGUGATE_GRADIENT):
        results = optimize.minimize(f, random_start, args=args, method='CG', options=opts)
    else:
        results = optimize.minimize(f, random_start, args=args, method='Anneal', options=opts)
    print "internal iter: ", iter_tracker
    print results
    print "saving final contact xyz coordinates"
    x = numpy.zeros(NUMBER_CONTACTS)
    y = numpy.zeros(NUMBER_CONTACTS)
    z = numpy.zeros(NUMBER_CONTACTS)
    # NOTE(review): this reads results.x[i], [i+1], [i+2] for contact i,
    # i.e. overlapping windows rather than stride-3 triples (3*i, 3*i+1,
    # 3*i+2).  It matches distance_sq()'s convention, but confirm before
    # trusting the plotted coordinates.
    for i in range(0,NUMBER_CONTACTS):
        x[i] = results.x[i]
        y[i] = results.x[i+1]
        z[i] = results.x[i+2]
        print results.x[i], results.x[i+1], results.x[i+2]
    mpl.rcParams['legend.fontsize'] = 10
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.plot(x, y, z, label='3d plot of generated contacts')
    ax.legend()
    plt.show()
if __name__ == '__main__':
    # Entry point when run as a script.
    main()
| gpl-2.0 | 1,627,960,150,278,846,200 | 25.280423 | 122 | 0.624723 | false |
wrgeorge1983/Pystol | _iactive.py | 1 | 15584 | #! /usr/bin/python
"""
Created on Mar 26, 2015
@author: William.George
Credit to /r/Python for the non-wasteful and sensible handling of oldInit and
newInit
"""
# Standard Library Imports
import os
import sys
import json
from pprint import pprint # Not used here, but we want it in interactive mode.
import time
from subprocess import Popen
from collections import defaultdict
import multiprocessing.pool
import collections
sys.path += [os.getcwd()]
# Imports from other modules in this project
import sshutil
# Imports from third party modules
import phpipam
import ipaddress
import openpyxl
# Placeholder defaults; real values live in the optional (uncommitted)
# iactiveconstants module imported below.
DEFAULT_SW_IP = '10.10.10.10'
DEFAULT_HOST_IP = '10.10.10.10'
DEFAULT_IPAM_HOST = 'ipam'
DEFAULT_IPAM_API_ID = 'ipam'
DEFAULT_IPAM_API_KEY = 'FFFFF'
try:
    # Override the placeholders with site-specific values when the
    # local constants module is available.
    import iactiveconstants
    DEFAULT_SW_IP = iactiveconstants.DEFAULT_SW_IP
    DEFAULT_HOST_IP = iactiveconstants.DEFAULT_HOST_IP
    DEFAULT_IPAM_HOST = iactiveconstants.DEFAULT_IPAM_HOST
    DEFAULT_IPAM_API_ID = iactiveconstants.DEFAULT_IPAM_API_ID
    DEFAULT_IPAM_API_KEY = iactiveconstants.DEFAULT_IPAM_API_KEY
except ImportError:
    # No local overrides present; keep the placeholders.
    pass
# File class from user fdb on StackOverflow
# http://stackoverflow.com/questions/5896079/python-head-tail-and-backward-read-by-lines-of-a-text-file
class File(file):
    """ An helper class for file reading """
    # NOTE: subclasses the Python 2-only `file` builtin; this class (and
    # the xrange call below) will not run under Python 3.
    def __init__(self, *args, **kwargs):
        super(File, self).__init__(*args, **kwargs)
        # Chunk size for the backwards-scanning helpers below.
        self.BLOCKSIZE = 4096
    def head(self, lines_2find=1):
        """Return the first `lines_2find` lines of the file."""
        self.seek(0) # Rewind file
        return [super(File, self).next() for x in xrange(lines_2find)]
    def tail(self, lines_2find=1):
        """Return the last `lines_2find` lines of the file."""
        self.seek(0, 2) # Go to end of file
        bytes_in_file = self.tell()
        lines_found, total_bytes_scanned = 0, 0
        # Scan backwards block by block until enough newlines are seen
        # (one extra, so the first returned line is complete).
        while (lines_2find + 1 > lines_found and
               bytes_in_file > total_bytes_scanned):
            byte_block = min(
                self.BLOCKSIZE,
                bytes_in_file - total_bytes_scanned)
            self.seek(-(byte_block + total_bytes_scanned), 2)
            total_bytes_scanned += byte_block
            lines_found += self.read(self.BLOCKSIZE).count('\n')
        self.seek(-total_bytes_scanned, 2)
        line_list = list(self.readlines())
        return line_list[-lines_2find:]
    def backward(self):
        """Yield the file's lines one at a time, last line first."""
        self.seek(0, 2) # Go to end of file
        blocksize = self.BLOCKSIZE
        last_row = ''
        while self.tell() != 0:
            try:
                self.seek(-blocksize, 1)
            except IOError:
                # Less than a full block remains; shrink to what's left.
                blocksize = self.tell()
                self.seek(-blocksize, 1)
            block = self.read(blocksize)
            self.seek(-blocksize, 1)
            rows = block.split('\n')
            # The block's last fragment joins the partial line carried
            # over from the previously processed (later) block.
            rows[-1] = rows[-1] + last_row
            while rows:
                last_row = rows.pop(-1)
                if rows and last_row:
                    yield last_row
        yield last_row
def ipm(site, ipt):
    """Merge trailing octets onto a dotted-decimal base address.

    site -- dotted-decimal IP or network string, e.g. "10.10.8.6".
    ipt  -- trailing octet(s) as a string ("7", "9.1.3"), an int (7) or
            a float (3.8, whose decimal point acts as an octet
            separator).

    The last len(ipt) octets of `site` are replaced, e.g.
    ipm("10.10.8.6", "1.2.3") == "10.1.2.3" and
    ipm("10.10.8.6", 5.1) == "10.10.5.1".

    Purely textual: no real subnetting math is performed, and `site`
    is assumed to have four octets.
    """
    tail_octets = str(ipt).split('.')
    base_octets = site.split('.')
    keep = 4 - len(tail_octets)
    return '.'.join(base_octets[:keep] + tail_octets)
# TODO: This should really be wrapped in a class
def pull_subnets():
    """Fetch every subnet record from the default phpIPAM instance."""
    api = phpipam.PHPIPAM(DEFAULT_IPAM_HOST, DEFAULT_IPAM_API_ID,
                          DEFAULT_IPAM_API_KEY)
    api.scheme = 'https'
    payload = json.loads(api.read_subnets())
    return payload['data']
def site_lookup(sfilter):
    """Return the subnet dicts whose description contains `sfilter`.

    Bug fix: the previous range(0, len(subnets) - 1) loop silently
    skipped the final subnet in the list; every entry is scanned now.
    """
    subnets = pull_subnets()
    return [subnet for subnet in subnets
            if sfilter in subnet['description']]
class IPAMController(object):
    """Generic wrapper for JSON objects returned by ipam api"""
    def __init__(self, ipam, data=None, **kwargs):
        """Build an attribute bag from an API response.

        Either pass the raw JSON text as `data` (only its 'data'
        portion is used, i.e. the caller could equally pass
        json.loads(rslt)['data'] fields as keywords) or pass the
        already-decoded fields directly as keyword arguments.
        """
        self.ipam = ipam
        attributes = kwargs
        if data is not None:
            attributes = json.loads(data)['data']
        # The API occasionally wraps a single record in a list.
        if type(attributes) is list:
            attributes = attributes[0]
        for name, value in attributes.items():
            setattr(self, name, value)
class IPAMSubnet(IPAMController):
    """Wrap subnet JSON objects that come from phpipam"""
    def __init__(self, **kwargs):
        IPAMController.__init__(self, **kwargs)
        cidr = u'{0}/{1}'.format(self.subnet, self.mask)
        try:
            self.network = ipaddress.ip_network(cidr)
        except ValueError:
            # Inconsistent subnet/mask data in the IPAM database.
            self.network = 'INVALID'
        self._site_codes = []
    def _pull_site_codes(self):
        """Harvest site codes from the DNS name of every address in this
        subnet (characters 5-7 of each dns_name entry)."""
        raw = self.ipam.generic('addresses', 'read',
                                subnetId=self.id, format='ip')
        records = json.loads(raw)['data']
        self._site_codes = set(entry['dns_name'][5:8] for entry in records)
    @property
    def site_codes(self):
        """Lazily populated set of site codes; refetched while empty."""
        if not self._site_codes:
            self._pull_site_codes()
        return self._site_codes
    def __str__(self):
        return str(self.network)
class IPAM(phpipam.PHPIPAM):
    """Handle subnets and addresses meaningfully"""
    def __init__(self,
                 url=DEFAULT_IPAM_HOST,
                 api_id=DEFAULT_IPAM_API_ID,
                 api_key=DEFAULT_IPAM_API_KEY,
                 scheme='https'):
        phpipam.PHPIPAM.__init__(self, url, api_id, api_key)
        self.scheme = scheme
        # Lazy caches filled on first property access.
        self._subnets = None
        self._raw_subnets = None
        # NOTE(review): '_addresse' looks like a typo (never read
        # elsewhere in this file) -- confirm before renaming.
        self._addresse = None
    def _pull_raw_subnets(self):
        # Fetch and decode the raw subnet list from the API.
        rslt = self.read_subnets()
        jload = json.loads(rslt)
        self._raw_subnets = jload['data']
    @property
    def raw_subnets(self):
        """Decoded subnet dicts straight from the API (cached)."""
        if self._raw_subnets is None:
            self._pull_raw_subnets()
        return self._raw_subnets
    def _pull_subnets(self):
        # Wrap each raw record in an IPAMSubnet, keyed by its id.
        self._subnets = {}
        for subnet in self.raw_subnets:
            self._subnets[subnet[u'id']] = IPAMSubnet(ipam=self, **subnet)
    @property
    def subnets(self, subnet_id=None):
        """access one or all subnets"""
        # NOTE(review): properties cannot receive arguments, so
        # subnet_id is always None here and the whole dict is returned;
        # the lookup branch below is effectively dead code.
        if self._subnets is None:
            self._pull_subnets()
        if subnet_id is not None:
            return self._subnets[subnet_id]
        return self._subnets
    def audit_subnets(self):
        """Re-parse every subnet's CIDR; return False (and print the
        errors) if any subnet/mask pair is invalid."""
        rslt = True
        for subnet in self.subnets.values():
            try:
                net, mask = subnet.subnet, subnet.mask
                subnet.network = ipaddress.ip_network(u'{0}/{1}'.format(net, mask))
            except ValueError as e:
                rslt = False
                # Python 2 print statement -- module is Python 2 only.
                print e
        return rslt
# Wrapps Switch() with features that are great for interactive access,
# but would be terrible to use in an normal script.
class clintSwitch(sshutil.Switch):
    """Interactive-session wrapper around sshutil.Switch that remembers
    the last credentials/site on the *class*, so subsequent instances
    can be created with partial (or no) arguments."""
    def __init__(self, ip=None, creds=None, timeout=None):
        if timeout:
            self.timeout = timeout
        elif not hasattr(self, 'timeout'):
            self.timeout = None
        if creds:
            # Remember credentials class-wide for later instantiations.
            clintSwitch.credentials = creds
        else:
            if not hasattr(self, "credentials"):
                # NOTE(review): SyntaxError is an odd choice here;
                # ValueError would be more conventional.
                raise SyntaxError("Credentials must be provided at least once.")
            creds = self.credentials
        if ip:
            ip = str(ip)
            site = ip
            ips = ip.split('.')
            if len(ips) == 4:
                # A full dotted quad both sets the target and becomes
                # the new class-wide "site" base address.
                clintSwitch.site = site
            else:
                # Partial address: merge the trailing octets onto the
                # remembered site via ipm().
                if not hasattr(self, 'site'):
                    raise SyntaxError("Full IP must be provided at least once.")
                ip = ipm(clintSwitch.site, ip)
            clintSwitch.site = ip
        else:
            ip = 'None'
        sshutil.Switch.__init__(self, ip, creds)
    @property
    def flash_total(self):
        """Total flash size, or 'UNK' if it cannot be read."""
        try:
            return self.flash.total
        except:
            return 'UNK'
    @property
    def flash_free(self):
        """Free flash space, or 'UNK' if it cannot be read."""
        try:
            return self.flash.free
        except:
            return 'UNK'
    def pexecute(self, cmd, trim=True, timeout=None):
        """Execute `cmd` on the switch and print the output (interactive
        convenience; falls back to the instance default timeout)."""
        args = [cmd, trim]
        if not timeout:
            timeout = self.timeout
        if timeout:
            args.append(timeout)
        # Python 2 print statement -- module is Python 2 only.
        print self.execute(*args)
    def interact(self):
        """Hand the terminal over to a real `ssh` session to this switch."""
        cmd = 'ssh {0}'.format(self.ip)
        Popen(cmd, shell=True).communicate()
    def bufferflush(self):
        """Drain and return whatever is pending in the SSH buffer."""
        return self.connection.buffer_flush()
def poll_switch(sw, cmd, sleep_time):
    """sw.pexecute(cmd) every sleep_time seconds"""
    # Runs forever; intended to be launched in a background thread from
    # an interactive session.
    while True:
        sw.pexecute(cmd)
        time.sleep(sleep_time)
def pythonrc():
    """Return the absolute path of the current user's ~/.pythonrc.py."""
    return '{0}{1}'.format(os.path.expanduser('~/'), '.pythonrc.py')
def retrieve_pcaps(sw):
    """Copy every *.pcap file found on the switch's flash to the default
    SCP host, driving the interactive copy dialog via pexecute()."""
    destcreds = sshutil.get_credentials()
    host = DEFAULT_HOST_IP
    # List flash and keep the last whitespace-separated token of each
    # matching line (the filename).
    lines = sw.execute('sh flash: | i pcap').splitlines()
    files = [line.split()[-1] for line in lines]
    for fil in files:
        command = 'copy {0} scp:'.format(fil)
        sw.timeout = 2
        # Python 2 print statement -- module is Python 2 only.
        print 'pulling {0}...'.format(fil)
        # Answer the IOS copy prompts one by one: destination host,
        # two defaults accepted with bare newlines, then the password.
        sw.pexecute(command)
        sw.pexecute(host)
        sw.pexecute('\n')
        sw.pexecute('\n')
        # NOTE(review): the 5 lands in pexecute's `trim` parameter, not
        # `timeout` -- presumably a longer timeout was intended; confirm.
        sw.pexecute(destcreds[1], 5)
class WorkbookWrapper(object):
    """Convenience wrapper around an openpyxl switch-inventory workbook.

    The first worksheet row is treated as a header; subsequent rows each
    describe one switch.  Provides helpers to build clintSwitch objects
    from rows and to write switch attributes back into the sheet.
    """
    def __init__(self, filename):
        # openpyxl column letters are 1-based; normalize to 0-based.
        self.column_from_string = lambda x: openpyxl.utils.column_index_from_string(x) - 1
        self.wb = self.load_workbook(filename)
        self.ws = self.wb.active
        self.rows = self.ws.rows
        self.columns = self.ws.columns
        self.cell = self.ws.cell
        self.build_header()
        # Maps spreadsheet header names to clintSwitch attribute names;
        # unknown headers map to None.  Fix: the original literal listed
        # 'current' twice with the same value; dict literals keep only
        # the last occurrence, so the duplicate has been removed.
        self.attribute_mapping = defaultdict(lambda: None)
        self.attribute_mapping.update(
            {
                'hostname': 'hostname',
                'ip address': 'ip',
                'supervisor': 'supervisor',
                'ram (k)': 'available_ram',
                'total flash': 'flash_total',
                'free flash': 'flash_free',
                'model': 'model',
                'stacked': 'stacked',
                'old': 'software_version',
                'current': 'software_version',
                'feature set (license)': 'license'
            }
        )
    def build_header(self):
        """
        Build a two-way lookup (name -> column index and index -> name)
        from the header row.  Assumes the header is the first row.
        :return:
        """
        header_row = self.rows[0]
        header = [(cell.value.lower(), index) for index, cell in enumerate(header_row)
                  if cell.value is not None]
        self.header = defaultdict(str)
        for (name, index) in header:
            self.header[name] = index
            self.header[index] = name
    def output_values(self, switches):
        """
        Takes switches (for now: manually provided, pre-populated) and outputs their attributes to xlsx.
        :param switches: iterable of populated clintSwitch objects, in
            the same order as the sheet's data rows.
        :return:
        """
        am = self.attribute_mapping
        header = self.header
        for row, switch in zip(self.rows[1:], switches):  # skip header row obviously
            if switch.state.upper() != 'UP':
                continue
            for index, cell in enumerate(row):
                try:
                    rslt = getattr(switch, str(am[header[index]]), 'UNK')
                except Exception:
                    # Fix: narrowed from a bare except, which also
                    # swallowed SystemExit/KeyboardInterrupt.
                    rslt = 'UNK'
                if rslt != 'UNK':
                    cell.value = rslt
    @staticmethod
    def load_workbook(filename):
        """
        return an xlsx document
        :param filename: filename of xlsx doc. Assume it's under ~/stage/
        :return:
        """
        path = os.path.join('~', 'stage', filename)
        path = os.path.expanduser(path)
        wb = openpyxl.load_workbook(path)
        return wb
    def switch_from_row(self, row=None, row_index=None):
        """Build a clintSwitch from a sheet row (or its index); returns
        None when the row has no 'ip address' cell."""
        if row is None:
            assert row_index is not None, "switch_from_row expects row or row_index"
            row = self.rows[row_index]
        assert row in self.rows, "row must be an existing row in rows"
        # Map each populated cell to its header-derived attribute name.
        attrib_from_cell = lambda x: self.header[self.column_from_string(x.column)]
        attrs = dict((attrib_from_cell(cell), cell.value) for cell in row
                     if cell.value is not None)
        try:
            switch = clintSwitch(ip=attrs['ip address'])
        except KeyError:
            return None
        switch.row_index = row_index
        return switch
    def switches_from_rows(self):
        """Build a switch (or None) for every data row in the sheet."""
        return [self.switch_from_row(row=row) for row in self.rows[1:]]  # skip header!
    def get_attribs(self, switch):
        # TODO: not implemented yet.
        pass
# TODO: These are here only for testing purposes and should be pruned / factored out
def populate_switch(switch):
    """Best-effort populate: ask the switch to fill in its attributes,
    swallowing any failure so a thread-pool map over many switches
    keeps going.

    Bug fix: the bare ``except:`` also trapped SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.
    """
    try:
        switch.populate_lite()
    except Exception:
        pass
def test_wb_switches():
    """Interactive smoke test: load the workbook, populate every switch
    in a thread pool, and report progress until all are resolved.
    Leaves wb/switches/pool/rslts in module globals for inspection."""
    global wb
    global switches
    global pool
    global rslts
    wb = WorkbookWrapper('bia-netw.xlsx')
    switches = [switch for switch in wb.switches_from_rows() if switch is not None]
    pool = multiprocessing.pool.ThreadPool(processes=32)
    start_time = time.time()
    rslts = pool.map_async(populate_switch, switches)
    # Poll faster as fewer switches remain (threshold -> seconds).
    increment_table = {100: 5, 50: 3, 25: 1, 10: 0.5}
    remaining_q = []
    increment = 5
    while True:
        # Switches whose state is still 'UNK' have not been populated.
        remaining_switches = [switch.ip for switch in switches if switch.state == 'UNK']
        remaining = len(remaining_switches)
        if remaining == 0:
            return
        seconds = time.time() - start_time
        print('{0} remaining after {1} seconds'.format(remaining, seconds))
        # Pick the polling interval for the current remaining count.
        for key in sorted(increment_table.keys()):
            if remaining >= key:
                increment = increment_table[key]
            else:
                break
        if remaining in remaining_q:  # at least one nonproductive cycle
            if len(remaining_q) == 4:
                # Several stalled cycles in a row: show the stragglers.
                print('Remaining switches:')
                pprint(remaining_switches)
        else:
            remaining_q = []
        remaining_q.append(remaining)
        time.sleep(increment)
    # NOTE(review): unreachable -- the loop above only exits via the
    # `return`, so the pool is never closed/joined here.
    pool.close()
    pool.join()
| lgpl-2.1 | 5,891,277,525,246,864,000 | 29.920635 | 104 | 0.563976 | false |
springmerchant/pybbm | pybb/migrations/0003_auto_20150424_0918.py | 1 | 1054 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from pybb.models import create_or_check_slug
def fill_slugs(apps, schema_editor):
    """Data migration: compute and store a unique slug for every
    existing Category, Forum and Topic row."""
    Category = apps.get_model("pybb", "Category")
    Forum = apps.get_model("pybb", "Forum")
    Topic = apps.get_model("pybb", "Topic")
    for category in Category.objects.all():
        category.slug = create_or_check_slug(instance=category, model=Category)
        category.save()
    # Forum and Topic slugs are only unique within their parent object,
    # so the parent is passed as an extra uniqueness filter.
    for forum in Forum.objects.all():
        forum.slug = create_or_check_slug(instance=forum, model=Forum,
                                          category=forum.category)
        forum.save()
    for topic in Topic.objects.all():
        topic.slug = create_or_check_slug(instance=topic, model=Topic,
                                          forum=topic.forum)
        topic.save()
class Migration(migrations.Migration):
    """Backfill slugs for rows created before the slug fields existed."""
    # Must run after the schema migration that added the slug columns.
    dependencies = [
        ('pybb', '0002_auto_20150424_0918'),
    ]
    operations = [
        migrations.RunPython(fill_slugs),
    ]
| bsd-2-clause | -3,424,356,357,889,820,000 | 29.114286 | 87 | 0.647059 | false |
Donkyhotay/MoonPy | zope/app/form/browser/tests/test_directives.py | 1 | 11244 | #############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Form Directives Tests
$Id: test_directives.py 67630 2006-04-27 00:54:03Z jim $
"""
import os
import unittest
from cStringIO import StringIO
from zope import component
from zope.component.interfaces import ComponentLookupError
from zope.configuration.xmlconfig import xmlconfig, XMLConfig
from zope.traversing.interfaces import TraversalError
from zope.interface import Interface, implements
from zope.publisher.browser import TestRequest
from zope.schema import TextLine, Int
from zope.security.proxy import ProxyFactory
import zope.app.component
import zope.app.form.browser
import zope.app.publisher.browser
from zope.app.form.browser import TextWidget
from zope.app.testing.placelesssetup import PlacelessSetup
from zope.app.form.tests import utils
tests_path = os.path.join(
os.path.dirname(zope.app.publisher.browser.__file__),
'tests')
template = """<configure
xmlns='http://namespaces.zope.org/zope'
xmlns:browser='http://namespaces.zope.org/browser'
i18n_domain='zope'>
%s
</configure>"""
request = TestRequest()
# Schema with a single optional text field, used by the form directives.
class Schema(Interface):
    text = TextLine(
        title=u'Text',
        description=u'Nice text',
        required=False)
# Content interface the browser views are registered for.
class IC(Schema): pass
# Minimal content object implementing IC.
class Ob(object):
    implements(IC)
unwrapped_ob = Ob()
# Security-proxied version used as the view context in the tests.
ob = utils.securityWrap(unwrapped_ob, IC)
# Interface/widget pair used by the custom-widget directive tests.
class ISomeWidget(Interface):
    displayWidth = Int(
        title=u"Display Width",
        default=20,
        required=True)
class SomeWidget(TextWidget):
    implements(ISomeWidget)
class Test(PlacelessSetup, unittest.TestCase):
    """Exercise the browser:addform/editform/schemadisplay ZCML
    directives, with and without custom <widget> sub-directives."""
    def setUp(self):
        super(Test, self).setUp()
        # Load the meta-directives each package contributes so the ZCML
        # snippets below can be parsed.
        XMLConfig('meta.zcml', zope.app.component)()
        XMLConfig('meta.zcml', zope.app.form.browser)()
        XMLConfig('meta.zcml', zope.app.publisher.browser)()
        from zope.app.testing import ztapi
        from zope.traversing.adapters import DefaultTraversable
        from zope.traversing.interfaces import ITraversable
        component.provideAdapter(DefaultTraversable, (None,), ITraversable)
    def testAddForm(self):
        # The view must not exist before the directive is processed.
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='add.html'),
            None)
        xmlconfig(StringIO(template % ("""
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.ITextLine"
              provides="zope.app.form.interfaces.IInputWidget"
              factory="zope.app.form.browser.TextWidget"
              permission="zope.Public"
              />
          <browser:addform
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="add.html"
              label="Add a ZPT page"
              fields="text"
              permission="zope.Public" />
          """)))
        v = component.getMultiAdapter((ob, request), name='add.html')
        # expect to fail as standard macros are not configured
        self.assertRaises(TraversalError, v)
    def testEditForm(self):
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='edit.html'),
            None)
        xmlconfig(StringIO(template % ("""
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.ITextLine"
              provides="zope.app.form.interfaces.IInputWidget"
              factory="zope.app.form.browser.TextWidget"
              permission="zope.Public"
              />
          <browser:editform
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="edit.html"
              label="Edit a ZPT page"
              fields="text"
              permission="zope.Public" />
          """)))
        v = component.getMultiAdapter((ob, request), name='edit.html')
        # expect to fail as standard macros are not configured
        self.assertRaises(TraversalError, v)
    def testEditFormWithMenu(self):
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='edit.html'),
            None)
        # Same as testEditForm, but also registers a menu and attaches
        # the form to it via the menu/title attributes.
        xmlconfig(StringIO(template % ('''
          <browser:menu id="test_menu" title="Test menu"/>
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.ITextLine"
              provides="zope.app.form.interfaces.IInputWidget"
              factory="zope.app.form.browser.TextWidget"
              permission="zope.Public"
              />
          <browser:editform
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="edit.html"
              label="Edit a ZPT page"
              fields="text"
              permission="zope.Public"
              menu="test_menu"
              title="Test View"
              />
          ''')))
        v = component.queryMultiAdapter((ob, request), name='edit.html')
        # expect to fail as standard macros are not configured
        self.assertRaises(TraversalError, v)
    def testSchemaDisplay(self):
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='view.html'),
            None)
        xmlconfig(StringIO(template % ('''
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.IField"
              provides="zope.app.form.interfaces.IDisplayWidget"
              factory="zope.app.form.browser.DisplayWidget"
              permission="zope.Public"
              />
          <browser:schemadisplay
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="view.html"
              label="View a ZPT page"
              fields="text"
              permission="zope.Public" />
          ''')))
        v = component.queryMultiAdapter((ob, request), name='view.html')
        # expect to fail as standard macros are not configured
        self.assertRaises(TraversalError, v)
    def testAddFormWithWidget(self):
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='add.html'),
            None)
        # The nested <widget> directive should install SomeWidget with
        # the given extra attributes on the generated view class.
        xmlconfig(StringIO(template % ('''
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.ITextLine"
              provides="zope.app.form.interfaces.IInputWidget"
              factory="zope.app.form.browser.TextWidget"
              permission="zope.Public"
              />
          <browser:addform
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="add.html"
              label="Add a ZPT page"
              fields="text"
              permission="zope.Public">
            <widget
                field="text"
                class="zope.app.form.browser.tests.test_directives.SomeWidget"
                displayWidth="30"
                extra="foo"
                />
          </browser:addform>
          ''')), )
        view = component.queryMultiAdapter((ob, request), name='add.html')
        self.assert_(hasattr(view, 'text_widget'))
        self.assert_(isinstance(view.text_widget, SomeWidget))
        self.assertEqual(view.text_widget.extra, u'foo')
        self.assertEqual(view.text_widget.displayWidth, 30)
    def testEditFormWithWidget(self):
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='edit.html'),
            None)
        xmlconfig(StringIO(template % ('''
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.ITextLine"
              provides="zope.app.form.interfaces.IInputWidget"
              factory="zope.app.form.browser.TextWidget"
              permission="zope.Public"
              />
          <browser:editform
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="edit.html"
              label="Edit a ZPT page"
              fields="text"
              permission="zope.Public">
            <widget
                field="text"
                class="zope.app.form.browser.tests.test_directives.SomeWidget"
                displayWidth="30"
                extra="foo"
                />
          </browser:editform>
          ''')), )
        view = component.queryMultiAdapter((ob, request), name='edit.html')
        self.assert_(hasattr(view, 'text_widget'))
        self.assert_(isinstance(view.text_widget, SomeWidget))
        self.assertEqual(view.text_widget.extra, u'foo')
        self.assertEqual(view.text_widget.displayWidth, 30)
    def testSchemaDisplayWithWidget(self):
        self.assertEqual(
            component.queryMultiAdapter((ob, request), name='view.html'),
            None)
        xmlconfig(StringIO(template % ('''
          <view
              type="zope.publisher.interfaces.browser.IBrowserRequest"
              for="zope.schema.interfaces.IField"
              provides="zope.app.form.interfaces.IDisplayWidget"
              factory="zope.app.form.browser.DisplayWidget"
              permission="zope.Public"
              />
          <browser:schemadisplay
              for="zope.app.form.browser.tests.test_directives.IC"
              schema="zope.app.form.browser.tests.test_directives.Schema"
              name="view.html"
              label="View a ZPT page"
              fields="text"
              permission="zope.Public">
            <browser:widget
                field="text"
                class="zope.app.form.browser.tests.test_directives.SomeWidget"
                displayWidth="30"
                extra="foo"
                />
          </browser:schemadisplay>
          ''')))
        view = component.queryMultiAdapter((ob, request), name='view.html')
        self.assert_(hasattr(view, 'text_widget'))
        self.assert_(isinstance(view.text_widget, SomeWidget))
        self.assertEqual(view.text_widget.extra, u'foo')
        self.assertEqual(view.text_widget.displayWidth, 30)
def test_suite():
    """Collect every test from this module's Test case."""
    return unittest.TestLoader().loadTestsFromTestCase(Test)
if __name__=='__main__':
    # Run the suite directly, without a test collector.
    unittest.TextTestRunner().run(test_suite())
| gpl-3.0 | -4,779,756,308,137,482,000 | 34.695238 | 78 | 0.595607 | false |
pedrombmachado/summitxl | simple_follower/scripts/laserTracker.py | 1 | 3097 | #!/usr/bin/env python
# test mail: [email protected]
import rospy
import thread, threading
import time
import numpy as np
from sensor_msgs.msg import Joy, LaserScan
from geometry_msgs.msg import Twist, Vector3
from std_msgs.msg import String as StringMsg
from simple_follower.msg import position as PositionMsg
class laserTracker:
	"""ROS node logic: subscribes to laser scans and publishes the
	angle/distance of the closest plausible (non-noise) point."""
	def __init__(self):
		self.lastScan=None
		# Half-width of the index window used for noise rejection, and
		# the maximum distance difference to count as "the same object".
		self.winSize = rospy.get_param('~winSize')
		self.deltaDist = rospy.get_param('~deltaDist')
		self.scanSubscriber = rospy.Subscriber('/hokuyo_base/scan', LaserScan, self.registerScan)
		self.positionPublisher = rospy.Publisher('/object_tracker/current_position', PositionMsg,queue_size=3)
		self.infoPublisher = rospy.Publisher('/object_tracker/info', StringMsg, queue_size=3)
	def registerScan(self, scan_data):
		# registers laser scan and publishes position of closest object (or point rather)
		ranges = np.array(scan_data.ranges)
		# sort by distance to check from closer to further away points if they might be something real
		sortedIndices = np.argsort(ranges)
		minDistanceID = None
		minDistance = float('inf')
		if(not(self.lastScan is None)):
			# if we already have a last scan to compare to:
			for i in sortedIndices:
				# check all distance measurements starting from the closest one
				tempMinDistance = ranges[i]
				# now we check if this might be noise:
				# get a window. in it we will check if there has been a scan with similar distance
				# in the last scan within that window
				# we kneed to clip the window so we don't have an index out of bounds
				windowIndex = np.clip([i-self.winSize, i+self.winSize+1],0,len(self.lastScan))
				window = self.lastScan[windowIndex[0]:windowIndex[1]]
				with np.errstate(invalid='ignore'):
					# check if any of the scans in the window (in the last scan) has a distance close enough to the current one
					if(np.any(abs(window-tempMinDistance)<=self.deltaDist)):
						# this will also be false for all tempMinDistance = NaN or inf
						# we found a plausible distance
						minDistanceID = i
						minDistance = ranges[minDistanceID]
						break # at least one point was equally close
						# so we found a valid minimum and can stop the loop
		# Remember this scan as the reference for the next callback.
		self.lastScan=ranges
		#catches no scan, no minimum found, minimum is actually inf
		if(minDistance > scan_data.range_max):
			#means we did not really find a plausible object
			# publish warning that we did not find anything
			rospy.logwarn('laser no object found')
			self.infoPublisher.publish(StringMsg('laser:nothing found'))
		else:
			# calculate angle of the objects location. 0 is straight ahead
			minDistanceAngle = scan_data.angle_min + minDistanceID * scan_data.angle_increment
			# here we only have an x angle, so the y is set arbitrarily
			self.positionPublisher.publish(PositionMsg(minDistanceAngle, 42, minDistance))
if __name__ == '__main__':
    print('starting')
    # Register this process as a ROS node so parameters and topics resolve.
    rospy.init_node('laser_tracker')
    # Constructing the tracker wires up the subscriber and publishers.
    tracker = laserTracker()
    print('seems to do something')
    try:
        # Hand control to ROS; scan callbacks run until shutdown.
        rospy.spin()
    except rospy.ROSInterruptException:
        print('exception')
| gpl-3.0 | 31,351,464,826,698,900 | 35.435294 | 112 | 0.723926 | false |
jmanday/Informatica | DAI/Practicas/resolucionPracticas_1_2/sesion02/ej_04.py | 1 | 1510 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Practicas de Desarrollo de Aplicaciones para Internet (DAI)
# Copyright (C) 2013 - Zerjillo ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from moon import *
from sun import *
from formulario import *
import web
# URL routing table: each regex pattern maps to the class that handles it.
urls = (
    '/sun(.*)', 'Sun',    # everything starting with "sun" is handled by the Sun class
    '/moon(.*)', 'Moon',  # everything starting with "moon" (or the main page) is handled by the Moon class
    '/()', 'Moon',
    '/formulario', 'Formulario',  # the form page is handled by the Formulario class
)
app = web.application(urls, globals())

# Handler for error 404 (not found).
def notfound():
    """Return a custom 404 page suggesting the /formulario URL."""
    return web.notfound("Lo siento, la página que buscas no existe. Prueba con /formulario")

# Install the function above as the web application's not-found handler.
app.notfound = notfound

if __name__ == "__main__":
    app.run()
rbarlow/pulp_docker | plugins/pulp_docker/plugins/distributors/publish_steps.py | 1 | 8980 | from gettext import gettext as _
import json
import os
from pulp.plugins.util import misc, publish_step
from pulp_docker.common import constants
from pulp_docker.plugins import models
from pulp_docker.plugins.distributors import configuration, v1_publish_steps
class WebPublisher(publish_step.PublishStep):
    """
    Top-level web publisher for a Docker repository.

    This step performs no publishing work of its own; it delegates to a v1
    publisher (which handles Image units) and a v2 publisher (which handles
    Manifest and Blob units), in that order.
    """

    def __init__(self, repo, publish_conduit, config):
        """
        Build the publisher and attach the v1 and v2 child publishers.

        :param repo:            Pulp managed Yum repository
        :type  repo:            pulp.plugins.model.Repository
        :param publish_conduit: Conduit providing access to relative Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config:          Pulp configuration for the distributor
        :type  config:          pulp.plugins.config.PluginCallConfiguration
        """
        super(WebPublisher, self).__init__(
            step_type=constants.PUBLISH_STEP_WEB_PUBLISHER, repo=repo,
            publish_conduit=publish_conduit, config=config)
        # v1 content is published before v2 content, so the order of this
        # tuple matters.
        children = (v1_publish_steps.WebPublisher(repo, publish_conduit, config),
                    V2WebPublisher(repo, publish_conduit, config))
        for child in children:
            self.add_child(child)
class V2WebPublisher(publish_step.PublishStep):
    """
    Publish the v2 content (Blobs, Manifests and Tags) of a Docker repository.
    """

    def __init__(self, repo, publish_conduit, config):
        """
        Assemble the chain of child steps that performs a v2 publish.

        :param repo:            Pulp managed Yum repository
        :type  repo:            pulp.plugins.model.Repository
        :param publish_conduit: Conduit providing access to relative Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config:          Pulp configuration for the distributor
        :type  config:          pulp.plugins.config.PluginCallConfiguration
        """
        super(V2WebPublisher, self).__init__(
            step_type=constants.PUBLISH_STEP_WEB_PUBLISHER, repo=repo,
            publish_conduit=publish_conduit, config=config)
        api_version = 'v2'
        publish_dir = configuration.get_web_publish_dir(repo, config, api_version)
        app_file = configuration.get_redirect_file_name(repo)
        app_publish_location = os.path.join(
            configuration.get_app_publish_dir(config, api_version), app_file)
        # All v2 content is staged under <working_dir>/v2 before being moved
        # into place atomically.
        self.working_dir = os.path.join(self.get_working_dir(), api_version)
        misc.mkdir(self.working_dir)
        self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
        master_publish_dir = configuration.get_master_publish_dir(repo, config, api_version)
        atomic_publish_step = publish_step.AtomicDirectoryPublishStep(
            self.get_working_dir(), [('', publish_dir), (app_file, app_publish_location)],
            master_publish_dir, step_type=constants.PUBLISH_STEP_OVER_HTTP)
        atomic_publish_step.description = _('Making v2 files available via web.')
        # Children run in order: blobs, manifests, tags, then the atomic move
        # into the live directory, and finally the Crane redirect file.
        self.add_child(PublishBlobsStep())
        # Kept as an attribute because PublishTagsStep links tag names into
        # this step's manifests directory.
        self.publish_manifests_step = PublishManifestsStep()
        self.add_child(self.publish_manifests_step)
        self.add_child(PublishTagsStep())
        self.add_child(atomic_publish_step)
        self.add_child(RedirectFileStep(app_publish_location))
class PublishBlobsStep(publish_step.UnitModelPluginStep):
    """
    Symlink every Blob in the repository into the web staging directory.
    """

    def __init__(self):
        """Configure the step to iterate over Blob units."""
        super(PublishBlobsStep, self).__init__(step_type=constants.PUBLISH_STEP_BLOBS,
                                               model_classes=[models.Blob])
        self.description = _('Publishing Blobs.')

    def process_main(self, item):
        """
        Link the Blob's storage path into the blobs directory, named by digest.

        :param item: The Blob to process
        :type  item: pulp_docker.plugins.models.Blob
        """
        target = os.path.join(self.get_blobs_directory(), item.unit_key['digest'])
        misc.create_symlink(item._storage_path, target)

    def get_blobs_directory(self):
        """
        Return the directory that published blobs should be linked under.

        :return: The path to where blobs should be published.
        :rtype: basestring
        """
        return os.path.join(self.parent.get_working_dir(), 'blobs')
class PublishManifestsStep(publish_step.UnitModelPluginStep):
    """
    Publish Manifests.
    """
    def __init__(self):
        """
        Initialize the PublishManifestsStep, setting its description and calling the super class's
        __init__().
        """
        super(PublishManifestsStep, self).__init__(step_type=constants.PUBLISH_STEP_MANIFESTS,
                                                   model_classes=[models.Manifest])
        self.description = _('Publishing Manifests.')

    def process_main(self, item):
        """
        Link the item to the Manifest file, named by its digest.

        :param item: The Manifest to process
        :type  item: pulp_docker.plugins.models.Manifest
        """
        misc.create_symlink(item._storage_path,
                            os.path.join(self.get_manifests_directory(), item.unit_key['digest']))

    def get_manifests_directory(self):
        """
        Get the directory where the Manifests published to the web should be linked.

        :return: The path to where Manifests should be published.
        :rtype: basestring
        """
        return os.path.join(self.parent.get_working_dir(), 'manifests')
class PublishTagsStep(publish_step.UnitModelPluginStep):
    """
    Symlink each Tag to its Manifest and record the list of tag names.
    """

    def __init__(self):
        """Configure the step to iterate over Tag units."""
        super(PublishTagsStep, self).__init__(step_type=constants.PUBLISH_STEP_TAGS,
                                              model_classes=[models.Tag])
        self.description = _('Publishing Tags.')
        # Tag names seen so far; written out as the tag list in finalize().
        self._tag_names = set()

    def process_main(self, item):
        """
        Create a manifest symlink named after the tag.

        :param item: The tag to process
        :type  item: pulp_docker.plugins.models.Tag
        """
        manifest = models.Manifest.objects.get(digest=item.manifest_digest)
        link_name = os.path.join(
            self.parent.publish_manifests_step.get_manifests_directory(), item.name)
        misc.create_symlink(manifest._storage_path, link_name)
        self._tag_names.add(item.name)

    def finalize(self):
        """
        Write the Tag list file so that clients can retrieve the list of available Tags.
        """
        tags_path = os.path.join(self.parent.get_working_dir(), 'tags')
        misc.mkdir(tags_path)
        registry_id = configuration.get_repo_registry_id(self.get_repo(), self.get_config())
        with open(os.path.join(tags_path, 'list'), 'w') as list_file:
            list_file.write(json.dumps({'name': registry_id,
                                        'tags': list(self._tag_names)}))
        # The tag names are no longer needed once the list has been written.
        del self._tag_names
class RedirectFileStep(publish_step.PublishStep):
    """
    This step creates the JSON file that describes the published repository for Crane to use.
    """
    def __init__(self, app_publish_location):
        """
        Initialize the step.

        :param app_publish_location: The full path to the location of the JSON file that this step
                                     will generate.
        :type  app_publish_location: basestring
        """
        super(RedirectFileStep, self).__init__(step_type=constants.PUBLISH_STEP_REDIRECT_FILE)
        self.app_publish_location = app_publish_location

    def process_main(self):
        """
        Publish the JSON file for Crane.
        """
        registry = configuration.get_repo_registry_id(self.get_repo(), self.get_config())
        redirect_url = configuration.get_redirect_url(self.get_config(), self.get_repo(), 'v2')
        # 'version' identifies the redirect file format itself, not the
        # Docker API version.
        redirect_data = {
            'type': 'pulp-docker-redirect', 'version': 2, 'repository': self.get_repo().id,
            'repo-registry-id': registry, 'url': redirect_url,
            'protected': self.get_config().get('protected', False)}
        # Ensure the parent directory exists before writing the file.
        misc.mkdir(os.path.dirname(self.app_publish_location))
        with open(self.app_publish_location, 'w') as app_file:
            app_file.write(json.dumps(redirect_data))
| gpl-2.0 | -9,088,429,965,034,889,000 | 39.45045 | 99 | 0.634633 | false |
andrellsantos/agentspeak-py | agentspeak-py/agent.py | 1 | 13361 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import random
import copy
from agentspeak import *
class Agent:
    """An AgentSpeak(L) agent.

    Holds the belief base, the plan library (plan set P), the event set E,
    a mailbox of pending messages and the intention set I.  Each call to
    :meth:`run` performs one interpretation cycle of the AgentSpeak
    reasoning loop and returns the intention (normally an ``Action``)
    selected for execution in the environment, or ``None`` when there is
    nothing to execute.
    """

    def __init__(self, name, belief_base, initial_goals, plan_library):
        """
        :param name: the agent's identifier; used to address messages to it.
        :param belief_base: the agent's belief base.
        :param initial_goals: goals the agent starts with; each one becomes
            an event in the event set E.
        :param plan_library: the plan set P.
        """
        self.name = name
        self.__belief_base = belief_base
        # Plan set P
        self.__plan_library = plan_library
        # Event set E
        self.__events = []
        # Add the initial goals to the event set E
        for initial_goal in initial_goals:
            triggering_event = TriggeringEvent('+', initial_goal)
            event = Event(triggering_event, TRUE_INTENTION)
            self.__events.append(event)
        self.__messages = []
        self.__intentions = []

    def run(self, perceptions=None, messages=None):
        """Execute one interpretation cycle.

        :param perceptions: literals currently perceived in the environment.
        :param messages: mapping of agent name -> list of pending messages;
            this agent's entry is consumed (popped) when present.
        :returns: the intention selected for execution (e.g. an ``Action``),
            or ``None`` when there is nothing to do.
        """
        # The original signature used mutable default arguments
        # (perceptions=[], messages={}), which are shared between calls;
        # normalize None instead.
        if perceptions is None:
            perceptions = []
        if messages is None:
            messages = {}
        # Message checking function
        self.__check_messages(messages)
        # Belief revision function (BRF)
        self.__belief_revision_function(perceptions)
        # Nothing to do when both the event set and the intention set are empty
        if not self.__events and not self.__intentions:
            return None
        relevant_plans = []
        while len(self.__events) > 0 and len(relevant_plans) == 0:
            # Event selection function
            event = self._event_selection()
            # Unification function that selects the relevant plans
            relevant_plans = self.__unify_event(event)
        if relevant_plans:
            # Substitution function that selects the applicable plans
            applicable_plans = self.__unify_context(relevant_plans)
            if applicable_plans:
                # Selection of the intended means (the plan to commit to)
                intended_mean = self._intended_means_selection(applicable_plans)
                # Update of the intention set
                self.__update_intentions(intended_mean)
        # Selection of the intention to be executed
        intention = self._intention_selection()
        # Special case for .print() with no content: print the belief base
        if intention and isinstance(intention, Action):
            if isinstance(intention.literal, Print) and not intention.literal.content:
                intention.literal.content = str(self.__belief_base)
        # Return the intention to be executed in the environment
        return intention

    # Message checking function
    def __check_messages(self, messages):
        """Consume this agent's mailbox and process every pending message."""
        self.__messages.extend(messages.pop(self.name, []))
        # Process the received messages
        # [TO-DO] When several messages arrive for an agent, should they all
        # be processed within the same interpretation cycle?
        for message in self.__messages:
            self.__process_messages(message.sender, message.type, message.literal)
        # Empty the agent's mailbox
        self.__messages = []

    def __process_messages(self, sender, performative, literal):
        """Apply the effect of a single message to this agent.

        :param sender: name of the agent that sent the message.
        :param performative: the KQML-style performative (e.g. ``'tell'``).
        :param literal: the message content.
        :raises NotImplementedError: for performatives not yet implemented.
        :raises ValueError: for unknown performatives.
        """
        # Tell
        # The sender intends the receiver to hold a belief in which the
        # message literal is true.
        if performative == 'tell':
            self.__belief_base.add(literal)
        # Untell
        # The sender intends the receiver to no longer hold a belief in which
        # the message literal is true.
        elif performative == 'untell':
            self.__belief_base.remove(literal)
        # Achieve
        # The sender asks the receiver to try to reach a state in which the
        # message literal is true, i.e. it delegates a goal to the receiver.
        elif performative == 'achieve':
            goal = Goal('!' + literal)
            triggering_event = TriggeringEvent('+', goal)
            event = Event(triggering_event, TRUE_INTENTION)
            self.__events.append(event)
        # Unachieve
        # The sender asks the receiver to drop the goal of reaching a state
        # in which the message literal is true.
        elif performative == 'unachieve':
            goal = Goal('!' + literal)
            triggering_event = TriggeringEvent('-', goal)
            event = Event(triggering_event, TRUE_INTENTION)
            self.__events.append(event)
        # The remaining performatives are pending implementation.  The
        # original code raised bare strings, which is a TypeError in
        # Python 3; proper exception types are raised instead (the original
        # messages are preserved unchanged).
        elif performative == 'askOne':
            raise NotImplementedError('O tipo \'askOne\' está pendente de implementação na função .send()!')
        elif performative == 'askAll':
            raise NotImplementedError('O tipo \'askAll\' está pendente de implementação na função .send()!')
        elif performative == 'tellHow':
            raise NotImplementedError('O tipo \'tellHow\' está pendente de implementação na função .send()!')
        elif performative == 'untellHow':
            raise NotImplementedError('O tipo \'untellHow\' está pendente de implementação na função .send()!')
        elif performative == 'askHow':
            raise NotImplementedError('O tipo \'askHow\' está pendente de implementação na função .send()!')
        else:
            raise ValueError('Tipo incorreto da função .send()!')

    # [TO-DO] See page 118
    # Belief revision function (BRF)
    def __belief_revision_function(self, perceptions):
        """Reconcile the belief base with the current perceptions.

        Confronts the information coming from the environment with the
        belief base; every belief added or removed generates a new event in
        the event set E.
        """
        # Every perceived literal missing from the belief base is added
        # (the original code confusingly named this list ``remove_list``).
        to_add = []
        for perception in perceptions:
            if perception not in self.__belief_base.items:
                to_add.append(perception)
        for item in to_add:
            triggering_event = self.__belief_base.add(item)
            event = Event(triggering_event, TRUE_INTENTION)
            self.__events.append(event)
        # Every belief that is no longer perceived is removed
        to_remove = []
        for belief in self.__belief_base.items:
            if belief not in perceptions:
                to_remove.append(belief)
        for item in to_remove:
            triggering_event = self.__belief_base.remove(item)
            event = Event(triggering_event, TRUE_INTENTION)
            self.__events.append(event)

    # Event selection function
    def _event_selection(self):
        """Pop and return a single event from the event set E, or None."""
        event = None
        if self.__events:
            event = self.__events.pop(0)
        return event

    # Unification function that selects the relevant plans
    def __unify_event(self, event):
        """Return the plans whose triggering event unifies with ``event``."""
        relevant_plans = []
        # NOTE(review): theta is shared across all plans in this loop, so
        # bindings could accumulate between unifications if unify() mutates
        # it -- confirm this is intended.
        theta = {}
        for plan in self.__plan_library:
            unification = unify(event.triggering_event.literal, plan.triggering_event.literal, theta)
            if unification is not None:
                plan = self.__substitute_unifier(unification, plan)
                relevant_plans.append(plan)
        return relevant_plans

    # Substitution function applied over a unified plan
    def __substitute_unifier(self, unification, plan):
        """Return a copy of ``plan`` with the unifier applied throughout.

        When the unifier is empty the original plan is returned unchanged
        (no copy is made).
        """
        if len(unification) > 0:
            # Work on a copy so the plan library itself is never mutated
            plan = copy.deepcopy(plan)
            # Substitute in the triggering event
            # NOTE(review): this replaces the TriggeringEvent object with the
            # substituted literal itself -- confirm downstream code expects
            # that shape.
            plan.triggering_event = substitute(unification, plan.triggering_event.literal)
            # Substitute in the context
            plan_context = []
            for context in plan.context:
                plan_context.append(substitute(unification, context))
            plan.context = plan_context
            # Substitute in the body
            plan_body = []
            for body in plan.body:
                if isinstance(body, Literal):
                    body = substitute(unification, body)
                elif isinstance(body, Goal):
                    body.content = substitute(unification, body.content)
                    body.literal = substitute(unification, body.literal)
                plan_body.append(body)
            plan.body = plan_body
        return plan

    # Substitution function that selects the applicable plans
    def __unify_context(self, relevant_plans):
        """Return the relevant plans whose context holds in the belief base."""
        applicable_plans = []
        for plan in relevant_plans:
            if self.__relevant_unifier(plan.context):
                applicable_plans.append(plan)
        return applicable_plans

    def __unify_with_belief_base(self, content):
        """Return True when ``content`` unifies with some belief."""
        theta = {}
        for belief in self.__belief_base.items:
            if unify(content, belief, theta) is not None:
                return True
        return False

    def __relevant_unifier(self, context=None):
        """Recursively check that every literal in ``context`` holds.

        The context is treated as a logical conjunction; ``true`` always
        holds and ``not`` negates the belief-base check.
        """
        # Avoid the mutable default argument of the original (context=[]).
        if context is None:
            context = []
        if not context:
            return False
        if len(context) == 1:
            context = context[0]
            if context.functor == 'true':
                return True
            if context.functor == 'not':
                context = context.args[0]
                ret = self.__unify_with_belief_base(context)
                return not ret
            relevant_unifier = self.__unify_with_belief_base(context)
            return relevant_unifier
        else:
            # Both the head and the tail of the context must hold
            relevant_unifier = self.__relevant_unifier(context[:1]) and self.__relevant_unifier(context[1:])
            return relevant_unifier

    # Selection of the intended means
    def _intended_means_selection(self, applicable_plans):
        """Pick a single applicable plan (the first one), or None."""
        applicable_plan = None
        if applicable_plans:
            applicable_plan = applicable_plans.pop(0)
            # Alternative policy: applicable_plan = random.choice(applicable_plans)
        return applicable_plan

    def __update_intentions(self, intended_mean):
        """Push a copy of the intended means onto the intention set I."""
        if intended_mean:
            intention = copy.deepcopy(intended_mean)
            self.__intentions.append(intention)

    # Selection of the intention to be executed
    def _intention_selection(self):
        """Select and partially execute one intention from the set I.

        :returns: an ``Action`` to run in the environment, ``True`` when an
            internal step (achievement/test goal) was handled, or ``None``
            when nothing could be selected.
        """
        intention = None
        while not intention:
            if self.__intentions:
                # Definition 13: select an intention i at the top of the
                # intention set I.
                intention = self.__intentions[-1]
                if intention.body:
                    copy_intention = copy.deepcopy(intention)
                    literal = intention.body.pop(0)
                    if isinstance(literal, Goal):
                        if literal.type == '!':
                            # Definition 13: if the formula in the body of i
                            # is an achievement goal, an event <+!g(t), i> is
                            # added to the event set and the intention that
                            # generated the event is considered executed.
                            triggering_event = TriggeringEvent('+', copy.deepcopy(literal))
                            event = Event(triggering_event, copy_intention)
                            self.__events.append(event)
                            intention = True
                        elif literal.type == '?':
                            # Definition 14: if the formula in the body of i
                            # is a test goal, the belief base is searched for
                            # a belief atom that unifies with the test
                            # predicate.  If one is found the goal is removed
                            # from the intention set; otherwise the remaining
                            # body literals are not executed.
                            theta = {}
                            has_unification = False
                            for belief in self.__belief_base.items:
                                unification = unify(literal.content, belief, theta)
                                if unification is not None:
                                    has_unification = True
                                    break
                            if has_unification:
                                intention = True
                            else:
                                self.__intentions.remove(intention)
                                intention = None
                    else:
                        # Definition 15: if the formula in the body of i is an
                        # action to be performed in the environment, the
                        # interpreter updates the environment with the
                        # required action and removes it from the intention
                        # set.
                        intention = Action(self.name, literal)
                else:
                    self.__intentions.remove(intention)
                    intention = None
            else:
                break
        return intention
openhealthcare/randomise.me | rm/trials/migrations/0020_auto__add_field_report_variable__chg_field_report_score.py | 1 | 7439 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add ``Report.variable`` and relax ``Report.score``."""

    def forwards(self, orm):
        """Add the ``variable`` FK to Report and make ``score`` nullable."""
        # Adding field 'Report.variable'
        # NOTE(review): default=2 presumably points at an existing Variable
        # row used to backfill existing reports -- verify on deploy.
        db.add_column(u'trials_report', 'variable',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=2, to=orm['trials.Variable']),
                      keep_default=False)

        # Changing field 'Report.score'
        db.alter_column(u'trials_report', 'score', self.gf('django.db.models.fields.IntegerField')(null=True))

    def backwards(self, orm):
        """Drop ``Report.variable``; ``score`` NULLs cannot be restored."""
        # Deleting field 'Report.variable'
        db.delete_column(u'trials_report', 'variable_id')

        # User chose to not deal with backwards NULL issues for 'Report.score'
        raise RuntimeError("Cannot reverse this migration. 'Report.score' and its values cannot be restored.")

    # Frozen ORM snapshot generated by South -- do not edit by hand.
    models = {
        u'trials.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
        },
        u'trials.participant': {
            'Meta': {'object_name': 'Participant'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"})
        },
        u'trials.report': {
            'Meta': {'object_name': 'Report'},
            'date': ('django.db.models.fields.DateField', [], {}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Variable']"})
        },
        u'trials.singleuserallocation': {
            'Meta': {'object_name': 'SingleUserAllocation'},
            'date': ('django.db.models.fields.DateField', [], {}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.SingleUserTrial']"})
        },
        u'trials.singleuserreport': {
            'Meta': {'object_name': 'SingleUserReport'},
            'date': ('django.db.models.fields.DateField', [], {}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'score': ('django.db.models.fields.IntegerField', [], {}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.SingleUserTrial']"})
        },
        u'trials.singleusertrial': {
            'Meta': {'object_name': 'SingleUserTrial'},
            'finish_date': ('django.db.models.fields.DateField', [], {}),
            'group_a': ('django.db.models.fields.TextField', [], {}),
            'group_b': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"}),
            'question': ('django.db.models.fields.TextField', [], {}),
            'start_date': ('django.db.models.fields.DateField', [], {}),
            'variable': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'trials.trial': {
            'Meta': {'object_name': 'Trial'},
            'description': ('django.db.models.fields.TextField', [], {}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'finish_date': ('django.db.models.fields.DateField', [], {}),
            'group_a': ('django.db.models.fields.TextField', [], {}),
            'group_a_expected': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'group_b': ('django.db.models.fields.TextField', [], {}),
            'group_b_impressed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max_participants': ('django.db.models.fields.IntegerField', [], {}),
            'min_participants': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"}),
            'participants': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'question': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'recruiting': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'start_date': ('django.db.models.fields.DateField', [], {}),
            'style': ('django.db.models.fields.CharField', [], {'max_length': '2'})
        },
        u'trials.variable': {
            'Meta': {'object_name': 'Variable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'style': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
        },
        u'userprofiles.rmuser': {
            'Meta': {'object_name': 'RMUser'},
            'account': ('django.db.models.fields.CharField', [], {'default': "'st'", 'max_length': '2'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'})
        }
    }

    complete_apps = ['trials']
agconti/Ember-Demo | ember_demo/users/migrations/0002_set_site_domain_and_name.py | 1 | 4352 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.conf import settings
from django.db import models
class Migration(DataMigration):
    """South data migration: initialize the Django Site domain and name."""

    def forwards(self, orm):
        """Set site domain and name."""
        Site = orm['sites.Site']
        site = Site.objects.get(id=settings.SITE_ID)
        site.domain = "example.com"
        site.name = "ember_demo"
        site.save()

    def backwards(self, orm):
        """Revert site domain and name to default."""
        Site = orm['sites.Site']
        site = Site.objects.get(id=settings.SITE_ID)
        site.domain = 'example.com'
        site.name = 'example.com'
        site.save()

    # Frozen ORM snapshot generated by South -- do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'sites.site': {
            'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'users.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        }
    }

    complete_apps = ['sites', 'users']
    symmetrical = True
trbs/django-constance-trbs | tests/storage.py | 1 | 3502 | # -*- encoding: utf-8 -*-
import six
from datetime import datetime, date, time
from decimal import Decimal
# Python 3 removed the ``long`` type; alias it to ``int`` so the
# ``long(2 ** 64)`` literals in the tests below work on both versions.
if six.PY3:
    long = int
class StorageTestsMixin(object):
    """Shared assertions for constance storage backends.

    Mix into a ``TestCase`` whose configured backend provides the config
    values declared in the test settings.
    """

    def test_store(self):
        """Defaults are readable and written values round-trip for all types."""
        # read defaults
        import constance
        config = constance.load_config_class()()
        self.assertEqual(config.INT_VALUE, 1)
        self.assertEqual(config.LONG_VALUE, long(2 ** 64))
        self.assertEqual(config.BOOL_VALUE, True)
        self.assertEqual(config.STRING_VALUE, 'Hello world')
        self.assertEqual(config.UNICODE_VALUE, six.u('Rivi\xe8re-Bonjour'))
        self.assertEqual(config.DECIMAL_VALUE, Decimal('0.1'))
        self.assertEqual(config.DATETIME_VALUE, datetime(2010, 8, 23, 11, 29, 24))
        self.assertEqual(config.FLOAT_VALUE, 3.1415926536)
        self.assertEqual(config.DATE_VALUE, date(2010, 12, 24))
        self.assertEqual(config.TIME_VALUE, time(23, 59, 59))

        # set values
        config.INT_VALUE = 100
        config.LONG_VALUE = long(2 ** 65)
        config.BOOL_VALUE = False
        config.STRING_VALUE = 'Beware the weeping angel'
        config.UNICODE_VALUE = six.u('Québec')
        config.DECIMAL_VALUE = Decimal('1.2')
        config.DATETIME_VALUE = datetime(1977, 10, 2)
        config.FLOAT_VALUE = 2.718281845905
        config.DATE_VALUE = date(2001, 12, 20)
        config.TIME_VALUE = time(1, 59, 0)

        # read again
        self.assertEqual(config.INT_VALUE, 100)
        self.assertEqual(config.LONG_VALUE, long(2 ** 65))
        self.assertEqual(config.BOOL_VALUE, False)
        self.assertEqual(config.STRING_VALUE, 'Beware the weeping angel')
        self.assertEqual(config.UNICODE_VALUE, six.u('Québec'))
        self.assertEqual(config.DECIMAL_VALUE, Decimal('1.2'))
        self.assertEqual(config.DATETIME_VALUE, datetime(1977, 10, 2))
        self.assertEqual(config.FLOAT_VALUE, 2.718281845905)
        self.assertEqual(config.DATE_VALUE, date(2001, 12, 20))
        self.assertEqual(config.TIME_VALUE, time(1, 59, 0))

    def test_nonexistent(self):
        """Reading or writing an undeclared key raises AttributeError.

        The original version wrapped the access in ``try/except`` and only
        checked the exception type inside the handler, so the test passed
        silently when no exception was raised at all; ``assertRaises``
        actually fails in that case.
        """
        from constance import config
        with self.assertRaises(AttributeError):
            config.NON_EXISTENT
        with self.assertRaises(AttributeError):
            config.NON_EXISTENT = 1

    def test_missing_values(self):
        """Unset keys fall back to their defaults; set keys keep their values."""
        from constance import config
        # set some values and leave out others
        config.LONG_VALUE = long(2 ** 64)
        config.BOOL_VALUE = False
        config.UNICODE_VALUE = six.u('Québec')
        config.DECIMAL_VALUE = Decimal('1.2')
        config.DATETIME_VALUE = datetime(1977, 10, 2)
        config.DATE_VALUE = date(2001, 12, 20)
        config.TIME_VALUE = time(1, 59, 0)

        self.assertEqual(config.INT_VALUE, 1)  # this should be the default value
        self.assertEqual(config.LONG_VALUE, long(2 ** 64))
        self.assertEqual(config.BOOL_VALUE, False)
        self.assertEqual(config.STRING_VALUE, 'Hello world')  # this should be the default value
        self.assertEqual(config.UNICODE_VALUE, six.u('Québec'))
        self.assertEqual(config.DECIMAL_VALUE, Decimal('1.2'))
        self.assertEqual(config.DATETIME_VALUE, datetime(1977, 10, 2))
        self.assertEqual(config.FLOAT_VALUE, 3.1415926536)  # this should be the default value
        self.assertEqual(config.DATE_VALUE, date(2001, 12, 20))
        self.assertEqual(config.TIME_VALUE, time(1, 59, 0))
| bsd-3-clause | 2,237,975,639,157,402,600 | 40.642857 | 96 | 0.641224 | false |
kg-bot/SupyBot | plugins/MilleBornes/config.py | 1 | 2718 | ###
# Copyright (c) 2010, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
import supybot.registry as registry
# Set up gettext-style translation helpers; fall back to no-ops when the
# supybot i18n plugin is unavailable.
try:
    from supybot.i18n import PluginInternationalization
    from supybot.i18n import internationalizeDocstring
    _ = PluginInternationalization('MilleBornes')
except:
    # No-op fallbacks so the plugin still loads on a bot without the
    # i18n plugin; strings are returned untranslated.
    _ = lambda x:x
    internationalizeDocstring = lambda x:x
def configure(advanced):
    """Interactive configuration hook called by supybot's setup wizard.

    :param advanced: True when the user identified as an advanced user.
        This plugin has no advanced options, so the flag is unused.
    """
    # Registering with True marks the plugin as enabled in the registry.
    # (The unused boilerplate import of supybot.questions helpers was
    # removed; this plugin asks no interactive questions.)
    conf.registerPlugin('MilleBornes', True)
# Registry group for this plugin; configuration values (if any) are
# registered against it below.
MilleBornes = conf.registerPlugin('MilleBornes')
# This is where your configuration variables (if any) should go. For example:
# conf.registerGlobalValue(MilleBornes, 'someConfigVariableName',
#     registry.Boolean(False, _("""Help for someConfigVariableName.""")))
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| gpl-3.0 | 5,178,330,492,657,698,000 | 45.067797 | 79 | 0.765636 | false |
tombstone/models | official/vision/detection/dataloader/factory.py | 1 | 4932 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model architecture factory."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from official.vision.detection.dataloader import maskrcnn_parser
from official.vision.detection.dataloader import retinanet_parser
from official.vision.detection.dataloader import shapemask_parser
def parser_generator(params, mode):
  """Generator function for various dataset parser.

  Builds the data parser matching the architecture selected by
  `params.architecture.parser` ('retinanet_parser', 'maskrcnn_parser' or
  'shapemask_parser'), wiring in the corresponding anchor and
  architecture-specific parser hyperparameters.

  Args:
    params: a params object holding `architecture`, `anchor` and the
      architecture-specific parser sub-configs.
    mode: dataset mode string forwarded to the parser (e.g. train/eval).

  Returns:
    A parser callable for the selected architecture.

  Raises:
    ValueError: if `params.architecture.parser` names an unsupported parser.
  """
  if params.architecture.parser == 'retinanet_parser':
    # Single-stage RetinaNet input pipeline.
    anchor_params = params.anchor
    parser_params = params.retinanet_parser
    parser_fn = retinanet_parser.Parser(
        output_size=parser_params.output_size,
        min_level=params.architecture.min_level,
        max_level=params.architecture.max_level,
        num_scales=anchor_params.num_scales,
        aspect_ratios=anchor_params.aspect_ratios,
        anchor_size=anchor_params.anchor_size,
        match_threshold=parser_params.match_threshold,
        unmatched_threshold=parser_params.unmatched_threshold,
        aug_rand_hflip=parser_params.aug_rand_hflip,
        aug_scale_min=parser_params.aug_scale_min,
        aug_scale_max=parser_params.aug_scale_max,
        use_autoaugment=parser_params.use_autoaugment,
        autoaugment_policy_name=parser_params.autoaugment_policy_name,
        skip_crowd_during_training=parser_params.skip_crowd_during_training,
        max_num_instances=parser_params.max_num_instances,
        use_bfloat16=params.architecture.use_bfloat16,
        mode=mode)
  elif params.architecture.parser == 'maskrcnn_parser':
    # Two-stage Mask R-CNN pipeline; adds RPN sampling and optional masks.
    anchor_params = params.anchor
    parser_params = params.maskrcnn_parser
    parser_fn = maskrcnn_parser.Parser(
        output_size=parser_params.output_size,
        min_level=params.architecture.min_level,
        max_level=params.architecture.max_level,
        num_scales=anchor_params.num_scales,
        aspect_ratios=anchor_params.aspect_ratios,
        anchor_size=anchor_params.anchor_size,
        rpn_match_threshold=parser_params.rpn_match_threshold,
        rpn_unmatched_threshold=parser_params.rpn_unmatched_threshold,
        rpn_batch_size_per_im=parser_params.rpn_batch_size_per_im,
        rpn_fg_fraction=parser_params.rpn_fg_fraction,
        aug_rand_hflip=parser_params.aug_rand_hflip,
        aug_scale_min=parser_params.aug_scale_min,
        aug_scale_max=parser_params.aug_scale_max,
        skip_crowd_during_training=parser_params.skip_crowd_during_training,
        max_num_instances=parser_params.max_num_instances,
        include_mask=params.architecture.include_mask,
        mask_crop_size=parser_params.mask_crop_size,
        use_bfloat16=params.architecture.use_bfloat16,
        mode=mode)
  elif params.architecture.parser == 'shapemask_parser':
    # ShapeMask pipeline; samples masks and shape priors per image.
    anchor_params = params.anchor
    parser_params = params.shapemask_parser
    parser_fn = shapemask_parser.Parser(
        output_size=parser_params.output_size,
        min_level=params.architecture.min_level,
        max_level=params.architecture.max_level,
        num_scales=anchor_params.num_scales,
        aspect_ratios=anchor_params.aspect_ratios,
        anchor_size=anchor_params.anchor_size,
        use_category=parser_params.use_category,
        outer_box_scale=parser_params.outer_box_scale,
        box_jitter_scale=parser_params.box_jitter_scale,
        num_sampled_masks=parser_params.num_sampled_masks,
        mask_crop_size=parser_params.mask_crop_size,
        mask_min_level=parser_params.mask_min_level,
        mask_max_level=parser_params.mask_max_level,
        upsample_factor=parser_params.upsample_factor,
        match_threshold=parser_params.match_threshold,
        unmatched_threshold=parser_params.unmatched_threshold,
        aug_rand_hflip=parser_params.aug_rand_hflip,
        aug_scale_min=parser_params.aug_scale_min,
        aug_scale_max=parser_params.aug_scale_max,
        skip_crowd_during_training=parser_params.skip_crowd_during_training,
        max_num_instances=parser_params.max_num_instances,
        use_bfloat16=params.architecture.use_bfloat16,
        mask_train_class=parser_params.mask_train_class,
        mode=mode)
  else:
    raise ValueError('Parser %s is not supported.' % params.architecture.parser)
  return parser_fn
| apache-2.0 | -2,585,703,879,675,773,000 | 46.883495 | 80 | 0.70884 | false |
CivicKnowledge/metaeditor | compat/tests/helpers.py | 1 | 1050 | # -*- coding: utf-8 -*-
import fudge
local_cache = {}
def patch_identifier_index(result):
    """Replace ambry's search ``identifier_index`` with a stub whose
    searcher always returns *result* (as hit-like dicts with a ``score``).

    The installed fudge patch is stashed in ``local_cache`` so that
    :func:`restore_patched` can undo it later.
    """
    from ambry.library.search import Search

    class _Hit(dict):
        """dict subclass so a ``score`` attribute can be attached,
        mimicking the hit objects the real searcher yields."""
        pass

    hits = []
    for score, entry in enumerate(result):
        hit = _Hit()
        hit.update(entry)
        hit.score = score
        hits.append(hit)

    class _StubSearcher(object):
        def search(self, query, limit=20):
            return hits

        def __enter__(self, *args, **kwargs):
            return self

        def __exit__(self, *args, **kwargs):
            pass

    class _StubIndex(object):
        schema = '?'

        def searcher(*args, **kwargs):
            return _StubSearcher()

    local_cache['patched_identifier_index'] = fudge.patch_object(
        Search, 'identifier_index', _StubIndex())
def restore_patched():
    """Undo the fudge patch installed by :func:`patch_identifier_index`."""
    patcher = local_cache['patched_identifier_index']
    patcher.restore()
rastrexando-eu/rastrexando-eu | core/migrations/0060_auto_20180128_1205.py | 1 | 1505 | # Generated by Django 2.0.1 on 2018-01-28 11:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.0.1): switches the on_delete
    behaviour of several ForeignKey fields to PROTECT and adjusts their
    null/blank/related_name options."""
    dependencies = [
        ('core', '0059_config'),
    ]
    operations = [
        migrations.AlterField(
            model_name='config',
            name='current_season',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='core.Season', verbose_name='Temporada Actual'),
        ),
        migrations.AlterField(
            model_name='medialink',
            name='rastrexo',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='media_links', to='core.Rastrexo'),
        ),
        migrations.AlterField(
            model_name='rastrexo',
            name='organization',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='rastrexos', to='core.Organization', verbose_name='Organización'),
        ),
        migrations.AlterField(
            model_name='rastrexo',
            name='season',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='rastrexos', to='core.Season'),
        ),
        migrations.AlterField(
            model_name='teammatch',
            name='rastrexo',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='team_matches', to='core.Rastrexo'),
        ),
    ]
| gpl-3.0 | -6,761,579,042,721,352,000 | 37.564103 | 183 | 0.621011 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/__init__.py | 1 | 19329 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
# NOTE: auto-generated by pyangbind from the OpenConfig YANG model --
# regenerate from the model rather than hand-editing this class.
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/signaling-protocols/segment-routing/interfaces/interface/interface-ref/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Configured reference to interface / subinterface
    """

    # Restrict instances to pyangbind bookkeeping plus the two leaf values.
    __slots__ = ("_path_helper", "_extmethods", "__interface", "__subinterface")

    _yang_name = "config"

    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False

        self._extmethods = False
        self.__interface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="interface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )
        self.__subinterface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="subinterface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "mpls",
                "signaling-protocols",
                "segment-routing",
                "interfaces",
                "interface",
                "interface-ref",
                "config",
            ]

    def _get_interface(self):
        """
        Getter method for interface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/interface (leafref)

        YANG Description: Reference to a base interface. If a reference to a
subinterface is required, this leaf must be specified
to indicate the base interface.
        """
        return self.__interface

    def _set_interface(self, v, load=False):
        """
        Setter method for interface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/interface (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_interface is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_interface() directly.

        YANG Description: Reference to a base interface. If a reference to a
subinterface is required, this leaf must be specified
to indicate the base interface.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="interface",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """interface must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )

        self.__interface = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_interface(self):
        self.__interface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="interface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

    def _get_subinterface(self):
        """
        Getter method for subinterface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/subinterface (leafref)

        YANG Description: Reference to a subinterface -- this requires the base
interface to be specified using the interface leaf in
this container. If only a reference to a base interface
is required, this leaf should not be set.
        """
        return self.__subinterface

    def _set_subinterface(self, v, load=False):
        """
        Setter method for subinterface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/subinterface (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_subinterface is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_subinterface() directly.

        YANG Description: Reference to a subinterface -- this requires the base
interface to be specified using the interface leaf in
this container. If only a reference to a base interface
is required, this leaf should not be set.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="subinterface",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """subinterface must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )

        self.__subinterface = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_subinterface(self):
        self.__subinterface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="subinterface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

    # Public accessors for the two leafs, exposed as plain properties.
    interface = __builtin__.property(_get_interface, _set_interface)
    subinterface = __builtin__.property(_get_subinterface, _set_subinterface)

    _pyangbind_elements = OrderedDict(
        [("interface", interface), ("subinterface", subinterface)]
    )
# NOTE(review): this second, near-identical 'config' class shadows the one
# above at module scope -- an artifact of pyangbind generating one class per
# defining YANG module (here openconfig-network-instance-l2). Confirm this
# duplication is intended before relying on either definition by name.
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/mpls/signaling-protocols/segment-routing/interfaces/interface/interface-ref/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Configured reference to interface / subinterface
    """

    # Restrict instances to pyangbind bookkeeping plus the two leaf values.
    __slots__ = ("_path_helper", "_extmethods", "__interface", "__subinterface")

    _yang_name = "config"

    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False

        self._extmethods = False
        self.__interface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="interface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )
        self.__subinterface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="subinterface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "mpls",
                "signaling-protocols",
                "segment-routing",
                "interfaces",
                "interface",
                "interface-ref",
                "config",
            ]

    def _get_interface(self):
        """
        Getter method for interface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/interface (leafref)

        YANG Description: Reference to a base interface. If a reference to a
subinterface is required, this leaf must be specified
to indicate the base interface.
        """
        return self.__interface

    def _set_interface(self, v, load=False):
        """
        Setter method for interface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/interface (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_interface is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_interface() directly.

        YANG Description: Reference to a base interface. If a reference to a
subinterface is required, this leaf must be specified
to indicate the base interface.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="interface",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """interface must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )

        self.__interface = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_interface(self):
        self.__interface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="interface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

    def _get_subinterface(self):
        """
        Getter method for subinterface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/subinterface (leafref)

        YANG Description: Reference to a subinterface -- this requires the base
interface to be specified using the interface leaf in
this container. If only a reference to a base interface
is required, this leaf should not be set.
        """
        return self.__subinterface

    def _set_subinterface(self, v, load=False):
        """
        Setter method for subinterface, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/interfaces/interface/interface_ref/config/subinterface (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_subinterface is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_subinterface() directly.

        YANG Description: Reference to a subinterface -- this requires the base
interface to be specified using the interface leaf in
this container. If only a reference to a base interface
is required, this leaf should not be set.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="subinterface",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """subinterface must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )

        self.__subinterface = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_subinterface(self):
        self.__subinterface = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="subinterface",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

    # Public accessors for the two leafs, exposed as plain properties.
    interface = __builtin__.property(_get_interface, _set_interface)
    subinterface = __builtin__.property(_get_subinterface, _set_subinterface)

    _pyangbind_elements = OrderedDict(
        [("interface", interface), ("subinterface", subinterface)]
    )
| apache-2.0 | -7,710,712,199,952,492,000 | 39.437238 | 352 | 0.598686 | false |
FedoraScientific/salome-hexablock | doc/test_doc/make_transformation/make_scale.py | 1 | 1308 | # -*- coding: latin-1 -*-
# Copyright (C) 2009-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
####### Test make scale ###############
import hexablock

# Exercise Document.makeScale on a simple cylindrical grid.
doc = hexablock.addDocument ("default")

# Geometry primitives (creation order preserved: origin, z axis, x axis).
origin = doc.addVertex(0, 0, 0)
axis_z = doc.addVector(0, 0, 1)
axis_x = doc.addVector(1, 0, 0)

# Cylinder parameters: one radial step of 1, a full 360-degree sweep split
# into 6 angular sectors, and a single layer of height 1.
radius_step = 1
angle_total = 360
height_step = 1
n_radial = 1
n_angular = 6
n_layers = 1

grid = doc.makeCylindrical(origin, axis_x, axis_z,
                           radius_step, angle_total, height_step,
                           n_radial, n_angular, n_layers, False)

# Scale the grid by a factor of 0.5 relative to the point (15, 0, 0).
center = doc.addVertex(15, 0, 0)
grid2 = doc.makeScale(grid, center, 0.5)

# doc.saveVtk("make_scale.vtk")  # uncomment to export the resulting mesh
| lgpl-2.1 | -8,208,037,687,077,105,000 | 28.727273 | 81 | 0.692661 | false |
openstack/designate | designate/objects/zone_import.py | 1 | 1570 | # Copyright 2015 Rackspace Inc.
#
# Author: Tim Simmons <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects import base
from designate.objects import fields
@base.DesignateRegistry.register
class ZoneImport(base.DictObjectMixin, base.PersistentObjectMixin,
                 base.DesignateObject):
    """Record of an asynchronous zone-import task."""
    fields = {
        # Task lifecycle state.
        'status': fields.EnumField(nullable=True,
                                   valid_values=["ACTIVE", "PENDING",
                                                 "DELETED", "ERROR", "COMPLETE"]
                                   ),
        # Always "IMPORT" for this object type.
        'task_type': fields.EnumField(nullable=True,
                                      valid_values=["IMPORT"]
                                      ),
        # Owning project/tenant.
        'tenant_id': fields.StringFields(nullable=True),
        # Human-readable progress or error message.
        'message': fields.StringFields(nullable=True, maxLength=160),
        # Zone targeted/created by the import, once known.
        'zone_id': fields.UUIDFields(nullable=True)
    }
@base.DesignateRegistry.register
class ZoneImportList(base.ListObjectMixin, base.DesignateObject,
                     base.PagedListObjectMixin):
    """Paginated collection of ZoneImport objects."""
    LIST_ITEM_TYPE = ZoneImport

    fields = {
        'objects': fields.ListOfObjectsField('ZoneImport'),
    }
| apache-2.0 | -6,399,281,235,386,972,000 | 34.681818 | 75 | 0.689809 | false |
anish/buildbot | master/buildbot/status/buildrequest.py | 1 | 4850 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import log
from zope.interface import implementer
from buildbot import interfaces
from buildbot.util.eventual import eventually
@implementer(interfaces.IBuildRequestStatus)
class BuildRequestStatus:
    """Status tracker for a single build request.

    Wraps the build request row identified by *brid*, lazily loading the
    full BuildRequest object from the database on first use.
    """

    def __init__(self, buildername, brid, status, brdict=None):
        """
        @param buildername: name of the builder this request belongs to
        @param brid: build request id in the database
        @param status: the master's Status object
        @param brdict: optional pre-fetched build request dictionary,
            avoiding one database round-trip in _getBuildRequest
        """
        self.buildername = buildername
        self.brid = brid
        self.status = status
        self.master = status.master

        self._brdict = brdict
        self._buildrequest = None
        self._buildrequest_lock = defer.DeferredLock()

    @defer.inlineCallbacks
    def _getBuildRequest(self):
        """
        Get the underlying BuildRequest object for this status. This is a slow
        operation!

        @returns: BuildRequest instance or None, via Deferred
        """
        # late binding to avoid an import cycle
        from buildbot.process import buildrequest

        # this is only set once, so no need to lock if we already have it
        if self._buildrequest:
            return self._buildrequest

        yield self._buildrequest_lock.acquire()
        try:
            if not self._buildrequest:
                if self._brdict is None:
                    self._brdict = (
                        yield self.master.db.buildrequests.getBuildRequest(
                            self.brid))
                br = yield buildrequest.BuildRequest.fromBrdict(self.master,
                                                                self._brdict)
                self._buildrequest = br
        finally:
            # Bug fix: the lock was previously released a second time after
            # this finally block; DeferredLock.release on an unheld lock
            # raises, so the duplicate release is removed.
            self._buildrequest_lock.release()

        return self._buildrequest

    def buildStarted(self, build):
        """Record that a build satisfying this request has started."""
        self.status._buildrequest_buildStarted(build.status)
        # NOTE(review): self.builds is never initialized in __init__;
        # presumably assigned by the status machinery before this is
        # called -- confirm before relying on it.
        self.builds.append(build.status)

    # methods called by our clients

    @defer.inlineCallbacks
    def getBsid(self):
        """Return the buildset id this request belongs to, via Deferred."""
        br = yield self._getBuildRequest()
        return br.bsid

    @defer.inlineCallbacks
    def getBuildProperties(self):
        """Return the request's Properties object, via Deferred."""
        br = yield self._getBuildRequest()
        return br.properties

    def getSourceStamp(self):
        """Return the request's source stamp; not implemented yet."""
        # TODO..
        return defer.succeed(None)

    def getBuilderName(self):
        return self.buildername

    @defer.inlineCallbacks
    def getBuilds(self):
        """Return the BuildStatus objects for builds satisfying this
        request, ordered by build number, via Deferred."""
        builder = self.status.getBuilder(self.getBuilderName())
        builds = []

        bdicts = yield self.master.db.builds.getBuilds(buildrequestid=self.brid)

        buildnums = sorted(bdict['number'] for bdict in bdicts)

        for buildnum in buildnums:
            bs = builder.getBuild(buildnum)
            if bs:
                builds.append(bs)
        return builds

    def subscribe(self, observer):
        """Invoke *observer* with each existing build's status, then keep
        notifying it of future builds for this request."""
        d = self.getBuilds()

        @d.addCallback
        def notify_old(oldbuilds):
            for bs in oldbuilds:
                eventually(observer, bs)
        d.addCallback(lambda _:
                      self.status._buildrequest_subscribe(self.brid, observer))
        d.addErrback(log.err, 'while notifying subscribers')

    def unsubscribe(self, observer):
        """Stop notifying *observer* of new builds for this request."""
        self.status._buildrequest_unsubscribe(self.brid, observer)

    @defer.inlineCallbacks
    def getSubmitTime(self):
        """Return the request's submission timestamp, via Deferred."""
        br = yield self._getBuildRequest()
        return br.submittedAt

    def asDict(self):
        """Synchronous dict summary; DB-backed fields are left empty."""
        result = {}
        # Constant
        result['source'] = None  # not available sync, sorry
        result['builderName'] = self.buildername
        result['submittedAt'] = None  # not available sync, sorry

        # Transient
        result['builds'] = []  # not available async, sorry
        return result

    @defer.inlineCallbacks
    def asDict_async(self):
        """Full dict summary, including DB-backed fields, via Deferred."""
        result = {}

        # NOTE(review): getSourceStamp currently returns None, which would
        # make ss.asDict() raise here -- behavior kept as-is; confirm.
        ss = yield self.getSourceStamp()
        result['source'] = ss.asDict()
        props = yield self.getBuildProperties()
        result['properties'] = props.asList()
        result['builderName'] = self.getBuilderName()
        result['submittedAt'] = yield self.getSubmitTime()

        builds = yield self.getBuilds()
        result['builds'] = [build.asDict() for build in builds]

        return result
| gpl-2.0 | -6,598,749,122,687,935,000 | 30.699346 | 80 | 0.632165 | false |
rbmj/pyflightcontrol | pyflightcontrol/aircraft/mpl3115a2.py | 1 | 3189 | from smbus import SMBus
from sys import exit
import os
import time
class MPL3115A2(object):
    """Minimal I2C driver for the MPL3115A2 barometric pressure /
    temperature sensor, read via /dev/i2c-1 through smbus.

    poll() triggers a one-shot measurement and stores the results in
    the ``pressure`` (psf) and ``temperature`` (degrees Rankine)
    attributes.
    """
    #I2C ADDRESS/BITS
    ADDRESS = (0x60)
    #REGISTERS
    REGISTER_STATUS = (0x00)
    REGISTER_STATUS_TDR = 0x02
    REGISTER_STATUS_PDR = 0x04
    REGISTER_STATUS_PTDR = 0x08
    REGISTER_PRESSURE_MSB = (0x01)
    REGISTER_PRESSURE_CSB = (0x02)
    REGISTER_PRESSURE_LSB = (0x03)
    REGISTER_TEMP_MSB = (0x04)
    REGISTER_TEMP_LSB = (0x05)
    REGISTER_DR_STATUS = (0x06)
    OUT_P_DELTA_MSB = (0x07)
    OUT_P_DELTA_CSB = (0x08)
    OUT_P_DELTA_LSB = (0x09)
    OUT_T_DELTA_MSB = (0x0A)
    OUT_T_DELTA_LSB = (0x0B)
    BAR_IN_MSB = (0x14)
    WHOAMI = (0x0C)
    #BITS
    PT_DATA_CFG = 0x13
    PT_DATA_CFG_TDEFE = 0x01
    PT_DATA_CFG_PDEFE = 0x02
    PT_DATA_CFG_DREM = 0x04
    CTRL_REG1 = (0x26)
    CTRL_REG1_SBYB = 0x01
    CTRL_REG1_OST = 0x02
    CTRL_REG1_RST = 0x04
    CTRL_REG1_OS1 = 0x00
    CTRL_REG1_OS2 = 0x08
    CTRL_REG1_OS4 = 0x10
    CTRL_REG1_OS8 = 0x18
    CTRL_REG1_OS16 = 0x20
    CTRL_REG1_OS32 = 0x28
    CTRL_REG1_OS64 = 0x30
    CTRL_REG1_OS128 = 0x38
    CTRL_REG1_RAW = 0x40
    CTRL_REG1_ALT = 0x80
    CTRL_REG1_BAR = 0x00
    CTRL_REG2 = (0x27)
    CTRL_REG3 = (0x28)
    CTRL_REG4 = (0x29)
    CTRL_REG5 = (0x2A)
    REGISTER_STARTCONVERSION = (0x12)
    def __init__(self):
        """Open I2C bus 1, verify the device ID and enable event flags.

        Raises if the WHOAMI register does not match the MPL3115A2
        device id (0xc4).
        """
        # Enable combined I2C transactions (repeated-start), required by
        # this sensor on the Raspberry Pi's bcm2708 I2C driver.
        os.system('echo -n 1 > ' +
                  '/sys/module/i2c_bcm2708/parameters/combined')
        self._bus = SMBus(1)
        whoami = self._bus.read_byte_data(MPL3115A2.ADDRESS,
                                          MPL3115A2.WHOAMI)
        if whoami != 0xc4:
            # NOTE(review): bare `raise` outside an except block raises
            # RuntimeError; a descriptive exception would be clearer.
            raise #FIXME
        # Enable Event Flags
        self._bus.write_byte_data(MPL3115A2.ADDRESS,
                MPL3115A2.PT_DATA_CFG, 0x07)
        # Last conversion results (see poll()).
        self.pressure = 0
        self.temperature = 0
    def poll(self):
        """Run one one-shot conversion and update pressure/temperature.

        Busy-waits on the status register until both pressure and
        temperature data are ready (PTDR flag).
        """
        self._bus.read_byte_data(MPL3115A2.ADDRESS, MPL3115A2.CTRL_REG1)
        # Trigger a one-shot (OST) measurement with 8x oversampling.
        self._bus.write_byte_data(
                MPL3115A2.ADDRESS,
                MPL3115A2.CTRL_REG1,
                MPL3115A2.CTRL_REG1_OST |
                MPL3115A2.CTRL_REG1_OS8)
        while True:
            reg = self._bus.read_byte_data(MPL3115A2.ADDRESS,
                    MPL3115A2.REGISTER_STATUS)
            if (reg & MPL3115A2.REGISTER_STATUS_PTDR) != 0:
                break
        # 20-bit pressure value, fractional in units of 1/64 Pa.
        msb, csb, lsb = self._bus.read_i2c_block_data(MPL3115A2.ADDRESS,
                MPL3115A2.REGISTER_PRESSURE_MSB, 3)
        self.pressure = ((msb<<16) | (csb<<8) | lsb) / 64.
        # convert to psf
        self.pressure = self.pressure*0.02089
        msb, lsb = self._bus.read_i2c_block_data(MPL3115A2.ADDRESS,
                MPL3115A2.REGISTER_TEMP_MSB, 2)
        self.temperature = (msb << 8) | lsb
        # check sign
        if self.temperature > (1<<15):
            self.temperature = self.temperature - (1<<16)
        # make fractional and convert to kelvin
        self.temperature = (self.temperature/256.) + 273.15
        # convert to rankine
        self.temperature = self.temperature*1.8
if __name__ == '__main__':
    # Manual smoke test: continuously print pressure (psf) and
    # temperature (Rankine) at ~20 Hz.
    dev = MPL3115A2()
    while True:
        dev.poll()
        print('p {}\tT {}'.format(dev.pressure, dev.temperature))
        time.sleep(0.05)
| apache-2.0 | -4,089,819,986,893,017,600 | 26.973684 | 72 | 0.570398 | false |
uni-peter-zheng/tp-qemu | qemu/tests/flow_caches_stress_test.py | 2 | 6329 | import logging
import time
from autotest.client import utils
from autotest.client.shared import error
from virttest import utils_netperf, utils_net, env_process, utils_misc
from virttest import data_dir, utils_test
# This decorator makes the test function aware of context strings
@error.context_aware
def run(test, params, env):
    """
    QEMU flow caches stress test test

    1) Make sure nf_conntrack is disabled in host and guest.
       If nf_conntrack is enabled in host, skip this case.
    2) Boot guest with vhost=on/off.
    3) Enable multi queues support in guest (optional).
    4) After installation of netperf, run netserver in host.
    5) Run netperf TCP_CRR protocal test in guest.
    6) Transfer file between guest and host.
    7) Check the md5 of copied file.

    :param test: QEMU test object.
    :param params: Dictionary with the test parameters.
    :param env: Dictionary with test environment.
    """
    msg = "Make sure nf_conntrack is disabled in host and guest."
    error.context(msg, logging.info)
    if "nf_conntrack" in utils.system_output("lsmod"):
        err = "nf_conntrack load in host, skip this case"
        raise error.TestNAError(err)

    params["start_vm"] = "yes"
    error.context("Boot up guest", logging.info)
    env_process.preprocess_vm(test, params, env, params["main_vm"])
    vm = env.get_vm(params["main_vm"])
    vm.verify_alive()
    timeout = int(params.get("login_timeout", 360))
    session = vm.wait_for_login(timeout=timeout)
    if "nf_conntrack" in session.cmd_output("lsmod"):
        msg = "Unload nf_conntrack module in guest."
        error.context(msg, logging.info)
        # Blacklist all conntrack-related modules and reboot so they are
        # not loaded again.
        black_str = "#disable nf_conntrack\nblacklist nf_conntrack\n" \
                    "blacklist nf_conntrack_ipv6\nblacklist xt_conntrack\n" \
                    "blacklist nf_conntrack_ftp\nblacklist xt_state\n" \
                    "blacklist iptable_nat\nblacklist ipt_REDIRECT\n" \
                    "blacklist nf_nat\nblacklist nf_conntrack_ipv4"
        cmd = "echo -e '%s' >> /etc/modprobe.d/blacklist.conf" % black_str
        session.cmd(cmd)
        session = vm.reboot(session, timeout=timeout)
        if "nf_conntrack" in session.cmd_output("lsmod"):
            err = "Fail to unload nf_conntrack module in guest."
            # BUG FIX: the exception was constructed but never raised, so
            # the test silently continued with nf_conntrack still loaded.
            raise error.TestError(err)

    netperf_link = utils_misc.get_path(data_dir.get_deps_dir("netperf"),
                                       params["netperf_link"])
    md5sum = params.get("pkg_md5sum")
    win_netperf_link = params.get("win_netperf_link")
    if win_netperf_link:
        win_netperf_link = utils_misc.get_path(data_dir.get_deps_dir("netperf"),
                                               win_netperf_link)
    win_netperf_md5sum = params.get("win_netperf_md5sum")
    server_path = params.get("server_path", "/var/tmp/")
    client_path = params.get("client_path", "/var/tmp/")
    win_netperf_path = params.get("win_netperf_path", "c:\\")
    client_num = params.get("netperf_client_num", 520)
    netperf_timeout = int(params.get("netperf_timeout", 600))
    netperf_client_ip = vm.get_address()
    host_ip = utils_net.get_host_ip_address(params)
    netperf_server_ip = params.get("netperf_server_ip", host_ip)
    username = params.get("username", "root")
    password = params.get("password", "123456")
    passwd = params.get("hostpasswd", "123456")
    client = params.get("shell_client", "ssh")
    port = params.get("shell_port", "22")
    compile_option_client = params.get("compile_option_client", "")
    compile_option_server = params.get("compile_option_server", "")

    if int(params.get("queues", 1)) > 1 and params.get("os_type") == "linux":
        error.context("Enable multi queues support in guest.", logging.info)
        guest_mac = vm.get_mac_address()
        ifname = utils_net.get_linux_ifname(session, guest_mac)
        cmd = "ethtool -L %s combined %s" % (ifname, params.get("queues"))
        status, out = session.cmd_status_output(cmd)
        if status:
            # BUG FIX: the failure message and exception were previously
            # built unconditionally (ignoring ethtool's exit status) and
            # the exception was never raised.  Only fail on non-zero exit.
            msg = "Fail to enable multi queues support in guest."
            msg += "Command %s fail output: %s" % (cmd, out)
            raise error.TestError(msg)

    if params.get("os_type") == "linux":
        session.cmd("iptables -F", ignore_all_errors=True)
        g_client_link = netperf_link
        g_client_path = client_path
        g_md5sum = md5sum
    elif params.get("os_type") == "windows":
        g_client_link = win_netperf_link
        g_client_path = win_netperf_path
        g_md5sum = win_netperf_md5sum

    error.context("Setup netperf in guest and host", logging.info)
    netperf_client = utils_netperf.NetperfClient(netperf_client_ip,
                                                 g_client_path,
                                                 g_md5sum, g_client_link,
                                                 username=username,
                                                 password=password,
                                                 compile_option=compile_option_client)
    netperf_server = utils_netperf.NetperfServer(netperf_server_ip,
                                                 server_path,
                                                 md5sum,
                                                 netperf_link,
                                                 client, port,
                                                 password=passwd,
                                                 compile_option=compile_option_server)
    try:
        error.base_context("Run netperf test between host and guest.")
        error.context("Start netserver in host.", logging.info)
        netperf_server.start()

        error.context("Start Netperf in guest for %ss." % netperf_timeout,
                      logging.info)
        test_option = "-t TCP_CRR -l %s -- -b 10 -D" % netperf_timeout
        netperf_client.bg_start(netperf_server_ip, test_option, client_num)
        utils_misc.wait_for(lambda: not netperf_client.is_netperf_running(),
                            timeout=netperf_timeout, first=590, step=2)
        utils_test.run_file_transfer(test, params, env)
    finally:
        # Always tear down the server and the client package, even if the
        # netperf run or the file transfer failed.
        netperf_server.stop()
        netperf_client.package.env_cleanup(True)
        if session:
            session.close()
dawn110110/xhj-renren | ntype.py | 1 | 1193 | #-*-coding:utf-8-*-
"""
Original Author:
wong2 <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# Notification type codes (通知类型).
NTYPES = dict(
    reply_in_status_comment=16,
    at_in_status=196,
)
| mit | 6,446,602,852,576,925,000 | 34.909091 | 70 | 0.774684 | false |
aymara/knowledgesrl | src/errorslog.py | 1 | 4172 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import random
from collections import Counter
# Module-level store of error records, keyed by error category.  Each
# value is a list of dicts (one per offending frame) appended by the
# log_* helpers in this module.  NOTE(review): "vn_parsing" and
# "ambiguous_role" are registered here but no visible helper appends to
# them.
errors = {
    "vn_parsing": [],
    "vn_missing": [],
    "frame_without_slot": [],
    "frame_with_slot": [],
    "impossible_role_matching": [],
    "ambiguous_role": []
}
# Raw per-frame debugging records collected by log_debug_data().
debug_data = []
def log_vn_missing(frame):
    """Record a frame whose predicate has no VerbNet entry."""
    record = {
        "file": frame.filename,
        "sentence": frame.sentence,
        "predicate": frame.predicate.lemma,
    }
    errors["vn_missing"].append(record)
def log_frame_with_slot(frame, converted_frame):
    """Record a frame that was successfully mapped to a slot."""
    record = {
        "file": frame.filename,
        "sentence": frame.sentence,
        "predicate": frame.predicate.lemma,
        "structure": converted_frame.structure,
    }
    errors["frame_with_slot"].append(record)
def log_frame_without_slot(frame, converted_frame):
    """Record a frame for which no slot could be found."""
    record = {
        "file": frame.filename,
        "sentence": frame.sentence,
        "predicate": frame.predicate.lemma,
        "structure": converted_frame.structure,
    }
    errors["frame_without_slot"].append(record)
def log_impossible_role_matching(frame, i, msg):
    """Record that the i-th argument's role could not be matched."""
    record = {
        "file": frame.filename,
        "sentence": frame.sentence,
        "predicate": frame.predicate.lemma,
        "fn_role": frame.args[i].role,
        "fn_frame": frame.frame_name,
        "msg": msg,
    }
    errors["impossible_role_matching"].append(record)
def log_debug_data(frame, converted_frame, matcher, distrib, verbnet):
    """Store one full debugging record for later random inspection."""
    record = {
        "sentence": frame.sentence,
        "predicate": frame.predicate.lemma,
        "args": [arg.text for arg in frame.args],
        "vbclass": verbnet[frame.predicate.lemma],
        "structure": converted_frame.structure,
        "chosen_frames": matcher.best_frames,
        "result": distrib,
    }
    debug_data.append(record)
def display_errors_num():
    """Print a summary count for every error category collected so far.

    BUG FIX: several categories referenced here ("missing_predicate_data",
    "unannotated_layer", "predicate_was_arg", "missing_phrase_type") are
    never registered in the module-level ``errors`` dict, so indexing them
    directly raised KeyError.  Use dict.get() with an empty-list default
    so unregistered categories simply count as zero.
    """
    def _num(category):
        # Number of records in *category* (0 if never registered).
        return len(errors.get(category, []))

    print(
        "\n\nProblems :\n"
        "{} unhandled case were encoutered while parsing VerbNet\n"
        "Ignored {} frame for which predicate data was missing\n"
        "Ignored {} non-annotated layers in FrameNet\n"
        "Marked {} arguments which were also predicate as NI\n"
        "Could not retrieve phrase type of {} arguments in FrameNet\n"
        "Ignored {} FrameNet frames which predicate was not in VerbNet\n"
        "Ignored {} empty FrameNet frames\n"
        "Was not able to compare {} roles\n\n".format(
            _num("vn_parsing"), _num("missing_predicate_data"),
            _num("unannotated_layer"), _num("predicate_was_arg"),
            _num("missing_phrase_type"), _num("vn_missing"),
            _num("frame_without_slot"), _num("impossible_role_matching"))
    )
def display_mapping_errors():
    """Print each failed mapping, then the most error-prone predicates."""
    for record in errors["frame_without_slot"]:
        print(record)
    predicate_errors = Counter(
        record["predicate"] for record in errors["frame_without_slot"])
    print(predicate_errors.most_common(10))
    failed = len(errors["frame_without_slot"])
    total = failed + len(errors["frame_with_slot"])
    print("Mapping errors for {} of {} predicates.".format(failed, total))
def display_error_details():
    """Dump raw error records per category (all dumps currently disabled)."""
    # Uncomment individual lines below for ad-hoc debugging of a category.
    # for data in errors["vn_parsing"]: print(data)
    # for data in errors["missing_predicate_data"]: print(data)
    # for data in errors["unannotated_layer"]: print(data)
    # for data in errors["predicate_was_arg"]: print(data)
    # for data in errors["missing_phrase_type"]: print(data)
    # for data in errors["vn_missing"]: print(data)
    # for data in errors["frame_without_slot"]: print(data)
    # for data in errors["impossible_role_matching"]: print(data)
    # for data in errors["ambiguous_role"]: print(data)
    pass
def display_debug(n):
    """Pretty-print *n* randomly chosen debug records.

    Shuffles the module-level ``debug_data`` in place, then prints the
    first *n* records (raises IndexError if fewer than *n* exist).
    """
    random.shuffle(debug_data)
    for idx in range(n):
        record = debug_data[idx]
        print(record["sentence"])
        print("Predicate : "+record["predicate"])
        print("Structure : "+" ".join(record["structure"]))
        print("Arguments :")
        for arg in record["args"]:
            print(arg)
        print("VerbNet data : ")
        for vbframe in record["vbclass"]:
            print(vbframe)
        print("Chosen frames : ")
        for vbframe in record["chosen_frames"]:
            print(vbframe)
        print("Result : ")
        print(record["result"])
        print("\n\n")
| agpl-3.0 | -1,891,198,027,836,748,300 | 33.196721 | 162 | 0.616731 | false |
ray-project/ray | rllib/examples/env/mbmpo_env.py | 1 | 3338 | from gym.envs.classic_control import PendulumEnv, CartPoleEnv
import numpy as np
# MuJoCo may not be installed.
HalfCheetahEnv = HopperEnv = None
try:
from gym.envs.mujoco import HalfCheetahEnv, HopperEnv
except Exception:
pass
class CartPoleWrapper(CartPoleEnv):
    """Wrapper for the Cartpole-v0 environment.

    Adds an additional `reward` method for some model-based RL algos (e.g.
    MB-MPO).
    """

    def reward(self, obs, action, obs_next):
        # obs_next columns: [pos, vel, angle, rotation_rate]
        pos = obs_next[:, 0]
        angle = obs_next[:, 2]
        # Terminated when the cart leaves the track bounds or the pole
        # tilts past the angular threshold.
        terminated = ((pos < -self.x_threshold) |
                      (pos > self.x_threshold) |
                      (angle < -self.theta_threshold_radians) |
                      (angle > self.theta_threshold_radians))
        # 1.0 while still alive, 0.0 once terminated.
        return 1.0 - terminated.astype(np.float32)
class PendulumWrapper(PendulumEnv):
    """Wrapper for the Pendulum-v0 environment.

    Adds an additional `reward` method for some model-based RL algos (e.g.
    MB-MPO).
    """

    def reward(self, obs, action, obs_next):
        # obs = [cos(theta), sin(theta), dtheta/dt]
        # To get the angle back from obs: atan2(sin(theta), cos(theta)).
        # NOTE(review): theta is recovered from `obs` (the pre-transition
        # state) while the other wrappers in this module score `obs_next`;
        # confirm this asymmetry is intentional.
        theta = np.arctan2(
            np.clip(obs[:, 1], -1.0, 1.0), np.clip(obs[:, 0], -1.0, 1.0))

        # Do everything in (B,) space (single theta-, action- and
        # reward values).
        # NOTE(review): only action[0] is used, i.e. one shared torque is
        # assumed for the whole batch -- TODO confirm.
        a = np.clip(action, -self.max_torque, self.max_torque)[0]
        costs = self.angle_normalize(theta) ** 2 + \
            0.1 * obs[:, 2] ** 2 + 0.001 * (a ** 2)
        # Negative cost: smaller deviation/velocity/torque -> higher reward.
        return -costs

    @staticmethod
    def angle_normalize(x):
        # Wrap an angle (radians) into [-pi, pi).
        return (((x + np.pi) % (2 * np.pi)) - np.pi)
class HalfCheetahWrapper(HalfCheetahEnv or object):
    """Wrapper for the MuJoCo HalfCheetah-v2 environment.

    Adds an additional `reward` method for some model-based RL algos (e.g.
    MB-MPO).
    """

    def reward(self, obs, action, obs_next):
        # Reward = forward velocity minus a quadratic control penalty,
        # clipped into [-1000, 1000].  Handles both a batch of
        # transitions (2-D inputs) and a single transition (1-D inputs).
        if obs.ndim == 2 and action.ndim == 2:
            assert obs.shape == obs_next.shape
            ctrl_cost = 0.1 * np.sum(np.square(action), axis=1)
            raw = obs_next[:, 8] - ctrl_cost
        else:
            ctrl_cost = 0.1 * np.square(action).sum()
            raw = obs_next[8] - ctrl_cost
        return np.minimum(np.maximum(-1000.0, raw), 1000.0)
class HopperWrapper(HopperEnv or object):
    """Wrapper for the MuJoCo Hopper-v2 environment.

    Adds an additional `reward` method for some model-based RL algos (e.g.
    MB-MPO).
    """

    def reward(self, obs, action, obs_next):
        # Batched transitions only: all inputs must be 2-D and aligned.
        assert obs.ndim == 2 and action.ndim == 2
        assert (obs.shape == obs_next.shape
                and action.shape[0] == obs.shape[0])
        # Reward = forward velocity + alive bonus - control penalty,
        # clipped into [-1000, 1000].
        alive_bonus = 1.0
        ctrl_cost = 1e-3 * np.sum(np.square(action), axis=1)
        raw = obs_next[:, 5] + alive_bonus - ctrl_cost
        return np.minimum(np.maximum(-1000.0, raw), 1000.0)
if __name__ == "__main__":
    # Manual smoke test: render 100 random-action steps on the pendulum.
    env = PendulumWrapper()
    env.reset()
    for _ in range(100):
        env.step(env.action_space.sample())
        env.render()
| apache-2.0 | 4,605,161,927,756,585,500 | 31.72549 | 79 | 0.584182 | false |
EliAndrewC/ensconce | tests/unit/crypto/test_ephemeral.py | 1 | 3398 | import os
import hashlib
from ensconce.crypto import MasterKey, state, util as crypto_util
from ensconce import exc
from tests import BaseModelTest
class EphemeralStateTest(BaseModelTest):
    """Tests for the in-memory (ephemeral) crypto key state.

    Each test starts from an uninitialized state; tearDown restores the
    key metadata expected by the rest of the suite.
    """
    def setUp(self):
        super(EphemeralStateTest, self).setUp()
        # We need to reset the state
        state.secret_key = None
        #crypto_util.clear_key_metadata()
    def _set_key(self, encryption_key, signing_key):
        """
        Sets a key on the ephemeral store; this method also takes care of
        setting up the key metadata (otherwise loading mismatched key will fail).
        """
        state.secret_key = None
        key = MasterKey(encryption_key=encryption_key, signing_key=signing_key)
        crypto_util.initialize_key_metadata(key=key, salt=os.urandom(8), force_overwrite=True)
        state.secret_key = key
    def tearDown(self):
        # Remove key_metadata rows so that they can be re-initialized.
        super(EphemeralStateTest, self).tearDown()
        # Restore the suite-wide key metadata (SECRET_KEY is presumably
        # provided by BaseModelTest -- confirm).
        crypto_util.initialize_key_metadata(key=self.SECRET_KEY, salt=os.urandom(8), force_overwrite=True)
    def test_initialized(self):
        """ Test ephemeral state initialization check. """
        self.assertFalse(state.initialized)
        self._set_key(hashlib.sha256('secret').digest(), hashlib.sha256('sign').digest())
        self.assertTrue(state.initialized)
    def test_access_uninitialized(self):
        """ Test accessing uninitialized secret_key """
        state.secret_key = None
        with self.assertRaises(exc.CryptoNotInitialized):
            state.secret_key
    def test_already_initialized(self):
        """ Test already-initialized ephemeral state key setting. """
        self._set_key(hashlib.sha256('secret').digest(), hashlib.sha256('sign').digest())
        # Re-assigning the same key pair must succeed (no exception).
        ekey = hashlib.sha256('secret').digest()
        skey = hashlib.sha256('sign').digest()
        state.secret_key = MasterKey(ekey, skey)
    def test_set_different_key(self):
        """ Ensure that setting a new encryption key fails validation. """
        state.secret_key = None
        # Key differs from the metadata initialized by the suite, so
        # validation must reject it.
        ekey = hashlib.sha256('new-key').digest()
        skey = hashlib.sha256('new-key').digest()
        with self.assertRaises(exc.IncorrectKey):
            state.secret_key = MasterKey(ekey, skey)
    def test_set_different_signing_key(self):
        """ Ensure that setting a new signing key fails validation. """
        self._set_key(hashlib.sha256('secret').digest(), hashlib.sha256('sign').digest())
        # Same encryption key, different signing key -> must be rejected.
        ekey = hashlib.sha256('secret').digest()
        skey = hashlib.sha256('new-key').digest()
        with self.assertRaises(exc.IncorrectKey):
            state.secret_key = MasterKey(ekey, skey)
    def test_set_incorrect_size(self):
        """ Test setting an incorrect sized key. """
        # We only support 32-char keys.
        with self.assertRaises(ValueError):
            state.secret_key = ""
        with self.assertRaises(ValueError):
            state.secret_key = hashlib.sha384('secret').digest()  # 48 bytes
        with self.assertRaises(ValueError):
            state.secret_key = hashlib.sha1().digest()  # 20 bytes
    def test_set_incorrect_type(self):
        """ Test setting with incorrect type. """
        with self.assertRaises(TypeError):
            state.secret_key = hashlib.sha1()
jkominek/scicasting | hurricanes/firststorm.py | 1 | 2737 | #!/usr/bin/python
from datetime import datetime, date, timedelta
from pymc import *
import numpy as np
# First-storm formation date for each Atlantic hurricane season.
dates = [date(1992, 4, 21),
         date(1993, 5, 31),
         date(1994, 6, 30),
         date(1995, 6, 2),
         date(1996, 6, 17),
         date(1997, 6, 1),
         date(1998, 7, 27),
         date(1999, 6, 11),
         date(2000, 6, 7),
         date(2001, 6, 4),
         date(2002, 7, 14),
         date(2003, 4, 20),
         date(2004, 7, 31),
         date(2005, 6, 8),
         date(2006, 6, 10),
         date(2007, 5, 9),
         date(2008, 5, 31),
         date(2009, 5, 28),
         date(2010, 6, 25),
         date(2011, 6, 28),
         date(2012, 5, 19),
         date(2013, 6, 5)]

# Offset (days) of each first-storm date from June 30 of its year;
# negative values mean the storm formed before the end of June.
relative_days = [(d - date(d.year, 6, 30)).days for d in dates]

days_array = np.array(relative_days)
simple_mean = np.mean(days_array)
simple_stddev = np.std(days_array)
def predict(today, FORECAST_CLEAR):
    """Build and sample a PyMC model for the next first-storm date.

    today: the date the prediction is made from.
    FORECAST_CLEAR: extra days (can be fractional) to exclude beyond
        today, based on the short-term forecast showing no formation.
    Returns the sampled MCMC object; the 'before_july' trace holds the
    per-sample indicator that next year's first storm is by June 30.
    """
    # Priors over the mean/precision of the first-storm-day distribution,
    # anchored to the empirical spread of the historical data.
    days_mean = Uniform('days_mean', lower=-90, upper=90)
    days_tau = Uniform('days_tau', upper=1.0, lower=1.0/(simple_stddev*2)**2,
                       value=1.0/simple_stddev**2)
    days = Normal('days', mu=days_mean, tau=days_tau,
                  value=days_array, observed=True)
    next_year = Normal('next_year', mu=days_mean, tau=days_tau)
    end_of_june = date(today.year, 6, 30)
    # Re-express `today` as a day offset from June 30 (same scale as data).
    today = (today - end_of_june).days
    @deterministic()
    def before_july(next_year=next_year):
        return next_year<=0
    # Reject samples in the already-observed (or forecast-clear) window.
    @potential()
    def not_before_today(next_year=next_year):
        if next_year <= (today + FORECAST_CLEAR):
            return -1e10
        else:
            return 0.0
    model = Model([not_before_today, before_july, days, days_mean, days_tau])
    M = MCMC(model)
    M.sample(iter=70000, burn=10000, verbose=0, progress_bar=False)
    return M
# http://www.nhc.noaa.gov/
# True if there is nothing with 48-hour formation
# potential on the map
FORECAST_48_HOUR_CLEAR = False

# Print P(first storm before July) as of today and each of the next four
# days.  The second argument shrinks toward zero as the 48-hour forecast
# window overlaps less of the remaining days.
M = predict(date.today(), 2 if FORECAST_48_HOUR_CLEAR else 0)
print "Today"
print np.mean(M.trace('before_july')[:])
M = predict(date.today()+timedelta(1), 1.5 if FORECAST_48_HOUR_CLEAR else 0)
print "Tomorrow"
print np.mean(M.trace('before_july')[:])
M = predict(date.today()+timedelta(2), 0.25 if FORECAST_48_HOUR_CLEAR else 0)
print "2 days from now"
print np.mean(M.trace('before_july')[:])
M = predict(date.today()+timedelta(3), 0.0 if FORECAST_48_HOUR_CLEAR else 0)
print "3 days from now"
print np.mean(M.trace('before_july')[:])
M = predict(date.today()+timedelta(4), 0.0 if FORECAST_48_HOUR_CLEAR else 0)
print "4 days from now"
print np.mean(M.trace('before_july')[:])
| isc | 4,998,649,028,677,647,000 | 28.117021 | 77 | 0.583851 | false |
yeming233/rally | rally/plugins/openstack/context/watcher/audit_templates.py | 1 | 4458 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import six
from rally.common.i18n import _
from rally.common import logging
from rally.common import validation
from rally import consts
from rally import osclients
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.watcher import utils as watcher_utils
from rally.plugins.openstack import types
from rally.task import context
LOG = logging.getLogger(__name__)
@validation.add("required_platform", platform="openstack", admin=True)
@context.configure(name="audit_templates", platform="openstack", order=550)
class AuditTemplateGenerator(context.Context):
    """Creates Watcher audit templates for tenants.

    Each template pairs a Watcher goal with a strategy, drawn from the
    configured ``params`` list either round-robin or at random.
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "audit_templates_per_admin": {"type": "integer", "minimum": 1},
            "fill_strategy": {"enum": ["round_robin", "random", None]},
            "params": {
                "type": "array",
                "minItems": 1,
                "uniqueItems": True,
                "items": {
                    "type": "object",
                    "properties": {
                        "goal": {
                            "type": "object",
                            "properties": {
                                "name": {
                                    "type": "string"
                                }
                            }
                        },
                        "strategy": {
                            "type": "object",
                            "properties": {
                                "name": {
                                    "type": "string"
                                }
                            }
                        },
                    },
                },
            }
        },
        "additionalProperties": False,
        "required": ["params"]
    }

    DEFAULT_CONFIG = {
        "audit_templates_per_admin": 1,
        "fill_strategy": "round_robin"
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `Audit Templates`"))
    def setup(self):
        """Create the configured number of audit templates as admin."""
        watcher_scenario = watcher_utils.WatcherScenario(
            {"admin": self.context["admin"], "task": self.context["task"],
             "owner_id": self.context["owner_id"],
             "config": {
                 "api_versions": self.context["config"].get(
                     "api_versions", [])}
             })

        clients = osclients.Clients(self.context["admin"]["credential"])

        self.context["audit_templates"] = []
        for i in six.moves.range(self.config["audit_templates_per_admin"]):
            cfg_size = len(self.config["params"])
            # Pick which goal/strategy pair this template uses.
            # NOTE(review): the schema also allows fill_strategy to be
            # None, in which case neither branch assigns audit_params and
            # a NameError follows -- confirm whether None is meant to be
            # supported.
            if self.config["fill_strategy"] == "round_robin":
                audit_params = self.config["params"][i % cfg_size]
            elif self.config["fill_strategy"] == "random":
                audit_params = random.choice(self.config["params"])

            # Resolve goal/strategy names to their Watcher UUIDs.
            goal_id = types.WatcherGoal.transform(
                clients=clients,
                resource_config=audit_params["goal"])
            strategy_id = types.WatcherStrategy.transform(
                clients=clients,
                resource_config=audit_params["strategy"])

            audit_template = watcher_scenario._create_audit_template(
                goal_id, strategy_id)
            self.context["audit_templates"].append(audit_template.uuid)

    @logging.log_task_wrapper(LOG.info, _("Exit context: `Audit Templates`"))
    def cleanup(self):
        """Delete the action plans and audit templates created in setup."""
        resource_manager.cleanup(names=["watcher.action_plan",
                                        "watcher.audit_template"],
                                 admin=self.context.get("admin", []),
                                 superclass=watcher_utils.WatcherScenario,
                                 task_id=self.get_owner_id())
jacobian-archive/python-hdcloud | hdcloud/client.py | 1 | 1893 | import httplib2
import urlparse
import urllib
import hdcloud
from . import exceptions
try:
import json
except ImportError:
import simplejson as json
class HDCloudClient(httplib2.Http):
    """HTTP client for the hdcloud.com v1 JSON API.

    Thin wrapper around httplib2.Http that rewrites every request to the
    ``.json`` variant of the endpoint, injects a User-Agent header, raises
    typed exceptions for error statuses and decodes JSON response bodies.
    """

    USER_AGENT = 'python-hdcloud/%s' % hdcloud.__version__
    BASE_URL = 'http://hdcloud.com/api/v1/'

    def __init__(self, username, password):
        super(HDCloudClient, self).__init__()
        self.add_credentials(username, password)
        self.force_exception_to_status_code = True

    def request(self, url, method, *args, **kwargs):
        absolute = urlparse.urljoin(self.BASE_URL, url.lstrip('/'))

        # Force the .json representation of every endpoint.
        scheme, netloc, path, query, fragment = urlparse.urlsplit(absolute)
        absolute = urlparse.urlunsplit(
            (scheme, netloc, "%s.json" % path, query, fragment))

        # Advertise this client library on every request.
        headers = kwargs.setdefault('headers', {})
        headers['User-Agent'] = self.USER_AGENT

        resp, body = self._hdc_request(absolute, method, *args, **kwargs)

        if resp.status in (400, 401, 403, 404, 406, 413, 500):
            raise exceptions.from_response(resp, body)

        return resp, body

    def _hdc_request(self, url, method, *args, **kwargs):
        # Separate method for mocking and testing.
        resp, body = super(HDCloudClient, self).request(url, method,
                                                        *args, **kwargs)
        return resp, (json.loads(body) if body else None)

    def get(self, url, **kwargs):
        return self.request(url, 'GET', **kwargs)

    def post(self, url, **kwargs):
        return self.request(url, 'POST', **kwargs)

    def put(self, url, **kwargs):
        return self.request(url, 'PUT', **kwargs)

    def delete(self, url, **kwargs):
        return self.request(url, 'DELETE', **kwargs)
| bsd-3-clause | -7,586,979,735,969,499,000 | 32.210526 | 85 | 0.595351 | false |
licko/vpp-1701-licko | test/test_gre.py | 1 | 23296 | #!/usr/bin/env python
import unittest
from logging import *
from framework import VppTestCase, VppTestRunner
from vpp_sub_interface import VppDot1QSubint
from vpp_gre_interface import VppGreInterface
from vpp_ip_route import IpRoute, RoutePath
from vpp_papi_provider import L2_VTR_OP
from scapy.packet import Raw
from scapy.layers.l2 import Ether, Dot1Q, GRE
from scapy.layers.inet import IP, UDP
from scapy.layers.inet6 import IPv6
from scapy.volatile import RandMAC, RandIP
from util import ppp, ppc
class TestGRE(VppTestCase):
""" GRE Test Case """
    @classmethod
    def setUpClass(cls):
        # Standard VppTestCase class-level setup; no GRE-specific state.
        super(TestGRE, cls).setUpClass()
    def setUp(self):
        """Create two pg interfaces; pg1 is placed in IP table 1."""
        super(TestGRE, self).setUp()
        # create 2 pg interfaces - set one in a non-default table.
        self.create_pg_interfaces(range(2))
        self.pg1.set_table_ip4(1)
        for i in self.pg_interfaces:
            i.admin_up()
            i.config_ip4()
            i.resolve_arp()
    def tearDown(self):
        # No GRE-specific teardown; base class cleans up interfaces.
        super(TestGRE, self).tearDown()
def create_stream_ip4(self, src_if, src_ip, dst_ip):
pkts = []
for i in range(0, 257):
info = self.create_packet_info(src_if.sw_if_index,
src_if.sw_if_index)
payload = self.info_to_payload(info)
p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
IP(src=src_ip, dst=dst_ip) /
UDP(sport=1234, dport=1234) /
Raw(payload))
info.data = p.copy()
pkts.append(p)
return pkts
    def create_tunnel_stream_4o4(self, src_if,
                                 tunnel_src, tunnel_dst,
                                 src_ip, dst_ip):
        """Build 257 IPv4-in-GRE-in-IPv4 packets.

        Outer IP is tunnel_src->tunnel_dst, inner IP is src_ip->dst_ip;
        each packet carries a unique tracked payload.
        """
        pkts = []
        for i in range(0, 257):
            info = self.create_packet_info(src_if.sw_if_index,
                                           src_if.sw_if_index)
            payload = self.info_to_payload(info)
            p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
                 IP(src=tunnel_src, dst=tunnel_dst) /
                 GRE() /
                 IP(src=src_ip, dst=dst_ip) /
                 UDP(sport=1234, dport=1234) /
                 Raw(payload))
            info.data = p.copy()
            pkts.append(p)
        return pkts
    def create_tunnel_stream_6o4(self, src_if,
                                 tunnel_src, tunnel_dst,
                                 src_ip, dst_ip):
        """Build 257 IPv6-in-GRE-in-IPv4 packets.

        Outer IPv4 is tunnel_src->tunnel_dst, inner IPv6 is
        src_ip->dst_ip; each packet carries a unique tracked payload.
        """
        pkts = []
        for i in range(0, 257):
            info = self.create_packet_info(src_if.sw_if_index,
                                           src_if.sw_if_index)
            payload = self.info_to_payload(info)
            p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
                 IP(src=tunnel_src, dst=tunnel_dst) /
                 GRE() /
                 IPv6(src=src_ip, dst=dst_ip) /
                 UDP(sport=1234, dport=1234) /
                 Raw(payload))
            info.data = p.copy()
            pkts.append(p)
        return pkts
    def create_tunnel_stream_l2o4(self, src_if,
                                  tunnel_src, tunnel_dst):
        """Build 257 Ethernet-in-GRE-in-IPv4 packets.

        The inner L2 frame uses random MAC and IP addresses; each packet
        carries a unique tracked payload.
        """
        pkts = []
        for i in range(0, 257):
            info = self.create_packet_info(src_if.sw_if_index,
                                           src_if.sw_if_index)
            payload = self.info_to_payload(info)
            p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
                 IP(src=tunnel_src, dst=tunnel_dst) /
                 GRE() /
                 Ether(dst=RandMAC('*:*:*:*:*:*'),
                       src=RandMAC('*:*:*:*:*:*')) /
                 IP(src=str(RandIP()), dst=str(RandIP())) /
                 UDP(sport=1234, dport=1234) /
                 Raw(payload))
            info.data = p.copy()
            pkts.append(p)
        return pkts
    def create_tunnel_stream_vlano4(self, src_if,
                                    tunnel_src, tunnel_dst, vlan):
        """Build 257 VLAN-tagged Ethernet-in-GRE-in-IPv4 packets.

        Like create_tunnel_stream_l2o4 but the inner frame carries a
        dot1q tag with the given VLAN id.
        """
        pkts = []
        for i in range(0, 257):
            info = self.create_packet_info(src_if.sw_if_index,
                                           src_if.sw_if_index)
            payload = self.info_to_payload(info)
            p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
                 IP(src=tunnel_src, dst=tunnel_dst) /
                 GRE() /
                 Ether(dst=RandMAC('*:*:*:*:*:*'),
                       src=RandMAC('*:*:*:*:*:*')) /
                 Dot1Q(vlan=vlan) /
                 IP(src=str(RandIP()), dst=str(RandIP())) /
                 UDP(sport=1234, dport=1234) /
                 Raw(payload))
            info.data = p.copy()
            pkts.append(p)
        return pkts
    def verify_tunneled_4o4(self, src_if, capture, sent,
                            tunnel_src, tunnel_dst):
        """Check the capture is `sent` encapsulated in a GRE/IPv4 tunnel.

        The outer IP header must be tunnel_src->tunnel_dst; the inner IP
        must match what was sent, with the TTL decremented once by the
        forwarding step.  Logs the offending Rx/Tx pair on mismatch.
        """
        self.assertEqual(len(capture), len(sent))
        for i in range(len(capture)):
            try:
                tx = sent[i]
                rx = capture[i]
                tx_ip = tx[IP]
                rx_ip = rx[IP]
                self.assertEqual(rx_ip.src, tunnel_src)
                self.assertEqual(rx_ip.dst, tunnel_dst)
                rx_gre = rx[GRE]
                rx_ip = rx_gre[IP]
                self.assertEqual(rx_ip.src, tx_ip.src)
                self.assertEqual(rx_ip.dst, tx_ip.dst)
                # IP processing post pop has decremented the TTL
                self.assertEqual(rx_ip.ttl + 1, tx_ip.ttl)
            except:
                self.logger.error(ppp("Rx:", rx))
                self.logger.error(ppp("Tx:", tx))
                raise
def verify_tunneled_l2o4(self, src_if, capture, sent,
                         tunnel_src, tunnel_dst):
    """Verify the capture is the sent L2 stream GRE-encapped in IPv4."""
    self.assertEqual(len(capture), len(sent))
    for tx, rx in zip(sent, capture):
        try:
            rx_outer = rx[IP]
            self.assertEqual(rx_outer.src, tunnel_src)
            self.assertEqual(rx_outer.dst, tunnel_dst)
            rx_inner = rx[GRE][Ether][IP]
            tx_inner = tx[GRE][Ether][IP]
            self.assertEqual(rx_inner.src, tx_inner.src)
            self.assertEqual(rx_inner.dst, tx_inner.dst)
            # bridged, not L3 forwarded, so no TTL decrement
            self.assertEqual(rx_inner.ttl, tx_inner.ttl)
        except:
            self.logger.error(ppp("Rx:", rx))
            self.logger.error(ppp("Tx:", tx))
            raise
def verify_tunneled_vlano4(self, src_if, capture, sent,
                           tunnel_src, tunnel_dst, vlan):
    """Verify the capture is the sent VLAN-tagged L2 stream, GRE-encapped
    in IPv4 and carrying the expected *vlan* id."""
    try:
        self.assertEqual(len(capture), len(sent))
    except:
        ppc("Unexpected packets captured:", capture)
        raise
    for tx, rx in zip(sent, capture):
        try:
            rx_outer = rx[IP]
            self.assertEqual(rx_outer.src, tunnel_src)
            self.assertEqual(rx_outer.dst, tunnel_dst)
            rx_l2 = rx[GRE][Ether]
            self.assertEqual(rx_l2[Dot1Q].vlan, vlan)
            rx_inner = rx_l2[IP]
            tx_inner = tx[GRE][Ether][IP]
            self.assertEqual(rx_inner.src, tx_inner.src)
            self.assertEqual(rx_inner.dst, tx_inner.dst)
            # bridged, not L3 forwarded, so no TTL decrement
            self.assertEqual(rx_inner.ttl, tx_inner.ttl)
        except:
            self.logger.error(ppp("Rx:", rx))
            self.logger.error(ppp("Tx:", tx))
            raise
def verify_decapped_4o4(self, src_if, capture, sent):
    """Verify the capture is the sent GRE/IPv4 stream after decap and
    IPv4 forwarding."""
    self.assertEqual(len(capture), len(sent))
    for tx, rx in zip(sent, capture):
        try:
            tx_inner = tx[GRE][IP]
            rx_ip = rx[IP]
            self.assertEqual(rx_ip.src, tx_inner.src)
            self.assertEqual(rx_ip.dst, tx_inner.dst)
            # IP processing post pop has decremented the TTL
            self.assertEqual(rx_ip.ttl + 1, tx_inner.ttl)
        except:
            self.logger.error(ppp("Rx:", rx))
            self.logger.error(ppp("Tx:", tx))
            raise
def verify_decapped_6o4(self, src_if, capture, sent):
    """Verify the capture is the sent GRE/IPv6-in-IPv4 stream after decap
    and IPv6 forwarding."""
    self.assertEqual(len(capture), len(sent))
    for tx, rx in zip(sent, capture):
        try:
            tx_inner = tx[GRE][IPv6]
            rx_ip = rx[IPv6]
            self.assertEqual(rx_ip.src, tx_inner.src)
            self.assertEqual(rx_ip.dst, tx_inner.dst)
            # IPv6 forwarding decrements the hop limit
            self.assertEqual(rx_ip.hlim + 1, tx_inner.hlim)
        except:
            self.logger.error(ppp("Rx:", rx))
            self.logger.error(ppp("Tx:", tx))
            raise
def test_gre(self):
    """ GRE tunnel Tests

    End-to-end checks for an L3 GRE tunnel: encap is blocked until the
    tunnel destination resolves, encap/decap work once it does, packets
    from a non-matching source are dropped, and IPv6 over the tunnel
    only works after IPv6 is enabled on it.
    """
    #
    # Create an L3 GRE tunnel.
    # - set it admin up
    # - assign an IP Addres
    # - Add a route via the tunnel
    #
    gre_if = VppGreInterface(self,
                             self.pg0.local_ip4,
                             "1.1.1.2")
    gre_if.add_vpp_config()
    #
    # The double create (create the same tunnel twice) should fail,
    # and we should still be able to use the original
    #
    try:
        gre_if.add_vpp_config()
    except Exception:
        pass
    else:
        self.fail("Double GRE tunnel add does not fail")
    gre_if.admin_up()
    gre_if.config_ip4()
    route_via_tun = IpRoute(self, "4.4.4.4", 32,
                            [RoutePath("0.0.0.0", gre_if.sw_if_index)])
    route_via_tun.add_vpp_config()
    #
    # Send a packet stream that is routed into the tunnel
    # - they are all dropped since the tunnel's desintation IP
    # is unresolved - or resolves via the default route - which
    # which is a drop.
    #
    tx = self.create_stream_ip4(self.pg0, "5.5.5.5", "4.4.4.4")
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.assert_nothing_captured(
        remark="GRE packets forwarded without DIP resolved")
    #
    # Add a route that resolves the tunnel's destination
    #
    route_tun_dst = IpRoute(self, "1.1.1.2", 32,
                            [RoutePath(self.pg0.remote_ip4,
                                       self.pg0.sw_if_index)])
    route_tun_dst.add_vpp_config()
    #
    # Send a packet stream that is routed into the tunnel
    # - packets are GRE encapped
    #
    self.vapi.cli("clear trace")
    tx = self.create_stream_ip4(self.pg0, "5.5.5.5", "4.4.4.4")
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_tunneled_4o4(self.pg0, rx, tx,
                             self.pg0.local_ip4, "1.1.1.2")
    #
    # Send tunneled packets that match the created tunnel and
    # are decapped and forwarded
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_4o4(self.pg0,
                                       "1.1.1.2",
                                       self.pg0.local_ip4,
                                       self.pg0.local_ip4,
                                       self.pg0.remote_ip4)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_decapped_4o4(self.pg0, rx, tx)
    #
    # Send tunneled packets that do not match the tunnel's src
    # (source 1.1.1.3 instead of the tunnel's 1.1.1.2) - must be dropped
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_4o4(self.pg0,
                                       "1.1.1.3",
                                       self.pg0.local_ip4,
                                       self.pg0.local_ip4,
                                       self.pg0.remote_ip4)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.assert_nothing_captured(
        remark="GRE packets forwarded despite no SRC address match")
    #
    # Configure IPv6 on the PG interface so we can route IPv6
    # packets
    #
    self.pg0.config_ip6()
    self.pg0.resolve_ndp()
    #
    # Send IPv6 tunnel encapslated packets
    # - dropped since IPv6 is not enabled on the tunnel
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_6o4(self.pg0,
                                       "1.1.1.2",
                                       self.pg0.local_ip4,
                                       self.pg0.local_ip6,
                                       self.pg0.remote_ip6)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.assert_nothing_captured(remark="IPv6 GRE packets forwarded "
                                     "despite IPv6 not enabled on tunnel")
    #
    # Enable IPv6 on the tunnel
    #
    gre_if.config_ip6()
    #
    # Send IPv6 tunnel encapslated packets
    # - forwarded since IPv6 is enabled on the tunnel
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_6o4(self.pg0,
                                       "1.1.1.2",
                                       self.pg0.local_ip4,
                                       self.pg0.local_ip6,
                                       self.pg0.remote_ip6)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_decapped_6o4(self.pg0, rx, tx)
    #
    # test case cleanup
    #
    route_tun_dst.remove_vpp_config()
    route_via_tun.remove_vpp_config()
    gre_if.remove_vpp_config()
    self.pg0.unconfig_ip6()
def test_gre_vrf(self):
    """ GRE tunnel VRF Tests

    The tunnel's underlay (its destination lookup) lives in FIB table 1
    while the overlay stays in the default table; verifies encap uses
    the underlay table and decap lookup does not.
    """
    #
    # Create an L3 GRE tunnel whose destination is in the non-default
    # table. The underlay is thus non-default - the overlay is still
    # the default.
    # - set it admin up
    # - assign an IP Addres
    #
    gre_if = VppGreInterface(self, self.pg1.local_ip4,
                             "2.2.2.2",
                             outer_fib_id=1)
    gre_if.add_vpp_config()
    gre_if.admin_up()
    gre_if.config_ip4()
    #
    # Add a route via the tunnel - in the overlay
    #
    route_via_tun = IpRoute(self, "9.9.9.9", 32,
                            [RoutePath("0.0.0.0", gre_if.sw_if_index)])
    route_via_tun.add_vpp_config()
    #
    # Add a route that resolves the tunnel's destination - in the
    # underlay table
    #
    route_tun_dst = IpRoute(self, "2.2.2.2", 32, table_id=1,
                            paths=[RoutePath(self.pg1.remote_ip4,
                                             self.pg1.sw_if_index)])
    route_tun_dst.add_vpp_config()
    #
    # Send a packet stream that is routed into the tunnel
    # packets are sent in on pg0 which is in the default table
    # - packets are GRE encapped
    #
    self.vapi.cli("clear trace")
    tx = self.create_stream_ip4(self.pg0, "5.5.5.5", "9.9.9.9")
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg1.get_capture()
    self.verify_tunneled_4o4(self.pg1, rx, tx,
                             self.pg1.local_ip4, "2.2.2.2")
    #
    # Send tunneled packets that match the created tunnel and
    # are decapped and forwarded. This tests the decap lookup
    # does not happen in the encap table
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_4o4(self.pg1,
                                       "2.2.2.2",
                                       self.pg1.local_ip4,
                                       self.pg0.local_ip4,
                                       self.pg0.remote_ip4)
    self.pg1.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_decapped_4o4(self.pg0, rx, tx)
    #
    # test case cleanup
    #
    route_tun_dst.remove_vpp_config()
    route_via_tun.remove_vpp_config()
    gre_if.remove_vpp_config()
def test_gre_l2(self):
    """ GRE tunnel L2 Tests

    Cross-connects two TEB (transparent Ethernet bridging) GRE tunnels
    and checks L2 frames are re-encapped out the peer tunnel; then does
    the same through VLAN sub-interfaces with tag rewrite, expecting the
    VLAN tags to be swapped.
    """
    #
    # Add routes to resolve the tunnel destinations
    #
    route_tun1_dst = IpRoute(self, "2.2.2.2", 32,
                             [RoutePath(self.pg0.remote_ip4,
                                        self.pg0.sw_if_index)])
    route_tun2_dst = IpRoute(self, "2.2.2.3", 32,
                             [RoutePath(self.pg0.remote_ip4,
                                        self.pg0.sw_if_index)])
    route_tun1_dst.add_vpp_config()
    route_tun2_dst.add_vpp_config()
    #
    # Create 2 L2 GRE tunnels and x-connect them
    #
    gre_if1 = VppGreInterface(self, self.pg0.local_ip4,
                              "2.2.2.2",
                              is_teb=1)
    gre_if2 = VppGreInterface(self, self.pg0.local_ip4,
                              "2.2.2.3",
                              is_teb=1)
    gre_if1.add_vpp_config()
    gre_if2.add_vpp_config()
    gre_if1.admin_up()
    gre_if2.admin_up()
    self.vapi.sw_interface_set_l2_xconnect(gre_if1.sw_if_index,
                                           gre_if2.sw_if_index,
                                           enable=1)
    self.vapi.sw_interface_set_l2_xconnect(gre_if2.sw_if_index,
                                           gre_if1.sw_if_index,
                                           enable=1)
    #
    # Send in tunnel encapped L2. expect out tunnel encapped L2
    # in both directions
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_l2o4(self.pg0,
                                        "2.2.2.2",
                                        self.pg0.local_ip4)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_tunneled_l2o4(self.pg0, rx, tx,
                              self.pg0.local_ip4,
                              "2.2.2.3")
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_l2o4(self.pg0,
                                        "2.2.2.3",
                                        self.pg0.local_ip4)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_tunneled_l2o4(self.pg0, rx, tx,
                              self.pg0.local_ip4,
                              "2.2.2.2")
    self.vapi.sw_interface_set_l2_xconnect(gre_if1.sw_if_index,
                                           gre_if2.sw_if_index,
                                           enable=0)
    self.vapi.sw_interface_set_l2_xconnect(gre_if2.sw_if_index,
                                           gre_if1.sw_if_index,
                                           enable=0)
    #
    # Create a VLAN sub-interfaces on the GRE TEB interfaces
    # then x-connect them
    #
    gre_if_11 = VppDot1QSubint(self, gre_if1, 11)
    gre_if_12 = VppDot1QSubint(self, gre_if2, 12)
    # gre_if_11.add_vpp_config()
    # gre_if_12.add_vpp_config()
    gre_if_11.admin_up()
    gre_if_12.admin_up()
    self.vapi.sw_interface_set_l2_xconnect(gre_if_11.sw_if_index,
                                           gre_if_12.sw_if_index,
                                           enable=1)
    self.vapi.sw_interface_set_l2_xconnect(gre_if_12.sw_if_index,
                                           gre_if_11.sw_if_index,
                                           enable=1)
    #
    # Configure both to pop their respective VLAN tags,
    # so that during the x-connect they will subsequently push
    #
    self.vapi.sw_interface_set_l2_tag_rewrite(gre_if_12.sw_if_index,
                                              L2_VTR_OP.L2_POP_1,
                                              12)
    self.vapi.sw_interface_set_l2_tag_rewrite(gre_if_11.sw_if_index,
                                              L2_VTR_OP.L2_POP_1,
                                              11)
    #
    # Send traffic in both directions - expect the VLAN tags to
    # be swapped.
    #
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_vlano4(self.pg0,
                                          "2.2.2.2",
                                          self.pg0.local_ip4,
                                          11)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_tunneled_vlano4(self.pg0, rx, tx,
                                self.pg0.local_ip4,
                                "2.2.2.3",
                                12)
    self.vapi.cli("clear trace")
    tx = self.create_tunnel_stream_vlano4(self.pg0,
                                          "2.2.2.3",
                                          self.pg0.local_ip4,
                                          12)
    self.pg0.add_stream(tx)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    rx = self.pg0.get_capture()
    self.verify_tunneled_vlano4(self.pg0, rx, tx,
                                self.pg0.local_ip4,
                                "2.2.2.2",
                                11)
    #
    # Cleanup Test resources
    #
    gre_if_11.remove_vpp_config()
    gre_if_12.remove_vpp_config()
    gre_if1.remove_vpp_config()
    gre_if2.remove_vpp_config()
    # Bug fix: cleanup must *remove* the destination routes; the original
    # called add_vpp_config() again here, leaking both routes after the
    # test completed.
    route_tun1_dst.remove_vpp_config()
    route_tun2_dst.remove_vpp_config()
if __name__ == '__main__':
    # Run the GRE test suite under the VPP test runner when executed
    # directly.
    unittest.main(testRunner=VppTestRunner)
| apache-2.0 | 656,361,454,378,659,000 | 33.666667 | 78 | 0.455915 | false |
vhernandez/jwsProcessor | src/jwsprocessor/pysmoothing.py | 1 | 6326 | #!/urs/bin/env python
# -*- coding: UTF8 -*-
"""
PySMOOTHING Library v 0.11
* Copyright (C) 2005 Victor M. Hernandez Rocamora
* Code for Savitsky-Golay filtering converted from MatLab code of 'sgolay' and
'sgolayfilt' functions from Octave software(http://www.octave.org/), which
are Copyright (C) 2001 Paul Kienzle.
This module is licenced under the GNU General Public License 2.0 or a later
version.
"""
#Numpy:
from numpy import array, repeat, concatenate, ones, zeros, arange, reshape, put, add, dot, take, float32
from numpy.linalg import pinv
#SciPy:
from scipy.signal.signaltools import lfilter
def mean_movement(input_data, window):
    """Smooth *input_data* with a centred moving average of width *window*.

    The signal is padded at both ends with the edge values, so the output
    has the same length as the input.

    Parameters
    ----------
    input_data : sequence of numbers
    window : int
        Odd window length; must not exceed len(input_data) + 2.

    Returns
    -------
    list of float
    """
    if window % 2 != 1:
        raise Exception("'mean_movement' function needs an odd window length")
    if window > (len(input_data)) + 2:
        raise Exception("'mean_movement' function: input data too short")
    input_data = list(input_data)
    length = len(input_data)
    # Half-width of the window.  Integer division keeps this an int on
    # both Python 2 and 3; the original `/` becomes true division on
    # Python 3 and breaks the list repetitions below.
    n = (window - 1) // 2
    # Pad with edge values: n copies in front, n+1 behind (the extra
    # trailing copy is read by the final look-ahead update of _sum).
    padded = ([input_data[0]] * n) + input_data + ([input_data[-1]] * (n + 1))
    # Running sum over the first window; float so division stays float.
    _sum = 0.0
    for i in range(window):
        _sum += padded[i]
    output_data = []
    for i in range(n, n + length):
        output_data.append(_sum / window)
        # Slide the window one position to the right.
        _sum -= padded[i - n]
        _sum += padded[i + n + 1]
    return output_data
def _mean_movement_only_python(data, m):
if m > (2*len(data))+2:
return data
input_array = list(data)
output_array = list(data)
mean_factor = (2*m)+1
length = len(data)
# Process data from the middle
for i in xrange(m, length-m):
_sum = 0
for j in xrange(i-m, i+m):
_sum += input_array[j]
output_array[i] = _sum/mean_factor
# Process data from the beginning
window = 1
for i in xrange(1, m):
_sum = 0
for j in xrange(i-window, i+window):
_sum += input_array[j]
output_array[i] = _sum/((2*window)+1)
window += 1
output_array[0] = input_array[0]
#Process data from the end
window = 1
for i in reversed(xrange(length-m, length-1)):
_sum = 0
for j in xrange(i-window, i+window):
_sum += input_array[j]
output_array[i] = _sum/((2*window)+1)
window +=1
output_array[length-1] = input_array[length-1]
del input_array
return output_array
def sgolay(p, n):
    """Return the n x n Savitzky-Golay smoothing matrix of order p.

    Row k (0-based, k = n//2) holds the steady-state (central) filter
    coefficients; the first/last k rows handle the window edges.  Port
    of Octave's ``sgolay``.

    Note: the only code change versus the original is replacing the
    Python 2-only ``raise Exception, "msg"`` statements with call
    syntax, which is valid on both Python 2 and 3.
    """
    if n % 2 != 1:
        raise Exception("'sgolay' function needs an odd filter length n")
    elif p >= n:
        raise Exception("'sgolay' function needs filter length n larger than polynomial order p")
    k = int(n / 2)
    F = zeros((n, n), float32)
    for row in range(1, k + 2):
        # Least-squares design matrix for a window anchored at `row`:
        # A = pinv( ([1:n]-row)' * ones(1,p+1) .^ (ones(n,1)*[0:p]) )
        left = dot(reshape(arange(1, n + 1) - row, (-1, 1)), ones((1, p + 1)))
        right = repeat([range(0, p + 1)], n, 0)
        A = pinv(left ** right)
        # F(row,:) = A(1,:)
        put(F.ravel(), add(arange(n), n * (row - 1)), A[0])
    # F(k+2:n,:) = F(k:-1:1, n:-1:1) -- remaining rows are the first k
    # rows reversed in both dimensions.
    for fila in range(k + 1, n):
        put(F.ravel(), add(arange(n), n * fila), F[n - 1 - fila][::-1])
    return F
def sgolayfilt(x, p, n):
    """Apply a Savitzky-Golay filter of order *p* and length *n* to *x*.

    Port of Octave's ``sgolayfilt``.  The only code change versus the
    original is the Python 3-compatible ``raise`` call syntax.
    """
    x = array(x, float32).ravel()
    size = len(x)
    if size < n:
        raise Exception("'sgolayfilt': insufficient data for filter")
    ## The first k rows of F are used to filter the first k points
    ## of the data set based on the first n points of the data set.
    ## The last k rows of F are used to filter the last k points
    ## of the data set based on the last n points of the dataset.
    ## The remaining data is filtered using the central row of F.
    F = sgolay(p, n)
    k = int(n / 2)
    # z = filter(F(k+1,:), 1, x)
    z = lfilter(F[k], 1, x)
    # y = [ F(1:k,:)*x(1:n,:) ; z(n:len,:) ; F(k+2:n,:)*x(len-n+1:len,:) ]
    left = dot(take(F, arange(k), 0), take(x, arange(n), 0))
    right = dot(take(F, arange(k + 1, n), 0), take(x, arange(size - n, size), 0))
    middle = take(z, arange(n - 1, size))
    return concatenate((left, middle, right))
def test1():
    """Time the three smoothing implementations on a noisy sine and plot
    the smoothed results (requires matplotlib/pylab; Python 2 only --
    uses print statements, xrange and time.clock)."""
    ### Demo requires Matplotlib!!!! ###
    import pylab
    import random
    import math
    import time
    def _gen_noisy_sine():
        # One degree per sample, with small random additive noise whose
        # magnitude and sign are drawn per point.
        dataset = []
        sign_vector = (-1,1)
        for i in range(360):
            sinval = math.sin(math.radians(i))
            randint = float(random.randint(5,50))
            randsign = float(random.choice(sign_vector))
            randval = (random.random()/randint)*randsign
            dataset.append(sinval + randval)
        return dataset
    noisy_data = _gen_noisy_sine()
    TIMES = 100
    print "Testing differents algorithms, executing each one", TIMES, "times:"
    # test means movement algorithm
    t1 = time.clock()
    for i in xrange(TIMES):
        _mean_movement_only_python(noisy_data, 25)
    t2 = time.clock()
    print "\t- Python-only Means Movement algorithm:", t2-t1, 's'
    t1 = time.clock()
    for i in xrange(TIMES):
        mean_movement(noisy_data, 25)
    t2 = time.clock()
    print "\t- Python-only Means Movement algorithm 2:", t2-t1, 's'
    t1 = time.clock()
    for i in xrange(TIMES):
        sgolayfilt(noisy_data, 1, 25)
    t2 = time.clock()
    print "\t- Savitsky-Golay algorithm", t2-t1, 's'
    # Plot the original against both smoothers for visual comparison.
    mm_denoised_data = mean_movement(noisy_data, 25)
    sg_denoised_data = sgolayfilt(noisy_data, 1, 25)
    pylab.plot(noisy_data, "r",
               mm_denoised_data, "g",
               sg_denoised_data, "b")
    pylab.legend(("Original", "Means movement", "Savitsky-Golay"))
    pylab.show()
def test2():
    """Smoke test: smooth channel 0 of 'spc.jws' with mean_movement and
    dump the result to 'feo3.txt' (requires the jwslib module and the
    input file in the working directory)."""
    import jwslib
    import sys
    results = jwslib.read_file("spc.jws")
    if results[0] == jwslib.JWS_ERROR_SUCCESS:
        header = results[1]
        channels = results[2]
    else:
        # Input file could not be read; abort the demo.
        sys.exit(-1)
    channels[0] = mean_movement(channels[0], 25)
    jwslib.dump_channel_data('feo3.txt', header, channels)
def test3():
    """Dump Savitzky-Golay smoothed versions of 'spc.jws' channel 0 for
    every odd window length from 5 to 23 (one output file per window;
    requires the jwslib module and the input file)."""
    import jwslib
    import sys
    results = jwslib.read_file("spc.jws")
    if results[0] == jwslib.JWS_ERROR_SUCCESS:
        header = results[1]
        channels = results[2]
    else:
        # Input file could not be read; abort the demo.
        sys.exit(-1)
    original = channels[0]
    for i in range(5,25,2):
        channels[0] = sgolayfilt(original, 1, i)
        jwslib.dump_channel_data('spc_sg_1_%d.txt'%i, header, channels)
if __name__=="__main__":
    # Running as a script launches the timing/plotting demo.
    test1()
| gpl-2.0 | -3,851,358,189,755,600,400 | 31.27551 | 104 | 0.585994 | false |
dessn/sn-bhm | dessn/planck/planck.py | 1 | 1173 | import numpy as np
import inspect
import os
def get_planck(restricted=True):
    """Load the Planck chain shipped next to this module.

    Priors from COM_CosmoParams_fullGrid_R2.00
    base_w/plikHM_TT_lowTEB/base_w_plikHM_TT_lowTEB.

    Returns (chain, params, weights, likelihood); when *restricted* is
    True the chain is cut down to the Omega_m and w columns only.
    """
    here = os.path.dirname(os.path.abspath(inspect.stack()[0][1]))
    samples = np.load(here + "/planck.npy")
    weights = samples[:, 0]
    likelihood = samples[:, 1]
    chain = samples[:, 2:]
    # Second tab-separated field of each .paramnames line is the LaTeX
    # label (minus its trailing newline).
    with open(here + "/planck.paramnames") as f:
        params = ["$%s$" % line.split("\t")[1][:-1] for line in f]
    if restricted:
        wanted = [r"$\Omega_m$", "$w$"]
        chain = chain[:, [params.index(p) for p in wanted]]
        params = wanted
    return chain, params, weights, likelihood
if __name__ == "__main__":
    # Quick-look script: load the restricted chain and plot it with
    # ChainConsumer (third-party; imported lazily here only).
    chain, params, weights, likelihood = get_planck()
    om = chain[:, params.index(r"$\Omega_m$")]
    w = chain[:, params.index(r"$w$")]
    from chainconsumer import ChainConsumer
    c = ChainConsumer()
    c.add_chain(chain, parameters=params)
    c.plotter.plot(display=True)
    # import matplotlib.pyplot as plt
    # plt.hist2d(om, w, bins=100, weights=weights)
    # plt.show()
| mit | -8,789,032,739,981,367,000 | 32.514286 | 101 | 0.618926 | false |
houghb/ligpy | ligpy/ligpy_utils.py | 1 | 18322 | """
Misc utility functions required by several modules in the ligpy program.
"""
import os
import numpy as np
from constants import GAS_CONST, MW
def set_paths():
"""
Set the absolute path to required files on the current machine.
Returns
-------
reactionlist_path : str
path to the file `complete_reactionlist.dat`
rateconstantlist_path : str
path to the file `complete_rateconstantlist.dat`
compositionlist_path : str
path to the file `compositionlist.dat`
"""
module_dir = os.path.abspath(__file__).split('ligpy_utils')[0]
reactionlist_path = module_dir + 'data/complete_reaction_list.dat'
rateconstantlist_path = module_dir + 'data/complete_rateconstant_list.dat'
compositionlist_path = module_dir + 'data/compositionlist.dat'
return reactionlist_path, rateconstantlist_path, compositionlist_path
def get_specieslist(completereactionlist):
"""
Make a list of all the molecular species involved in the kinetic scheme.
Parameters
----------
completereactionlist : str
the path to the `complete_reaction_list.dat` file
Returns
-------
specieslist : list
a list of all the species in the kinetic scheme
"""
specieslist = []
for line in open(completereactionlist, 'r').readlines():
for spec in line.split(','):
# If the species has already been added to the list then move on.
if spec.split('_')[1].split()[0] in specieslist:
continue
else:
specieslist.append(spec.split('_')[1].split()[0])
specieslist.sort()
return specieslist
def get_speciesindices(specieslist):
    """
    Assign an arbitrary index to each species in the kinetic scheme.

    Parameters
    ----------
    specieslist : list
        a list of all the species in the model

    Returns
    -------
    speciesindices : dict
        maps each species to its index (position in `specieslist`)
    indices_to_species : dict
        the reverse mapping (index -> species)
    """
    # enumerate replaces the original's hand-maintained counter.
    speciesindices = {species: i for i, species in enumerate(specieslist)}
    indices_to_species = {i: species
                          for species, i in speciesindices.items()}
    return speciesindices, indices_to_species
def define_initial_composition(compositionlist, species):
    """
    Look up the named lignin species and return the initial composition
    of the polymer in terms of the three model components (PLIGC, PLIGH,
    PLIGO).

    Parameters
    ----------
    compositionlist : str
        the path of the `compositionlist.dat` file
    species : str
        the name of a lignin species present in the composition file

    Returns
    -------
    pligc_0, pligh_0, pligo_0 : float
        initial compositions [mol/L] of PLIGC, PLIGH, PLIGO

    Raises
    ------
    ValueError
        if `species` is not present in the file (the original fell
        through to a confusing UnboundLocalError instead)
    """
    # density of the condensed phase [kg/L]
    density = 0.75
    # Text mode + context manager: the original opened in 'rb' (byte
    # strings break the str splits on Python 3) and leaked the handle.
    with open(compositionlist, 'r') as comps:
        for line in comps:
            fields = line.split(',')
            if fields[0] == species:
                # Initial compositions [mole fraction]
                pligc_mol = float(fields[1])
                pligh_mol = float(fields[2])
                pligo_mol = float(fields[3])
                # Weighted average molar mass of the mixture [kg/mol]
                weighted_m = (301 * pligc_mol + 423 * pligh_mol +
                              437 * pligo_mol) / 1000
                # Initial compositions [mol/L]
                return (density / weighted_m * pligc_mol,
                        density / weighted_m * pligh_mol,
                        density / weighted_m * pligo_mol)
    raise ValueError('species %r not found in %s'
                     % (species, compositionlist))
def build_k_matrix(rateconsts):
    """
    Build a matrix of all the rate constant parameters (A, n, E).

    Parameters
    ----------
    rateconsts : str
        the path to the file `complete_rateconstant_list.dat`
        (each line is "A n E", space-separated)

    Returns
    -------
    kmatrix : list
        a list of [A, n, E] string triples, one per reaction
    """
    kmatrix = []
    # Single pass with a context manager: the original opened the file
    # twice (once just to count lines) and never closed either handle.
    with open(rateconsts, 'r') as rates:
        for line in rates:
            fields = line.split(' ')
            # The trailing .split() strips the newline from the E field.
            kmatrix.append([fields[0], fields[1], fields[2].split()[0]])
    return kmatrix
def get_k_value(T, reaction_index, kmatrix):
    """Evaluate the rate constant k = A * T**n * exp(-E / (R*T)) for one
    reaction at temperature T (Kelvin).

    NOTE(review): the A/n/E entries are strings passed through eval();
    `kmatrix` must come from the trusted rate-constant data file --
    eval on untrusted input is unsafe.
    """
    a_str, n_str, e_str = kmatrix[reaction_index]
    return (eval(a_str) * T ** eval(n_str) *
            np.exp(-1 * eval(e_str) / (GAS_CONST * T)))
def get_k_value_list(T, kmatrix):
    """
    Return the rate constant of every reaction at a given temperature.

    Parameters
    ----------
    T : float
        temperature in Kelvin
    kmatrix : list
        the kmatrix generated by build_k_matrix()

    Returns
    -------
    list
        rate constant values, in reaction order
    """
    # Comprehension over the row indices; the original enumerated the
    # matrix but used neither the row value nor the accumulator
    # idiomatically.
    return [get_k_value(T, i, kmatrix) for i in range(len(kmatrix))]
def build_reactant_dict(completereactionlist, speciesindices):
    """
    Build a dictionary of the reactants involved in each reaction,
    along with their stoichiometric coefficients.  Keys are reaction
    numbers; values are lists of [reactantindex, |coeff|].

    Parameters
    ----------
    completereactionlist : str
        path to the file `complete_reaction_list.dat`
    speciesindices : dict
        the dictionary from get_speciesindices()

    Returns
    -------
    reactant_dict : dict
        reaction number -> list of [species index, |stoich coeff|]
    """
    reactant_dict = {}
    # Text mode + context manager: the original opened in 'rb' (byte
    # strings break the str splits on Python 3) and leaked the handle.
    with open(completereactionlist, 'r') as reactions:
        for rxnindex, reaction in enumerate(reactions):
            reactants = []
            # x is each coefficient_species token, e.g. '-1_ADIO'
            for x in reaction.split(','):
                coeff = float(x.split('_')[0])
                # negative coefficient means the species is a reactant
                if coeff < 0:
                    # *-1 so we store |stoich coeff|
                    reactants.append(
                        [speciesindices[x.split('_')[1].split()[0]],
                         -1 * coeff])
            reactant_dict[rxnindex] = reactants
    return reactant_dict
def build_species_rxns_dict(completereactionlist):
    """
    Map every species to the reactions it participates in.

    Parameters
    ----------
    completereactionlist : str
        path to the file `complete_reaction_list.dat`

    Returns
    -------
    species_rxns : dict
        keys are the species in the model; values are lists of
        [reaction index,
         sign of that species in the net rate equation (-1/+1),
         stoichiometric coefficient as a string (products prefixed '+')]
    """
    # Bug fix: the original derived its species list via
    # get_specieslist(set_paths()[0]), silently ignoring the file passed
    # in.  Parsing the given file in a single pass also replaces the
    # original's one-full-file-scan-per-species behaviour.
    species_rxns = {}
    with open(completereactionlist, 'r') as reactions:
        for rxnindex, line in enumerate(reactions):
            # example token: '-1_ADIO'
            for x in line.split(','):
                coeff = x.split('_')[0]
                name = x.split('_')[1].split()[0]
                # setdefault ensures every species in the file gets a
                # key, even one with only zero coefficients.
                entries = species_rxns.setdefault(name, [])
                if float(coeff) < 0:
                    # the species is a reactant in this reaction
                    entries.append([rxnindex, -1, coeff])
                elif float(coeff) > 0:
                    # the species is a product in this reaction
                    entries.append([rxnindex, 1, '+' + coeff])
    return species_rxns
def build_rates_list(rateconstlist, reactionlist, speciesindices,
                     indices_to_species, human='no'):
    """Create the list of rate-expression strings, one per reaction.

    With human='no' each expression indexes the y[] solution vector;
    with human='yes' the species names are used instead, for reading.

    Parameters
    ----------
    rateconstlist : str
        path to `complete_rateconstant_list.dat`
    reactionlist : str
        path to `complete_reaction_list.dat`
    speciesindices : dict
        species -> index mapping from get_speciesindices()
    indices_to_species : dict
        index -> species (reverse of speciesindices)
    human : str, optional
        'yes' for human-readable output, default 'no'

    Returns
    -------
    rates_list : list of str
    """
    kmatrix = build_k_matrix(rateconstlist)
    reactant_dict = build_reactant_dict(reactionlist, speciesindices)
    rates_list = []
    for rxn in range(len(kmatrix)):
        prefix = 'rate[%s] = kvalue(T,%s) ' % (rxn, rxn)
        terms = ''
        for entry in reactant_dict[rxn]:
            if entry == 'n':
                # 'n' marks "no reaction": force the whole rate to zero.
                terms = '* 0'
                break
            if human == 'no':
                terms += '* y[%s]**%s ' % (entry[0], entry[1])
            elif human == 'yes':
                terms += '* [%s]**%s ' % (indices_to_species[entry[0]],
                                          entry[1])
            else:
                raise ValueError('human must be a string: yes or no')
        rates_list.append(prefix + terms)
    return rates_list
def build_dydt_list(rates_list, specieslist, species_rxns, human='no'):
    """Generate one d[species]/dt expression string per species.

    Parameters
    ----------
    rates_list : list
        the output of build_rates_list()
    specieslist : list
        all species in the kinetic scheme
    species_rxns : dict
        species -> list of [reaction index, sign, coefficient string]
    human : str, optional
        'yes' for human-readable output (rate[i] references), default
        'no' (inlined rate expressions)

    Returns
    -------
    dydt_expressions : list of str
    """
    dydt_expressions = []
    for species in specieslist:
        expression = 'd[%s]/dt = ' % (species)
        # each record is [reaction index, sign, signed coefficient str]
        for rxn_number, _sign, coefficient in species_rxns[species]:
            if human == 'no':
                # inline the reaction's full rate expression
                expression += '%s*%s ' % (
                    coefficient, rates_list[rxn_number].split(' = ')[1])
            elif human == 'yes':
                expression += '%s*rate[%s] ' % (coefficient, rxn_number)
            else:
                raise ValueError('human must be a string: yes or no')
        dydt_expressions.append(expression)
    return dydt_expressions
def write_rates_and_odes(filename, rates, odes):
    """
    Write a human-reference file with the model equations: the list of
    rate expressions followed by the list of ODEs for each species.
    The file is not read by the program; regenerate it only if the
    kinetic scheme changes.

    Parameters
    ----------
    filename : str
        path of the file to write
    rates : list
        the output of build_rates_list() with human='yes'
    odes : list
        the output of build_dydt_list() with human='yes'

    Returns
    -------
    None
    """
    # One text-mode handle for the whole file: the original opened the
    # file twice in binary mode and wrote str objects, which fails on
    # Python 3.
    with open(filename, 'w') as writer:
        writer.write('Reaction Rates:\n')
        for line in rates:
            writer.write(line + '\n')
        writer.write('\n\nODEs:\n')
        for line in odes:
            writer.write(line + '\n')
# These are some functions for checking the integrity of some model
# components, but they are not used except for exploratory or verification
# purposes
def check_species_in_MW(specieslist=None):
"""
Check to make sure that everything in the specieslist is in the MW
dictionary from `constants.py`.
Parameters
----------
specieslist : list, optional
a list of species to check against. If no list is
specified then the function get_specieslist() will be used
to generate the default list
Returns
-------
None
"""
if specieslist == None:
specieslist = get_specieslist(set_paths()[0])
for item in MW.keys():
if item in specieslist:
print '%s is in specieslist' % ('{: <20}'.format(item))
else:
print '********'+item
for item in specieslist:
if item in MW.keys():
print '%s is in MW dictionary' % ('{: <20}'.format(item))
else:
print '********'+item
print '\n%s should equal %s' % (len(MW.keys()), len(specieslist))
def check_mass_balance():
    """
    Check for conservation of mass, and if mass is not conserved, see which
    reactions are creating or losing mass.

    Note that mass will not be wholly conserved in this model because
    protons are not accounted for when radicals are involved in
    non-Hydrogen-abstraction reactions, but all other reactions should
    conserve mass.

    Python 2 only (print statements).

    Parameters
    ----------
    None

    Returns
    -------
    total_mass_balance : numpy array
        an array with the amount of mass gained or lost
        in each reaction
    """
    specieslist = get_specieslist(set_paths()[0])
    speciesindices = get_speciesindices(specieslist)[0]
    kmatrix = build_k_matrix(set_paths()[1])
    species_rxns = build_species_rxns_dict(set_paths()[0])
    # Make vector of the MW's of each species, in the order from speciesindices
    mw_vector = np.zeros((len(MW), 1))
    for species in MW:
        mw_vector[speciesindices[species]] = MW[species][0]
    # row vector so the dot product below yields one value per reaction
    mw_vector = mw_vector.transpose()
    # In this stoichiometric matrix, rows are species, columns are reactions
    stoicmatrix = np.zeros((len(speciesindices), len(kmatrix)), dtype='float')
    for species in species_rxns:
        i = speciesindices[species]
        for reaction in species_rxns[species]:
            j = reaction[0]
            # reaction[2] is the signed stoichiometric coefficient string
            stoicmatrix[i, j] += float(reaction[2])
    # The result of this dot product should be a vector full of zeros.
    # This will not be the case because protons are not accounted for when
    # radicals are involved in non-H-abstraction rxns,
    # but all other reactions should be 0
    total_mass_balance = np.dot(mw_vector, stoicmatrix[:, :])
    # Use this to look at which reactions are creating or losing mass
    # (from missing Hydrogen)
    h_sum = 0
    for i, value in enumerate(total_mass_balance[0, :]):
        if value != 0:
            print i, value
            h_sum += value
    print '\nNet mass change = %s' % h_sum
    return total_mass_balance
def check_species_fate():
    """Report species that are only ever produced, never consumed
    (assuming every reaction in the scheme can occur).

    Returns
    -------
    fate_dict : dict
        maps each never-consumed species to the string 'produced only'
    """
    specieslist = get_specieslist(set_paths()[0])
    species_rxns = build_species_rxns_dict(set_paths()[0])
    fate_dict = {}
    for species in specieslist:
        # entry[1] is the sign of the species in that reaction's net
        # rate equation; negative means it is consumed somewhere.
        if not any(entry[1] < 0 for entry in species_rxns[species]):
            fate_dict[species] = 'produced only'
    return fate_dict
| bsd-2-clause | -1,464,800,841,360,162,300 | 34.439072 | 79 | 0.569643 | false |
simphony/simphony-mayavi | simphony_mayavi/cuds/tests/test_vtk_lattice.py | 1 | 9617 | import unittest
from functools import partial
from numpy.testing import assert_array_equal
from hypothesis import given
from hypothesis.strategies import sampled_from
from tvtk.api import tvtk
from simphony.core.cuba import CUBA
from simphony.testing.abc_check_lattice import (
CheckLatticeNodeOperations, CheckLatticeNodeCoordinates)
from simphony.testing.utils import compare_lattice_nodes
from simphony.core.data_container import DataContainer
from simphony.cuds.lattice import (
make_hexagonal_lattice, make_cubic_lattice, make_orthorhombic_lattice,
make_body_centered_cubic_lattice, make_face_centered_cubic_lattice,
make_rhombohedral_lattice, make_tetragonal_lattice,
make_body_centered_tetragonal_lattice,
make_face_centered_orthorhombic_lattice,
make_base_centered_orthorhombic_lattice,
make_body_centered_orthorhombic_lattice,
make_monoclinic_lattice,
make_base_centered_monoclinic_lattice,
make_triclinic_lattice,
Lattice, LatticeNode)
from simphony.cuds.primitive_cell import BravaisLattice, PrimitiveCell
from simphony_mayavi.cuds.api import VTKLattice
from simphony_mayavi.core.api import supported_cuba
lattice_types = sampled_from([
make_cubic_lattice('test', 0.1, (3, 6, 5)),
make_hexagonal_lattice('test', 0.1, 0.2, (5, 4, 6)),
make_orthorhombic_lattice('test', (0.1, 0.2, 0.3), (3, 7, 6)),
make_body_centered_cubic_lattice('test', 0.1, (3, 6, 5)),
make_face_centered_cubic_lattice('test', 0.1, (3, 6, 5)),
make_rhombohedral_lattice('test', 0.1, 0.2, (3, 6, 5)),
make_tetragonal_lattice('test', 0.1, 0.2, (3, 6, 5)),
make_body_centered_tetragonal_lattice('test', 0.1, 0.5, (3, 6, 5)),
make_face_centered_orthorhombic_lattice('test', (0.5, 0.6, 0.7),
(3, 6, 5)),
make_base_centered_orthorhombic_lattice('test', (0.5, 0.6, 0.7),
(3, 6, 5)),
make_body_centered_orthorhombic_lattice('test', (0.5, 0.6, 0.7),
(3, 6, 5)),
make_monoclinic_lattice('test', (0.5, 0.6, 0.7), 0.4,
(3, 6, 5)),
make_base_centered_monoclinic_lattice('test', (0.5, 0.6, 0.7),
0.4, (3, 6, 5)),
make_triclinic_lattice('test', (0.5, 0.6, 0.7), (0.4, 0.3, 0.2),
(3, 6, 5))])
class TestVTKLatticeNodeOperations(
CheckLatticeNodeOperations, unittest.TestCase):
def container_factory(self, name, primitive_cell, size, origin):
return VTKLattice.empty(name, primitive_cell, size, origin)
def supported_cuba(self):
return supported_cuba()
class TestVTKLatticeNodeCoordinates(
CheckLatticeNodeCoordinates, unittest.TestCase):
def container_factory(self, name, primitive_cell, size, origin):
return VTKLattice.empty(name, primitive_cell, size, origin)
def supported_cuba(self):
return supported_cuba()
class TestVTKLattice(unittest.TestCase):
def setUp(self):
self.addTypeEqualityFunc(
LatticeNode, partial(compare_lattice_nodes, testcase=self))
def test_get_node_on_a_xy_plane_hexagonal_lattice(self):
# given
lattice = make_hexagonal_lattice('test', 0.1, 0.2, (5, 4, 6))
self.add_velocity(lattice)
vtk_lattice = VTKLattice.from_lattice(lattice)
# when
node = vtk_lattice.get((1, 1, 0))
# then
self.assertEqual(
node, LatticeNode(
(1, 1, 0),
data=DataContainer(VELOCITY=(1, 1, 0))))
def test_iter_nodes_on_a_xy_plane_hexagonal_lattice(self):
# given
lattice = make_hexagonal_lattice('test', 0.1, 0.2, (5, 4, 6))
self.add_velocity(lattice)
vtk_lattice = VTKLattice.from_lattice(lattice)
# when/then
for node in vtk_lattice.iter(item_type=CUBA.NODE):
self.assertEqual(
node, LatticeNode(
node.index,
data=DataContainer(VELOCITY=node.index)))
self.assertEqual(sum(1 for _ in vtk_lattice.iter(
item_type=CUBA.NODE)), 120)
def test_update_nodes_on_a_xy_plane_hexagonal_lattice(self):
# given
lattice = make_hexagonal_lattice('test', 0.1, 0.2, (5, 4, 6))
self.add_velocity(lattice)
vtk_lattice = VTKLattice.from_lattice(lattice)
node = vtk_lattice.get((1, 1, 0))
# when
node.data = DataContainer(VELOCITY=(1, 54, 0.3))
vtk_lattice.update((node,))
# then
new_node = vtk_lattice.get((1, 1, 0))
self.assertEqual(
new_node, LatticeNode(
(1, 1, 0),
data=DataContainer(VELOCITY=(1, 54, 0.3))))
def test_get_coordinate_on_a_xy_plane_hexagonal_lattice(self):
# given
lattice = make_hexagonal_lattice('test', 0.1, 0.2, (5, 4, 6))
self.add_velocity(lattice)
vtk_lattice = VTKLattice.from_lattice(lattice)
# when/then
for node in lattice.iter(item_type=CUBA.NODE):
assert_array_equal(
vtk_lattice.get_coordinate(node.index),
lattice.get_coordinate(node.index))
def test_initialization_with_unknown_type(self):
#
lattice = make_hexagonal_lattice('test', 0.1, 0.2, (5, 4, 6))
self.add_velocity(lattice)
data = VTKLattice.from_lattice(lattice)
primitive_cell = PrimitiveCell(lattice.primitive_cell.p1,
lattice.primitive_cell.p2,
lattice.primitive_cell.p3,
"Cubic")
# when/then
with self.assertRaises(ValueError):
VTKLattice(
name=lattice.name, primitive_cell=primitive_cell,
data_set=data.data_set)
def test_initialization_with_unfamiliar_dataset(self):
# given
data_set = tvtk.UnstructuredGrid(points=[(0, 0, 0,), (1, 1, 1)])
primitive_cell = PrimitiveCell.for_cubic_lattice(1.)
# when/then
with self.assertRaises(TypeError):
VTKLattice(
name='test', primitive_cell=primitive_cell,
data_set=data_set)
def test_create_empty_with_unknown_type(self):
primitive_cell = PrimitiveCell((1., 0., 0.), (0., 1., 0.),
(0., 0., 1.), "Cubic")
# when/then
with self.assertRaises(ValueError):
VTKLattice.empty(
name='test', primitive_cell=primitive_cell, size=(3, 4, 5),
origin=(0.0, 0.0, 0.0))
def test_create_from_unfamiliar_dataset(self):
# given
data_set = tvtk.UnstructuredGrid(points=[(0, 0, 0,), (1, 1, 1)])
# when/then
with self.assertRaises(TypeError):
VTKLattice.from_dataset(name='test', data_set=data_set)
@given(lattice_types)
def test_initialization_with_dataset(self, lattice):
# given
expected = VTKLattice.from_lattice(lattice)
# when
vtk_lattice = VTKLattice.from_dataset('test', expected.data_set)
# then
self.assertEqual(vtk_lattice.primitive_cell.bravais_lattice,
lattice.primitive_cell.bravais_lattice)
@given(lattice_types)
def test_creating_a_vtk_lattice_from_cuds_lattice(self, lattice):
# when
vtk_lattice = VTKLattice.from_lattice(lattice)
# then
self.assertEqual(vtk_lattice.primitive_cell.bravais_lattice,
lattice.primitive_cell.bravais_lattice)
self.assertEqual(vtk_lattice.data, lattice.data)
self.assertEqual(vtk_lattice.size, lattice.size)
assert_array_equal(vtk_lattice.origin, lattice.origin)
assert_array_equal(vtk_lattice.primitive_cell.p1,
lattice.primitive_cell.p1)
assert_array_equal(vtk_lattice.primitive_cell.p2,
lattice.primitive_cell.p2)
assert_array_equal(vtk_lattice.primitive_cell.p3,
lattice.primitive_cell.p3)
def test_data_setter(self):
# when
primitive_cell = PrimitiveCell.for_cubic_lattice(1.)
vtk_lattice = VTKLattice.empty('test', primitive_cell, (2, 3, 4),
(0, 0, 0))
vtk_lattice.data = {CUBA.TEMPERATURE: 40.}
# then
self.assertIsInstance(vtk_lattice.data, DataContainer)
def test_exception_create_dataset_with_inconsistent_lattice_type(self):
bad_lattice_types = (BravaisLattice.CUBIC,
BravaisLattice.TETRAGONAL,
BravaisLattice.ORTHORHOMBIC)
for lattice_type in bad_lattice_types:
# when
primitive_cell = PrimitiveCell((1., 0., 0.), # require PolyData
(0.5, 0.5, 0.),
(0., 0., 1.),
lattice_type)
lattice = Lattice('test', primitive_cell, (2, 3, 4),
(0., 0., 0.))
# then
with self.assertRaises(ValueError):
VTKLattice.from_lattice(lattice)
def add_velocity(self, lattice):
new_nodes = []
for node in lattice.iter(item_type=CUBA.NODE):
node.data[CUBA.VELOCITY] = node.index
new_nodes.append(node)
lattice.update(new_nodes)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | -8,452,465,650,325,802,000 | 37.468 | 76 | 0.580638 | false |
yulin724/rt-thread-comment | tools/building.py | 1 | 10474 | import os
import sys
import string
from SCons.Script import *
BuildOptions = {}
Projects = []
Rtt_Root = ''
Env = None
def _get_filetype(fn):
if fn.rfind('.c') != -1 or fn.rfind('.C') != -1 or fn.rfind('.cpp') != -1:
return 1
# assimble file type
if fn.rfind('.s') != -1 or fn.rfind('.S') != -1:
return 2
# header type
if fn.rfind('.h') != -1:
return 5
# other filetype
return 5
def splitall(loc):
"""
Return a list of the path components in loc. (Used by relpath_).
The first item in the list will be either ``os.curdir``, ``os.pardir``, empty,
or the root directory of loc (for example, ``/`` or ``C:\\).
The other items in the list will be strings.
Adapted from *path.py* by Jason Orendorff.
"""
parts = []
while loc != os.curdir and loc != os.pardir:
prev = loc
loc, child = os.path.split(prev)
if loc == prev:
break
parts.append(child)
parts.append(loc)
parts.reverse()
return parts
def _make_path_relative(origin, dest):
"""
Return the relative path between origin and dest.
If it's not possible return dest.
If they are identical return ``os.curdir``
Adapted from `path.py <http://www.jorendorff.com/articles/python/path/>`_ by Jason Orendorff.
"""
origin = os.path.abspath(origin).replace('\\', '/')
dest = os.path.abspath(dest).replace('\\', '/')
#
orig_list = splitall(os.path.normcase(origin))
# Don't normcase dest! We want to preserve the case.
dest_list = splitall(dest)
#
if orig_list[0] != os.path.normcase(dest_list[0]):
# Can't get here from there.
return dest
#
# Find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != os.path.normcase(dest_seg):
break
i += 1
#
# Now i is the point where the two paths diverge.
# Need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = [os.pardir] * (len(orig_list) - i)
# Need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# If they happen to be identical, use os.curdir.
return os.curdir
else:
# return os.path.join(*segments).replace('\\', '/')
return os.path.join(*segments)
def IARProject(target, script):
import xml.etree.ElementTree as etree
project = file(target, "wb")
project_path = os.path.dirname(os.path.abspath(target))
tree = etree.parse('template.ewp')
tree.write('project.ewp')
def MDKProject(target, script):
template = file('template.Uv2', "rb")
lines = template.readlines()
project = file(target, "wb")
project_path = os.path.dirname(os.path.abspath(target))
line_index = 5
# write group
for group in script:
lines.insert(line_index, 'Group (%s)\r\n' % group['name'])
line_index += 1
lines.insert(line_index, '\r\n')
line_index += 1
# write file
ProjectFiles = []
CPPPATH = []
CPPDEFINES = []
LINKFLAGS = ''
CCFLAGS = ''
# number of groups
group_index = 1
for group in script:
# print group['name']
# get each include path
if group.has_key('CPPPATH') and group['CPPPATH']:
if CPPPATH:
CPPPATH += group['CPPPATH']
else:
CPPPATH += group['CPPPATH']
# get each group's definitions
if group.has_key('CPPDEFINES') and group['CPPDEFINES']:
if CPPDEFINES:
CPPDEFINES += ';' + group['CPPDEFINES']
else:
CPPDEFINES += group['CPPDEFINES']
# get each group's link flags
if group.has_key('LINKFLAGS') and group['LINKFLAGS']:
if LINKFLAGS:
LINKFLAGS += ' ' + group['LINKFLAGS']
else:
LINKFLAGS += group['LINKFLAGS']
# generate file items
for node in group['src']:
fn = node.rfile()
name = fn.name
path = os.path.dirname(fn.abspath)
basename = os.path.basename(path)
path = _make_path_relative(project_path, path)
path = os.path.join(path, name)
if ProjectFiles.count(name):
name = basename + '_' + name
ProjectFiles.append(name)
lines.insert(line_index, 'File %d,%d,<%s><%s>\r\n'
% (group_index, _get_filetype(name), path, name))
line_index += 1
group_index = group_index + 1
lines.insert(line_index, '\r\n')
line_index += 1
# remove repeat path
paths = set()
for path in CPPPATH:
inc = _make_path_relative(project_path, os.path.normpath(path))
paths.add(inc) #.replace('\\', '/')
paths = [i for i in paths]
CPPPATH = string.join(paths, ';')
definitions = [i for i in set(CPPDEFINES)]
CPPDEFINES = string.join(definitions, ', ')
while line_index < len(lines):
if lines[line_index].startswith(' ADSCINCD '):
lines[line_index] = ' ADSCINCD (' + CPPPATH + ')\r\n'
if lines[line_index].startswith(' ADSLDMC ('):
lines[line_index] = ' ADSLDMC (' + LINKFLAGS + ')\r\n'
if lines[line_index].startswith(' ADSCDEFN ('):
lines[line_index] = ' ADSCDEFN (' + CPPDEFINES + ')\r\n'
line_index += 1
# write project
for line in lines:
project.write(line)
project.close()
def BuilderProject(target, script):
project = file(target, "wb")
project_path = os.path.dirname(os.path.abspath(target))
# write file
CPPPATH = []
CPPDEFINES = []
LINKFLAGS = ''
CCFLAGS = ''
# number of groups
group_index = 1
for group in script:
# print group['name']
# generate file items
for node in group['src']:
fn = node.rfile()
name = fn.name
path = os.path.dirname(fn.abspath)
path = _make_path_relative(project_path, path)
path = os.path.join(path, name)
project.write('%s\r\n' % path)
group_index = group_index + 1
project.close()
class Win32Spawn:
def spawn(self, sh, escape, cmd, args, env):
import subprocess
newargs = string.join(args[1:], ' ')
cmdline = cmd + " " + newargs
startupinfo = subprocess.STARTUPINFO()
# startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, shell = False)
data, err = proc.communicate()
rv = proc.wait()
if rv:
print err
return rv
if data:
print data
return 0
def PrepareBuilding(env, root_directory):
import SCons.cpp
import rtconfig
global BuildOptions
global Projects
global Env
global Rtt_Root
Env = env
Rtt_Root = root_directory
# patch for win32 spawn
if env['PLATFORM'] == 'win32' and rtconfig.PLATFORM == 'gcc' and sys.version_info < (2, 6, 0):
win32_spawn = Win32Spawn()
win32_spawn.env = env
env['SPAWN'] = win32_spawn.spawn
# add program path
env.PrependENVPath('PATH', rtconfig.EXEC_PATH)
# parse rtconfig.h to get used component
PreProcessor = SCons.cpp.PreProcessor()
f = file('rtconfig.h', 'r')
contents = f.read()
f.close()
PreProcessor.process_contents(contents)
BuildOptions = PreProcessor.cpp_namespace
if (GetDepend('RT_USING_NEWLIB') == False) and rtconfig.PLATFORM == 'gcc':
AddDepend('RT_USING_MINILIBC')
# add target option
AddOption('--target',
dest='target',
type='string',
help='set target project: mdk')
if GetOption('target'):
SetOption('no_exec', 1)
#env['CCCOMSTR'] = "CC $TARGET"
#env['ASCOMSTR'] = "AS $TARGET"
#env['LINKCOMSTR'] = "Link $TARGET"
# board build script
objs = SConscript('SConscript', variant_dir='build/bsp', duplicate=0)
Repository(Rtt_Root)
# include kernel
objs.append(SConscript('src/SConscript', variant_dir='build/src', duplicate=0))
# include libcpu
objs.append(SConscript('libcpu/SConscript', variant_dir='build/libcpu', duplicate=0))
# include components
objs.append(SConscript('components/SConscript', variant_dir='build/components', duplicate=0))
return objs
def GetDepend(depend):
building = True
if type(depend) == type('str'):
if not BuildOptions.has_key(depend):
building = False
return building
# for list type depend
for item in depend:
if item != '':
if not BuildOptions.has_key(item):
building = False
return building
def AddDepend(option):
BuildOptions[option] = 1
def DefineGroup(name, src, depend, **parameters):
global Env
if not GetDepend(depend):
return []
group = parameters
group['name'] = name
if type(src) == type(['src1', 'str2']):
group['src'] = File(src)
else:
group['src'] = src
Projects.append(group)
if group.has_key('CCFLAGS'):
Env.Append(CCFLAGS = group['CCFLAGS'])
if group.has_key('CPPPATH'):
Env.Append(CPPPATH = group['CPPPATH'])
if group.has_key('CPPDEFINES'):
Env.Append(CPPDEFINES = group['CPPDEFINES'])
if group.has_key('LINKFLAGS'):
Env.Append(LINKFLAGS = group['LINKFLAGS'])
objs = Env.Object(group['src'])
if group.has_key('LIBRARY'):
objs = Env.Library(name, objs)
return objs
def EndBuilding(target):
import rtconfig
Env.AddPostAction(target, rtconfig.POST_ACTION)
if GetOption('target') == 'iar':
IARProject('project.ewp', Projects)
if GetOption('target') == 'mdk':
MDKProject('project.Uv2', Projects)
| gpl-2.0 | 4,825,416,945,449,508,000 | 27.01385 | 98 | 0.557953 | false |
whiskeylover/idreamoftoast | toast/app.py | 1 | 6684 | import datetime
import json
from urllib.parse import unquote
from urllib.request import urlopen
from flask import jsonify, Response
from peewee import CharField, DateTimeField, IntegerField, Model
import config
#-------------------------------------------------------------------------------
# Constants
#-------------------------------------------------------------------------------
MAX_TOP_DREAMS = 8
EXTERNAL_RESOURCE_REFRESH_FREQ = 30
#-------------------------------------------------------------------------------
# Config
#-------------------------------------------------------------------------------
app = config.get_app()
db = config.get_database()
#-------------------------------------------------------------------------------
# Models
#-------------------------------------------------------------------------------
class Dream(Model):
"""" Dream model. """
name = CharField()
count = IntegerField(default=0)
picURL = CharField(null=True)
picURLthn = CharField(null=True)
definition = CharField(null=True)
created_on = DateTimeField(default=datetime.datetime.now)
modified_on = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
#-------------------------------------------------------------------------------
# Methods
#-------------------------------------------------------------------------------
def init_db():
""" Initialize database. """
db.connect()
if not Dream.table_exists():
Dream.create_table()
def get_dreams(order, limit):
""" Helper method for getting dreams. """
dreams = Dream.select().where(Dream.count > 0).order_by(order)[:limit]
return [{'name':d.name, 'count':d.count, 'definition': d.definition, 'picURL': d.picURL, 'picURLthn': d.picURLthn} for d in dreams]
def get_dream(dream):
""" Helper method for getting a single dream. """
d = Dream.select().where(Dream.name == dream.lower()).first()
if d is None:
d = Dream.create(name=dream.lower(), count=0, picURL=get_flickrpicURL(dream), picURLthn=get_flickrpicURL(dream), definition=get_urbandictionary(dream))
return {'name':d.name, 'count':d.count, 'definition': d.definition, 'picURL': d.picURL, 'picURLthn': d.picURLthn}
#-------------------------------------------------------------------------------
# Routes / Controllers
#-------------------------------------------------------------------------------
@app.route("/dream/define/<term>")
def get_urbandictionary(term):
try:
response = urlopen('http://api.urbandictionary.com/v0/define?term=' + term.replace(" ", "+"))
html = response.read()
j = json.loads(html)
print("Refreshed " + term + "'s definition")
return j['list'][0]['definition']
except:
return ""
@app.route("/dream/picURL/<term>")
def get_flickrpicURL(term):
api_key = 'b60ce2a4db0b09dc4e9e895efe6d660e'
URL = 'https://api.flickr.com/services/rest/?method=flickr.photos.search&' + \
'api_key=' + api_key + \
'&tags=' + term.replace(" ", "+") + \
'&privacy_filter=1&format=json&nojsoncallback=1'
try:
response = urlopen(URL)
html = response.read()
j = json.loads(html)
print("Refreshed " + term + "'s picURL")
return "https://farm{0}.staticflickr.com/{1}/{2}_{3}_z.jpg".format( \
j['photos']['photo'][0]['farm'], \
j['photos']['photo'][0]['server'], \
j['photos']['photo'][0]['id'], \
j['photos']['photo'][0]['secret'])
except:
return "assets/img/888888.png"
@app.route("/dream/picURLthn/<term>")
def get_flickrpicURLthn(term):
api_key = 'b60ce2a4db0b09dc4e9e895efe6d660e'
URL = 'http://api.flickr.com/services/rest/?method=flickr.photos.search&' + \
'api_key=' + api_key + \
'&tags=' + term.replace(" ", "+") + \
'&privacy_filter=1&format=json&nojsoncallback=1'
try:
response = urlopen(URL)
html = response.read()
j = json.loads(html)
print("Refreshed " + term + "'s picURLthn")
return "http://farm{0}.staticflickr.com/{1}/{2}_{3}_q.jpg".format( \
j['photos']['photo'][0]['farm'], \
j['photos']['photo'][0]['server'], \
j['photos']['photo'][0]['id'], \
j['photos']['photo'][0]['secret'])
except:
return "assets/img/888888thn.png"
@app.route("/dreams/add/<dream>")
def add_dream(dream):
d, created = Dream.get_or_create(name=unquote(dream.lower()))
d.count += 1;
# if the record has just been created, fetch the picURL and definition
if created:
print("Creating new dream")
d.created_on = datetime.datetime.now()
d.modified_on = datetime.datetime.now()
d.picURL = get_flickrpicURL(d.name)
d.picURLthn = get_flickrpicURLthn(d.name)
d.definition = get_urbandictionary(d.name)
else:
print("Fetching existing dream")
# if the definition and URL are more than EXTERNAL_RESOURCE_REFRESH_FREQ days old
days_old = 0
try:
days_old = (d.modified_on - d.created_on).days
except:
days_old = 0
if days_old >= EXTERNAL_RESOURCE_REFRESH_FREQ:
d.picURL = get_flickrpicURL(d.name)
d.picURLthn = get_flickrpicURLthn(d.name)
d.definition = get_urbandictionary(d.name)
d.modified_on = datetime.datetime.now()
d.save()
return jsonify(data={'id': d.id,
'count': d.count})
@app.route("/dreams/top")
def top_dreams():
a = get_dreams(Dream.count.desc(), MAX_TOP_DREAMS)
#Response.headers.add('Access-Control-Allow-Origin', '*')
return Response(json.dumps(a), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
#return jsonify(data=get_dreams(Dream.count.desc(), MAX_TOP_DREAMS))
@app.route("/dreams/recent")
def recent_dreams():
a = get_dreams(Dream.modified_on.desc(), MAX_TOP_DREAMS)
return Response(json.dumps(a), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
#return jsonify(data=get_dreams(Dream.modified_on.desc(), MAX_TOP_DREAMS))
@app.route("/dreams/get/<dream>")
def get_single_dream(dream):
a = get_dream(unquote(dream.lower()))
return Response(json.dumps(a), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
#-------------------------------------------------------------------------------
# Main
#-------------------------------------------------------------------------------
if __name__ == "__main__":
# Development only! Reloads server on file change.
init_db()
app.debug = True
app.run()
| apache-2.0 | 3,691,948,276,489,078,300 | 33.8125 | 159 | 0.529473 | false |
yochem/glutenfree | setup.py | 1 | 1539 | """
glutenfree
"""
from setuptools import find_packages, setup
dependencies = ['click']
setup(
name='glutenfree',
version='0.1.0',
url='https://github.com/yochem/glutenfree',
license='MIT',
author='Yochem van Rosmalen',
author_email='[email protected]',
description='glutenfree',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'glutenfree = glutenfree/main:main'
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
# 'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
# 'Development Status :: 6 - Mature',
# 'Development Status :: 7 - Inactive',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| mit | 3,209,343,961,780,569,000 | 31.0625 | 72 | 0.590643 | false |
gonicus/gosa | common/src/gosa/common/network.py | 1 | 3494 | # This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import dbus
from gosa.common.components.dbus_runner import DBusRunner
from logging import getLogger
NM_STATE_UNKNOWN = 0
NM_STATE_ASLEEP = 10
NM_STATE_DISCONNECTED = 20
NM_STATE_DISCONNECTING = 30
NM_STATE_CONNECTING = 40
NM_STATE_CONNECTED_LOCAL = 50
NM_STATE_CONNECTED_SITE = 60
NM_STATE_CONNECTED_GLOBAL = 70
class Monitor(object):
def __init__(self, callback=None):
self.__callback = callback
self.log = getLogger(__name__)
self.__running = False
self.__thread = None
self.log.info("Initializing network state monitor")
# Initialize DBUS
dr = DBusRunner.get_instance()
self.__bus = dr.get_system_bus()
# Register actions to detect the network state
self.__upower_actions()
self.__network_actions()
# Get current state
try:
proxy = self.__bus.get_object('org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager')
iface = dbus.Interface(proxy, 'org.freedesktop.DBus.Properties')
version = str(iface.Get("org.freedesktop.NetworkManager", "Version"))
if tuple(version.split(".")) < ("0", "9"):
self.log.warning("network-manager is too old: defaulting to state 'online'")
self.__state = True
else:
# Register actions to detect the network state
self.__upower_actions()
self.__network_actions()
self.__state = iface.Get("org.freedesktop.NetworkManager", "State") in [NM_STATE_CONNECTED_SITE, NM_STATE_CONNECTED_GLOBAL]
except:
self.log.warning("no network-manager detected: defaulting to state 'online'")
self.__state = True
def is_online(self):
return self.__state
def __upower_actions(self):
try:
proxy = self.__bus.get_object('org.freedesktop.UPower', '/org/freedesktop/UPower')
iface = dbus.Interface(proxy, 'org.freedesktop.UPower')
iface.connect_to_signal("Sleeping", self.__upower_sleeping)
except:
self.log.warning("no UPower detected: will not be able to suspend network")
def __network_actions(self):
try:
proxy = self.__bus.get_object('org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager')
iface = dbus.Interface(proxy, 'org.freedesktop.NetworkManager')
iface.connect_to_signal("StateChanged", self.__network_state)
except:
self.log.warning("no network-manager detected: will not be able to suspend or activate network")
def __upower_sleeping(self):
self.log.info("network down")
self.__state = False
if self.__callback:
self.__callback(False)
def __network_state(self, state):
if state in [NM_STATE_CONNECTED_SITE, NM_STATE_CONNECTED_GLOBAL]:
if self.__state is False:
self.log.info("network up")
self.__state = True
if self.__callback:
self.__callback(True)
elif self.__state is True:
self.log.info("network down")
self.__state = False
if self.__callback:
self.__callback(False)
| lgpl-2.1 | -8,663,401,339,332,908,000 | 31.962264 | 139 | 0.603892 | false |
rdo-management/tuskar-ui | tuskar_ui/test/test_data/utils.py | 1 | 2129 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.test_data import utils
def load_test_data(load_onto=None):
from openstack_dashboard.test.test_data import cinder_data
from openstack_dashboard.test.test_data import glance_data
from openstack_dashboard.test.test_data import heat_data
from openstack_dashboard.test.test_data import keystone_data
from openstack_dashboard.test.test_data import neutron_data
from openstack_dashboard.test.test_data import nova_data
from openstack_dashboard.test.test_data import swift_data
from tuskar_ui.test.test_data import exceptions
from tuskar_ui.test.test_data import flavor_data
from tuskar_ui.test.test_data import heat_data as tuskar_heat_data
from tuskar_ui.test.test_data import keystone_data as tuskar_keystone_data
from tuskar_ui.test.test_data import node_data
from tuskar_ui.test.test_data import tuskar_data
# The order of these loaders matters, some depend on others.
loaders = (exceptions.data,
keystone_data.data,
glance_data.data,
nova_data.data,
cinder_data.data,
neutron_data.data,
swift_data.data,
heat_data.data,
flavor_data.data,
node_data.data,
tuskar_heat_data.data,
tuskar_keystone_data.data,
tuskar_data.data)
if load_onto:
for data_func in loaders:
data_func(load_onto)
return load_onto
else:
return utils.TestData(*loaders)
| apache-2.0 | 2,699,559,248,062,084,000 | 39.942308 | 78 | 0.678253 | false |
yephper/django | django/contrib/gis/geos/prototypes/predicates.py | 1 | 1630 | """
This module houses the GEOS ctypes prototype functions for the
unary and binary predicate operations on geometries.
"""
from ctypes import c_char, c_char_p, c_double
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import check_predicate
# ## Binary & unary predicate factories ##
class UnaryPredicate(GEOSFuncFactory):
"For GEOS unary predicate functions."
argtypes = [GEOM_PTR]
restype = c_char
errcheck = staticmethod(check_predicate)
class BinaryPredicate(UnaryPredicate):
"For GEOS binary predicate functions."
argtypes = [GEOM_PTR, GEOM_PTR]
# ## Unary Predicates ##
geos_hasz = UnaryPredicate('GEOSHasZ')
geos_isclosed = UnaryPredicate('GEOSisClosed')
geos_isempty = UnaryPredicate('GEOSisEmpty')
geos_isring = UnaryPredicate('GEOSisRing')
geos_issimple = UnaryPredicate('GEOSisSimple')
geos_isvalid = UnaryPredicate('GEOSisValid')
# ## Binary Predicates ##
geos_contains = BinaryPredicate('GEOSContains')
geos_covers = BinaryPredicate('GEOSCovers')
geos_crosses = BinaryPredicate('GEOSCrosses')
geos_disjoint = BinaryPredicate('GEOSDisjoint')
geos_equals = BinaryPredicate('GEOSEquals')
geos_equalsexact = BinaryPredicate('GEOSEqualsExact', argtypes=[GEOM_PTR, GEOM_PTR, c_double])
geos_intersects = BinaryPredicate('GEOSIntersects')
geos_overlaps = BinaryPredicate('GEOSOverlaps')
geos_relatepattern = BinaryPredicate('GEOSRelatePattern', argtypes=[GEOM_PTR, GEOM_PTR, c_char_p])
geos_touches = BinaryPredicate('GEOSTouches')
geos_within = BinaryPredicate('GEOSWithin')
| bsd-3-clause | -4,548,278,871,508,046,000 | 35.906977 | 98 | 0.752761 | false |
DavidWhittingham/agsadmin | agsadmin/sharing_admin/community/groups/Group.py | 1 | 1457 | from __future__ import (absolute_import, division, print_function, unicode_literals)
from builtins import (ascii, bytes, chr, dict, filter, hex, input, int, map, next, oct, open, pow, range, round, str,
super, zip)
from ...._utils import send_session_request
from ..._PortalEndpointBase import PortalEndpointBase
from .CreateUpdateGroupParams import CreateUpdateGroupParams
class Group(PortalEndpointBase):
@property
def id(self):
return self._pdata["id"]
@property
def _url_full(self):
return "{0}/{1}".format(self._url_base, self.id)
def __init__(self, requests_session, url_base, id):
super().__init__(requests_session, url_base)
self._pdata = {"id": id}
def get_properties(self):
"""
Gets the properties of the item.
"""
return self._get()
def update(self, update_group_params, clear_empty_fields=False):
"""
Updates the group properties.
"""
update_group_params = update_group_params._get_params() if isinstance(
update_group_params, CreateUpdateGroupParams) else update_group_params.copy()
if not "clearEmptyFields" in update_group_params:
update_group_params["clearEmptyFields"] = clear_empty_fields
r = self._create_operation_request(self, "update", method="POST", data=update_group_params)
return send_session_request(self._session, r).json() | bsd-3-clause | 7,639,082,891,741,181,000 | 32.906977 | 117 | 0.643102 | false |
qubole/qds-sdk-py | qds_sdk/cluster.py | 1 | 61544 | """
The cluster module contains the definitions for retrieving and manipulating
cluster information.
"""
from qds_sdk.qubole import Qubole
from qds_sdk.resource import Resource
from argparse import ArgumentParser
from qds_sdk import util
import logging
import json
log = logging.getLogger("qds_cluster")
def str2bool(v):
    """Return True if *v* is a common truthy string, False otherwise.

    The comparison is case-insensitive; recognized truthy values are
    "yes", "true", "t" and "1". Any other string yields False.
    """
    truthy_values = ("yes", "true", "t", "1")
    return v.lower() in truthy_values
class Cluster(Resource):
    """
    qds_sdk.Cluster is the class for retrieving and manipulating cluster
    information.

    All REST calls go through Qubole.agent(); class methods below are thin
    wrappers around GET/POST/PUT/DELETE on the "clusters" entity.
    """

    rest_entity_path = "clusters"
    # Default API version used by the state-changing endpoints below.
    api_version = "v1.2"

    @classmethod
    def _parse_list(cls, args):
        """
        Parse command line arguments to construct a dictionary of cluster
        parameters that can be used to determine which clusters to list.

        Args:
            `args`: sequence of arguments

        Returns:
            Dictionary that can be used to determine which clusters to list
        """
        argparser = ArgumentParser(prog="cluster list")
        group = argparser.add_mutually_exclusive_group()
        group.add_argument("--id", dest="cluster_id",
                           help="show cluster with this id")
        group.add_argument("--label", dest="label",
                           help="show cluster with this label")
        group.add_argument("--state", dest="state", action="store",
                           choices=['up', 'down', 'pending', 'terminating'],
                           help="list only clusters in the given state")
        pagination_group = group.add_argument_group()
        pagination_group.add_argument("--page", dest="page", action="store", type=int,
                                      help="page number")
        pagination_group.add_argument("--per-page", dest="per_page", action="store", type=int,
                                      help="number of clusters to be retrieved per page")
        arguments = argparser.parse_args(args)
        return vars(arguments)

    @classmethod
    def list(cls, state=None, page=None, per_page=None):
        """
        List existing clusters present in your account.

        Kwargs:
            `state`: list only those clusters which are in this state
            `page`: page number to fetch (ignored on API v1.2)
            `per_page`: clusters per page (ignored on API v1.2)

        Returns:
            List of clusters satisfying the given criteria
        """
        conn = Qubole.agent()
        params = {}
        if page:
            params['page'] = page
        if per_page:
            params['per_page'] = per_page
        # Pagination params exist only on newer API versions; v1.2 returns
        # everything regardless, so warn and drop them.
        if (params.get('page') or params.get('per_page')) and Qubole.version == 'v1.2':
            log.warn("Pagination is not supported with API v1.2. Fetching all clusters.")
        params = None if not params else params
        cluster_list = conn.get(cls.rest_entity_path, params=params)
        if state is None:
            return cluster_list
        elif state is not None:
            result = []
            # Response shape differs by API version: v1.2 nests under
            # "cluster", v1.3 wraps the list under a "clusters" key.
            if Qubole.version == 'v1.2':
                for cluster in cluster_list:
                    if state.lower() == cluster['cluster']['state'].lower():
                        result.append(cluster)
            elif Qubole.version == 'v1.3':
                cluster_list = cluster_list['clusters']
                for cluster in cluster_list:
                    if state.lower() == cluster['state'].lower():
                        result.append(cluster)
            return result

    @classmethod
    def show(cls, cluster_id_label):
        """
        Show information about the cluster with id/label `cluster_id_label`.
        """
        conn = Qubole.agent()
        return conn.get(cls.element_path(cluster_id_label))

    @classmethod
    def status(cls, cluster_id_label):
        """
        Show the status of the cluster with id/label `cluster_id_label`.
        """
        conn = Qubole.agent(version=Cluster.api_version)
        return conn.get(cls.element_path(cluster_id_label) + "/state")

    @classmethod
    def master(cls, cluster_id_label):
        """
        Show the details of the master of the cluster with id/label `cluster_id_label`.

        Returns the master node entry when the cluster is UP; otherwise the
        raw status payload is returned unchanged.
        """
        cluster_status = cls.status(cluster_id_label)
        if cluster_status.get("state") == 'UP':
            # Exactly one node is expected to carry role == "master".
            return list(filter(lambda x: x["role"] == "master", cluster_status.get("nodes")))[0]
        else:
            return cluster_status

    @classmethod
    def start(cls, cluster_id_label, api_version=None):
        """
        Start the cluster with id/label `cluster_id_label`.
        """
        conn = Qubole.agent(version=api_version)
        data = {"state": "start"}
        return conn.put(cls.element_path(cluster_id_label) + "/state", data)

    @classmethod
    def terminate(cls, cluster_id_label):
        """
        Terminate the cluster with id/label `cluster_id_label`.
        """
        conn = Qubole.agent(version=Cluster.api_version)
        data = {"state": "terminate"}
        return conn.put(cls.element_path(cluster_id_label) + "/state", data)

    @classmethod
    def _parse_create_update(cls, args, action, api_version):
        """
        Parse command line arguments to determine cluster parameters that can
        be used to create or update a cluster.

        Args:
            `args`: sequence of arguments

            `action`: "create", "update" or "clone"

            `api_version`: numeric API version; >= 1.3 enables extra
                argument groups (placement policy, EBS, cooldown, ...)

        Returns:
            Object that contains cluster parameters
        """
        argparser = ArgumentParser(prog="cluster %s" % action)

        create_required = False
        label_required = False
        if action == "create":
            create_required = True
        elif action == "update":
            argparser.add_argument("cluster_id_label",
                                   help="id/label of the cluster to update")
        elif action == "clone":
            argparser.add_argument("cluster_id_label",
                                   help="id/label of the cluster to update")
            label_required = True

        argparser.add_argument("--label", dest="label",
                               nargs="+", required=(create_required or label_required),
                               help="list of labels for the cluster" +
                                    " (atleast one label is required)")

        ec2_group = argparser.add_argument_group("ec2 settings")
        ec2_group.add_argument("--access-key-id",
                               dest="aws_access_key_id",
                               help="access key id for customer's aws" +
                                    " account. This is required while" +
                                    " creating the cluster",)
        ec2_group.add_argument("--secret-access-key",
                               dest="aws_secret_access_key",
                               help="secret access key for customer's aws" +
                                    " account. This is required while" +
                                    " creating the cluster",)
        ec2_group.add_argument("--aws-region",
                               dest="aws_region",
                               choices=["us-east-1", "us-west-2", "ap-northeast-1", "sa-east-1",
                                        "eu-west-1", "ap-southeast-1", "us-west-1"],
                               help="aws region to create the cluster in",)
        ec2_group.add_argument("--aws-availability-zone",
                               dest="aws_availability_zone",
                               help="availability zone to" +
                                    " create the cluster in",)
        ec2_group.add_argument("--subnet-id",
                               dest="subnet_id",
                               help="subnet to create the cluster in",)
        ec2_group.add_argument("--vpc-id",
                               dest="vpc_id",
                               help="vpc to create the cluster in",)
        ec2_group.add_argument("--master-elastic-ip",
                               dest="master_elastic_ip",
                               help="elastic ip to attach to master",)
        ec2_group.add_argument("--bastion-node-public-dns",
                               dest="bastion_node_public_dns",
                               help="public dns name of the bastion node. Required only if cluster is in private subnet of a EC2-VPC",)
        ec2_group.add_argument("--role-instance-profile",
                               dest="role_instance_profile",
                               help="IAM Role instance profile to attach on cluster",)

        hadoop_group = argparser.add_argument_group("hadoop settings")
        # On API >= 1.3 node sizing options live in their own group;
        # on older versions they are folded into the hadoop group.
        node_config_group = argparser.add_argument_group("node configuration") if (api_version >= 1.3) else hadoop_group

        node_config_group.add_argument("--master-instance-type",
                                       dest="master_instance_type",
                                       help="instance type to use for the hadoop" +
                                            " master node",)
        node_config_group.add_argument("--slave-instance-type",
                                       dest="slave_instance_type",
                                       help="instance type to use for the hadoop" +
                                            " slave nodes",)
        node_config_group.add_argument("--initial-nodes",
                                       dest="initial_nodes",
                                       type=int,
                                       help="number of nodes to start the" +
                                            " cluster with",)
        node_config_group.add_argument("--max-nodes",
                                       dest="max_nodes",
                                       type=int,
                                       help="maximum number of nodes the cluster" +
                                            " may be auto-scaled up to")
        node_config_group.add_argument("--slave-request-type",
                                       dest="slave_request_type",
                                       choices=["ondemand", "spot", "hybrid", "spotblock"],
                                       help="purchasing option for slave instaces",)
        node_config_group.add_argument("--root-volume-size",
                                       dest="root_volume_size",
                                       type=int,
                                       help="size of root volume in GB")
        hadoop_group.add_argument("--custom-config",
                                  dest="custom_config_file",
                                  help="location of file containg custom" +
                                       " hadoop configuration overrides")
        hadoop_group.add_argument("--use-hbase", dest="use_hbase",
                                  action="store_true", default=None,
                                  help="Use hbase on this cluster",)
        hadoop_group.add_argument("--is-ha", dest="is_ha",
                                  action="store_true", default=None,
                                  help="Enable HA config for cluster")
        if api_version >= 1.3:
            qubole_placement_policy_group = hadoop_group.add_mutually_exclusive_group()
            qubole_placement_policy_group.add_argument("--use-qubole-placement-policy",
                                                       dest="use_qubole_placement_policy",
                                                       action="store_true",
                                                       default=None,
                                                       help="Use Qubole Block Placement policy" +
                                                            " for clusters with spot nodes",)
            qubole_placement_policy_group.add_argument("--no-use-qubole-placement-policy",
                                                       dest="use_qubole_placement_policy",
                                                       action="store_false",
                                                       default=None,
                                                       help="Do not use Qubole Block Placement policy" +
                                                            " for clusters with spot nodes",)
            fallback_to_ondemand_group = node_config_group.add_mutually_exclusive_group()
            fallback_to_ondemand_group.add_argument("--fallback-to-ondemand",
                                                    dest="fallback_to_ondemand",
                                                    action="store_true",
                                                    default=None,
                                                    help="Fallback to on-demand nodes if spot nodes" +
                                                         " could not be obtained. Valid only if slave_request_type is spot",)
            fallback_to_ondemand_group.add_argument("--no-fallback-to-ondemand",
                                                    dest="fallback_to_ondemand",
                                                    action="store_false",
                                                    default=None,
                                                    help="Dont Fallback to on-demand nodes if spot nodes" +
                                                         " could not be obtained. Valid only if slave_request_type is spot",)
            node_cooldown_period_group = argparser.add_argument_group("node cooldown period settings")
            node_cooldown_period_group.add_argument("--node-base-cooldown-period",
                                                    dest="node_base_cooldown_period",
                                                    type=int,
                                                    help="Cooldown period for on-demand nodes" +
                                                         " unit: minutes")
            node_cooldown_period_group.add_argument("--node-spot-cooldown-period",
                                                    dest="node_spot_cooldown_period",
                                                    type=int,
                                                    help="Cooldown period for spot nodes" +
                                                         " unit: minutes")
            ebs_volume_group = argparser.add_argument_group("ebs volume settings")
            ebs_volume_group.add_argument("--ebs-volume-count",
                                          dest="ebs_volume_count",
                                          type=int,
                                          help="Number of EBS volumes to attach to" +
                                               " each instance of the cluster",)
            ebs_volume_group.add_argument("--ebs-volume-type",
                                          dest="ebs_volume_type",
                                          choices=["standard", "gp2"],
                                          help=" of the EBS volume. Valid values are " +
                                               "'standard' (magnetic) and 'gp2' (ssd).",)
            ebs_volume_group.add_argument("--ebs-volume-size",
                                          dest="ebs_volume_size",
                                          type=int,
                                          help="Size of each EBS volume, in GB",)
            enable_rubix_group = hadoop_group.add_mutually_exclusive_group()
            enable_rubix_group.add_argument("--enable-rubix",
                                            dest="enable_rubix",
                                            action="store_true",
                                            default=None,
                                            help="Enable rubix for cluster", )
            enable_rubix_group.add_argument("--no-enable-rubix",
                                            dest="enable_rubix",
                                            action="store_false",
                                            default=None,
                                            help="Do not enable rubix for cluster", )

        hadoop2 = hadoop_group.add_mutually_exclusive_group()
        hadoop2.add_argument("--use-hadoop2",
                             dest="use_hadoop2",
                             action="store_true",
                             default=None,
                             help="Use hadoop2 instead of hadoop1")
        hadoop2.add_argument("--use-hadoop1",
                             dest="use_hadoop2",
                             action="store_false",
                             default=None,
                             help="Use hadoop1 instead of hadoop2. This is the default.")
        hadoop2.add_argument("--use-spark",
                             dest="use_spark",
                             action="store_true",
                             default=None,
                             help="Turn on spark for this cluster")

        spot_group = argparser.add_argument_group("spot instance settings" +
                                                  " (valid only when slave-request-type is hybrid or spot)")
        spot_group.add_argument("--maximum-bid-price-percentage",
                                dest="maximum_bid_price_percentage",
                                type=float,
                                help="maximum value to bid for spot instances" +
                                     " expressed as a percentage of the base" +
                                     " price for the slave node instance type",)
        spot_group.add_argument("--timeout-for-spot-request",
                                dest="timeout_for_request",
                                type=int,
                                help="timeout for a spot instance request" +
                                     " unit: minutes")
        spot_group.add_argument("--maximum-spot-instance-percentage",
                                dest="maximum_spot_instance_percentage",
                                type=int,
                                help="maximum percentage of instances that may" +
                                     " be purchased from the aws spot market," +
                                     " valid only when slave-request-type" +
                                     " is 'hybrid'",)

        stable_spot_group = argparser.add_argument_group("stable spot instance settings")
        stable_spot_group.add_argument("--stable-maximum-bid-price-percentage",
                                       dest="stable_maximum_bid_price_percentage",
                                       type=float,
                                       help="maximum value to bid for stable node spot instances" +
                                            " expressed as a percentage of the base" +
                                            " price for the master and slave node instance types",)
        stable_spot_group.add_argument("--stable-timeout-for-spot-request",
                                       dest="stable_timeout_for_request",
                                       type=int,
                                       help="timeout for a stable node spot instance request" +
                                            " unit: minutes")
        stable_spot_group.add_argument("--stable-allow-fallback",
                                       dest="stable_allow_fallback", default=None,
                                       type=str2bool,
                                       help="whether to fallback to on-demand instances for stable nodes" +
                                            " if spot instances aren't available")

        spot_block_group = argparser.add_argument_group("spot block settings")
        spot_block_group.add_argument("--spot-block-duration",
                                      dest="spot_block_duration",
                                      type=int,
                                      help="spot block duration" +
                                           " unit: minutes")

        fairscheduler_group = argparser.add_argument_group(
            "fairscheduler configuration options")
        fairscheduler_group.add_argument("--fairscheduler-config-xml",
                                         dest="fairscheduler_config_xml_file",
                                         help="location for file containing" +
                                              " xml with custom configuration" +
                                              " for the fairscheduler",)
        fairscheduler_group.add_argument("--fairscheduler-default-pool",
                                         dest="default_pool",
                                         help="default pool for the" +
                                              " fairscheduler",)

        security_group = argparser.add_argument_group("security setttings")
        ephemerals = security_group.add_mutually_exclusive_group()
        ephemerals.add_argument("--encrypted-ephemerals",
                                dest="encrypted_ephemerals",
                                action="store_true",
                                default=None,
                                help="encrypt the ephemeral drives on" +
                                     " the instance",)
        ephemerals.add_argument("--no-encrypted-ephemerals",
                                dest="encrypted_ephemerals",
                                action="store_false",
                                default=None,
                                help="don't encrypt the ephemeral drives on" +
                                     " the instance",)
        security_group.add_argument("--customer-ssh-key",
                                    dest="customer_ssh_key_file",
                                    help="location for ssh key to use to" +
                                         " login to the instance")
        security_group.add_argument("--persistent-security-group",
                                    dest="persistent_security_group",
                                    help="a security group to associate with each" +
                                         " node of the cluster. Typically used" +
                                         " to provide access to external hosts")

        presto_group = argparser.add_argument_group("presto settings")
        enabling_presto = presto_group.add_mutually_exclusive_group()
        enabling_presto.add_argument("--enable-presto",
                                     dest="enable_presto",
                                     action="store_true",
                                     default=None,
                                     help="Enable presto for this cluster",)
        enabling_presto.add_argument("--disable-presto",
                                     dest="enable_presto",
                                     action="store_false",
                                     default=None,
                                     help="Disable presto for this cluster",)
        presto_group.add_argument("--presto-custom-config",
                                  dest="presto_custom_config_file",
                                  help="location of file containg custom" +
                                       " presto configuration overrides")

        termination = argparser.add_mutually_exclusive_group()
        termination.add_argument("--disallow-cluster-termination",
                                 dest="disallow_cluster_termination",
                                 action="store_true",
                                 default=None,
                                 help="don't auto-terminate idle clusters," +
                                      " use this with extreme caution",)
        termination.add_argument("--allow-cluster-termination",
                                 dest="disallow_cluster_termination",
                                 action="store_false",
                                 default=None,
                                 help="auto-terminate idle clusters,")

        ganglia = argparser.add_mutually_exclusive_group()
        ganglia.add_argument("--enable-ganglia-monitoring",
                             dest="enable_ganglia_monitoring",
                             action="store_true",
                             default=None,
                             help="enable ganglia monitoring for the" +
                                  " cluster",)
        ganglia.add_argument("--disable-ganglia-monitoring",
                             dest="enable_ganglia_monitoring",
                             action="store_false",
                             default=None,
                             help="disable ganglia monitoring for the" +
                                  " cluster",)

        argparser.add_argument("--node-bootstrap-file",
                               dest="node_bootstrap_file",
                               help="""name of the node bootstrap file for this cluster. It
                               should be in stored in S3 at
                               <account-default-location>/scripts/hadoop/NODE_BOOTSTRAP_FILE
                               """,)

        argparser.add_argument("--custom-ec2-tags",
                               dest="custom_ec2_tags",
                               help="""Custom ec2 tags to be set on all instances
                               of the cluster. Specified as JSON object (key-value pairs)
                               e.g. --custom-ec2-tags '{"key1":"value1", "key2":"value2"}'
                               """,)

        env_group = argparser.add_argument_group("environment settings")
        env_group.add_argument("--env-name",
                               dest="env_name",
                               default=None,
                               help="name of Python and R environment")
        env_group.add_argument("--python-version",
                               dest="python_version",
                               default=None,
                               help="version of Python in environment")
        env_group.add_argument("--r-version",
                               dest="r_version",
                               default=None,
                               help="version of R in environment")

        arguments = argparser.parse_args(args)
        return arguments

    @classmethod
    def create(cls, cluster_info, version=None):
        """
        Create a new cluster using information provided in `cluster_info`.

        Optionally provide the version (eg: v1.3) to use the new version of the
        API. If None we default to v1.2
        """
        conn = Qubole.agent(version=version)
        return conn.post(cls.rest_entity_path, data=cluster_info)

    @classmethod
    def update(cls, cluster_id_label, cluster_info, version=None):
        """
        Update the cluster with id/label `cluster_id_label` using information provided in
        `cluster_info`.

        Optionally provide the version (eg: v1.3) to use the new version of the
        API. If None we default to v1.2
        """
        conn = Qubole.agent(version=version)
        return conn.put(cls.element_path(cluster_id_label), data=cluster_info)

    @classmethod
    def clone(cls, cluster_id_label, cluster_info, version=None):
        """
        Clone the cluster with id/label `cluster_id_label` using information provided in
        `cluster_info`.

        Optionally provide the version (eg: v1.3) to use the new version of the
        API. If None we default to v1.2
        """
        conn = Qubole.agent(version=version)
        return conn.post(cls.element_path(cluster_id_label) + '/clone', data=cluster_info)

    @classmethod
    def _parse_cluster_manage_command(cls, args, action):
        """
        Parse command line arguments for cluster manage commands
        (add/remove/update node).
        """
        argparser = ArgumentParser(prog="cluster_manage_command")

        group = argparser.add_mutually_exclusive_group(required=True)

        group.add_argument("--id", dest="cluster_id",
                           help="execute on cluster with this id")
        group.add_argument("--label", dest="label",
                           help="execute on cluster with this label")

        if action == "remove" or action == "update":
            argparser.add_argument("--private_dns",
                                   help="the private_dns of the machine to be updated/removed", required=True)
        if action == "update":
            argparser.add_argument("--command",
                                   help="the update command to be executed", required=True, choices=["replace"])

        arguments = argparser.parse_args(args)
        return arguments

    @classmethod
    def _parse_reassign_label(cls, args):
        """
        Parse command line arguments for reassigning label.
        """
        argparser = ArgumentParser(prog="cluster reassign_label")

        argparser.add_argument("destination_cluster",
                               metavar="destination_cluster_id_label",
                               help="id/label of the cluster to move the label to")

        argparser.add_argument("label",
                               help="label to be moved from the source cluster")

        arguments = argparser.parse_args(args)
        return arguments

    @classmethod
    def reassign_label(cls, destination_cluster, label):
        """
        Reassign a label from one cluster to another.

        Args:
            `destination_cluster`: id/label of the cluster to move the label to

            `label`: label to be moved from the source cluster
        """
        conn = Qubole.agent(version=Cluster.api_version)
        data = {
            "destination_cluster": destination_cluster,
            "label": label
        }
        return conn.put(cls.rest_entity_path + "/reassign-label", data)

    @classmethod
    def delete(cls, cluster_id_label):
        """
        Delete the cluster with id/label `cluster_id_label`.
        """
        conn = Qubole.agent(version=Cluster.api_version)
        return conn.delete(cls.element_path(cluster_id_label))

    @classmethod
    def _parse_snapshot_restore_command(cls, args, action):
        """
        Parse command line arguments for snapshot/restore commands.
        """
        argparser = ArgumentParser(prog="cluster %s" % action)

        group = argparser.add_mutually_exclusive_group(required=True)
        group.add_argument("--id", dest="cluster_id",
                           help="execute on cluster with this id")
        group.add_argument("--label", dest="label",
                           help="execute on cluster with this label")
        argparser.add_argument("--s3_location",
                               help="s3_location where backup is stored", required=True)
        if action == "snapshot":
            argparser.add_argument("--backup_type",
                                   help="backup_type: full/incremental, default is full")
        elif action == "restore_point":
            argparser.add_argument("--backup_id",
                                   help="back_id from which restoration will be done", required=True)
            argparser.add_argument("--table_names",
                                   help="table(s) which are to be restored", required=True)
            argparser.add_argument("--no-overwrite", action="store_false",
                                   help="With this option, restore overwrites to the existing table if theres any in restore target")
            argparser.add_argument("--no-automatic", action="store_false",
                                   help="With this option, all the dependencies are automatically restored together with this backup image following the correct order")
        arguments = argparser.parse_args(args)
        return arguments

    @classmethod
    def _parse_get_snapshot_schedule(cls, args):
        """
        Parse command line arguments for fetching hbase snapshot schedule details.
        """
        argparser = ArgumentParser(prog="cluster snapshot_schedule")

        group = argparser.add_mutually_exclusive_group(required=True)
        group.add_argument("--id", dest="cluster_id",
                           help="execute on cluster with this id")
        group.add_argument("--label", dest="label",
                           help="execute on cluster with this label")
        arguments = argparser.parse_args(args)
        return arguments

    @classmethod
    def _parse_update_snapshot_schedule(cls, args):
        """
        Parse command line arguments for updating the hbase snapshot schedule.
        """
        argparser = ArgumentParser(prog="cluster snapshot_schedule")

        group = argparser.add_mutually_exclusive_group(required=True)
        group.add_argument("--id", dest="cluster_id",
                           help="execute on cluster with this id")
        group.add_argument("--label", dest="label",
                           help="execute on cluster with this label")

        argparser.add_argument("--frequency-num",
                               help="frequency number")
        argparser.add_argument("--frequency-unit",
                               help="frequency unit")
        argparser.add_argument("--s3-location",
                               help="s3_location about where to store snapshots")
        argparser.add_argument("--status",
                               help="status of periodic job you want to change to", choices = ["RUNNING", "SUSPENDED"])
        arguments = argparser.parse_args(args)
        return arguments

    @classmethod
    def snapshot(cls, cluster_id_label, s3_location, backup_type):
        """
        Create hbase snapshot full/incremental
        """
        conn = Qubole.agent(version=Cluster.api_version)
        parameters = {}
        parameters['s3_location'] = s3_location
        if backup_type:
            parameters['backup_type'] = backup_type
        return conn.post(cls.element_path(cluster_id_label) + "/snapshots", data=parameters)

    @classmethod
    def restore_point(cls, cluster_id_label, s3_location, backup_id, table_names, overwrite=True, automatic=True):
        """
        Restoring cluster from a given hbase snapshot id
        """
        conn = Qubole.agent(version=Cluster.api_version)
        parameters = {}
        parameters['s3_location'] = s3_location
        parameters['backup_id'] = backup_id
        parameters['table_names'] = table_names
        parameters['overwrite'] = overwrite
        parameters['automatic'] = automatic
        return conn.post(cls.element_path(cluster_id_label) + "/restore_point", data=parameters)

    @classmethod
    def get_snapshot_schedule(cls, cluster_id_label):
        """
        Get details for snapshot schedule
        """
        conn = Qubole.agent(version=Cluster.api_version)
        return conn.get(cls.element_path(cluster_id_label) + "/snapshot_schedule")

    @classmethod
    def update_snapshot_schedule(cls, cluster_id_label, s3_location=None, frequency_unit=None, frequency_num=None, status=None):
        """
        Update for snapshot schedule

        Only the keyword arguments that are not None are sent to the API.
        """
        conn = Qubole.agent(version=Cluster.api_version)

        data = {}
        if s3_location is not None:
            data["s3_location"] = s3_location
        if frequency_unit is not None:
            data["frequency_unit"] = frequency_unit
        if frequency_num is not None:
            data["frequency_num"] = frequency_num
        if status is not None:
            data["status"] = status
        return conn.put(cls.element_path(cluster_id_label) + "/snapshot_schedule", data)

    @classmethod
    def add_node(cls, cluster_id_label, parameters=None):
        """
        Add a node to an existing cluster
        """
        conn = Qubole.agent(version=Cluster.api_version)
        parameters = {} if not parameters else parameters
        return conn.post(cls.element_path(cluster_id_label) + "/nodes", data={"parameters" : parameters})

    @classmethod
    def remove_node(cls, cluster_id_label, private_dns, parameters=None):
        """
        Remove a node (identified by its private DNS name) from an existing cluster
        """
        conn = Qubole.agent(version=Cluster.api_version)
        parameters = {} if not parameters else parameters
        data = {"private_dns" : private_dns, "parameters" : parameters}
        return conn.delete(cls.element_path(cluster_id_label) + "/nodes", data)

    @classmethod
    def update_node(cls, cluster_id_label, command, private_dns, parameters=None):
        """
        Update (e.g. replace) a node of an existing cluster, identified by its
        private DNS name
        """
        conn = Qubole.agent(version=Cluster.api_version)
        parameters = {} if not parameters else parameters
        data = {"command" : command, "private_dns" : private_dns, "parameters" : parameters}
        return conn.put(cls.element_path(cluster_id_label) + "/nodes", data)
class ClusterInfo():
    """
    qds_sdk.ClusterInfo is the class which stores information about a cluster.
    You can use objects of this class to create or update a cluster.
    """

    def __init__(self, label, aws_access_key_id, aws_secret_access_key,
                 disallow_cluster_termination=None,
                 enable_ganglia_monitoring=None,
                 node_bootstrap_file=None):
        """
        Args:

        `label`: A list of labels that identify the cluster. At least one label
            must be provided when creating a cluster.

        `aws_access_key_id`: The access key id for customer's aws account. This
            is required for creating the cluster.

        `aws_secret_access_key`: The secret access key for customer's aws
            account. This is required for creating the cluster.

        `disallow_cluster_termination`: Set this to True if you don't want
            qubole to auto-terminate idle clusters. Use this option with
            extreme caution.

        `enable_ganglia_monitoring`: Set this to True if you want to enable
            ganglia monitoring for the cluster.

        `node_bootstrap_file`: name of the node bootstrap file for this
            cluster. It should be in stored in S3 at
            <your-default-location>/scripts/hadoop/
        """
        self.label = label
        self.ec2_settings = {}
        self.ec2_settings['compute_access_key'] = aws_access_key_id
        self.ec2_settings['compute_secret_key'] = aws_secret_access_key
        self.disallow_cluster_termination = disallow_cluster_termination
        self.enable_ganglia_monitoring = enable_ganglia_monitoring
        self.node_bootstrap_file = node_bootstrap_file
        self.hadoop_settings = {}
        self.security_settings = {}
        self.presto_settings = {}

    def set_ec2_settings(self,
                         aws_region=None,
                         aws_availability_zone=None,
                         vpc_id=None,
                         subnet_id=None,
                         master_elastic_ip=None,
                         role_instance_profile=None,
                         bastion_node_public_dns=None):
        """
        Kwargs:

        `aws_region`: AWS region to create the cluster in.

        `aws_availability_zone`: The availability zone to create the cluster
            in.

        `vpc_id`: The vpc to create the cluster in.

        `subnet_id`: The subnet to create the cluster in.

        `master_elastic_ip`: Elastic IP to attach to the master node.

        `role_instance_profile`: IAM role instance profile to attach on the
            cluster.

        `bastion_node_public_dns`: Public dns name of the bastion host. Required only if
            cluster is in private subnet.
        """
        self.ec2_settings['aws_region'] = aws_region
        self.ec2_settings['aws_preferred_availability_zone'] = aws_availability_zone
        self.ec2_settings['vpc_id'] = vpc_id
        self.ec2_settings['subnet_id'] = subnet_id
        self.ec2_settings['role_instance_profile'] = role_instance_profile
        self.ec2_settings['master_elastic_ip'] = master_elastic_ip
        self.ec2_settings['bastion_node_public_dns'] = bastion_node_public_dns

    def set_hadoop_settings(self, master_instance_type=None,
                            slave_instance_type=None,
                            initial_nodes=None,
                            max_nodes=None,
                            custom_config=None,
                            slave_request_type=None,
                            use_hbase=None,
                            custom_ec2_tags=None,
                            use_hadoop2=None,
                            use_spark=None,
                            is_ha=None):
        """
        Kwargs:

        `master_instance_type`: The instance type to use for the Hadoop master
            node.

        `slave_instance_type`: The instance type to use for the Hadoop slave
            nodes.

        `initial_nodes`: Number of nodes to start the cluster with.

        `max_nodes`: Maximum number of nodes the cluster may be auto-scaled up
            to.

        `custom_config`: Custom Hadoop configuration overrides.

        `slave_request_type`: Purchasing option for slave instances.
            Valid values: "ondemand", "hybrid", "spot".

        `use_hbase`: Start hbase daemons on the cluster. Uses Hadoop2

        `custom_ec2_tags`: JSON string of key-value pairs to set as EC2 tags
            on all instances. Raises Exception if the string is not valid JSON.

        `use_hadoop2`: Use hadoop2 in this cluster

        `use_spark`: Use spark in this cluster

        `is_ha` : enable HA config for cluster
        """
        self.hadoop_settings['master_instance_type'] = master_instance_type
        self.hadoop_settings['slave_instance_type'] = slave_instance_type
        self.hadoop_settings['initial_nodes'] = initial_nodes
        self.hadoop_settings['max_nodes'] = max_nodes
        self.hadoop_settings['custom_config'] = custom_config
        self.hadoop_settings['slave_request_type'] = slave_request_type
        self.hadoop_settings['use_hbase'] = use_hbase
        self.hadoop_settings['use_hadoop2'] = use_hadoop2
        self.hadoop_settings['use_spark'] = use_spark
        self.hadoop_settings['is_ha'] = is_ha

        if custom_ec2_tags and custom_ec2_tags.strip():
            try:
                self.hadoop_settings['custom_ec2_tags'] = json.loads(custom_ec2_tags.strip())
            except Exception as e:
                # Bug fix: `e.message` does not exist on Python 3 exceptions
                # (it raised AttributeError here); str(e) works on both 2 and 3.
                raise Exception("Invalid JSON string for custom ec2 tags: %s" % str(e))

    def set_spot_instance_settings(self, maximum_bid_price_percentage=None,
                                   timeout_for_request=None,
                                   maximum_spot_instance_percentage=None):
        """
        Purchase options for spot instances. Valid only when
        `slave_request_type` is hybrid or spot.

        `maximum_bid_price_percentage`: Maximum value to bid for spot
            instances, expressed as a percentage of the base price for the
            slave node instance type.

        `timeout_for_request`: Timeout for a spot instance request (Unit:
            minutes)

        `maximum_spot_instance_percentage`: Maximum percentage of instances
            that may be purchased from the AWS Spot market. Valid only when
            slave_request_type is "hybrid".
        """
        self.hadoop_settings['spot_instance_settings'] = {
            'maximum_bid_price_percentage': maximum_bid_price_percentage,
            'timeout_for_request': timeout_for_request,
            'maximum_spot_instance_percentage': maximum_spot_instance_percentage}

    def set_stable_spot_instance_settings(self, maximum_bid_price_percentage=None,
                                          timeout_for_request=None,
                                          allow_fallback=True):
        """
        Purchase options for stable spot instances.

        `maximum_bid_price_percentage`: Maximum value to bid for stable node spot
            instances, expressed as a percentage of the base price
            (applies to both master and slave nodes).

        `timeout_for_request`: Timeout for a stable node spot instance request (Unit:
            minutes)

        `allow_fallback`: Whether to fallback to on-demand instances for
            stable nodes if spot instances are not available
        """
        self.hadoop_settings['stable_spot_instance_settings'] = {
            'maximum_bid_price_percentage': maximum_bid_price_percentage,
            'timeout_for_request': timeout_for_request,
            'allow_fallback': allow_fallback}

    def set_fairscheduler_settings(self, fairscheduler_config_xml=None,
                                   default_pool=None):
        """
        Fair scheduler configuration options.

        `fairscheduler_config_xml`: XML string with custom configuration
            parameters for the fair scheduler.

        `default_pool`: The default pool for the fair scheduler.
        """
        self.hadoop_settings['fairscheduler_settings'] = {
            'fairscheduler_config_xml': fairscheduler_config_xml,
            'default_pool': default_pool}

    def set_security_settings(self,
                              encrypted_ephemerals=None,
                              customer_ssh_key=None,
                              persistent_security_group=None):
        """
        Kwargs:

        `encrypted_ephemerals`: Encrypt the ephemeral drives on the instance.

        `customer_ssh_key`: SSH key to use to login to the instances.

        `persistent_security_group`: Security group to associate with each
            node of the cluster.
        """
        self.security_settings['encrypted_ephemerals'] = encrypted_ephemerals
        self.security_settings['customer_ssh_key'] = customer_ssh_key
        self.security_settings['persistent_security_group'] = persistent_security_group

    def set_presto_settings(self, enable_presto=None, presto_custom_config=None):
        """
        Kwargs:

        `enable_presto`: Enable Presto on the cluster.

        `presto_custom_config`: Custom Presto configuration overrides.
        """
        self.presto_settings['enable_presto'] = enable_presto
        self.presto_settings['custom_config'] = presto_custom_config

    def minimal_payload(self):
        """
        This method can be used to create the payload which is sent while
        creating or updating a cluster.
        """
        payload = {"cluster": self.__dict__}
        # util._make_minimal strips keys whose values are None.
        return util._make_minimal(payload)
class ClusterInfoV13():
    """
    qds_sdk.ClusterInfo is the class which stores information about a cluster.
    You can use objects of this class to create or update a cluster.
    """

    def __init__(self, label, api_version=1.3):
        """
        Args:

        `label`: A list of labels that identify the cluster. At least one label
            must be provided when creating a cluster.

        `api_version`: api version to use
        """
        self.label = label
        self.api_version = api_version
        # Settings groups are filled in by the set_* helper methods below.
        self.ec2_settings = {}
        self.hadoop_settings = {}
        self.security_settings = {}
        self.presto_settings = {}
        self.node_configuration = {}

    def set_cluster_info(self, aws_access_key_id=None,
                         aws_secret_access_key=None,
                         aws_region=None,
                         aws_availability_zone=None,
                         vpc_id=None,
                         subnet_id=None,
                         master_elastic_ip=None,
                         disallow_cluster_termination=None,
                         enable_ganglia_monitoring=None,
                         node_bootstrap_file=None,
                         master_instance_type=None,
                         slave_instance_type=None,
                         initial_nodes=None,
                         max_nodes=None,
                         slave_request_type=None,
                         fallback_to_ondemand=None,
                         node_base_cooldown_period=None,
                         node_spot_cooldown_period=None,
                         custom_config=None,
                         use_hbase=None,
                         custom_ec2_tags=None,
                         use_hadoop2=None,
                         use_spark=None,
                         use_qubole_placement_policy=None,
                         maximum_bid_price_percentage=None,
                         timeout_for_request=None,
                         maximum_spot_instance_percentage=None,
                         stable_maximum_bid_price_percentage=None,
                         stable_timeout_for_request=None,
                         stable_allow_fallback=True,
                         spot_block_duration=None,
                         ebs_volume_count=None,
                         ebs_volume_type=None,
                         ebs_volume_size=None,
                         root_volume_size=None,
                         fairscheduler_config_xml=None,
                         default_pool=None,
                         encrypted_ephemerals=None,
                         ssh_public_key=None,
                         persistent_security_group=None,
                         enable_presto=None,
                         bastion_node_public_dns=None,
                         role_instance_profile=None,
                         presto_custom_config=None,
                         is_ha=None,
                         env_name=None,
                         python_version=None,
                         r_version=None,
                         enable_rubix=None):
        """
        Kwargs:

        `aws_access_key_id`: The access key id for customer's aws account. This
            is required for creating the cluster.

        `aws_secret_access_key`: The secret access key for customer's aws
            account. This is required for creating the cluster.

        `aws_region`: AWS region to create the cluster in.

        `aws_availability_zone`: The availability zone to create the cluster
            in.

        `vpc_id`: The vpc to create the cluster in.

        `subnet_id`: The subnet to create the cluster in.

        `master_elastic_ip`: Elastic IP to attach to master node

        `disallow_cluster_termination`: Set this to True if you don't want
            qubole to auto-terminate idle clusters. Use this option with
            extreme caution.

        `enable_ganglia_monitoring`: Set this to True if you want to enable
            ganglia monitoring for the cluster.

        `node_bootstrap_file`: name of the node bootstrap file for this
            cluster. It should be in stored in S3 at
            <your-default-location>/scripts/hadoop/

        `master_instance_type`: The instance type to use for the Hadoop master
            node.

        `slave_instance_type`: The instance type to use for the Hadoop slave
            nodes.

        `initial_nodes`: Number of nodes to start the cluster with.

        `max_nodes`: Maximum number of nodes the cluster may be auto-scaled up
            to.

        `slave_request_type`: Purchasing option for slave instances.
            Valid values: "ondemand", "hybrid", "spot".

        `fallback_to_ondemand`: Fallback to on-demand nodes if spot nodes could not be
            obtained. Valid only if slave_request_type is 'spot'.

        `node_base_cooldown_period`: Time for which an on-demand node waits before termination (Unit: minutes)

        `node_spot_cooldown_period`: Time for which a spot node waits before termination (Unit: minutes)

        `custom_config`: Custom Hadoop configuration overrides.

        `use_hbase`: Start hbase daemons on the cluster. Uses Hadoop2

        `use_hadoop2`: Use hadoop2 in this cluster

        `use_spark`: Use spark in this cluster

        `use_qubole_placement_policy`: Use Qubole Block Placement policy for
            clusters with spot nodes.

        `maximum_bid_price_percentage`: ( Valid only when `slave_request_type`
            is hybrid or spot.) Maximum value to bid for spot
            instances, expressed as a percentage of the base price
            for the slave node instance type.

        `timeout_for_request`: Timeout for a spot instance request (Unit:
            minutes)

        `maximum_spot_instance_percentage`: Maximum percentage of instances
            that may be purchased from the AWS Spot market. Valid only when
            slave_request_type is "hybrid".

        `stable_maximum_bid_price_percentage`: Maximum value to bid for stable node spot
            instances, expressed as a percentage of the base price
            (applies to both master and slave nodes).

        `stable_timeout_for_request`: Timeout for a stable node spot instance request (Unit:
            minutes)

        `stable_allow_fallback`: Whether to fallback to on-demand instances for
            stable nodes if spot instances are not available

        `spot_block_duration`: Time for which the spot block instance is provisioned (Unit:
            minutes)

        `ebs_volume_count`: Number of EBS volumes to attach
            to each instance of the cluster.

        `ebs_volume_type`: Type of the EBS volume. Valid
            values are 'standard' (magnetic) and 'ssd'.

        `ebs_volume_size`: Size of each EBS volume, in GB.

        `root_volume_size`: Size of root volume, in GB.

        `fairscheduler_config_xml`: XML string with custom configuration
            parameters for the fair scheduler.

        `default_pool`: The default pool for the fair scheduler.

        `encrypted_ephemerals`: Encrypt the ephemeral drives on the instance.

        `ssh_public_key`: SSH key to use to login to the instances.

        `persistent_security_group`: Comma-separated list of persistent
            security groups for the cluster.

        `enable_presto`: Enable Presto on the cluster.

        `presto_custom_config`: Custom Presto configuration overrides.

        `bastion_node_public_dns`: Public dns name of the bastion node. Required only if cluster is in private subnet.

        `is_ha`: Enabling HA config for cluster

        `env_name`: Name of python and R environment. (For Spark clusters)

        `python_version`: Version of Python for environment. (For Spark clusters)

        `r_version`: Version of R for environment. (For Spark clusters)

        `enable_rubix`: Enable rubix on the cluster (For Presto clusters)
        """
        self.disallow_cluster_termination = disallow_cluster_termination
        self.enable_ganglia_monitoring = enable_ganglia_monitoring
        self.node_bootstrap_file = node_bootstrap_file
        # Delegate to the per-group setters so the payload layout stays in one
        # place.
        self.set_node_configuration(master_instance_type, slave_instance_type, initial_nodes, max_nodes,
                                    slave_request_type, fallback_to_ondemand, custom_ec2_tags,
                                    node_base_cooldown_period, node_spot_cooldown_period, root_volume_size)
        self.set_ec2_settings(aws_access_key_id, aws_secret_access_key, aws_region, aws_availability_zone,
                              vpc_id, subnet_id, master_elastic_ip, bastion_node_public_dns,
                              role_instance_profile)
        self.set_hadoop_settings(custom_config, use_hbase, use_hadoop2, use_spark,
                                 use_qubole_placement_policy, is_ha, enable_rubix)
        self.set_spot_instance_settings(maximum_bid_price_percentage, timeout_for_request,
                                        maximum_spot_instance_percentage)
        self.set_stable_spot_instance_settings(stable_maximum_bid_price_percentage,
                                               stable_timeout_for_request, stable_allow_fallback)
        self.set_spot_block_settings(spot_block_duration)
        self.set_ebs_volume_settings(ebs_volume_count, ebs_volume_type, ebs_volume_size)
        self.set_fairscheduler_settings(fairscheduler_config_xml, default_pool)
        self.set_security_settings(encrypted_ephemerals, ssh_public_key, persistent_security_group)
        self.set_presto_settings(enable_presto, presto_custom_config)
        self.set_env_settings(env_name, python_version, r_version)

    def set_ec2_settings(self,
                         aws_access_key_id=None,
                         aws_secret_access_key=None,
                         aws_region=None,
                         aws_availability_zone=None,
                         vpc_id=None,
                         subnet_id=None,
                         master_elastic_ip=None,
                         bastion_node_public_dns=None,
                         role_instance_profile=None):
        """Populate the EC2/network related settings of the payload."""
        self.ec2_settings['compute_access_key'] = aws_access_key_id
        self.ec2_settings['compute_secret_key'] = aws_secret_access_key
        self.ec2_settings['aws_region'] = aws_region
        self.ec2_settings['aws_preferred_availability_zone'] = aws_availability_zone
        self.ec2_settings['vpc_id'] = vpc_id
        self.ec2_settings['subnet_id'] = subnet_id
        self.ec2_settings['master_elastic_ip'] = master_elastic_ip
        self.ec2_settings['bastion_node_public_dns'] = bastion_node_public_dns
        self.ec2_settings['role_instance_profile'] = role_instance_profile

    def set_node_configuration(self, master_instance_type=None,
                               slave_instance_type=None,
                               initial_nodes=None,
                               max_nodes=None,
                               slave_request_type=None,
                               fallback_to_ondemand=None,
                               custom_ec2_tags=None,
                               node_base_cooldown_period=None,
                               node_spot_cooldown_period=None,
                               root_volume_size=None):
        """Populate node sizing/purchasing options.

        `custom_ec2_tags` is expected to be a JSON object encoded as a string;
        it is parsed here and stored as a dict.  Raises Exception on invalid
        JSON.
        """
        self.node_configuration['master_instance_type'] = master_instance_type
        self.node_configuration['slave_instance_type'] = slave_instance_type
        self.node_configuration['initial_nodes'] = initial_nodes
        self.node_configuration['max_nodes'] = max_nodes
        self.node_configuration['slave_request_type'] = slave_request_type
        self.node_configuration['fallback_to_ondemand'] = fallback_to_ondemand
        self.node_configuration['node_base_cooldown_period'] = node_base_cooldown_period
        self.node_configuration['node_spot_cooldown_period'] = node_spot_cooldown_period
        self.node_configuration['root_volume_size'] = root_volume_size

        if custom_ec2_tags and custom_ec2_tags.strip():
            try:
                self.node_configuration['custom_ec2_tags'] = json.loads(custom_ec2_tags.strip())
            except Exception as e:
                # str(e) instead of e.message: BaseException.message was
                # removed in Python 3 (PEP 352), so the old code raised an
                # AttributeError here instead of the intended message.
                raise Exception("Invalid JSON string for custom ec2 tags: %s" % str(e))

    def set_hadoop_settings(self, custom_config=None,
                            use_hbase=None,
                            use_hadoop2=None,
                            use_spark=None,
                            use_qubole_placement_policy=None,
                            is_ha=None,
                            enable_rubix=None):
        """Populate Hadoop/engine feature flags and configuration overrides."""
        self.hadoop_settings['custom_config'] = custom_config
        self.hadoop_settings['use_hbase'] = use_hbase
        self.hadoop_settings['use_hadoop2'] = use_hadoop2
        self.hadoop_settings['use_spark'] = use_spark
        self.hadoop_settings['use_qubole_placement_policy'] = use_qubole_placement_policy
        self.hadoop_settings['is_ha'] = is_ha
        self.hadoop_settings['enable_rubix'] = enable_rubix

    def set_spot_instance_settings(self, maximum_bid_price_percentage=None,
                                   timeout_for_request=None,
                                   maximum_spot_instance_percentage=None):
        """Populate autoscaling spot-instance purchasing options."""
        self.node_configuration['spot_instance_settings'] = {
            'maximum_bid_price_percentage': maximum_bid_price_percentage,
            'timeout_for_request': timeout_for_request,
            'maximum_spot_instance_percentage': maximum_spot_instance_percentage}

    def set_stable_spot_instance_settings(self, maximum_bid_price_percentage=None,
                                          timeout_for_request=None,
                                          allow_fallback=True):
        """Populate stable (core) node spot-instance purchasing options."""
        self.node_configuration['stable_spot_instance_settings'] = {
            'maximum_bid_price_percentage': maximum_bid_price_percentage,
            'timeout_for_request': timeout_for_request,
            'allow_fallback': allow_fallback}

    def set_spot_block_settings(self, spot_block_duration=None):
        """Populate spot-block provisioning duration (minutes)."""
        self.node_configuration['spot_block_settings'] = {'duration': spot_block_duration}

    def set_ebs_volume_settings(self, ebs_volume_count=None,
                                ebs_volume_type=None,
                                ebs_volume_size=None):
        """Populate EBS volume count/type/size for each node."""
        self.node_configuration['ebs_volume_count'] = ebs_volume_count
        self.node_configuration['ebs_volume_type'] = ebs_volume_type
        self.node_configuration['ebs_volume_size'] = ebs_volume_size

    def set_fairscheduler_settings(self, fairscheduler_config_xml=None,
                                   default_pool=None):
        """Populate fair scheduler configuration options."""
        self.hadoop_settings['fairscheduler_settings'] = {
            'fairscheduler_config_xml': fairscheduler_config_xml,
            'default_pool': default_pool}

    def set_security_settings(self,
                              encrypted_ephemerals=None,
                              ssh_public_key=None,
                              persistent_security_group=None):
        """Populate security-related options (encryption, SSH key, SGs)."""
        self.security_settings['encrypted_ephemerals'] = encrypted_ephemerals
        self.security_settings['ssh_public_key'] = ssh_public_key
        self.security_settings['persistent_security_group'] = persistent_security_group

    def set_presto_settings(self, enable_presto=None, presto_custom_config=None):
        """Populate Presto options (note the key rename to 'custom_config')."""
        self.presto_settings['enable_presto'] = enable_presto
        self.presto_settings['custom_config'] = presto_custom_config

    def set_env_settings(self, env_name=None, python_version=None, r_version=None):
        """Populate the Python/R environment description (Spark clusters)."""
        self.node_configuration['env_settings'] = {
            'name': env_name,
            'python_version': python_version,
            'r_version': r_version}

    def minimal_payload(self):
        """
        This method can be used to create the payload which is sent while
        creating or updating a cluster.
        """
        # Copy before popping: the original popped from self.__dict__
        # directly, which permanently deleted the instance's `api_version`
        # attribute as a side effect of building the payload.
        payload_dict = dict(self.__dict__)
        payload_dict.pop("api_version", None)
        return util._make_minimal(payload_dict)
| apache-2.0 | 4,479,859,046,252,228,600 | 45.837139 | 159 | 0.539906 | false |
segfaulthunter/asynchia | benchmark/ee_parse.py | 1 | 2223 | # -*- coding: us-ascii -*-
# asynchia - asynchronous networking library
# Copyright (C) 2009 Florian Mayer <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import operator
import itertools
import asynchia.ee
import asynchia.maps
import asynchia.util
import benchutil
class SendAllTransport(asynchia.SendallTrait, asynchia.SocketTransport):
    """Socket transport combined with the SendallTrait mix-in, giving it a
    sendall() method used below to push a whole buffer through the socket."""
    pass
def mock_handler(mp, inbuf):
    """Create a connected socket pair, feed *inbuf* through the sending end
    and return the receiving transport, both registered on socket map *mp*."""
    send_sock, recv_sock = asynchia.util.socketpair()
    SendAllTransport(mp, send_sock).sendall(inbuf)
    return asynchia.SocketTransport(mp, recv_sock)
class ParseEE(benchutil.AsyncBenchmark):
    """Benchmark: push `size` random bytes through a mocked transport and
    time how long asynchia.ee takes to consume them with a chain of
    DelimitedStringCollector instances (Python 2 code: .next(), reduce)."""

    def __init__(self, mp, size):
        # Transport that will deliver `size` random bytes when read.
        self.trnsp = mock_handler(mp, os.urandom(size))
        sub = itertools.repeat(range(250, 20000))
        chunks = []
        x = size
        while x > 0:
            # NOTE(review): sub.next() yields the *entire list* produced by
            # range(250, 20000) each time, so min(x, <list>) relies on
            # Python 2's mixed-type ordering (numbers < lists) and always
            # picks x -- i.e. a single chunk of the full size.  Possibly
            # itertools.cycle(...) over chunk sizes was intended; confirm.
            chunks.append(min(x, sub.next()))
            x -= chunks[-1]
        # Chain one collector per chunk into a single composite protocol.
        self.ptcl = reduce(
            operator.add,
            map(asynchia.ee.DelimitedStringCollector, chunks)
        )
        # When the composite protocol completes, report the finish timestamp.
        self.ptcl.onclose = lambda _: self.submit_async(time.time())

    def run(self):
        # Attach the protocol to the transport; parsing proceeds in the
        # socket map's event loop.
        hndl = asynchia.ee.Handler(self.trnsp, self.ptcl)
def done(_):
    # Runner-completion callback: raising SocketMapClosedError unwinds
    # mp.run() and terminates the event loop.
    raise asynchia.SocketMapClosedError
if __name__ == '__main__':
    # Usage: ee_parse.py [sample] [length]
    #   sample -- number of benchmark instances to run (default 50)
    #   length -- size in bytes of the random payload each parses (default 5MB)
    if len(sys.argv) >= 3:
        sample = int(sys.argv[1])
        len_ = int(sys.argv[2])
    else:
        sample = 50
        len_ = 5000000
    mp = asynchia.maps.DefaultSocketMap()
    # Python 2 script (xrange, print statement).
    run = benchutil.Runner([ParseEE(mp, len_) for _ in xrange(sample)], done)
    run.start()
    mp.run()
    print run.result
| gpl-3.0 | -3,150,133,316,793,390,600 | 26.7875 | 77 | 0.662618 | false |
mrosenstihl/PulsePrograms | autoPFGStimulatedEcho_const_T1/analyse.py | 1 | 2319 | import re, os,sys
import numpy as N
from tables import *
import scipy.odr
import pylab as P
# Gradient calibration factor (DAC units -> field gradient); value is
# hardware-specific -- presumably T/m per DAC unit, confirm against setup.
conv = 6.36e-5
# Proton gyromagnetic ratio, rad s^-1 T^-1.
gamma = 2.67522e8
# Sample-index window over which the echo amplitude is averaged below.
start = 50
stop = 250
def diffusion(p, x):
    """Mono-exponential decay with constant offset.

    p[0] is the decay rate, p[1] the baseline offset; x is the abscissa
    (scalar or numpy array).  Returns exp(-p[0]*x) + p[1].
    """
    rate, offset = p[0], p[1]
    return N.exp(-rate * x) + offset
# Open the HDF5 results file given on the command line (pytables, Py2 API).
hdf = openFile(sys.argv[1])

# Every child of data_pool whose name starts with 'dir_' is one
# temperature run.
temperature_runs = [run for run in hdf.root.data_pool if run._v_name.startswith('dir_')]

results = {}
for temperature in temperature_runs:
    tbvt = temperature._v_name
    print tbvt
    for run in hdf.walkNodes(temperature):
        print run._v_name
        # Gradient measurement nodes carry the PFG parameters as attributes.
        if run._v_name.startswith('dict_grad'):
            dwell = run.indices.col('dwelltime')
            delta = float(run._v_attrs.description_delta)
            tau = float(run._v_attrs.description_tau)
            tm = float(run._v_attrs.description_tm)
            dac = float(run._v_attrs.description_dac)
            # b factor of the stimulated-echo pulse sequence (gradient
            # strength = dac*conv); the attenuation exponent coefficient.
            bfac = (gamma*dac*delta*conv)**2*( (tau+tm) / 4 + delta/12. + 5.*delta/16./N.pi**2)
            real,imag = run.accu_data[:,0],run.accu_data[:,2]
            mag = N.sqrt(real**2 + imag**2)
            # Signal amplitude: mean over the echo window minus the noise
            # baseline estimated from the last 1024 samples.
            sig = mag[start:stop].mean()-mag[-1024:].mean()
            sig_err= mag[-1024:].std()/N.sqrt((stop-start))
            # NOTE(review): bare except here swallows everything -- it is
            # only meant to catch the KeyError on first access to this key.
            try:
                results[tbvt].append([bfac,sig,sig_err,delta,tau,tm,dac])
            except:
                results[tbvt] = []
                results[tbvt].append([bfac,sig,sig_err,delta,tau,tm,dac])
    # Convert to an array and split out the columns for this temperature.
    results[tbvt] = N.array(results[tbvt])
    x = results[tbvt][:,0]
    y = results[tbvt][:,1]
    y_err = results[tbvt][:,2]
    delta = results[tbvt][:,3]
    tau = results[tbvt][:,4]
    tm = results[tbvt][:,5]
    dac = results[tbvt][:,6]
    # Create sets
    deltas = set(delta)
    taus = set(tau)
    tms = set(tm)
    P.subplot(211)
    # Select the single measurements sets (same tau,tm,delta) and normalize them to g==0
    # NOTE(review): mask is a one-element list holding a boolean array; numpy
    # accepts this for fancy indexing, but a plain boolean array would be
    # clearer.  It is also computed before the normalization below.
    mask = [y>5e-2]
    for de in deltas:
        for ta in taus:
            for t in tms:
                ind_de = delta==de
                ind_ta = tau==ta
                ind_t = tm==t
                ind_dac0 = dac==0
                # This is a set
                ind = ind_de*ind_ta*ind_t
                ind_norm = ind_de*ind_ta*ind_t*ind_dac0
                # Normalise signal and error by the zero-gradient point.
                y_err[ind] /= y[ind_norm]
                y[ind] /= y[ind_norm]
    x_err = x*0.05
    #assume 5% error from calibration
    # Orthogonal distance regression fit of the decay model to the
    # normalised attenuation curve.
    data = scipy.odr.Data(x=x[mask],y=y[mask],wd=y_err[mask])
    model = scipy.odr.Model(diffusion)
    odr = scipy.odr.ODR(data,model,beta0=[2e-13,0.1], ifixx=(1,))
    odr.run()
    print "ODR Result"
    odr.output.pprint()
    print "Polyfit"
    # Linear fit of log(signal) vs b-factor as a cross-check of the ODR fit.
    print N.polyfit(x[y>5e-2],N.log(y[y>5e-2]),1)
    P.semilogy(x[mask],y[mask],'o')
    P.show()
| bsd-2-clause | -8,492,228,953,505,733,000 | 24.483516 | 88 | 0.626994 | false |
emrecimen/CIOL-ICF | ICF_Training_Test.py | 1 | 8182 |
# This file is the implementation of the algorithm 1 and includes the main function
# One can use this file if the training and test files are separated.
# Inputs can be tuned in the corresponding area
# The accuracy rates and all function parameters are printed to the console as output
# Dataset can be in csv or arff file.
# If there are n classes; class labels should be integers 1 to n in the last column
import numpy as np
from gurobipy import *
import math
import csv
import arff
import time
import ICF_Purity
def PCFl2(Ajr, Bjr, cjr, purity):
    """Build a spherical (l2-ball) separator for a pure cluster
    (Algorithm 1, Step 2, no-LP branch).

    The radius gamma is the midpoint between the farthest own-class point
    in Ajr and the nearest other-class point in Bjr, both measured from
    the cluster centre cjr.
    """
    own_radii = np.linalg.norm(Ajr - cjr, axis=1)
    other_radii = np.linalg.norm(Bjr - cjr, axis=1)
    gamma = (own_radii.max() + other_radii.min()) / 2.0
    return {'gamma': gamma, 'c': cjr, 'purity': purity}
# Solving P_r. LP model in Step 2 of Algorithm 1.
def PCF(Ajr, Bjr, cjr, status, purity):
    """Fit a polyhedral conic function separating cluster Ajr (class points,
    centred at cjr) from the other-class points Bjr by solving an LP with
    Gurobi.

    Minimises the mean violation of the margin constraints over both point
    sets.  Returns a dict with the fitted parameters: 'w' (linear term),
    'ksi' (l1-cone coefficient), 'gamma' (level), plus the centre, the
    cluster purity, and 's' (the accumulated Gurobi status codes).

    NOTE(review): depends on the module-level global `nn` (feature count)
    and on Python 2 semantics -- `w = range(nn)` is used as a mutable list.
    Writes 'model.sol' to the working directory as a side effect.
    """
    # Create optimization model
    m = Model('PCF')
    # Create variables
    gamma = m.addVar(vtype=GRB.CONTINUOUS, lb=1, name='gamma')
    w = range(nn)
    for a in range(nn):
        w[a] = m.addVar(vtype=GRB.CONTINUOUS, name='w[%s]' % a)
    ksi = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='ksi')
    m.update()
    # Slack variables: hataA/hataB hold the per-point constraint violations.
    hataA = {}
    hataB = {}
    for i in range(len(Ajr)):
        hataA[i] = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='hataA[%s]' % i)
        m.update()
        # Class points must satisfy g(x) <= -1 (inside the cone), up to slack.
        m.addConstr(quicksum((Ajr[i][j] - cjr[j]) * w[j] for j in range(len(cjr))) + (ksi * quicksum(math.fabs(Ajr[i][j] - cjr[j]) for j in range(len(cjr)))) - gamma + 1.0 <= hataA[i])
    for z in range(len(Bjr)):
        hataB[z] = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='hataB[%s]' % z)
        m.update()
        # Other-class points must satisfy g(x) >= 1 (outside), up to slack.
        m.addConstr(quicksum((Bjr[z][r] - cjr[r]) * -w[r] for r in range(len(cjr))) - (ksi * quicksum(math.fabs(Bjr[z][q] - cjr[q]) for q in range(len(cjr)))) + gamma + 1.0 <= hataB[z])
    m.update()
    # Objective: average violation over A plus average violation over B.
    m.setObjective((quicksum(hataA[k] for k in range(len(hataA))) / len(hataA))+(quicksum(hataB[l] for l in range(len(hataB))) / len(hataB)), GRB.MINIMIZE)
    m.update()
    # Compute optimal solution
    m.optimize()
    m.write('model.sol')
    status.append(m.Status)
    ww=[]
    for i in range(len(cjr)):
        ww.append(w[i].X)
    return {'s':status,'w': ww, 'gamma': gamma.x, 'ksi': ksi.x, 'c':cjr, 'purity':purity}
def findgj(Aj, centroids, B, status, Rs, Rbs, purity):
    """Build one classifier per sub-cluster of the current class
    (Algorithm 1, Steps 1-2).

    For each cluster r: impure clusters (purity[r] < tolpr) get an LP-fitted
    polyhedral conic function (PCF); sufficiently pure ones get the cheaper
    l2-ball separator (PCFl2).  Points already covered by the cluster radii
    are removed first via ICF_Purity.eliminateWithR.

    Depends on the module-level global `tolpr`.  Returns the updated Gurobi
    status list and the list of classifier dicts for this class.
    """
    gj = []
    r = 0
    for Ajr in Aj:
        if purity[r] < tolpr:
            newAjr, newB = ICF_Purity.eliminateWithR(Ajr, B, centroids[r], Rs[r], Rbs[r])
            sonuc = PCF(newAjr, newB, centroids[r], status, purity[r])
            status = sonuc['s']
            gj.append(sonuc)
        else:
            newAjr, newB = ICF_Purity.eliminateWithR(Ajr, B, centroids[r], Rs[r], Rbs[r])
            sonuc = PCFl2(newAjr, newB, centroids[r], purity[r])
            gj.append(sonuc)
        r = r + 1
    return status, gj
def pcfDeger(w, ksi, gamma, c, x):
    """Evaluate the polyhedral conic function
    g(x) = <w, x-c> + ksi*||x-c||_1 - gamma at point x."""
    diff = x - c
    return np.dot(w, diff) + ksi * np.sum(np.abs(diff)) - gamma
def pcfl2Deger(gamma, c, x):
    """Evaluate the spherical classifier ||x - c||_2 - gamma at point x
    (negative inside the ball, positive outside)."""
    return np.sqrt(np.square(x - c).sum()) - gamma
def sinifBul(data):
    """Predict a class label for every row of `data`.

    Each row's features (all but the last column, which holds the true
    label) are evaluated against every classifier in the module-level list
    `g`; the predicted class is the one whose classifier attains the
    smallest function value.

    NOTE(review): pure clusters are tested with purity > tolpr here while
    findgj builds them for purity >= tolpr -- a sample with purity exactly
    equal to tolpr would hit a KeyError on 'w'.  The g_deger/gj_deger
    accumulators are collected but never used.
    """
    sinifTahmini = []
    g_deger = []
    for d in data:
        t = 1
        enkDeger = float('inf')
        gj_deger = []
        for gj in g:
            gjr_deger = []
            for gjr in gj:
                # Pick the evaluator matching how the classifier was built.
                if gjr['purity'] > tolpr:
                    fonkDeger = pcfl2Deger(gjr['gamma'], gjr['c'], d[0:-1])
                else:
                    fonkDeger = pcfDeger(gjr['w'], gjr['ksi'], gjr['gamma'], gjr['c'], d[0:-1])
                gjr_deger.append(fonkDeger)
                # Track the overall minimum and the class index attaining it.
                if (fonkDeger < enkDeger):
                    enkDeger = fonkDeger
                    sinifT = t
            t = t + 1
            gj_deger.append(gjr_deger)
        g_deger.append(gj_deger)
        sinifTahmini.append(sinifT)
    return sinifTahmini
def egitimOraniniHesapla(gj, sinifEtiket, dataTrain):
    """Return the fraction of `dataTrain` rows classified correctly by the
    single-class classifier list `gj` for class label `sinifEtiket`.

    A row counts as correct when the minimum classifier value is negative
    and its label matches, or non-negative and its label differs.
    """
    inside_correct = 0.0
    outside_correct = 0.0
    for sample in dataTrain:
        features = sample[0:-1]
        best = min(pcfDeger(f['w'], f['ksi'], f['gamma'], f['c'], features)
                   for f in gj)
        if best < 0:
            if sample[-1] == sinifEtiket:
                inside_correct += 1
        elif sample[-1] != sinifEtiket:
            outside_correct += 1
    return (float(inside_correct) + float(outside_correct)) / len(dataTrain)
# Read arff file
def arffOku(dosya):
    """Read an ARFF file and return its data section as a 2-D numpy array
    of floats.

    `dosya` is a path; the file is parsed with the `arff` package and every
    cell is converted to float.
    """
    # Use a context manager: the original passed open(dosya, 'rb') directly
    # to arff.load and the handle was never closed.
    with open(dosya, 'rb') as handle:
        d = arff.load(handle)
    v = []
    for dd in d['data']:
        satir = []
        for ddd in dd:
            satir.append(float(ddd))
        v.append(satir)
    v = np.array(v)
    return v
# Read csv file
def readData(dosya):
    """Read a CSV file of numeric values and return it as a 2-D numpy array
    of floats.

    `dosya` is a path to the file; every cell must parse as float.
    """
    # Use a context manager: the original opened the file and never closed
    # it, leaking the handle.
    # NOTE(review): quotechar=',' equals the delimiter -- harmless for plain
    # numeric CSVs but probably '"' was intended; kept for compatibility.
    with open(dosya) as handle:
        okuyucu = csv.reader(handle, quotechar=',')
        data = []
        for row in okuyucu:
            satirVeri = []
            for deger in row:
                satirVeri.append(float(deger))
            data.append(satirVeri)
    data = np.array(data)
    return data
###################### MAIN FUNCTION STARTS HERE ###############################
start_time = time.time()

###################### INPUTS ###############################
dataTrain = readData('/Users/exampleTrain.csv') #Dataset paths should be given here.
dataTest = readData('/Users/exampleTest.csv')
#dataTrain = arffOku('exampleTrain.arff')
tolpr=0.90 #epsilon 1 in algorithm 1
#epsilon 1 in algorithm 1 This parameter is a threshold to decide whether an LP is necessary, or not, in the ICF algorithm.
# High values for this parameter increases the chance of calling the LP for PCF construction, while its low values favor for algebraic cone construction
# (corresponding to a faster, but possibly lower resolution result).
######################

# mm=len(data) # row size
nn=len(dataTrain[0])-1 # feature size
sinifSayisi = int(np.max(dataTrain[:,-1])) # classes must be 1 to n in the last column ...................................
status = []
g=[]
# One-vs-rest training: for each class, split the training set into Aj (this
# class) and Bj (all other classes) and fit the class's classifier list.
for sinif in range(1,sinifSayisi+1):
    Aj = []
    Bj = []
    for d in dataTrain:
        if d[-1] == sinif:
            Aj.append(d[ 0:-1])
        else:
            Bj.append(d[ 0:-1])
    Aj=np.array(Aj)
    Bj=np.array(Bj)
    centroids, clusters, resR, resRB, purities = ICF_Purity.getPureClusters(Aj, Bj) # Call algorithm 2 here
    status,gj=findgj(clusters, centroids, Bj,status,resR, resRB, purities ) # Calling Algorithm 1, Step 1-2
    g.append(gj)

#--------------------------------TESTING---------------------------------------------------- '''
# Predict labels for both splits using the trained classifier lists in g.
sinifTahminiTrain=sinifBul(dataTrain)
gercekSinifTrain=dataTrain[:,-1]
sinifTahminiTest=sinifBul(dataTest)
gercekSinifTest=dataTest[:,-1]

#Calculating training accuracy
EgitimDogrulukOrani= round(100.0*(np.sum((sinifTahminiTrain==gercekSinifTrain)))/len(dataTrain),2)
#Calculating test accuracy
TestDogrulukOrani= round(100.0*(np.sum((sinifTahminiTest==gercekSinifTest) ))/len(dataTest),2)

# Report each fitted classifier's parameters (Python 2 print statements).
print "########################################################"
j=1
for gj in g:
    r=1
    print "For class ", j,"the classifiers are:"
    for gjr in gj:
        if gjr['purity'] < tolpr:
            print j ,".class ", r ,".cluster classification function that separates A from B: gjr = w.(x-c) + ksi*|w.(x-c)|-gamma "
            print "w =", gjr['w']
            print "ksi =", gjr['ksi']
            print "gamma =", gjr['gamma']
            print "center =", gjr['c']
        else:
            print j,".class ", r, ".cluster classification function that separates A from B: gjr = |x-c|_2 - gamma "
            print "gamma =", gjr['gamma']
            print "center =", gjr['c']
        print "-----------------------------------------------------------"
        r=r+1
    j=j+1
print "##################################################################"
print "Training Accuracy : %", EgitimDogrulukOrani
print "Test Accuracy : % ", TestDogrulukOrani
print "##################################################################"
print("--- %s seconds elapsed ---" % (time.time() - start_time))
| mit | -6,865,739,474,599,376,000 | 26.641892 | 185 | 0.558788 | false |
jesseklein406/django-imager | imagersite/imager_images/migrations/0004_auto_20150728_1555.py | 1 | 1063 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration for the imager_images app: relaxes
    several Album/Photo fields so empty values are allowed (blank/null)."""

    dependencies = [
        ('imager_images', '0003_auto_20150726_1224'),
    ]

    operations = [
        # Album.date_published may now be unset.
        migrations.AlterField(
            model_name='album',
            name='date_published',
            field=models.DateField(null=True, blank=True),
        ),
        # Album.description may now be left blank.
        migrations.AlterField(
            model_name='album',
            name='description',
            field=models.TextField(blank=True),
        ),
        # Photo.date_published may now be unset.
        migrations.AlterField(
            model_name='photo',
            name='date_published',
            field=models.DateField(null=True, blank=True),
        ),
        # Photo.description may now be left blank.
        migrations.AlterField(
            model_name='photo',
            name='description',
            field=models.TextField(blank=True),
        ),
        # Photo.title may now be left blank (max length unchanged).
        migrations.AlterField(
            model_name='photo',
            name='title',
            field=models.CharField(max_length=256, blank=True),
        ),
    ]
| mit | -1,288,981,641,645,240,300 | 26.25641 | 63 | 0.543744 | false |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.