from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Mitee1(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field mitee_hdr_pyld_len: ax25_frame.payload.ax25_info.mitee_hdr.pyld_len
:field mitee_hdr_pkt_no: ax25_frame.payload.ax25_info.mitee_hdr.pkt_no
:field mitee_hdr_pkt_cnt: ax25_frame.payload.ax25_info.mitee_hdr.pkt_cnt
:field mitee_hdr_grp_no: ax25_frame.payload.ax25_info.mitee_hdr.grp_no
:field mitee_hdr_grp_size: ax25_frame.payload.ax25_info.mitee_hdr.grp_size
:field mitee_hdr_status: ax25_frame.payload.ax25_info.mitee_hdr.status
:field mitee_hdr_chksm: ax25_frame.payload.ax25_info.mitee_hdr.hdr_chksm
:field mitee_stats_bcn_tx: ax25_frame.payload.ax25_info.comms.mitee_stats.bcn_tx
:field mitee_stats_pkts_tx: ax25_frame.payload.ax25_info.comms.mitee_stats.pkts_tx
:field mitee_stats_pkts_rx: ax25_frame.payload.ax25_info.comms.mitee_stats.pkts_rx
:field mitee_stats_bytes_tx: ax25_frame.payload.ax25_info.comms.mitee_stats.bytes_tx
:field mitee_stats_bytes_rx: ax25_frame.payload.ax25_info.comms.mitee_stats.bytes_rx
:field mitee_stats_sync_errs: ax25_frame.payload.ax25_info.comms.mitee_stats.sync_errs
:field mitee_stats_hdr_chksm_errs: ax25_frame.payload.ax25_info.comms.mitee_stats.hdr_chksm_errs
:field mitee_stats_pyld_chksm_errs: ax25_frame.payload.ax25_info.comms.mitee_stats.pyld_chksm_errs
:field mitee_stats_pyld_avail_errs: ax25_frame.payload.ax25_info.comms.mitee_stats.pyld_avail_errs
:field mitee_stats_exec_cmds: ax25_frame.payload.ax25_info.comms.mitee_stats.exec_cmds
:field radio_stats_pkts_tx: ax25_frame.payload.ax25_info.comms.radio_stats.pkts_tx
:field radio_stats_pkts_rx: ax25_frame.payload.ax25_info.comms.radio_stats.pkts_rx
:field radio_stats_bytes_tx: ax25_frame.payload.ax25_info.comms.radio_stats.bytes_tx
:field radio_stats_bytes_rx: ax25_frame.payload.ax25_info.comms.radio_stats.bytes_rx
:field radio_stats_hdr_chksm_errs: ax25_frame.payload.ax25_info.comms.radio_stats.hdr_chksm_errs
:field radio_stats_pyld_chksm_errs: ax25_frame.payload.ax25_info.comms.radio_stats.pyld_chksm_errs
:field radio_stats_pyld_len_errs: ax25_frame.payload.ax25_info.comms.radio_stats.pyld_len_errs
:field radio_stats_uart_errs: ax25_frame.payload.ax25_info.comms.radio_stats.uart_errs
:field radio_stats_fail_timeouts: ax25_frame.payload.ax25_info.comms.radio_stats.fail_timeouts
:field sdep_stats_cmds_tx: ax25_frame.payload.ax25_info.comms.sdep_stats.cmds_tx
:field sdep_stats_resps_rx: ax25_frame.payload.ax25_info.comms.sdep_stats.resps_rx
:field sdep_stats_trx_tx: ax25_frame.payload.ax25_info.comms.sdep_stats.trx_tx
    :field sdep_stats_bytes_tx: ax25_frame.payload.ax25_info.comms.sdep_stats.bytes_tx
    :field sdep_stats_bytes_rx: ax25_frame.payload.ax25_info.comms.sdep_stats.bytes_rx
    :field sdep_stats_fail_timeouts: ax25_frame.payload.ax25_info.comms.sdep_stats.fail_timeouts
:field rst_stats_sat: ax25_frame.payload.ax25_info.comms.rst_stats.sat
:field rst_stats_comms: ax25_frame.payload.ax25_info.comms.rst_stats.comms
:field rst_stats_main: ax25_frame.payload.ax25_info.comms.rst_stats.main
:field radio_cfg_interface_baud_rate: ax25_frame.payload.ax25_info.comms.radio_cfg.interface_baud_rate
:field radio_cfg_tx_power_amp_level: ax25_frame.payload.ax25_info.comms.radio_cfg.tx_power_amp_level
:field radio_cfg_rx_rf_baud_rate: ax25_frame.payload.ax25_info.comms.radio_cfg.rx_rf_baud_rate
:field radio_cfg_tx_rf_baud_rate: ax25_frame.payload.ax25_info.comms.radio_cfg.tx_rf_baud_rate
:field radio_cfg_rx_modulation: ax25_frame.payload.ax25_info.comms.radio_cfg.rx_modulation
:field radio_cfg_tx_modulation: ax25_frame.payload.ax25_info.comms.radio_cfg.tx_modulation
:field radio_cfg_rx_freq: ax25_frame.payload.ax25_info.comms.radio_cfg.rx_freq
:field radio_cfg_tx_freq: ax25_frame.payload.ax25_info.comms.radio_cfg.tx_freq
:field radio_cfg_src: ax25_frame.payload.ax25_info.comms.radio_cfg.src
:field radio_cfg_dst: ax25_frame.payload.ax25_info.comms.radio_cfg.dst
:field radio_cfg_tx_preamble: ax25_frame.payload.ax25_info.comms.radio_cfg.tx_preamble
:field radio_cfg_tx_postamble: ax25_frame.payload.ax25_info.comms.radio_cfg.tx_postamble
:field radio_cfg_function_cfg1: ax25_frame.payload.ax25_info.comms.radio_cfg.function_cfg1
:field radio_cfg_function_cfg2: ax25_frame.payload.ax25_info.comms.radio_cfg.function_cfg2
:field radio_bcn_interval: ax25_frame.payload.ax25_info.comms.radio_bcn_interval
:field radio_tlm_data_op_counter: ax25_frame.payload.ax25_info.comms.radio_tlm_data.op_counter
:field radio_tlm_data_msp430_temp: ax25_frame.payload.ax25_info.comms.radio_tlm_data.msp430_temp
:field radio_tlm_data_timecount1: ax25_frame.payload.ax25_info.comms.radio_tlm_data.timecount1
:field radio_tlm_data_timecount2: ax25_frame.payload.ax25_info.comms.radio_tlm_data.timecount2
:field radio_tlm_data_timecount3: ax25_frame.payload.ax25_info.comms.radio_tlm_data.timecount3
:field radio_tlm_data_rssi: ax25_frame.payload.ax25_info.comms.radio_tlm_data.rssi
:field radio_tlm_data_bytes_rx: ax25_frame.payload.ax25_info.comms.radio_tlm_data.bytes_rx
:field radio_tlm_data_bytes_tx: ax25_frame.payload.ax25_info.comms.radio_tlm_data.bytes_tx
:field radio_fw_rev_num: ax25_frame.payload.ax25_info.comms.radio_fw_rev_num
:field mission_cnt: ax25_frame.payload.ax25_info.comms.mission_cnt
:field ps_temp: ax25_frame.payload.ax25_info.comms.ps_temp
:field second: ax25_frame.payload.ax25_info.cdh.datetime.second
:field minute: ax25_frame.payload.ax25_info.cdh.datetime.minute
:field hour: ax25_frame.payload.ax25_info.cdh.datetime.hour
:field day_of_week: ax25_frame.payload.ax25_info.cdh.datetime.day_of_week
:field day_of_month: ax25_frame.payload.ax25_info.cdh.datetime.day_of_month
:field month: ax25_frame.payload.ax25_info.cdh.datetime.month
:field year: ax25_frame.payload.ax25_info.cdh.datetime.year
:field boot_count: ax25_frame.payload.ax25_info.cdh.boot_count
:field error_count: ax25_frame.payload.ax25_info.cdh.error_count
:field error_last: ax25_frame.payload.ax25_info.cdh.error_last
:field ttc_state: ax25_frame.payload.ax25_info.cdh.ttc_state
:field ttc_remaining: ax25_frame.payload.ax25_info.cdh.ttc_remaining
:field ttc_queue_0: ax25_frame.payload.ax25_info.cdh.ttc_queue_0
:field ttc_queue_1: ax25_frame.payload.ax25_info.cdh.ttc_queue_1
:field ttc_queue_2: ax25_frame.payload.ax25_info.cdh.ttc_queue_2
:field ttc_queue_3: ax25_frame.payload.ax25_info.cdh.ttc_queue_3
:field ttc_queue_4: ax25_frame.payload.ax25_info.cdh.ttc_queue_4
:field ttc_queue_5: ax25_frame.payload.ax25_info.cdh.ttc_queue_5
:field ttc_queue_6: ax25_frame.payload.ax25_info.cdh.ttc_queue_6
:field ttc_queue_7: ax25_frame.payload.ax25_info.cdh.ttc_queue_7
:field bat_tmp: ax25_frame.payload.ax25_info.eps.bat_tmp
:field reg_temp_5v: ax25_frame.payload.ax25_info.eps.reg_temp_5v
:field volt_5v: ax25_frame.payload.ax25_info.eps.volt_5v
:field volt_digital: ax25_frame.payload.ax25_info.eps.volt_digital
:field volt_analog: ax25_frame.payload.ax25_info.eps.volt_analog
:field batt_charge: ax25_frame.payload.ax25_info.eps.batt_charge
:field batt_load: ax25_frame.payload.ax25_info.eps.batt_load
:field curr_5v: ax25_frame.payload.ax25_info.eps.curr_5v
:field curr_digital: ax25_frame.payload.ax25_info.eps.curr_digital
:field curr_analog: ax25_frame.payload.ax25_info.eps.curr_analog
:field volt_solar: ax25_frame.payload.ax25_info.eps.volt_solar
:field volt_batt: ax25_frame.payload.ax25_info.eps.volt_batt
:field batt_heater: ax25_frame.payload.ax25_info.eps.batt_heater
:field bdot_on: ax25_frame.payload.ax25_info.adcs.bdot_on
:field magtorq_x: ax25_frame.payload.ax25_info.adcs.magtorq.force_x
:field magtorq_y: ax25_frame.payload.ax25_info.adcs.magtorq.force_y
:field magtorq_z: ax25_frame.payload.ax25_info.adcs.magtorq.force_z
:field gyro_x: ax25_frame.payload.ax25_info.adcs.imu.gyro_x
:field gyro_y: ax25_frame.payload.ax25_info.adcs.imu.gyro_y
:field gyro_z: ax25_frame.payload.ax25_info.adcs.imu.gyro_z
:field accel_x: ax25_frame.payload.ax25_info.adcs.imu.accel_x
:field accel_y: ax25_frame.payload.ax25_info.adcs.imu.accel_y
:field accel_z: ax25_frame.payload.ax25_info.adcs.imu.accel_z
:field imu_temp: ax25_frame.payload.ax25_info.adcs.imu.temp
:field pd_top: ax25_frame.payload.ax25_info.adcs.photodiode.pd_top
:field pd_left: ax25_frame.payload.ax25_info.adcs.photodiode.pd_left
:field pd_bottom: ax25_frame.payload.ax25_info.adcs.photodiode.pd_bottom
:field pd_right: ax25_frame.payload.ax25_info.adcs.photodiode.pd_right
:field magtom_0_x: ax25_frame.payload.ax25_info.adcs.magtom_0.x
:field magtom_0_y: ax25_frame.payload.ax25_info.adcs.magtom_0.y
:field magtom_0_z: ax25_frame.payload.ax25_info.adcs.magtom_0.z
:field magtom_1_x: ax25_frame.payload.ax25_info.adcs.magtom_1.x
:field magtom_1_y: ax25_frame.payload.ax25_info.adcs.magtom_1.y
:field magtom_1_z: ax25_frame.payload.ax25_info.adcs.magtom_1.z
:field magtom_2_x: ax25_frame.payload.ax25_info.adcs.magtom_2.x
:field magtom_2_y: ax25_frame.payload.ax25_info.adcs.magtom_2.y
:field magtom_2_z: ax25_frame.payload.ax25_info.adcs.magtom_2.z
:field pyld_chksm: ax25_frame.payload.ax25_info.pyld_chksm
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Mitee1.Ax25Frame(self._io, self, self._root)
class MiteeHdr(KaitaiStruct):
"""Currently only considers beacon packets."""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
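            # Beacon packets open with the ASCII sync marker "Mi" (0x4d 0x69)
            # followed by the fixed beacon command ID 0xa0 0x00; anything else
            # fails the validation below.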
self.sync = self._io.read_bytes(2)
if not self.sync == b"\x4D\x69":
raise kaitaistruct.ValidationNotEqualError(b"\x4D\x69", self.sync, self._io, u"/types/mitee_hdr/seq/0")
self.cmd_id = self._io.read_bytes(2)
if not self.cmd_id == b"\xA0\x00":
raise kaitaistruct.ValidationNotEqualError(b"\xA0\x00", self.cmd_id, self._io, u"/types/mitee_hdr/seq/1")
self.pyld_len = self._io.read_u1()
self.pkt_no = self._io.read_u1()
self.pkt_cnt = self._io.read_u1()
self.grp_no = self._io.read_u1()
self.grp_size = self._io.read_u1()
self.status = self._io.read_u1()
self.hdr_chksm = self._io.read_u2le()
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Mitee1.Ax25Header(self._io, self, self._root)
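            # ctl & 0x13 (19) keeps the AX.25 frame-type bits: 0x03 and 0x13
            # select UI (unnumbered information) frames, while 0x00, 0x02,
            # 0x10 and 0x12 are decoded as I frames.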
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Mitee1.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Mitee1.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Mitee1.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Mitee1.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Mitee1.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Mitee1.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Mitee1.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Mitee1.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Mitee1.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Mitee1.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class AdcsMagtorq(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.force_x = self._io.read_s1()
self.force_y = self._io.read_s1()
self.force_z = self._io.read_s1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Mitee1.Ax25Info(_io__raw_ax25_info, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class Adcs(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.bdot_on = self._io.read_u1()
self.magtorq = Mitee1.AdcsMagtorq(self._io, self, self._root)
self.imu = Mitee1.AdcsImu(self._io, self, self._root)
self.photodiode = Mitee1.AdcsPd(self._io, self, self._root)
self.magtom_0 = Mitee1.AdcsMagtom(self._io, self, self._root)
self.magtom_1 = Mitee1.AdcsMagtom(self._io, self, self._root)
self.magtom_2 = Mitee1.AdcsMagtom(self._io, self, self._root)
class Comms(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.mitee_stats = Mitee1.CommsMiteeStats(self._io, self, self._root)
self.radio_stats = Mitee1.CommsRadioStats(self._io, self, self._root)
self.sdep_stats = Mitee1.CommsSdepStats(self._io, self, self._root)
self.rst_stats = Mitee1.CommsRstStats(self._io, self, self._root)
self.radio_cfg = Mitee1.CommsRadioCfg(self._io, self, self._root)
self.radio_bcn_interval = self._io.read_u1()
self.radio_tlm_data = Mitee1.CommsRadioTlmData(self._io, self, self._root)
self.radio_fw_rev_num = self._io.read_u4le()
self.mission_cnt = self._io.read_u4le()
self.ps_temp = self._io.read_u2le()
class Cdh(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.datetime = Mitee1.CdhDatetime(self._io, self, self._root)
self.boot_count = self._io.read_u2le()
self.error_count = self._io.read_u4le()
self.error_last = self._io.read_u1()
self.ttc_state = self._io.read_u1()
self.ttc_remaining = self._io.read_u1()
self.ttc_queue_0 = self._io.read_u1()
self.ttc_queue_1 = self._io.read_u1()
self.ttc_queue_2 = self._io.read_u1()
self.ttc_queue_3 = self._io.read_u1()
self.ttc_queue_4 = self._io.read_u1()
self.ttc_queue_5 = self._io.read_u1()
self.ttc_queue_6 = self._io.read_u1()
self.ttc_queue_7 = self._io.read_u1()
self.padding = self._io.read_bytes(1)
class CommsRadioTlmData(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.op_counter = self._io.read_u2le()
self.msp430_temp = self._io.read_s2le()
self.timecount1 = self._io.read_u1()
self.timecount2 = self._io.read_u1()
self.timecount3 = self._io.read_u1()
self.rssi = self._io.read_u1()
self.bytes_rx = self._io.read_u4le()
self.bytes_tx = self._io.read_u4le()
class CdhDatetime(KaitaiStruct):
"""Current date and time (0000-01-01 00:00:00 = mission start)."""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.second = self._io.read_u1()
self.minute = self._io.read_u1()
self.hour = self._io.read_u1()
self.day_of_week = self._io.read_u1()
self.day_of_month = self._io.read_u1()
self.month = self._io.read_u1()
self.year = self._io.read_u2le()
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
        @property
        def ssid(self):
            if hasattr(self, '_m_ssid'):
                return self._m_ssid
            # Mask the low nibble and drop the AX.25 address-extension bit
            # (bit 0) to recover the SSID.
            self._m_ssid = ((self.ssid_mask & 15) >> 1)
            return self._m_ssid
class CommsSdepStats(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.cmds_tx = self._io.read_u4le()
self.resps_rx = self._io.read_u4le()
self.trx_tx = self._io.read_u4le()
self.bytes_tx = self._io.read_u4le()
self.bytes_rx = self._io.read_u4le()
self.fail_timeouts = self._io.read_u2le()
class CommsMiteeStats(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.bcn_tx = self._io.read_u4le()
self.pkts_tx = self._io.read_u4le()
self.pkts_rx = self._io.read_u4le()
self.bytes_tx = self._io.read_u4le()
self.bytes_rx = self._io.read_u4le()
self.sync_errs = self._io.read_u2le()
self.hdr_chksm_errs = self._io.read_u2le()
self.pyld_chksm_errs = self._io.read_u2le()
self.pyld_avail_errs = self._io.read_u2le()
self.exec_cmds = self._io.read_u2le()
class CommsRadioCfg(KaitaiStruct):
"""Lithium2 radio config."""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.interface_baud_rate = self._io.read_u1()
self.tx_power_amp_level = self._io.read_u1()
self.rx_rf_baud_rate = self._io.read_u1()
self.tx_rf_baud_rate = self._io.read_u1()
self.rx_modulation = self._io.read_u1()
self.tx_modulation = self._io.read_u1()
self.rx_freq = self._io.read_u4le()
self.tx_freq = self._io.read_u4le()
self.src = (self._io.read_bytes(6)).decode(u"ASCII")
self.dst = (self._io.read_bytes(6)).decode(u"ASCII")
self.tx_preamble = self._io.read_u2le()
self.tx_postamble = self._io.read_u2le()
self.function_cfg1 = self._io.read_u2le()
self.function_cfg2 = self._io.read_u2le()
class AdcsImu(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.gyro_x = self._io.read_s2le()
self.gyro_y = self._io.read_s2le()
self.gyro_z = self._io.read_s2le()
self.accel_x = self._io.read_s2le()
self.accel_y = self._io.read_s2le()
self.accel_z = self._io.read_s2le()
self.temp = self._io.read_s2le()
class Eps(KaitaiStruct):
"""EPS readings. 5V = 2048; 1V/A for current readings unless otherwise specified."""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.bat_tmp = self._io.read_s2le()
self.reg_temp_5v = self._io.read_s2le()
self.volt_5v = self._io.read_s2le()
self.volt_digital = self._io.read_s2le()
self.volt_analog = self._io.read_s2le()
self.batt_charge = self._io.read_s2le()
self.batt_load = self._io.read_s2le()
self.curr_5v = self._io.read_s2le()
self.curr_digital = self._io.read_s2le()
self.curr_analog = self._io.read_s2le()
self.volt_solar = self._io.read_s2le()
self.volt_batt = self._io.read_s2le()
self.batt_heater = self._io.read_u2le()
class CommsRadioStats(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pkts_tx = self._io.read_u4le()
self.pkts_rx = self._io.read_u4le()
self.bytes_tx = self._io.read_u4le()
self.bytes_rx = self._io.read_u4le()
self.hdr_chksm_errs = self._io.read_u2le()
self.pyld_chksm_errs = self._io.read_u2le()
self.pyld_len_errs = self._io.read_u2le()
self.uart_errs = self._io.read_u2le()
self.fail_timeouts = self._io.read_u2le()
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
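            # AX.25 transmits callsign bytes shifted left by one bit; rotating
            # each byte left by 7 (i.e. right by 1) restores plain ASCII.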
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Mitee1.Callsign(_io__raw_callsign_ror, self, self._root)
class AdcsMagtom(KaitaiStruct):
"""Magentometer axes (1 = 0.92 mGa)."""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.x = self._io.read_s2le()
self.y = self._io.read_s2le()
self.z = self._io.read_s2le()
class CommsRstStats(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.sat = self._io.read_u2le()
self.comms = self._io.read_u2le()
self.main = self._io.read_u2le()
class AdcsPd(KaitaiStruct):
"""Under-antenna photodiode raw values."""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pd_top = self._io.read_u2le()
self.pd_left = self._io.read_u2le()
self.pd_bottom = self._io.read_u2le()
self.pd_right = self._io.read_u2le()
class Ax25Info(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.mitee_hdr = Mitee1.MiteeHdr(self._io, self, self._root)
self.comms = Mitee1.Comms(self._io, self, self._root)
self.cdh = Mitee1.Cdh(self._io, self, self._root)
self.eps = Mitee1.Eps(self._io, self, self._root)
self.adcs = Mitee1.Adcs(self._io, self, self._root)
self.pyld_chksm = self._io.read_u2le()
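
# Minimal usage sketch, assuming a raw AX.25 frame capture in "frame.bin" (the
# file name and CLI handling are illustrative, not part of the decoder). Field
# paths follow the ":field" mapping in the Mitee1 docstring; the parsed
# telemetry (ax25_info) is only available for UI frames.
if __name__ == "__main__":
    import sys
    _pkt = Mitee1.from_file(sys.argv[1] if len(sys.argv) > 1 else "frame.bin")
    _hdr = _pkt.ax25_frame.ax25_header
    print("src callsign:", _hdr.src_callsign_raw.callsign_ror.callsign)
    _payload = getattr(_pkt.ax25_frame, "payload", None)
    if isinstance(_payload, Mitee1.UiFrame):  # beacon telemetry rides in UI frames
        print("beacons tx:", _payload.ax25_info.comms.mitee_stats.bcn_tx)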

# Source: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/mitee1.py (pypi)

from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Strand(KaitaiStruct):
""":field seq_no: seq_no
:field length: length
:field packet_type: packet_type
:field channel: body.channel
:field time_since_last_obc_i2c_message: body.data.time_since_last_obc_i2c_message
:field packets_up_count: body.data.packets_up_count
:field packets_down_count: body.data.packets_down_count
:field packets_up_dropped_count: body.data.packets_up_dropped_count
:field packets_down_dropped_count: body.data.packets_down_dropped_count
:field i2c_node_address: body.i2c_node_address
:field i2c_node_address: body.node.i2c_node_address
:field battery_0_current_direction: body.node.node.battery_0_current_direction
:field battery_0_current_ma: body.node.node.battery_0_current_ma
:field battery_0_voltage_v: body.node.node.battery_0_voltage_v
:field battery_0_temperature_deg_c: body.node.node.battery_0_temperature_deg_c
:field battery_1_current_direction: body.node.node.battery_1_current_direction
:field battery_1_current_ma: body.node.node.battery_1_current_ma
:field battery_1_voltage_v: body.node.node.battery_1_voltage_v
:field battery_1_temperature_deg_c: body.node.node.battery_1_temperature_deg_c
:field adc1_py_array_current: body.node.node.adc1_py_array_current
:field adc2_py_array_temperature: body.node.node.adc2_py_array_temperature
:field adc3_array_pair_y_voltage: body.node.node.adc3_array_pair_y_voltage
:field adc4_my_array_current: body.node.node.adc4_my_array_current
:field adc5_my_array_temperature: body.node.node.adc5_my_array_temperature
:field adc6_array_pair_x_voltage: body.node.node.adc6_array_pair_x_voltage
:field adc7_mx_array_current: body.node.node.adc7_mx_array_current
:field adc8_mx_array_temperature: body.node.node.adc8_mx_array_temperature
:field adc9_array_pair_z_voltage: body.node.node.adc9_array_pair_z_voltage
:field adc10_pz_array_current: body.node.node.adc10_pz_array_current
:field adc11_pz_array_temperature: body.node.node.adc11_pz_array_temperature
:field adc13_px_array_current: body.node.node.adc13_px_array_current
:field adc14_px_array_temperature: body.node.node.adc14_px_array_temperature
:field adc17_battery_bus_current: body.node.node.adc17_battery_bus_current
:field adc26_5v_bus_current: body.node.node.adc26_5v_bus_current
:field adc27_33v_bus_current: body.node.node.adc27_33v_bus_current
:field adc30_mz_array_temperature: body.node.node.adc30_mz_array_temperature
:field adc31_mz_array_current: body.node.node.adc31_mz_array_current
:field switch_0_ppt_power_supply_status: body.node.node.switch_0_ppt_power_supply_status
:field switch_1_ppt_1_2_status: body.node.node.switch_1_ppt_1_2_status
:field switch_2_phone_5v_webcam: body.node.node.switch_2_phone_5v_webcam
:field switch_3_warp_valve_status: body.node.node.switch_3_warp_valve_status
:field switch_4_warp_heater_status: body.node.node.switch_4_warp_heater_status
:field switch_5_digi_wi9c_status: body.node.node.switch_5_digi_wi9c_status
:field switch_6_sgr05_status: body.node.node.switch_6_sgr05_status
:field switch_7_reaction_wheels: body.node.node.switch_7_reaction_wheels
:field switch_8_solar_panel_deploy_arm: body.node.node.switch_8_solar_panel_deploy_arm
:field switch_9_solar_panel_deploy_fire: body.node.node.switch_9_solar_panel_deploy_fire
:field unix_time_little_endian: body.node.node.unix_time_little_endian
:field magnetometer_set_1: body.node.node.magnetometer_set_1
:field magnetometer_set_2: body.node.node.magnetometer_set_2
.. seealso::
Source - https://ukamsat.files.wordpress.com/2013/03/amsat-strand-1-20130327.xlsx
https://amsat-uk.org/satellites/telemetry/strand-1/strand-1-telemetry/
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hdlc_flag = self._io.read_bytes(2)
if not self.hdlc_flag == b"\xC0\x80":
raise kaitaistruct.ValidationNotEqualError(b"\xC0\x80", self.hdlc_flag, self._io, u"/seq/0")
self.seq_no = self._io.read_u1()
self.length = self._io.read_u1()
self.packet_type = self._io.read_u1()
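        # packet_type 1 carries modem (radio) beacon telemetry, 2 carries OBC
        # beacon telemetry; any other value leaves self.body unset.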
_on = self.packet_type
if _on == 1:
self.body = Strand.ModemBeaconTlm(self._io, self, self._root)
elif _on == 2:
self.body = Strand.ObcBeaconTlm(self._io, self, self._root)
self.crc_16_ccit = self._io.read_bytes(2)
class ChAdc1PyArrayCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc1_py_array_current = self._io.read_u1()
class ChSwitch1Ppt12Status(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_1_ppt_1_2_status = self._io.read_u1()
class ChAdc9ArrayPairZVoltage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc9_array_pair_z_voltage = self._io.read_u1()
class ChBattery1CurrentDirection(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_1_current_direction = self._io.read_u1()
class ModemBeaconTlm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.channel = self._io.read_u1()
_on = self.channel
if _on == 224:
self.data = Strand.ChTimeSinceLastObcI2cMessage(self._io, self, self._root)
elif _on == 227:
self.data = Strand.ChPacketsUpDroppedCount(self._io, self, self._root)
elif _on == 226:
self.data = Strand.ChPacketsDownCount(self._io, self, self._root)
elif _on == 225:
self.data = Strand.ChPacketsUpCount(self._io, self, self._root)
elif _on == 228:
self.data = Strand.ChPacketsDownDroppedCount(self._io, self, self._root)
class ChMagnetometerSet1(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.magnetometer_set_1 = self._io.read_u1()
class CsBattery(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.i2c_node_address = self._io.read_u1()
_on = self.i2c_node_address
if _on == 14:
self.node = Strand.ChAdc14PxArrayTemperature(self._io, self, self._root)
elif _on == 10:
self.node = Strand.ChAdc10PzArrayCurrent(self._io, self, self._root)
elif _on == 17:
self.node = Strand.ChAdc17BatteryBusCurrent(self._io, self, self._root)
elif _on == 4:
self.node = Strand.ChAdc4MyArrayCurrent(self._io, self, self._root)
elif _on == 6:
self.node = Strand.ChAdc6ArrayPairXVoltage(self._io, self, self._root)
elif _on == 7:
self.node = Strand.ChAdc7MxArrayCurrent(self._io, self, self._root)
elif _on == 1:
self.node = Strand.ChAdc1PyArrayCurrent(self._io, self, self._root)
elif _on == 27:
self.node = Strand.ChAdc2733vBusCurrent(self._io, self, self._root)
elif _on == 13:
self.node = Strand.ChAdc13PxArrayCurrent(self._io, self, self._root)
elif _on == 11:
self.node = Strand.ChAdc11PzArrayTemperature(self._io, self, self._root)
elif _on == 3:
self.node = Strand.ChAdc3ArrayPairYVoltage(self._io, self, self._root)
elif _on == 5:
self.node = Strand.ChAdc5MyArrayTemperature(self._io, self, self._root)
elif _on == 8:
self.node = Strand.ChAdc8MxArrayTemperature(self._io, self, self._root)
elif _on == 9:
self.node = Strand.ChAdc9ArrayPairZVoltage(self._io, self, self._root)
elif _on == 26:
self.node = Strand.ChAdc265vBusCurrent(self._io, self, self._root)
elif _on == 31:
self.node = Strand.ChAdc31MzArrayCurrent(self._io, self, self._root)
elif _on == 2:
self.node = Strand.ChAdc2PyArrayTemperature(self._io, self, self._root)
elif _on == 30:
self.node = Strand.ChAdc30MzArrayTemperature(self._io, self, self._root)
class ChAdc3ArrayPairYVoltage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc3_array_pair_y_voltage = self._io.read_u1()
class ChSwitch9SolarPanelDeployFire(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_9_solar_panel_deploy_fire = self._io.read_u1()
class ChBattery1VoltageV(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_1_voltage_v = self._io.read_u1()
class ChAdc4MyArrayCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc4_my_array_current = self._io.read_u1()
class ChAdc2PyArrayTemperature(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc2_py_array_temperature = self._io.read_u1()
class ChUnixTimeLittleEndian(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.unix_time_little_endian = self._io.read_u1()
class ChTimeSinceLastObcI2cMessage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.time_since_last_obc_i2c_message = self._io.read_u1()
class ChSwitch0PptPowerSupplyStatus(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_0_ppt_power_supply_status = self._io.read_u1()
class ChBattery0CurrentDirection(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_0_current_direction = self._io.read_u1()
class ObcData(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.i2c_node_address = self._io.read_u1()
_on = self.i2c_node_address
if _on == 12:
self.node = Strand.ChUnixTimeLittleEndian(self._io, self, self._root)
class ChMagnetometerSet2(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.magnetometer_set_2 = self._io.read_u1()
class ChAdc2733vBusCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc27_33v_bus_current = self._io.read_u1()
class ChSwitch6Sgr05Status(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_6_sgr05_status = self._io.read_u1()
class CsEps(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.i2c_node_address = self._io.read_u1()
_on = self.i2c_node_address
if _on == 0:
self.node = Strand.ChBattery0CurrentDirection(self._io, self, self._root)
elif _on == 4:
self.node = Strand.ChBattery0TemperatureDegC(self._io, self, self._root)
elif _on == 6:
self.node = Strand.ChBattery1CurrentMa(self._io, self, self._root)
elif _on == 1:
self.node = Strand.ChBattery0CurrentMa(self._io, self, self._root)
elif _on == 3:
self.node = Strand.ChBattery0VoltageV(self._io, self, self._root)
elif _on == 5:
self.node = Strand.ChBattery1CurrentDirection(self._io, self, self._root)
elif _on == 8:
self.node = Strand.ChBattery1VoltageV(self._io, self, self._root)
elif _on == 9:
self.node = Strand.ChBattery1TemperatureDegC(self._io, self, self._root)
class ChAdc7MxArrayCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc7_mx_array_current = self._io.read_u1()
class ChAdc31MzArrayCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc31_mz_array_current = self._io.read_u1()
class ChSwitch8SolarPanelDeployArm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_8_solar_panel_deploy_arm = self._io.read_u1()
class ChAdc5MyArrayTemperature(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc5_my_array_temperature = self._io.read_u1()
class ChPacketsDownDroppedCount(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.packets_down_dropped_count = self._io.read_u1()
class ObcBeaconTlm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.i2c_node_address = self._io.read_u1()
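            # The I2C node address selects the subsystem block: 44 = EPS,
            # 45 = battery, 102 = switch board, 128 = OBC data,
            # 137 = magnetometers; unmatched addresses leave self.node unset.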
_on = self.i2c_node_address
if _on == 45:
self.node = Strand.CsBattery(self._io, self, self._root)
elif _on == 137:
self.node = Strand.Magnetometers(self._io, self, self._root)
elif _on == 44:
self.node = Strand.CsEps(self._io, self, self._root)
elif _on == 102:
self.node = Strand.SwitchBoard(self._io, self, self._root)
elif _on == 128:
self.node = Strand.ObcData(self._io, self, self._root)
class ChPacketsUpCount(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.packets_up_count = self._io.read_u1()
class ChSwitch3WarpValveStatus(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_3_warp_valve_status = self._io.read_u1()
class ChBattery0VoltageV(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_0_voltage_v = self._io.read_u1()
class ChAdc13PxArrayCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc13_px_array_current = self._io.read_u1()
class ChAdc30MzArrayTemperature(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc30_mz_array_temperature = self._io.read_u1()
class ChAdc8MxArrayTemperature(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc8_mx_array_temperature = self._io.read_u1()
class ChAdc14PxArrayTemperature(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc14_px_array_temperature = self._io.read_u1()
class ChBattery0CurrentMa(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_0_current_ma = self._io.read_u1()
class ChAdc6ArrayPairXVoltage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc6_array_pair_x_voltage = self._io.read_u1()
class ChSwitch2Phone5vWebcam(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_2_phone_5v_webcam = self._io.read_u1()
class ChPacketsDownCount(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.packets_down_count = self._io.read_u1()
class ChAdc10PzArrayCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc10_pz_array_current = self._io.read_u1()
class ChAdc17BatteryBusCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc17_battery_bus_current = self._io.read_u1()
class ChAdc11PzArrayTemperature(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc11_pz_array_temperature = self._io.read_u1()
class ChSwitch4WarpHeaterStatus(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_4_warp_heater_status = self._io.read_u1()
class ChPacketsUpDroppedCount(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.packets_up_dropped_count = self._io.read_u1()
class ChSwitch7ReactionWheels(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_7_reaction_wheels = self._io.read_u1()
class ChBattery1TemperatureDegC(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_1_temperature_deg_c = self._io.read_u1()
class ChSwitch5DigiWi9cStatus(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.switch_5_digi_wi9c_status = self._io.read_u1()
class SwitchBoard(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.i2c_node_address = self._io.read_u1()
_on = self.i2c_node_address
if _on == 159:
self.node = Strand.ChSwitch6Sgr05Status(self._io, self, self._root)
elif _on == 169:
self.node = Strand.ChSwitch8SolarPanelDeployArm(self._io, self, self._root)
elif _on == 144:
self.node = Strand.ChSwitch3WarpValveStatus(self._io, self, self._root)
elif _on == 149:
self.node = Strand.ChSwitch4WarpHeaterStatus(self._io, self, self._root)
elif _on == 172:
self.node = Strand.ChSwitch9SolarPanelDeployFire(self._io, self, self._root)
elif _on == 164:
self.node = Strand.ChSwitch7ReactionWheels(self._io, self, self._root)
elif _on == 129:
self.node = Strand.ChSwitch0PptPowerSupplyStatus(self._io, self, self._root)
elif _on == 134:
self.node = Strand.ChSwitch1Ppt12Status(self._io, self, self._root)
elif _on == 139:
self.node = Strand.ChSwitch2Phone5vWebcam(self._io, self, self._root)
elif _on == 154:
self.node = Strand.ChSwitch5DigiWi9cStatus(self._io, self, self._root)
class ChBattery0TemperatureDegC(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_0_temperature_deg_c = self._io.read_u1()
class ChAdc265vBusCurrent(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.adc26_5v_bus_current = self._io.read_u1()
class Magnetometers(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.i2c_node_address = self._io.read_u1()
_on = self.i2c_node_address
if _on == 3:
self.node = Strand.ChMagnetometerSet1(self._io, self, self._root)
elif _on == 5:
self.node = Strand.ChMagnetometerSet2(self._io, self, self._root)
class ChBattery1CurrentMa(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.battery_1_current_ma = self._io.read_u1()
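
# Minimal usage sketch, assuming a raw Strand-1 frame in "frame.bin" (the file
# name is illustrative). _read() validates the leading 0xC0 0x80 HDLC flag and
# raises a ValidationNotEqualError on anything else.
if __name__ == "__main__":
    import sys
    _pkt = Strand.from_file(sys.argv[1] if len(sys.argv) > 1 else "frame.bin")
    print("seq_no:", _pkt.seq_no, "length:", _pkt.length, "packet_type:", _pkt.packet_type)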

# Source: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/strand.py (pypi)

from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Stratosattk1(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.ax25_header.pid
:field obc_timestamp: ax25_frame.payload.obc_timestamp
:field eps_cell_current: ax25_frame.payload.eps_cell_current
:field eps_system_current: ax25_frame.payload.eps_system_current
:field eps_cell_voltage_half: ax25_frame.payload.eps_cell_voltage_half
:field eps_cell_voltage_full: ax25_frame.payload.eps_cell_voltage_full
:field eps_integral_cell_current: ax25_frame.payload.eps_integral_cell_current
:field eps_integral_system_current: ax25_frame.payload.eps_integral_system_current
:field adc_temperature_pos_x: ax25_frame.payload.adc_temperature_pos_x
:field adc_temperature_neg_x: ax25_frame.payload.adc_temperature_neg_x
:field adc_temperature_pos_y: ax25_frame.payload.adc_temperature_pos_y
:field adc_temperature_neg_y: ax25_frame.payload.adc_temperature_neg_y
:field adc_temperature_pos_z: ax25_frame.payload.adc_temperature_pos_z
:field adc_temperature_neg_z: ax25_frame.payload.adc_temperature_neg_z
:field adc_temperature_cell1: ax25_frame.payload.adc_temperature_cell1
:field adc_temperature_cell2: ax25_frame.payload.adc_temperature_cell2
:field attitude_control: ax25_frame.payload.attitude_control
:field obc_cpu_load: ax25_frame.payload.obc_cpu_load
:field obc_boot_count: ax25_frame.payload.obc_boot_count
:field comm_boot_count: ax25_frame.payload.comm_boot_count
:field comm_rssi: ax25_frame.payload.comm_rssi
:field comm_received_packets: ax25_frame.payload.comm_received_packets
:field comm_sent_packets: ax25_frame.payload.comm_sent_packets
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Stratosattk1.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Stratosattk1.Ax25Header(self._io, self, self._root)
self.payload = Stratosattk1.StratosatBeaconTlm(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Stratosattk1.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Stratosattk1.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Stratosattk1.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Stratosattk1.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
self.pid = self._io.read_u1()
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
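            # Only the two callsigns seen in Stratosat-TK1 frames pass
            # validation: "BEACON" and "RS52S " (space-padded to six bytes).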
if not ((self.callsign == u"BEACON") or (self.callsign == u"RS52S ")) :
raise kaitaistruct.ValidationNotAnyOfError(self.callsign, self._io, u"/types/callsign/seq/0")
class StratosatBeaconTlm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.obc_timestamp = self._io.read_u4le()
self.eps_cell_current = self._io.read_u2le()
self.eps_system_current = self._io.read_u2le()
self.eps_cell_voltage_half = self._io.read_u2le()
self.eps_cell_voltage_full = self._io.read_u2le()
self.eps_integral_cell_current = self._io.read_u4le()
self.eps_integral_system_current = self._io.read_u4le()
self.adc_temperature_pos_x = self._io.read_s1()
self.adc_temperature_neg_x = self._io.read_s1()
self.adc_temperature_pos_y = self._io.read_s1()
self.adc_temperature_neg_y = self._io.read_s1()
self.adc_temperature_pos_z = self._io.read_s1()
self.adc_temperature_neg_z = self._io.read_s1()
self.adc_temperature_cell1 = self._io.read_s1()
self.adc_temperature_cell2 = self._io.read_s1()
self.attitude_control = self._io.read_u1()
self.obc_cpu_load = self._io.read_u1()
self.obc_boot_count = self._io.read_u2le()
self.comm_boot_count = self._io.read_u2le()
self.comm_rssi = self._io.read_s1()
self.comm_received_packets = self._io.read_u2le()
self.comm_sent_packets = self._io.read_u2le()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
        @property
        def ssid(self):
            if hasattr(self, '_m_ssid'):
                return self._m_ssid
            # Mask the low nibble and drop the AX.25 address-extension bit
            # (bit 0) to recover the SSID.
            self._m_ssid = ((self.ssid_mask & 15) >> 1)
            return self._m_ssid
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Stratosattk1.Callsign(_io__raw_callsign_ror, self, self._root)
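
# Minimal usage sketch, assuming a raw AX.25 frame in "frame.bin" (the file
# name is illustrative). Unlike the ctl-dispatched decoders above, every
# Stratosattk1 payload is a fixed StratosatBeaconTlm record.
if __name__ == "__main__":
    import sys
    _pkt = Stratosattk1.from_file(sys.argv[1] if len(sys.argv) > 1 else "frame.bin")
    _tlm = _pkt.ax25_frame.payload
    print("obc_timestamp:", _tlm.obc_timestamp, "obc_boot_count:", _tlm.obc_boot_count)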

# Source: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/stratosattk1.py (pypi)

from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Acrux1(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field tx_count: ax25_frame.payload.msp_payload.tx_count
:field rx_count: ax25_frame.payload.msp_payload.rx_count
:field rx_valid: ax25_frame.payload.msp_payload.rx_valid
:field payload_type: ax25_frame.payload.msp_payload.payload_type
:field comouti1: ax25_frame.payload.msp_payload.comouti1
:field comoutv1: ax25_frame.payload.msp_payload.comoutv1
:field comouti2: ax25_frame.payload.msp_payload.comouti2
:field comoutv2: ax25_frame.payload.msp_payload.comoutv2
:field comt2: ax25_frame.payload.msp_payload.comt2
:field epsadcbatv1: ax25_frame.payload.msp_payload.epsadcbatv1
:field epsloadi1: ax25_frame.payload.msp_payload.epsloadi1
:field epsadcbatv2: ax25_frame.payload.msp_payload.epsadcbatv2
:field epsboostini2: ax25_frame.payload.msp_payload.epsboostini2
:field epsrail1: ax25_frame.payload.msp_payload.epsrail1
:field epsrail2: ax25_frame.payload.msp_payload.epsrail2
:field epstoppanelv: ax25_frame.payload.msp_payload.epstoppanelv
:field epstoppaneli: ax25_frame.payload.msp_payload.epstoppaneli
:field epst1: ax25_frame.payload.msp_payload.epst1
:field epst2: ax25_frame.payload.msp_payload.epst2
:field xposv: ax25_frame.payload.msp_payload.xposv
:field xposi: ax25_frame.payload.msp_payload.xposi
:field xpost1: ax25_frame.payload.msp_payload.xpost1
:field yposv: ax25_frame.payload.msp_payload.yposv
:field yposi: ax25_frame.payload.msp_payload.yposi
:field ypost1: ax25_frame.payload.msp_payload.ypost1
:field xnegv: ax25_frame.payload.msp_payload.xnegv
:field xnegi: ax25_frame.payload.msp_payload.xnegi
:field xnegt1: ax25_frame.payload.msp_payload.xnegt1
:field ynegv: ax25_frame.payload.msp_payload.ynegv
:field ynegi: ax25_frame.payload.msp_payload.ynegi
:field ynegt1: ax25_frame.payload.msp_payload.ynegt1
:field znegv: ax25_frame.payload.msp_payload.znegv
:field znegi: ax25_frame.payload.msp_payload.znegi
:field znegt1: ax25_frame.payload.msp_payload.znegt1
:field zpost: ax25_frame.payload.msp_payload.zpost
:field cdhtime: ax25_frame.payload.msp_payload.cdhtime
:field swcdhlastreboot: ax25_frame.payload.msp_payload.swcdhlastreboot
:field swsequence: ax25_frame.payload.msp_payload.swsequence
:field outreachmessage: ax25_frame.payload.msp_payload.outreachmessage
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Acrux1.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Acrux1.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Acrux1.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Acrux1.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Acrux1.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Acrux1.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Acrux1.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Acrux1.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Acrux1.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Acrux1.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Acrux1.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Acrux1.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.msp_payload = Acrux1.MspPayloadT(self._io, self, self._root)
self.zero_padding = self._io.read_bytes(91)
self.fec8_rs_checksum = self._io.read_bytes(32)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
        @property
        def ssid(self):
            if hasattr(self, '_m_ssid'):
                return self._m_ssid
            # Mask the low nibble and drop the AX.25 address-extension bit
            # (bit 0) to recover the SSID.
            self._m_ssid = ((self.ssid_mask & 15) >> 1)
            return self._m_ssid
class MspPayloadT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.tx_count = self._io.read_u1()
self.rx_count = self._io.read_u1()
self.rx_valid = self._io.read_u1()
self.payload_type = self._io.read_u1()
self.comouti1 = self._io.read_s2le()
self.comoutv1 = self._io.read_s2le()
self.comouti2 = self._io.read_s2le()
self.comoutv2 = self._io.read_s2le()
self.comt2 = self._io.read_s2le()
self.epsadcbatv1 = self._io.read_s2le()
self.epsloadi1 = self._io.read_s2le()
self.epsadcbatv2 = self._io.read_s2le()
self.epsboostini2 = self._io.read_s2le()
self.epsrail1 = self._io.read_s2le()
self.epsrail2 = self._io.read_s2le()
self.epstoppanelv = self._io.read_s2le()
self.epstoppaneli = self._io.read_s2le()
self.epst1 = self._io.read_s2le()
self.epst2 = self._io.read_s2le()
self.xposv = self._io.read_s2le()
self.xposi = self._io.read_s2le()
self.xpost1 = self._io.read_s2le()
self.yposv = self._io.read_s2le()
self.yposi = self._io.read_s2le()
self.ypost1 = self._io.read_s2le()
self.xnegv = self._io.read_s2le()
self.xnegi = self._io.read_s2le()
self.xnegt1 = self._io.read_s2le()
self.ynegv = self._io.read_s2le()
self.ynegi = self._io.read_s2le()
self.ynegt1 = self._io.read_s2le()
self.znegv = self._io.read_s2le()
self.znegi = self._io.read_s2le()
self.znegt1 = self._io.read_s2le()
self.zpost = self._io.read_s2le()
self.cdhtime = self._io.read_u8le()
self.swcdhlastreboot = self._io.read_u8le()
self.swsequence = self._io.read_u2le()
self.outreachmessage = (self._io.read_bytes(48)).decode(u"ASCII")
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Acrux1.Callsign(_io__raw_callsign_ror, self, self._root)
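# --- Usage sketch (added; not part of the generated decoder) ---
# A minimal, hedged example of driving the Acrux1 class above; `raw_frame`
# is a hypothetical bytes object holding one demodulated AX.25 frame.
#
#     from satnogsdecoders.decoder.acrux1 import Acrux1
#
#     frame = Acrux1.from_bytes(raw_frame)  # KaitaiStruct >= 0.9 helper
#     hdr = frame.ax25_frame.ax25_header
#     print(hdr.src_callsign_raw.callsign_ror.callsign, hdr.src_ssid_raw.ssid)
#     payload = frame.ax25_frame.payload
#     if isinstance(payload, Acrux1.UiFrame):  # UI frames carry MSP telemetry
#         print(payload.msp_payload.cdhtime, payload.msp_payload.outreachmessage)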
# File: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/acrux1.py (source: pypi)
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Mysat(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field rpt_callsign: ax25_frame.ax25_header.repeater.rpt_instance[0].rpt_callsign_raw.callsign_ror.callsign
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field callsign: ax25_frame.payload.ax25_info.beacon_type.callsign
:field obc_mode: ax25_frame.payload.ax25_info.beacon_type.obc_mode
:field obc_reset_counter: ax25_frame.payload.ax25_info.beacon_type.obc_reset_counter
:field obc_uptime: ax25_frame.payload.ax25_info.beacon_type.obc_uptime
:field gyro_norm: ax25_frame.payload.ax25_info.beacon_type.gyro_norm
:field eps_reset_counter: ax25_frame.payload.ax25_info.beacon_type.eps_reset_counter
:field eps_last_boot_cause: ax25_frame.payload.ax25_info.beacon_type.eps_last_boot_cause
:field eps_battery_mode: ax25_frame.payload.ax25_info.beacon_type.eps_battery_mode
:field timestamp: ax25_frame.payload.ax25_info.beacon_type.timestamp
:field obc_temperature: ax25_frame.payload.ax25_info.beacon_type.obc_temperature
:field obc_daughterboard_temperature: ax25_frame.payload.ax25_info.beacon_type.obc_daughterboard_temperature
:field eps_battery_temperature: ax25_frame.payload.ax25_info.beacon_type.eps_battery_temperature
:field eps_board_temperature: ax25_frame.payload.ax25_info.beacon_type.eps_board_temperature
:field ants_temperature: ax25_frame.payload.ax25_info.beacon_type.ants_temperature
:field trxvu_temperature: ax25_frame.payload.ax25_info.beacon_type.trxvu_temperature
:field adcs_temperature: ax25_frame.payload.ax25_info.beacon_type.adcs_temperature
:field obc_3v3_voltage: ax25_frame.payload.ax25_info.beacon_type.obc_3v3_voltage
:field camera_voltage: ax25_frame.payload.ax25_info.beacon_type.camera_voltage
:field trxvu_voltage: ax25_frame.payload.ax25_info.beacon_type.trxvu_voltage
:field eps_battery_voltage: ax25_frame.payload.ax25_info.beacon_type.eps_battery_voltage
:field obc_5v_current: ax25_frame.payload.ax25_info.beacon_type.obc_5v_current
:field eps_total_pv_current: ax25_frame.payload.ax25_info.beacon_type.eps_total_pv_current
:field eps_total_system_current: ax25_frame.payload.ax25_info.beacon_type.eps_total_system_current
:field message: ax25_frame.payload.ax25_info.beacon_type.message
    Attention: `rpt_callsign` cannot be accessed reliably, because
    `rpt_instance` is an array whose size is unknown at the start of the
    parsing process; the mapping above is left in as an example only.
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Mysat.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Mysat.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Mysat.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Mysat.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Mysat.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Mysat.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Mysat.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Mysat.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Mysat.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Mysat.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Mysat.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Mysat.SsidMask(self._io, self, self._root)
if (self.src_ssid_raw.ssid_mask & 1) == 0:
self.repeater = Mysat.Repeater(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Mysat.MysatPayload(_io__raw_ax25_info, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class Repeaters(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_callsign_raw = Mysat.CallsignRaw(self._io, self, self._root)
self.rpt_ssid_raw = Mysat.SsidMask(self._io, self, self._root)
class Repeater(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_instance = []
i = 0
while True:
_ = Mysat.Repeaters(self._io, self, self._root)
self.rpt_instance.append(_)
if (_.rpt_ssid_raw.ssid_mask & 1) == 1:
break
i += 1
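# Note (added): the loop above walks the AX.25 digipeater address list; bit 0
# of each SSID octet is the address-extension bit, 0 while more repeater
# addresses follow and 1 on the last one, which terminates the list.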
class MysatMessage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.message = (self._io.read_bytes_full()).decode(u"utf-8")
class MysatPayload(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
_on = self._io.size()
if _on == 42:
self.beacon_type = Mysat.MysatTlm(self._io, self, self._root)
else:
self.beacon_type = Mysat.MysatMessage(self._io, self, self._root)
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Mysat.Callsign(_io__raw_callsign_ror, self, self._root)
class MysatTlm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(5)).decode(u"utf-8")
self.obc_mode = self._io.read_u1()
self.obc_reset_counter = self._io.read_u4le()
self.obc_uptime = self._io.read_u4le()
self.gyro_norm = self._io.read_u1()
self.eps_reset_counter = self._io.read_u4le()
self.eps_last_boot_cause = self._io.read_u1()
self.eps_battery_mode = self._io.read_u1()
self.timestamp = self._io.read_u4le()
self.obc_temperature = self._io.read_u1()
self.obc_daughterboard_temperature = self._io.read_u1()
self.eps_battery_temperature = self._io.read_u1()
self.eps_board_temperature = self._io.read_u1()
self.ants_temperature = self._io.read_u1()
self.trxvu_temperature = self._io.read_u1()
self.adcs_temperature = self._io.read_u1()
self.obc_3v3_voltage = self._io.read_u1()
self.camera_voltage = self._io.read_u1()
self.trxvu_voltage = self._io.read_u1()
self.eps_battery_voltage = self._io.read_u1()
self.obc_5v_current = self._io.read_u2le()
self.eps_total_pv_current = self._io.read_u2le()
self.eps_total_system_current = self._io.read_u2le()
@property
def frame_length(self):
if hasattr(self, '_m_frame_length'):
return self._m_frame_length if hasattr(self, '_m_frame_length') else None
self._m_frame_length = self._io.size()
return self._m_frame_length if hasattr(self, '_m_frame_length') else None
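# --- Usage sketch (added; not part of the generated decoder) ---
# MysatPayload dispatches on the payload size: exactly 42 bytes parse as
# MysatTlm telemetry, anything else as a free-text MysatMessage. A hedged
# example, with `raw_frame` a hypothetical received AX.25 frame:
#
#     from satnogsdecoders.decoder.mysat import Mysat
#
#     beacon = Mysat.from_bytes(raw_frame).ax25_frame.payload.ax25_info.beacon_type
#     if isinstance(beacon, Mysat.MysatTlm):
#         print(beacon.callsign, beacon.obc_uptime, beacon.eps_battery_voltage)
#     else:
#         print(beacon.message)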
# File: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/mysat.py (source: pypi)
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
import satnogsdecoders.process
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Vzlusat2(KaitaiStruct):
""":field csp_hdr_crc: csp_header.crc
:field csp_hdr_rdp: csp_header.rdp
:field csp_hdr_xtea: csp_header.xtea
:field csp_hdr_hmac: csp_header.hmac
:field csp_hdr_src_port: csp_header.source_port
:field csp_hdr_dst_port: csp_header.destination_port
:field csp_hdr_destination: csp_header.destination
:field csp_hdr_source: csp_header.source
:field csp_hdr_priority: csp_header.priority
:field obc_timestamp: csp_data.payload.obc_timestamp
:field obc_boot_count: csp_data.payload.obc_boot_count
:field obc_reset_cause: csp_data.payload.obc_reset_cause
:field eps_vbatt: csp_data.payload.eps_vbatt
:field eps_cursun: csp_data.payload.eps_cursun
:field eps_cursys: csp_data.payload.eps_cursys
:field eps_temp_bat: csp_data.payload.eps_temp_bat
:field radio_temp_pa: csp_data.payload.radio_temp_pa
:field radio_tot_tx_count: csp_data.payload.radio_tot_tx_count
:field radio_tot_rx_count: csp_data.payload.radio_tot_rx_count
:field flag: csp_data.payload.flag
:field chunk: csp_data.payload.chunk
:field time: csp_data.payload.time
:field data: csp_data.payload.data_raw.data.data_str
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.csp_header = Vzlusat2.CspHeaderT(self._io, self, self._root)
self.csp_data = Vzlusat2.CspDataT(self._io, self, self._root)
class CspHeaderT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.csp_header_raw = self._io.read_u4be()
@property
def source(self):
if hasattr(self, '_m_source'):
return self._m_source if hasattr(self, '_m_source') else None
self._m_source = ((self.csp_header_raw >> 25) & 31)
return self._m_source if hasattr(self, '_m_source') else None
@property
def source_port(self):
if hasattr(self, '_m_source_port'):
return self._m_source_port if hasattr(self, '_m_source_port') else None
self._m_source_port = ((self.csp_header_raw >> 8) & 63)
return self._m_source_port if hasattr(self, '_m_source_port') else None
@property
def destination_port(self):
if hasattr(self, '_m_destination_port'):
return self._m_destination_port if hasattr(self, '_m_destination_port') else None
self._m_destination_port = ((self.csp_header_raw >> 14) & 63)
return self._m_destination_port if hasattr(self, '_m_destination_port') else None
@property
def rdp(self):
if hasattr(self, '_m_rdp'):
return self._m_rdp if hasattr(self, '_m_rdp') else None
self._m_rdp = ((self.csp_header_raw & 2) >> 1)
return self._m_rdp if hasattr(self, '_m_rdp') else None
@property
def destination(self):
if hasattr(self, '_m_destination'):
return self._m_destination if hasattr(self, '_m_destination') else None
self._m_destination = ((self.csp_header_raw >> 20) & 31)
return self._m_destination if hasattr(self, '_m_destination') else None
@property
def priority(self):
if hasattr(self, '_m_priority'):
return self._m_priority if hasattr(self, '_m_priority') else None
self._m_priority = (self.csp_header_raw >> 30)
return self._m_priority if hasattr(self, '_m_priority') else None
@property
def reserved(self):
if hasattr(self, '_m_reserved'):
return self._m_reserved if hasattr(self, '_m_reserved') else None
self._m_reserved = ((self.csp_header_raw >> 4) & 15)
return self._m_reserved if hasattr(self, '_m_reserved') else None
@property
def xtea(self):
if hasattr(self, '_m_xtea'):
return self._m_xtea if hasattr(self, '_m_xtea') else None
self._m_xtea = ((self.csp_header_raw & 4) >> 2)
return self._m_xtea if hasattr(self, '_m_xtea') else None
@property
def hmac(self):
if hasattr(self, '_m_hmac'):
return self._m_hmac if hasattr(self, '_m_hmac') else None
self._m_hmac = ((self.csp_header_raw & 8) >> 3)
return self._m_hmac if hasattr(self, '_m_hmac') else None
@property
def crc(self):
if hasattr(self, '_m_crc'):
return self._m_crc if hasattr(self, '_m_crc') else None
self._m_crc = (self.csp_header_raw & 1)
return self._m_crc if hasattr(self, '_m_crc') else None
class CspDataT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.cmd = self._io.read_u1()
if ((self._parent.csp_header.source == 1) and (self._parent.csp_header.destination == 26) and (self._parent.csp_header.source_port == 18) and (self._parent.csp_header.destination_port == 18)) :
_on = self.cmd
if _on == 86:
self.payload = Vzlusat2.Vzlusat2BeaconT(self._io, self, self._root)
elif _on == 3:
self.payload = Vzlusat2.Vzlusat2DropT(self._io, self, self._root)
class Vzlusat2BeaconT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = self._io.read_bytes(8)
if not self.callsign == b"\x5A\x4C\x55\x53\x41\x54\x2D\x32":
raise kaitaistruct.ValidationNotEqualError(b"\x5A\x4C\x55\x53\x41\x54\x2D\x32", self.callsign, self._io, u"/types/vzlusat2_beacon_t/seq/0")
self.obc_timestamp = self._io.read_u4be()
self.obc_boot_count = self._io.read_u4be()
self.obc_reset_cause = self._io.read_u4be()
self.eps_vbatt = self._io.read_u2be()
self.eps_cursun = self._io.read_u2be()
self.eps_cursys = self._io.read_u2be()
self.eps_temp_bat = self._io.read_s2be()
self.radio_temp_pa_raw = self._io.read_s2be()
self.radio_tot_tx_count = self._io.read_u4be()
self.radio_tot_rx_count = self._io.read_u4be()
@property
def radio_temp_pa(self):
if hasattr(self, '_m_radio_temp_pa'):
return self._m_radio_temp_pa if hasattr(self, '_m_radio_temp_pa') else None
self._m_radio_temp_pa = (0.1 * self.radio_temp_pa_raw)
return self._m_radio_temp_pa if hasattr(self, '_m_radio_temp_pa') else None
class Vzlusat2DropT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.flag = self._io.read_u1()
self.chunk = self._io.read_u4be()
self.time = self._io.read_u4be()
self._raw_data_raw = self._io.read_bytes_full()
_io__raw_data_raw = KaitaiStream(BytesIO(self._raw_data_raw))
self.data_raw = Vzlusat2.Vzlusat2DropT.DataB64(_io__raw_data_raw, self, self._root)
class DataB64(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_data = self._io.read_bytes_full()
_process = satnogsdecoders.process.B64encode()
self._raw_data = _process.decode(self._raw__raw_data)
_io__raw_data = KaitaiStream(BytesIO(self._raw_data))
self.data = Vzlusat2.Vzlusat2DropT.StrB64(_io__raw_data, self, self._root)
class StrB64(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.data_str = (self._io.read_bytes_full()).decode(u"ASCII")
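# --- CSP header bit-layout check (added for illustration) ---
# The CspHeaderT properties above unpack one big-endian 32-bit CSP header
# word; a self-contained sanity check with an arbitrary synthetic header:
#
#     hdr = (2 << 30) | (1 << 25) | (26 << 20) | (18 << 14) | (18 << 8) | 1
#     csp = Vzlusat2.CspHeaderT(KaitaiStream(BytesIO(hdr.to_bytes(4, 'big'))))
#     assert (csp.priority, csp.source, csp.destination) == (2, 1, 26)
#     assert (csp.destination_port, csp.source_port, csp.crc) == (18, 18, 1)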
# File: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/vzlusat2.py (source: pypi)
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Azaadisat2(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field rpt_callsign: ax25_frame.ax25_header.repeater.rpt_instance[0].rpt_callsign_raw.callsign_ror.callsign
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field call_sign: ax25_frame.payload.ax25_info.header.call_sign
:field frame_number: ax25_frame.payload.ax25_info.header.frame_number
:field message_type: ax25_frame.payload.ax25_info.header.message_type
:field transmitted_on: ax25_frame.payload.ax25_info.header.transmitted_on
:field boot_counter: ax25_frame.payload.ax25_info.data.boot_counter
:field deployment_status: ax25_frame.payload.ax25_info.data.deployment_status
:field arm_deployment_percentage: ax25_frame.payload.ax25_info.data.arm_deployment_percentage
:field expansion_deployment_percentage: ax25_frame.payload.ax25_info.data.expansion_deployment_percentage
:field obc_temperature: ax25_frame.payload.ax25_info.data.obc_temperature
:field bus_voltage: ax25_frame.payload.ax25_info.data.bus_voltage
:field bus_current: ax25_frame.payload.ax25_info.data.bus_current
:field battery_temperature: ax25_frame.payload.ax25_info.data.battery_temperature
:field radiation: ax25_frame.payload.ax25_info.data.radiation
:field checksum: ax25_frame.payload.ax25_info.data.checksum
:field message_slot: ax25_frame.payload.ax25_info.data.message_slot
:field size: ax25_frame.payload.ax25_info.data.size
:field message: ax25_frame.payload.ax25_info.data.message
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Azaadisat2.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Azaadisat2.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Azaadisat2.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Azaadisat2.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Azaadisat2.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Azaadisat2.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Azaadisat2.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Azaadisat2.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Azaadisat2.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Azaadisat2.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Azaadisat2.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Azaadisat2.SsidMask(self._io, self, self._root)
if (self.src_ssid_raw.ssid_mask & 1) == 0:
self.repeater = Azaadisat2.Repeater(self._io, self, self._root)
self.ctl = self._io.read_u1()
class SfMessageT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.message_slot = self._io.read_u1()
self.size = self._io.read_u1()
self.message = (self._io.read_bytes(self.size)).decode(u"ASCII")
self.checksum = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Azaadisat2.PayloadT(_io__raw_ax25_info, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
if not ((self.callsign == u"SKITRC") or (self.callsign == u"AZDSAT")) :
raise kaitaistruct.ValidationNotAnyOfError(self.callsign, self._io, u"/types/callsign/seq/0")
class SatelliteInfoT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.boot_counter = self._io.read_u2le()
self.deployment_status = self._io.read_u1()
self.arm_deployment_percentage = self._io.read_u1()
self.expansion_deployment_percentage = self._io.read_u1()
self.obc_temperature = self._io.read_f4le()
self.bus_voltage = self._io.read_f4le()
self.bus_current = self._io.read_f4le()
self.battery_temperature = self._io.read_f4le()
self.radiation = self._io.read_f4le()
self.checksum = self._io.read_u1()
class HeaderT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.call_sign = (self._io.read_bytes(6)).decode(u"ASCII")
self.frame_number = self._io.read_u1()
self.message_type = self._io.read_u1()
self.transmitted_on = self._io.read_u1()
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Azaadisat2.PayloadT(_io__raw_ax25_info, self, self._root)
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class PayloadT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.header = Azaadisat2.HeaderT(self._io, self, self._root)
_on = self.header.message_type
if _on == 1:
self.data = Azaadisat2.SatelliteInfoT(self._io, self, self._root)
elif _on == 2:
self.data = Azaadisat2.SfMessageT(self._io, self, self._root)
class Repeaters(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_callsign_raw = Azaadisat2.CallsignRaw(self._io, self, self._root)
self.rpt_ssid_raw = Azaadisat2.SsidMask(self._io, self, self._root)
class Repeater(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_instance = []
i = 0
while True:
_ = Azaadisat2.Repeaters(self._io, self, self._root)
self.rpt_instance.append(_)
if (_.rpt_ssid_raw.ssid_mask & 1) == 1:
break
i += 1
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Azaadisat2.Callsign(_io__raw_callsign_ror, self, self._root)
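# --- Usage sketch (added; not part of the generated decoder) ---
# PayloadT selects the body from header.message_type: 1 parses as
# SatelliteInfoT housekeeping, 2 as an SfMessageT store-and-forward text.
# A hedged example, with `raw_frame` a hypothetical received frame:
#
#     from satnogsdecoders.decoder.azaadisat2 import Azaadisat2
#
#     info = Azaadisat2.from_bytes(raw_frame).ax25_frame.payload.ax25_info
#     if info.header.message_type == 1:
#         print(info.data.bus_voltage, info.data.battery_temperature)
#     elif info.header.message_type == 2:
#         print(info.data.message_slot, info.data.message)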
# File: /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/azaadisat2.py (source: pypi)
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Lightsail2(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field src_ip_addr: ax25_frame.payload.ax25_info.src_ip_addr
:field dst_ip_addr: ax25_frame.payload.ax25_info.dst_ip_addr
:field src_port: ax25_frame.payload.ax25_info.body.src_port
:field dst_port: ax25_frame.payload.ax25_info.body.dst_port
:field type: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.type
:field daughter_atmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.daughter_atmp
:field daughter_btmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.daughter_btmp
:field threev_pltmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.threev_pltmp
:field rf_amptmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.rf_amptmp
:field nx_tmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nx_tmp
:field px_tmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.px_tmp
:field ny_tmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.ny_tmp
:field py_tmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.py_tmp
:field nz_tmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nz_tmp
:field pz_tmp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.pz_tmp
:field atmelpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.atmelpwrcurr
:field atmelpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.atmelpwrbusv
:field threev_pwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.threev_pwrcurr
:field threev_pwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.threev_pwrbusv
:field threev_plpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.threev_plpwrcurr
:field threev_plpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.threev_plpwrbusv
:field fivev_plpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.fivev_plpwrcurr
:field fivev_plpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.fivev_plpwrbusv
:field daughter_apwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.daughter_apwrcurr
:field daughter_apwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.daughter_apwrbusv
:field daughter_bpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.daughter_bpwrcurr
:field daughter_bpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.daughter_bpwrbusv
:field nx_intpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nx_intpwrcurr
:field nx_intpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nx_intpwrbusv
:field nx_extpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nx_extpwrcurr
:field nx_extpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nx_extpwrbusv
:field px_intpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.px_intpwrcurr
:field px_intpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.px_intpwrbusv
:field px_extpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.px_extpwrcurr
:field px_extpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.px_extpwrbusv
:field ny_intpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.ny_intpwrcurr
:field ny_intpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.ny_intpwrbusv
:field ny_extpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.ny_extpwrcurr
:field ny_extpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.ny_extpwrbusv
:field py_intpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.py_intpwrcurr
:field py_intpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.py_intpwrbusv
:field py_extpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.py_extpwrcurr
:field py_extpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.py_extpwrbusv
:field nz_extpwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nz_extpwrcurr
:field nz_extpwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nz_extpwrbusv
:field usercputime: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.usercputime
:field syscputime: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.syscputime
:field idlecputime: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.idlecputime
:field processes: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.processes
:field memfree: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.memfree
:field buffers: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.buffers
:field cached: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.cached
:field datafree: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.datafree
:field nanderasures: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.nanderasures
:field beaconcnt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.beaconcnt
:field time: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.time
:field boottime: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.boottime
:field long_dur_counter: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sys.long_dur_counter
    :field batt_pwr_draw: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.batt_pwr_draw
:field adcs_mode: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.adcs_mode
:field flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.flags
:field q0_act: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.q0_act
:field q1_act: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.q1_act
:field q2_act: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.q2_act
:field q3_act: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.q3_act
:field x_rate: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.x_rate
:field y_rate: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.y_rate
:field z_rate: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.z_rate
:field gyro_px: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.gyro_pxy.val_a
:field gyro_py: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.gyro_pxy.val_b
:field gyro_iz: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.gyro_piz.val_a
:field gyro_pz: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.gyro_piz.val_b
:field gyro_ix: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.gyro_ixy.val_a
:field gyro_iy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.gyro_ixy.val_b
:field sol_nxx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_nxx
:field sol_nxy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_nxy
:field sol_nyx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_nyx
:field sol_nyy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_nyy
:field sol_nzx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_nzx
:field sol_nzy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_nzy
:field sol_pxx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_pxx
:field sol_pxy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_pxy
:field sol_pyx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_pyx
:field sol_pyy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.sol_pyy
:field mag_nxx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_nxxy.val_a
:field mag_nxy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_nxxy.val_b
:field mag_nxz: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_npxz.val_a
:field mag_pxz: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_npxz.val_b
:field mag_pxx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_pxxy.val_a
:field mag_pxy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_pxxy.val_b
:field mag_nyz: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_npyz.val_a
:field mag_pyz: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_npyz.val_b
:field mag_pyx: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_pyxy.val_a
:field mag_pyy: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.mag_pyxy.val_b
:field wheel_rpm: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.wheel_rpm
:field cam0_status: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam0.status
:field cam0_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam0.temp
:field cam0_last_contact: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam0.last_contact
:field cam0_pics_remaining: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam0.pics_remaining
:field cam0_retry_fails: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam0.retry_fails
:field cam1_status: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam1.status
:field cam1_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam1.temp
:field cam1_last_contact: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam1.last_contact
:field cam1_pics_remaining: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam1.pics_remaining
:field cam1_retry_fails: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.cam1.retry_fails
:field torqx_pwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.torqx_pwrcurr
:field torqx_pwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.torqx_pwrbusv
:field torqy_pwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.torqy_pwrcurr
:field torqy_pwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.torqy_pwrbusv
:field torqz_pwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.torqz_pwrcurr
:field torqz_pwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.torqz_pwrbusv
:field motor_pwrcurr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.motor_pwrcurr
:field motor_pwrbusv: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.motor_pwrbusv
:field pic_panel_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.pic_panel_flags
:field motor_cnt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.motor_cnt
:field motor_limit: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.motor_limit
:field bat0_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat0.curr
:field bat0_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat0.volt
:field bat0_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat0.temp
:field bat0_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat0.flags
:field bat0_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat0.ctlflags
:field bat1_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat1.curr
:field bat1_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat1.volt
:field bat1_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat1.temp
:field bat1_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat1.flags
:field bat1_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat1.ctlflags
:field bat2_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat2.curr
:field bat2_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat2.volt
:field bat2_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat2.temp
:field bat2_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat2.flags
:field bat2_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat2.ctlflags
:field bat3_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat3.curr
:field bat3_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat3.volt
:field bat3_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat3.temp
:field bat3_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat3.flags
:field bat3_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat3.ctlflags
:field bat4_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat4.curr
:field bat4_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat4.volt
:field bat4_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat4.temp
:field bat4_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat4.flags
:field bat4_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat4.ctlflags
:field bat5_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat5.curr
:field bat5_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat5.volt
:field bat5_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat5.temp
:field bat5_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat5.flags
:field bat5_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat5.ctlflags
:field bat6_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat6.curr
:field bat6_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat6.volt
:field bat6_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat6.temp
:field bat6_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat6.flags
:field bat6_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat6.ctlflags
:field bat7_curr: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat7.curr
:field bat7_volt: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat7.volt
:field bat7_temp: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat7.temp
:field bat7_flags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat7.flags
:field bat7_ctlflags: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.bat7.ctlflags
:field comm_rxcount: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.comm.rxcount
:field comm_txcount: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.comm.txcount
:field comm_rxbytes: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.comm.rxbytes
:field comm_txbytes: ax25_frame.payload.ax25_info.body.body.lsb_beacondata.comm.txbytes
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Lightsail2.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Lightsail2.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Lightsail2.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Lightsail2.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Lightsail2.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Lightsail2.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Lightsail2.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Lightsail2.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Lightsail2.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Lightsail2.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Lightsail2.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Lightsail2.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
_on = self.pid
if _on == 204:
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Lightsail2.Ipv4Pkt(_io__raw_ax25_info, self, self._root)
elif _on == 240:
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Lightsail2.NoneL3(_io__raw_ax25_info, self, self._root)
else:
self.ax25_info = self._io.read_bytes_full()
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ascii")
class LsbBatterydataT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.curr = self._io.read_s1()
self.volt = self._io.read_u1()
self.temp = self._io.read_u1()
self.flags = self._io.read_u1()
self.ctlflags = self._io.read_u1()
class Packedsigned2x12T(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.a_high = self._io.read_s1()
self.b_high = self._io.read_s1()
self.ab_low = self._io.read_u1()
@property
def val_a(self):
if hasattr(self, '_m_val_a'):
return self._m_val_a if hasattr(self, '_m_val_a') else None
self._m_val_a = ((self.a_high << 4) | (self.ab_low & 15))
return self._m_val_a if hasattr(self, '_m_val_a') else None
@property
def val_b(self):
if hasattr(self, '_m_val_b'):
return self._m_val_b if hasattr(self, '_m_val_b') else None
            self._m_val_b = ((self.b_high << 4) | ((self.ab_low & 240) >> 4))
return self._m_val_b if hasattr(self, '_m_val_b') else None
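# --- Packing example (added for illustration) ---
# Given the accessors above, two signed 12-bit values share three bytes:
# the high 8 bits of each value sit in a_high/b_high, while ab_low holds
# value A's low nibble in bits 3..0 and value B's in bits 7..4. E.g. for
# a = 0x123 and b = -0x123 (0xedd as 12-bit two's complement):
#
#     a_high, b_high, ab_low = 0x12, -0x13, 0xd3  # s1 reads byte 0xed as -19
#     assert (a_high << 4) | (ab_low & 15) == 0x123
#     assert (b_high << 4) | ((ab_low & 240) >> 4) == -0x123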
class UdpPayload(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.lsb_beacondata = Lightsail2.LsbBeacondataT(self._io, self, self._root)
class Ipv4Options(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.entries = []
i = 0
while not self._io.is_eof():
self.entries.append(Lightsail2.Ipv4Option(self._io, self, self._root))
i += 1
class NoNextHeader(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
pass
class Ipv6Pkt(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.version = self._io.read_bits_int_be(4)
self.traffic_class = self._io.read_bits_int_be(8)
self.flow_label = self._io.read_bits_int_be(20)
self._io.align_to_byte()
self.payload_length = self._io.read_u2be()
self.next_header_type = self._io.read_u1()
self.hop_limit = self._io.read_u1()
self.src_ipv6_addr = self._io.read_bytes(16)
self.dst_ipv6_addr = self._io.read_bytes(16)
_on = self.next_header_type
if _on == 17:
self.next_header = Lightsail2.UdpDtgrm(self._io, self, self._root)
elif _on == 0:
self.next_header = Lightsail2.OptionHopByHop(self._io, self, self._root)
elif _on == 4:
self.next_header = Lightsail2.Ipv4Pkt(self._io, self, self._root)
elif _on == 6:
self.next_header = Lightsail2.TcpSegm(self._io, self, self._root)
elif _on == 59:
self.next_header = Lightsail2.NoNextHeader(self._io, self, self._root)
self.rest = self._io.read_bytes_full()
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class CamerainfoT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.status = self._io.read_u1()
self.temp = self._io.read_u1()
self.last_contact = self._io.read_u1()
self.pics_remaining = self._io.read_bits_int_be(6)
self.retry_fails = self._io.read_bits_int_be(2)
class LsbSysmgrdataT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.type = self._io.read_u1()
self.daughter_atmp = self._io.read_u1()
self.daughter_btmp = self._io.read_u1()
self.threev_pltmp = self._io.read_u1()
self.rf_amptmp = self._io.read_u1()
self.nx_tmp = self._io.read_u1()
self.px_tmp = self._io.read_u1()
self.ny_tmp = self._io.read_u1()
self.py_tmp = self._io.read_u1()
self.nz_tmp = self._io.read_u1()
self.pz_tmp = self._io.read_u1()
self.atmelpwrcurr = self._io.read_u1()
self.atmelpwrbusv = self._io.read_u1()
self.threev_pwrcurr = self._io.read_u1()
self.threev_pwrbusv = self._io.read_u1()
self.threev_plpwrcurr = self._io.read_u1()
self.threev_plpwrbusv = self._io.read_u1()
self.fivev_plpwrcurr = self._io.read_u1()
self.fivev_plpwrbusv = self._io.read_u1()
self.daughter_apwrcurr = self._io.read_u1()
self.daughter_apwrbusv = self._io.read_u1()
self.daughter_bpwrcurr = self._io.read_u1()
self.daughter_bpwrbusv = self._io.read_u1()
self.nx_intpwrcurr = self._io.read_u1()
self.nx_intpwrbusv = self._io.read_u1()
self.nx_extpwrcurr = self._io.read_u1()
self.nx_extpwrbusv = self._io.read_u1()
self.px_intpwrcurr = self._io.read_u1()
self.px_intpwrbusv = self._io.read_u1()
self.px_extpwrcurr = self._io.read_u1()
self.px_extpwrbusv = self._io.read_u1()
self.ny_intpwrcurr = self._io.read_u1()
self.ny_intpwrbusv = self._io.read_u1()
self.ny_extpwrcurr = self._io.read_u1()
self.ny_extpwrbusv = self._io.read_u1()
self.py_intpwrcurr = self._io.read_u1()
self.py_intpwrbusv = self._io.read_u1()
self.py_extpwrcurr = self._io.read_u1()
self.py_extpwrbusv = self._io.read_u1()
self.nz_extpwrcurr = self._io.read_u1()
self.nz_extpwrbusv = self._io.read_u1()
self.usercputime = self._io.read_u4be()
self.syscputime = self._io.read_u4be()
self.idlecputime = self._io.read_u4be()
self.processes = self._io.read_u4be()
self.memfree = self._io.read_u4be()
self.buffers = self._io.read_u4be()
self.cached = self._io.read_u4be()
self.datafree = self._io.read_u4be()
self.nanderasures = self._io.read_u4be()
self.beaconcnt = self._io.read_u2be()
self.time = self._io.read_u4be()
self.boottime = self._io.read_u4be()
self.long_dur_counter = self._io.read_u2be()
class Ipv4Pkt(KaitaiStruct):
class ProtocolEnum(Enum):
hopopt = 0
icmp = 1
igmp = 2
ggp = 3
ipv4 = 4
st = 5
tcp = 6
cbt = 7
egp = 8
igp = 9
bbn_rcc_mon = 10
nvp_ii = 11
pup = 12
argus = 13
emcon = 14
xnet = 15
chaos = 16
udp = 17
mux = 18
dcn_meas = 19
hmp = 20
prm = 21
xns_idp = 22
trunk_1 = 23
trunk_2 = 24
leaf_1 = 25
leaf_2 = 26
rdp = 27
irtp = 28
iso_tp4 = 29
netblt = 30
mfe_nsp = 31
merit_inp = 32
dccp = 33
x_3pc = 34
idpr = 35
xtp = 36
ddp = 37
idpr_cmtp = 38
tp_plus_plus = 39
il = 40
ipv6 = 41
sdrp = 42
ipv6_route = 43
ipv6_frag = 44
idrp = 45
rsvp = 46
gre = 47
dsr = 48
bna = 49
esp = 50
ah = 51
i_nlsp = 52
swipe = 53
narp = 54
mobile = 55
tlsp = 56
skip = 57
ipv6_icmp = 58
ipv6_nonxt = 59
ipv6_opts = 60
any_host_internal_protocol = 61
cftp = 62
any_local_network = 63
sat_expak = 64
kryptolan = 65
rvd = 66
ippc = 67
any_distributed_file_system = 68
sat_mon = 69
visa = 70
ipcv = 71
cpnx = 72
cphb = 73
wsn = 74
pvp = 75
br_sat_mon = 76
sun_nd = 77
wb_mon = 78
wb_expak = 79
iso_ip = 80
vmtp = 81
secure_vmtp = 82
vines = 83
ttp = 84
nsfnet_igp = 85
dgp = 86
tcf = 87
eigrp = 88
ospfigp = 89
sprite_rpc = 90
larp = 91
mtp = 92
ax_25 = 93
ipip = 94
micp = 95
scc_sp = 96
etherip = 97
encap = 98
any_private_encryption_scheme = 99
gmtp = 100
ifmp = 101
pnni = 102
pim = 103
aris = 104
scps = 105
qnx = 106
a_n = 107
ipcomp = 108
snp = 109
compaq_peer = 110
ipx_in_ip = 111
vrrp = 112
pgm = 113
any_0_hop = 114
l2tp = 115
ddx = 116
iatp = 117
stp = 118
srp = 119
uti = 120
smp = 121
sm = 122
ptp = 123
isis_over_ipv4 = 124
fire = 125
crtp = 126
crudp = 127
sscopmce = 128
iplt = 129
sps = 130
pipe = 131
sctp = 132
fc = 133
rsvp_e2e_ignore = 134
mobility_header = 135
udplite = 136
mpls_in_ip = 137
manet = 138
hip = 139
shim6 = 140
wesp = 141
rohc = 142
reserved_255 = 255
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.b1 = self._io.read_u1()
self.b2 = self._io.read_u1()
self.total_length = self._io.read_u2be()
self.identification = self._io.read_u2be()
self.b67 = self._io.read_u2be()
self.ttl = self._io.read_u1()
self.protocol = KaitaiStream.resolve_enum(Lightsail2.Ipv4Pkt.ProtocolEnum, self._io.read_u1())
self.header_checksum = self._io.read_u2be()
self.src_ip_addr = self._io.read_u4be()
self.dst_ip_addr = self._io.read_u4be()
self._raw_options = self._io.read_bytes((self.ihl_bytes - 20))
_io__raw_options = KaitaiStream(BytesIO(self._raw_options))
self.options = Lightsail2.Ipv4Options(_io__raw_options, self, self._root)
_on = self.protocol
if _on == Lightsail2.Ipv4Pkt.ProtocolEnum.udp:
self._raw_body = self._io.read_bytes((self.total_length - self.ihl_bytes))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = Lightsail2.UdpDtgrm(_io__raw_body, self, self._root)
elif _on == Lightsail2.Ipv4Pkt.ProtocolEnum.icmp:
self._raw_body = self._io.read_bytes((self.total_length - self.ihl_bytes))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = Lightsail2.IcmpPkt(_io__raw_body, self, self._root)
elif _on == Lightsail2.Ipv4Pkt.ProtocolEnum.ipv6:
self._raw_body = self._io.read_bytes((self.total_length - self.ihl_bytes))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = Lightsail2.Ipv6Pkt(_io__raw_body, self, self._root)
elif _on == Lightsail2.Ipv4Pkt.ProtocolEnum.tcp:
self._raw_body = self._io.read_bytes((self.total_length - self.ihl_bytes))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = Lightsail2.TcpSegm(_io__raw_body, self, self._root)
else:
self.body = self._io.read_bytes((self.total_length - self.ihl_bytes))
@property
def version(self):
if hasattr(self, '_m_version'):
return self._m_version if hasattr(self, '_m_version') else None
self._m_version = ((self.b1 & 240) >> 4)
return self._m_version if hasattr(self, '_m_version') else None
@property
def ihl(self):
if hasattr(self, '_m_ihl'):
return self._m_ihl if hasattr(self, '_m_ihl') else None
self._m_ihl = (self.b1 & 15)
return self._m_ihl if hasattr(self, '_m_ihl') else None
@property
def ihl_bytes(self):
if hasattr(self, '_m_ihl_bytes'):
return self._m_ihl_bytes if hasattr(self, '_m_ihl_bytes') else None
self._m_ihl_bytes = (self.ihl * 4)
return self._m_ihl_bytes if hasattr(self, '_m_ihl_bytes') else None
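# Worked example (added): for a typical option-less IPv4 header the first
# byte b1 is 0x45, so version = (0x45 & 240) >> 4 = 4, ihl = 0x45 & 15 = 5,
# ihl_bytes = 5 * 4 = 20, and the options slice above reads 0 bytes.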
class TcpSegm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.src_port = self._io.read_u2be()
self.dst_port = self._io.read_u2be()
self.seq_num = self._io.read_u4be()
self.ack_num = self._io.read_u4be()
self.b12 = self._io.read_u1()
self.b13 = self._io.read_u1()
self.window_size = self._io.read_u2be()
self.checksum = self._io.read_u2be()
self.urgent_pointer = self._io.read_u2be()
self.body = self._io.read_bytes_full()
class NoneL3(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.data = self._io.read_bytes_full()
class LsbBeacondataT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.sys = Lightsail2.LsbSysmgrdataT(self._io, self, self._root)
self.comm = Lightsail2.LsbCommdataT(self._io, self, self._root)
self.bat0 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat1 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat2 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat3 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat4 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat5 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat6 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.bat7 = Lightsail2.LsbBatterydataT(self._io, self, self._root)
self.batt_pwr_draw = self._io.read_s2be()
self.adcs_mode = self._io.read_u1()
self.flags = self._io.read_u1()
self.q0_act = self._io.read_s2be()
self.q1_act = self._io.read_s2be()
self.q2_act = self._io.read_s2be()
self.q3_act = self._io.read_s2be()
self.x_rate = self._io.read_s2be()
self.y_rate = self._io.read_s2be()
self.z_rate = self._io.read_s2be()
self.gyro_pxy = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.gyro_piz = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.gyro_ixy = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.sol_nxx = self._io.read_u2be()
self.sol_nxy = self._io.read_u2be()
self.sol_nyx = self._io.read_u2be()
self.sol_nyy = self._io.read_u2be()
self.sol_nzx = self._io.read_u2be()
self.sol_nzy = self._io.read_u2be()
self.sol_pxx = self._io.read_u2be()
self.sol_pxy = self._io.read_u2be()
self.sol_pyx = self._io.read_u2be()
self.sol_pyy = self._io.read_u2be()
self.mag_nxxy = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.mag_npxz = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.mag_pxxy = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.mag_npyz = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.mag_pyxy = Lightsail2.Packedsigned2x12T(self._io, self, self._root)
self.wheel_rpm = self._io.read_s2be()
self.cam0 = Lightsail2.CamerainfoT(self._io, self, self._root)
self.cam1 = Lightsail2.CamerainfoT(self._io, self, self._root)
self.torqx_pwrcurr = self._io.read_u1()
self.torqx_pwrbusv = self._io.read_s1()
self.torqy_pwrcurr = self._io.read_u1()
self.torqy_pwrbusv = self._io.read_s1()
self.torqz_pwrcurr = self._io.read_u1()
self.torqz_pwrbusv = self._io.read_s1()
self.motor_pwrcurr = self._io.read_u1()
self.motor_pwrbusv = self._io.read_u1()
self.pic_panel_flags = self._io.read_u1()
self.motor_cnt_high = self._io.read_s1()
self.motor_cnt_low = self._io.read_u2be()
self.motor_limit_high = self._io.read_s1()
self.motor_limit_low = self._io.read_u2be()
@property
def motor_cnt(self):
if hasattr(self, '_m_motor_cnt'):
return self._m_motor_cnt if hasattr(self, '_m_motor_cnt') else None
self._m_motor_cnt = ((self.motor_cnt_high << 16) | self.motor_cnt_low)
return self._m_motor_cnt if hasattr(self, '_m_motor_cnt') else None
@property
def motor_limit(self):
if hasattr(self, '_m_motor_limit'):
return self._m_motor_limit if hasattr(self, '_m_motor_limit') else None
self._m_motor_limit = ((self.motor_limit_high << 16) | self.motor_limit_low)
return self._m_motor_limit if hasattr(self, '_m_motor_limit') else None
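# Note on the split 24-bit counters above (a sketch of the arithmetic, with
# made-up values): motor_cnt_high is a signed byte and motor_cnt_low an
# unsigned 16-bit word, so (high << 16) | low yields a signed 24-bit count
# thanks to Python's arbitrary-precision two's-complement semantics:
#   high = -1 (0xFF), low = 0xFFFF  ->  ((-1) << 16) | 0xFFFF == -1
#   high =  1,        low = 0x0002  ->  ((1) << 16)  | 0x0002 == 65538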
class UdpDtgrm(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.src_port = self._io.read_u2be()
self.dst_port = self._io.read_u2be()
self.length = self._io.read_u2be()
self.checksum = self._io.read_u2be()
self._raw_body = self._io.read_bytes_full()
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = Lightsail2.UdpPayload(_io__raw_body, self, self._root)
class Packedunsigned2x12T(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.a_high = self._io.read_u1()
self.b_high = self._io.read_u1()
self.ab_low = self._io.read_u1()
@property
def val_a(self):
if hasattr(self, '_m_val_a'):
return self._m_val_a if hasattr(self, '_m_val_a') else None
self._m_val_a = ((self.a_high << 4) | (self.ab_low & 15))
return self._m_val_a if hasattr(self, '_m_val_a') else None
@property
def val_b(self):
if hasattr(self, '_m_val_b'):
return self._m_val_b if hasattr(self, '_m_val_b') else None
self._m_val_b = ((self.b_high << 4) | ((self.ab_low & 240) >> 4))
return self._m_val_b if hasattr(self, '_m_val_b') else None
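# Worked example for the two-values-in-three-bytes packing above (byte
# values are hypothetical): a_high/b_high carry the top 8 bits of each
# 12-bit value and ab_low carries both low nibbles (a in bits 0-3, b in
# bits 4-7):
#   a_high = 0x12, b_high = 0x34, ab_low = 0xAB
#   val_a = (0x12 << 4) | (0xAB & 0x0F)         == 0x12B
#   val_b = (0x34 << 4) | ((0xAB & 0xF0) >> 4)  == 0x34A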
class DestinationUnreachableMsg(KaitaiStruct):
class DestinationUnreachableCode(Enum):
net_unreachable = 0
host_unreachable = 1
protocol_unreachable = 2
port_unreachable = 3
fragmentation_needed_and_df_set = 4
source_route_failed = 5
dst_net_unknown = 6
dst_host_unknown = 7
src_isolated = 8
net_prohibited_by_admin = 9
host_prohibited_by_admin = 10
net_unreachable_for_tos = 11
host_unreachable_for_tos = 12
communication_prohibited_by_admin = 13
host_precedence_violation = 14
precedence_cutoff_in_effect = 15
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.code = KaitaiStream.resolve_enum(Lightsail2.DestinationUnreachableMsg.DestinationUnreachableCode, self._io.read_u1())
self.checksum = self._io.read_u2be()
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Lightsail2.Callsign(_io__raw_callsign_ror, self, self._root)
class LsbCommdataT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rxcount = self._io.read_u2be()
self.txcount = self._io.read_u2be()
self.rxbytes = self._io.read_u4be()
self.txbytes = self._io.read_u4be()
class OptionHopByHop(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.next_header_type = self._io.read_u1()
self.hdr_ext_len = self._io.read_u1()
self.body = self._io.read_bytes((self.hdr_ext_len - 1))
_on = self.next_header_type
if _on == 0:
self.next_header = Lightsail2.OptionHopByHop(self._io, self, self._root)
elif _on == 6:
self.next_header = Lightsail2.TcpSegm(self._io, self, self._root)
elif _on == 59:
self.next_header = Lightsail2.NoNextHeader(self._io, self, self._root)
class EchoMsg(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.code = self._io.read_bytes(1)
if not self.code == b"\x00":
raise kaitaistruct.ValidationNotEqualError(b"\x00", self.code, self._io, u"/types/echo_msg/seq/0")
self.checksum = self._io.read_u2be()
self.identifier = self._io.read_u2be()
self.seq_num = self._io.read_u2be()
self.data = self._io.read_bytes_full()
class TimeExceededMsg(KaitaiStruct):
class TimeExceededCode(Enum):
time_to_live_exceeded_in_transit = 0
fragment_reassembly_time_exceeded = 1
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.code = KaitaiStream.resolve_enum(Lightsail2.TimeExceededMsg.TimeExceededCode, self._io.read_u1())
self.checksum = self._io.read_u2be()
class IcmpPkt(KaitaiStruct):
class IcmpTypeEnum(Enum):
echo_reply = 0
destination_unreachable = 3
source_quench = 4
redirect = 5
echo = 8
time_exceeded = 11
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.icmp_type = KaitaiStream.resolve_enum(Lightsail2.IcmpPkt.IcmpTypeEnum, self._io.read_u1())
if self.icmp_type == Lightsail2.IcmpPkt.IcmpTypeEnum.destination_unreachable:
self.destination_unreachable = Lightsail2.DestinationUnreachableMsg(self._io, self, self._root)
if self.icmp_type == Lightsail2.IcmpPkt.IcmpTypeEnum.time_exceeded:
self.time_exceeded = Lightsail2.TimeExceededMsg(self._io, self, self._root)
if ((self.icmp_type == Lightsail2.IcmpPkt.IcmpTypeEnum.echo) or (self.icmp_type == Lightsail2.IcmpPkt.IcmpTypeEnum.echo_reply)) :
self.echo = Lightsail2.EchoMsg(self._io, self, self._root)
class Ipv4Option(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.b1 = self._io.read_u1()
self.len = self._io.read_u1()
self.body = self._io.read_bytes(((self.len - 2) if self.len > 2 else 0))
@property
def copy(self):
if hasattr(self, '_m_copy'):
return self._m_copy if hasattr(self, '_m_copy') else None
self._m_copy = ((self.b1 & 128) >> 7)
return self._m_copy if hasattr(self, '_m_copy') else None
@property
def opt_class(self):
if hasattr(self, '_m_opt_class'):
return self._m_opt_class if hasattr(self, '_m_opt_class') else None
self._m_opt_class = ((self.b1 & 96) >> 5)
return self._m_opt_class if hasattr(self, '_m_opt_class') else None
@property
def number(self):
if hasattr(self, '_m_number'):
return self._m_number if hasattr(self, '_m_number') else None
self._m_number = (self.b1 & 31)
return self._m_number if hasattr(self, '_m_number') else None
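# Worked example for the ipv4_option flag decoding above (the option byte is
# hypothetical): for b1 = 0x83 (copied flag set, control class, option 3,
# i.e. loose source route):
#   copy      = (0x83 & 128) >> 7  == 1
#   opt_class = (0x83 & 96) >> 5   == 0
#   number    =  0x83 & 31         == 3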
/satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/lightsail2.py
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Connectat11(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.ax25_header.pid
:field beacon_id: ax25_frame.beacon.beacon_header.beacon_id
:field rx_packet_cnt: ax25_frame.beacon.beacon_data.rx_packet_cnt
:field tmtc_temperature1: ax25_frame.beacon.beacon_data.tmtc_temperature1
:field tmtc_temperature2: ax25_frame.beacon.beacon_data.tmtc_temperature2
:field mppt_converter_voltage_1: ax25_frame.beacon.beacon_data.mppt_converter_voltage_1
:field mppt_converter_voltage_2: ax25_frame.beacon.beacon_data.mppt_converter_voltage_2
:field mppt_converter_voltage_3: ax25_frame.beacon.beacon_data.mppt_converter_voltage_3
:field mppt_converter_voltage_4: ax25_frame.beacon.beacon_data.mppt_converter_voltage_4
:field panel1_current: ax25_frame.beacon.beacon_data.panel1_current
:field panel3_current: ax25_frame.beacon.beacon_data.panel3_current
:field panel2_current: ax25_frame.beacon.beacon_data.panel2_current
:field panel5_current: ax25_frame.beacon.beacon_data.panel5_current
:field panel6_current: ax25_frame.beacon.beacon_data.panel6_current
:field panel4_current: ax25_frame.beacon.beacon_data.panel4_current
:field vbat: ax25_frame.beacon.beacon_data.vbat
:field eps_temperature1: ax25_frame.beacon.beacon_data.eps_temperature1
:field eps_temperature2: ax25_frame.beacon.beacon_data.eps_temperature2
:field eps_temperature3: ax25_frame.beacon.beacon_data.eps_temperature3
:field eps_temperature4: ax25_frame.beacon.beacon_data.eps_temperature4
:field obc_unix_time: ax25_frame.beacon.beacon_data.obc_unix_time
:field obc_boot_time: ax25_frame.beacon.beacon_data.obc_boot_time
:field obc_boot_count: ax25_frame.beacon.beacon_data.obc_boot_count
:field panel1_temperature: ax25_frame.beacon.beacon_data.panel1_temperature
:field panel2_temperature: ax25_frame.beacon.beacon_data.panel2_temperature
:field panel3_temperature: ax25_frame.beacon.beacon_data.panel3_temperature
:field panel4_temperature: ax25_frame.beacon.beacon_data.panel4_temperature
:field panel5_temperature: ax25_frame.beacon.beacon_data.panel5_temperature
:field panel6_temperature: ax25_frame.beacon.beacon_data.panel6_temperature
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Connectat11.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Connectat11.Ax25Header(self._io, self, self._root)
self.beacon = Connectat11.BeaconT(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Connectat11.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Connectat11.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Connectat11.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Connectat11.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
self.pid = self._io.read_u1()
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"utf-8")
if not ((self.callsign == u"CONT11") or (self.callsign == u"PLANS1")) :
raise kaitaistruct.ValidationNotAnyOfError(self.callsign, self._io, u"/types/callsign/seq/0")
class BeaconHeaderT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.beacon_preamble = self._io.read_u4le()
self.beacon_preamble1 = self._io.read_u1()
self.beacon_id = self._io.read_u1()
self.beacon_padding = self._io.read_u8le()
self.beacon_padding1 = self._io.read_u2le()
class BeaconT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.beacon_header = Connectat11.BeaconHeaderT(self._io, self, self._root)
if self.beacon_header.beacon_id == 3:
self.beacon_data = Connectat11.BeaconDataT(self._io, self, self._root)
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class BeaconDataT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rx_packet_cnt = self._io.read_u2le()
self.data0001 = self._io.read_u2le()
self.tmtc_temperature1 = self._io.read_s1()
self.tmtc_temperature2 = self._io.read_s1()
self.data0002 = self._io.read_u8le()
self.data0003 = self._io.read_u4le()
self.data0004 = self._io.read_u4le()
self.data0005 = self._io.read_u4le()
self.data0006 = self._io.read_u4le()
self.data0007 = self._io.read_u4le()
self.mppt_converter_voltage_1 = self._io.read_u2le()
self.mppt_converter_voltage_2 = self._io.read_u2le()
self.mppt_converter_voltage_3 = self._io.read_u2le()
self.mppt_converter_voltage_4 = self._io.read_u2le()
self.panel1_current = self._io.read_u2le()
self.panel3_current = self._io.read_u2le()
self.panel2_current = self._io.read_u2le()
self.panel5_current = self._io.read_u2le()
self.panel6_current = self._io.read_u2le()
self.panel4_current = self._io.read_u2le()
self.vbat = self._io.read_u2le()
self.data0008 = self._io.read_u8le()
self.data0008a = self._io.read_u8le()
self.data0008b = self._io.read_u8le()
self.data0008c = self._io.read_u8le()
self.data0008d = self._io.read_u8le()
self.eps_temperature1 = self._io.read_s1()
self.eps_temperature2 = self._io.read_s1()
self.eps_temperature3 = self._io.read_s1()
self.eps_temperature4 = self._io.read_s1()
self.data0009 = self._io.read_u1()
self.obc_unix_time = self._io.read_u4le()
self.obc_boot_time = self._io.read_u4le()
self.obc_boot_count = self._io.read_u4le()
self.data0010 = self._io.read_u8le()
self.data0010a = self._io.read_u8le()
self.data0010b = self._io.read_u1()
self.panel1_temperature = self._io.read_s1()
self.panel2_temperature = self._io.read_s1()
self.panel3_temperature = self._io.read_s1()
self.panel4_temperature = self._io.read_s1()
self.panel5_temperature = self._io.read_s1()
self.panel6_temperature = self._io.read_s1()
self.data_0011 = self._io.read_u8le()
self.data_0011a = self._io.read_u8le()
self.data_0011b = self._io.read_u8le()
self.data_0011c = self._io.read_u8le()
self.data_0011d = self._io.read_u8le()
self.data_0011e = self._io.read_u8le()
self.data_0011f = self._io.read_u8le()
self.data_0011g = self._io.read_u8le()
self.data_0011h = self._io.read_u8le()
self.data_0011i = self._io.read_u8le()
self.data_0011j = self._io.read_u8le()
self.data_0011k = self._io.read_u4le()
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Connectat11.Callsign(_io__raw_callsign_ror, self, self._root)
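# Minimal usage sketch (not part of the generated decoder; `raw_frame` is a
# hypothetical AX.25 frame given as bytes):
#
#   frame = Connectat11.from_bytes(raw_frame)
#   hdr = frame.ax25_frame.ax25_header
#   print(hdr.src_callsign_raw.callsign_ror.callsign, hdr.src_ssid_raw.ssid)
#   beacon = frame.ax25_frame.beacon
#   if beacon.beacon_header.beacon_id == 3:
#       print(beacon.beacon_data.vbat, beacon.beacon_data.obc_unix_time)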
/satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/connectat11.py
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Armadillo(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field time_since_epoch: ax25_frame.payload.data_payload.time_since_epoch
:field uptime: ax25_frame.payload.data_payload.uptime
:field avail_nvmem: ax25_frame.payload.data_payload.avail_nvmem
:field pos_x: ax25_frame.payload.data_payload.pos_x
:field pos_y: ax25_frame.payload.data_payload.pos_y
:field pos_z: ax25_frame.payload.data_payload.pos_z
:field vel_x: ax25_frame.payload.data_payload.vel_x
:field vel_y: ax25_frame.payload.data_payload.vel_y
:field vel_z: ax25_frame.payload.data_payload.vel_z
:field pwr_states_reserved: ax25_frame.payload.data_payload.pwr_states_reserved
:field gps_power: ax25_frame.payload.data_payload.gps_power
:field adc_power: ax25_frame.payload.data_payload.adc_power
:field antenna_power: ax25_frame.payload.data_payload.antenna_power
:field pdd_power: ax25_frame.payload.data_payload.pdd_power
:field spacecraft_mode: ax25_frame.payload.data_payload.spacecraft_mode
:field vbatt: ax25_frame.payload.data_payload.vbatt
:field input_current: ax25_frame.payload.data_payload.input_current
:field output_current: ax25_frame.payload.data_payload.output_current
:field boot_count: ax25_frame.payload.data_payload.boot_count
:field boot_cause: ax25_frame.payload.data_payload.boot_cause
:field eps_temp_1: ax25_frame.payload.data_payload.eps_temp_1
:field eps_temp_2: ax25_frame.payload.data_payload.eps_temp_2
:field eps_temp_3: ax25_frame.payload.data_payload.eps_temp_3
:field eps_temp_4: ax25_frame.payload.data_payload.eps_temp_4
:field eps_bp4a: ax25_frame.payload.data_payload.eps_bp4a
:field eps_bp4b: ax25_frame.payload.data_payload.eps_bp4b
:field eps_output_1_current: ax25_frame.payload.data_payload.eps_output_1_current
:field eps_output_2_current: ax25_frame.payload.data_payload.eps_output_2_current
:field eps_output_3_current: ax25_frame.payload.data_payload.eps_output_3_current
:field eps_output_4_current: ax25_frame.payload.data_payload.eps_output_4_current
:field eps_output_5_current: ax25_frame.payload.data_payload.eps_output_5_current
:field eps_output_6_current: ax25_frame.payload.data_payload.eps_output_6_current
:field rxwl_temp_x: ax25_frame.payload.data_payload.rxwl_temp_x
:field rxwl_temp_y: ax25_frame.payload.data_payload.rxwl_temp_y
:field rxwl_temp_z: ax25_frame.payload.data_payload.rxwl_temp_z
:field gyro_temp_x: ax25_frame.payload.data_payload.gyro_temp_x
:field gyro_temp_y: ax25_frame.payload.data_payload.gyro_temp_y
:field gyro_temp_z: ax25_frame.payload.data_payload.gyro_temp_z
:field desired_quaternion_a: ax25_frame.payload.data_payload.desired_quaternion_a
:field desired_quaternion_b: ax25_frame.payload.data_payload.desired_quaternion_b
:field desired_quaternion_c: ax25_frame.payload.data_payload.desired_quaternion_c
:field desired_quaternion_d: ax25_frame.payload.data_payload.desired_quaternion_d
:field estimated_quaternion_a: ax25_frame.payload.data_payload.estimated_quaternion_a
:field estimated_quaternion_b: ax25_frame.payload.data_payload.estimated_quaternion_b
:field estimated_quaternion_c: ax25_frame.payload.data_payload.estimated_quaternion_c
:field estimated_quaternion_d: ax25_frame.payload.data_payload.estimated_quaternion_d
:field rotation_rate_x: ax25_frame.payload.data_payload.rotation_rate_x
:field rotation_rate_y: ax25_frame.payload.data_payload.rotation_rate_y
:field rotation_rate_z: ax25_frame.payload.data_payload.rotation_rate_z
:field sun_sensor_address: ax25_frame.payload.data_payload.sun_sensor_address
:field message: ax25_frame.payload.data_payload.message
"""
class BootCauses(Enum):
unknown_reset = 0
dedicated_wdt_reset = 1
i2c_wdt_reset = 2
hard_reset = 3
soft_reset = 4
stack_overflow = 5
timer_overflow = 6
brownout_or_power_on_reset = 7
internal_wdt_reset = 8
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Armadillo.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Armadillo.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Armadillo.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Armadillo.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Armadillo.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Armadillo.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Armadillo.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Armadillo.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Armadillo.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Armadillo.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Armadillo.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Armadillo.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
_on = self._parent.ax25_header.src_callsign_raw.callsign_ror.callsign
if _on == u"KE5DTW":
self.data_payload = Armadillo.ArmadilloPayload(self._io, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class ArmadilloPayload(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pb_magic = self._io.read_bytes(5)
self.time_since_epoch = self._io.read_u4le()
self.uptime = self._io.read_u4le()
self.avail_nvmem = self._io.read_u4le()
self.pos_x = self._io.read_f4le()
self.pos_y = self._io.read_f4le()
self.pos_z = self._io.read_f4le()
self.vel_x = self._io.read_f4le()
self.vel_y = self._io.read_f4le()
self.vel_z = self._io.read_f4le()
self.pwr_states_reserved = self._io.read_bits_int_be(3)
self.gps_power = self._io.read_bits_int_be(1) != 0
self.adc_power = self._io.read_bits_int_be(1) != 0
self.antenna_power = self._io.read_bits_int_be(1) != 0
self.pdd_power = self._io.read_bits_int_be(1) != 0
self.spacecraft_mode = self._io.read_bits_int_be(1) != 0
self._io.align_to_byte()
self.vbatt = self._io.read_u2le()
self.input_current = self._io.read_u2le()
self.output_current = self._io.read_u2le()
self.boot_count = self._io.read_u4le()
self.boot_cause = self._io.read_u1()
self.eps_temp_1 = self._io.read_s2le()
self.eps_temp_2 = self._io.read_s2le()
self.eps_temp_3 = self._io.read_s2le()
self.eps_temp_4 = self._io.read_s2le()
self.eps_bp4a = self._io.read_s2le()
self.eps_bp4b = self._io.read_s2le()
self.eps_output_1_current = self._io.read_u2le()
self.eps_output_2_current = self._io.read_u2le()
self.eps_output_3_current = self._io.read_u2le()
self.eps_output_4_current = self._io.read_u2le()
self.eps_output_5_current = self._io.read_u2le()
self.eps_output_6_current = self._io.read_u2le()
self.rxwl_temp_x = self._io.read_f4le()
self.rxwl_temp_y = self._io.read_f4le()
self.rxwl_temp_z = self._io.read_f4le()
self.gyro_temp_x = self._io.read_f4le()
self.gyro_temp_y = self._io.read_f4le()
self.gyro_temp_z = self._io.read_f4le()
self.desired_quaternion_a = self._io.read_f4le()
self.desired_quaternion_b = self._io.read_f4le()
self.desired_quaternion_c = self._io.read_f4le()
self.desired_quaternion_d = self._io.read_f4le()
self.estimated_quaternion_a = self._io.read_f4le()
self.estimated_quaternion_b = self._io.read_f4le()
self.estimated_quaternion_c = self._io.read_f4le()
self.estimated_quaternion_d = self._io.read_f4le()
self.rotation_rate_x = self._io.read_f4le()
self.rotation_rate_y = self._io.read_f4le()
self.rotation_rate_z = self._io.read_f4le()
self.sun_sensor_address = self._io.read_u1()
self.message = (KaitaiStream.bytes_terminate(self._io.read_bytes(110), 0, False)).decode(u"ASCII")
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Armadillo.Callsign(_io__raw_callsign_ror, self, self._root)
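# Worked example for the callsign de-rotation above: AX.25 stores callsign
# characters shifted left by one bit, and rotating each byte left by
# 8 - 1 = 7 positions undoes that shift. For the "KE5DTW" callsign matched
# in ui_frame:
#   KaitaiStream.process_rotate_left(b"\x96\x8a\x6a\x88\xa8\xae", 7, 1)
#   == b"KE5DTW"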
/satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/armadillo.py
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Cubebel1(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field hdr_rf_id: ax25_frame.payload.ax25_info.header.rf_id
:field hdr_opr_time: ax25_frame.payload.ax25_info.header.opr_time
:field hdr_reboot_cnt: ax25_frame.payload.ax25_info.header.reboot_cnt
:field hdr_mcusr: ax25_frame.payload.ax25_info.header.mcusr
:field hdr_pamp_temp: ax25_frame.payload.ax25_info.header.pamp_temp
:field hdr_pamp_voltage: ax25_frame.payload.ax25_info.header.pamp_voltage
:field hdr_tx_attenuator: ax25_frame.payload.ax25_info.header.tx_attenuator
:field hdr_battery_voltage: ax25_frame.payload.ax25_info.header.battery_voltage
:field hdr_system_voltage: ax25_frame.payload.ax25_info.header.system_voltage
:field hdr_seq_number: ax25_frame.payload.ax25_info.header.seq_number
:field hdr_pwr_save_state: ax25_frame.payload.ax25_info.header.pwr_save_state
:field hdr_modem_on_period: ax25_frame.payload.ax25_info.header.modem_on_period
:field hdr_obc_can_status: ax25_frame.payload.ax25_info.header.obc_can_status
:field hdr_eps_can_status: ax25_frame.payload.ax25_info.header.eps_can_status
:field hdr_info_size: ax25_frame.payload.ax25_info.header.info_size
:field hdr_data_type: ax25_frame.payload.ax25_info.header.data_type
:field fec_crc_status: ax25_frame.payload.ax25_info.data.fec_crc_status
:field rx_msg_state: ax25_frame.payload.ax25_info.data.rx_msg_state
:field rssi: ax25_frame.payload.ax25_info.data.rssi
:field rf_msg: ax25_frame.payload.ax25_info.data.rf_msg
:field current_to_gamma: ax25_frame.payload.ax25_info.data.current_to_gamma
:field current_to_irsensor: ax25_frame.payload.ax25_info.data.current_to_irsensor
:field current_to_extflash: ax25_frame.payload.ax25_info.data.current_to_extflash
:field current_to_solarsens: ax25_frame.payload.ax25_info.data.current_to_solarsens
:field current_to_magnetcoils: ax25_frame.payload.ax25_info.data.current_to_magnetcoils
:field current_to_coil_x: ax25_frame.payload.ax25_info.data.current_to_coil_x
:field current_to_coil_y: ax25_frame.payload.ax25_info.data.current_to_coil_y
:field current_to_coil_pz: ax25_frame.payload.ax25_info.data.current_to_coil_pz
:field current_to_coil_nz: ax25_frame.payload.ax25_info.data.current_to_coil_nz
:field battery1_temp: ax25_frame.payload.ax25_info.data.battery1_temp
:field battery2_temp: ax25_frame.payload.ax25_info.data.battery2_temp
:field numb_oc_obc: ax25_frame.payload.ax25_info.data.numb_oc_obc
:field numb_oc_out_gamma: ax25_frame.payload.ax25_info.data.numb_oc_out_gamma
:field numb_oc_out_rf1: ax25_frame.payload.ax25_info.data.numb_oc_out_rf1
:field numb_oc_out_rf2: ax25_frame.payload.ax25_info.data.numb_oc_out_rf2
:field numb_oc_out_flash: ax25_frame.payload.ax25_info.data.numb_oc_out_flash
:field numb_oc_out_irsens: ax25_frame.payload.ax25_info.data.numb_oc_out_irsens
:field numb_oc_coil_x: ax25_frame.payload.ax25_info.data.numb_oc_coil_x
:field numb_oc_coil_y: ax25_frame.payload.ax25_info.data.numb_oc_coil_y
:field numb_oc_coil_pz: ax25_frame.payload.ax25_info.data.numb_oc_coil_pz
:field numb_oc_coil_nz: ax25_frame.payload.ax25_info.data.numb_oc_coil_nz
:field numb_oc_magnetcoils: ax25_frame.payload.ax25_info.data.numb_oc_magnetcoils
:field numb_oc_solarsens: ax25_frame.payload.ax25_info.data.numb_oc_solarsens
:field reset_num: ax25_frame.payload.ax25_info.data.reset_num
:field reset_reason: ax25_frame.payload.ax25_info.data.reset_reason
:field pwr_sat: ax25_frame.payload.ax25_info.data.pwr_sat
:field pwr_rf1: ax25_frame.payload.ax25_info.data.pwr_rf1
:field pwr_rf2: ax25_frame.payload.ax25_info.data.pwr_rf2
:field pwr_sunsensor: ax25_frame.payload.ax25_info.data.pwr_sunsensor
:field pwr_gamma: ax25_frame.payload.ax25_info.data.pwr_gamma
:field pwr_irsensor: ax25_frame.payload.ax25_info.data.pwr_irsensor
:field pwr_flash: ax25_frame.payload.ax25_info.data.pwr_flash
:field pwr_magnet_x: ax25_frame.payload.ax25_info.data.pwr_magnet_x
:field pwr_magnet_y: ax25_frame.payload.ax25_info.data.pwr_magnet_y
:field pwr_magnet_z: ax25_frame.payload.ax25_info.data.pwr_magnet_z
:field sys_time: ax25_frame.payload.ax25_info.data.sys_time
:field adc_correctness: ax25_frame.payload.ax25_info.data.adc_correctness
:field t_adc1: ax25_frame.payload.ax25_info.data.t_adc1
:field t_adc2: ax25_frame.payload.ax25_info.data.t_adc2
:field stepup_current: ax25_frame.payload.ax25_info.data.stepup_current
:field stepup_voltage: ax25_frame.payload.ax25_info.data.stepup_voltage
:field afterbq_current: ax25_frame.payload.ax25_info.data.afterbq_current
:field battery_voltage: ax25_frame.payload.ax25_info.data.battery_voltage
:field sys_voltage_50: ax25_frame.payload.ax25_info.data.sys_voltage_50
:field sys_voltage_33: ax25_frame.payload.ax25_info.data.sys_voltage_33
:field eps_uc_current: ax25_frame.payload.ax25_info.data.eps_uc_current
:field obc_uc_current: ax25_frame.payload.ax25_info.data.obc_uc_current
:field rf1_uc_current: ax25_frame.payload.ax25_info.data.rf1_uc_current
:field rf2_uc_current: ax25_frame.payload.ax25_info.data.rf2_uc_current
:field solar_voltage: ax25_frame.payload.ax25_info.data.solar_voltage
:field side_x_current: ax25_frame.payload.ax25_info.data.side_x_current
:field side_py_current: ax25_frame.payload.ax25_info.data.side_py_current
:field side_ny_current: ax25_frame.payload.ax25_info.data.side_ny_current
:field side_pz_current: ax25_frame.payload.ax25_info.data.side_pz_current
:field side_nz_current: ax25_frame.payload.ax25_info.data.side_nz_current
.. seealso::
Source - https://bsusat.com/media/docs/2018/bsusat-1_data_struct.xlsx
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Cubebel1.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Cubebel1.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Cubebel1.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Cubebel1.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Cubebel1.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Cubebel1.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Cubebel1.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Cubebel1.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Cubebel1.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Cubebel1.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Cubebel1.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Cubebel1.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Cubebel1.Frame(_io__raw_ax25_info, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class RfMessage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rf_msg = (self._io.read_bytes((self._parent.header.info_size - 2))).decode(u"utf-8")
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.header = Cubebel1.Header(self._io, self, self._root)
if self.header.info_size > 0:
_on = self.header.data_type
if _on == 1:
self.data = Cubebel1.RfResponse(self._io, self, self._root)
elif _on == 3:
self.data = Cubebel1.RfMessage(self._io, self, self._root)
elif _on == 254:
self.data = Cubebel1.EpsFullTel(self._io, self, self._root)
elif _on == 255:
self.data = Cubebel1.EpsShortTel(self._io, self, self._root)
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class EpsShortTel(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.bytes = []
i = 0
while not self._io.is_eof():
self.bytes.append(self._io.read_u1())
i += 1
@property
def eps_uc_current(self):
"""12 bits."""
if hasattr(self, '_m_eps_uc_current'):
return self._m_eps_uc_current if hasattr(self, '_m_eps_uc_current') else None
self._m_eps_uc_current = (((self.bytes[14] | (self.bytes[15] << 8)) >> 2) & 4095)
return self._m_eps_uc_current if hasattr(self, '_m_eps_uc_current') else None
@property
def side_py_current(self):
"""12 bits."""
if hasattr(self, '_m_side_py_current'):
return self._m_side_py_current if hasattr(self, '_m_side_py_current') else None
self._m_side_py_current = ((((self.bytes[22] | (self.bytes[23] << 8)) | (self.bytes[24] << 16)) >> 6) & 4095)
return self._m_side_py_current if hasattr(self, '_m_side_py_current') else None
@property
def t_adc1(self):
"""12 bits."""
if hasattr(self, '_m_t_adc1'):
return self._m_t_adc1 if hasattr(self, '_m_t_adc1') else None
self._m_t_adc1 = (((self.bytes[2] | (self.bytes[3] << 8)) >> 2) & 4095)
return self._m_t_adc1 if hasattr(self, '_m_t_adc1') else None
@property
def afterbq_current(self):
"""12 bits."""
if hasattr(self, '_m_afterbq_current'):
return self._m_afterbq_current if hasattr(self, '_m_afterbq_current') else None
self._m_afterbq_current = (((self.bytes[8] | (self.bytes[9] << 8)) >> 2) & 4095)
return self._m_afterbq_current if hasattr(self, '_m_afterbq_current') else None
@property
def battery_voltage(self):
"""12 bits."""
if hasattr(self, '_m_battery_voltage'):
return self._m_battery_voltage if hasattr(self, '_m_battery_voltage') else None
self._m_battery_voltage = ((((self.bytes[9] | (self.bytes[10] << 8)) | (self.bytes[11] << 16)) >> 6) & 4095)
return self._m_battery_voltage if hasattr(self, '_m_battery_voltage') else None
@property
def sys_voltage_50(self):
"""12 bits."""
if hasattr(self, '_m_sys_voltage_50'):
return self._m_sys_voltage_50 if hasattr(self, '_m_sys_voltage_50') else None
self._m_sys_voltage_50 = (((self.bytes[11] | (self.bytes[12] << 8)) >> 2) & 4095)
return self._m_sys_voltage_50 if hasattr(self, '_m_sys_voltage_50') else None
@property
def stepup_current(self):
"""12 bits."""
if hasattr(self, '_m_stepup_current'):
return self._m_stepup_current if hasattr(self, '_m_stepup_current') else None
self._m_stepup_current = (((self.bytes[5] | (self.bytes[6] << 8)) >> 2) & 4095)
return self._m_stepup_current if hasattr(self, '_m_stepup_current') else None
@property
def side_pz_current(self):
"""12 bits."""
if hasattr(self, '_m_side_pz_current'):
return self._m_side_pz_current if hasattr(self, '_m_side_pz_current') else None
self._m_side_pz_current = ((((self.bytes[25] | (self.bytes[26] << 8)) | (self.bytes[27] << 16)) >> 6) & 4095)
return self._m_side_pz_current if hasattr(self, '_m_side_pz_current') else None
@property
def sys_voltage_33(self):
"""12 bits."""
if hasattr(self, '_m_sys_voltage_33'):
return self._m_sys_voltage_33 if hasattr(self, '_m_sys_voltage_33') else None
self._m_sys_voltage_33 = ((((self.bytes[12] | (self.bytes[13] << 8)) | (self.bytes[14] << 16)) >> 6) & 4095)
return self._m_sys_voltage_33 if hasattr(self, '_m_sys_voltage_33') else None
@property
def rf2_uc_current(self):
"""12 bits."""
if hasattr(self, '_m_rf2_uc_current'):
return self._m_rf2_uc_current if hasattr(self, '_m_rf2_uc_current') else None
self._m_rf2_uc_current = (((self.bytes[18] | (self.bytes[19] << 8)) >> 2) & 4095)
return self._m_rf2_uc_current if hasattr(self, '_m_rf2_uc_current') else None
@property
def solar_voltage(self):
"""12 bits."""
if hasattr(self, '_m_solar_voltage'):
return self._m_solar_voltage if hasattr(self, '_m_solar_voltage') else None
self._m_solar_voltage = ((((self.bytes[19] | (self.bytes[20] << 8)) | (self.bytes[21] << 16)) >> 6) & 4095)
return self._m_solar_voltage if hasattr(self, '_m_solar_voltage') else None
@property
def side_x_current(self):
"""12 bits."""
if hasattr(self, '_m_side_x_current'):
return self._m_side_x_current if hasattr(self, '_m_side_x_current') else None
self._m_side_x_current = (((self.bytes[21] | (self.bytes[22] << 8)) >> 2) & 4095)
return self._m_side_x_current if hasattr(self, '_m_side_x_current') else None
@property
def obc_uc_current(self):
"""10 bits."""
if hasattr(self, '_m_obc_uc_current'):
return self._m_obc_uc_current if hasattr(self, '_m_obc_uc_current') else None
self._m_obc_uc_current = ((self.bytes[15] | (self.bytes[16] << 8)) >> 6)
return self._m_obc_uc_current if hasattr(self, '_m_obc_uc_current') else None
@property
def side_nz_current(self):
"""12 bits."""
if hasattr(self, '_m_side_nz_current'):
return self._m_side_nz_current if hasattr(self, '_m_side_nz_current') else None
self._m_side_nz_current = (((self.bytes[27] | (self.bytes[28] << 8)) >> 2) & 4095)
return self._m_side_nz_current if hasattr(self, '_m_side_nz_current') else None
@property
def adc_correctness(self):
"""2 bits."""
if hasattr(self, '_m_adc_correctness'):
return self._m_adc_correctness if hasattr(self, '_m_adc_correctness') else None
self._m_adc_correctness = (self.bytes[2] & 3)
return self._m_adc_correctness if hasattr(self, '_m_adc_correctness') else None
@property
def sys_time(self):
"""16 bits."""
if hasattr(self, '_m_sys_time'):
return self._m_sys_time if hasattr(self, '_m_sys_time') else None
self._m_sys_time = (self.bytes[0] | (self.bytes[1] << 8))
return self._m_sys_time if hasattr(self, '_m_sys_time') else None
@property
def stepup_voltage(self):
"""12 bits."""
if hasattr(self, '_m_stepup_voltage'):
return self._m_stepup_voltage if hasattr(self, '_m_stepup_voltage') else None
self._m_stepup_voltage = ((((self.bytes[6] | (self.bytes[7] << 8)) | (self.bytes[8] << 16)) >> 6) & 4095)
return self._m_stepup_voltage if hasattr(self, '_m_stepup_voltage') else None
@property
def side_ny_current(self):
"""12 bits."""
if hasattr(self, '_m_side_ny_current'):
return self._m_side_ny_current if hasattr(self, '_m_side_ny_current') else None
self._m_side_ny_current = (((self.bytes[24] | (self.bytes[25] << 8)) >> 2) & 4095)
return self._m_side_ny_current if hasattr(self, '_m_side_ny_current') else None
@property
def t_adc2(self):
"""12 bits."""
if hasattr(self, '_m_t_adc2'):
return self._m_t_adc2 if hasattr(self, '_m_t_adc2') else None
self._m_t_adc2 = ((((self.bytes[3] | (self.bytes[4] << 8)) | (self.bytes[5] << 16)) >> 6) & 4095)
return self._m_t_adc2 if hasattr(self, '_m_t_adc2') else None
@property
def rf1_uc_current(self):
"""10 bits."""
if hasattr(self, '_m_rf1_uc_current'):
return self._m_rf1_uc_current if hasattr(self, '_m_rf1_uc_current') else None
self._m_rf1_uc_current = ((self.bytes[17] | (self.bytes[18] << 8)) & 1023)
return self._m_rf1_uc_current if hasattr(self, '_m_rf1_uc_current') else None
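# Worked example for the packed 12-bit getters above (byte values are
# hypothetical): each reading is assembled little-endian, shifted past the
# bits already consumed by earlier fields, then masked to 12 bits. With
# bytes[2] = 0x34 and bytes[3] = 0x12:
#   t_adc1 = ((0x34 | (0x12 << 8)) >> 2) & 0xFFF
#          = (0x1234 >> 2) & 0xFFF
#          = 0x48D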
class RfResponse(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.status_bits = self._io.read_u1()
self.rssi = self._io.read_u1()
@property
def fec_crc_status(self):
if hasattr(self, '_m_fec_crc_status'):
return self._m_fec_crc_status if hasattr(self, '_m_fec_crc_status') else None
self._m_fec_crc_status = (self.status_bits & 1)
return self._m_fec_crc_status if hasattr(self, '_m_fec_crc_status') else None
@property
def rx_msg_state(self):
if hasattr(self, '_m_rx_msg_state'):
return self._m_rx_msg_state if hasattr(self, '_m_rx_msg_state') else None
self._m_rx_msg_state = (self.status_bits >> 1)
return self._m_rx_msg_state if hasattr(self, '_m_rx_msg_state') else None
class EpsFullTel(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.bytes = []
i = 0
while not self._io.is_eof():
self.bytes.append(self._io.read_u1())
i += 1
@property
def eps_uc_current(self):
"""12 bits."""
if hasattr(self, '_m_eps_uc_current'):
return self._m_eps_uc_current if hasattr(self, '_m_eps_uc_current') else None
self._m_eps_uc_current = (((self.bytes[14] | (self.bytes[15] << 8)) >> 2) & 4095)
return self._m_eps_uc_current if hasattr(self, '_m_eps_uc_current') else None
@property
def pwr_sat(self):
"""1 bits."""
if hasattr(self, '_m_pwr_sat'):
return self._m_pwr_sat if hasattr(self, '_m_pwr_sat') else None
self._m_pwr_sat = (self.bytes[61] & 1)
return self._m_pwr_sat if hasattr(self, '_m_pwr_sat') else None
@property
def pwr_irsensor(self):
"""1 bits."""
if hasattr(self, '_m_pwr_irsensor'):
return self._m_pwr_irsensor if hasattr(self, '_m_pwr_irsensor') else None
self._m_pwr_irsensor = ((self.bytes[61] >> 5) & 1)
return self._m_pwr_irsensor if hasattr(self, '_m_pwr_irsensor') else None
@property
def numb_oc_coil_y(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_coil_y'):
return self._m_numb_oc_coil_y if hasattr(self, '_m_numb_oc_coil_y') else None
self._m_numb_oc_coil_y = self.bytes[53]
return self._m_numb_oc_coil_y if hasattr(self, '_m_numb_oc_coil_y') else None
@property
def side_py_current(self):
"""12 bits."""
if hasattr(self, '_m_side_py_current'):
return self._m_side_py_current if hasattr(self, '_m_side_py_current') else None
self._m_side_py_current = ((((self.bytes[22] | (self.bytes[23] << 8)) | (self.bytes[24] << 16)) >> 6) & 4095)
return self._m_side_py_current if hasattr(self, '_m_side_py_current') else None
@property
def current_to_solarsens(self):
"""12 bits."""
if hasattr(self, '_m_current_to_solarsens'):
return self._m_current_to_solarsens if hasattr(self, '_m_current_to_solarsens') else None
self._m_current_to_solarsens = (((self.bytes[33] | (self.bytes[34] << 8)) >> 2) & 4095)
return self._m_current_to_solarsens if hasattr(self, '_m_current_to_solarsens') else None
@property
def current_to_extflash(self):
"""12 bits."""
if hasattr(self, '_m_current_to_extflash'):
return self._m_current_to_extflash if hasattr(self, '_m_current_to_extflash') else None
self._m_current_to_extflash = ((((self.bytes[31] | (self.bytes[32] << 8)) | (self.bytes[33] << 16)) >> 6) & 4095)
return self._m_current_to_extflash if hasattr(self, '_m_current_to_extflash') else None
@property
def pwr_magnet_x(self):
"""1 bits."""
if hasattr(self, '_m_pwr_magnet_x'):
return self._m_pwr_magnet_x if hasattr(self, '_m_pwr_magnet_x') else None
self._m_pwr_magnet_x = (self.bytes[61] >> 7)
return self._m_pwr_magnet_x if hasattr(self, '_m_pwr_magnet_x') else None
@property
def numb_oc_solarsens(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_solarsens'):
return self._m_numb_oc_solarsens if hasattr(self, '_m_numb_oc_solarsens') else None
self._m_numb_oc_solarsens = self.bytes[57]
return self._m_numb_oc_solarsens if hasattr(self, '_m_numb_oc_solarsens') else None
@property
def t_adc1(self):
"""12 bits."""
if hasattr(self, '_m_t_adc1'):
return self._m_t_adc1 if hasattr(self, '_m_t_adc1') else None
self._m_t_adc1 = (((self.bytes[2] | (self.bytes[3] << 8)) >> 2) & 4095)
return self._m_t_adc1 if hasattr(self, '_m_t_adc1') else None
@property
def afterbq_current(self):
"""12 bits."""
if hasattr(self, '_m_afterbq_current'):
return self._m_afterbq_current if hasattr(self, '_m_afterbq_current') else None
self._m_afterbq_current = (((self.bytes[8] | (self.bytes[9] << 8)) >> 2) & 4095)
return self._m_afterbq_current if hasattr(self, '_m_afterbq_current') else None
@property
def pwr_sunsensor(self):
"""1 bits."""
if hasattr(self, '_m_pwr_sunsensor'):
return self._m_pwr_sunsensor if hasattr(self, '_m_pwr_sunsensor') else None
self._m_pwr_sunsensor = ((self.bytes[61] >> 3) & 1)
return self._m_pwr_sunsensor if hasattr(self, '_m_pwr_sunsensor') else None
@property
def numb_oc_obc(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_obc'):
return self._m_numb_oc_obc if hasattr(self, '_m_numb_oc_obc') else None
self._m_numb_oc_obc = self.bytes[46]
return self._m_numb_oc_obc if hasattr(self, '_m_numb_oc_obc') else None
@property
def current_to_coil_pz(self):
"""12 bits."""
if hasattr(self, '_m_current_to_coil_pz'):
return self._m_current_to_coil_pz if hasattr(self, '_m_current_to_coil_pz') else None
self._m_current_to_coil_pz = (((self.bytes[39] | (self.bytes[40] << 8)) >> 2) & 4095)
return self._m_current_to_coil_pz if hasattr(self, '_m_current_to_coil_pz') else None
@property
def battery_voltage(self):
"""12 bits."""
if hasattr(self, '_m_battery_voltage'):
return self._m_battery_voltage if hasattr(self, '_m_battery_voltage') else None
self._m_battery_voltage = ((((self.bytes[9] | (self.bytes[10] << 8)) | (self.bytes[11] << 16)) >> 6) & 4095)
return self._m_battery_voltage if hasattr(self, '_m_battery_voltage') else None
@property
def pwr_rf1(self):
"""1 bits."""
if hasattr(self, '_m_pwr_rf1'):
return self._m_pwr_rf1 if hasattr(self, '_m_pwr_rf1') else None
self._m_pwr_rf1 = ((self.bytes[61] >> 1) & 1)
return self._m_pwr_rf1 if hasattr(self, '_m_pwr_rf1') else None
@property
def sys_voltage_50(self):
"""12 bits."""
if hasattr(self, '_m_sys_voltage_50'):
return self._m_sys_voltage_50 if hasattr(self, '_m_sys_voltage_50') else None
self._m_sys_voltage_50 = (((self.bytes[11] | (self.bytes[12] << 8)) >> 2) & 4095)
return self._m_sys_voltage_50 if hasattr(self, '_m_sys_voltage_50') else None
@property
def numb_oc_magnetcoils(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_magnetcoils'):
return self._m_numb_oc_magnetcoils if hasattr(self, '_m_numb_oc_magnetcoils') else None
self._m_numb_oc_magnetcoils = self.bytes[56]
return self._m_numb_oc_magnetcoils if hasattr(self, '_m_numb_oc_magnetcoils') else None
@property
def pwr_magnet_z(self):
"""1 bits."""
if hasattr(self, '_m_pwr_magnet_z'):
return self._m_pwr_magnet_z if hasattr(self, '_m_pwr_magnet_z') else None
self._m_pwr_magnet_z = ((self.bytes[62] >> 1) & 1)
return self._m_pwr_magnet_z if hasattr(self, '_m_pwr_magnet_z') else None
@property
def stepup_current(self):
"""12 bits."""
if hasattr(self, '_m_stepup_current'):
return self._m_stepup_current if hasattr(self, '_m_stepup_current') else None
self._m_stepup_current = (((self.bytes[5] | (self.bytes[6] << 8)) >> 2) & 4095)
return self._m_stepup_current if hasattr(self, '_m_stepup_current') else None
@property
def side_pz_current(self):
"""12 bits."""
if hasattr(self, '_m_side_pz_current'):
return self._m_side_pz_current if hasattr(self, '_m_side_pz_current') else None
self._m_side_pz_current = ((((self.bytes[25] | (self.bytes[26] << 8)) | (self.bytes[27] << 16)) >> 6) & 4095)
return self._m_side_pz_current if hasattr(self, '_m_side_pz_current') else None
@property
def current_to_coil_y(self):
"""12 bits."""
if hasattr(self, '_m_current_to_coil_y'):
return self._m_current_to_coil_y if hasattr(self, '_m_current_to_coil_y') else None
self._m_current_to_coil_y = ((((self.bytes[37] | (self.bytes[38] << 8)) | (self.bytes[39] << 16)) >> 6) & 4095)
return self._m_current_to_coil_y if hasattr(self, '_m_current_to_coil_y') else None
@property
def numb_oc_out_irsens(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_out_irsens'):
return self._m_numb_oc_out_irsens if hasattr(self, '_m_numb_oc_out_irsens') else None
self._m_numb_oc_out_irsens = self.bytes[51]
return self._m_numb_oc_out_irsens if hasattr(self, '_m_numb_oc_out_irsens') else None
@property
def sys_voltage_33(self):
"""12 bits."""
if hasattr(self, '_m_sys_voltage_33'):
return self._m_sys_voltage_33 if hasattr(self, '_m_sys_voltage_33') else None
self._m_sys_voltage_33 = ((((self.bytes[12] | (self.bytes[13] << 8)) | (self.bytes[14] << 16)) >> 6) & 4095)
return self._m_sys_voltage_33 if hasattr(self, '_m_sys_voltage_33') else None
@property
def rf2_uc_current(self):
"""12 bits."""
if hasattr(self, '_m_rf2_uc_current'):
return self._m_rf2_uc_current if hasattr(self, '_m_rf2_uc_current') else None
self._m_rf2_uc_current = (((self.bytes[18] | (self.bytes[19] << 8)) >> 2) & 4095)
return self._m_rf2_uc_current if hasattr(self, '_m_rf2_uc_current') else None
@property
def battery2_temp(self):
"""12 bits."""
if hasattr(self, '_m_battery2_temp'):
return self._m_battery2_temp if hasattr(self, '_m_battery2_temp') else None
self._m_battery2_temp = ((((self.bytes[43] | (self.bytes[44] << 8)) | (self.bytes[45] << 16)) >> 6) & 4095)
return self._m_battery2_temp if hasattr(self, '_m_battery2_temp') else None
@property
def current_to_magnetcoils(self):
"""12 bits."""
if hasattr(self, '_m_current_to_magnetcoils'):
return self._m_current_to_magnetcoils if hasattr(self, '_m_current_to_magnetcoils') else None
self._m_current_to_magnetcoils = ((((self.bytes[34] | (self.bytes[35] << 8)) | (self.bytes[36] << 16)) >> 6) & 4095)
return self._m_current_to_magnetcoils if hasattr(self, '_m_current_to_magnetcoils') else None
@property
def current_to_coil_x(self):
"""12 bits."""
if hasattr(self, '_m_current_to_coil_x'):
return self._m_current_to_coil_x if hasattr(self, '_m_current_to_coil_x') else None
self._m_current_to_coil_x = (((self.bytes[36] | (self.bytes[37] << 8)) >> 2) & 4095)
return self._m_current_to_coil_x if hasattr(self, '_m_current_to_coil_x') else None
@property
def numb_oc_out_rf2(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_out_rf2'):
return self._m_numb_oc_out_rf2 if hasattr(self, '_m_numb_oc_out_rf2') else None
self._m_numb_oc_out_rf2 = self.bytes[49]
return self._m_numb_oc_out_rf2 if hasattr(self, '_m_numb_oc_out_rf2') else None
@property
def solar_voltage(self):
"""12 bits."""
if hasattr(self, '_m_solar_voltage'):
return self._m_solar_voltage if hasattr(self, '_m_solar_voltage') else None
self._m_solar_voltage = ((((self.bytes[19] | (self.bytes[20] << 8)) | (self.bytes[21] << 16)) >> 6) & 4095)
return self._m_solar_voltage if hasattr(self, '_m_solar_voltage') else None
@property
def numb_oc_out_flash(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_out_flash'):
return self._m_numb_oc_out_flash if hasattr(self, '_m_numb_oc_out_flash') else None
self._m_numb_oc_out_flash = self.bytes[50]
return self._m_numb_oc_out_flash if hasattr(self, '_m_numb_oc_out_flash') else None
@property
def numb_oc_coil_nz(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_coil_nz'):
return self._m_numb_oc_coil_nz if hasattr(self, '_m_numb_oc_coil_nz') else None
self._m_numb_oc_coil_nz = self.bytes[55]
return self._m_numb_oc_coil_nz if hasattr(self, '_m_numb_oc_coil_nz') else None
@property
def numb_oc_out_gamma(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_out_gamma'):
return self._m_numb_oc_out_gamma if hasattr(self, '_m_numb_oc_out_gamma') else None
self._m_numb_oc_out_gamma = self.bytes[47]
return self._m_numb_oc_out_gamma if hasattr(self, '_m_numb_oc_out_gamma') else None
@property
def side_x_current(self):
"""12 bits."""
if hasattr(self, '_m_side_x_current'):
return self._m_side_x_current if hasattr(self, '_m_side_x_current') else None
self._m_side_x_current = (((self.bytes[21] | (self.bytes[22] << 8)) >> 2) & 4095)
return self._m_side_x_current if hasattr(self, '_m_side_x_current') else None
@property
def obc_uc_current(self):
"""10 bits."""
if hasattr(self, '_m_obc_uc_current'):
return self._m_obc_uc_current if hasattr(self, '_m_obc_uc_current') else None
self._m_obc_uc_current = ((self.bytes[15] | (self.bytes[16] << 8)) >> 6)
return self._m_obc_uc_current if hasattr(self, '_m_obc_uc_current') else None
@property
def current_to_coil_nz(self):
"""12 bits."""
if hasattr(self, '_m_current_to_coil_nz'):
return self._m_current_to_coil_nz if hasattr(self, '_m_current_to_coil_nz') else None
self._m_current_to_coil_nz = ((((self.bytes[40] | (self.bytes[41] << 8)) | (self.bytes[42] << 16)) >> 6) & 4095)
return self._m_current_to_coil_nz if hasattr(self, '_m_current_to_coil_nz') else None
@property
def pwr_flash(self):
"""1 bits."""
if hasattr(self, '_m_pwr_flash'):
return self._m_pwr_flash if hasattr(self, '_m_pwr_flash') else None
self._m_pwr_flash = ((self.bytes[61] >> 6) & 1)
return self._m_pwr_flash if hasattr(self, '_m_pwr_flash') else None
@property
def battery1_temp(self):
"""12 bits."""
if hasattr(self, '_m_battery1_temp'):
return self._m_battery1_temp if hasattr(self, '_m_battery1_temp') else None
self._m_battery1_temp = (((self.bytes[42] | (self.bytes[43] << 8)) >> 2) & 4095)
return self._m_battery1_temp if hasattr(self, '_m_battery1_temp') else None
@property
def current_to_gamma(self):
"""12 bits."""
if hasattr(self, '_m_current_to_gamma'):
return self._m_current_to_gamma if hasattr(self, '_m_current_to_gamma') else None
self._m_current_to_gamma = ((((self.bytes[28] | (self.bytes[29] << 8)) | (self.bytes[30] << 16)) >> 6) & 4095)
return self._m_current_to_gamma if hasattr(self, '_m_current_to_gamma') else None
@property
def current_to_irsensor(self):
"""12 bits."""
if hasattr(self, '_m_current_to_irsensor'):
return self._m_current_to_irsensor if hasattr(self, '_m_current_to_irsensor') else None
self._m_current_to_irsensor = (((self.bytes[30] | (self.bytes[31] << 8)) >> 2) & 4095)
return self._m_current_to_irsensor if hasattr(self, '_m_current_to_irsensor') else None
@property
def side_nz_current(self):
"""12 bits."""
if hasattr(self, '_m_side_nz_current'):
return self._m_side_nz_current if hasattr(self, '_m_side_nz_current') else None
self._m_side_nz_current = (((self.bytes[27] | (self.bytes[28] << 8)) >> 2) & 4095)
return self._m_side_nz_current if hasattr(self, '_m_side_nz_current') else None
@property
def adc_correctness(self):
"""2 bits."""
if hasattr(self, '_m_adc_correctness'):
return self._m_adc_correctness if hasattr(self, '_m_adc_correctness') else None
self._m_adc_correctness = (self.bytes[2] & 3)
return self._m_adc_correctness if hasattr(self, '_m_adc_correctness') else None
@property
def reset_reason(self):
"""8 bits."""
if hasattr(self, '_m_reset_reason'):
return self._m_reset_reason if hasattr(self, '_m_reset_reason') else None
self._m_reset_reason = self.bytes[60]
return self._m_reset_reason if hasattr(self, '_m_reset_reason') else None
@property
def sys_time(self):
"""16 bits."""
if hasattr(self, '_m_sys_time'):
return self._m_sys_time if hasattr(self, '_m_sys_time') else None
self._m_sys_time = (self.bytes[0] | (self.bytes[1] << 8))
return self._m_sys_time if hasattr(self, '_m_sys_time') else None
@property
def pwr_rf2(self):
"""1 bits."""
if hasattr(self, '_m_pwr_rf2'):
return self._m_pwr_rf2 if hasattr(self, '_m_pwr_rf2') else None
self._m_pwr_rf2 = ((self.bytes[61] >> 2) & 1)
return self._m_pwr_rf2 if hasattr(self, '_m_pwr_rf2') else None
@property
def numb_oc_coil_pz(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_coil_pz'):
return self._m_numb_oc_coil_pz if hasattr(self, '_m_numb_oc_coil_pz') else None
self._m_numb_oc_coil_pz = self.bytes[54]
return self._m_numb_oc_coil_pz if hasattr(self, '_m_numb_oc_coil_pz') else None
@property
def stepup_voltage(self):
"""12 bits."""
if hasattr(self, '_m_stepup_voltage'):
return self._m_stepup_voltage if hasattr(self, '_m_stepup_voltage') else None
self._m_stepup_voltage = ((((self.bytes[6] | (self.bytes[7] << 8)) | (self.bytes[8] << 16)) >> 6) & 4095)
return self._m_stepup_voltage if hasattr(self, '_m_stepup_voltage') else None
@property
def side_ny_current(self):
"""12 bits."""
if hasattr(self, '_m_side_ny_current'):
return self._m_side_ny_current if hasattr(self, '_m_side_ny_current') else None
self._m_side_ny_current = (((self.bytes[24] | (self.bytes[25] << 8)) >> 2) & 4095)
return self._m_side_ny_current if hasattr(self, '_m_side_ny_current') else None
@property
def numb_oc_out_rf1(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_out_rf1'):
return self._m_numb_oc_out_rf1 if hasattr(self, '_m_numb_oc_out_rf1') else None
self._m_numb_oc_out_rf1 = self.bytes[48]
return self._m_numb_oc_out_rf1 if hasattr(self, '_m_numb_oc_out_rf1') else None
@property
def t_adc2(self):
"""12 bits."""
if hasattr(self, '_m_t_adc2'):
return self._m_t_adc2 if hasattr(self, '_m_t_adc2') else None
self._m_t_adc2 = ((((self.bytes[3] | (self.bytes[4] << 8)) | (self.bytes[5] << 16)) >> 6) & 4095)
return self._m_t_adc2 if hasattr(self, '_m_t_adc2') else None
@property
def rf1_uc_current(self):
"""10 bits."""
if hasattr(self, '_m_rf1_uc_current'):
return self._m_rf1_uc_current if hasattr(self, '_m_rf1_uc_current') else None
self._m_rf1_uc_current = ((self.bytes[17] | (self.bytes[18] << 8)) & 1023)
return self._m_rf1_uc_current if hasattr(self, '_m_rf1_uc_current') else None
@property
def pwr_gamma(self):
"""1 bits."""
if hasattr(self, '_m_pwr_gamma'):
return self._m_pwr_gamma if hasattr(self, '_m_pwr_gamma') else None
self._m_pwr_gamma = ((self.bytes[61] >> 4) & 1)
return self._m_pwr_gamma if hasattr(self, '_m_pwr_gamma') else None
@property
def numb_oc_coil_x(self):
"""8 bits."""
if hasattr(self, '_m_numb_oc_coil_x'):
return self._m_numb_oc_coil_x if hasattr(self, '_m_numb_oc_coil_x') else None
self._m_numb_oc_coil_x = self.bytes[52]
return self._m_numb_oc_coil_x if hasattr(self, '_m_numb_oc_coil_x') else None
@property
def reset_num(self):
"""16 bits."""
if hasattr(self, '_m_reset_num'):
return self._m_reset_num if hasattr(self, '_m_reset_num') else None
self._m_reset_num = (self.bytes[58] | (self.bytes[59] << 8))
return self._m_reset_num if hasattr(self, '_m_reset_num') else None
@property
def pwr_magnet_y(self):
"""1 bits."""
if hasattr(self, '_m_pwr_magnet_y'):
return self._m_pwr_magnet_y if hasattr(self, '_m_pwr_magnet_y') else None
self._m_pwr_magnet_y = (self.bytes[62] & 1)
return self._m_pwr_magnet_y if hasattr(self, '_m_pwr_magnet_y') else None
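# The pwr_* flags are single bits of the power-status bytes at offsets 61 and
# 62; each property isolates its bit with (byte >> n) & 1.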
class Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rf_id = self._io.read_u1()
self.opr_time = self._io.read_u2le()
self.reboot_cnt = self._io.read_u1()
self.mcusr = self._io.read_u1()
self.pamp_temp = self._io.read_u2le()
self.pamp_voltage = self._io.read_u1()
self.tx_attenuator = self._io.read_u1()
self.battery_voltage = self._io.read_u2le()
self.system_voltage = self._io.read_u2le()
self.seq_number = self._io.read_u2le()
self.pwr_save_state = self._io.read_u1()
self.modem_on_period = self._io.read_u2le()
self.obc_can_status = self._io.read_u1()
self.eps_can_status = self._io.read_u1()
self.info_size = self._io.read_u1()
self.data_type = self._io.read_u1()
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Cubebel1.Callsign(_io__raw_callsign_ror, self, self._root)
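# Minimal usage sketch (hedged: `frame` is a hypothetical bytes object holding
# one complete raw AX.25 frame; KaitaiStruct.from_bytes() is provided by the
# kaitaistruct runtime imported above):
#
#     decoded = Cubebel1.from_bytes(frame)
#     hdr = decoded.ax25_frame.ax25_header
#     print(hdr.src_callsign_raw.callsign_ror.callsign, hdr.src_ssid_raw.ssid)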
# --- /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/cubebel1.py (pypi) ---
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Uwe4(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field beacon_header_flags1: ax25_frame.payload.ax25_info.beacon_header.beacon_header_flags1
:field beacon_header_flags2: ax25_frame.payload.ax25_info.beacon_header.beacon_header_flags2
:field beacon_header_packet_id: ax25_frame.payload.ax25_info.beacon_header.beacon_header_packet_id
:field beacon_header_fm_system_id: ax25_frame.payload.ax25_info.beacon_header.beacon_header_fm_system_id
:field beacon_header_fm_subsystem_id: ax25_frame.payload.ax25_info.beacon_header.beacon_header_fm_subsystem_id
:field beacon_header_to_system_id: ax25_frame.payload.ax25_info.beacon_header.beacon_header_to_system_id
:field beacon_header_to_subsystem_id: ax25_frame.payload.ax25_info.beacon_header.beacon_header_to_subsystem_id
:field beacon_header_api: ax25_frame.payload.ax25_info.beacon_header.beacon_header_api
:field beacon_header_payload_size: ax25_frame.payload.ax25_info.beacon_header.beacon_header_payload_size
:field beacon_payload_command: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_command
:field beacon_payload_var_id: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_var_id
:field beacon_payload_typeandlength: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_typeandlength
:field beacon_payload_timestamp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_timestamp
:field beacon_payload_beacon_rate: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_beacon_rate
:field beacon_payload_vals_out_of_range: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_vals_out_of_range
:field beacon_payload_uptime: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_uptime
:field beacon_payload_subsystem_status_bitmap: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_subsystem_status.beacon_payload_subsystem_status_bitmap
:field beacon_payload_batt_a_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_a_temp
:field beacon_payload_batt_a_state_of_charge: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_a_state_of_charge
:field beacon_payload_batt_b_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_b_temp
:field beacon_payload_batt_b_state_of_charge: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_b_state_of_charge
:field beacon_payload_batt_a_current: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_a_current
:field beacon_payload_batt_a_voltage: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_a_voltage
:field beacon_payload_batt_b_current: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_b_current
:field beacon_payload_batt_b_voltage: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_batt_b_voltage
:field beacon_payload_power_consumption: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_power_consumption
:field beacon_payload_obc_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_obc_temp
:field beacon_payload_panel_pos_x_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_panel_pos_x_temp
:field beacon_payload_panel_neg_x_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_panel_neg_x_temp
:field beacon_payload_panel_pos_y_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_panel_pos_y_temp
:field beacon_payload_panel_neg_y_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_panel_neg_y_temp
:field beacon_payload_panel_pos_z_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_panel_pos_z_temp
:field beacon_payload_panel_neg_z_temp: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_panel_neg_z_temp
:field beacon_payload_freq: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_freq
:field beacon_payload_crc: ax25_frame.payload.ax25_info.beacon_payload.beacon_payload_crc
:field rf_message: ax25_frame.payload.ax25_info.beacon_payload.message
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Uwe4.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Uwe4.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Uwe4.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Uwe4.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Uwe4.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Uwe4.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Uwe4.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Uwe4.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Uwe4.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Uwe4.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Uwe4.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Uwe4.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class HskpPayload(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.beacon_payload_command = self._io.read_u1()
self.beacon_payload_var_id = self._io.read_u2le()
self.beacon_payload_typeandlength = self._io.read_u2le()
self.beacon_payload_timestamp_raw = [None] * (6)
for i in range(6):
self.beacon_payload_timestamp_raw[i] = self._io.read_u1()
self.beacon_payload_beacon_rate = self._io.read_u4le()
self.beacon_payload_vals_out_of_range = self._io.read_u2le()
self.beacon_payload_uptime = self._io.read_u4le()
self.beacon_payload_subsystem_status = Uwe4.Bitmap16SubsystemStatus(self._io, self, self._root)
self.beacon_payload_batt_a_temp = self._io.read_s1()
self.beacon_payload_batt_a_state_of_charge = self._io.read_s1()
self.beacon_payload_batt_b_temp = self._io.read_s1()
self.beacon_payload_batt_b_state_of_charge = self._io.read_s1()
self.beacon_payload_batt_a_current = self._io.read_s2le()
self.beacon_payload_batt_a_voltage = self._io.read_s2le()
self.beacon_payload_batt_b_current = self._io.read_s2le()
self.beacon_payload_batt_b_voltage = self._io.read_s2le()
self.beacon_payload_power_consumption = self._io.read_s2le()
self.beacon_payload_obc_temp = self._io.read_s1()
self.beacon_payload_panel_pos_x_temp = self._io.read_s1()
self.beacon_payload_panel_neg_x_temp = self._io.read_s1()
self.beacon_payload_panel_pos_y_temp = self._io.read_s1()
self.beacon_payload_panel_neg_y_temp = self._io.read_s1()
self.beacon_payload_panel_pos_z_temp = self._io.read_s1()
self.beacon_payload_panel_neg_z_temp = self._io.read_s1()
self.beacon_payload_freq = self._io.read_u2le()
self.beacon_payload_crc = self._io.read_u2le()
@property
def beacon_payload_timestamp(self):
if hasattr(self, '_m_beacon_payload_timestamp'):
return self._m_beacon_payload_timestamp if hasattr(self, '_m_beacon_payload_timestamp') else None
self._m_beacon_payload_timestamp = (((((self.beacon_payload_timestamp_raw[0] + (self.beacon_payload_timestamp_raw[1] << 8)) + (self.beacon_payload_timestamp_raw[2] << 16)) + (self.beacon_payload_timestamp_raw[3] << 24)) + (self.beacon_payload_timestamp_raw[4] << 32)) + (self.beacon_payload_timestamp_raw[5] << 48))
return self._m_beacon_payload_timestamp if hasattr(self, '_m_beacon_payload_timestamp') else None
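# The timestamp arrives as six raw bytes reassembled little-endian above; note
# that the final byte is shifted by 48 bits rather than 40, mirroring the
# upstream uwe4.ksy definition this code is generated from.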
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Uwe4.Beacon(_io__raw_ax25_info, self, self._root)
class Bitmap16SubsystemStatus(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.beacon_payload_subsystem_status_bitmap = self._io.read_u2le()
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"utf-8")
class RfMessage(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.offset_0 = [None] * (6)
for i in range(6):
self.offset_0[i] = self._io.read_u1()
self.message = (self._io.read_bytes((self._parent.beacon_header.beacon_header_payload_size - 6))).decode(u"utf-8")
self.rf_message_crc = self._io.read_u2le()
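# The message length is not stored in this record itself: it is the enclosing
# beacon header's payload size minus the 6 offset bytes read above, and the
# trailing u2le is the message CRC.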
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
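# Here the SSID is taken as bits 3..1 of the mask byte: (ssid_mask & 15) >> 1
# drops the address-extension bit (bit 0) and keeps a 3-bit value.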
class Beacon(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
_on = self.is_valid_source
if _on == True:
self.beacon_header = Uwe4.BeaconHeader(self._io, self, self._root)
if self.is_valid_payload:
_on = self.beacon_header.beacon_header_api
if _on == 14:
self.beacon_payload = Uwe4.HskpPayload(self._io, self, self._root)
elif _on == 103:
self.beacon_payload = Uwe4.RfMessage(self._io, self, self._root)
@property
def is_valid_source(self):
"""This is work in progress as it never returns `true` without the
`(1 == 1)` statement. It DOES NOT check the source for now!
"""
if hasattr(self, '_m_is_valid_source'):
return self._m_is_valid_source if hasattr(self, '_m_is_valid_source') else None
self._m_is_valid_source = ((1 == 1) or (self._root.ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign == u"DP0UWH"))
return self._m_is_valid_source if hasattr(self, '_m_is_valid_source') else None
@property
def is_valid_payload(self):
if hasattr(self, '_m_is_valid_payload'):
return self._m_is_valid_payload if hasattr(self, '_m_is_valid_payload') else None
self._m_is_valid_payload = (( ((self.beacon_header.beacon_header_fm_system_id == 2) and (self.beacon_header.beacon_header_fm_subsystem_id == 1) and (self.beacon_header.beacon_header_to_system_id == 1) and (self.beacon_header.beacon_header_to_subsystem_id == 0) and (self.beacon_header.beacon_header_payload_size == 46) and (self.beacon_header.beacon_header_api == 14)) ) or ( ((self.beacon_header.beacon_header_fm_system_id == 2) and (self.beacon_header.beacon_header_fm_subsystem_id == 1) and (self.beacon_header.beacon_header_to_system_id == 1) and (self.beacon_header.beacon_header_to_subsystem_id == 0) and (self.beacon_header.beacon_header_api == 103)) ))
return self._m_is_valid_payload if hasattr(self, '_m_is_valid_payload') else None
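# Payload gating: a beacon is parsed only when addressed from system 2 /
# subsystem 1 to system 1 / subsystem 0, with either API 14 (housekeeping,
# payload size 46) or API 103 (RF message, any size); otherwise
# beacon_payload stays unset.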
class BeaconHeader(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.beacon_header_flags1 = self._io.read_u1()
self.beacon_header_flags2 = self._io.read_u1()
self.beacon_header_packet_id = self._io.read_u2le()
self.beacon_header_fm_system_id = self._io.read_u1()
self.beacon_header_fm_subsystem_id = self._io.read_u1()
self.beacon_header_to_system_id = self._io.read_u1()
self.beacon_header_to_subsystem_id = self._io.read_u1()
self.beacon_header_api = self._io.read_u1()
self.beacon_header_payload_size = self._io.read_u1()
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Uwe4.Callsign(_io__raw_callsign_ror, self, self._root)
# --- /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/uwe4.py (pypi) ---
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Asuphoenix(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field priority: ax25_frame.payload.ax25_info.csp_header.priority
:field source: ax25_frame.payload.ax25_info.csp_header.source
:field destination: ax25_frame.payload.ax25_info.csp_header.destination
:field destination_port: ax25_frame.payload.ax25_info.csp_header.destination_port
:field source_port: ax25_frame.payload.ax25_info.csp_header.source_port
:field reserved: ax25_frame.payload.ax25_info.csp_header.reserved
:field hmac: ax25_frame.payload.ax25_info.csp_header.hmac
:field xtea: ax25_frame.payload.ax25_info.csp_header.xtea
:field rdp: ax25_frame.payload.ax25_info.csp_header.rdp
:field crc: ax25_frame.payload.ax25_info.csp_header.crc
:field comms_idx_int: ax25_frame.payload.ax25_info.csp_node.csp_node_port.comms_idx_int
:field total_obc_resets: ax25_frame.payload.ax25_info.csp_node.csp_node_port.total_obc_resets
:field current_bat_volt_flt: ax25_frame.payload.ax25_info.csp_node.csp_node_port.current_bat_volt_flt
:field obc_clock: ax25_frame.payload.ax25_info.csp_node.csp_node_port.obc_clock
:field current_3v3_flt: ax25_frame.payload.ax25_info.csp_node.csp_node_port.current_3v3_flt
:field current_5v_flt: ax25_frame.payload.ax25_info.csp_node.csp_node_port.current_5v_flt
:field current_adcs_flt: ax25_frame.payload.ax25_info.csp_node.csp_node_port.current_adcs_flt
:field eps_charge_volt_bat_flt: ax25_frame.payload.ax25_info.csp_node.csp_node_port.eps_charge_volt_bat_flt
:field eps_charge_current_bat: ax25_frame.payload.ax25_info.csp_node.csp_node_port.eps_charge_current_bat
:field eps_temp: ax25_frame.payload.ax25_info.csp_node.csp_node_port.eps_temp
:field bat_temp: ax25_frame.payload.ax25_info.csp_node.csp_node_port.bat_temp
:field brownouts: ax25_frame.payload.ax25_info.csp_node.csp_node_port.brownouts
:field ax100_rssi: ax25_frame.payload.ax25_info.csp_node.csp_node_port.ax100_rssi
:field ax100_board_temp: ax25_frame.payload.ax25_info.csp_node.csp_node_port.ax100_board_temp
:field gps_sats_used: ax25_frame.payload.ax25_info.csp_node.csp_node_port.gps_sats_used
:field ants_deployed: ax25_frame.payload.ax25_info.csp_node.csp_node_port.ants_deployed
:field gpio_state: ax25_frame.payload.ax25_info.csp_node.csp_node_port.gpio_state
:field temp_brd: ax25_frame.payload.ax25_info.csp_node.csp_node_port.temp_brd
:field temp_pa: ax25_frame.payload.ax25_info.csp_node.csp_node_port.temp_pa
:field last_rssi: ax25_frame.payload.ax25_info.csp_node.csp_node_port.last_rssi
:field last_rferr: ax25_frame.payload.ax25_info.csp_node.csp_node_port.last_rferr
:field tx_count: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tx_count
:field rx_count: ax25_frame.payload.ax25_info.csp_node.csp_node_port.rx_count
:field tx_bytes: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tx_bytes
:field rx_bytes: ax25_frame.payload.ax25_info.csp_node.csp_node_port.rx_bytes
:field active_conf: ax25_frame.payload.ax25_info.csp_node.csp_node_port.active_conf
:field boot_count: ax25_frame.payload.ax25_info.csp_node.csp_node_port.boot_count
:field boot_cause: ax25_frame.payload.ax25_info.csp_node.csp_node_port.boot_cause
:field last_contact: ax25_frame.payload.ax25_info.csp_node.csp_node_port.last_contact
:field bgnd_rssi: ax25_frame.payload.ax25_info.csp_node.csp_node_port.bgnd_rssi
:field tx_duty: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tx_duty
:field tot_tx_count: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tot_tx_count
:field tot_rx_count: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tot_rx_count
:field tot_tx_bytes: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tot_tx_bytes
:field tot_rx_bytes: ax25_frame.payload.ax25_info.csp_node.csp_node_port.tot_rx_bytes
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Asuphoenix.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Asuphoenix.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Asuphoenix.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Asuphoenix.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Asuphoenix.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Asuphoenix.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Asuphoenix.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Asuphoenix.IFrame(self._io, self, self._root)
class ObcHkT(KaitaiStruct):
"""
.. seealso::
Source - http://phxcubesat.asu.edu/content/amateur-operations
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.beacon_type_magic = self._io.read_bytes(3)
if not self.beacon_type_magic == b"\x68\x6B\x3A":
raise kaitaistruct.ValidationNotEqualError(b"\x68\x6B\x3A", self.beacon_type_magic, self._io, u"/types/obc_hk_t/seq/0")
self.int_comms_idx_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.total_obc_resets_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.current_bat_volt_int_str = (self._io.read_bytes_term(46, False, True, True)).decode(u"utf-8")
self.current_bat_volt_frac_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.obc_disk_space_used_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.obc_clock_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.current_3v3_int_str = (self._io.read_bytes_term(46, False, True, True)).decode(u"utf-8")
self.current_3v3_frac_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.current_5v_int_str = (self._io.read_bytes_term(46, False, True, True)).decode(u"utf-8")
self.current_5v_frac_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.current_adcs_int_str = (self._io.read_bytes_term(46, False, True, True)).decode(u"utf-8")
self.current_adcs_frac_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.eps_charge_volt_bat_int_str = (self._io.read_bytes_term(46, False, True, True)).decode(u"utf-8")
self.eps_charge_volt_bat_frac_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.eps_charge_current_bat_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.eps_temp_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.bat_temp_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.brownouts_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.ax100_rssi_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.ax100_board_temp_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.gps_sats_used_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.ants_deployed_str = (self._io.read_bytes_term(44, False, True, True)).decode(u"utf-8")
self.gpio_state_str = (self._io.read_bytes(1)).decode(u"utf-8")
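# This housekeeping beacon is plain ASCII CSV: read_bytes_term(44, ...) reads
# up to a ',' (0x2C) and read_bytes_term(46, ...) up to a '.' (0x2E), so
# float-valued telemetry arrives split into integer and fractional decimal
# strings that the *_flt properties below recombine.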
@property
def obc_clock(self):
if hasattr(self, '_m_obc_clock'):
return self._m_obc_clock if hasattr(self, '_m_obc_clock') else None
self._m_obc_clock = int(self.obc_clock_str)
return self._m_obc_clock if hasattr(self, '_m_obc_clock') else None
@property
def ax100_board_temp(self):
if hasattr(self, '_m_ax100_board_temp'):
return self._m_ax100_board_temp if hasattr(self, '_m_ax100_board_temp') else None
self._m_ax100_board_temp = int(self.ax100_board_temp_str)
return self._m_ax100_board_temp if hasattr(self, '_m_ax100_board_temp') else None
@property
def total_obc_resets(self):
if hasattr(self, '_m_total_obc_resets'):
return self._m_total_obc_resets if hasattr(self, '_m_total_obc_resets') else None
self._m_total_obc_resets = int(self.total_obc_resets_str)
return self._m_total_obc_resets if hasattr(self, '_m_total_obc_resets') else None
@property
def gps_sats_used(self):
if hasattr(self, '_m_gps_sats_used'):
return self._m_gps_sats_used if hasattr(self, '_m_gps_sats_used') else None
self._m_gps_sats_used = int(self.gps_sats_used_str)
return self._m_gps_sats_used if hasattr(self, '_m_gps_sats_used') else None
@property
def ax100_rssi(self):
if hasattr(self, '_m_ax100_rssi'):
return self._m_ax100_rssi if hasattr(self, '_m_ax100_rssi') else None
self._m_ax100_rssi = int(self.ax100_rssi_str)
return self._m_ax100_rssi if hasattr(self, '_m_ax100_rssi') else None
@property
def current_5v_flt(self):
if hasattr(self, '_m_current_5v_flt'):
return self._m_current_5v_flt if hasattr(self, '_m_current_5v_flt') else None
self._m_current_5v_flt = (int(self.current_5v_int_str) + ((((int(int(self.current_5v_int_str) < 0) * -2) + 1) * int(self.current_5v_frac_str)) / 1000.0))
return self._m_current_5v_flt if hasattr(self, '_m_current_5v_flt') else None
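# The (int(int(s) < 0) * -2 + 1) factor is -1 for a negative integer part and
# +1 otherwise, so the fraction carries the same sign as the integer part,
# e.g. "-1" and "250" with scale 1000.0 yield -1 + (-1 * 250) / 1000.0 = -1.25.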
@property
def ants_deployed(self):
if hasattr(self, '_m_ants_deployed'):
return self._m_ants_deployed if hasattr(self, '_m_ants_deployed') else None
self._m_ants_deployed = int(self.ants_deployed_str)
return self._m_ants_deployed if hasattr(self, '_m_ants_deployed') else None
@property
def eps_temp(self):
if hasattr(self, '_m_eps_temp'):
return self._m_eps_temp if hasattr(self, '_m_eps_temp') else None
self._m_eps_temp = int(self.eps_temp_str)
return self._m_eps_temp if hasattr(self, '_m_eps_temp') else None
@property
def current_adcs_flt(self):
if hasattr(self, '_m_current_adcs_flt'):
return self._m_current_adcs_flt if hasattr(self, '_m_current_adcs_flt') else None
self._m_current_adcs_flt = (int(self.current_adcs_int_str) + ((((int(int(self.current_adcs_int_str) < 0) * -2) + 1) * int(self.current_adcs_frac_str)) / 1000.0))
return self._m_current_adcs_flt if hasattr(self, '_m_current_adcs_flt') else None
@property
def bat_temp(self):
if hasattr(self, '_m_bat_temp'):
return self._m_bat_temp if hasattr(self, '_m_bat_temp') else None
self._m_bat_temp = int(self.bat_temp_str)
return self._m_bat_temp if hasattr(self, '_m_bat_temp') else None
@property
def comms_idx_int(self):
if hasattr(self, '_m_comms_idx_int'):
return self._m_comms_idx_int if hasattr(self, '_m_comms_idx_int') else None
self._m_comms_idx_int = int(self.int_comms_idx_str)
return self._m_comms_idx_int if hasattr(self, '_m_comms_idx_int') else None
@property
def current_3v3_flt(self):
if hasattr(self, '_m_current_3v3_flt'):
return self._m_current_3v3_flt if hasattr(self, '_m_current_3v3_flt') else None
self._m_current_3v3_flt = (int(self.current_3v3_int_str) + ((((int(int(self.current_3v3_int_str) < 0) * -2) + 1) * int(self.current_3v3_frac_str)) / 1000.0))
return self._m_current_3v3_flt if hasattr(self, '_m_current_3v3_flt') else None
@property
def gpio_state(self):
if hasattr(self, '_m_gpio_state'):
return self._m_gpio_state if hasattr(self, '_m_gpio_state') else None
self._m_gpio_state = int(self.gpio_state_str)
return self._m_gpio_state if hasattr(self, '_m_gpio_state') else None
@property
def current_bat_volt_flt(self):
if hasattr(self, '_m_current_bat_volt_flt'):
return self._m_current_bat_volt_flt if hasattr(self, '_m_current_bat_volt_flt') else None
self._m_current_bat_volt_flt = (int(self.current_bat_volt_int_str) + ((((int(int(self.current_bat_volt_int_str) < 0) * -2) + 1) * int(self.current_bat_volt_frac_str)) / 100.0))
return self._m_current_bat_volt_flt if hasattr(self, '_m_current_bat_volt_flt') else None
@property
def eps_charge_current_bat(self):
if hasattr(self, '_m_eps_charge_current_bat'):
return self._m_eps_charge_current_bat if hasattr(self, '_m_eps_charge_current_bat') else None
self._m_eps_charge_current_bat = int(self.eps_charge_current_bat_str)
return self._m_eps_charge_current_bat if hasattr(self, '_m_eps_charge_current_bat') else None
@property
def brownouts(self):
if hasattr(self, '_m_brownouts'):
return self._m_brownouts if hasattr(self, '_m_brownouts') else None
self._m_brownouts = int(self.brownouts_str)
return self._m_brownouts if hasattr(self, '_m_brownouts') else None
@property
def eps_charge_volt_bat_flt(self):
if hasattr(self, '_m_eps_charge_volt_bat_flt'):
return self._m_eps_charge_volt_bat_flt if hasattr(self, '_m_eps_charge_volt_bat_flt') else None
self._m_eps_charge_volt_bat_flt = (int(self.eps_charge_volt_bat_int_str) + ((((int(int(self.eps_charge_volt_bat_int_str) < 0) * -2) + 1) * int(self.eps_charge_volt_bat_frac_str)) / 100.0))
return self._m_eps_charge_volt_bat_flt if hasattr(self, '_m_eps_charge_volt_bat_flt') else None
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Asuphoenix.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Asuphoenix.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Asuphoenix.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Asuphoenix.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Asuphoenix.Ax25InfoData(_io__raw_ax25_info, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class Ax100ControlPortT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.temp_brd = self._io.read_s2be()
self.temp_pa = self._io.read_s2be()
self.last_rssi = self._io.read_s2be()
self.last_rferr = self._io.read_s2be()
self.tx_count = self._io.read_u4be()
self.rx_count = self._io.read_u4be()
self.tx_bytes = self._io.read_u4be()
self.rx_bytes = self._io.read_u4be()
self.active_conf = self._io.read_u1()
self.boot_count = self._io.read_u2be()
self.boot_cause = self._io.read_u4be()
self.last_contact = self._io.read_u4be()
self.bgnd_rssi = self._io.read_s2be()
self.tx_duty = self._io.read_u1()
self.tot_tx_count = self._io.read_u4be()
self.tot_rx_count = self._io.read_u4be()
self.tot_tx_bytes = self._io.read_u4be()
self.tot_rx_bytes = self._io.read_u4be()
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_info = self._io.read_bytes_full()
_io__raw_ax25_info = KaitaiStream(BytesIO(self._raw_ax25_info))
self.ax25_info = Asuphoenix.Ax25InfoData(_io__raw_ax25_info, self, self._root)
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class Repeaters(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_callsign_raw = Asuphoenix.CallsignRaw(self._io, self, self._root)
self.rpt_ssid_raw = Asuphoenix.SsidMask(self._io, self, self._root)
class Repeater(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_instance = []
i = 0
while True:
_ = Asuphoenix.Repeaters(self._io, self, self._root)
self.rpt_instance.append(_)
if (_.rpt_ssid_raw.ssid_mask & 1) == 1:
break
i += 1
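# Digipeater addresses are consumed until one carries the AX.25
# address-extension bit (the LSB of its SSID byte) set, which marks the final
# entry of the repeater path.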
class CspHeaderT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.raw_csp_header = self._io.read_u4be()
@property
def source(self):
if hasattr(self, '_m_source'):
return self._m_source if hasattr(self, '_m_source') else None
self._m_source = ((self.raw_csp_header >> 25) & 31)
return self._m_source if hasattr(self, '_m_source') else None
@property
def source_port(self):
if hasattr(self, '_m_source_port'):
return self._m_source_port if hasattr(self, '_m_source_port') else None
self._m_source_port = ((self.raw_csp_header >> 8) & 63)
return self._m_source_port if hasattr(self, '_m_source_port') else None
@property
def destination_port(self):
if hasattr(self, '_m_destination_port'):
return self._m_destination_port if hasattr(self, '_m_destination_port') else None
self._m_destination_port = ((self.raw_csp_header >> 14) & 63)
return self._m_destination_port if hasattr(self, '_m_destination_port') else None
@property
def rdp(self):
if hasattr(self, '_m_rdp'):
return self._m_rdp if hasattr(self, '_m_rdp') else None
self._m_rdp = ((self.raw_csp_header & 2) >> 1)
return self._m_rdp if hasattr(self, '_m_rdp') else None
@property
def destination(self):
if hasattr(self, '_m_destination'):
return self._m_destination if hasattr(self, '_m_destination') else None
self._m_destination = ((self.raw_csp_header >> 20) & 31)
return self._m_destination if hasattr(self, '_m_destination') else None
@property
def priority(self):
if hasattr(self, '_m_priority'):
return self._m_priority if hasattr(self, '_m_priority') else None
self._m_priority = (self.raw_csp_header >> 30)
return self._m_priority if hasattr(self, '_m_priority') else None
@property
def reserved(self):
if hasattr(self, '_m_reserved'):
return self._m_reserved if hasattr(self, '_m_reserved') else None
self._m_reserved = ((self.raw_csp_header >> 4) & 15)
return self._m_reserved if hasattr(self, '_m_reserved') else None
@property
def xtea(self):
if hasattr(self, '_m_xtea'):
return self._m_xtea if hasattr(self, '_m_xtea') else None
self._m_xtea = ((self.raw_csp_header & 4) >> 2)
return self._m_xtea if hasattr(self, '_m_xtea') else None
@property
def hmac(self):
if hasattr(self, '_m_hmac'):
return self._m_hmac if hasattr(self, '_m_hmac') else None
self._m_hmac = ((self.raw_csp_header & 8) >> 3)
return self._m_hmac if hasattr(self, '_m_hmac') else None
@property
def crc(self):
if hasattr(self, '_m_crc'):
return self._m_crc if hasattr(self, '_m_crc') else None
self._m_crc = (self.raw_csp_header & 1)
return self._m_crc if hasattr(self, '_m_crc') else None
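# CSP v1 header layout recovered by the properties above (32 bits, read
# big-endian off the wire):
#   [31:30] priority   [29:25] source        [24:20] destination
#   [19:14] dest port  [13:8]  source port   [7:4] reserved
#   [3] HMAC   [2] XTEA   [1] RDP   [0] CRC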
class Ax100T(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
_on = self._parent.csp_header.source_port
if _on == 0:
self.csp_node_port = Asuphoenix.Ax100ControlPortT(self._io, self, self._root)
class ObcT(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
_on = self._parent.csp_header.source_port
if _on == 27:
self.csp_node_port = Asuphoenix.ObcHkT(self._io, self, self._root)
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (0), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Asuphoenix.Callsign(_io__raw_callsign_ror, self, self._root)
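# Rotation amount 8 - (0) == 8 is a whole-byte rotation, i.e. a no-op for the
# one-byte groups used here, so this decoder keeps the callsign bytes as-is
# (contrast uwe4.py above, where 8 - (1) rotates each byte right by one bit to
# undo the AX.25 left shift).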
class Ax25InfoData(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.csp_header = Asuphoenix.CspHeaderT(self._io, self, self._root)
_on = self.csp_header.source
if _on == 2:
self.csp_node = Asuphoenix.ObcT(self._io, self, self._root)
elif _on == 5:
self.csp_node = Asuphoenix.Ax100T(self._io, self, self._root)
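# Node routing: the CSP source address picks the subsystem (2 -> OBC,
# 5 -> AX100 radio), and the CSP source port then selects the record layout
# within it (ObcHkT on port 27, Ax100ControlPortT on port 0).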
# --- /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/asuphoenix.py (pypi) ---
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Catsat(KaitaiStruct):
""":field type: packet.header.type
:field callsign: packet.payload.callsign
:field motd: packet.payload.motd
:field obc_temp_mcu: packet.payload.obc_temp_mcu
:field obc_boot_cnt: packet.payload.obc_boot_cnt
:field obc_clock: packet.payload.obc_clock
:field batt_vbatt: packet.payload.bpx_vbatt
:field batt_temp_0: packet.payload.bpx_temp
:field batt_boot_cnt: packet.payload.bpx_boot_cnt
:field ax100_temp_brd: packet.payload.ax100_temp_brd
:field ax100_boot_cnt: packet.payload.ax100_boot_cnt
:field ax100_last_contact: packet.payload.ax100_last_contact
:field p60_boot_cnt: packet.payload.p60_boot_cnt
:field p60_batt_mode: packet.payload.p60_batt_mode
:field p60_batt_v: packet.payload.p60_batt_v
:field p60_batt_c: packet.payload.p60_batt_c
:field pdu_x2_cout_obc: packet.payload.pdu_x2_cout.0
:field pdu_x2_cout_hdcam: packet.payload.pdu_x2_cout.1
:field pdu_x2_cout_ant_sel: packet.payload.pdu_x2_cout.2
:field pdu_x2_cout_met_pwr: packet.payload.pdu_x2_cout.3
:field pdu_x2_cout_wspr_dep: packet.payload.pdu_x2_cout.5
:field pdu_x2_cout_asdr: packet.payload.pdu_x2_cout.6
:field pdu_x2_cout_ax100: packet.payload.pdu_x2_cout.7
:field pdu_x2_cout_inf_5v: packet.payload.pdu_x2_cout.8
:field pdu_x3_cout_hf_up: packet.payload.pdu_x3_cout.0
:field pdu_x3_cout_xband: packet.payload.pdu_x3_cout.1
:field pdu_x3_cout_adcs: packet.payload.pdu_x3_cout.2
:field pdu_x3_cout_rwheels: packet.payload.pdu_x3_cout.3
:field pdu_x3_cout_gyro: packet.payload.pdu_x3_cout.4
:field pdu_x3_cout_met_sel: packet.payload.pdu_x3_cout.5
:field pdu_x3_cout_inf_12v: packet.payload.pdu_x3_cout.6
:field pdu_x3_cout_inf_3v: packet.payload.pdu_x3_cout.7
:field acu_power_0: packet.payload.acu_power.0
:field acu_power_1: packet.payload.acu_power.1
:field acu_power_2: packet.payload.acu_power.2
:field acu_power_3: packet.payload.acu_power.3
:field acu_power_4: packet.payload.acu_power.4
:field acu_power_5: packet.payload.acu_power.5
:field adcs_boot_cnt: packet.payload.adcs_boot_cnt
:field adcs_clock: packet.payload.adcs_clock
:field extgyro_x: packet.payload.extgyro.0
:field extgyro_y: packet.payload.extgyro.1
:field extgyro_z: packet.payload.extgyro.2
:field gps_pos_x: packet.payload.gps_pos.0
:field gps_pos_y: packet.payload.gps_pos.1
:field gps_pos_z: packet.payload.gps_pos.2
:field gps_vel_x: packet.payload.gps_vel.0
:field gps_vel_y: packet.payload.gps_vel.1
:field gps_vel_z: packet.payload.gps_vel.2
:field acs_mode: packet.payload.acs_mode
:field status_extmag: packet.payload.status_extmag
:field status_fss_xneg: packet.payload.status_fss.0
:field status_fss_yneg: packet.payload.status_fss.1
:field status_fss_zneg: packet.payload.status_fss.2
:field status_fss_xpos: packet.payload.status_fss.3
:field status_fss_ypos: packet.payload.status_fss.4
:field status_extgyro: packet.payload.status_extgyro
:field status_gps: packet.payload.status_gps
:field obc_fs_mnted: packet.payload.obc_fs_mnted
:field obc_temp_ram: packet.payload.obc_temp_ram
:field obc_resetcause: packet.payload.obc_resetcause
:field obc_bootcause: packet.payload.obc_bootcause
:field obc_uptime: packet.payload.obc_uptime
:field batt_charge: packet.payload.batt_charge
:field batt_dcharge: packet.payload.batt_dcharge
:field batt_heater: packet.payload.batt_heater
:field batt_temp_1: packet.payload.batt_temp2
:field batt_temp_2: packet.payload.batt_temp3
:field batt_temp_3: packet.payload.batt_temp4
:field batt_bootcause: packet.payload.batt_bootcause
:field sat_temps_met_cam: packet.payload.sat_temps.0
:field sat_temps_hd_cam: packet.payload.sat_temps.1
:field sat_temps_asdr: packet.payload.sat_temps.2
:field sat_temps_xband: packet.payload.sat_temps.3
:field sat_temps_rad_y: packet.payload.sat_temps.4
:field sat_temps_rad_z: packet.payload.sat_temps.5
:field ax100_reboot_in: packet.payload.ax100_reboot_in
:field ax100_tx_inhibit: packet.payload.ax100_tx_inhibit
:field ax100_rx_freq: packet.payload.ax100_rx_freq
:field ax100_rx_baud: packet.payload.ax100_rx_baud
:field ax100_temp_pa: packet.payload.ax100_temp_pa
:field ax100_last_rssi: packet.payload.ax100_last_rssi
:field ax100_active_conf: packet.payload.ax100_active_conf
:field ax100_bootcause: packet.payload.ax100_bootcause
:field ax100_bgnd_rssi: packet.payload.ax100_bgnd_rssi
:field ax100_tx_duty: packet.payload.ax100_tx_duty
:field ax100_tx_freq: packet.payload.ax100_tx_freq
:field ax100_tx_baud: packet.payload.ax100_tx_baud
:field p60_cout_acu_x1_vcc: packet.payload.p60_cout.0
:field p60_cout_pdu_x2_vcc: packet.payload.p60_cout.1
:field p60_cout_pdu_x3_vcc: packet.payload.p60_cout.2
:field p60_cout_acu_x1_vbatt: packet.payload.p60_cout.4
:field p60_cout_pdu_x2_vbatt: packet.payload.p60_cout.5
:field p60_cout_pdu_x3_vbatt: packet.payload.p60_cout.6
:field p60_cout_stk_vbatt: packet.payload.p60_cout.8
:field p60_cout_stk_3v: packet.payload.p60_cout.9
:field p60_cout_stk_5v: packet.payload.p60_cout.10
:field p60_cout_gssb_3v: packet.payload.p60_cout.11
:field p60_cout_gssb_5v: packet.payload.p60_cout.12
:field p60_out_en_acu_x1_vcc: packet.payload.p60_out_en.0
:field p60_out_en_pdu_x2_vcc: packet.payload.p60_out_en.1
:field p60_out_en_pdu_x3_vcc: packet.payload.p60_out_en.2
:field p60_out_en_acu_x1_vbatt: packet.payload.p60_out_en.4
:field p60_out_en_pdu_x2_vbatt: packet.payload.p60_out_en.5
:field p60_out_en_pdu_x3_vbatt: packet.payload.p60_out_en.6
:field p60_out_en_stk_vbatt: packet.payload.p60_out_en.8
:field p60_out_en_stk_3v: packet.payload.p60_out_en.9
:field p60_out_en_stk_5v: packet.payload.p60_out_en.10
:field p60_out_en_gssb_3v: packet.payload.p60_out_en.11
:field p60_out_en_gssb_5v: packet.payload.p60_out_en.12
:field p60_temp_0: packet.payload.p60_temp.0
:field p60_temp_1: packet.payload.p60_temp.1
:field p60_bootcause: packet.payload.p60_bootcause
:field p60_uptime: packet.payload.p60_uptime
:field p60_resetcause: packet.payload.p60_resetcause
:field p60_latchup_acu_x1_vcc: packet.payload.p60_latchup.0
:field p60_latchup_pdu_x2_vcc: packet.payload.p60_latchup.1
:field p60_latchup_pdu_x3_vcc: packet.payload.p60_latchup.2
:field p60_latchup_acu_x1_vbatt: packet.payload.p60_latchup.4
:field p60_latchup_pdu_x2_vbatt: packet.payload.p60_latchup.5
:field p60_latchup_pdu_x3_vbatt: packet.payload.p60_latchup.6
:field p60_latchup_stk_vbatt: packet.payload.p60_latchup.8
:field p60_latchup_stk_3v: packet.payload.p60_latchup.9
:field p60_latchup_stk_5v: packet.payload.p60_latchup.10
:field p60_latchup_gssb_3v: packet.payload.p60_latchup.11
:field p60_latchup_gssb_5v: packet.payload.p60_latchup.12
:field p60_vcc_c: packet.payload.p60_vcc_c
:field p60_batt_v: packet.payload.p60_batt_v
:field p60_dearm_status: packet.payload.p60_dearm_status
:field p60_wdt_cnt_gnd: packet.payload.p60_wdt_cnt_gnd
:field p60_wdt_cnt_can: packet.payload.p60_wdt_cnt_can
:field p60_wdt_cnt_left: packet.payload.p60_wdt_cnt_left
:field p60_batt_chrg: packet.payload.p60_batt_chrg
:field p60_batt_dchrg: packet.payload.p60_batt_dchrg
:field ant6_depl: packet.payload.ant6_depl
:field ar6_depl: packet.payload.ar6_depl
:field pdu_x2_vout_obc: packet.payload.pdu_x2_vout.0
:field pdu_x2_vout_hdcam: packet.payload.pdu_x2_vout.1
:field pdu_x2_vout_ant_sel: packet.payload.pdu_x2_vout.2
:field pdu_x2_vout_met_pwr: packet.payload.pdu_x2_vout.3
:field pdu_x2_vout_wspr_dep: packet.payload.pdu_x2_vout.5
:field pdu_x2_vout_asdr: packet.payload.pdu_x2_vout.6
:field pdu_x2_vout_ax100: packet.payload.pdu_x2_vout.7
:field pdu_x2_vout_inf_5v: packet.payload.pdu_x2_vout.8
:field pdu_x2_temp: packet.payload.pdu_x2_temp
:field pdu_x2_out_en_obc: packet.payload.pdu_x2_out_en.0
:field pdu_x2_out_en_hdcam: packet.payload.pdu_x2_out_en.1
:field pdu_x2_out_en_ant_sel: packet.payload.pdu_x2_out_en.2
:field pdu_x2_out_en_met_pwr: packet.payload.pdu_x2_out_en.3
:field pdu_x2_out_en_wspr_dep: packet.payload.pdu_x2_out_en.5
:field pdu_x2_out_en_asdr: packet.payload.pdu_x2_out_en.6
:field pdu_x2_out_en_ax100: packet.payload.pdu_x2_out_en.7
:field pdu_x2_out_en_inf_5v: packet.payload.pdu_x2_out_en.8
:field pdu_x2_bootcause: packet.payload.pdu_x2_bootcause
:field pdu_x2_boot_cnt: packet.payload.pdu_x2_boot_cnt
:field pdu_x2_uptime: packet.payload.pdu_x2_uptime
:field pdu_x2_resetcause: packet.payload.pdu_x2_resetcause
:field pdu_x2_latchup_obc: packet.payload.pdu_x2_latchup.0
:field pdu_x2_latchup_hdcam: packet.payload.pdu_x2_latchup.1
:field pdu_x2_latchup_ant_sel: packet.payload.pdu_x2_latchup.2
:field pdu_x2_latchup_met_pwr: packet.payload.pdu_x2_latchup.3
:field pdu_x2_latchup_wspr_dep: packet.payload.pdu_x2_latchup.5
:field pdu_x2_latchup_asdr: packet.payload.pdu_x2_latchup.6
:field pdu_x2_latchup_ax100: packet.payload.pdu_x2_latchup.7
:field pdu_x2_latchup_inf_5v: packet.payload.pdu_x2_latchup.8
:field pdu_x3_vout_hf_up: packet.payload.pdu_x3_vout.0
:field pdu_x3_vout_xband: packet.payload.pdu_x3_vout.1
:field pdu_x3_vout_adcs: packet.payload.pdu_x3_vout.2
:field pdu_x3_vout_rwheels: packet.payload.pdu_x3_vout.3
:field pdu_x3_vout_gyro: packet.payload.pdu_x3_vout.4
:field pdu_x3_vout_met_sel: packet.payload.pdu_x3_vout.5
:field pdu_x3_vout_inf_12v: packet.payload.pdu_x3_vout.6
:field pdu_x3_vout_inf_3v: packet.payload.pdu_x3_vout.7
:field pdu_x3_temp: packet.payload.pdu_x3_temp
:field pdu_x3_out_en_hf_up: packet.payload.pdu_x3_out_en.0
:field pdu_x3_out_en_xband: packet.payload.pdu_x3_out_en.1
:field pdu_x3_out_en_adcs: packet.payload.pdu_x3_out_en.2
:field pdu_x3_out_en_rwheels: packet.payload.pdu_x3_out_en.3
:field pdu_x3_out_en_gyro: packet.payload.pdu_x3_out_en.4
:field pdu_x3_out_en_met_sel: packet.payload.pdu_x3_out_en.5
:field pdu_x3_out_en_inf_12v: packet.payload.pdu_x3_out_en.6
:field pdu_x3_out_en_inf_3v: packet.payload.pdu_x3_out_en.7
:field pdu_x3_bootcause: packet.payload.pdu_x3_bootcause
:field pdu_x3_boot_cnt: packet.payload.pdu_x3_boot_cnt
:field pdu_x3_uptime: packet.payload.pdu_x3_uptime
:field pdu_x3_resetcause: packet.payload.pdu_x3_resetcause
:field pdu_x3_latchup_hf_up: packet.payload.pdu_x3_latchup.0
:field pdu_x3_latchup_xband: packet.payload.pdu_x3_latchup.1
:field pdu_x3_latchup_adcs: packet.payload.pdu_x3_latchup.2
:field pdu_x3_latchup_rwheels: packet.payload.pdu_x3_latchup.3
:field pdu_x3_latchup_gyro: packet.payload.pdu_x3_latchup.4
:field pdu_x3_latchup_met_sel: packet.payload.pdu_x3_latchup.5
:field pdu_x3_latchup_inf_12v: packet.payload.pdu_x3_latchup.6
:field pdu_x3_latchup_inf_3v: packet.payload.pdu_x3_latchup.7
:field acu_cin_0: packet.payload.acu_cin.0
:field acu_cin_1: packet.payload.acu_cin.1
:field acu_cin_2: packet.payload.acu_cin.2
:field acu_cin_3: packet.payload.acu_cin.3
:field acu_cin_4: packet.payload.acu_cin.4
:field acu_cin_5: packet.payload.acu_cin.5
:field acu_vin_0: packet.payload.acu_vin.0
:field acu_vin_1: packet.payload.acu_vin.1
:field acu_vin_2: packet.payload.acu_vin.2
:field acu_vin_3: packet.payload.acu_vin.3
:field acu_vin_4: packet.payload.acu_vin.4
:field acu_vin_5: packet.payload.acu_vin.5
:field acu_vbatt: packet.payload.acu_vbatt
:field acu_temp_0: packet.payload.acu_temp.0
:field acu_temp_1: packet.payload.acu_temp.1
:field acu_temp_2: packet.payload.acu_temp.2
:field acu_mppt_mode: packet.payload.acu_mppt_mode
:field acu_vboost_0: packet.payload.acu_vboost.0
:field acu_vboost_1: packet.payload.acu_vboost.1
:field acu_vboost_2: packet.payload.acu_vboost.2
:field acu_vboost_3: packet.payload.acu_vboost.3
:field acu_vboost_4: packet.payload.acu_vboost.4
:field acu_vboost_5: packet.payload.acu_vboost.5
:field acu_bootcause: packet.payload.acu_bootcause
:field acu_boot_cnt: packet.payload.acu_boot_cnt
:field acu_uptime: packet.payload.acu_uptime
:field acu_resetcause: packet.payload.acu_resetcause
:field ant_1_brn: packet.payload_ant_1_brn
:field ant_2_brn: packet.payload_ant_2_brn
:field ant_3_brn: packet.payload_ant_3_brn
:field ant_4_brn: packet.payload_ant_4_brn
:field ant_1_rel: packet.payload_ant_1_rel
:field ant_2_rel: packet.payload_ant_2_rel
:field ant_3_rel: packet.payload_ant_3_rel
:field ant_4_rel: packet.payload_ant_4_rel
:field dsp_1_brn: packet.payload_dsp_1_brn
:field dsp_2_brn: packet.payload_dsp_2_brn
:field dsp_1_rel: packet.payload_dsp_1_rel
:field dsp_2_rel: packet.payload_dsp_2_rel
:field extmag_x: packet.payload.extmag.0
:field extmag_y: packet.payload.extmag.1
:field extmag_z: packet.payload.extmag.2
:field extmag_temp: packet.payload.extmag_temp
:field extmag_valid: packet.payload.extmag_valid
:field suns_xneg: packet.payload.suns.0
:field suns_yneg: packet.payload.suns.1
:field suns_xpos: packet.payload.suns.3
:field suns_ypos: packet.payload.suns.4
:field suns_zpos: packet.payload.suns.5
:field suns_temp_xneg: packet.payload.suns_temp.0
:field suns_temp_yneg: packet.payload.suns_temp.1
:field suns_temp_xpos: packet.payload.suns_temp.3
:field suns_temp_ypos: packet.payload.suns_temp.4
:field suns_temp_zpos: packet.payload.suns_temp.5
:field suns_valid: packet.payload.suns_valid
:field extgyro_x: packet.payload.extgyro.0
:field extgyro_y: packet.payload.extgyro.1
:field extgyro_z: packet.payload.extgyro.2
:field extgyro_temp: packet.payload.extgyro_temp
:field extgyro_valid: packet.payload.extgyro_valid
:field fss_xneg_x: packet.payload.fss.0
:field fss_xneg_y: packet.payload.fss.1
:field fss_xneg_z: packet.payload.fss.2
:field fss_yneg_x: packet.payload.fss.3
:field fss_yneg_y: packet.payload.fss.4
:field fss_yneg_z: packet.payload.fss.5
:field fss_zneg_x: packet.payload.fss.6
:field fss_zneg_y: packet.payload.fss.7
:field fss_zneg_z: packet.payload.fss.8
:field fss_xpos_x: packet.payload.fss.9
:field fss_xpos_y: packet.payload.fss.10
:field fss_xpos_z: packet.payload.fss.11
:field fss_ypos_x: packet.payload.fss.12
:field fss_ypos_y: packet.payload.fss.13
:field fss_ypos_z: packet.payload.fss.14
:field fss_temp: packet.payload.fss_temp
:field fss_valid_xneg: packet.payload.fss_valid.0
:field fss_valid_yneg: packet.payload.fss_valid.1
:field fss_valid_zneg: packet.payload.fss_valid.2
:field fss_valid_xpos: packet.payload.fss_valid.3
:field fss_valid_ypos: packet.payload.fss_valid.4
:field gps_pos_x: packet.payload.gps_pos.0
:field gps_pos_y: packet.payload.gps_pos.1
:field gps_pos_z: packet.payload.gps_pos.2
:field gps_vel_x: packet.payload.gps_vel.0
:field gps_vel_y: packet.payload.gps_vel.1
:field gps_vel_z: packet.payload.gps_vel.2
:field gps_epoch: packet.payload.gps_epoch
:field gps_valid: packet.payload.gps_valid
:field gps_sat: packet.payload.gps_sat
:field gps_satsol: packet.payload.gps_satsol
:field pps_unix: packet.payload.pps_unix
:field wheel_torque_0: packet.payload.wheel_torque.0
:field wheel_torque_1: packet.payload.wheel_torque.1
:field wheel_torque_2: packet.payload.wheel_torque.2
:field wheel_torque_3: packet.payload.wheel_torque.3
:field wheel_momentum_0: packet.payload.wheel_momentum.0
:field wheel_momentum_1: packet.payload.wheel_momentum.1
:field wheel_momentum_2: packet.payload.wheel_momentum.2
:field wheel_momentum_3: packet.payload.wheel_momentum.3
:field wheel_speed_0: packet.payload.wheel_speed.0
:field wheel_speed_1: packet.payload.wheel_speed.1
:field wheel_speed_2: packet.payload.wheel_speed.2
:field wheel_speed_3: packet.payload.wheel_speed.3
:field wheel_enable_0: packet.payload.wheel_enable.0
:field wheel_enable_1: packet.payload.wheel_enable.1
:field wheel_enable_2: packet.payload.wheel_enable.2
:field wheel_enable_3: packet.payload.wheel_enable.3
:field wheel_current_0: packet.payload.wheel_current.0
:field wheel_current_1: packet.payload.wheel_current.1
:field wheel_current_2: packet.payload.wheel_current.2
:field wheel_current_3: packet.payload.wheel_current.3
:field torquer_duty_x: packet.payload.torquer_duty.0
:field torquer_duty_y: packet.payload.torquer_duty.1
:field torquer_duty_z: packet.payload.torquer_duty.2
:field torquer_calib_x: packet.payload.torquer_calib.0
:field torquer_calib_y: packet.payload.torquer_calib.1
:field torquer_calib_z: packet.payload.torquer_calib.2
:field acs_mode: packet.payload.acs_mode
:field acs_dmode: packet.payload.acs_dmode
:field ads_mode: packet.payload.ads_mode
:field ads_dmode: packet.payload.ads_dmode
:field ephem_mode: packet.payload.ephem_mode
:field ephem_dmode: packet.payload.ephem_dmode
:field spin_mode: packet.payload.spin_mode
:field status_mag: packet.payload.status_mag
:field status_extmag: packet.payload.status_extmag
:field status_css: packet.payload.status_css
:field status_fss_xneg: packet.payload.status_fss.0
:field status_fss_yneg: packet.payload.status_fss.1
:field status_fss_zneg: packet.payload.status_fss.2
:field status_fss_xpos: packet.payload.status_fss.3
:field status_fss_ypos: packet.payload.status_fss.4
:field status_gyro: packet.payload.status_gyro
:field status_extgyro: packet.payload.status_extgyro
:field status_gps: packet.payload.status_gps
:field status_bdot: packet.payload.status_bdot
:field status_ukf: packet.payload.status_ukf
:field status_etime: packet.payload.status_etime
:field status_ephem: packet.payload.status_ephem
:field status_run: packet.payload.status_run
:field looptime: packet.payload.looptime
:field max_looptime: packet.payload.max_looptime
:field bdot_rate_filter1: packet.payload.bdot_rate.0
:field bdot_rate_filter2: packet.payload.bdot_rate.1
:field bdot_dmag_x: packet.payload.bdot_dmag.0
:field bdot_dmag_y: packet.payload.bdot_dmag.1
:field bdot_dmag_z: packet.payload.bdot_dmag.2
:field bdot_torquer_x: packet.payload.bdot_torquer.0
:field bdot_torquer_y: packet.payload.bdot_torquer.1
:field bdot_torquer_z: packet.payload.bdot_torquer.2
:field bdot_detumble: packet.payload.bdot_detumble
:field ukf_x_0: packet.payload.ukf_x.0
:field ukf_x_1: packet.payload.ukf_x.1
:field ukf_x_2: packet.payload.ukf_x.2
:field ukf_x_3: packet.payload.ukf_x.3
:field ukf_x_4: packet.payload.ukf_x.4
:field ukf_x_5: packet.payload.ukf_x.5
:field ukf_x_6: packet.payload.ukf_x.6
:field ukf_x_7: packet.payload.ukf_x.7
:field ukf_x_8: packet.payload.ukf_x.8
:field ukf_x_9: packet.payload.ukf_x.9
:field ukf_x_10: packet.payload.ukf_x.10
:field ukf_x_11: packet.payload.ukf_x.11
:field ukf_x_12: packet.payload.ukf_x.12
:field ukf_q_0: packet.payload.ukf_q.0
:field ukf_q_1: packet.payload.ukf_q.1
:field ukf_q_2: packet.payload.ukf_q.2
:field ukf_q_3: packet.payload.ukf_q.3
:field ukf_w_0: packet.payload.ukf_w.0
:field ukf_w_1: packet.payload.ukf_w.1
:field ukf_w_2: packet.payload.ukf_w.2
:field ukf_xpred_0: packet.payload.ukf_xpred.0
:field ukf_xpred_1: packet.payload.ukf_xpred.1
:field ukf_xpred_2: packet.payload.ukf_xpred.2
:field ukf_xpred_3: packet.payload.ukf_xpred.3
:field ukf_xpred_4: packet.payload.ukf_xpred.4
:field ukf_xpred_5: packet.payload.ukf_xpred.5
:field ukf_xpred_6: packet.payload.ukf_xpred.6
:field ukf_xpred_7: packet.payload.ukf_xpred.7
:field ukf_xpred_8: packet.payload.ukf_xpred.8
:field ukf_xpred_9: packet.payload.ukf_xpred.9
:field ukf_xpred_10: packet.payload.ukf_xpred.10
:field ukf_xpred_11: packet.payload.ukf_xpred.11
:field ukf_xpred_12: packet.payload.ukf_xpred.12
:field ukf_zpred_0: packet.payload.ukf_zpred.0
:field ukf_zpred_1: packet.payload.ukf_zpred.1
:field ukf_zpred_2: packet.payload.ukf_zpred.2
:field ukf_zpred_3: packet.payload.ukf_zpred.3
:field ukf_zpred_4: packet.payload.ukf_zpred.4
:field ukf_zpred_5: packet.payload.ukf_zpred.5
:field ukf_zpred_6: packet.payload.ukf_zpred.6
:field ukf_zpred_7: packet.payload.ukf_zpred.7
:field ukf_zpred_8: packet.payload.ukf_zpred.8
:field ukf_zpred_9: packet.payload.ukf_zpred.9
:field ukf_zpred_10: packet.payload.ukf_zpred.10
:field ukf_zpred_11: packet.payload.ukf_zpred.11
:field ukf_z_0: packet.payload.ukf_z.0
:field ukf_z_1: packet.payload.ukf_z.1
:field ukf_z_2: packet.payload.ukf_z.2
:field ukf_z_3: packet.payload.ukf_z.3
:field ukf_z_4: packet.payload.ukf_z.4
:field ukf_z_5: packet.payload.ukf_z.5
:field ukf_z_6: packet.payload.ukf_z.6
:field ukf_z_7: packet.payload.ukf_z.7
:field ukf_z_8: packet.payload.ukf_z.8
:field ukf_z_9: packet.payload.ukf_z.9
:field ukf_z_10: packet.payload.ukf_z.10
:field ukf_z_11: packet.payload.ukf_z.11
:field ukf_enable_0: packet.payload.ukf_enable.0
:field ukf_enable_1: packet.payload.ukf_enable.1
:field ukf_enable_2: packet.payload.ukf_enable.2
:field ukf_enable_3: packet.payload.ukf_enable.3
:field ukf_enable_4: packet.payload.ukf_enable.4
:field ukf_enable_5: packet.payload.ukf_enable.5
:field ukf_enable_6: packet.payload.ukf_enable.6
:field ukf_enable_7: packet.payload.ukf_enable.7
:field ukf_enable_8: packet.payload.ukf_enable.8
:field ukf_enable_9: packet.payload.ukf_enable.9
:field ukf_enable_10: packet.payload.ukf_enable.10
:field ukf_enable_11: packet.payload.ukf_enable.11
:field ukf_sunmax_0: packet.payload.ukf_sunmax.0
:field ukf_sunmax_1: packet.payload.ukf_sunmax.1
:field ukf_sunmax_2: packet.payload.ukf_sunmax.2
:field ukf_sunmax_3: packet.payload.ukf_sunmax.3
:field ukf_sunmax_4: packet.payload.ukf_sunmax.4
:field ukf_sunmax_5: packet.payload.ukf_sunmax.5
:field ukf_in_ecl: packet.payload.ukf_in_eclipse
:field ukf_choice: packet.payload.ukf_choice
:field ukf_ctrl_t_0: packet.payload.ukf_ctrl_t.0
:field ukf_ctrl_t_1: packet.payload.ukf_ctrl_t.1
:field ukf_ctrl_t_2: packet.payload.ukf_ctrl_t.2
:field ukf_ctrl_m_0: packet.payload.ukf_ctrl_m.0
:field ukf_ctrl_m_1: packet.payload.ukf_ctrl_m.1
:field ukf_ctrl_m_2: packet.payload.ukf_ctrl_m.2
:field ukf_rate_x: packet.payload.ukf_rate.0
:field ukf_rate_y: packet.payload.ukf_rate.1
:field ukf_rate_z: packet.payload.ukf_rate.2
:field ephem_jdat: packet.payload.ephem_jdat
:field ephem_reci_0: packet.payload.ephem_reci.0
:field ephem_reci_1: packet.payload.ephem_reci.1
:field ephem_reci_2: packet.payload.ephem_reci.2
:field ephem_veci_0: packet.payload.ephem_veci.0
:field ephem_veci_1: packet.payload.ephem_veci.1
:field ephem_veci_2: packet.payload.ephem_veci.2
:field ephem_sun_eci_x: packet.payload.ephem_sun_eci.0
:field ephem_sun_eci_y: packet.payload.ephem_sun_eci.1
:field ephem_sun_eci_z: packet.payload.ephem_sun_eci.2
:field ephem_quat_ie_0: packet.payload.ephem_quat_ie.0
:field ephem_quat_ie_1: packet.payload.ephem_quat_ie.1
:field ephem_quat_ie_2: packet.payload.ephem_quat_ie.2
:field ephem_quat_ie_3: packet.payload.ephem_quat_ie.3
:field ephem_quat_io_0: packet.payload.ephem_quat_io.0
:field ephem_quat_io_1: packet.payload.ephem_quat_io.1
:field ephem_quat_io_2: packet.payload.ephem_quat_io.2
:field ephem_quat_io_3: packet.payload.ephem_quat_io.3
:field ephem_quat_il_0: packet.payload.ephem_quat_il.0
:field ephem_quat_il_1: packet.payload.ephem_quat_il.1
:field ephem_quat_il_2: packet.payload.ephem_quat_il.2
:field ephem_quat_il_3: packet.payload.ephem_quat_il.3
:field ephem_rate_io_x: packet.payload.ephem_rate_io.0
:field ephem_rate_io_y: packet.payload.ephem_rate_io.1
:field ephem_rate_io_z: packet.payload.ephem_rate_io.2
:field ephem_rate_il_x: packet.payload.ephem_rate_il.0
:field ephem_rate_il_y: packet.payload.ephem_rate_il.1
:field ephem_rate_il_z: packet.payload.ephem_rate_il.2
:field ephem_t_eclipse: packet.payload.ephem_t_eclipse
:field ephem_time: packet.payload.ephem_time
:field ads_time: packet.payload.ads_time
:field acs_time: packet.payload.acs_time
:field sens_time: packet.payload.sens_time
:field adcs_swload_cnt1: packet.payload.adcs_swload_cnt1
:field adcs_fs_mounted: packet.payload.adcs_fs_mounted
:field adcs_temp_mcu: packet.payload.adcs_temp_mcu
:field adcs_temp_ram: packet.payload.adcs_temp_ram
:field adcs_resetcause: packet.payload.adcs_resetcause
:field adcs_bootcause: packet.payload.adcs_bootcause
:field adcs_boot_cnt: packet.payload.adcs_boot_cnt
:field adcs_clock: packet.payload.adcs_clock
:field adcs_uptime: packet.payload.adcs_uptime
:field core: packet.payload.core_loaded
:field sector_history_0: packet.payload.sector_history.0
:field sector_history_1: packet.payload.sector_history.1
:field sector_history_2: packet.payload.sector_history.2
:field sector_history_3: packet.payload.sector_history.3
:field sector_history_4: packet.payload.sector_history.4
:field sector_history_5: packet.payload.sector_history.5
:field sector_history_6: packet.payload.sector_history.6
:field sector_history_7: packet.payload.sector_history.7
:field sector_history_8: packet.payload.sector_history.8
:field sector_history_9: packet.payload.sector_history.9
:field sector_history_10: packet.payload.sector_history.10
:field sector_history_11: packet.payload.sector_history.11
:field sector_history_12: packet.payload.sector_history.12
:field sector_history_13: packet.payload.sector_history.13
:field sector_history_14: packet.payload.sector_history.14
:field sector_history_15: packet.payload.sector_history.15
:field mbytes_history_0: packet.payload.mbytes_history.0
:field mbytes_history_1: packet.payload.mbytes_history.1
:field mbytes_history_2: packet.payload.mbytes_history.2
:field mbytes_history_3: packet.payload.mbytes_history.3
:field mbytes_history_4: packet.payload.mbytes_history.4
:field mbytes_history_5: packet.payload.mbytes_history.5
:field mbytes_history_6: packet.payload.mbytes_history.6
:field mbytes_history_7: packet.payload.mbytes_history.7
:field mbytes_history_8: packet.payload.mbytes_history.8
:field mbytes_history_9: packet.payload.mbytes_history.9
:field mbytes_history_10: packet.payload.mbytes_history.10
:field mbytes_history_11: packet.payload.mbytes_history.11
:field mbytes_history_12: packet.payload.mbytes_history.12
:field mbytes_history_13: packet.payload.mbytes_history.13
:field mbytes_history_14: packet.payload.mbytes_history.14
:field mbytes_history_15: packet.payload.mbytes_history.15
:field hdcam_exposure: packet.payload.exposure
:field hdcam_gain: packet.payload.gain
:field chan_ref_lock: packet.payload.chan_ref_lock
:field chan_temp: packet.payload.chan_temp
:field chan_inited: packet.payload.chan_inited
:field chan_written: packet.payload.chan_written
:field chan_rec_status: packet.payload.chan_rec_status
:field chan_req_mbytes: packet.payload.chan_req_mbytes
:field chan_time: packet.payload.chan_time
:field chan_pps_present: packet.payload.chan_pps_present
:field chan_pps_count: packet.payload.chan_pps_count
:field rec_inited: packet.payload.rec_inited
:field rec_written: packet.payload.rec_written
:field rec_rec_status: packet.payload.rec_rec_status
:field rec_req_mbytes: packet.payload.rec_req_mbytes
:field rec_time: packet.payload.rec_time
:field rec_temp: packet.payload.rec_temp
:field trans_inited: packet.payload.trans_inited
:field trans_mbytes_sent: packet.payload.trans_mbytes_sent
:field trans_system_time: packet.payload.trans_system_time
:field mis1_temp: packet.payload.mis1_temp
:field mis1_fsk_incr: packet.payload.mis1_fsk_incr
:field mis1_system_time: packet.payload.mis1_system_time
:field inf_blob: packet.payload.inf_blob
"""
class CoreType(Enum):
channelizer = 0
mission1_fsk = 1
recorder = 2
transmitter = 3
asdr_bsp = 4
failed = 127
none = 255
class MpptType(Enum):
tracking = 1
fixed = 2
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.packet = Catsat.BeaconFrame(self._io, self, self._root)
class Asdr2BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_14_29_22 = Catsat.ElementHeader(self._io, self, self._root)
self.chan_pps_present = self._io.read_u1()
self.chan_pps_count = self._io.read_s4be()
self.hk_14_37_22 = Catsat.ElementHeader(self._io, self, self._root)
self.rec_inited = self._io.read_u1()
self.hk_14_38_22 = Catsat.ElementHeader(self._io, self, self._root)
self.rec_written = self._io.read_f4be()
self.rec_rec_status = self._io.read_u1()
self.rec_req_mbytes = self._io.read_s4be()
self.rec_time = self._io.read_f4be()
self.hk_14_43_22 = Catsat.ElementHeader(self._io, self, self._root)
self.rec_temp = self._io.read_f4be()
self.hk_14_52_22 = Catsat.ElementHeader(self._io, self, self._root)
self.trans_inited = self._io.read_u1()
self.trans_mbytes_sent = self._io.read_f4be()
self.hk_14_53_22 = Catsat.ElementHeader(self._io, self, self._root)
self.trans_system_time = self._io.read_s8be()
self.hk_14_33_22 = Catsat.ElementHeader(self._io, self, self._root)
self.mis1_temp = self._io.read_f4be()
self.hk_14_34_22 = Catsat.ElementHeader(self._io, self, self._root)
self.mis1_fsk_incr = self._io.read_s4be()
self.hk_14_35_22 = Catsat.ElementHeader(self._io, self, self._root)
self.mis1_system_time = self._io.read_s8be()
class CspHeader(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.csp_flags = [None] * (4)
for i in range(4):
self.csp_flags[i] = self._io.read_u1()
@property
def source(self):
if hasattr(self, '_m_source'):
return self._m_source if hasattr(self, '_m_source') else None
self._m_source = ((self.csp_flags[3] >> 1) & 31)
return self._m_source if hasattr(self, '_m_source') else None
@property
def rdp(self):
if hasattr(self, '_m_rdp'):
return self._m_rdp if hasattr(self, '_m_rdp') else None
self._m_rdp = ((self.csp_flags[0] >> 1) & 1)
return self._m_rdp if hasattr(self, '_m_rdp') else None
@property
def src_port(self):
if hasattr(self, '_m_src_port'):
return self._m_src_port if hasattr(self, '_m_src_port') else None
self._m_src_port = (self.csp_flags[1] & 63)
return self._m_src_port if hasattr(self, '_m_src_port') else None
@property
def destination(self):
if hasattr(self, '_m_destination'):
return self._m_destination if hasattr(self, '_m_destination') else None
self._m_destination = (((self.csp_flags[2] >> 2) | (self.csp_flags[3] << 4)) & 31)
return self._m_destination if hasattr(self, '_m_destination') else None
@property
def dst_port(self):
if hasattr(self, '_m_dst_port'):
return self._m_dst_port if hasattr(self, '_m_dst_port') else None
self._m_dst_port = (((self.csp_flags[1] >> 6) | (self.csp_flags[2] << 2)) & 63)
return self._m_dst_port if hasattr(self, '_m_dst_port') else None
@property
def priority(self):
if hasattr(self, '_m_priority'):
return self._m_priority if hasattr(self, '_m_priority') else None
self._m_priority = (self.csp_flags[3] >> 6)
return self._m_priority if hasattr(self, '_m_priority') else None
@property
def reserved(self):
if hasattr(self, '_m_reserved'):
return self._m_reserved if hasattr(self, '_m_reserved') else None
self._m_reserved = (self.csp_flags[0] >> 4)
return self._m_reserved if hasattr(self, '_m_reserved') else None
@property
def xtea(self):
if hasattr(self, '_m_xtea'):
return self._m_xtea if hasattr(self, '_m_xtea') else None
self._m_xtea = ((self.csp_flags[0] >> 2) & 1)
return self._m_xtea if hasattr(self, '_m_xtea') else None
@property
def hmac(self):
if hasattr(self, '_m_hmac'):
return self._m_hmac if hasattr(self, '_m_hmac') else None
self._m_hmac = ((self.csp_flags[0] >> 3) & 1)
return self._m_hmac if hasattr(self, '_m_hmac') else None
@property
def crc(self):
if hasattr(self, '_m_crc'):
return self._m_crc if hasattr(self, '_m_crc') else None
self._m_crc = (self.csp_flags[0] & 1)
return self._m_crc if hasattr(self, '_m_crc') else None
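# CSP header unpacking, as implemented by the properties above: the four
# raw bytes are combined little-end-first, e.g. `dst_port` takes the top
# two bits of csp_flags[1] plus the low four bits of csp_flags[2], and
# `priority` is the top two bits of csp_flags[3].
# Worked example: csp_flags == [0x01, 0x2A, 0x00, 0x80] decodes to crc=1,
# src_port=42, priority=2, and every other field 0.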
class ObcBcnMed(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_1_4_3 = Catsat.ElementHeader(self._io, self, self._root)
self.obc_fs_mnted = self._io.read_u1()
self.obc_temp_ram = self._io.read_s2be()
self.obc_resetcause = self._io.read_u4be()
self.obc_bootcause = self._io.read_u4be()
self.obc_uptime = self._io.read_u4be()
self.hk_1_91_3 = Catsat.ElementHeader(self._io, self, self._root)
self.batt_charge = self._io.read_u2be()
self.batt_dcharge = self._io.read_u2be()
self.batt_heater = self._io.read_u2be()
self.batt_temp2 = self._io.read_s2be()
self.batt_temp3 = self._io.read_s2be()
self.batt_temp4 = self._io.read_s2be()
self.batt_bootcause = self._io.read_u1()
self.hk_1_94_3 = Catsat.ElementHeader(self._io, self, self._root)
self.sat_temps = [None] * (6)
for i in range(6):
self.sat_temps[i] = self._io.read_f4be()
self.hk_5_0_3 = Catsat.ElementHeader(self._io, self, self._root)
self.ax100_reboot_in = self._io.read_u2be()
self.ax100_tx_inhibit = self._io.read_u4be()
self.hk_5_1_3 = Catsat.ElementHeader(self._io, self, self._root)
self.ax100_rx_freq = self._io.read_u4be()
self.ax100_rx_baud = self._io.read_u4be()
self.hk_5_4_3 = Catsat.ElementHeader(self._io, self, self._root)
self.ax100_temp_pa = self._io.read_s2be()
self.ax100_last_rssi = self._io.read_s2be()
self.ax100_last_rferr = self._io.read_s2be()
self.ax100_active_conf = self._io.read_u1()
self.ax100_bootcause = self._io.read_u2be()
self.ax100_bgnd_rssi = self._io.read_s2be()
self.ax100_tx_duty = self._io.read_u1()
self.hk_5_5_3 = Catsat.ElementHeader(self._io, self, self._root)
self.ax100_tx_freq = self._io.read_u4be()
self.ax100_tx_baud = self._io.read_u4be()
class Adcs2BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_4_150_12 = Catsat.ElementHeader(self._io, self, self._root)
self.wheel_torque = [None] * (4)
for i in range(4):
self.wheel_torque[i] = self._io.read_f4be()
self.wheel_momentum = [None] * (4)
for i in range(4):
self.wheel_momentum[i] = self._io.read_f4be()
self.wheel_speed = [None] * (4)
for i in range(4):
self.wheel_speed[i] = self._io.read_f4be()
self.wheel_enable = [None] * (4)
for i in range(4):
self.wheel_enable[i] = self._io.read_u1()
self.wheel_current = [None] * (4)
for i in range(4):
self.wheel_current[i] = self._io.read_u2be()
self.wheel_temp = [None] * (4)
for i in range(4):
self.wheel_temp[i] = self._io.read_s2be()
self.torquer_duty = [None] * (3)
for i in range(3):
self.torquer_duty[i] = self._io.read_f4be()
self.torquer_calib = [None] * (3)
for i in range(3):
self.torquer_calib[i] = self._io.read_f4be()
self.hk_4_151_12 = Catsat.ElementHeader(self._io, self, self._root)
self.acs_mode = self._io.read_s1()
self.acs_dmode = self._io.read_s1()
self.ads_mode = self._io.read_s1()
self.ads_dmode = self._io.read_s1()
self.ephem_mode = self._io.read_s1()
self.ephem_dmode = self._io.read_s1()
self.spin_mode = self._io.read_s1()
self.status_mag = self._io.read_s1()
self.status_extmag = self._io.read_s1()
self.status_css = self._io.read_s1()
self.status_fss = [None] * (5)
for i in range(5):
self.status_fss[i] = self._io.read_s1()
self.status_gyro = self._io.read_s1()
self.status_extgyro = self._io.read_s1()
self.status_gps = self._io.read_s1()
self.status_bdot = self._io.read_s1()
self.status_ukf = self._io.read_s1()
self.status_etime = self._io.read_s1()
self.status_ephem = self._io.read_s1()
self.status_run = self._io.read_s1()
self.looptime = self._io.read_s2be()
self.max_looptime = self._io.read_s2be()
self.bdot_rate = [None] * (2)
for i in range(2):
self.bdot_rate[i] = self._io.read_f4be()
self.bdot_dmag = [None] * (3)
for i in range(3):
self.bdot_dmag[i] = self._io.read_f4be()
self.bdot_torquer = [None] * (3)
for i in range(3):
self.bdot_torquer[i] = self._io.read_f4be()
self.bdot_detumble = self._io.read_u1()
class Pdu2BcnMed(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_10_4_5 = Catsat.ElementHeader(self._io, self, self._root)
self.pdu_x3_vout = [None] * (9)
for i in range(9):
self.pdu_x3_vout[i] = self._io.read_s2be()
self.pdu_x3_temp = self._io.read_s2be()
self.pdu_x3_out_en = [None] * (9)
for i in range(9):
self.pdu_x3_out_en[i] = self._io.read_u1()
self.pdu_x3_bootcause = self._io.read_u4be()
self.pdu_x3_boot_cnt = self._io.read_u4be()
self.pdu_x3_uptime = self._io.read_u4be()
self.pdu_x3_resetcause = self._io.read_u2be()
self.pdu_x3_latchup = [None] * (9)
for i in range(9):
self.pdu_x3_latchup[i] = self._io.read_u2be()
self.hk_11_4_5 = Catsat.ElementHeader(self._io, self, self._root)
self.acu_cin = [None] * (6)
for i in range(6):
self.acu_cin[i] = self._io.read_s2be()
self.acu_vin = [None] * (6)
for i in range(6):
self.acu_vin[i] = self._io.read_u2be()
self.acu_vbatt = self._io.read_u2be()
self.acu_temp = [None] * (3)
for i in range(3):
self.acu_temp[i] = self._io.read_s2be()
self.acu_mppt_mode = KaitaiStream.resolve_enum(Catsat.MpptType, self._io.read_u1())
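# 1 = tracking, 2 = fixed, per the MpptType enum at the top of the class.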
self.acu_vboost = [None] * (6)
for i in range(6):
self.acu_vboost[i] = self._io.read_u2be()
self.acu_bootcause = self._io.read_u4be()
self.acu_boot_cnt = self._io.read_u4be()
self.acu_uptime = self._io.read_u4be()
self.acu_resetcause = self._io.read_u2be()
class BeaconFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.header = Catsat.Header(self._io, self, self._root)
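# The one-byte `type` field of the header selects the payload layout:
# 0=MOTD, 1-2=critical HK, 3=OBC, 4-5=PDU, 6=deployment, 11-16=ADCS 1-6,
# 21-22=ASDR 1-2, 93=info blob (see the corresponding classes).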
_on = self.header.type
if _on == 93:
self.payload = Catsat.BcnInf(self._io, self, self._root)
elif _on == 14:
self.payload = Catsat.Adcs4BcnLow(self._io, self, self._root)
elif _on == 0:
self.payload = Catsat.MotdBcn(self._io, self, self._root)
elif _on == 4:
self.payload = Catsat.Pdu1BcnMed(self._io, self, self._root)
elif _on == 6:
self.payload = Catsat.DepBcnLow(self._io, self, self._root)
elif _on == 1:
self.payload = Catsat.Crit1BcnHigh(self._io, self, self._root)
elif _on == 13:
self.payload = Catsat.Adcs3BcnLow(self._io, self, self._root)
elif _on == 11:
self.payload = Catsat.Adcs1BcnLow(self._io, self, self._root)
elif _on == 12:
self.payload = Catsat.Adcs2BcnLow(self._io, self, self._root)
elif _on == 3:
self.payload = Catsat.ObcBcnMed(self._io, self, self._root)
elif _on == 5:
self.payload = Catsat.Pdu2BcnMed(self._io, self, self._root)
elif _on == 15:
self.payload = Catsat.Adcs5BcnLow(self._io, self, self._root)
elif _on == 21:
self.payload = Catsat.Asdr1BcnLow(self._io, self, self._root)
elif _on == 16:
self.payload = Catsat.Adcs6BcnLow(self._io, self, self._root)
elif _on == 2:
self.payload = Catsat.Crit2BcnHigh(self._io, self, self._root)
elif _on == 22:
self.payload = Catsat.Asdr2BcnLow(self._io, self, self._root)
class BcnInf(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_1_93_93 = Catsat.ElementHeader(self._io, self, self._root)
self.inf_blob = [None] * (42)
for i in range(42):
self.inf_blob[i] = self._io.read_u1()
class Adcs4BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_4_152_14 = Catsat.ElementHeader(self._io, self, self._root)
self.ukf_z = [None] * (12)
for i in range(12):
self.ukf_z[i] = self._io.read_f4be()
self.ukf_enable = [None] * (12)
for i in range(12):
self.ukf_enable[i] = self._io.read_u1()
self.ukf_sunmax = [None] * (6)
for i in range(6):
self.ukf_sunmax[i] = self._io.read_f4be()
self.ukf_in_eclipse = self._io.read_u1()
self.ukf_choice = self._io.read_u1()
self.ukf_ctrl_t = [None] * (3)
for i in range(3):
self.ukf_ctrl_t[i] = self._io.read_f4be()
self.ukf_ctrl_m = [None] * (3)
for i in range(3):
self.ukf_ctrl_m[i] = self._io.read_f4be()
self.ukf_rate = [None] * (3)
for i in range(3):
self.ukf_rate[i] = self._io.read_f4be()
class Adcs6BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_4_1_16 = Catsat.ElementHeader(self._io, self, self._root)
self.adcs_swload_cnt1 = self._io.read_u2be()
self.hk_4_4_16 = Catsat.ElementHeader(self._io, self, self._root)
self.adcs_fs_mounted = self._io.read_u1()
self.adcs_temp_mcu = self._io.read_s2be()
self.adcs_temp_ram = self._io.read_s2be()
self.adcs_resetcause = self._io.read_u4be()
self.adcs_bootcause = self._io.read_u4be()
self.adcs_boot_cnt = self._io.read_u2be()
self.adcs_clock = self._io.read_u4be()
self.adcs_uptime = self._io.read_u4be()
class Adcs5BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_4_153_15 = Catsat.ElementHeader(self._io, self, self._root)
self.ephem_jdat = self._io.read_f8be()
self.ephem_reci = [None] * (3)
for i in range(3):
self.ephem_reci[i] = self._io.read_f4be()
self.ephem_veci = [None] * (3)
for i in range(3):
self.ephem_veci[i] = self._io.read_f4be()
self.ephem_sun_eci = [None] * (3)
for i in range(3):
self.ephem_sun_eci[i] = self._io.read_f4be()
self.ephem_quat_ie = [None] * (4)
for i in range(4):
self.ephem_quat_ie[i] = self._io.read_f4be()
self.ephem_quat_io = [None] * (4)
for i in range(4):
self.ephem_quat_io[i] = self._io.read_f4be()
self.ephem_quat_il = [None] * (4)
for i in range(4):
self.ephem_quat_il[i] = self._io.read_f4be()
self.ephem_rate_io = [None] * (3)
for i in range(3):
self.ephem_rate_io[i] = self._io.read_f4be()
self.ephem_rate_il = [None] * (3)
for i in range(3):
self.ephem_rate_il[i] = self._io.read_f4be()
self.ephem_t_eclipse = self._io.read_s4be()
self.hk_4_156_15 = Catsat.ElementHeader(self._io, self, self._root)
self.ephem_time = self._io.read_u4be()
self.ads_time = self._io.read_u4be()
self.acs_time = self._io.read_u4be()
self.sens_time = self._io.read_u4be()
class MotdBcn(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_1_95 = Catsat.ElementHeader(self._io, self, self._root)
self.callsign = (KaitaiStream.bytes_terminate(self._io.read_bytes(8), 0, False)).decode(u"ASCII")
self.motd = (KaitaiStream.bytes_terminate(self._io.read_bytes(80), 0, False)).decode(u"ASCII")
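# Both fields above are fixed-size, NUL-terminated ASCII: 8 bytes of
# callsign followed by 80 bytes of MOTD.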
class Adcs1BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_4_150_11 = Catsat.ElementHeader(self._io, self, self._root)
self.extmag = [None] * (3)
for i in range(3):
self.extmag[i] = self._io.read_f4be()
self.extmag_temp = self._io.read_f4be()
self.extmag_valid = self._io.read_u1()
self.suns = [None] * (6)
for i in range(6):
self.suns[i] = self._io.read_f4be()
self.suns_valid = self._io.read_u1()
self.suns_temp = [None] * (6)
for i in range(6):
self.suns_temp[i] = self._io.read_s2be()
self.extgyro = [None] * (3)
for i in range(3):
self.extgyro[i] = self._io.read_f4be()
self.extgyro_temp = self._io.read_f4be()
self.extgyro_valid = self._io.read_u1()
self.fss = [None] * (16)
for i in range(16):
self.fss[i] = self._io.read_f4be()
self.fss_temp = self._io.read_f4be()
self.fss_valid = [None] * (5)
for i in range(5):
self.fss_valid[i] = self._io.read_u1()
self.gps_pos = [None] * (3)
for i in range(3):
self.gps_pos[i] = self._io.read_f4be()
self.gps_vel = [None] * (3)
for i in range(3):
self.gps_vel[i] = self._io.read_f4be()
self.gps_epoch = self._io.read_u4be()
self.gps_valid = self._io.read_u1()
self.gps_sat = self._io.read_u1()
self.gps_satsol = self._io.read_u1()
self.pps_unix = self._io.read_u4be()
class DepBcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_1_96_6 = Catsat.ElementHeader(self._io, self, self._root)
self.ant_1_brn = self._io.read_s2be()
self.ant_2_brn = self._io.read_s2be()
self.ant_3_brn = self._io.read_s2be()
self.ant_4_brn = self._io.read_s2be()
self.ant_1_rel = self._io.read_s1()
self.ant_2_rel = self._io.read_s1()
self.ant_3_rel = self._io.read_s1()
self.ant_4_rel = self._io.read_s1()
self.dsp_1_brn = self._io.read_s2be()
self.dsp_2_brn = self._io.read_s2be()
self.dsp_1_rel = self._io.read_s1()
self.dsp_2_rel = self._io.read_s1()
class ElementHeader(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.checksum = self._io.read_u2be()
self.timestamp = self._io.read_u4be()
self.source = self._io.read_u2be()
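# Each housekeeping block inside a beacon is prefixed by one of these
# 8-byte element headers (u2 checksum, u4 timestamp, u2 source). The
# hk_*_*_* attribute names above (e.g. hk_4_150_12) appear to encode
# source/table/beacon-type numbers; that reading is an inference from the
# naming convention, not something stated in this file.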
class Adcs3BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_4_152_13 = Catsat.ElementHeader(self._io, self, self._root)
self.ukf_x = [None] * (13)
for i in range(13):
self.ukf_x[i] = self._io.read_f4be()
self.ukf_q = [None] * (4)
for i in range(4):
self.ukf_q[i] = self._io.read_f4be()
self.ukf_w = [None] * (3)
for i in range(3):
self.ukf_w[i] = self._io.read_f4be()
self.ukf_xpred = [None] * (13)
for i in range(13):
self.ukf_xpred[i] = self._io.read_f4be()
self.ukf_zpred = [None] * (12)
for i in range(12):
self.ukf_zpred[i] = self._io.read_f4be()
class Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.csp_header = Catsat.CspHeader(self._io, self, self._root)
self.protocol_version = self._io.read_u1()
self.type = self._io.read_u1()
self.version = self._io.read_u1()
self.satid = self._io.read_u2be()
class Pdu1BcnMed(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_8_4_4 = Catsat.ElementHeader(self._io, self, self._root)
self.p60_cout = [None] * (13)
for i in range(13):
self.p60_cout[i] = self._io.read_s2be()
self.p60_out_en = [None] * (13)
for i in range(13):
self.p60_out_en[i] = self._io.read_u1()
self.p60_temp = [None] * (2)
for i in range(2):
self.p60_temp[i] = self._io.read_s2be()
self.p60_bootcause = self._io.read_u4be()
self.p60_uptime = self._io.read_u4be()
self.p60_resetcause = self._io.read_u2be()
self.p60_latchup = [None] * (13)
for i in range(13):
self.p60_latchup[i] = self._io.read_u2be()
self.p60_vcc_c = self._io.read_s2be()
self.p60_batt_v = self._io.read_u2be()
self.p60_dearm_status = self._io.read_u1()
self.p60_wdt_cnt_gnd = self._io.read_u4be()
self.p60_wdt_cnt_can = self._io.read_u4be()
self.p60_wdt_cnt_left = self._io.read_u4be()
self.p60_batt_chrg = self._io.read_s2be()
self.p60_batt_dchrg = self._io.read_s2be()
self.ant6_depl = self._io.read_s1()
self.ar6_depl = self._io.read_s1()
self.hk_9_4_4 = Catsat.ElementHeader(self._io, self, self._root)
self.pdu_x2_vout = [None] * (9)
for i in range(9):
self.pdu_x2_vout[i] = self._io.read_s2be()
self.pdu_x2_temp = self._io.read_s2be()
self.pdu_x2_out_en = [None] * (9)
for i in range(9):
self.pdu_x2_out_en[i] = self._io.read_u1()
self.pdu_x2_bootcause = self._io.read_u4be()
self.pdu_x2_boot_cnt = self._io.read_u4be()
self.pdu_x2_uptime = self._io.read_u4be()
self.pdu_x2_resetcause = self._io.read_u2be()
self.pdu_x2_latchup = [None] * (9)
for i in range(9):
self.pdu_x2_latchup[i] = self._io.read_u2be()
class Crit2BcnHigh(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_10_4_2 = Catsat.ElementHeader(self._io, self, self._root)
self.pdu_x3_cout = [None] * (9)
for i in range(9):
self.pdu_x3_cout[i] = self._io.read_s2be()
self.hk_11_4_2 = Catsat.ElementHeader(self._io, self, self._root)
self.acu_power = [None] * (6)
for i in range(6):
self.acu_power[i] = self._io.read_u2be()
self.hk_4_4_2 = Catsat.ElementHeader(self._io, self, self._root)
self.adcs_boot_cnt = self._io.read_u2be()
self.adcs_clock = self._io.read_u4be()
self.hk_4_150_2 = Catsat.ElementHeader(self._io, self, self._root)
self.extgyro = [None] * (3)
for i in range(3):
self.extgyro[i] = self._io.read_f4be()
self.gps_pos = [None] * (3)
for i in range(3):
self.gps_pos[i] = self._io.read_f4be()
self.gps_vel = [None] * (3)
for i in range(3):
self.gps_vel[i] = self._io.read_f4be()
self.hk_4_151_2 = Catsat.ElementHeader(self._io, self, self._root)
self.acs_mode = self._io.read_s1()
self.status_extmag = self._io.read_s1()
self.status_fss = [None] * (5)
for i in range(5):
self.status_fss[i] = self._io.read_s1()
self.status_extgyro = self._io.read_s1()
self.status_gps = self._io.read_s1()
class Crit1BcnHigh(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_1_4_1 = Catsat.ElementHeader(self._io, self, self._root)
self.obc_temp_mcu = self._io.read_s2be()
self.obc_boot_cnt = self._io.read_u2be()
self.obc_clock = self._io.read_u4be()
self.hk_1_91 = Catsat.ElementHeader(self._io, self, self._root)
self.bpx_vbatt = self._io.read_u2be()
self.bpx_temp = self._io.read_s2be()
self.bpx_boot_cnt = self._io.read_u4be()
self.hk_5_4_1 = Catsat.ElementHeader(self._io, self, self._root)
self.ax100_temp_brd = self._io.read_s2be()
self.ax100_boot_cnt = self._io.read_u2be()
self.ax100_last_contact = self._io.read_u4be()
self.hk_8_4_1 = Catsat.ElementHeader(self._io, self, self._root)
self.p60_boot_cnt = self._io.read_u4be()
self.p60_batt_mode = self._io.read_u1()
self.p60_batt_v = self._io.read_u2be()
self.p60_batt_c = self._io.read_s2be()
self.hk_9_4 = Catsat.ElementHeader(self._io, self, self._root)
self.pdu_x2_cout = [None] * (9)
for i in range(9):
self.pdu_x2_cout[i] = self._io.read_s2be()
class Asdr1BcnLow(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.hk_14_0_21 = Catsat.ElementHeader(self._io, self, self._root)
self.core_loaded = KaitaiStream.resolve_enum(Catsat.CoreType, self._io.read_u1())
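# Raw byte resolved against the CoreType enum (0=channelizer, 1=mission1_fsk,
# 2=recorder, 3=transmitter, 4=asdr_bsp, 127=failed, 255=none).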
self.hk_14_1_21 = Catsat.ElementHeader(self._io, self, self._root)
self.sector_history = [None] * (16)
for i in range(16):
self.sector_history[i] = self._io.read_u2be()
self.mbytes_history = [None] * (16)
for i in range(16):
self.mbytes_history[i] = self._io.read_u2be()
self.exposure = self._io.read_u4be()
self.gain = self._io.read_f4be()
self.hk_14_12_21 = Catsat.ElementHeader(self._io, self, self._root)
self.chan_ref_lock = self._io.read_u1()
self.hk_14_13_21 = Catsat.ElementHeader(self._io, self, self._root)
self.chan_temp = self._io.read_f4be()
self.hk_14_16_21 = Catsat.ElementHeader(self._io, self, self._root)
self.chan_inited = self._io.read_u1()
self.hk_14_18_21 = Catsat.ElementHeader(self._io, self, self._root)
self.chan_written = self._io.read_f4be()
self.chan_rec_status = self._io.read_u1()
self.chan_req_mbytes = self._io.read_s4be()
self.chan_time = self._io.read_f4be()
@property
def frame_length(self):
if hasattr(self, '_m_frame_length'):
return self._m_frame_length if hasattr(self, '_m_frame_length') else None
self._m_frame_length = self._io.size()
return self._m_frame_length if hasattr(self, '_m_frame_length') else None
/satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/catsat.py
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Origamisat1(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field rpt_callsign: ax25_frame.ax25_header.repeater.rpt_instance[0].rpt_callsign_raw.callsign_ror.callsign
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field last_exec_obc_id: ax25_frame.payload.ax25_info.hk_data.chunk.last_exec_obc_id
:field obc_cmd_status: ax25_frame.payload.ax25_info.hk_data.chunk.obc_cmd_status
:field data_obtained_time_year: ax25_frame.payload.ax25_info.hk_data.chunk.data_obtained_time_year
:field data_obtained_time_month: ax25_frame.payload.ax25_info.hk_data.chunk.data_obtained_time_month
:field data_obtained_time_day: ax25_frame.payload.ax25_info.hk_data.chunk.data_obtained_time_day
:field data_obtained_time_hour: ax25_frame.payload.ax25_info.hk_data.chunk.data_obtained_time_hour
:field data_obtained_time_minute: ax25_frame.payload.ax25_info.hk_data.chunk.data_obtained_time_minute
:field data_obtained_time_second: ax25_frame.payload.ax25_info.hk_data.chunk.data_obtained_time_second
:field battery_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.battery_voltage
:field battery_current: ax25_frame.payload.ax25_info.hk_data.chunk.battery_current
:field bat_status: ax25_frame.payload.ax25_info.hk_data.chunk.bat_status
:field eps_sw_status: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_status
:field eps_bus_status: ax25_frame.payload.ax25_info.hk_data.chunk.eps_bus_status
:field satellite_mode: ax25_frame.payload.ax25_info.hk_data.chunk.satellite_mode
:field sap_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.sap_voltage
:field sap_current: ax25_frame.payload.ax25_info.hk_data.chunk.sap_current
:field sap_1_gen_pwr: ax25_frame.payload.ax25_info.hk_data.chunk.sap_1_gen_pwr
:field sap_2_gen_pwr: ax25_frame.payload.ax25_info.hk_data.chunk.sap_2_gen_pwr
:field sap_3_gen_pwr: ax25_frame.payload.ax25_info.hk_data.chunk.sap_3_gen_pwr
:field sap_4_gen_pwr: ax25_frame.payload.ax25_info.hk_data.chunk.sap_4_gen_pwr
:field sap_5_gen_pwr: ax25_frame.payload.ax25_info.hk_data.chunk.sap_5_gen_pwr
:field sap_2_current: ax25_frame.payload.ax25_info.hk_data.chunk.sap_2_current
:field sap_3_current: ax25_frame.payload.ax25_info.hk_data.chunk.sap_3_current
:field sap_4_current: ax25_frame.payload.ax25_info.hk_data.chunk.sap_4_current
:field sap_5_current: ax25_frame.payload.ax25_info.hk_data.chunk.sap_5_current
:field eps_temp: ax25_frame.payload.ax25_info.hk_data.chunk.eps_temp
:field obc_temp_0: ax25_frame.payload.ax25_info.hk_data.chunk.obc_temp_0
:field obc_temp_1: ax25_frame.payload.ax25_info.hk_data.chunk.obc_temp_1
:field amp_5g8hz_temp: ax25_frame.payload.ax25_info.hk_data.chunk.amp_5g8hz_temp
:field rad_plate_5g8hz_temp: ax25_frame.payload.ax25_info.hk_data.chunk.rad_plate_5g8hz_temp
:field tx_temp: ax25_frame.payload.ax25_info.hk_data.chunk.tx_temp
:field rx_temp: ax25_frame.payload.ax25_info.hk_data.chunk.rx_temp
:field bat_mbrd_temp: ax25_frame.payload.ax25_info.hk_data.chunk.bat_mbrd_temp
:field ci_brd_temp: ax25_frame.payload.ax25_info.hk_data.chunk.ci_brd_temp
:field panel_pos_y: ax25_frame.payload.ax25_info.hk_data.chunk.panel_pos_y
:field panel_pos_x: ax25_frame.payload.ax25_info.hk_data.chunk.panel_pos_x
:field panel_neg_x: ax25_frame.payload.ax25_info.hk_data.chunk.panel_neg_x
:field obc_gpu_temp: ax25_frame.payload.ax25_info.hk_data.chunk.obc_gpu_temp
:field panel_neg_y: ax25_frame.payload.ax25_info.hk_data.chunk.panel_neg_y
:field accel_x: ax25_frame.payload.ax25_info.hk_data.chunk.accel_x
:field accel_y: ax25_frame.payload.ax25_info.hk_data.chunk.accel_y
:field accel_z: ax25_frame.payload.ax25_info.hk_data.chunk.accel_z
:field ang_vcty_x: ax25_frame.payload.ax25_info.hk_data.chunk.ang_vcty_x
:field ang_vcty_z: ax25_frame.payload.ax25_info.hk_data.chunk.ang_vcty_z
:field raspi_last_exec: ax25_frame.payload.ax25_info.hk_data.chunk.raspi_last_exec
:field raspi_mode: ax25_frame.payload.ax25_info.hk_data.chunk.raspi_mode
:field eps_sw_1_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_1_volt
:field eps_sw_1_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_1_curr
:field eps_sw_2_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_2_volt
:field eps_sw_2_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_2_curr
:field eps_sw_5_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_5_volt
:field eps_sw_5_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_5_curr
:field eps_sw_6_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_6_volt
:field eps_sw_6_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_6_curr
:field eps_sw_7_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_7_volt
:field eps_sw_7_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_7_curr
:field eps_sw_8_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_8_volt
:field eps_sw_8_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_8_curr
:field eps_sw_9_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_9_volt
:field eps_sw_10_volt: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_10_volt
:field eps_sw_10_curr: ax25_frame.payload.ax25_info.hk_data.chunk.eps_sw_10_curr
:field eps_3v3_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.eps_3v3_voltage
:field eps_3v3_current: ax25_frame.payload.ax25_info.hk_data.chunk.eps_3v3_current
:field eps_5v_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.eps_5v_voltage
:field eps_5v_current: ax25_frame.payload.ax25_info.hk_data.chunk.eps_5v_current
:field eps_12v_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.eps_12v_voltage
:field eps_12v_current: ax25_frame.payload.ax25_info.hk_data.chunk.eps_12v_current
:field bcr_1_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.bcr_1_voltage
:field bcr_2_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.bcr_2_voltage
:field bcr_3_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.bcr_3_voltage
:field pwr5g8hz_12v_voltage: ax25_frame.payload.ax25_info.hk_data.chunk.pwr5g8hz_12v_voltage
Attention: `rpt_callsign` cannot be accessed because `rpt_instance` is an
array whose size is unknown at the beginning of the parsing process! The
declaration above is kept only as an example.
.. seealso::
'http://www.origami.titech.ac.jp/wp/wp-content/uploads/2019/01/OP-S1-0115_FMDownLinkDataFormat_20190118.pdf'
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Origamisat1.Ax25Frame(self._io, self, self._root)
class HkDataChunk3(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ang_vcty_y_b1 = self._io.read_u1()
self.ang_vcty_z = self._io.read_u2be()
self.raspi_last_exec = self._io.read_u1()
self.raspi_mode = self._io.read_u1()
self.eps_sw_1_volt = self._io.read_u2be()
self.eps_sw_1_curr = self._io.read_u2be()
self.eps_sw_2_volt = self._io.read_u2be()
self.eps_sw_2_curr = self._io.read_u2be()
self.eps_sw_5_volt = self._io.read_u2be()
self.eps_sw_5_curr = self._io.read_u2be()
self.eps_sw_6_volt = self._io.read_u2be()
self.eps_sw_6_curr = self._io.read_u2be()
self.eps_sw_7_volt = self._io.read_u2be()
self.eps_sw_7_curr = self._io.read_u2be()
self.eps_sw_8_volt = self._io.read_u2be()
self.eps_sw_8_curr = self._io.read_u2be()
self.eps_sw_9_volt = self._io.read_u2be()
self.eps_sw_9_curr_b0 = self._io.read_u1()
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Origamisat1.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Origamisat1.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Origamisat1.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Origamisat1.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Origamisat1.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Origamisat1.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Origamisat1.IFrame(self._io, self, self._root)
class HkDataChunk1(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.last_exec_obc_id = self._io.read_u1()
self.obc_cmd_status = self._io.read_u1()
self.data_obtained_time_year = self._io.read_u1()
self.data_obtained_time_month = self._io.read_u1()
self.data_obtained_time_day = self._io.read_u1()
self.data_obtained_time_hour = self._io.read_u1()
self.data_obtained_time_minute = self._io.read_u1()
self.data_obtained_time_second = self._io.read_u1()
self.battery_voltage = self._io.read_u2be()
self.battery_current = self._io.read_u2be()
self.bat_status = self._io.read_u1()
self.eps_sw_status = self._io.read_u2be()
self.eps_bus_status = self._io.read_u1()
self.satellite_mode = self._io.read_u1()
self.sap_voltage = self._io.read_u2be()
self.sap_current = self._io.read_u2be()
self.sap_1_gen_pwr = self._io.read_u2be()
self.sap_2_gen_pwr = self._io.read_u2be()
self.sap_3_gen_pwr = self._io.read_u2be()
self.sap_4_gen_pwr = self._io.read_u2be()
self.sap_5_gen_pwr = self._io.read_u2be()
self.sap_1_current_b0 = self._io.read_u1()
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Origamisat1.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Origamisat1.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Origamisat1.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Origamisat1.SsidMask(self._io, self, self._root)
if (self.src_ssid_raw.ssid_mask & 1) == 0:
self.repeater = Origamisat1.Repeater(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
_on = self._io.size()
if _on == 48:
self.ax25_info = Origamisat1.EchoBack(self._io, self, self._root)
elif _on == 51:
self.ax25_info = Origamisat1.HkOrMissionData(self._io, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class HkDataChunk4(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.eps_sw_9_curr_b1 = self._io.read_u1()
self.eps_sw_10_volt = self._io.read_u2be()
self.eps_sw_10_curr = self._io.read_u2be()
self.eps_3v3_voltage = self._io.read_u2be()
self.eps_3v3_current = self._io.read_u2be()
self.eps_5v_voltage = self._io.read_u2be()
self.eps_5v_current = self._io.read_u2be()
self.eps_12v_voltage = self._io.read_u2be()
self.eps_12v_current = self._io.read_u2be()
self.bcr_1_voltage = self._io.read_u2be()
self.bcr_2_voltage = self._io.read_u2be()
self.bcr_3_voltage = self._io.read_u2be()
self.sap_5_current = self._io.read_u2be()
self.pwr5g8hz_12v_voltage = self._io.read_u1()
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self.ax25_info = self._io.read_bytes_full()
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
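# Bit 0 of the SSID byte is the AX.25 address-extension flag; the `ssid`
# property below extracts bits 1-3 via (ssid_mask & 15) >> 1. AX.25 itself
# defines a 4-bit SSID in bits 1-4, so the top SSID bit is dropped here --
# whether that is intentional is not stated in this file.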
@property
def ssid(self):
if hasattr(self, '_m_ssid'):
return self._m_ssid if hasattr(self, '_m_ssid') else None
self._m_ssid = ((self.ssid_mask & 15) >> 1)
return self._m_ssid if hasattr(self, '_m_ssid') else None
class HkOrMissionData(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pkt_num = [None] * (3)
for i in range(3):
self.pkt_num[i] = self._io.read_u1()
if ((self.pkt_num[0] == self.pkt_num[1]) and (self.pkt_num[1] == self.pkt_num[2])):
self.hk_data = Origamisat1.HkData(self._io, self, self._root)
if self.pkt_num[0] != self.pkt_num[1]:
self._raw_mission_data = self._io.read_bytes(32)
_io__raw_mission_data = KaitaiStream(BytesIO(self._raw_mission_data))
self.mission_data = Origamisat1.MissionData(_io__raw_mission_data, self, self._root)
@property
def packet_number(self):
if hasattr(self, '_m_packet_number'):
return self._m_packet_number if hasattr(self, '_m_packet_number') else None
self._m_packet_number = ((self.pkt_num[0] | self.pkt_num[1]) | self.pkt_num[2])
return self._m_packet_number if hasattr(self, '_m_packet_number') else None
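# Chunk reassembly: the 3-byte packet number is sent three times. When all
# three copies agree the frame carries HK data and (packet_number % 4)
# picks one of the four HkDataChunk* layouts (1, 2, 3, then 4 for
# remainder 0); when the first two copies differ, the 32 bytes are parsed
# as mission data instead. Values that straddle a chunk boundary are
# exposed as *_b0/*_b1 byte halves (e.g. ang_vcty_y_b0 in chunk 2,
# ang_vcty_y_b1 in chunk 3); recombining them, e.g. (b0 << 8) | b1, is
# left to the consumer -- the byte order is an assumption, not confirmed
# by this file.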
class HkData(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
_on = (self._parent.packet_number % 4)
if _on == 0:
self._raw_chunk = self._io.read_bytes_full()
_io__raw_chunk = KaitaiStream(BytesIO(self._raw_chunk))
self.chunk = Origamisat1.HkDataChunk4(_io__raw_chunk, self, self._root)
elif _on == 1:
self._raw_chunk = self._io.read_bytes_full()
_io__raw_chunk = KaitaiStream(BytesIO(self._raw_chunk))
self.chunk = Origamisat1.HkDataChunk1(_io__raw_chunk, self, self._root)
elif _on == 3:
self._raw_chunk = self._io.read_bytes_full()
_io__raw_chunk = KaitaiStream(BytesIO(self._raw_chunk))
self.chunk = Origamisat1.HkDataChunk3(_io__raw_chunk, self, self._root)
elif _on == 2:
self._raw_chunk = self._io.read_bytes_full()
_io__raw_chunk = KaitaiStream(BytesIO(self._raw_chunk))
self.chunk = Origamisat1.HkDataChunk2(_io__raw_chunk, self, self._root)
else:
self.chunk = self._io.read_bytes_full()
class Repeaters(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_callsign_raw = Origamisat1.CallsignRaw(self._io, self, self._root)
self.rpt_ssid_raw = Origamisat1.SsidMask(self._io, self, self._root)
class Repeater(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.rpt_instance = []
i = 0
while True:
_ = Origamisat1.Repeaters(self._io, self, self._root)
self.rpt_instance.append(_)
if (_.rpt_ssid_raw.ssid_mask & 1) == 1:
break
i += 1
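# Per AX.25, the digipeater list is open-ended: bit 0 of each repeater
# SSID byte is the address-extension flag, and a 1 marks the final entry,
# which is why the loop terminates on (ssid_mask & 1) == 1.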
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Origamisat1.Callsign(_io__raw_callsign_ror, self, self._root)
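# AX.25 transmits callsign characters shifted left by one bit; rotating
# each byte left by 7 (i.e. right by 1) undoes the shift before the ASCII
# decode in Callsign.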
class HkDataChunk2(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.sap_1_current_b1 = self._io.read_u1()
self.sap_2_current = self._io.read_u2be()
self.sap_3_current = self._io.read_u2be()
self.sap_4_current = self._io.read_u2be()
self.eps_temp = self._io.read_u2be()
self.obc_temp_0 = self._io.read_u1()
self.obc_temp_1 = self._io.read_u1()
self.amp_5g8hz_temp = self._io.read_u1()
self.rad_plate_5g8hz_temp = self._io.read_u1()
self.tx_temp = self._io.read_u1()
self.rx_temp = self._io.read_u1()
self.bat_mbrd_temp = self._io.read_u2be()
self.ci_brd_temp = self._io.read_u1()
self.panel_pos_y = self._io.read_u1()
self.panel_pos_x = self._io.read_u1()
self.panel_neg_x = self._io.read_u1()
self.obc_gpu_temp = self._io.read_u1()
self.panel_neg_y = self._io.read_u1()
self.accel_x = self._io.read_u2be()
self.accel_y = self._io.read_u2be()
self.accel_z = self._io.read_u2be()
self.ang_vcty_x = self._io.read_u2be()
self.ang_vcty_y_b0 = self._io.read_u1()
class EchoBack(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.response_data = self._io.read_bytes(32)
class MissionData(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.unparsed = self._io.read_bytes_full()
/satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/origamisat1.py
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Delfin3xt(KaitaiStruct):
""":field dest_callsign: ax25_frame.ax25_header.dest_callsign_raw.callsign_ror.callsign
:field src_callsign: ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign
:field src_ssid: ax25_frame.ax25_header.src_ssid_raw.ssid
:field dest_ssid: ax25_frame.ax25_header.dest_ssid_raw.ssid
:field ctl: ax25_frame.ax25_header.ctl
:field pid: ax25_frame.payload.pid
:field elapsed_time: ax25_frame.payload.ax25_payload.elapsed_time
:field framecounter: ax25_frame.payload.ax25_payload.framecounter
:field frametype: ax25_frame.payload.ax25_payload.frametype
:field boot_counter: ax25_frame.payload.ax25_payload.boot_counter
:field ptrx_dv: ax25_frame.payload.ax25_payload.ptrx_dv_raw
:field ptrx_rss: ax25_frame.payload.ax25_payload.ptrx_rss_raw
:field ptrx_rp: ax25_frame.payload.ax25_payload.ptrx_rp_raw
:field ptrx_fp: ax25_frame.payload.ax25_payload.ptrx_fp_raw
:field ptrx_tsc: ax25_frame.payload.ax25_payload.ptrx_tsc_raw
:field ptrx_rsc: ax25_frame.payload.ax25_payload.ptrx_rsc_raw
:field ptrx_pa_temp: ax25_frame.payload.ax25_payload.ptrx_pa_temp_raw
:field ptrx_pbv: ax25_frame.payload.ax25_payload.ptrx_pbv_raw
:field depl_sol_x_minus: ax25_frame.payload.ax25_payload.depl_sol_x_minus
:field depl_sol_x_plus: ax25_frame.payload.ax25_payload.depl_sol_x_plus
:field depl_sol_y_minus: ax25_frame.payload.ax25_payload.depl_sol_y_minus
:field depl_sol_y_plus: ax25_frame.payload.ax25_payload.depl_sol_y_plus
:field depl_ant_x_minus: ax25_frame.payload.ax25_payload.depl_ant_x_minus
:field depl_ant_x_plus: ax25_frame.payload.ax25_payload.depl_ant_x_plus
:field depl_ant_y_minus: ax25_frame.payload.ax25_payload.depl_ant_y_minus
:field depl_ant_y_plus: ax25_frame.payload.ax25_payload.depl_ant_y_plus
:field dab_temp: ax25_frame.payload.ax25_payload.dab_temp
:field eps_bus_current: ax25_frame.payload.ax25_payload.eps_bus_current
:field eps_bus_voltage: ax25_frame.payload.ax25_payload.eps_bus_voltage
:field eps_variable_bus_v: ax25_frame.payload.ax25_payload.eps_variable_bus_v
:field power_status_solar_panel_xpzp: ax25_frame.payload.ax25_payload.power_status_solar_panel_xpzp
:field power_status_solar_panel_xpzm: ax25_frame.payload.ax25_payload.power_status_solar_panel_xpzm
:field power_status_solar_panel_xmzp: ax25_frame.payload.ax25_payload.power_status_solar_panel_xmzp
:field power_status_solar_panel_xmzm: ax25_frame.payload.ax25_payload.power_status_solar_panel_xmzm
:field power_status_solar_panel_ypzp: ax25_frame.payload.ax25_payload.power_status_solar_panel_ypzp
:field power_status_solar_panel_ypzm: ax25_frame.payload.ax25_payload.power_status_solar_panel_ypzm
:field power_status_solar_panel_ymzp: ax25_frame.payload.ax25_payload.power_status_solar_panel_ymzp
:field power_status_solar_panel_ymzm: ax25_frame.payload.ax25_payload.power_status_solar_panel_ymzm
:field oppsp_xpzp_c: ax25_frame.payload.ax25_payload.oppsp_xpzp_c
:field oppsp_xpzm_c: ax25_frame.payload.ax25_payload.oppsp_xpzm_c
:field oppsp_xmzp_c: ax25_frame.payload.ax25_payload.oppsp_xmzp_c
:field oppsp_xmzm_c: ax25_frame.payload.ax25_payload.oppsp_xmzm_c
:field oppsp_ypzp_c: ax25_frame.payload.ax25_payload.oppsp_ypzp_c
:field oppsp_ypzm_c: ax25_frame.payload.ax25_payload.oppsp_ypzm_c
:field oppsp_ymzp_c: ax25_frame.payload.ax25_payload.oppsp_ymzp_c
:field oppsp_ymzm_c: ax25_frame.payload.ax25_payload.oppsp_ymzm_c
:field oppsp_xpzp_v: ax25_frame.payload.ax25_payload.oppsp_xpzp_v
:field oppsp_xpzm_v: ax25_frame.payload.ax25_payload.oppsp_xpzm_v
:field oppsp_xmzp_v: ax25_frame.payload.ax25_payload.oppsp_xmzp_v
:field oppsp_xmzm_v: ax25_frame.payload.ax25_payload.oppsp_xmzm_v
:field oppsp_ypzp_v: ax25_frame.payload.ax25_payload.oppsp_ypzp_v
:field oppsp_ypzm_v: ax25_frame.payload.ax25_payload.oppsp_ypzm_v
:field oppsp_ymzp_v: ax25_frame.payload.ax25_payload.oppsp_ymzp_v
:field oppsp_ymzm_v: ax25_frame.payload.ax25_payload.oppsp_ymzm_v
:field eps_solar_panel_xpzp_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_xpzp_temp
:field eps_solar_panel_xpzm_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_xpzm_temp
:field eps_solar_panel_xmzp_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_xmzp_temp
:field eps_solar_panel_xmzm_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_xmzm_temp
:field eps_solar_panel_ypzp_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_ypzp_temp
:field eps_solar_panel_ypzm_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_ypzm_temp
:field eps_solar_panel_ymzp_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_ymzp_temp
:field eps_solar_panel_ymzm_temp: ax25_frame.payload.ax25_payload.eps_solar_panel_ymzm_temp
:field eps_reg_board_temp: ax25_frame.payload.ax25_payload.eps_reg_board_temp
:field bat1_dod: ax25_frame.payload.ax25_payload.bat1_dod
:field bat1_cc: ax25_frame.payload.ax25_payload.bat1_cc
:field bat1_dc: ax25_frame.payload.ax25_payload.bat1_dc
:field bat1_v: ax25_frame.payload.ax25_payload.bat1_v
:field bat1_temp: ax25_frame.payload.ax25_payload.bat1_temp
:field bat2_dod: ax25_frame.payload.ax25_payload.bat2_dod
:field bat2_cc: ax25_frame.payload.ax25_payload.bat2_cc
:field bat2_dc: ax25_frame.payload.ax25_payload.bat2_dc
:field bat2_v: ax25_frame.payload.ax25_payload.bat2_v
:field bat2_temp: ax25_frame.payload.ax25_payload.bat2_temp
:field bat3_dod: ax25_frame.payload.ax25_payload.bat3_dod
:field bat3_cc: ax25_frame.payload.ax25_payload.bat3_cc
:field bat3_dc: ax25_frame.payload.ax25_payload.bat3_dc
:field bat3_v: ax25_frame.payload.ax25_payload.bat3_v
:field bat3_temp: ax25_frame.payload.ax25_payload.bat3_temp
:field bat4_dod: ax25_frame.payload.ax25_payload.bat4_dod
:field bat4_cc: ax25_frame.payload.ax25_payload.bat4_cc
:field bat4_dc: ax25_frame.payload.ax25_payload.bat4_dc
:field bat4_v: ax25_frame.payload.ax25_payload.bat4_v
:field bat4_temp: ax25_frame.payload.ax25_payload.bat4_temp
:field t3_vc: ax25_frame.payload.ax25_payload.t3_vc
:field t3_ic: ax25_frame.payload.ax25_payload.t3_ic
:field t3_iv: ax25_frame.payload.ax25_payload.t3_iv
:field t3_pt: ax25_frame.payload.ax25_payload.t3_pt
:field t3_mt: ax25_frame.payload.ax25_payload.t3_mt
:field t3_pp_1: ax25_frame.payload.ax25_payload.t3_pp_1
:field t3_pp_2: ax25_frame.payload.ax25_payload.t3_pp_2
:field t3_pp_3: ax25_frame.payload.ax25_payload.t3_pp_3
:field t3_pp_4: ax25_frame.payload.ax25_payload.t3_pp_4
:field t3_pp_5: ax25_frame.payload.ax25_payload.t3_pp_5
:field t3_pp_6: ax25_frame.payload.ax25_payload.t3_pp_6
:field t3_pp_7: ax25_frame.payload.ax25_payload.t3_pp_7
:field t3_pp_8: ax25_frame.payload.ax25_payload.t3_pp_8
:field t3_pp_9: ax25_frame.payload.ax25_payload.t3_pp_9
:field t3_pp_10: ax25_frame.payload.ax25_payload.t3_pp_10
:field t3_pp_11: ax25_frame.payload.ax25_payload.t3_pp_11
:field t3_pp_12: ax25_frame.payload.ax25_payload.t3_pp_12
:field t3_pp_13: ax25_frame.payload.ax25_payload.t3_pp_13
:field t3_pp_14: ax25_frame.payload.ax25_payload.t3_pp_14
:field t3_pp_15: ax25_frame.payload.ax25_payload.t3_pp_15
:field t3_pp_16: ax25_frame.payload.ax25_payload.t3_pp_16
:field t3_pp_17: ax25_frame.payload.ax25_payload.t3_pp_17
:field t3_pp_18: ax25_frame.payload.ax25_payload.t3_pp_18
:field t3_pp_19: ax25_frame.payload.ax25_payload.t3_pp_19
:field t3_pp_20: ax25_frame.payload.ax25_payload.t3_pp_20
:field t3_pp_21: ax25_frame.payload.ax25_payload.t3_pp_21
:field t3_pp_22: ax25_frame.payload.ax25_payload.t3_pp_22
:field t3_pp_23: ax25_frame.payload.ax25_payload.t3_pp_23
:field t3_pp_24: ax25_frame.payload.ax25_payload.t3_pp_24
:field t3_pp_25: ax25_frame.payload.ax25_payload.t3_pp_25
:field t3_pp_26: ax25_frame.payload.ax25_payload.t3_pp_26
:field t3_pp_27: ax25_frame.payload.ax25_payload.t3_pp_27
:field t3_pp_28: ax25_frame.payload.ax25_payload.t3_pp_28
:field t3_pp_29: ax25_frame.payload.ax25_payload.t3_pp_29
:field t3_pp_30: ax25_frame.payload.ax25_payload.t3_pp_30
:field t3_pp_31: ax25_frame.payload.ax25_payload.t3_pp_31
:field t3_pp_32: ax25_frame.payload.ax25_payload.t3_pp_32
:field t3_pp_33: ax25_frame.payload.ax25_payload.t3_pp_33
:field t3_pp_34: ax25_frame.payload.ax25_payload.t3_pp_34
:field t3_pp_35: ax25_frame.payload.ax25_payload.t3_pp_35
:field t3_pp_36: ax25_frame.payload.ax25_payload.t3_pp_36
:field t3_pp_37: ax25_frame.payload.ax25_payload.t3_pp_37
:field t3_pp_38: ax25_frame.payload.ax25_payload.t3_pp_38
:field t3_pp_39: ax25_frame.payload.ax25_payload.t3_pp_39
:field t3_pp_40: ax25_frame.payload.ax25_payload.t3_pp_40
:field t3_pp_41: ax25_frame.payload.ax25_payload.t3_pp_41
:field t3_pp_42: ax25_frame.payload.ax25_payload.t3_pp_42
:field t3_pp_43: ax25_frame.payload.ax25_payload.t3_pp_43
:field t3_pp_44: ax25_frame.payload.ax25_payload.t3_pp_44
:field t3_pp_45: ax25_frame.payload.ax25_payload.t3_pp_45
:field t3_pp_46: ax25_frame.payload.ax25_payload.t3_pp_46
:field t3_pp_47: ax25_frame.payload.ax25_payload.t3_pp_47
:field t3_pp_48: ax25_frame.payload.ax25_payload.t3_pp_48
:field t3_pp_49: ax25_frame.payload.ax25_payload.t3_pp_49
:field t3_pp_50: ax25_frame.payload.ax25_payload.t3_pp_50
:field t3_pp_51: ax25_frame.payload.ax25_payload.t3_pp_51
:field t3_pp_52: ax25_frame.payload.ax25_payload.t3_pp_52
:field t3_pp_53: ax25_frame.payload.ax25_payload.t3_pp_53
:field t3_pp_54: ax25_frame.payload.ax25_payload.t3_pp_54
:field t3_pp_55: ax25_frame.payload.ax25_payload.t3_pp_55
:field t3_pp_56: ax25_frame.payload.ax25_payload.t3_pp_56
:field t3_pp_57: ax25_frame.payload.ax25_payload.t3_pp_57
:field t3_pp_58: ax25_frame.payload.ax25_payload.t3_pp_58
:field t3_pp_59: ax25_frame.payload.ax25_payload.t3_pp_59
:field t3_pp_60: ax25_frame.payload.ax25_payload.t3_pp_60
:field sdm_iv_id: ax25_frame.payload.ax25_payload.sdm_iv_id
:field skip: ax25_frame.payload.ax25_payload.skip
:field sdm_status_cell_temp_ym: ax25_frame.payload.ax25_payload.sdm_status_cell_temp_ym
:field sdm_status_cell_temp_yp: ax25_frame.payload.ax25_payload.sdm_status_cell_temp_yp
:field sdm_iv_curve_c1: ax25_frame.payload.ax25_payload.sdm_iv_curve_c1
:field sdm_iv_curve_c2: ax25_frame.payload.ax25_payload.sdm_iv_curve_c2
:field sdm_iv_curve_c3: ax25_frame.payload.ax25_payload.sdm_iv_curve_c3
:field sdm_iv_curve_c4: ax25_frame.payload.ax25_payload.sdm_iv_curve_c4
:field sdm_iv_curve_c5: ax25_frame.payload.ax25_payload.sdm_iv_curve_c5
:field sdm_iv_curve_c6: ax25_frame.payload.ax25_payload.sdm_iv_curve_c6
:field sdm_iv_curve_c7: ax25_frame.payload.ax25_payload.sdm_iv_curve_c7
:field sdm_iv_curve_c8: ax25_frame.payload.ax25_payload.sdm_iv_curve_c8
:field sdm_iv_curve_v1: ax25_frame.payload.ax25_payload.sdm_iv_curve_v1
:field sdm_iv_curve_v2: ax25_frame.payload.ax25_payload.sdm_iv_curve_v2
:field sdm_iv_curve_v3: ax25_frame.payload.ax25_payload.sdm_iv_curve_v3
:field sdm_iv_curve_v4: ax25_frame.payload.ax25_payload.sdm_iv_curve_v4
:field sdm_iv_curve_v5: ax25_frame.payload.ax25_payload.sdm_iv_curve_v5
:field sdm_iv_curve_v6: ax25_frame.payload.ax25_payload.sdm_iv_curve_v6
:field sdm_iv_curve_v7: ax25_frame.payload.ax25_payload.sdm_iv_curve_v7
:field sdm_iv_curve_v8: ax25_frame.payload.ax25_payload.sdm_iv_curve_v8
:field sdm_cell_temp_ym: ax25_frame.payload.ax25_payload.sdm_cell_temp_ym
:field sdm_cell_temp_yp: ax25_frame.payload.ax25_payload.sdm_cell_temp_yp
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_frame = Delfin3xt.Ax25Frame(self._io, self, self._root)
class Ax25Frame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ax25_header = Delfin3xt.Ax25Header(self._io, self, self._root)
_on = (self.ax25_header.ctl & 19)
if _on == 0:
self.payload = Delfin3xt.IFrame(self._io, self, self._root)
elif _on == 3:
self.payload = Delfin3xt.UiFrame(self._io, self, self._root)
elif _on == 19:
self.payload = Delfin3xt.UiFrame(self._io, self, self._root)
elif _on == 16:
self.payload = Delfin3xt.IFrame(self._io, self, self._root)
elif _on == 18:
self.payload = Delfin3xt.IFrame(self._io, self, self._root)
elif _on == 2:
self.payload = Delfin3xt.IFrame(self._io, self, self._root)
class Ax25Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.dest_callsign_raw = Delfin3xt.CallsignRaw(self._io, self, self._root)
self.dest_ssid_raw = Delfin3xt.SsidMask(self._io, self, self._root)
self.src_callsign_raw = Delfin3xt.CallsignRaw(self._io, self, self._root)
self.src_ssid_raw = Delfin3xt.SsidMask(self._io, self, self._root)
self.ctl = self._io.read_u1()
class UiFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_payload = self._io.read_bytes_full()
_io__raw_ax25_payload = KaitaiStream(BytesIO(self._raw_ax25_payload))
self.ax25_payload = Delfin3xt.Delfin3xtPayload(_io__raw_ax25_payload, self, self._root)
class Callsign(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.callsign = (self._io.read_bytes(6)).decode(u"ASCII")
class IFrame(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.pid = self._io.read_u1()
self._raw_ax25_payload = self._io.read_bytes_full()
_io__raw_ax25_payload = KaitaiStream(BytesIO(self._raw_ax25_payload))
self.ax25_payload = Delfin3xt.Delfin3xtPayload(_io__raw_ax25_payload, self, self._root)
class SsidMask(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.ssid_mask = self._io.read_u1()
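            # Raw AX.25 SSID byte; the ssid property below extracts
            # (ssid_mask & 15) >> 1 from it.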
        @property
        def ssid(self):
            if hasattr(self, '_m_ssid'):
                return self._m_ssid
            self._m_ssid = ((self.ssid_mask & 15) >> 1)
            return self._m_ssid
class Delfin3xtPayload(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.elapsed_time = self._io.read_u4be()
self.boot_counter = self._io.read_u2be()
self.frame_ctr_type = self._io.read_u4be()
self.ptrx_dv_raw = self._io.read_bits_int_be(12)
self.ptrx_rss_raw = self._io.read_bits_int_be(12)
self.ptrx_rp_raw = self._io.read_bits_int_be(12)
self.ptrx_fp_raw = self._io.read_bits_int_be(12)
self.ptrx_tsc_raw = self._io.read_bits_int_be(12)
self.ptrx_rsc_raw = self._io.read_bits_int_be(12)
self.ptrx_pa_temp_raw = self._io.read_bits_int_be(12)
self.ptrx_pbv_raw = self._io.read_bits_int_be(12)
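            # Note: the ptrx_*_raw fields above are raw 12-bit telemetry
            # words; this decoder exposes them as-is, and any conversion to
            # engineering units is left to downstream processing.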
self.depl_sol_x_minus = self._io.read_bits_int_be(1) != 0
self.depl_sol_x_plus = self._io.read_bits_int_be(1) != 0
self.depl_sol_y_minus = self._io.read_bits_int_be(1) != 0
self.depl_sol_y_plus = self._io.read_bits_int_be(1) != 0
self.depl_ant_x_minus = self._io.read_bits_int_be(1) != 0
self.depl_ant_x_plus = self._io.read_bits_int_be(1) != 0
self.depl_ant_y_minus = self._io.read_bits_int_be(1) != 0
self.depl_ant_y_plus = self._io.read_bits_int_be(1) != 0
self._io.align_to_byte()
self.dab_temp = self._io.read_u1()
self.eps_bus_current = self._io.read_bits_int_be(12)
self.eps_bus_voltage = self._io.read_bits_int_be(12)
self._io.align_to_byte()
self.eps_variable_bus_v = self._io.read_u1()
self.power_status_solar_panel_xpzp = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_xpzm = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_xmzp = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_xmzm = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_ypzp = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_ypzm = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_ymzp = self._io.read_bits_int_be(1) != 0
self.power_status_solar_panel_ymzm = self._io.read_bits_int_be(1) != 0
self._io.align_to_byte()
self.oppsp_xpzp_c = self._io.read_u1()
self.oppsp_xpzm_c = self._io.read_u1()
self.oppsp_xmzp_c = self._io.read_u1()
self.oppsp_xmzm_c = self._io.read_u1()
self.oppsp_ypzp_c = self._io.read_u1()
self.oppsp_ypzm_c = self._io.read_u1()
self.oppsp_ymzp_c = self._io.read_u1()
self.oppsp_ymzm_c = self._io.read_u1()
self.oppsp_xpzp_v = self._io.read_u1()
self.oppsp_xpzm_v = self._io.read_u1()
self.oppsp_xmzp_v = self._io.read_u1()
self.oppsp_xmzm_v = self._io.read_u1()
self.oppsp_ypzp_v = self._io.read_u1()
self.oppsp_ypzm_v = self._io.read_u1()
self.oppsp_ymzp_v = self._io.read_u1()
self.oppsp_ymzm_v = self._io.read_u1()
self.eps_solar_panel_xpzp_temp = self._io.read_u1()
self.eps_solar_panel_xpzm_temp = self._io.read_u1()
self.eps_solar_panel_xmzp_temp = self._io.read_u1()
self.eps_solar_panel_xmzm_temp = self._io.read_u1()
self.eps_solar_panel_ypzp_temp = self._io.read_u1()
self.eps_solar_panel_ypzm_temp = self._io.read_u1()
self.eps_solar_panel_ymzp_temp = self._io.read_u1()
self.eps_solar_panel_ymzm_temp = self._io.read_u1()
self.eps_reg_board_temp = self._io.read_u1()
self.bat1_dod = self._io.read_u1()
self.bat1_cc = self._io.read_u1()
self.bat1_dc = self._io.read_u1()
self.bat1_v = self._io.read_u1()
self.bat1_temp = self._io.read_u1()
self.bat2_dod = self._io.read_u1()
self.bat2_cc = self._io.read_u1()
self.bat2_dc = self._io.read_u1()
self.bat2_v = self._io.read_u1()
self.bat2_temp = self._io.read_u1()
self.bat3_dod = self._io.read_u1()
self.bat3_cc = self._io.read_u1()
self.bat3_dc = self._io.read_u1()
self.bat3_v = self._io.read_u1()
self.bat3_temp = self._io.read_u1()
self.bat4_dod = self._io.read_u1()
self.bat4_cc = self._io.read_u1()
self.bat4_dc = self._io.read_u1()
self.bat4_v = self._io.read_u1()
self.bat4_temp = self._io.read_u1()
self.t3_vc = self._io.read_u1()
self.t3_ic = self._io.read_u1()
self.t3_iv = self._io.read_u1()
self.t3_pt = self._io.read_u1()
self.t3_mt = self._io.read_u1()
self.t3_pp_1 = self._io.read_bits_int_be(12)
self.t3_pp_2 = self._io.read_bits_int_be(12)
self.t3_pp_3 = self._io.read_bits_int_be(12)
self.t3_pp_4 = self._io.read_bits_int_be(12)
self.t3_pp_5 = self._io.read_bits_int_be(12)
self.t3_pp_6 = self._io.read_bits_int_be(12)
self.t3_pp_7 = self._io.read_bits_int_be(12)
self.t3_pp_8 = self._io.read_bits_int_be(12)
self.t3_pp_9 = self._io.read_bits_int_be(12)
self.t3_pp_10 = self._io.read_bits_int_be(12)
self.t3_pp_11 = self._io.read_bits_int_be(12)
self.t3_pp_12 = self._io.read_bits_int_be(12)
self.t3_pp_13 = self._io.read_bits_int_be(12)
self.t3_pp_14 = self._io.read_bits_int_be(12)
self.t3_pp_15 = self._io.read_bits_int_be(12)
self.t3_pp_16 = self._io.read_bits_int_be(12)
self.t3_pp_17 = self._io.read_bits_int_be(12)
self.t3_pp_18 = self._io.read_bits_int_be(12)
self.t3_pp_19 = self._io.read_bits_int_be(12)
self.t3_pp_20 = self._io.read_bits_int_be(12)
self.t3_pp_21 = self._io.read_bits_int_be(12)
self.t3_pp_22 = self._io.read_bits_int_be(12)
self.t3_pp_23 = self._io.read_bits_int_be(12)
self.t3_pp_24 = self._io.read_bits_int_be(12)
self.t3_pp_25 = self._io.read_bits_int_be(12)
self.t3_pp_26 = self._io.read_bits_int_be(12)
self.t3_pp_27 = self._io.read_bits_int_be(12)
self.t3_pp_28 = self._io.read_bits_int_be(12)
self.t3_pp_29 = self._io.read_bits_int_be(12)
self.t3_pp_30 = self._io.read_bits_int_be(12)
self.t3_pp_31 = self._io.read_bits_int_be(12)
self.t3_pp_32 = self._io.read_bits_int_be(12)
self.t3_pp_33 = self._io.read_bits_int_be(12)
self.t3_pp_34 = self._io.read_bits_int_be(12)
self.t3_pp_35 = self._io.read_bits_int_be(12)
self.t3_pp_36 = self._io.read_bits_int_be(12)
self.t3_pp_37 = self._io.read_bits_int_be(12)
self.t3_pp_38 = self._io.read_bits_int_be(12)
self.t3_pp_39 = self._io.read_bits_int_be(12)
self.t3_pp_40 = self._io.read_bits_int_be(12)
self.t3_pp_41 = self._io.read_bits_int_be(12)
self.t3_pp_42 = self._io.read_bits_int_be(12)
self.t3_pp_43 = self._io.read_bits_int_be(12)
self.t3_pp_44 = self._io.read_bits_int_be(12)
self.t3_pp_45 = self._io.read_bits_int_be(12)
self.t3_pp_46 = self._io.read_bits_int_be(12)
self.t3_pp_47 = self._io.read_bits_int_be(12)
self.t3_pp_48 = self._io.read_bits_int_be(12)
self.t3_pp_49 = self._io.read_bits_int_be(12)
self.t3_pp_50 = self._io.read_bits_int_be(12)
self.t3_pp_51 = self._io.read_bits_int_be(12)
self.t3_pp_52 = self._io.read_bits_int_be(12)
self.t3_pp_53 = self._io.read_bits_int_be(12)
self.t3_pp_54 = self._io.read_bits_int_be(12)
self.t3_pp_55 = self._io.read_bits_int_be(12)
self.t3_pp_56 = self._io.read_bits_int_be(12)
self.t3_pp_57 = self._io.read_bits_int_be(12)
self.t3_pp_58 = self._io.read_bits_int_be(12)
self.t3_pp_59 = self._io.read_bits_int_be(12)
self.t3_pp_60 = self._io.read_bits_int_be(12)
self.sdm_iv_id = self._io.read_bits_int_be(4)
self.skip = self._io.read_bits_int_be(2)
self.sdm_status_cell_temp_ym = self._io.read_bits_int_be(1) != 0
self.sdm_status_cell_temp_yp = self._io.read_bits_int_be(1) != 0
self._io.align_to_byte()
self.sdm_iv_curve_c1 = self._io.read_u2be()
self.sdm_iv_curve_c2 = self._io.read_u2be()
self.sdm_iv_curve_c3 = self._io.read_u2be()
self.sdm_iv_curve_c4 = self._io.read_u2be()
self.sdm_iv_curve_c5 = self._io.read_u2be()
self.sdm_iv_curve_c6 = self._io.read_u2be()
self.sdm_iv_curve_c7 = self._io.read_u2be()
self.sdm_iv_curve_c8 = self._io.read_u2be()
self.sdm_iv_curve_v1 = self._io.read_u2be()
self.sdm_iv_curve_v2 = self._io.read_u2be()
self.sdm_iv_curve_v3 = self._io.read_u2be()
self.sdm_iv_curve_v4 = self._io.read_u2be()
self.sdm_iv_curve_v5 = self._io.read_u2be()
self.sdm_iv_curve_v6 = self._io.read_u2be()
self.sdm_iv_curve_v7 = self._io.read_u2be()
self.sdm_iv_curve_v8 = self._io.read_u2be()
self.sdm_cell_temp_ym = self._io.read_u1()
self.sdm_cell_temp_yp = self._io.read_u1()
        @property
        def payloadsize(self):
            if hasattr(self, '_m_payloadsize'):
                return self._m_payloadsize
            self._m_payloadsize = self._io.size()
            return self._m_payloadsize
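        # frame_ctr_type packs two values into one 32-bit big-endian word:
        # bits 31..1 carry the frame counter, bit 0 carries the frame type.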
        @property
        def framecounter(self):
            if hasattr(self, '_m_framecounter'):
                return self._m_framecounter
            self._m_framecounter = ((self.frame_ctr_type & 4294967294) >> 1)
            return self._m_framecounter
        @property
        def frametype(self):
            if hasattr(self, '_m_frametype'):
                return self._m_frametype
            self._m_frametype = (self.frame_ctr_type & 1)
            return self._m_frametype
class CallsignRaw(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self._raw__raw_callsign_ror = self._io.read_bytes(6)
self._raw_callsign_ror = KaitaiStream.process_rotate_left(self._raw__raw_callsign_ror, 8 - (1), 1)
_io__raw_callsign_ror = KaitaiStream(BytesIO(self._raw_callsign_ror))
self.callsign_ror = Delfin3xt.Callsign(_io__raw_callsign_ror, self, self._root)
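

if __name__ == "__main__":
    # Hypothetical usage sketch, not part of the generated decoder: parse a
    # captured Delfi-n3Xt AX.25 frame supplied as a hex string on the
    # command line and print a few decoded fields.
    import sys
    if len(sys.argv) > 1:
        frame = Delfin3xt.from_bytes(bytes.fromhex(sys.argv[1]))
        payload = frame.ax25_frame.payload.ax25_payload
        print("src callsign:", frame.ax25_frame.ax25_header.src_callsign_raw.callsign_ror.callsign)
        print("elapsed time:", payload.elapsed_time)
        print("frame counter:", payload.framecounter)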

# source (pypi): /satnogs_decoders-1.60.0-py3-none-any.whl/satnogsdecoders/decoder/delfin3xt.py

class OpenApiException(Exception):
"""The base exception class for all OpenAPIExceptions"""
class ApiTypeError(OpenApiException, TypeError):
def __init__(self, msg, path_to_item=None, valid_classes=None,
key_type=None):
""" Raises an exception for TypeErrors
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (list): a list of keys and indices to get to the
current_item
None if unset
valid_classes (tuple): the primitive classes that current item
should be an instance of
None if unset
            key_type (bool): True if our item is a key in a dict,
                False if it is a value in a dict or an item in a list,
                None if unset
"""
self.path_to_item = path_to_item
self.valid_classes = valid_classes
self.key_type = key_type
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiTypeError, self).__init__(full_msg)
class ApiValueError(OpenApiException, ValueError):
def __init__(self, msg, path_to_item=None):
"""
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (list): the path to the exception in the
received_data dict. None if unset
"""
self.path_to_item = path_to_item
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiValueError, self).__init__(full_msg)
class ApiAttributeError(OpenApiException, AttributeError):
def __init__(self, msg, path_to_item=None):
"""
Raised when an attribute reference or assignment fails.
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (None/list): the path to the exception in the
received_data dict
"""
self.path_to_item = path_to_item
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiAttributeError, self).__init__(full_msg)
class ApiKeyError(OpenApiException, KeyError):
def __init__(self, msg, path_to_item=None):
"""
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (None/list): the path to the exception in the
received_data dict
"""
self.path_to_item = path_to_item
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiKeyError, self).__init__(full_msg)
class ApiException(OpenApiException):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message
class NotFoundException(ApiException):
def __init__(self, status=None, reason=None, http_resp=None):
super(NotFoundException, self).__init__(status, reason, http_resp)
class UnauthorizedException(ApiException):
def __init__(self, status=None, reason=None, http_resp=None):
super(UnauthorizedException, self).__init__(status, reason, http_resp)
class ForbiddenException(ApiException):
def __init__(self, status=None, reason=None, http_resp=None):
super(ForbiddenException, self).__init__(status, reason, http_resp)
class ServiceException(ApiException):
def __init__(self, status=None, reason=None, http_resp=None):
super(ServiceException, self).__init__(status, reason, http_resp)
def render_path(path_to_item):
"""Returns a string representation of a path"""
result = ""
for pth in path_to_item:
if isinstance(pth, int):
result += "[{0}]".format(pth)
else:
result += "['{0}']".format(pth)
return result
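

if __name__ == "__main__":
    # Minimal demonstration (assumption: run directly as a script; the
    # client itself never executes this): path_to_item is rendered into
    # the exception message via render_path().
    err = ApiTypeError(
        "must be an int",
        path_to_item=["results", 0, "id"],
        valid_classes=(int,),
    )
    print(err)  # -> must be an int at ['results'][0]['id']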

# source (pypi): /satnogs-network-api-client-1.104.tar.gz/satnogs-network-api-client-1.104/satnogsnetworkapiclient/exceptions.py

import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.api_client import ApiClient, Endpoint as _Endpoint
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from satnogsnetworkapiclient.model.new_observation import NewObservation
from satnogsnetworkapiclient.model.observation import Observation
from satnogsnetworkapiclient.model.paginated_observation_list import PaginatedObservationList
from satnogsnetworkapiclient.model.patched_observation import PatchedObservation
from satnogsnetworkapiclient.model.update_observation import UpdateObservation
class ObservationsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.observations_create_endpoint = _Endpoint(
settings={
'response_type': (NewObservation,),
'auth': [
'tokenAuth'
],
'endpoint_path': '/api/observations/',
'operation_id': 'observations_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'new_observation',
],
'required': [
'new_observation',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'new_observation':
(NewObservation,),
},
'attribute_map': {
},
'location_map': {
'new_observation': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/x-www-form-urlencoded',
'multipart/form-data'
]
},
api_client=api_client
)
self.observations_list_endpoint = _Endpoint(
settings={
'response_type': (PaginatedObservationList,),
'auth': [
'tokenAuth'
],
'endpoint_path': '/api/observations/',
'operation_id': 'observations_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'end',
'ground_station',
'id',
'observation_id',
'observer',
'page',
'satellite__norad_cat_id',
'start',
'status',
'transmitter_mode',
'transmitter_type',
'transmitter_uuid',
'vetted_status',
'vetted_user',
'waterfall_status',
],
'required': [],
'nullable': [
'waterfall_status',
],
'enum': [
'status',
'transmitter_type',
'vetted_status',
'waterfall_status',
],
'validation': [
]
},
root_map={
'validations': {
},
                'allowed_values': {
                    # The generated output contained literal ``null`` values
                    # (a NameError in Python) and duplicated dict keys here.
                    # The concrete enum members for 'status' and
                    # 'vetted_status' are not available in this output, so
                    # only a None placeholder remains for them.
                    ('status',): {
                        'None': None,
                    },
                    ('transmitter_type',): {
                        "TRANSCEIVER": "Transceiver",
                        "TRANSMITTER": "Transmitter",
                        "TRANSPONDER": "Transponder"
                    },
                    ('vetted_status',): {
                        'None': None,
                    },
                    ('waterfall_status',): {
                        'None': None,
                        "FALSE": "false",
                    },
                },
'openapi_types': {
'end':
(datetime,),
'ground_station':
(int,),
'id':
(int,),
'observation_id':
([int],),
'observer':
(int,),
'page':
(int,),
'satellite__norad_cat_id':
(int,),
'start':
(datetime,),
'status':
(int,),
'transmitter_mode':
(str,),
'transmitter_type':
(str,),
'transmitter_uuid':
(str,),
'vetted_status':
(int,),
'vetted_user':
(int,),
'waterfall_status':
(bool, none_type,),
},
'attribute_map': {
'end': 'end',
'ground_station': 'ground_station',
'id': 'id',
'observation_id': 'observation_id',
'observer': 'observer',
'page': 'page',
'satellite__norad_cat_id': 'satellite__norad_cat_id',
'start': 'start',
'status': 'status',
'transmitter_mode': 'transmitter_mode',
'transmitter_type': 'transmitter_type',
'transmitter_uuid': 'transmitter_uuid',
'vetted_status': 'vetted_status',
'vetted_user': 'vetted_user',
'waterfall_status': 'waterfall_status',
},
'location_map': {
'end': 'query',
'ground_station': 'query',
'id': 'query',
'observation_id': 'query',
'observer': 'query',
'page': 'query',
'satellite__norad_cat_id': 'query',
'start': 'query',
'status': 'query',
'transmitter_mode': 'query',
'transmitter_type': 'query',
'transmitter_uuid': 'query',
'vetted_status': 'query',
'vetted_user': 'query',
'waterfall_status': 'query',
},
'collection_format_map': {
'observation_id': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.observations_partial_update_endpoint = _Endpoint(
settings={
'response_type': (Observation,),
'auth': [
'tokenAuth'
],
'endpoint_path': '/api/observations/{id}/',
'operation_id': 'observations_partial_update',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'id',
'patched_observation',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
'patched_observation':
(PatchedObservation,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'patched_observation': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/x-www-form-urlencoded',
'multipart/form-data'
]
},
api_client=api_client
)
self.observations_retrieve_endpoint = _Endpoint(
settings={
'response_type': (Observation,),
'auth': [
'tokenAuth'
],
'endpoint_path': '/api/observations/{id}/',
'operation_id': 'observations_retrieve',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.observations_update_endpoint = _Endpoint(
settings={
'response_type': (UpdateObservation,),
'auth': [
'tokenAuth'
],
'endpoint_path': '/api/observations/{id}/',
'operation_id': 'observations_update',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'id',
'update_observation',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
'update_observation':
(UpdateObservation,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'update_observation': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/x-www-form-urlencoded',
'multipart/form-data'
]
},
api_client=api_client
)
def observations_create(
self,
new_observation,
**kwargs
):
"""observations_create # noqa: E501
Creates observations from a list of observation data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.observations_create(new_observation, async_req=True)
>>> result = thread.get()
Args:
new_observation (NewObservation):
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
NewObservation
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['new_observation'] = \
new_observation
return self.observations_create_endpoint.call_with_http_info(**kwargs)
def observations_list(
self,
**kwargs
):
"""observations_list # noqa: E501
SatNOGS Network Observation API view class # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.observations_list(async_req=True)
>>> result = thread.get()
Keyword Args:
end (datetime): [optional]
ground_station (int): [optional]
id (int): [optional]
observation_id ([int]): Multiple values may be separated by commas.. [optional]
observer (int): observer. [optional]
page (int): A page number within the paginated result set.. [optional]
satellite__norad_cat_id (int): [optional]
start (datetime): [optional]
status (int): [optional]
transmitter_mode (str): [optional]
transmitter_type (str): [optional]
transmitter_uuid (str): [optional]
vetted_status (int): Vetted status (deprecated: please use Status). [optional]
vetted_user (int): Vetted user (deprecated: will be removed in next version). [optional]
waterfall_status (bool, none_type): [optional]
            _return_http_data_only (bool): return the response data only,
                without status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
PaginatedObservationList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.observations_list_endpoint.call_with_http_info(**kwargs)
def observations_partial_update(
self,
id,
**kwargs
):
"""observations_partial_update # noqa: E501
SatNOGS Network Observation API view class # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.observations_partial_update(id, async_req=True)
>>> result = thread.get()
Args:
id (int): A unique integer value identifying this observation.
Keyword Args:
patched_observation (PatchedObservation): [optional]
            _return_http_data_only (bool): return the response data only,
                without status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Observation
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.observations_partial_update_endpoint.call_with_http_info(**kwargs)
def observations_retrieve(
self,
id,
**kwargs
):
"""observations_retrieve # noqa: E501
SatNOGS Network Observation API view class # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.observations_retrieve(id, async_req=True)
>>> result = thread.get()
Args:
id (int): A unique integer value identifying this observation.
Keyword Args:
            _return_http_data_only (bool): return the response data only,
                without status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Observation
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.observations_retrieve_endpoint.call_with_http_info(**kwargs)
def observations_update(
self,
id,
**kwargs
):
"""observations_update # noqa: E501
Updates observation with audio, waterfall or demoded data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.observations_update(id, async_req=True)
>>> result = thread.get()
Args:
id (int): A unique integer value identifying this observation.
Keyword Args:
update_observation (UpdateObservation): [optional]
            _return_http_data_only (bool): return the response data only,
                without status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
UpdateObservation
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.observations_update_endpoint.call_with_http_info(**kwargs)
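

# Hypothetical usage sketch. The Configuration import path and the
# "tokenAuth" api_key field name below are assumptions based on typical
# OpenAPI-generated clients, not verified against this package:
#
#     from satnogsnetworkapiclient import ApiClient, Configuration
#     configuration = Configuration(host="https://network.satnogs.org")
#     configuration.api_key["tokenAuth"] = "<your API token>"
#     with ApiClient(configuration) as api_client:
#         api = ObservationsApi(api_client)
#         page = api.observations_list(satellite__norad_cat_id=12345)
#         print(page.count, len(page.results))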

# source (pypi): /satnogs-network-api-client-1.104.tar.gz/satnogs-network-api-client-1.104/satnogsnetworkapiclient/api/observations_api.py

import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class TransmitterTypeEnum(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('value',): {
'TRANSMITTER': "Transmitter",
'TRANSCEIVER': "Transceiver",
'TRANSPONDER': "Transponder",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'value': (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {}
read_only_vars = set()
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""TransmitterTypeEnum - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
            args[0] (str): must be one of ["Transmitter", "Transceiver", "Transponder", ] # noqa: E501
        Keyword Args:
            value (str): must be one of ["Transmitter", "Transceiver", "Transponder", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
"""TransmitterTypeEnum - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
            args[0] (str): must be one of ["Transmitter", "Transceiver", "Transponder", ] # noqa: E501
        Keyword Args:
            value (str): must be one of ["Transmitter", "Transceiver", "Transponder", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
self = super(OpenApiModel, cls).__new__(cls)
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
return self
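

# Hypothetical usage sketch: a ModelSimple enum validates its value against
# allowed_values on construction, e.g.
#
#     TransmitterTypeEnum("Transceiver").value   # "Transceiver"
#     TransmitterTypeEnum("Repeater")            # rejected: not an allowed value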

# source (pypi): /satnogs-network-api-client-1.104.tar.gz/satnogs-network-api-client-1.104/satnogsnetworkapiclient/model/transmitter_type_enum.py

import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class Transmitter(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'uuid': (str,), # noqa: E501
'stats': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'uuid': 'uuid', # noqa: E501
'stats': 'stats', # noqa: E501
}
read_only_vars = {
'uuid', # noqa: E501
'stats', # noqa: E501
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, uuid, stats, *args, **kwargs): # noqa: E501
"""Transmitter - a model defined in OpenAPI
Args:
uuid (str):
stats (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.uuid = uuid
self.stats = stats
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""Transmitter - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")

# source (pypi): /satnogs-network-api-client-1.104.tar.gz/satnogs-network-api-client-1.104/satnogsnetworkapiclient/model/transmitter.py

import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
def lazy_import():
from satnogsnetworkapiclient.model.observation import Observation
globals()['Observation'] = Observation
class PaginatedObservationList(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'count': (int,), # noqa: E501
'next': (str, none_type,), # noqa: E501
'previous': (str, none_type,), # noqa: E501
'results': ([Observation],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'count': 'count', # noqa: E501
'next': 'next', # noqa: E501
'previous': 'previous', # noqa: E501
'results': 'results', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""PaginatedObservationList - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
count (int): [optional] # noqa: E501
next (str, none_type): [optional] # noqa: E501
previous (str, none_type): [optional] # noqa: E501
results ([Observation]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PaginatedObservationList - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
count (int): [optional] # noqa: E501
next (str, none_type): [optional] # noqa: E501
previous (str, none_type): [optional] # noqa: E501
results ([Observation]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
# ---- end of satnogsnetworkapiclient/model/paginated_observation_list.py (satnogs-network-api-client 1.104, pypi) ----
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class NewObservation(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('transmitter_uuid',): {
'max_length': 22,
'min_length': 22,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'start': (datetime,), # noqa: E501
'end': (datetime,), # noqa: E501
'ground_station': (int,), # noqa: E501
'transmitter_uuid': (str,), # noqa: E501
'center_frequency': (int,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'start': 'start', # noqa: E501
'end': 'end', # noqa: E501
'ground_station': 'ground_station', # noqa: E501
'transmitter_uuid': 'transmitter_uuid', # noqa: E501
'center_frequency': 'center_frequency', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, start, end, ground_station, transmitter_uuid, *args, **kwargs): # noqa: E501
"""NewObservation - a model defined in OpenAPI
Args:
start (datetime):
end (datetime):
ground_station (int):
transmitter_uuid (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
center_frequency (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.start = start
self.end = end
self.ground_station = ground_station
self.transmitter_uuid = transmitter_uuid
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, start, end, ground_station, transmitter_uuid, *args, **kwargs): # noqa: E501
"""NewObservation - a model defined in OpenAPI
Args:
start (datetime):
end (datetime):
ground_station (int):
transmitter_uuid (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
center_frequency (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.start = start
self.end = end
self.ground_station = ground_station
self.transmitter_uuid = transmitter_uuid
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
# ---- end of satnogsnetworkapiclient/model/new_observation.py (satnogs-network-api-client 1.104, pypi) ----
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class DemodData(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'payload_demod': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'payload_demod': 'payload_demod', # noqa: E501
}
read_only_vars = {
'payload_demod', # noqa: E501
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, payload_demod, *args, **kwargs): # noqa: E501
"""DemodData - a model defined in OpenAPI
Args:
payload_demod (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.payload_demod = payload_demod
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""DemodData - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
# ---- end of satnogsnetworkapiclient/model/demod_data.py (satnogs-network-api-client 1.104, pypi) ----
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class Station(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('name',): {
'max_length': 45,
},
('altitude',): {
'inclusive_minimum': 0,
},
('lat',): {
'inclusive_maximum': 90,
'inclusive_minimum': -90,
},
('lng',): {
'inclusive_maximum': 180,
'inclusive_minimum': -180,
},
('qthlocator',): {
'max_length': 8,
},
('description',): {
'max_length': 500,
},
('client_version',): {
'max_length': 45,
},
('target_utilization',): {
'inclusive_maximum': 100,
'inclusive_minimum': 0,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'id': (int,), # noqa: E501
'name': (str,), # noqa: E501
'altitude': (int,), # noqa: E501
'min_horizon': (str,), # noqa: E501
'antenna': (str,), # noqa: E501
'created': (datetime,), # noqa: E501
'status': (str,), # noqa: E501
'observations': (str,), # noqa: E501
'lat': (float, none_type,), # noqa: E501
'lng': (float, none_type,), # noqa: E501
'qthlocator': (str,), # noqa: E501
'last_seen': (datetime, none_type,), # noqa: E501
'description': (str,), # noqa: E501
'client_version': (str,), # noqa: E501
'target_utilization': (int, none_type,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'id': 'id', # noqa: E501
'name': 'name', # noqa: E501
'altitude': 'altitude', # noqa: E501
'min_horizon': 'min_horizon', # noqa: E501
'antenna': 'antenna', # noqa: E501
'created': 'created', # noqa: E501
'status': 'status', # noqa: E501
'observations': 'observations', # noqa: E501
'lat': 'lat', # noqa: E501
'lng': 'lng', # noqa: E501
'qthlocator': 'qthlocator', # noqa: E501
'last_seen': 'last_seen', # noqa: E501
'description': 'description', # noqa: E501
'client_version': 'client_version', # noqa: E501
'target_utilization': 'target_utilization', # noqa: E501
}
read_only_vars = {
'id', # noqa: E501
'min_horizon', # noqa: E501
'antenna', # noqa: E501
'created', # noqa: E501
'status', # noqa: E501
'observations', # noqa: E501
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, id, name, altitude, min_horizon, antenna, created, status, observations, *args, **kwargs): # noqa: E501
"""Station - a model defined in OpenAPI
Args:
id (int):
name (str):
altitude (int):
min_horizon (str):
antenna (str):
created (datetime):
status (str):
observations (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            lat (float, none_type): e.g. 38.01697. [optional] # noqa: E501
            lng (float, none_type): e.g. 23.7314. [optional] # noqa: E501
qthlocator (str): [optional] # noqa: E501
last_seen (datetime, none_type): [optional] # noqa: E501
description (str): Max 500 characters. [optional] # noqa: E501
client_version (str): [optional] # noqa: E501
target_utilization (int, none_type): Target utilization factor for your station. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.id = id
self.name = name
self.altitude = altitude
self.min_horizon = min_horizon
self.antenna = antenna
self.created = created
self.status = status
self.observations = observations
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, name, altitude, *args, **kwargs): # noqa: E501
"""Station - a model defined in OpenAPI
        Args:
            name (str):
            altitude (int):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            lat (float, none_type): e.g. 38.01697. [optional] # noqa: E501
            lng (float, none_type): e.g. 23.7314. [optional] # noqa: E501
qthlocator (str): [optional] # noqa: E501
last_seen (datetime, none_type): [optional] # noqa: E501
description (str): Max 500 characters. [optional] # noqa: E501
client_version (str): [optional] # noqa: E501
target_utilization (int, none_type): Target utilization factor for your station. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.name = name
self.altitude = altitude
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
# ---- end of satnogsnetworkapiclient/model/station.py (satnogs-network-api-client 1.104, pypi) ----
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class UpdateObservation(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('client_version',): {
'max_length': 255,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'id': (int,), # noqa: E501
'payload': (str,), # noqa: E501
'waterfall': (str,), # noqa: E501
'client_metadata': (str,), # noqa: E501
'client_version': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'id': 'id', # noqa: E501
'payload': 'payload', # noqa: E501
'waterfall': 'waterfall', # noqa: E501
'client_metadata': 'client_metadata', # noqa: E501
'client_version': 'client_version', # noqa: E501
}
read_only_vars = {
'id', # noqa: E501
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501
"""UpdateObservation - a model defined in OpenAPI
Args:
id (int):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
payload (str): [optional] # noqa: E501
waterfall (str): [optional] # noqa: E501
client_metadata (str): [optional] # noqa: E501
client_version (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.id = id
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""UpdateObservation - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
payload (str): [optional] # noqa: E501
waterfall (str): [optional] # noqa: E501
client_metadata (str): [optional] # noqa: E501
client_version (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
# ---- end of satnogsnetworkapiclient/model/update_observation.py (satnogs-network-api-client 1.104, pypi) ----
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
class StationConfiguration(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('satnogs_soapy_rx_device',): {
'max_length': 40,
},
('satnogs_antenna',): {
'max_length': 40,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'satnogs_station_id': (str,), # noqa: E501
'satnogs_api_token': (str,), # noqa: E501
'satnogs_station_elev': (int,), # noqa: E501
'satnogs_station_lat': (float,), # noqa: E501
'satnogs_station_lon': (float,), # noqa: E501
'satnogs_soapy_rx_device': (str,), # noqa: E501
'satnogs_antenna': (str,), # noqa: E501
'satnogs_rx_samp_rate': (int, none_type,), # noqa: E501
'satnogs_rf_gain': (float, none_type,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'satnogs_station_id': 'satnogs_station_id', # noqa: E501
'satnogs_api_token': 'satnogs_api_token', # noqa: E501
'satnogs_station_elev': 'satnogs_station_elev', # noqa: E501
'satnogs_station_lat': 'satnogs_station_lat', # noqa: E501
'satnogs_station_lon': 'satnogs_station_lon', # noqa: E501
'satnogs_soapy_rx_device': 'satnogs_soapy_rx_device', # noqa: E501
'satnogs_antenna': 'satnogs_antenna', # noqa: E501
'satnogs_rx_samp_rate': 'satnogs_rx_samp_rate', # noqa: E501
'satnogs_rf_gain': 'satnogs_rf_gain', # noqa: E501
}
read_only_vars = {
'satnogs_station_id', # noqa: E501
'satnogs_api_token', # noqa: E501
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, satnogs_station_id, satnogs_api_token, satnogs_station_elev, satnogs_station_lat, satnogs_station_lon, *args, **kwargs): # noqa: E501
"""StationConfiguration - a model defined in OpenAPI
Args:
satnogs_station_id (str):
satnogs_api_token (str):
satnogs_station_elev (int):
satnogs_station_lat (float):
satnogs_station_lon (float):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
satnogs_soapy_rx_device (str): [optional] # noqa: E501
satnogs_antenna (str): [optional] # noqa: E501
satnogs_rx_samp_rate (int, none_type): [optional] # noqa: E501
satnogs_rf_gain (float, none_type): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.satnogs_station_id = satnogs_station_id
self.satnogs_api_token = satnogs_api_token
self.satnogs_station_elev = satnogs_station_elev
self.satnogs_station_lat = satnogs_station_lat
self.satnogs_station_lon = satnogs_station_lon
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, satnogs_station_elev, satnogs_station_lat, satnogs_station_lon, *args, **kwargs): # noqa: E501
"""StationConfiguration - a model defined in OpenAPI
        Args:
            satnogs_station_elev (int):
            satnogs_station_lat (float):
            satnogs_station_lon (float):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
satnogs_soapy_rx_device (str): [optional] # noqa: E501
satnogs_antenna (str): [optional] # noqa: E501
satnogs_rx_samp_rate (int, none_type): [optional] # noqa: E501
satnogs_rf_gain (float, none_type): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.satnogs_station_elev = satnogs_station_elev
self.satnogs_station_lat = satnogs_station_lat
self.satnogs_station_lon = satnogs_station_lon
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
# ---- end of satnogsnetworkapiclient/model/station_configuration.py (satnogs-network-api-client 1.104, pypi) ----
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
def lazy_import():
from satnogsnetworkapiclient.model.transmitter import Transmitter
globals()['Transmitter'] = Transmitter
class PaginatedTransmitterList(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self, so it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'count': (int,), # noqa: E501
'next': (str, none_type,), # noqa: E501
'previous': (str, none_type,), # noqa: E501
'results': ([Transmitter],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'count': 'count', # noqa: E501
'next': 'next', # noqa: E501
'previous': 'previous', # noqa: E501
'results': 'results', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""PaginatedTransmitterList - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
count (int): [optional] # noqa: E501
next (str, none_type): [optional] # noqa: E501
previous (str, none_type): [optional] # noqa: E501
results ([Transmitter]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PaginatedTransmitterList - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
count (int): [optional] # noqa: E501
next (str, none_type): [optional] # noqa: E501
previous (str, none_type): [optional] # noqa: E501
results ([Transmitter]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
/satnogs-network-api-client-1.104.tar.gz/satnogs-network-api-client-1.104/satnogsnetworkapiclient/model/paginated_transmitter_list.py
| 0.54698 | 0.18101 |
paginated_transmitter_list.py
|
pypi
|
import re # noqa: F401
import sys # noqa: F401
from satnogsnetworkapiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from satnogsnetworkapiclient.exceptions import ApiAttributeError
def lazy_import():
from satnogsnetworkapiclient.model.station import Station
globals()['Station'] = Station
class PaginatedStationList(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'count': (int,), # noqa: E501
'next': (str, none_type,), # noqa: E501
'previous': (str, none_type,), # noqa: E501
'results': ([Station],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'count': 'count', # noqa: E501
'next': 'next', # noqa: E501
'previous': 'previous', # noqa: E501
'results': 'results', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""PaginatedStationList - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
count (int): [optional] # noqa: E501
next (str, none_type): [optional] # noqa: E501
previous (str, none_type): [optional] # noqa: E501
results ([Station]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PaginatedStationList - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
count (int): [optional] # noqa: E501
next (str, none_type): [optional] # noqa: E501
previous (str, none_type): [optional] # noqa: E501
results ([Station]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
/satnogs-network-api-client-1.104.tar.gz/satnogs-network-api-client-1.104/satnogsnetworkapiclient/model/paginated_station_list.py
| 0.497803 | 0.204382 |
paginated_station_list.py
|
pypi
|
import django_filters
from django.utils.timezone import now
from django_filters.rest_framework import FilterSet
from network.base.models import Observation, Station
from network.users.models import User
class NumberInFilter(django_filters.BaseInFilter, django_filters.NumberFilter):
"""Filter for comma separated numbers"""
class ObservationViewFilter(FilterSet):
"""SatNOGS Network Observation API View Filter"""
OBSERVATION_STATUS_CHOICES = [
('failed', 'Failed'),
('bad', 'Bad'),
('unknown', 'Unknown'),
('future', 'Future'),
('good', 'Good'),
]
WATERFALL_STATUS_CHOICES = [
(1, 'With Signal'),
(0, 'Without Signal'),
]
# DEPRECATED
VETTED_STATUS_CHOICES = [
('failed', 'Failed'),
('bad', 'Bad'),
('unknown', 'Unknown'),
('good', 'Good'),
]
start = django_filters.IsoDateTimeFilter(field_name='start', lookup_expr='gte')
end = django_filters.IsoDateTimeFilter(field_name='end', lookup_expr='lte')
status = django_filters.ChoiceFilter(
field_name='status', choices=OBSERVATION_STATUS_CHOICES, method='filter_status'
)
waterfall_status = django_filters.ChoiceFilter(
field_name='waterfall_status', choices=WATERFALL_STATUS_CHOICES, null_label='Unknown'
)
vetted_status = django_filters.ChoiceFilter(
label='Vetted status (deprecated: please use Status)',
field_name='status',
choices=VETTED_STATUS_CHOICES,
method='filter_status'
)
vetted_user = django_filters.ModelChoiceFilter(
label='Vetted user (deprecated: will be removed in next version)',
field_name='waterfall_status_user',
queryset=User.objects.all()
)
observer = django_filters.ModelChoiceFilter(
label="observer",
field_name='author',
queryset=User.objects.filter(observations__isnull=False).distinct()
)
observation_id = NumberInFilter(field_name='id', label="Observation ID(s)")
# see https://django-filter.readthedocs.io/en/master/ref/filters.html for W0613
    def filter_status(self, queryset, name, value):  # pylint: disable=W0613,R0201
        """Returns filtered observations for a given observation status"""
        observations = queryset
        if value == 'failed':
            observations = queryset.filter(status__lt=-100)
        elif value == 'bad':
            observations = queryset.filter(status__range=(-100, -1))
        elif value == 'unknown':
            observations = queryset.filter(status__range=(0, 99), end__lte=now())
        elif value == 'future':
            observations = queryset.filter(end__gt=now())
        elif value == 'good':
            observations = queryset.filter(status__gte=100)
        return observations
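    # Status encoding used by filter_status above (derived from its filters):
    #   status < -100                     -> 'failed'
    #   -100 <= status <= -1              -> 'bad'
    #   0 <= status <= 99 and end <= now  -> 'unknown'
    #   end > now                         -> 'future'
    #   status >= 100                     -> 'good'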
class Meta:
model = Observation
fields = [
'id', 'status', 'ground_station', 'start', 'end', 'satellite__norad_cat_id',
'transmitter_uuid', 'transmitter_mode', 'transmitter_type', 'waterfall_status',
'vetted_status', 'vetted_user', 'observer'
]
class StationViewFilter(FilterSet):
"""SatNOGS Network Station API View Filter"""
class Meta:
model = Station
fields = ['id', 'name', 'status', 'client_version']
class TransmitterViewFilter(FilterSet):
"""SatNOGS Network Transmitter API View Filter"""
uuid = django_filters.CharFilter(field_name='transmitter_uuid')
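# A hedged usage sketch: once these FilterSets are attached to the API views,
# filtering is driven by query parameters. The endpoint paths below are
# assumptions for illustration, not taken from this file:
#   /api/observations/?status=good&start=2024-01-01T00:00:00Z
#   /api/observations/?observation_id=1,2,3   # NumberInFilter accepts comma separated IDs
#   /api/transmitters/?uuid=<transmitter-uuid>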
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/api/filters.py
| 0.654674 | 0.16175 |
filters.py
|
pypi
|
# pylint: disable=no-self-use
from collections import defaultdict
from PIL import Image
from rest_framework import serializers
from network.base.db_api import DBConnectionError, get_tle_sets_by_norad_id_set, \
get_transmitters_by_uuid_set
from network.base.models import Antenna, DemodData, FrequencyRange, Observation, Satellite, Station
from network.base.perms import UserNoPermissionError, \
check_schedule_perms_of_violators_per_station, check_schedule_perms_per_station
from network.base.scheduling import create_new_observation
from network.base.stats import transmitter_stats_by_uuid
from network.base.validators import ObservationOverlapError, OutOfRangeError, check_end_datetime, \
check_overlaps, check_start_datetime, check_start_end_datetimes, \
check_transmitter_station_pairs, check_violators_scheduling_limit
class CreateDemodDataSerializer(serializers.ModelSerializer):
"""SatNOGS Network DemodData API Serializer for creating demoddata."""
class Meta:
model = DemodData
fields = (
'observation',
'demodulated_data',
)
def create(self, validated_data):
"""Creates demoddata from a list of validated data after checking if demodulated_data is an
image and add the result in is_image field
"""
try:
image = Image.open(validated_data['demodulated_data'])
image.verify()
validated_data['is_image'] = True
except Exception: # pylint: disable=W0703
validated_data['is_image'] = False
return DemodData.objects.create(**validated_data)
def update(self, instance, validated_data):
"""Updates demoddata from a list of validated data
currently disabled and returns None
"""
return None
class DemodDataSerializer(serializers.ModelSerializer):
"""SatNOGS Network DemodData API Serializer"""
payload_demod = serializers.SerializerMethodField()
class Meta:
model = DemodData
fields = ('payload_demod', )
def get_payload_demod(self, obj):
"""Returns DemodData Link"""
request = self.context.get("request")
if obj.payload_demod:
return request.build_absolute_uri(obj.payload_demod.url)
if obj.demodulated_data:
return request.build_absolute_uri(obj.demodulated_data.url)
return None
class UpdateObservationSerializer(serializers.ModelSerializer):
"""SatNOGS Network Observation API Serializer for uploading audio and waterfall.
    This serializer is used temporarily until the waterfall_old and payload_old fields are removed.
"""
class Meta:
model = Observation
fields = ('id', 'payload', 'waterfall', 'client_metadata', 'client_version')
class ObservationSerializer(serializers.ModelSerializer): # pylint: disable=R0904
"""SatNOGS Network Observation API Serializer"""
transmitter = serializers.SerializerMethodField()
transmitter_updated = serializers.SerializerMethodField()
norad_cat_id = serializers.SerializerMethodField()
payload = serializers.SerializerMethodField()
waterfall = serializers.SerializerMethodField()
station_name = serializers.SerializerMethodField()
station_lat = serializers.SerializerMethodField()
station_lng = serializers.SerializerMethodField()
station_alt = serializers.SerializerMethodField()
status = serializers.SerializerMethodField()
waterfall_status = serializers.SerializerMethodField()
vetted_status = serializers.SerializerMethodField() # Deprecated
vetted_user = serializers.SerializerMethodField() # Deprecated
vetted_datetime = serializers.SerializerMethodField() # Deprecated
demoddata = DemodDataSerializer(required=False, many=True)
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
observer = serializers.SerializerMethodField()
center_frequency = serializers.SerializerMethodField()
observation_frequency = serializers.SerializerMethodField()
transmitter_status = serializers.SerializerMethodField()
transmitter_unconfirmed = serializers.SerializerMethodField()
class Meta:
model = Observation
fields = (
'id', 'start', 'end', 'ground_station', 'transmitter', 'norad_cat_id', 'payload',
'waterfall', 'demoddata', 'station_name', 'station_lat', 'station_lng', 'station_alt',
'vetted_status', 'vetted_user', 'vetted_datetime', 'archived', 'archive_url',
'client_version', 'client_metadata', 'status', 'waterfall_status',
'waterfall_status_user', 'waterfall_status_datetime', 'rise_azimuth', 'set_azimuth',
'max_altitude', 'transmitter_uuid', 'transmitter_description', 'transmitter_type',
'transmitter_uplink_low', 'transmitter_uplink_high', 'transmitter_uplink_drift',
'transmitter_downlink_low', 'transmitter_downlink_high', 'transmitter_downlink_drift',
'transmitter_mode', 'transmitter_invert', 'transmitter_baud', 'transmitter_updated',
'transmitter_status', 'tle0', 'tle1', 'tle2', 'center_frequency', 'observer',
'observation_frequency', 'transmitter_unconfirmed'
)
read_only_fields = [
'id', 'start', 'end', 'observation', 'ground_station', 'transmitter', 'norad_cat_id',
'archived', 'archive_url', 'station_name', 'station_lat', 'station_lng',
'waterfall_status_user', 'status', 'waterfall_status', 'station_alt', 'vetted_status',
'vetted_user', 'vetted_datetime', 'waterfall_status_datetime', 'rise_azimuth',
'set_azimuth', 'max_altitude', 'transmitter_uuid', 'transmitter_description',
'transmitter_type', 'transmitter_uplink_low', 'transmitter_uplink_high',
'transmitter_uplink_drift', 'transmitter_downlink_low', 'transmitter_downlink_high',
'transmitter_downlink_drift', 'transmitter_mode', 'transmitter_invert',
'transmitter_baud', 'transmitter_created', 'transmitter_updated', 'transmitter_status',
'tle0', 'tle1', 'tle2', 'observer', 'center_frequency', 'observation_frequency',
'transmitter_unconfirmed'
]
def update(self, instance, validated_data):
"""Updates observation object with validated data"""
super().update(instance, validated_data)
return instance
def get_observation_frequency(self, obj):
"""Returns observation center frequency"""
frequency = obj.center_frequency or obj.transmitter_downlink_low
frequency_drift = obj.transmitter_downlink_drift
if obj.center_frequency or frequency_drift is None:
return frequency
return int(round(frequency + ((frequency * frequency_drift) / 1e9)))
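    # Drift arithmetic above: transmitter_downlink_drift appears to be expressed
    # in parts per billion, so the returned value is f + f * drift / 1e9.
    # Illustrative numbers (assumptions, not from this file): f = 437_500_000 Hz
    # with drift = 2000 shifts the result by 875 Hz.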
def get_transmitter_unconfirmed(self, obj):
"""Returns whether the transmitter was unconfirmed at the time of observation"""
return obj.transmitter_unconfirmed
def get_transmitter_status(self, obj):
"""Returns the status of the transmitter at the time of observation"""
if obj.transmitter_status:
return "active"
if obj.transmitter_status is not None:
return "inactive"
return "unknown"
def get_center_frequency(self, obj):
"""Returns observation center frequency"""
return obj.center_frequency
def get_transmitter(self, obj):
"""Returns Transmitter UUID"""
try:
return obj.transmitter_uuid
except AttributeError:
return ''
def get_transmitter_updated(self, obj):
"""Returns Transmitter last update date"""
try:
return obj.transmitter_created
except AttributeError:
return ''
def get_norad_cat_id(self, obj):
"""Returns Satellite NORAD ID"""
return obj.satellite.norad_cat_id
def get_payload(self, obj):
"""Returns Audio Link"""
request = self.context.get("request")
if obj.payload_old:
return request.build_absolute_uri(obj.payload_old.url)
if obj.payload:
return request.build_absolute_uri(obj.payload.url)
return None
def get_waterfall(self, obj):
"""Returns Watefall Link"""
request = self.context.get("request")
if obj.waterfall_old:
return request.build_absolute_uri(obj.waterfall_old.url)
if obj.waterfall:
return request.build_absolute_uri(obj.waterfall.url)
return None
def get_station_name(self, obj):
"""Returns Station name"""
try:
return obj.ground_station.name
except AttributeError:
return None
def get_station_lat(self, obj):
"""Returns Station latitude"""
try:
return obj.ground_station.lat
except AttributeError:
return None
def get_station_lng(self, obj):
"""Returns Station longitude"""
try:
return obj.ground_station.lng
except AttributeError:
return None
def get_station_alt(self, obj):
"""Returns Station elevation"""
try:
return obj.ground_station.alt
except AttributeError:
return None
def get_status(self, obj):
"""Returns Observation status"""
return obj.status_badge
def get_waterfall_status(self, obj):
"""Returns Observation status"""
return obj.waterfall_status_badge
def get_vetted_status(self, obj):
"""DEPRECATED: Returns vetted status"""
if obj.status_badge == 'future':
return 'unknown'
return obj.status_badge
def get_vetted_user(self, obj):
"""DEPRECATED: Returns vetted user"""
if obj.waterfall_status_user:
return obj.waterfall_status_user.pk
return None
def get_vetted_datetime(self, obj):
"""DEPRECATED: Returns vetted datetime"""
return obj.waterfall_status_datetime
def get_tle0(self, obj):
"""Returns tle0"""
return obj.tle_line_0
def get_tle1(self, obj):
"""Returns tle1"""
return obj.tle_line_1
def get_tle2(self, obj):
"""Returns tle2"""
return obj.tle_line_2
def get_observer(self, obj):
"""Returns the author of the observation"""
if obj.author:
return obj.author.pk
return None
class NewObservationListSerializer(serializers.ListSerializer):
"""SatNOGS Network New Observation API List Serializer"""
transmitters = {}
tle_sets = set()
violators = []
def validate(self, attrs):
"""Validates data from a list of new observations"""
(
station_set, transmitter_uuid_set, transmitter_uuid_station_set, norad_id_set,
transm_uuid_station_center_freq_set
) = (set() for _ in range(5))
uuid_to_norad_id = {}
start_end_per_station = defaultdict(list)
for observation in attrs:
station = observation.get('ground_station')
transmitter_uuid = observation.get('transmitter_uuid')
station_set.add(station)
transmitter_uuid_set.add(transmitter_uuid)
transmitter_uuid_station_set.add((transmitter_uuid, station))
start_end_per_station[int(station.id)].append(
(observation.get('start'), observation.get('end'))
)
try:
check_overlaps(start_end_per_station)
except ObservationOverlapError as error:
raise serializers.ValidationError(error, code='invalid')
try:
check_schedule_perms_per_station(self.context['request'].user, station_set)
except UserNoPermissionError as error:
raise serializers.ValidationError(error, code='forbidden')
try:
self.transmitters = get_transmitters_by_uuid_set(transmitter_uuid_set)
for uuid in transmitter_uuid_set:
norad_id_set.add(self.transmitters[uuid]['norad_cat_id'])
uuid_to_norad_id[uuid] = self.transmitters[uuid]['norad_cat_id']
self.tle_sets = get_tle_sets_by_norad_id_set(norad_id_set)
except ValueError as error:
raise serializers.ValidationError(error, code='invalid')
except DBConnectionError as error:
raise serializers.ValidationError(error)
self.violators = Satellite.objects.filter(
norad_cat_id__in=norad_id_set, is_frequency_violator=True
)
violators_norad_ids = [satellite.norad_cat_id for satellite in self.violators]
station_with_violators_set = {
station
for transmitter_uuid, station in transmitter_uuid_station_set
if uuid_to_norad_id[transmitter_uuid] in violators_norad_ids
}
try:
check_schedule_perms_of_violators_per_station(
self.context['request'].user, station_with_violators_set
)
except UserNoPermissionError as error:
raise serializers.ValidationError(error, code='forbidden')
for observation in attrs:
transmitter_uuid = observation.get('transmitter_uuid')
station = observation.get('ground_station')
center_frequency = observation.get('center_frequency', None)
transmitter = self.transmitters[transmitter_uuid]
if transmitter["type"] == "Transponder" and center_frequency is None:
observation["center_frequency"
] = (transmitter['downlink_high'] + transmitter['downlink_low']) // 2
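                # (A transponder covers a downlink band rather than a single
                # frequency, so when no center frequency is supplied the band
                # midpoint is used as the default.)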
transm_uuid_station_center_freq_set.add((transmitter_uuid, station, center_frequency))
transmitter_station_list = [
(self.transmitters[transmitter_uuid], station, center_freq)
for transmitter_uuid, station, center_freq in transm_uuid_station_center_freq_set
]
try:
check_transmitter_station_pairs(transmitter_station_list)
except OutOfRangeError as error:
raise serializers.ValidationError(error, code='invalid')
return attrs
def create(self, validated_data):
"""Creates new observations from a list of new observations validated data"""
new_observations = []
observations_per_norad_id = defaultdict(list)
for observation_data in validated_data:
transmitter_uuid = observation_data['transmitter_uuid']
transmitter = self.transmitters[transmitter_uuid]
tle_set = self.tle_sets[transmitter['norad_cat_id']]
observations_per_norad_id[transmitter['norad_cat_id']].append(
observation_data['start']
)
observation = create_new_observation(
station=observation_data['ground_station'],
transmitter=transmitter,
start=observation_data['start'],
end=observation_data['end'],
author=self.context['request'].user,
tle_set=tle_set,
center_frequency=observation_data.get('center_frequency', None)
)
new_observations.append(observation)
        if self.violators and not self.context['request'].user.groups.filter(
                name='Operators').exists():
check_violators_scheduling_limit(self.violators, observations_per_norad_id)
for observation in new_observations:
observation.save()
return new_observations
def update(self, instance, validated_data):
"""Updates observations from a list of validated data
currently disabled and returns None
"""
return None
class NewObservationSerializer(serializers.Serializer):
"""SatNOGS Network New Observation API Serializer"""
start = serializers.DateTimeField(
input_formats=['%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S'],
error_messages={
'invalid': 'Start datetime should have either \'%Y-%m-%d %H:%M:%S.%f\' or '
'\'%Y-%m-%d %H:%M:%S\' '
'format.',
'required': 'Start(\'start\' key) datetime is required.'
}
)
end = serializers.DateTimeField(
input_formats=['%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S'],
error_messages={
'invalid': 'End datetime should have either \'%Y-%m-%d %H:%M:%S.%f\' or '
'\'%Y-%m-%d %H:%M:%S\' '
'format.',
'required': 'End datetime(\'end\' key) is required.'
}
)
ground_station = serializers.PrimaryKeyRelatedField(
queryset=Station.objects.filter(
status__gt=0, alt__isnull=False, lat__isnull=False, lng__isnull=False
),
allow_null=False,
error_messages={
'does_not_exist': 'Station should exist, be online and have a defined location.',
'required': 'Station(\'ground_station\' key) is required.'
}
)
transmitter_uuid = serializers.CharField(
max_length=22,
min_length=22,
error_messages={
'invalid': 'Transmitter UUID should be valid.',
'required': 'Transmitter UUID(\'transmitter_uuid\' key) is required.'
}
)
center_frequency = serializers.IntegerField(
error_messages={'negative': 'Frequency cannot be a negative value.'}, required=False
)
def validate_start(self, value):
"""Validates start datetime of a new observation"""
try:
check_start_datetime(value)
except ValueError as error:
raise serializers.ValidationError(error, code='invalid')
return value
def validate_end(self, value):
"""Validates end datetime of a new observation"""
try:
check_end_datetime(value)
except ValueError as error:
raise serializers.ValidationError(error, code='invalid')
return value
def validate(self, attrs):
"""Validates combination of start and end datetimes of a new observation"""
start = attrs['start']
end = attrs['end']
try:
check_start_end_datetimes(start, end)
except ValueError as error:
raise serializers.ValidationError(error, code='invalid')
return attrs
def create(self, validated_data):
"""Creates a new observation
Currently not implemented and raises exception. If in the future we want to implement this
serializer accepting and creating observation from single object instead from a list of
objects, we should remove raising the exception below and implement the validations that
exist now only on NewObservationListSerializer
"""
raise serializers.ValidationError(
"Serializer is implemented for accepting and schedule\
only lists of observations"
)
def update(self, instance, validated_data):
"""Updates an observation from validated data, currently disabled and returns None"""
return None
class Meta:
list_serializer_class = NewObservationListSerializer
class FrequencyRangeSerializer(serializers.ModelSerializer):
"""SatNOGS Network FrequencyRange API Serializer"""
class Meta:
model = FrequencyRange
fields = ('min_frequency', 'max_frequency', 'bands')
class AntennaSerializer(serializers.ModelSerializer):
"""SatNOGS Network Antenna API Serializer"""
antenna_type = serializers.StringRelatedField()
frequency_ranges = FrequencyRangeSerializer(many=True)
class Meta:
model = Antenna
fields = ('antenna_type', 'frequency_ranges')
class StationSerializer(serializers.ModelSerializer):
"""SatNOGS Network Station API Serializer"""
    # Using SerializerMethodField instead of the reverse relation (antennas) with
    # AntennaSerializer directly, so as not to break the API; this should change in
    # the next API version
antenna = serializers.SerializerMethodField()
min_horizon = serializers.SerializerMethodField()
observations = serializers.SerializerMethodField()
status = serializers.SerializerMethodField()
altitude = serializers.IntegerField(min_value=0, source='alt')
class Meta:
model = Station
fields = (
'id', 'name', 'altitude', 'min_horizon', 'lat', 'lng', 'qthlocator', 'antenna',
'created', 'last_seen', 'status', 'observations', 'description', 'client_version',
'target_utilization'
)
def get_min_horizon(self, obj):
"""Returns Station minimum horizon"""
return obj.horizon
def get_antenna(self, obj):
"""Returns Station antenna list"""
antenna_types = {
'Dipole': 'dipole',
'V-Dipole': 'v-dipole',
'Discone': 'discone',
'Ground Plane': 'ground',
'Yagi': 'yagi',
'Cross Yagi': 'cross-yagi',
'Helical': 'helical',
'Parabolic': 'parabolic',
'Vertical': 'vertical',
'Turnstile': 'turnstile',
'Quadrafilar': 'quadrafilar',
'Eggbeater': 'eggbeater',
'Lindenblad': 'lindenblad',
'Parasitic Lindenblad': 'paralindy',
'Patch': 'patch',
'Other Directional': 'other direct',
'Other Omni-Directional': 'other omni',
}
serializer = AntennaSerializer(obj.antennas, many=True)
antennas = []
for antenna in serializer.data:
for frequency_range in antenna['frequency_ranges']:
antennas.append(
{
'frequency': frequency_range['min_frequency'],
'frequency_max': frequency_range['max_frequency'],
'band': frequency_range['bands'],
'antenna_type': antenna_types[antenna['antenna_type']],
'antenna_type_name': antenna['antenna_type'],
}
)
return antennas
def get_observations(self, obj):
"""Returns Station observations number"""
return obj.total_obs
def get_status(self, obj):
"""Returns Station status"""
try:
return obj.get_status_display()
except AttributeError:
return None
class StationConfigurationSerializer(serializers.ModelSerializer):
"""SatNOGS Network Station Configuration API Serializer"""
satnogs_api_token = serializers.SerializerMethodField()
satnogs_station_elev = serializers.IntegerField(source='alt')
satnogs_station_id = serializers.SerializerMethodField()
satnogs_station_lat = serializers.FloatField(source='lat')
satnogs_station_lon = serializers.FloatField(source='lng')
class Meta:
model = Station
fields = [
'satnogs_station_id', 'satnogs_api_token', 'satnogs_station_elev',
'satnogs_station_lat', 'satnogs_station_lon', 'satnogs_soapy_rx_device',
'satnogs_antenna', 'satnogs_rx_samp_rate', 'satnogs_rf_gain'
]
def get_satnogs_api_token(self, obj):
"""Returns API key of station owner"""
if obj.owner:
return obj.owner.auth_token.key
return None
def get_satnogs_station_id(self, obj):
"""Returns API key of station owner"""
return obj.pk
class JobSerializer(serializers.ModelSerializer):
"""SatNOGS Network Job API Serializer"""
frequency = serializers.SerializerMethodField()
mode = serializers.SerializerMethodField()
transmitter = serializers.SerializerMethodField()
baud = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Observation
fields = (
'id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2', 'frequency', 'mode',
'transmitter', 'baud'
)
def get_tle0(self, obj):
"""Returns tle0"""
return obj.tle_line_0
def get_tle1(self, obj):
"""Returns tle1"""
return obj.tle_line_1
def get_tle2(self, obj):
"""Returns tle2"""
return obj.tle_line_2
def get_frequency(self, obj):
"""Returns Observation frequency"""
frequency = obj.center_frequency or obj.transmitter_downlink_low
frequency_drift = obj.transmitter_downlink_drift
if obj.center_frequency or frequency_drift is None:
return frequency
return int(round(frequency + ((frequency * frequency_drift) / 1e9)))
def get_transmitter(self, obj):
"""Returns Transmitter UUID"""
return obj.transmitter_uuid
def get_mode(self, obj):
"""Returns Transmitter mode"""
try:
return obj.transmitter_mode
except AttributeError:
return ''
def get_baud(self, obj):
"""Returns Transmitter baudrate"""
return obj.transmitter_baud
class TransmitterSerializer(serializers.Serializer):
"""SatNOGS Network Transmitter API Serializer"""
uuid = serializers.SerializerMethodField()
stats = serializers.SerializerMethodField()
def get_uuid(self, obj):
"""Returns Transmitter UUID"""
return obj['transmitter_uuid']
def get_stats(self, obj):
"""Returns Transmitter statistics"""
stats = transmitter_stats_by_uuid(obj['transmitter_uuid'])
for statistic in stats:
stats[statistic] = int(stats[statistic])
return stats
def create(self, validated_data):
"""Creates an object instance of transmitter, currently disabled and returns None"""
return None
def update(self, instance, validated_data):
"""Updates an object instance of transmitter, currently disabled and returns None"""
return None
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/api/serializers.py
| 0.819316 | 0.172974 |
serializers.py
|
pypi
|
from django.core.exceptions import ObjectDoesNotExist
class UserNoPermissionError(Exception):
"""Error when user has not persmission"""
def check_stations_without_permissions(stations_perms):
"""
    Check whether the given dictionary of scheduling permissions per station
    contains stations that don't have scheduling permissions.
"""
stations_without_permissions = [
int(station_id) for station_id in stations_perms.keys() if not stations_perms[station_id]
]
if stations_without_permissions:
if len(stations_without_permissions) == 1:
raise UserNoPermissionError(
'No permission to schedule observations on station: {0}'.format(
stations_without_permissions[0]
)
)
raise UserNoPermissionError(
'No permission to schedule observations on stations: {0}'.
format(stations_without_permissions)
)
def schedule_station_violators_perms(user, station):
"""
This context flag will determine if user can schedule satellites that violate frequencies on
the given station.
"""
if user.is_authenticated:
if station.violator_scheduling > 0:
if station.violator_scheduling == 2 or user.groups.filter(name='Operators').exists():
return True
return False
def schedule_stations_violators_perms(user, stations):
"""
This context flag will determine if user can schedule satellites that violate frequencies on
the given stations.
"""
if user.is_authenticated:
return {
station.id: schedule_station_violators_perms(user, station)
for station in stations
}
return {station.id: False for station in stations}
def check_schedule_perms_of_violators_per_station(user, station_set):
"""Checks if user has permissions to schedule on stations"""
stations_perms = schedule_stations_violators_perms(user, station_set)
check_stations_without_permissions(stations_perms)
def schedule_perms(user):
"""
This context flag will determine if user can schedule an observation.
That includes station owners, moderators, admins.
see: https://wiki.satnogs.org/Operation#Network_permissions_matrix
"""
if user.is_authenticated:
stations_statuses = user.ground_stations.values_list('status', flat=True)
# User has online station (status=2)
if 2 in stations_statuses:
return True
# User has testing station (status=1)
if 1 in stations_statuses:
return True
# User has special permissions
if user.groups.filter(name='Moderators').exists():
return True
if user.is_superuser:
return True
return False
def schedule_station_perms(user, station):
"""
This context flag will determine if user can schedule an observation.
That includes station owners, moderators, admins.
see: https://wiki.satnogs.org/Operation#Network_permissions_matrix
"""
if user.is_authenticated:
# User has online station (status=2) and station is online
try:
if user.ground_stations.filter(status=2).exists() and station.status == 2:
return True
except ObjectDoesNotExist:
pass
# If the station is testing (status=1) and user is its owner
if station.status == 1 and station.owner == user:
return True
# User has special permissions
if user.groups.filter(name='Moderators').exists():
return True
if user.is_superuser:
return True
return False
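# Decision summary for schedule_station_perms, derived from the branches above:
# superusers and Moderators may schedule on any station; a user who owns at
# least one online station may schedule on any online station; an owner may
# schedule on their own testing station; everyone else may not.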
def schedule_stations_perms(user, stations):
"""
This context flag will determine if user can schedule an observation.
That includes station owners, moderators, admins.
see: https://wiki.satnogs.org/Operation#Network_permissions_matrix
"""
if user.is_authenticated:
# User has special permissions
if user.groups.filter(name='Moderators').exists():
return {station.id: True for station in stations}
if user.is_superuser:
return {station.id: True for station in stations}
# User has online station (status=2) and station is online
try:
if user.ground_stations.filter(status=2).exists():
return {
s.id: s.status == 2 or (s.owner == user and s.status == 1)
for s in stations
}
except ObjectDoesNotExist:
pass
# If the station is testing (status=1) and user is its owner
return {station.id: station.owner == user and station.status == 1 for station in stations}
return {station.id: False for station in stations}
def check_schedule_perms_per_station(user, station_set):
"""Checks if user has permissions to schedule on stations"""
stations_perms = schedule_stations_perms(user, station_set)
check_stations_without_permissions(stations_perms)
def delete_perms(user, observation):
"""
This context flag will determine if a delete button appears for the observation.
That includes observer, station owner involved, moderators, admins.
see: https://wiki.satnogs.org/Operation#Network_permissions_matrix
"""
if not observation.is_started and user.is_authenticated:
# User owns the observation
try:
if observation.author == user:
return True
except AttributeError:
pass
# User owns the station
try:
if observation.ground_station and observation.ground_station.owner == user:
return True
except (AttributeError, ObjectDoesNotExist):
pass
# User has special permissions
if user.groups.filter(name='Moderators').exists():
return True
if user.is_superuser:
return True
return False
def vet_perms(user, observation):
"""
This context flag will determine if vet buttons appears for the observation.
That includes observer, station owner involved, moderators, admins.
see: https://wiki.satnogs.org/Operation#Network_permissions_matrix
"""
if user.is_authenticated:
# User has online station (status=2)
if user.ground_stations.filter(status=2).exists():
return True
# User owns the observation
try:
if observation.author == user:
return True
except AttributeError:
pass
# User owns the station
try:
if observation.ground_station and observation.ground_station.owner == user:
return True
except AttributeError:
pass
# User has special permissions
if user.groups.filter(name='Moderators').exists():
return True
if user.is_superuser:
return True
return False
def modify_delete_station_perms(user, station):
"""
This context flag will determine if the user can modify or delete a station
or bulk-delete future observations on a station.
That includes station owners, moderators and admins.
"""
if user.is_authenticated:
# User owns the station
try:
if user == station.owner:
return True
except AttributeError:
pass
# User has special permissions
if user.groups.filter(name='Moderators').exists():
return True
if user.is_superuser:
return True
return False
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/perms.py
| 0.793546 | 0.473718 |
perms.py
|
pypi
|
import math
from django.core.cache import cache
from django.db.models import Count, Q
from django.utils.timezone import now
from network.base.models import Observation
def transmitter_stats_by_uuid(uuid):
"""Calculate and put in cache transmitter statistics"""
stats = cache.get('tr-{0}-stats'.format(uuid))
if stats is None:
stats = Observation.objects.filter(transmitter_uuid=uuid).exclude(
status__lt=-100
).aggregate(
future=Count('pk', filter=Q(end__gt=now())),
bad=Count('pk', filter=Q(status__range=(-100, -1))),
unknown=Count('pk', filter=Q(status__range=(0, 99), end__lte=now())),
good=Count('pk', filter=Q(status__gte=100)),
)
cache.set('tr-{0}-stats'.format(uuid), stats, 3600)
unknown_count = 0 if stats['unknown'] is None else stats['unknown']
future_count = 0 if stats['future'] is None else stats['future']
good_count = 0 if stats['good'] is None else stats['good']
bad_count = 0 if stats['bad'] is None else stats['bad']
total_count = unknown_count + future_count + good_count + bad_count
unknown_rate = 0
future_rate = 0
success_rate = 0
bad_rate = 0
if total_count:
unknown_rate = math.trunc(10000 * (unknown_count / total_count)) / 100
future_rate = math.trunc(10000 * (future_count / total_count)) / 100
success_rate = math.trunc(10000 * (good_count / total_count)) / 100
bad_rate = math.trunc(10000 * (bad_count / total_count)) / 100
return {
'total_count': total_count,
'unknown_count': unknown_count,
'future_count': future_count,
'good_count': good_count,
'bad_count': bad_count,
'unknown_rate': unknown_rate,
'future_rate': future_rate,
'success_rate': success_rate,
'bad_rate': bad_rate
}
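# Rate arithmetic above: math.trunc(10000 * ratio) / 100 truncates (rather than
# rounds) to two decimal places; e.g. a good/total ratio of 1/3 becomes
# math.trunc(10000 / 3) / 100 == 33.33, so the four rates can sum to slightly
# less than 100.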
def satellite_stats_by_transmitter_list(transmitter_list):
"""Calculate satellite statistics"""
total_count = 0
unknown_count = 0
future_count = 0
good_count = 0
bad_count = 0
unknown_rate = 0
future_rate = 0
success_rate = 0
bad_rate = 0
for transmitter in transmitter_list:
transmitter_stats = transmitter_stats_by_uuid(transmitter['uuid'])
total_count += transmitter_stats['total_count']
unknown_count += transmitter_stats['unknown_count']
future_count += transmitter_stats['future_count']
good_count += transmitter_stats['good_count']
bad_count += transmitter_stats['bad_count']
if total_count:
unknown_rate = math.trunc(10000 * (unknown_count / total_count)) / 100
future_rate = math.trunc(10000 * (future_count / total_count)) / 100
success_rate = math.trunc(10000 * (good_count / total_count)) / 100
bad_rate = math.trunc(10000 * (bad_count / total_count)) / 100
return {
'total_count': total_count,
'unknown_count': unknown_count,
'future_count': future_count,
'good_count': good_count,
'bad_count': bad_count,
'unknown_rate': unknown_rate,
'future_rate': future_rate,
'success_rate': success_rate,
'bad_rate': bad_rate
}
def transmitters_with_stats(transmitters_list):
"""Returns a list of transmitters with their statistics"""
transmitters_with_stats_list = []
for transmitter in transmitters_list:
transmitter_stats = transmitter_stats_by_uuid(transmitter['uuid'])
transmitter_with_stats = dict(transmitter, **transmitter_stats)
transmitters_with_stats_list.append(transmitter_with_stats)
return transmitters_with_stats_list
def unknown_observations_count(user):
"""Returns a count of unknown status observations per user"""
user_unknown_count = cache.get('user-{0}-unknown-count'.format(user.id))
if user_unknown_count is None:
user_unknown_count = Observation.objects.filter(
author=user, status__range=(0, 99), end__lte=now()
).exclude(
waterfall_old='', waterfall=''
).count()
cache.set('user-{0}-unknown-count'.format(user.id), user_unknown_count, 120)
return user_unknown_count
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/stats.py
| 0.549641 | 0.24919 |
stats.py
|
pypi
|
from collections import defaultdict
from datetime import datetime, timedelta
from django.conf import settings
from django.utils.timezone import make_aware, utc
class ObservationOverlapError(Exception):
"""Error when observation overlaps with already scheduled one"""
class OutOfRangeError(Exception):
"""Error when a frequency is out of a transmitter's or station's antenna frequency range"""
class NegativeElevationError(Exception):
"""Error when satellite doesn't raise above station's horizon"""
class SinglePassError(Exception):
"""Error when between given start and end datetimes there are more than one satellite passes"""
class NoTleSetError(Exception):
"""Error when satellite doesn't have available TLE set"""
class SchedulingLimitError(Exception):
"""Error when observations exceed scheduling limit"""
def check_start_datetime(start):
"""Validate start datetime"""
if start < make_aware(datetime.now(), utc):
raise ValueError("Start datetime should be in the future!")
if start < make_aware(datetime.now() + timedelta(minutes=settings.OBSERVATION_DATE_MIN_START),
utc):
raise ValueError(
"Start datetime should be in the future, at least {0} minutes from now".format(
settings.OBSERVATION_DATE_MIN_START
)
)
def check_end_datetime(end):
"""Validate end datetime"""
if end < make_aware(datetime.now(), utc):
raise ValueError("End datetime should be in the future!")
max_duration = settings.OBSERVATION_DATE_MIN_START + settings.OBSERVATION_DATE_MAX_RANGE
if end > make_aware(datetime.now() + timedelta(minutes=max_duration), utc):
raise ValueError(
"End datetime should be in the future, at most {0} minutes from now".
format(max_duration)
)
def check_start_end_datetimes(start, end):
"""Validate the pair of start and end datetimes"""
if start > end:
raise ValueError("End datetime should be after Start datetime!")
if (end - start) < timedelta(seconds=settings.OBSERVATION_DURATION_MIN):
raise ValueError(
"Duration of observation should be at least {0} seconds".format(
settings.OBSERVATION_DURATION_MIN
)
)
def downlink_is_in_range(antenna, transmitter, center_frequency=None):
"""Return true if center or transmitter frequency is in station's antenna range"""
downlink = center_frequency or transmitter['downlink_low']
if not downlink:
return False
for frequency_range in antenna.frequency_ranges.all():
if frequency_range.min_frequency <= downlink <= frequency_range.max_frequency:
return True
return False
def is_transmitter_in_station_range(transmitter, station, center_frequency=None):
"""Return true if center or transmitter frequency is in one of the station's antennas ranges"""
if transmitter["type"] == "Transponder" and center_frequency is None:
center_frequency = (transmitter['downlink_high'] + transmitter['downlink_low']) // 2
for gs_antenna in station.antennas.all():
if downlink_is_in_range(gs_antenna, transmitter, center_frequency):
return True
return False
def is_frequency_in_transmitter_range(center_frequency, transmitter):
"""Validate whether center frequency is in transmitter range"""
downlink_low = transmitter['downlink_low']
downlink_high = transmitter['downlink_high']
downlink_drift = transmitter['downlink_drift']
if not downlink_low:
return False
if not downlink_high:
return downlink_low == center_frequency
if downlink_drift:
if downlink_drift < 0:
downlink_low += downlink_drift
else:
downlink_high += downlink_drift
return downlink_low <= center_frequency <= downlink_high
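# Note on the drift handling above: downlink_drift is applied as an absolute
# offset that widens the accepted range on one side only (negative drift lowers
# downlink_low, positive drift raises downlink_high). Illustrative values, not
# taken from this file: a 435_000_000..435_100_000 range with drift -500
# accepts center frequencies down to 434_999_500.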
def check_transmitter_station_pairs(transmitter_station_list):
"""Validate the pairs of transmitter and stations"""
out_of_range_triads = []
frequencies_out_of_transmitter_range_pairs = []
for transmitter, station, center_frequency in transmitter_station_list:
if center_frequency and not is_frequency_in_transmitter_range(center_frequency,
transmitter):
frequencies_out_of_transmitter_range_pairs.append(
(str(transmitter['uuid']), center_frequency)
)
if not is_transmitter_in_station_range(transmitter, station, center_frequency):
out_of_range_triads.append(
(
str(transmitter['uuid']), int(station.id), center_frequency
or transmitter['downlink_low']
)
)
if frequencies_out_of_transmitter_range_pairs:
if len(frequencies_out_of_transmitter_range_pairs) == 1:
raise OutOfRangeError(
'Center frequency out of transmitter range.'
' Transmitter-frequency pair: {0}'.format(
frequencies_out_of_transmitter_range_pairs[0]
)
)
raise OutOfRangeError(
'Center frequency out of transmitter range.'
' Transmitter-frequency pairs: {0}'.format(
len(frequencies_out_of_transmitter_range_pairs)
)
)
if out_of_range_triads:
if len(out_of_range_triads) == 1:
raise OutOfRangeError(
'Transmitter out of station frequency range.'
' Transmitter-Station-Observation Frequency triad: {0}'.format(
out_of_range_triads[0]
)
)
raise OutOfRangeError(
'Transmitter out of station frequency range. '
'Transmitter-Station-Observation Frequency triads: {0}'.format(out_of_range_triads)
)
def check_overlaps(stations_dict):
"""Check for overlaps among requested observations"""
for station in stations_dict.keys():
periods = stations_dict[station]
total_periods = len(periods)
for i in range(0, total_periods):
start_i = periods[i][0]
end_i = periods[i][1]
for j in range(i + 1, total_periods):
start_j = periods[j][0]
end_j = periods[j][1]
if ((start_j <= start_i <= end_j) or (start_j <= end_i <= end_j)
or (start_i <= start_j and end_i >= end_j)): # noqa: W503
raise ObservationOverlapError(
'Observations of station {0} overlap'.format(station)
)
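# The three-clause condition above is the closed-interval overlap test: for
# [start_i, end_i] and [start_j, end_j] it is equivalent to
# start_i <= end_j and start_j <= end_i. Because the boundaries are inclusive,
# back-to-back observations (one ending exactly when the next starts) also
# count as overlapping.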
def return_no_fit_periods(scheduled_observations, observations_limit, time_limit):
"""
    Return periods that cannot fit any additional observation due to the
    observation limit within a certain time window.
"""
scheduled_observations.sort()
no_fit_periods = []
obs_to_reach_limit = observations_limit - 1
for pointer in range(0, len(scheduled_observations) - obs_to_reach_limit):
first_obs_start = scheduled_observations[pointer]
last_obs_start = scheduled_observations[pointer + obs_to_reach_limit]
first_last_timedelta = last_obs_start - first_obs_start
if first_last_timedelta.total_seconds() < time_limit:
time_limit_period = timedelta(seconds=time_limit)
no_fit_periods.append(
(last_obs_start - time_limit_period, first_obs_start + time_limit_period)
)
return no_fit_periods
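# Sliding-window logic above: with a limit of N observations per time_limit
# seconds, any N consecutive start times spanning less than time_limit seconds
# saturate the limit, so no further observation may start inside
# (last_start - time_limit, first_start + time_limit). Illustrative: a limit of
# 2 per 3600 s with starts at 10:00 and 10:30 blocks new starts between 09:30
# and 11:00.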
def fit_observation_into_scheduled_observations(
observation, scheduled_observations, observations_limit, time_limit, limit_reason
):
"""
    Checks whether the given observation exceeds the scheduling limit; if not,
    appends it to the given list of scheduled observations.
"""
no_fit_periods = return_no_fit_periods(scheduled_observations, observations_limit, time_limit)
for period in no_fit_periods:
if period[0] <= observation <= period[1]:
observation_start = observation.strftime("%Y-%m-%d %H:%M:%S UTC")
period_start = period[0].strftime("%Y-%m-%d %H:%M:%S UTC")
period_end = period[1].strftime("%Y-%m-%d %H:%M:%S UTC")
raise SchedulingLimitError(
(
'Scheduling observation that starts at {0} exceeds scheduling limit for the'
' period from {1} to {2}\nReason for scheduling limit: {3}'
).format(observation_start, period_start, period_end, limit_reason)
)
scheduled_observations.append(observation)
def check_violators_scheduling_limit(violators, observations_per_norad_id):
"""
Check if observations to be scheduled for satellite violators exceed the scheduling limit.
"""
scheduled_observations_per_norad_id = defaultdict(list)
time_limit = settings.OBSERVATIONS_PER_VIOLATOR_SATELLITE_PERIOD
observations_limit = settings.OBSERVATIONS_PER_VIOLATOR_SATELLITE
for satellite in violators:
for observation in satellite.observations.filter(
start__gte=make_aware(datetime.now() - timedelta(seconds=time_limit), utc)):
scheduled_observations_per_norad_id[satellite.norad_cat_id].append(observation.start)
for observation in observations_per_norad_id[satellite.norad_cat_id]:
fit_observation_into_scheduled_observations(
observation, scheduled_observations_per_norad_id[satellite.norad_cat_id],
observations_limit, time_limit, '{0}({1}) is frequency violator satellite'.format(
satellite.name, satellite.norad_cat_id
)
)
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/validators.py
| 0.848046 | 0.521715 |
validators.py
|
pypi
|
import csv
from builtins import str
from datetime import datetime
import requests # pylint: disable=C0412
from django.conf import settings
from django.contrib.admin.helpers import label_for_field
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.utils.text import slugify
from requests.exceptions import RequestException
def format_frequency(value):
"""Returns Hz formatted frequency html string"""
try:
to_format = float(value)
except (TypeError, ValueError):
return '-'
if to_format >= 1000000000000:
formatted = format(to_format / 1000000000000, '.3f')
formatted = formatted + ' THz'
elif to_format >= 1000000000:
formatted = format(to_format / 1000000000, '.3f')
formatted = formatted + ' GHz'
elif to_format >= 1000000:
formatted = format(to_format / 1000000, '.3f')
formatted = formatted + ' MHz'
elif to_format >= 1000:
formatted = format(to_format / 1000, '.3f')
        formatted = formatted + ' kHz'
else:
formatted = format(to_format, '.3f')
formatted = formatted + ' Hz'
return formatted
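# Illustrative outputs (assumed values): the helper picks the largest fitting unit.
#     format_frequency(437250000) -> '437.250 MHz'
#     format_frequency(145800)    -> '145.800 kHz'
#     format_frequency(None)      -> '-'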
def populate_formset_error_messages(messages, request, formset):
"""Add errors to django messages framework by extracting them from formset)"""
non_form_errors = formset.non_form_errors()
if non_form_errors:
messages.error(request, str(non_form_errors[0]))
return
for error in formset.errors:
if error:
for field in error:
messages.error(request, str(error[field][0]))
return
def bands_from_range(min_frequency, max_frequency):
"""Returns band names of the given frequency range based on
https://www.itu.int/rec/R-REC-V.431-8-201508-I/en recommendation from ITU
"""
if max_frequency < min_frequency:
return []
frequency_bands = {
'ULF': (300, 3000),
'VLF': (3000, 30000),
'LF': (30000, 300000),
'MF': (300000, 3000000),
'HF': (3000000, 30000000),
'VHF': (30000000, 300000000),
'UHF': (300000000, 1000000000),
'L': (1000000000, 2000000000),
'S': (2000000000, 4000000000),
'C': (4000000000, 8000000000),
'X': (8000000000, 12000000000),
'Ku': (12000000000, 18000000000),
'K': (18000000000, 27000000000),
'Ka': (27000000000, 40000000000),
}
bands = []
found_min = False
for name, (min_freq, max_freq) in frequency_bands.items():
if not found_min:
if min_freq <= min_frequency <= max_freq:
bands.append(name)
if min_freq <= max_frequency <= max_freq:
return bands
found_min = True
continue
continue
bands.append(name)
if min_freq < max_frequency <= max_freq:
return bands
return []
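# Illustrative usage (assumed values): a range spanning several ITU bands.
#     bands_from_range(144000000, 1300000000) -> ['VHF', 'UHF', 'L']
#     bands_from_range(1300000000, 144000000) -> []  (inverted range)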
def export_as_csv(modeladmin, request, queryset):
"""Exports admin panel table in csv format"""
if not request.user.is_staff:
raise PermissionDenied
field_names = modeladmin.list_display
if 'action_checkbox' in field_names:
field_names.remove('action_checkbox')
response = HttpResponse(content_type="text/csv")
response['Content-Disposition'] = 'attachment; filename={}.csv'.format(
str(modeladmin.model._meta).replace('.', '_')
)
writer = csv.writer(response)
headers = []
for field_name in list(field_names):
label = label_for_field(field_name, modeladmin.model, modeladmin)
if label.islower():
label = label.title()
headers.append(label)
writer.writerow(headers)
for row in queryset:
values = []
for field in field_names:
try:
value = (getattr(row, field))
except AttributeError:
value = (getattr(modeladmin, field))
if callable(value):
try:
# get value from model
value = value()
except TypeError:
# get value from modeladmin e.g: admin_method_1
value = value(row)
if value is None:
value = ''
values.append(str(value).encode('utf-8'))
writer.writerow(values)
return response
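# Illustrative usage (hypothetical admin class): register the action on a ModelAdmin
# so it appears in the admin actions dropdown.
#     class StationAdmin(admin.ModelAdmin):
#         actions = [export_as_csv]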
def export_station_status(self, request, queryset):
"""Exports status of selected stations in csv format"""
meta = self.model._meta
field_names = ["id", "status"]
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
writer = csv.writer(response)
writer.writerow(field_names)
for obj in queryset:
writer.writerow([getattr(obj, field) for field in field_names])
return response
def community_get_discussion_details(
observation_id, satellite_name, norad_cat_id, observation_url
):
"""
Return the details of a discussion of the observation (if existent) in the
satnogs community (discourse)
"""
discussion_url = ('https://community.libre.space/new-topic?title=Observation {0}: {1}'
' ({2})&body=Regarding [Observation {0}]({3}) ...'
'&category=observations') \
.format(observation_id, satellite_name, norad_cat_id, observation_url)
discussion_slug = 'https://community.libre.space/t/observation-{0}-{1}-{2}' \
.format(observation_id, slugify(satellite_name),
norad_cat_id)
try:
response = requests.get(
'{}.json'.format(discussion_slug), timeout=settings.COMMUNITY_TIMEOUT
)
response.raise_for_status()
has_comments = (response.status_code == 200)
except RequestException:
# Community is unreachable
has_comments = False
return {'url': discussion_url, 'slug': discussion_slug, 'has_comments': has_comments}
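# Illustrative return value (assumed observation id, satellite name and URL):
#     community_get_discussion_details(1234, 'SAT-1', 99999, 'https://example.org/observations/1234/')
#     -> {'url': 'https://community.libre.space/new-topic?title=Observation 1234: SAT-1 (99999)...',
#         'slug': 'https://community.libre.space/t/observation-1234-sat-1-99999',
#         'has_comments': False}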
def sync_demoddata_to_db(frame):
"""
Task to send a frame from SatNOGS Network to SatNOGS DB
Raises requests.exceptions.RequestException if sync fails."""
obs = frame.observation
sat = obs.satellite
ground_station = obs.ground_station
try:
        # Need to extract the timestamp from the filename. Hacky..
if frame.payload_demod:
file_datetime = frame.payload_demod.name.split('/')[-1].split('_')[2]
else:
file_datetime = frame.demodulated_data.name.split('/')[-1].split('_')[2]
frame_datetime = datetime.strptime(file_datetime, '%Y-%m-%dT%H-%M-%S')
submit_datetime = datetime.strftime(frame_datetime, '%Y-%m-%dT%H:%M:%S.000Z')
except ValueError:
return
# SiDS parameters
params = {
'noradID': sat.norad_cat_id,
'source': "Unknown",
'timestamp': submit_datetime,
'locator': 'longLat',
'longitude': obs.station_lng,
'latitude': obs.station_lat,
'frame': frame.display_payload_hex().replace(' ', ''),
'satnogs_network': 'True', # NOT a part of SiDS
'observation_id': obs.id, # NOT a part of SiDS
}
if ground_station:
params['source'] = ground_station.name
params['station_id'] = ground_station.id # NOT a part of SiDS
telemetry_url = f"{settings.DB_API_ENDPOINT}telemetry/"
response = requests.post(telemetry_url, data=params, timeout=settings.DB_API_TIMEOUT)
response.raise_for_status()
frame.copied_to_db = True
frame.save(update_fields=['copied_to_db'])
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/utils.py
| 0.58059 | 0.151278 |
utils.py
|
pypi
|
from datetime import timedelta
from celery import shared_task
from django.conf import settings
from django.db import transaction
from django.utils.timezone import now
from network.base.models import Observation
@shared_task
def find_and_rate_failed_observations():
"""
    Task for checking failed observations; it filters all observations without artifacts some
    minutes after the end of the observation. These observations are rated as "failed" with the
    value "-1000".
"""
time_limit = now() - timedelta(seconds=settings.OBS_NO_RESULTS_IGNORE_TIME)
Observation.objects.filter(
waterfall_old='',
waterfall='',
archived=False,
payload_old='',
payload='',
demoddata__payload_demod__isnull=True,
demoddata__demodulated_data__isnull=True,
end__lt=time_limit
).exclude(status=-1000).update(status=-1000)
@shared_task
def rate_observation(observation_id, action, action_value=None):
"""
Rate observation for given observation and action and return the result in all forms (integer,
badge name, display name).
Logic of returned value of action "set_waterfall_status":
Action value can be one of (True, False, None) and depending on the current observation
        status returns a value following the table below:
                     With(True)    Without(False)    Unknown(None)
        Failed       100           current           current
        Bad          100           current           0
        Unknown      100           -100              0
        Good         100           -100              0
Logic of returned value of action "waterfall_upload":
If waterfall is uploaded and observation status is "failed" then it changes it back to
"unknown" with value "0". On any other case it returns the current observation status.
Logic of returned value of action "audio_upload":
Action value is the duration of the audio file in seconds. If its difference from the
scheduled duration is over 60 seconds and the observation is not rated as "good" then the
observation is rated as "failed" with value "-1000". If not and observation status is
"failed" then it changes it back to "unknown" with value "0". On any other case it returns
the current observation status.
Logic of returned value of action "data_upload":
If transmitter mode is other than "CW" or "FM" then observation is rated as good by
returning "100". If not and observation status is "failed" then it changes it back to
"unknown" with value "0". On any other case it returns the current observation status.
"""
observations = Observation.objects.select_for_update()
with transaction.atomic():
observation = observations.get(pk=observation_id)
status = observation.status
if action == "set_waterfall_status":
if action_value:
status = 100
elif action_value is None and observation.status >= -100:
status = 0
elif not action_value and observation.status >= 0:
status = -100
elif action == "waterfall_upload":
if observation.status == -1000:
status = 0
elif action == "audio_upload":
scheduled_duration = observation.end - observation.start
if abs(scheduled_duration.seconds - action_value) > 60 and observation.status < 100:
status = -1000
elif observation.status == -1000:
status = 0
elif action == "data_upload":
if observation.transmitter_mode not in ['CW', 'FM']:
status = 100
elif observation.status == -1000:
status = 0
observation.status = status
observation.save(update_fields=['status'])
return (observation.status, observation.status_badge, observation.status_display)
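# Illustrative usage (assumed observation id): the task can be invoked directly or
# queued through Celery; both paths return the (status, badge, display) triple.
#     rate_observation(1234, 'set_waterfall_status', True)   # e.g. (100, 'good', 'Good')
#     rate_observation.delay(1234, 'audio_upload', 598)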
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/rating_tasks.py
| 0.646795 | 0.374762 |
rating_tasks.py
|
pypi
|
import codecs
import re
from datetime import timedelta
from operator import truth
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.core.files.storage import DefaultStorage
from django.core.validators import MaxLengthValidator, MaxValueValidator, MinLengthValidator, \
MinValueValidator
from django.db import models
from django.db.models import Count, Q
from django.dispatch import receiver
from django.urls import reverse
from django.utils.html import format_html
from django.utils.timezone import now
from shortuuidfield import ShortUUIDField
from storages.backends.s3boto3 import S3Boto3Storage
from network.base.db_api import DBConnectionError, get_artifact_metadata_by_observation_id
from network.base.managers import ObservationManager
from network.base.utils import bands_from_range
from network.users.models import User
OBSERVATION_STATUSES = (
('unknown', 'Unknown'),
('good', 'Good'),
('bad', 'Bad'),
('failed', 'Failed'),
)
STATION_STATUSES = (
(2, 'Online'),
(1, 'Testing'),
(0, 'Offline'),
)
STATION_VIOLATOR_SCHEDULING_CHOICES = (
(0, 'No one'),
(1, 'Only Operators'),
(2, 'Everyone'),
)
SATELLITE_STATUS = ['alive', 'dead', 'future', 're-entered']
TRANSMITTER_STATUS = ['active', 'inactive', 'invalid']
TRANSMITTER_TYPE = ['Transmitter', 'Transceiver', 'Transponder']
def _decode_pretty_hex(binary_data):
"""Return the binary data as hex dump of the following form: `DE AD C0 DE`"""
data = codecs.encode(binary_data, 'hex').decode('ascii').upper()
return ' '.join(data[i:i + 2] for i in range(0, len(data), 2))
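# Illustrative usage: _decode_pretty_hex(b'\xde\xad\xc0\xde') -> 'DE AD C0 DE'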
def _name_obs_files(instance, filename):
"""Return a filepath formatted by Observation ID"""
return 'data_obs/{0}/{1}'.format(instance.id, filename)
def _name_obs_demoddata(instance, filename):
"""Return a filepath for DemodData formatted by Observation ID"""
    # If the string below changes, change it also at api/views.py
return 'data_obs/{0}/{1}'.format(instance.observation.id, filename)
def _name_observation_data(instance, filename):
"""Return a filepath formatted by Observation ID"""
return 'data_obs/{0}/{1}/{2}/{3}/{4}/{5}'.format(
instance.start.year, instance.start.month, instance.start.day, instance.start.hour,
instance.id, filename
)
def _name_observation_demoddata(instance, filename):
"""Return a filepath for DemodData formatted by Observation ID"""
    # If the string below changes, change it also at api/views.py
return 'data_obs/{0}/{1}/{2}/{3}/{4}/{5}'.format(
instance.observation.start.year, instance.observation.start.month,
instance.observation.start.day, instance.observation.start.hour, instance.observation.id,
filename
)
def _select_audio_storage():
return S3Boto3Storage() if settings.USE_S3_STORAGE_FOR_AUDIO else DefaultStorage()
def _select_waterfall_storage():
return S3Boto3Storage() if settings.USE_S3_STORAGE_FOR_WATERFALL else DefaultStorage()
def _select_data_storage():
return S3Boto3Storage() if settings.USE_S3_STORAGE_FOR_DATA else DefaultStorage()
def validate_image(fieldfile_obj):
"""Validates image size"""
filesize = fieldfile_obj.file.size
megabyte_limit = 2.0
if filesize > megabyte_limit * 1024 * 1024:
raise ValidationError("Max file size is %sMB" % str(megabyte_limit))
class Station(models.Model):
"""Model for SatNOGS ground stations."""
owner = models.ForeignKey(
User, related_name="ground_stations", on_delete=models.SET_NULL, null=True, blank=True
)
name = models.CharField(max_length=45)
image = models.ImageField(upload_to='ground_stations', blank=True, validators=[validate_image])
alt = models.PositiveIntegerField(null=True, blank=True, help_text='In meters above sea level')
lat = models.FloatField(
null=True,
blank=True,
validators=[MaxValueValidator(90), MinValueValidator(-90)],
help_text='eg. 38.01697'
)
lng = models.FloatField(
null=True,
blank=True,
validators=[MaxValueValidator(180), MinValueValidator(-180)],
help_text='eg. 23.7314'
)
# https://en.wikipedia.org/wiki/Maidenhead_Locator_System
qthlocator = models.CharField(max_length=8, blank=True)
featured_date = models.DateField(null=True, blank=True)
created = models.DateTimeField(auto_now_add=True)
testing = models.BooleanField(default=True)
last_seen = models.DateTimeField(null=True, blank=True)
status = models.IntegerField(choices=STATION_STATUSES, default=0)
violator_scheduling = models.IntegerField(
choices=STATION_VIOLATOR_SCHEDULING_CHOICES, default=0
)
horizon = models.PositiveIntegerField(help_text='In degrees above 0', default=10)
description = models.TextField(max_length=500, blank=True, help_text='Max 500 characters')
client_version = models.CharField(max_length=45, blank=True)
target_utilization = models.IntegerField(
validators=[MaxValueValidator(100), MinValueValidator(0)],
help_text='Target utilization factor for '
'your station',
null=True,
blank=True
)
client_id = models.CharField(max_length=128, blank=True)
# Basic client configuration
satnogs_soapy_rx_device = models.CharField(max_length=40, blank=True)
satnogs_antenna = models.CharField(max_length=40, blank=True)
satnogs_rx_samp_rate = models.PositiveIntegerField(null=True, blank=True)
satnogs_rf_gain = models.FloatField(null=True, blank=True)
class Meta:
ordering = ['-status']
indexes = [models.Index(fields=['-status', 'id'])]
def get_image(self):
"""Return the image of the station or the default image if there is a defined one"""
if self.image and hasattr(self.image, 'url'):
return self.image.url
return settings.STATION_DEFAULT_IMAGE
@property
def is_online(self):
"""Return true if station is online"""
try:
heartbeat = self.last_seen + timedelta(minutes=int(settings.STATION_HEARTBEAT_TIME))
return heartbeat > now()
except TypeError:
return False
@property
def is_offline(self):
"""Return true if station is offline"""
return not self.is_online
@property
def has_location(self):
"""Return true if station location is defined"""
if self.alt is None or self.lat is None or self.lng is None:
return False
return True
@property
def is_testing(self):
"""Return true if station is online and in testing mode"""
if self.is_online:
if self.status == 1:
return True
return False
def state(self):
"""Return the station status in html format"""
if not self.status:
return format_html('<span style="color:red;">Offline</span>')
if self.status == 1:
return format_html('<span style="color:orange;">Testing</span>')
return format_html('<span style="color:green">Online</span>')
@property
def success_rate(self):
"""Return the success rate of the station - successful observation over failed ones"""
rate = cache.get('station-{0}-rate'.format(self.id))
if not rate:
observations = self.observations.exclude(testing=True).exclude(status__range=(0, 99))
stats = observations.aggregate(
bad=Count('pk', filter=Q(status__range=(-100, -1))),
good=Count('pk', filter=Q(status__gte=100)),
failed=Count('pk', filter=Q(status__lt=100))
)
good_count = 0 if stats['good'] is None else stats['good']
bad_count = 0 if stats['bad'] is None else stats['bad']
failed_count = 0 if stats['failed'] is None else stats['failed']
total = good_count + bad_count + failed_count
if total:
rate = int(100 * ((bad_count + good_count) / total))
cache.set('station-{0}-rate'.format(self.id), rate, 60 * 60 * 6)
else:
rate = False
return rate
def __str__(self):
if self.pk:
return "%d - %s" % (self.pk, self.name)
return "%s" % (self.name)
def clean(self):
if re.search('[^\x20-\x7E\xA0-\xFF]', self.name, re.IGNORECASE):
raise ValidationError(
{
'name': (
'Please use characters that belong to ISO-8859-1'
' (https://en.wikipedia.org/wiki/ISO/IEC_8859-1).'
)
}
)
if re.search('[^\n\r\t\x20-\x7E\xA0-\xFF]', self.description, re.IGNORECASE):
raise ValidationError(
{
'description': (
'Please use characters that belong to ISO-8859-1'
' (https://en.wikipedia.org/wiki/ISO/IEC_8859-1).'
)
}
)
def update_status(self, created: bool = False):
"""
Update the status of the station
:param created: Whether the model is being created
"""
if not created:
current_status = self.status
if self.is_offline:
self.status = 0
elif self.testing:
self.status = 1
else:
self.status = 2
self.save()
if self.status != current_status:
StationStatusLog.objects.create(station=self, status=self.status)
else:
StationStatusLog.objects.create(station=self, status=self.status)
class AntennaType(models.Model):
"""Model for antenna types."""
name = models.CharField(max_length=25, unique=True)
def __str__(self):
return self.name
class Antenna(models.Model):
"""Model for antennas of SatNOGS ground stations."""
antenna_type = models.ForeignKey(
AntennaType, on_delete=models.PROTECT, related_name='antennas'
)
station = models.ForeignKey(Station, on_delete=models.CASCADE, related_name='antennas')
@property
def bands(self):
"""Return comma separated string of the bands that the antenna works on"""
bands = []
for frequency_range in self.frequency_ranges.all():
for band in bands_from_range(frequency_range.min_frequency,
frequency_range.max_frequency):
if band not in bands:
bands.append(band)
return ', '.join(bands)
def __str__(self):
if self.pk:
return "%d - %s (#%s)" % (self.pk, self.antenna_type.name, self.station.id)
if self.station.id:
return "%s (#%s)" % (self.antenna_type.name, self.station.id)
return "%s" % (self.antenna_type.name)
class FrequencyRange(models.Model):
"""Model for frequency ranges of antennas."""
antenna = models.ForeignKey(Antenna, on_delete=models.CASCADE, related_name='frequency_ranges')
min_frequency = models.BigIntegerField()
max_frequency = models.BigIntegerField()
@property
def bands(self):
"""Return comma separated string of the bands that of the frequeny range"""
bands = bands_from_range(self.min_frequency, self.max_frequency)
return ', '.join(bands)
class Meta:
ordering = ['min_frequency']
def clean(self):
if self.max_frequency < self.min_frequency:
raise ValidationError(
{
'min_frequency': (
'Minimum frequency is greater than the maximum one ({0} > {1}).'.format(
self.min_frequency, self.max_frequency
)
),
'max_frequency': (
'Maximum frequency is less than the minimum one ({0} < {1}).'.format(
self.max_frequency, self.min_frequency
)
),
}
)
if self.min_frequency < settings.MIN_FREQUENCY_FOR_RANGE:
raise ValidationError(
{
'min_frequency': ('Minimum frequency should be more than {0}.').format(
settings.MIN_FREQUENCY_FOR_RANGE
)
}
)
if self.max_frequency > settings.MAX_FREQUENCY_FOR_RANGE:
raise ValidationError(
{
'max_frequency': ('Maximum frequency should be less than {0}.').format(
settings.MAX_FREQUENCY_FOR_RANGE
)
}
)
class StationStatusLog(models.Model):
"""Model for keeping Status log for Station."""
station = models.ForeignKey(
Station, related_name='station_logs', on_delete=models.CASCADE, null=True, blank=True
)
status = models.IntegerField(choices=STATION_STATUSES, default=0)
changed = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ['-changed']
indexes = [models.Index(fields=['-changed'])]
def __str__(self):
return '{0} - {1}'.format(self.station, self.status)
class Satellite(models.Model):
"""Model for SatNOGS satellites."""
norad_cat_id = models.PositiveIntegerField(db_index=True)
name = models.CharField(max_length=45)
names = models.TextField(blank=True)
image = models.CharField(max_length=100, blank=True, null=True)
status = models.CharField(
choices=list(zip(SATELLITE_STATUS, SATELLITE_STATUS)), max_length=10, default='alive'
)
is_frequency_violator = models.BooleanField(default=False)
class Meta:
ordering = ['norad_cat_id']
def get_image(self):
"""Return the station image or the default if doesn't exist one"""
if self.image:
return self.image
return settings.SATELLITE_DEFAULT_IMAGE
def __str__(self):
return self.name
class Observation(models.Model):
"""Model for SatNOGS observations."""
satellite = models.ForeignKey(
Satellite, related_name='observations', on_delete=models.SET_NULL, null=True, blank=True
)
tle_line_0 = models.CharField(
max_length=69, blank=True, validators=[MinLengthValidator(1),
MaxLengthValidator(69)]
)
tle_line_1 = models.CharField(
max_length=69, blank=True, validators=[MinLengthValidator(69),
MaxLengthValidator(69)]
)
tle_line_2 = models.CharField(
max_length=69, blank=True, validators=[MinLengthValidator(69),
MaxLengthValidator(69)]
)
tle_source = models.CharField(max_length=300, blank=True)
tle_updated = models.DateTimeField(null=True, blank=True)
author = models.ForeignKey(
User, related_name='observations', on_delete=models.SET_NULL, null=True, blank=True
)
start = models.DateTimeField(db_index=True)
end = models.DateTimeField(db_index=True)
ground_station = models.ForeignKey(
Station, related_name='observations', on_delete=models.SET_NULL, null=True, blank=True
)
client_version = models.CharField(max_length=255, blank=True)
client_metadata = models.TextField(blank=True)
payload_old = models.FileField(upload_to=_name_obs_files, blank=True, null=True)
payload = models.FileField(
upload_to=_name_observation_data, storage=_select_audio_storage, blank=True
)
waterfall_old = models.ImageField(upload_to=_name_obs_files, blank=True, null=True)
waterfall = models.ImageField(
upload_to=_name_observation_data, storage=_select_waterfall_storage, blank=True
)
"""
Meaning of values:
    True -> Waterfall has signal of the observed satellite (with-signal)
    False -> Waterfall has no signal of the observed satellite (without-signal)
    None -> Unknown whether the waterfall has signal of the observed satellite (unknown)
"""
waterfall_status = models.BooleanField(blank=True, null=True, default=None)
waterfall_status_datetime = models.DateTimeField(null=True, blank=True)
waterfall_status_user = models.ForeignKey(
User, related_name='waterfalls_vetted', on_delete=models.SET_NULL, null=True, blank=True
)
vetted_status = models.CharField(
choices=OBSERVATION_STATUSES, max_length=20, default='unknown'
)
"""
Meaning of values:
    x < -100      -> Failed
    -100 <= x < 0 -> Bad
    0 <= x < 100  -> Unknown (Future if observation not completed)
    100 <= x      -> Good
"""
status = models.SmallIntegerField(default=0)
testing = models.BooleanField(default=False)
rise_azimuth = models.FloatField(blank=True, null=True)
max_altitude = models.FloatField(blank=True, null=True)
set_azimuth = models.FloatField(blank=True, null=True)
audio_zipped = models.BooleanField(default=False)
archived = models.BooleanField(default=False)
archive_identifier = models.CharField(max_length=255, blank=True)
archive_url = models.URLField(blank=True, null=True)
transmitter_uuid = ShortUUIDField(auto=False, db_index=True)
transmitter_description = models.TextField(default='')
transmitter_type = models.CharField(
choices=list(zip(TRANSMITTER_TYPE, TRANSMITTER_TYPE)),
max_length=11,
default='Transmitter'
)
transmitter_uplink_low = models.BigIntegerField(blank=True, null=True)
transmitter_uplink_high = models.BigIntegerField(blank=True, null=True)
transmitter_uplink_drift = models.IntegerField(blank=True, null=True)
transmitter_downlink_low = models.BigIntegerField(blank=True, null=True)
transmitter_downlink_high = models.BigIntegerField(blank=True, null=True)
transmitter_downlink_drift = models.IntegerField(blank=True, null=True)
transmitter_mode = models.CharField(max_length=25, blank=True, null=True)
transmitter_invert = models.BooleanField(default=False)
transmitter_baud = models.FloatField(validators=[MinValueValidator(0)], blank=True, null=True)
transmitter_created = models.DateTimeField(default=now)
transmitter_status = models.BooleanField(null=True, blank=True)
transmitter_unconfirmed = models.BooleanField(blank=True, null=True)
station_alt = models.PositiveIntegerField(null=True, blank=True)
station_lat = models.FloatField(
validators=[MaxValueValidator(90), MinValueValidator(-90)], null=True, blank=True
)
station_lng = models.FloatField(
validators=[MaxValueValidator(180), MinValueValidator(-180)], null=True, blank=True
)
station_antennas = models.TextField(null=True, blank=True)
center_frequency = models.BigIntegerField(blank=True, null=True)
objects = ObservationManager.as_manager()
@property
def is_past(self):
"""Return true if observation is in the past (end time is in the past)"""
return self.end < now()
@property
def is_future(self):
"""Return true if observation is in the future (end time is in the future)"""
return self.end > now()
@property
def is_started(self):
"""Return true if observation has started (start time is in the past)"""
return self.start < now()
    # The values below are used as returned values in the API and for CSS rules in templates
@property
def status_badge(self):
"""Return badge for status field"""
if self.is_future:
return "future"
if self.status < -100:
return "failed"
if -100 <= self.status < 0:
return "bad"
if 0 <= self.status < 100:
return "unknown"
return "good"
    # The values below are used as displayed values in templates
@property
def status_display(self):
"""Return display name for status field"""
if self.is_future:
return "Future"
if self.status < -100:
return "Failed"
if -100 <= self.status < 0:
return "Bad"
if 0 <= self.status < 100:
return "Unknown"
return "Good"
    # The values below are used as returned values in the API and for CSS rules in templates
@property
def waterfall_status_badge(self):
"""Return badge for waterfall_status field"""
if self.waterfall_status is None:
return 'unknown'
if self.waterfall_status:
return 'with-signal'
return 'without-signal'
    # The values below are used as displayed values in templates
@property
def waterfall_status_display(self):
"""Return display name for waterfall_status field"""
if self.waterfall_status is None:
return 'Unknown'
if self.waterfall_status:
return 'With Signal'
return 'Without Signal'
@property
def has_waterfall(self):
"""Run some checks on the waterfall for existence of data."""
if self.waterfall_old:
if (not self.waterfall_old.storage.exists(self.waterfall_old.name
)) or self.waterfall_old.size == 0:
return False
return True
if self.waterfall:
return True
return False
@property
def has_audio(self):
"""Run some checks on the payload for existence of data."""
if self.archive_url:
return True
if self.payload_old:
if (not self.payload_old.storage.exists(self.payload_old.name
)) or self.payload_old.size == 0:
return False
return True
if self.payload:
return True
return False
@property
def has_demoddata(self):
"""Check if the observation has Demod Data."""
if self.demoddata.exists():
return True
return False
@property
def has_artifact(self):
"""Check if the observation has an associated artifact in satnogs-db."""
try:
artifact_metadata = get_artifact_metadata_by_observation_id(self.id)
except DBConnectionError:
return False
return truth(artifact_metadata)
@property
def artifact_url(self):
"""Return url for the oberations artifact file (if it exists)"""
try:
artifact_metadata = get_artifact_metadata_by_observation_id(self.id)
except DBConnectionError:
return ''
if not artifact_metadata:
return ''
return artifact_metadata[0]['artifact_file']
@property
def audio_url(self):
"""Return url for observation's audio file"""
if self.has_audio:
if self.archive_url:
return self.archive_url
if self.payload_old:
return self.payload_old.url
return self.payload.url
return ''
class Meta:
ordering = ['-start', '-end']
indexes = [models.Index(fields=['-start', '-end'])]
def __str__(self):
return str(self.id)
def get_absolute_url(self):
"""Return absolute url of the model object"""
return reverse('base:observation_view', kwargs={'observation_id': self.id})
@receiver(models.signals.post_delete, sender=Observation)
def observation_remove_files(sender, instance, **kwargs): # pylint: disable=W0613
"""Remove audio and waterfall files of an observation if the observation is deleted"""
if instance.payload_old:
instance.payload_old.delete(save=False)
if instance.waterfall_old:
instance.waterfall_old.delete(save=False)
if instance.payload:
instance.payload.delete(save=False)
if instance.waterfall:
instance.waterfall.delete(save=False)
class DemodData(models.Model):
"""Model for DemodData."""
observation = models.ForeignKey(
Observation, related_name='demoddata', on_delete=models.CASCADE
)
payload_demod = models.FileField(upload_to=_name_obs_demoddata, blank=True)
demodulated_data = models.FileField(
upload_to=_name_observation_demoddata, storage=_select_data_storage, blank=True
)
copied_to_db = models.BooleanField(default=False)
is_image = models.BooleanField(default=False)
class Meta:
indexes = [models.Index(fields=["copied_to_db", "is_image"])]
def display_payload_hex(self):
"""
Return the content of the data file as hex dump of the following form: `DE AD C0 DE`.
"""
if self.payload_demod:
with self.payload_demod.storage.open(self.payload_demod.name, mode='rb') as data_file:
payload = data_file.read()
else:
with self.demodulated_data.storage.open(self.demodulated_data.name,
mode='rb') as data_file:
payload = data_file.read()
return _decode_pretty_hex(payload)
def display_payload_utf8(self):
"""
Return the content of the data file decoded as UTF-8. If this fails,
show as hex dump.
"""
if self.payload_demod:
with self.payload_demod.storage.open(self.payload_demod.name, mode='rb') as data_file:
payload = data_file.read()
else:
with self.demodulated_data.storage.open(self.demodulated_data.name,
mode='rb') as data_file:
payload = data_file.read()
try:
return payload.decode('utf-8')
except UnicodeDecodeError:
return _decode_pretty_hex(payload)
def __str__(self):
if self.payload_demod:
return '{} - {}'.format(self.id, self.payload_demod)
return '{} - {}'.format(self.id, self.demodulated_data)
@receiver(models.signals.post_delete, sender=DemodData)
def demoddata_remove_files(sender, instance, **kwargs): # pylint: disable=W0613
"""Remove data file of an observation if the observation is deleted"""
if instance.payload_demod:
instance.payload_demod.delete(save=False)
if instance.demodulated_data:
instance.demodulated_data.delete(save=False)
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/models.py
| 0.598782 | 0.150434 |
models.py
|
pypi
|
from django.db import migrations
from django.conf import settings
from django.core.files.base import ContentFile
def forwards_func(apps, schema_editor):
if not settings.IGNORE_MIGRATION:
Observation = apps.get_model("base", "Observation")
DemodData = apps.get_model("base", "DemodData")
observations = Observation.objects.filter(payload='').exclude(payload_old='')
for observation in observations:
if observation.payload_old.storage.exists(observation.payload_old.name):
audio_file = ContentFile(observation.payload_old.read())
audio_file.name = observation.payload_old.name.split('/')[-1]
observation.payload = audio_file
observation.save()
if observation.payload.storage.exists(observation.payload.name):
observation.payload_old.delete()
observations = Observation.objects.filter(waterfall='').exclude(waterfall_old='')
for observation in observations:
if observation.waterfall_old.storage.exists(observation.waterfall_old.name):
waterfall_file = ContentFile(observation.waterfall_old.read())
waterfall_file.name = observation.waterfall_old.name.split('/')[-1]
observation.waterfall = waterfall_file
observation.save()
if observation.waterfall.storage.exists(observation.waterfall.name):
observation.waterfall_old.delete()
demoddata = DemodData.objects.filter(demodulated_data='').exclude(payload_demod='')
for datum in demoddata:
if datum.payload_demod.storage.exists(datum.payload_demod.name):
data_file = ContentFile(datum.payload_demod.read())
data_file.name = datum.payload_demod.name.split('/')[-1]
datum.demodulated_data = data_file
datum.save()
if datum.demodulated_data.storage.exists(datum.demodulated_data.name):
datum.payload_demod.delete()
def reverse_func(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('base', '0093_check_old_demoddata_if_are_images'),
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/migrations/0094_migrate_artifact_files_from_old_to_new_fields.py
| 0.578805 | 0.194521 |
0094_migrate_artifact_files_from_old_to_new_fields.py
|
pypi
|
from django.db import migrations, models
import django.db.models.deletion
def initialize_antenna_types(apps, schema_editor):
AntennaType = apps.get_model("base", "AntennaType")
AntennaType.objects.bulk_create(
[
AntennaType(name="Dipole"),
AntennaType(name="V-Dipole"),
AntennaType(name="Discone"),
AntennaType(name="Ground Plane"),
AntennaType(name="Yagi"),
AntennaType(name="Cross Yagi"),
AntennaType(name="Helical"),
AntennaType(name="Parabolic"),
AntennaType(name="Vertical"),
AntennaType(name="Turnstile"),
AntennaType(name="Quadrafilar"),
AntennaType(name="Eggbeater"),
AntennaType(name="Lindenblad"),
AntennaType(name="Parasitic Lindenblad"),
AntennaType(name="Patch"),
AntennaType(name="Other Directional"),
AntennaType(name="Other Omni-Directional"),
]
)
def revert_initialize_antenna_types(apps, schema_editor):
AntennaType = apps.get_model("base", "AntennaType")
AntennaType.objects.filter(
name__in=[
"Dipole", "V-Dipole", "Discone", "Ground Plane", "Yagi", "Cross Yagi", "Helical",
"Parabolic", "Vertical", "Turnstile", "Quadrafilar", "Eggbeater", "Lindenblad",
"Parasitic Lindenblad", "Patch", "Other Directional", "Other Omni-Directional"
]
).delete()
class Migration(migrations.Migration):
dependencies = [
('base', '0064_increase_mode_name_char_limit'),
]
operations = [
migrations.CreateModel(
name='AntennaType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=25, unique=True)),
],
),
migrations.RunPython(initialize_antenna_types, revert_initialize_antenna_types),
migrations.CreateModel(
name='FrequencyRange',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('min_frequency', models.PositiveIntegerField()),
('max_frequency', models.PositiveIntegerField()),
],
),
migrations.CreateModel(
name='StationAntenna',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('antenna_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='station_antennas', to='base.AntennaType')),
('station', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='antennas', to='base.Station')),
],
),
migrations.AddField(
model_name='frequencyrange',
name='antenna',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='frequency_ranges', to='base.StationAntenna'),
),
]
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/migrations/0065_new_antenna_schema.py
| 0.623148 | 0.21626 |
0065_new_antenna_schema.py
|
pypi
|
import django.core.validators
from django.db import migrations, models
def from_tle_to_observation(apps, schema_editor):
Observation = apps.get_model('base', 'Observation')
observations = Observation.objects.filter(tle__isnull=False)
for observation in observations:
observation.tle_line_0 = observation.tle.tle0
observation.tle_line_1 = observation.tle.tle1
observation.tle_line_2 = observation.tle.tle2
observation.tle_source = observation.tle.tle_source
observation.tle_updated = observation.tle.updated
observation.save()
def reverse_from_tle_to_observation(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('base', '0078_fix_waterfall_status'),
]
operations = [
migrations.AddField(
model_name='observation',
name='tle_line_0',
field=models.CharField(blank=True, max_length=69, validators=[django.core.validators.MinLengthValidator(1), django.core.validators.MaxLengthValidator(69)]),
),
migrations.AddField(
model_name='observation',
name='tle_line_1',
field=models.CharField(blank=True, max_length=69, validators=[django.core.validators.MinLengthValidator(69), django.core.validators.MaxLengthValidator(69)]),
),
migrations.AddField(
model_name='observation',
name='tle_line_2',
field=models.CharField(blank=True, max_length=69, validators=[django.core.validators.MinLengthValidator(69), django.core.validators.MaxLengthValidator(69)]),
),
migrations.AddField(
model_name='observation',
name='tle_source',
field=models.CharField(blank=True, max_length=300),
),
migrations.AddField(
model_name='observation',
name='tle_updated',
field=models.DateTimeField(blank=True, null=True),
),
migrations.RunPython(from_tle_to_observation, reverse_from_tle_to_observation),
]
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/migrations/0079_add_tle_field_in_observation_model.py
| 0.671686 | 0.296948 |
0079_add_tle_field_in_observation_model.py
|
pypi
|
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import shortuuidfield.fields
def from_transmitter_to_observation(apps, schema_editor):
Observation = apps.get_model('base', 'Observation')
observations = Observation.objects.all()
for observation in observations:
observation.transmitter_uuid = observation.transmitter.uuid
observation.transmitter_description = observation.transmitter.description
observation.transmitter_type = observation.transmitter.type
observation.transmitter_uplink_low = observation.transmitter.uplink_low
observation.transmitter_uplink_high = observation.transmitter.uplink_high
observation.transmitter_uplink_drift = observation.transmitter.uplink_drift
observation.transmitter_downlink_low = observation.transmitter.downlink_low
observation.transmitter_downlink_high = observation.transmitter.downlink_high
observation.transmitter_downlink_drift = observation.transmitter.downlink_drift
observation.transmitter_mode = observation.transmitter.mode
observation.transmitter_invert = observation.transmitter.invert
observation.transmitter_baud = observation.transmitter.baud
observation.save()
def reverse_from_transmitter_to_observation(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('base', '0057_no_null_demoddata_observation_field'),
]
operations = [
migrations.AddField(
model_name='observation',
name='transmitter_baud',
field=models.FloatField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)]),
),
migrations.AddField(
model_name='observation',
name='transmitter_created',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AddField(
model_name='observation',
name='transmitter_description',
field=models.TextField(default=''),
),
migrations.AddField(
model_name='observation',
name='transmitter_downlink_drift',
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='observation',
name='transmitter_downlink_high',
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='observation',
name='transmitter_downlink_low',
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='observation',
name='transmitter_invert',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='observation',
name='transmitter_mode',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='observations', to='base.Mode'),
),
migrations.AddField(
model_name='observation',
name='transmitter_type',
field=models.CharField(choices=[('Transmitter', 'Transmitter'), ('Transceiver', 'Transceiver'), ('Transponder', 'Transponder')], default='Transmitter', max_length=11),
),
migrations.AddField(
model_name='observation',
name='transmitter_uplink_drift',
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='observation',
name='transmitter_uplink_high',
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='observation',
name='transmitter_uplink_low',
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='observation',
name='transmitter_uuid',
field=shortuuidfield.fields.ShortUUIDField(blank=True, db_index=True, editable=False, max_length=22),
),
migrations.RunPython(from_transmitter_to_observation, reverse_from_transmitter_to_observation),
migrations.RemoveField(
model_name='observation',
name='transmitter',
),
migrations.RemoveField(
model_name='transmitter',
name='alive',
),
migrations.RemoveField(
model_name='transmitter',
name='baud',
),
migrations.RemoveField(
model_name='transmitter',
name='description',
),
migrations.RemoveField(
model_name='transmitter',
name='downlink_drift',
),
migrations.RemoveField(
model_name='transmitter',
name='downlink_high',
),
migrations.RemoveField(
model_name='transmitter',
name='downlink_low',
),
migrations.RemoveField(
model_name='transmitter',
name='invert',
),
migrations.RemoveField(
model_name='transmitter',
name='mode',
),
migrations.RemoveField(
model_name='transmitter',
name='satellite',
),
migrations.RemoveField(
model_name='transmitter',
name='type',
),
migrations.RemoveField(
model_name='transmitter',
name='uplink_drift',
),
migrations.RemoveField(
model_name='transmitter',
name='uplink_high',
),
migrations.RemoveField(
model_name='transmitter',
name='uplink_low',
),
]
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/migrations/0058_add_transmitter_into_observation_model.py
| 0.657209 | 0.358606 |
0058_add_transmitter_into_observation_model.py
|
pypi
|
from django.db import models, migrations
import shortuuidfield.fields
import django.db.models.deletion
from django.conf import settings
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Antenna',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('frequency', models.FloatField(validators=[django.core.validators.MinValueValidator(0)])),
('band', models.CharField(max_length=5, choices=[('HF', 'HF'), ('VHF', 'VHF'), ('UHF', 'UHF'), ('L', 'L'), ('S', 'S'), ('C', 'C'), ('X', 'X'), ('KU', 'KU')])),
('antenna_type', models.CharField(max_length=15, choices=[('dipole', 'Dipole'), ('yagi', 'Yagi'), ('helical', 'Helical'), ('parabolic', 'Parabolic')])),
],
),
migrations.CreateModel(
name='Data',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('start', models.DateTimeField()),
('end', models.DateTimeField()),
('payload', models.FileField(null=True, upload_to='data_payloads', blank=True)),
],
options={
'ordering': ['-start', '-end'],
},
),
migrations.CreateModel(
name='Mode',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=10)),
],
),
migrations.CreateModel(
name='Observation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('start', models.DateTimeField()),
('end', models.DateTimeField()),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-start', '-end'],
},
),
migrations.CreateModel(
name='Satellite',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('norad_cat_id', models.PositiveIntegerField()),
('name', models.CharField(max_length=45)),
('names', models.TextField(blank=True)),
('image', models.ImageField(upload_to='satellites', blank=True)),
('tle0', models.CharField(max_length=100, blank=True)),
('tle1', models.CharField(max_length=200, blank=True)),
('tle2', models.CharField(max_length=200, blank=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['norad_cat_id'],
},
),
migrations.CreateModel(
name='Station',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=45)),
('image', models.ImageField(upload_to='ground_stations', blank=True)),
('alt', models.PositiveIntegerField(help_text='In meters above ground')),
('lat', models.FloatField(validators=[django.core.validators.MaxValueValidator(90), django.core.validators.MinValueValidator(-90)])),
('lng', models.FloatField(validators=[django.core.validators.MaxValueValidator(180), django.core.validators.MinValueValidator(-180)])),
('qthlocator', models.CharField(max_length=255, blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('featured_date', models.DateField(null=True, blank=True)),
('created', models.DateTimeField(auto_now_add=True)),
('active', models.BooleanField(default=False)),
('last_seen', models.DateTimeField(null=True, blank=True)),
('antenna', models.ManyToManyField(help_text='If you want to add a new Antenna contact SatNOGS Team', to='base.Antenna', blank=True)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-active', '-last_seen'],
},
),
migrations.CreateModel(
name='Transmitter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('uuid', shortuuidfield.fields.ShortUUIDField(db_index=True, max_length=22, editable=False, blank=True)),
('description', models.TextField()),
('alive', models.BooleanField(default=True)),
('uplink_low', models.PositiveIntegerField(null=True, blank=True)),
('uplink_high', models.PositiveIntegerField(null=True, blank=True)),
('downlink_low', models.PositiveIntegerField(null=True, blank=True)),
('downlink_high', models.PositiveIntegerField(null=True, blank=True)),
('invert', models.BooleanField(default=False)),
('baud', models.FloatField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0)])),
('mode', models.ForeignKey(related_name='transmitters', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='base.Mode', null=True)),
('satellite', models.ForeignKey(related_name='transmitters', on_delete=django.db.models.deletion.CASCADE, to='base.Satellite', null=True)),
],
),
migrations.AddField(
model_name='observation',
name='satellite',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.Satellite'),
),
migrations.AddField(
model_name='observation',
name='transmitter',
field=models.ForeignKey(related_name='observations', on_delete=django.db.models.deletion.CASCADE, to='base.Transmitter', null=True),
),
migrations.AddField(
model_name='data',
name='ground_station',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.Station'),
),
migrations.AddField(
model_name='data',
name='observation',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.Observation'),
),
]
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/migrations/0001_initial.py
| 0.562177 | 0.179315 |
0001_initial.py
|
pypi
|
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.utils.timezone import now
from django.views.generic import ListView
from network.base.db_api import DBConnectionError, get_transmitters_by_norad_id
from network.base.decorators import ajax_required
from network.base.models import Observation, Satellite, Station
from network.base.perms import delete_perms, schedule_perms, vet_perms
from network.base.rating_tasks import rate_observation
from network.base.stats import satellite_stats_by_transmitter_list, transmitters_with_stats
from network.base.utils import community_get_discussion_details
from network.users.models import User
class ObservationListView(ListView): # pylint: disable=R0901
"""
Displays a list of observations with pagination
"""
model = Observation
context_object_name = "observations"
paginate_by = settings.ITEMS_PER_PAGE
template_name = 'base/observations.html'
str_filters = ['norad', 'observer', 'station', 'start', 'end', 'transmitter_mode']
flag_filters = ['bad', 'good', 'unknown', 'future', 'failed']
filtered = None
def get_filter_params(self):
"""
Get the parsed filter parameters from the HTTP GET parameters
        - str_filters values are str, default to ''
        - flag_filters values are Boolean, default to True
Returns a dict, filter_name is the key, the parsed parameter is the value.
"""
filter_params = {}
for parameter_name in self.str_filters:
filter_params[parameter_name] = self.request.GET.get(parameter_name, '')
for parameter_name in self.flag_filters:
param = self.request.GET.get(parameter_name, 1)
filter_params[parameter_name] = (param != '0')
return filter_params
def get_queryset(self):
"""
Optionally filter based on norad get argument
Optionally filter based on future/good/bad/unknown/failed
"""
filter_params = self.get_filter_params()
results = self.request.GET.getlist('results')
rated = self.request.GET.getlist('rated')
observations = Observation.objects.prefetch_related(
'satellite', 'demoddata', 'author', 'ground_station'
)
        # Mapping between the HTTP GET parameters and the filter keys
parameter_filter_mapping = {
'norad': 'satellite__norad_cat_id',
'observer': 'author',
'station': 'ground_station_id',
'start': 'start__gt',
'end': 'end__lt',
'transmitter_mode': 'transmitter_mode__icontains',
}
        # Create observations filter based on the received HTTP GET parameters
filter_dict = {}
for parameter_key, filter_key in parameter_filter_mapping.items():
if filter_params[parameter_key] == '':
continue
filter_dict[filter_key] = filter_params[parameter_key]
self.filtered = (
(
not all(
[
filter_params['bad'], filter_params['good'], filter_params['unknown'],
filter_params['future'], filter_params['failed']
]
)
) or results or rated or filter_dict
)
observations = observations.filter(**filter_dict)
if not filter_params['failed']:
observations = observations.exclude(status__lt=-100)
if not filter_params['bad']:
observations = observations.exclude(status__range=(-100, -1))
if not filter_params['unknown']:
observations = observations.exclude(status__range=(0, 99), end__lte=now())
if not filter_params['future']:
observations = observations.exclude(end__gt=now())
if not filter_params['good']:
observations = observations.exclude(status__gte=100)
if results:
if 'w0' in results:
observations = observations.filter(waterfall_old='', waterfall='')
elif 'w1' in results:
observations = observations.exclude(waterfall_old='', waterfall='')
if 'a0' in results:
observations = observations.filter(archived=False, payload_old='', payload='')
elif 'a1' in results:
observations = observations.exclude(archived=False, payload_old='', payload='')
if 'd0' in results:
observations = observations.filter(
demoddata__payload_demod__isnull=True,
demoddata__demodulated_data__isnull=True
)
elif 'd1' in results:
observations = observations.exclude(
demoddata__payload_demod__isnull=True,
demoddata__demodulated_data__isnull=True
)
if 'i1' in results:
observations = observations.filter(demoddata__is_image=True)
if rated:
if 'rwu' in rated:
observations = observations.filter(waterfall_status__isnull=True).exclude(
waterfall_old='', waterfall=''
)
elif 'rw1' in rated:
observations = observations.filter(waterfall_status=True)
elif 'rw0' in rated:
observations = observations.filter(waterfall_status=False)
return observations
def get_context_data(self, **kwargs): # pylint: disable=W0221
"""
Need to add a list of satellites to the context for the template
"""
context = super().get_context_data(**kwargs)
context['satellites'] = Satellite.objects.all()
context['authors'] = User.objects.all().order_by('first_name', 'last_name', 'username')
context['stations'] = Station.objects.all().order_by('id')
norad_cat_id = self.request.GET.get('norad', None)
observer = self.request.GET.get('observer', None)
station = self.request.GET.get('station', None)
start = self.request.GET.get('start', None)
end = self.request.GET.get('end', None)
context['future'] = self.request.GET.get('future', '1')
context['bad'] = self.request.GET.get('bad', '1')
context['good'] = self.request.GET.get('good', '1')
context['unknown'] = self.request.GET.get('unknown', '1')
context['failed'] = self.request.GET.get('failed', '1')
context['results'] = self.request.GET.getlist('results')
context['rated'] = self.request.GET.getlist('rated')
context['transmitter_mode'] = self.request.GET.get('transmitter_mode', None)
context['filtered'] = bool(self.filtered)
if norad_cat_id is not None and norad_cat_id != '':
context['norad'] = int(norad_cat_id)
if observer is not None and observer != '':
context['observer_id'] = int(observer)
if station is not None and station != '':
context['station_id'] = int(station)
if start is not None and start != '':
context['start'] = start
if end is not None and end != '':
context['end'] = end
if 'scheduled' in self.request.session:
context['scheduled'] = self.request.session['scheduled']
try:
del self.request.session['scheduled']
except KeyError:
pass
context['can_schedule'] = schedule_perms(self.request.user)
return context
def observation_view(request, observation_id):
"""View for single observation page."""
observation = get_object_or_404(Observation, id=observation_id)
can_vet = vet_perms(request.user, observation)
can_delete = delete_perms(request.user, observation)
if observation.has_audio and not observation.audio_url:
messages.error(
            request, 'Audio file is not currently available;'
' if the problem persists please contact an administrator.'
)
has_comments = False
discuss_url = ''
discuss_slug = ''
if settings.ENVIRONMENT == 'production':
discussion_details = community_get_discussion_details(
observation.id, observation.satellite.name, observation.satellite.norad_cat_id,
'https:%2F%2F{}{}'.format(request.get_host(), request.path)
)
has_comments = discussion_details['has_comments']
discuss_url = discussion_details['url']
discuss_slug = discussion_details['slug']
has_demoddata = observation.demoddata.all().exists()
demoddata = observation.demoddata.all()
demoddata_count = len(demoddata)
demoddata_details = []
show_hex_to_ascii_button = False
if has_demoddata:
if observation.transmitter_mode == 'CW':
content_type = 'text'
else:
content_type = 'binary'
        for datum in demoddata:
            datum_file = datum.payload_demod if datum.payload_demod else datum.demodulated_data
            if datum.is_image:
                datum_type = 'image'
            else:
                show_hex_to_ascii_button = True
                datum_type = content_type
            demoddata_details.append(
                {
                    'url': datum_file.url,
                    'name': datum_file.name,
                    'type': datum_type
                }
            )
demoddata_details = sorted(demoddata_details, key=lambda d: d['name'])
return render(
request, 'base/observation_view.html', {
'observation': observation,
'demoddata_count': demoddata_count,
'demoddata_details': demoddata_details,
'show_hex_to_ascii_button': show_hex_to_ascii_button,
'can_vet': can_vet,
'can_delete': can_delete,
'has_comments': has_comments,
'discuss_url': discuss_url,
'discuss_slug': discuss_slug
}
)
@login_required
def observation_delete(request, observation_id):
"""View for deleting observation."""
observation = get_object_or_404(Observation, id=observation_id)
can_delete = delete_perms(request.user, observation)
if can_delete:
observation.delete()
messages.success(request, 'Observation deleted successfully.')
else:
messages.error(request, 'Permission denied.')
return redirect(reverse('base:observations_list'))
@login_required
@ajax_required
def waterfall_vet(request, observation_id):
"""Handles request for vetting a waterfall"""
try:
observation = Observation.objects.get(id=observation_id)
except Observation.DoesNotExist:
data = {'error': 'Observation does not exist.'}
return JsonResponse(data, safe=False)
status = request.POST.get('status', None)
can_vet = vet_perms(request.user, observation)
if not can_vet:
data = {'error': 'Permission denied.'}
return JsonResponse(data, safe=False)
if not observation.has_waterfall:
data = {'error': 'Observation without waterfall.'}
return JsonResponse(data, safe=False)
if status not in ['with-signal', 'without-signal', 'unknown']:
data = {
'error': 'Invalid status, select one of \'with-signal\', \'without-signal\' and '
'\'unknown\'.'
}
return JsonResponse(data, safe=False)
if status == 'with-signal':
observation.waterfall_status = True
elif status == 'without-signal':
observation.waterfall_status = False
elif status == 'unknown':
observation.waterfall_status = None
observation.waterfall_status_user = request.user
observation.waterfall_status_datetime = now()
observation.save(
update_fields=['waterfall_status', 'waterfall_status_user', 'waterfall_status_datetime']
)
(observation_status, observation_status_badge, observation_status_display
) = rate_observation(observation.id, 'set_waterfall_status', observation.waterfall_status)
data = {
'waterfall_status_user': observation.waterfall_status_user.displayname,
'waterfall_status_datetime': observation.waterfall_status_datetime.
strftime('%Y-%m-%d %H:%M:%S'),
'waterfall_status': observation.waterfall_status,
'waterfall_status_badge': observation.waterfall_status_badge,
'waterfall_status_display': observation.waterfall_status_display,
'status': observation_status,
'status_badge': observation_status_badge,
'status_display': observation_status_display,
}
return JsonResponse(data, safe=False)
def satellite_view(request, norad_id):
"""Returns a satellite JSON object with information and statistics"""
try:
sat = Satellite.objects.get(norad_cat_id=norad_id)
except Satellite.DoesNotExist:
data = {'error': 'Unable to find that satellite.'}
return JsonResponse(data, safe=False)
try:
transmitters = get_transmitters_by_norad_id(norad_id=norad_id)
except DBConnectionError as error:
data = [{'error': str(error)}]
return JsonResponse(data, safe=False)
satellite_stats = satellite_stats_by_transmitter_list(transmitters)
data = {
'id': norad_id,
'name': sat.name,
'names': sat.names,
'image': sat.image,
'success_rate': satellite_stats['success_rate'],
'good_count': satellite_stats['good_count'],
'bad_count': satellite_stats['bad_count'],
'unknown_count': satellite_stats['unknown_count'],
'future_count': satellite_stats['future_count'],
'total_count': satellite_stats['total_count'],
'transmitters': transmitters_with_stats(transmitters)
}
return JsonResponse(data, safe=False)
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/base/views/observation.py
| 0.725843 | 0.190235 |
observation.py
|
pypi
|
/**
* Returns a polar plot of a pass at the given groundstation as SVG in a string.
*
 * @param {timeframe} Timeframe of the observation.
* @param {groundstation} The observing groundstation.
* @param {tleLine1} TLE line 1 of the observed satellite.
* @param {tleLine2} TLE line 2 of the observed satellite.
*/
function calcPolarPlotSVG(timeframe, groundstation, tleLine1, tleLine2) {
'use strict';
const pi = Math.PI;
const deg2rad = pi / 180.0;
const rad2deg = 180 / pi;
    // Observer position: lat/lon in radians, height in km.
    // groundstation.alt is in metres, hence the division by 1000; whether it is
    // altitude above sea level or elevation above ground is ambiguous upstream.
var observerGd = {
longitude: groundstation.lon * deg2rad,
latitude: groundstation.lat * deg2rad,
height: groundstation.alt / 1000
};
var polarGetXY = function(az, el) {
var ret = new Object();
ret.x = (90 - el) * Math.sin(az * deg2rad);
ret.y = (el - 90) * Math.cos(az * deg2rad);
return ret;
};
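    // Example mapping: zenith (el=90) -> (0, 0); north horizon (az=0, el=0)
    // -> (0, -90); east horizon (az=90, el=0) -> (90, 0). The plot is thus a
    // 180x180 unit square with north at the top and the zenith at the centre.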
var svg_namespace = 'http://www.w3.org/2000/svg';
var polarOrbit = document.createElementNS(svg_namespace, 'path');
polarOrbit.setAttributeNS(null, 'fill', 'none');
polarOrbit.setAttributeNS(null, 'stroke', 'blue');
polarOrbit.setAttributeNS(null, 'stroke-opacity', '1.0');
polarOrbit.setAttributeNS(null, 'stroke-width', '3');
// Initialize the satellite record
var satrec = satellite.twoline2satrec(tleLine1, tleLine2);
function polarGetAzEl(t) {
var positionAndVelocity = satellite.propagate(satrec, t.toDate());
var gmst = satellite.gstime(t.toDate());
var positionEci = positionAndVelocity.position;
var positionEcf = satellite.eciToEcf(positionEci, gmst);
var lookAngles = satellite.ecfToLookAngles(observerGd, positionEcf);
return {'azimuth': lookAngles.azimuth * rad2deg,
'elevation': lookAngles.elevation * rad2deg};
}
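    // polarGetAzEl: the TLE is propagated to an ECI position, rotated into
    // ECF using GMST, then converted to azimuth/elevation look angles for
    // the fixed observer location.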
// Draw the orbit pass on the polar az/el plot
var g = '';
for (var t = moment(timeframe.start); t < moment(timeframe.end); t.add(20, 's')) {
var sky_position = polarGetAzEl(t);
var coord = polarGetXY(sky_position.azimuth, sky_position.elevation);
if (g == '') {
// Start of line
g += 'M';
} else {
// Continue line
g += ' L';
}
g += coord.x + ' ' + coord.y;
}
polarOrbit.setAttribute('d', g);
// Draw observation start
var point_start = document.createElementNS(svg_namespace, 'circle');
point_start.setAttributeNS(null, 'fill', 'lightgreen');
point_start.setAttributeNS(null, 'stroke', 'black');
point_start.setAttributeNS(null, 'stroke-width', '1');
var sky_position_rise = polarGetAzEl(moment(timeframe.start));
var coord_rise = polarGetXY(sky_position_rise.azimuth, sky_position_rise.elevation);
point_start.setAttribute('cx', coord_rise.x);
point_start.setAttribute('cy', coord_rise.y);
point_start.setAttribute('r', 7);
    // Draw observation end
var point_end = document.createElementNS(svg_namespace, 'circle');
point_end.setAttributeNS(null, 'fill', 'red');
point_end.setAttributeNS(null, 'stroke', 'black');
point_end.setAttributeNS(null, 'stroke-width', '1');
var sky_position_set = polarGetAzEl(moment(timeframe.end));
var coord_set = polarGetXY(sky_position_set.azimuth, sky_position_set.elevation);
point_end.setAttribute('cx', coord_set.x);
point_end.setAttribute('cy', coord_set.y);
point_end.setAttribute('r', 7);
return [polarOrbit, point_start, point_end];
}
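// Minimal usage sketch (hypothetical container id; assumes satellite.js and
// moment.js are loaded, as the function above already requires):
//   var elements = calcPolarPlotSVG(
//       {start: '2021-06-01T00:00:00Z', end: '2021-06-01T00:10:00Z'},
//       {lat: 52.0, lon: 4.4, alt: 10},
//       tleLine1, tleLine2);
//   elements.forEach(function(el) {
//       document.getElementById('polar-plot-svg').appendChild(el);
//   });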
|
/satnogs-network-1.104.tar.gz/satnogs-network-1.104/network/static/js/polar_svg.js
| 0.886911 | 0.587352 |
polar_svg.js
|
pypi
|
from os.path import exists
import hashlib
import json
import dask
from dask.diagnostics import ProgressBar
import os
from bs4 import BeautifulSoup as bs
import html5lib
import satnogs_webscraper.constants as cnst
import satnogs_webscraper.image_utils as iu
import satnogs_webscraper.request_utils as ru
import satnogs_webscraper.progress_utils as pu
class ObservationScraper:
def __init__(self, fetch_waterfalls=True, fetch_logging=True, prints=True, check_disk=True, cpus=1,
grey_scale=True):
"""
        Scrapes the web pages for satellite observations. Note that waterfall
        fetches can produce very large files; pass fetch_waterfalls=False to
        skip them.
        :param fetch_waterfalls: Boolean on whether to pull the waterfalls from the observations
        :param fetch_logging: Boolean for logging the fetches
        :param prints: Boolean for printing output in operation.
        :param check_disk: Boolean; when True, observations already saved to disk are reused.
        :param cpus: Number of workers for concurrent scraping.
        :param grey_scale: Boolean; when True, waterfall images are converted to greyscale.
        """
self.progress_dict = None
self.observations_list = []
self.fetch_waterfalls = fetch_waterfalls
self.fetch_logging = fetch_logging
self.json_file_loc = cnst.files["observation_json"]
self.observation_save_dir = cnst.directories['observations']
self.log_file_loc = cnst.files["log_file"]
self.waterfall_path = cnst.directories['waterfalls']
self.demod_path = cnst.directories['demods']
self.prints = prints
self.check_disk = check_disk
cnst.verify_directories()
self.cpus = cpus
self.grey_scale = grey_scale
def multiprocess_scrape_observations(self, observations_list):
"""
        Functions like scrape_observation, but scrapes multiple observations concurrently.
        :param observations_list: The list of observation IDs to scrape
        :return: The scraped observations. Also updates the instance's observations_list.
"""
urls = [f'{cnst.web_address}{cnst.observations}{observation}/' for observation in observations_list]
        tasks = [dask.delayed(self.scrape_observation)(url) for url in urls]
        with ProgressBar():
            self.observations_list = list(dask.compute(*tasks))
return self.observations_list
def scrape_observation(self, url):
"""
Scrapes a webpage for an observation
:param url: The url to the website to scrape
:return: A dictionary of the scraped webpage
"""
observation = url.split("/")[-2]
file_name = os.path.join(cnst.directories['observations'], f"{observation}.json")
if (not self.check_disk) and (os.path.isfile(file_name)):
os.remove(file_name)
if not os.path.isfile(file_name): # make sure the observation has not already been downloaded
template = cnst.observation_template.copy()
r = ru.get_request(url)
if r is None:
# TODO: Make a null template for easy filtering after scraping
return template
observation_web_page = bs(r.content, "html5lib")
table_rows = observation_web_page.find_all("tr")
for tr in table_rows:
key, value = self.scrape_tr(tr)
if key is not None:
template[key] = value
waterfall_status = observation_web_page.find(id="waterfall-status-badge")
if waterfall_status is not None:
template['Waterfall_Status'] = waterfall_status.text.strip()
status = observation_web_page.select("#rating-status > span")
            if status and status[0] is not None:
template['Status'] = status[0].text.strip()
template['Status_Message'] = status[0].attrs['title'].strip()
template['Observation_id'] = observation
template['demods'] = []
for data_a in observation_web_page.find_all("a", class_='data-link'):
template['demods'].append(self.fetch_demod(data_a))
with open(os.path.join(cnst.directories['observations'], f"{observation}.json"), 'w') as obs_out:
json.dump(template, obs_out)
return template
else:
with open(file_name, 'r') as file_in:
return json.load(file_in)
def scrape_tr(self, tr):
"""
        SATNOGS was updated to use tables instead of divs. This function is very
        similar to the old scrape_div, except that it parses a table row.
        :param tr: HTML table row (TR) element
        :return: Key, Value pair
"""
first_child = tr.select_one('td:nth-child(1)')
if first_child is not None:
contents = str(first_child.contents)
else:
return None, None
if contents.find("Satellite") != -1:
try:
second_element = tr.select_one('td:nth-child(2)')
second_element = second_element.find("a")
return "Satellite", second_element.text.strip()
            except Exception:
return "Satellite", ""
if contents.find("Station") != -1:
try:
second_element = tr.select_one('td:nth-child(2)')
second_element = second_element.find("a")
return "Station", second_element.text.strip()
            except Exception:
return "Station", ""
if contents.find("Transmitter") != -1:
try:
second_element = tr.select_one('td:nth-child(2)')
return "Transmitter", second_element.text.strip()
            except Exception:
return "Transmitter", ""
if contents.find("Frequency") != -1:
try:
second_element = tr.select_one('td:nth-child(2)')
element = second_element.find('span')
return "Frequency", element.attrs['title'].strip()
            except Exception:
return "Frequency", ""
if contents.find("Mode") != -1:
try:
second_element = tr.select_one('td:nth-child(2)')
return "Mode", [span.text.strip() for span in second_element.select("span") if span is not None]
            except Exception:
return "Mode", ""
if contents.find("Metadata") != -1:
try:
second_element = tr.select_one('td:nth-child(2)')
element = second_element.find("pre")
return "Metadata", element.attrs['data-json']
            except Exception:
return "Metadata", ""
if contents.find("Polar Plot") != -1:
element = tr.select_one("svg")
try:
polar_dict = {
'tle1': element.attrs['data-tle1'],
'tle2': element.attrs['data-tle2'],
'timeframe-start': element.attrs['data-timeframe-start'],
'timeframe-end': element.attrs['data-timeframe-end'],
'groundstation-lat': element.attrs['data-groundstation-lat'],
'groundstation-lon': element.attrs['data-groundstation-lon'],
'groundstation-alt': element.attrs['data-groundstation-alt'],
}
            except Exception:
polar_dict = dict()
return "Polar_Plot", polar_dict
if contents.find("Downloads") != -1:
audio = None
waterfall = None
waterfall_hash_name = None
waterfall_shape = None
for a in tr.find_all("a", href=True):
if str(a).find("Audio") != -1:
audio = a.attrs['href']
if str(a).find("Waterfall") != -1:
waterfall = a.attrs['href']
waterfall_hash_name = f'{hashlib.sha256(bytearray(waterfall, encoding="utf-8")).hexdigest()}.png'
if self.fetch_waterfalls:
waterfall_shape, waterfall_hash_name = self.fetch_waterfall(waterfall, waterfall_hash_name)
return 'Downloads', {'audio': audio, "waterfall": waterfall, "waterfall_hash_name": waterfall_hash_name,
"waterfall_shape": waterfall_shape}
return None, None
def fetch_waterfall(self, url, file_name):
"""
        Fetches and writes waterfall PNGs to disk, then crops the image and
        optionally converts it to greyscale.
        :param url: The URL of the waterfall file to pull
        :param file_name: The name the file should be saved as.
        :return: The shape of the cropped image and the path of the raw bytes file written to disk.
"""
res = ru.get_request(url)
waterfall_name = os.path.abspath(self.waterfall_path + file_name)
with open(waterfall_name, 'wb') as out:
out.write(res.content)
cropped_shape, bytes_name = iu.crop_and_save_psd(waterfall_name, greyscale=self.grey_scale)
return cropped_shape, bytes_name
def fetch_demod(self, a):
"""
"""
url = a.attrs['href']
res = ru.get_request(url)
original_name = a.text.strip()
file_name = f'{hashlib.sha256(bytearray(original_name, encoding="utf-8")).hexdigest()}.bin'
demod_name = os.path.abspath(self.demod_path + file_name)
with open(demod_name, 'wb') as out:
out.write(res.content)
return {
'original_name': original_name,
'location': demod_name
}
if __name__ == '__main__':
# Demonstration of use
print("Single Scrapes")
scraper = ObservationScraper(check_disk=False)
scrape1 = scraper.scrape_observation('https://network.satnogs.org/observations/5025420/')
scrape2 = scraper.scrape_observation('https://network.satnogs.org/observations/6881948/')
print(f"{scrape1}")
print(f"{scrape2}")
print("Multiprocess Observations Pull")
scraper.multiprocess_scrape_observations([5025420, 6881948])
|
/satnogs_webscraper-1.4.2.tar.gz/satnogs_webscraper-1.4.2/satnogs_webscraper/observation_scraper.py
| 0.523908 | 0.187021 |
observation_scraper.py
|
pypi
|
import os
from PIL import Image, ImageFile
ImageFile.LOAD_TRUNCATED_IMAGES = True
from collections import Counter
import numpy as np
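# Boundary detection: each find_*_bound helper scans the greyscale image for
# the first non-white (!= 255) pixel along one edge and takes the most common
# offset as the crop boundary of the central waterfall plot.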
def find_left_bound(im):
left_lengths = []
x_max, y_max = im.size
for y in range(0, y_max):
for x in range(0, x_max):
if im.getpixel((x, y)) != 255:
left_lengths.append(x)
break
return Counter(left_lengths).most_common(1)[0][0]
def find_upper_bound(im):
upper_lengths = []
x_max, y_max = im.size
for x in range(0, x_max):
for y in range(0, y_max):
if im.getpixel((x, y)) != 255:
upper_lengths.append(y)
break
return Counter(upper_lengths).most_common(1)[0][0]
def find_bottom_bound(im):
bottom_lengths = []
x_max, y_max = im.size
for x in range(0, x_max):
for y in range(y_max - 1, 0, -1):
if im.getpixel((x, y)) != 255:
bottom_lengths.append(y)
break
return Counter(bottom_lengths).most_common(1)[0][0]
def find_right_bound(im):
x_max, y_max = im.size
for x in range(x_max // 2, x_max):
broke = False
for y in range(0, y_max):
if im.getpixel((x, y)) != 255:
broke = True
break
if not broke:
return x
def crop_and_save_psd(input_image, delete_original=True, greyscale=True, resize=True, resize_dimen=(623, 1542)):
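    """
    Crops the central PSD (waterfall) out of a SATNOGS waterfall image and
    dumps it to disk as raw uint8 bytes.
    :param input_image: Path to the source PNG
    :param delete_original: Remove the source image after conversion
    :param greyscale: Convert to single-channel greyscale instead of RGB
    :param resize: Resize the crop to resize_dimen if it differs
    :param resize_dimen: Target (width, height) for the resized crop
    :return: The numpy array shape of the crop and the output file name
    """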
im_source_grey = Image.open(input_image).convert('L')
# Find the boundaries of the center most PSD and crop the image.
left_bound = find_left_bound(im_source_grey)
right_bound = find_right_bound(im_source_grey)
upper_bound = find_upper_bound(im_source_grey)
bottom_bound = find_bottom_bound(im_source_grey)
if greyscale:
im_source = Image.open(input_image).convert('L')
else:
im_source = Image.open(input_image).convert('RGB')
im_cropped = im_source.crop([left_bound, upper_bound, right_bound, bottom_bound])
if resize:
size = im_cropped.size
if size[0] != resize_dimen[0] or size[1] != resize_dimen[1]:
im_cropped = im_cropped.resize(resize_dimen, Image.Resampling.LANCZOS)
    # Convert to a uint8 numpy array and write the raw bytes to disk, using the
    # original file name minus the file extension
numpy_im = np.array(im_cropped)
# store the shape and write to a file
shape = numpy_im.shape
new_file_name = input_image[:-4]
numpy_im.tofile(new_file_name)
# remove the original, larger image
if delete_original:
os.remove(input_image)
return shape, new_file_name
|
/satnogs_webscraper-1.4.2.tar.gz/satnogs_webscraper-1.4.2/satnogs_webscraper/image_utils.py
| 0.433742 | 0.279607 |
image_utils.py
|
pypi
|
import tempfile
import datetime
import time
import json
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=50, fill='█', printEnd="\r"):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
printEnd - Optional : end character (e.g. "\r", "\r\n") (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print(f'\r{prefix} |{bar}| {percent}% {suffix}', end=printEnd)
# Print New Line on Complete
if iteration == total:
print()
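# Minimal usage sketch:
#   for i in range(1, 101):
#       time.sleep(0.01)
#       printProgressBar(i, 100, prefix='Progress:', suffix='Complete')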
def setup_temp_file(items_total, items_done):
temp = tempfile.NamedTemporaryFile()
setup = {
'start_time': int(time.time()),
'items_total': items_total,
'items_done': items_done
}
with open(temp.name, 'w') as file_out:
json.dump(setup, file_out)
return temp
def setup_progress_dict(items_total, items_done):
setup = {
'start_time': int(time.time()),
'items_total': items_total,
'items_done': items_done
}
return setup
def check_progress(temp_file, items_completed):
current_time = int(time.time())
with open(temp_file, 'r') as file_in:
setup = json.load(file_in)
num_completed_since_start = abs(items_completed - setup['items_done'])
if num_completed_since_start != 0:
time_per_item = (current_time - setup['start_time']) / num_completed_since_start
else:
time_per_item = 0
seconds_left = time_per_item * (setup['items_total'] - items_completed)
iteration = items_completed
start_time = datetime.datetime.fromtimestamp(setup['start_time'])
prefix = datetime.datetime.strftime(start_time, "%d/%m/%y %H:%M:%S")
end_time = datetime.datetime.now() + datetime.timedelta(seconds=seconds_left)
suffix = datetime.datetime.strftime(end_time, "%d/%m/%y %H:%M:%S.")
printProgressBar(iteration, setup['items_total'], prefix=prefix, suffix=suffix)
def check_progress_dict(setup, items_completed):
current_time = int(time.time())
num_completed_since_start = abs(items_completed - setup['items_done'])
if num_completed_since_start != 0:
time_per_item = (current_time - setup['start_time']) / num_completed_since_start
else:
time_per_item = 0
seconds_left = time_per_item * (setup['items_total'] - items_completed)
iteration = items_completed
start_time = datetime.datetime.fromtimestamp(setup['start_time'])
prefix = datetime.datetime.strftime(start_time, "%d/%m/%y %H:%M:%S")
end_time = datetime.datetime.now() + datetime.timedelta(seconds=seconds_left)
suffix = datetime.datetime.strftime(end_time, "%d/%m/%y %H:%M:%S.")
printProgressBar(iteration, setup['items_total'], prefix=prefix, suffix=suffix)
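# Minimal usage sketch (do_work is a hypothetical placeholder): the ETA shown
# in the suffix is extrapolated from the average time per item since start_time.
#   progress = setup_progress_dict(items_total=50, items_done=0)
#   for done in range(1, 51):
#       do_work()
#       check_progress_dict(progress, done)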
|
/satnogs_webscraper-1.4.2.tar.gz/satnogs_webscraper-1.4.2/satnogs_webscraper/progress_utils.py
| 0.555194 | 0.227255 |
progress_utils.py
|
pypi
|
from pathlib import Path
import yaml
from flatdict import FlatDict
from satorici.validator import (
INPUT_REGEX,
is_command_group,
is_import_group,
validate_playbook,
)
from satorici.validator.exceptions import NoExecutionsError, PlaybookVariableError
def get_unbound(commands: list[list[str]], key: str, flat_config: FlatDict):
variables = set()
for command in commands:
variables.update(INPUT_REGEX.findall(command[0]))
keys: list[str] = flat_config.keys()
previous_paths = keys[: keys.index(key)]
path = key.split(":")
levels = len(path)
for variable in variables:
prefixes = tuple(":".join(path[:i] + [variable]) for i in range(levels))
valid_prefixes = [path for path in previous_paths if path.startswith(prefixes)]
if not valid_prefixes:
yield variable
def get_parameters(config: dict):
"""Returns the needed parameters from the yaml loaded config"""
flat_config = FlatDict(config)
parameters: set[str] = set()
for key, value in flat_config.items():
if is_command_group(value):
parameters.update(get_unbound(value, key, flat_config))
return parameters
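# Illustration (placeholder syntax assumed; the real pattern is whatever
# INPUT_REGEX matches): for a playbook where a command's first element
# references a variable that no earlier key in the flattened config defines,
# get_parameters reports that variable as a needed parameter.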
def validate_parameters(params: dict):
if isinstance(params, dict):
if all(isinstance(v, (str, int, list)) for v in params.values()):
return True
def has_executions(config: dict, base_dir: Path):
flat_config = FlatDict(config)
imports: set[str] = set()
for value in flat_config.values():
if is_import_group(value):
imports.update([i for i in value if i.startswith("file")])
elif is_command_group(value):
return True
for i in imports:
path = base_dir / i[7:]
if not path.is_file():
continue
try:
imported = yaml.safe_load((base_dir / i[7:]).read_text())
validate_playbook(imported)
except (PlaybookVariableError, NoExecutionsError):
pass
except Exception:
continue
for value in FlatDict(imported).values():
if is_command_group(value):
return True
return False
|
/satori_ci-1.5.2-py3-none-any.whl/satoricli/classes/validations.py
| 0.514644 | 0.211794 |
validations.py
|
pypi
|
# Language Asserts
## Asserts
You can assert what will be the behavior of [executions](language_execution.md):
| Assert | Value | Description |
|-------------------------|----------------|------------------------------------------------------------------------|
| assertStdout            | Boolean        | Is output produced?
| assertStdoutEquals      | String\*       | Is the output equal to the String?
| assertStdoutNotEquals   | String         | Is the output different from the String?
| assertStdoutContains    | String         | Does the output contain the String?
| assertStdoutNotContains | String         | Does the output not contain the String?
| assertStdoutSHA256      | SHA256Checksum | Is the output equal to this SHA256 hash?
| assertStdoutRegex       | Regex          | Does the output match your regexp?
| assertStdoutNotRegex    | Regex          | Does the output not match your regexp?
| assertStderr            | Boolean        | Are errors produced?
| assertStderrEquals      | String\*       | Is the error equal to the String?
| assertStderrNotEquals   | String         | Is the error different from the String?
| assertStderrContains    | String         | Does the error contain the String?
| assertStderrNotContains | String         | Does the error not contain the String?
| assertStderrSHA256      | SHA256Checksum | Is the error equal to this SHA256 hash?
| assertStderrRegex       | Regex          | Does the error match your regexp?
| assertStderrNotRegex    | Regex          | Does the error not match your regexp?
| assertReturnCode        | Integer        | Is the return code equal to the Integer?
| assertSoftwareExists    | Boolean        | Does the software being executed exist? True by default
| assertDifferent         | Boolean        | Does the execution behave differently when using different inputs?
| assertKilled            | Boolean        | Did the software time out?
---
### Parametrized Asserts
Whenever you need to define additional settings for an assert, you start by defining its value. For example, let's start by asserting that the output will contain "Hello World":
```yml
HelloWorld:
assertStdoutContains: "Hello World"
echo:
- [ echo Hello World ]
```
You would define its value first:
```yml
HelloWorld:
assertStdoutContains:
- value: "Hello World"
echo:
- [ echo Hello World ]
```
#### Severity
Now let's define its severity:
```yml
HelloWorld:
assertStdoutContains:
- value: "Hello World"
- severity: 1
echo:
- [ echo Hello World ]
```
#### Quantity
Now you may need to add a weight for how many occurrences affect your assertion. The number of blockers within a report should depict the priority of the test:
```yml
Blocker:
assertStdoutContains:
- value: whatever
- severity: 1
- count:
            [ wc -l whatever ]
run:
        - [ "echo Whatever\nwhatever >> whatever" ]
```
This technique is used for [testing AWS environments with ScoutSuite using the playbook satori://code/scoutsuite.yml](https://github.com/satorici/playbooks/blob/main/aws/scoutsuite.yml)
---
#### assertStdout
| Input | Description |
|---------|-----------------------------------------
| Boolean | Asserts if an output has been produced |
- <span style="color:green">Example Pass Test</span>: the program should deliver output, and it does:
```yml
test:
assertStdout: True
run:
- [ echo Hello World ]
```
- <span style="color:red">Example Fail Test</span>: the program should deliver output, but no output is produced:
```yml
test:
assertStdout: True
run:
- [ ./broken_executable ]
```
---
#### assertStdoutEquals
| Input | Description |
|--------|-------------------------------------------------
| String | Asserts that the output is equal to the String |
- <span style="color:green">Example Pass Test</span>: the program should only output "Hello World", and it does:
```yml
test:
assertStdoutEquals: "Hello World"
run:
- [ echo Hello World ]
```
- <span style="color:red">Example Fail Test</span>: the program should only output "Hello World", but it doesn't:
```yml
test:
assertStdoutEquals: "Hello World"
run:
- [ echo 'hello world' ]
```
---
#### assertStdoutNotEquals
| Input | Description |
|-------|---------------------------------------
|String | Is the output different from the String? |
- <span style="color:green">Example Pass Test</span>: the program output should not be equal to "Hello World", and is not:
```yml
test:
assertStdoutNotEquals: "Hello World"
input:
- "Hello World"
mutate_qty: 1
run:
- [ echo $(input) ]
```
---
#### assertStdoutContains
| Input | Description |
|-------|--------------
| String | Does the output contain the String?
- <span style="color:green">Example Pass Test</span>: the program output should contain the string "Hello World", and it does:
```yml
test:
assertStdoutContains: "Hello World"
run:
- [ echo Hello World 2023 ]
```
---
#### assertStdoutNotContains
| Input | Description |
|--------|------------------------------------------
| String | Does the output not contain the String? |
- <span style="color:green">Example Pass Test</span>: the program output should not contain the string "Error", and it does not:
```yml
test:
assertStdoutNotContains: "Error"
run:
- [ echo Hello World ]
```
---
#### assertStdoutSHA256
| Input | Description |
|----------------|-------------------------------------------
| SHA256Checksum | Is the output equal to this SHA256 hash? |
- <span style="color:green">Example Pass Test</span>: the scanned ports of www.example.com should match the stored SHA256 checksum (no service changed), and they do:
```yml
settings:
name: "Nmap: did any service changed?"
install:
assertReturnCode: 0
nmap:
- [ apt install -y nmap ]
nmap:
assertReturnCode: 0
run:
- [ "nmap -n www.example.com -Pn -p21,22,80,443,3000,3306,5432 -sT -oG nmap" ]
services:
assertStdoutSHA256:
- "e3b0c44298fc1c142afbf4c8996fb92427ac41e4649b934ca49599ab7852b855"
running:
- [ "grep Ports nmap | sort -u" ]
```
---
#### assertStdoutRegex
| Input | Description |
|-------|---------------------------------------
| Regex | Does the output match your regexp? |
- <span style="color:green">Example Pass Test</span>: the program output should contain the string "Hello " and additional characters, and it does:
```yml
test:
assertStdoutRegex: "Hello .*"
run:
- [ echo Hello World ]
```
---
#### assertStdoutNotRegex
| Input | Description |
|-------|-----------------------------------------
| Regex | Does the output not match your regexp? |
- <span style="color:gray">Example Unknown Test</span>: the program output should not contain the string "Hello World" anywhere on the output, but the input could be mutated to "somethingHello World" and the result depends on the mutation:
```yml
test:
    assertStdoutNotRegex: ".*Hello World.*"
input:
- "Hello World"
mutate_qty: 1
run:
- [ echo Hello $(input) ]
```
---
#### assertStderr
| Input | Description |
|---------|-----------------------|
| Boolean | Are errors produced? |
- <span style="color:green">Example Pass Test</span>: the program should not produce errors, and it does not:
```yml
test:
    assertStderr: False
run:
- [ echo Hello World ]
```
---
#### assertStderrEquals
| Input | Description |
|----------|------------------------------------
| String\* | Is the error equal to the String? |
---
#### assertStderrNotEquals
| Input | Description |
|--------|--------------------------------------
| String | Is the error different from the String? |
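- <span style="color:green">Example Pass Test</span>: the error output should differ from "Hello World"; `echo` writes to stdout and produces no errors, so it does:
```yml
test:
    assertStderrNotEquals: "Hello World"
    run:
        - [ echo Hello World ]
```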
---
#### assertStderrContains
| Input | Description |
|--------|--------------------------------------
| String | Does the error contain the String? |
- <span style="color:green">Example Pass Test</span>: the program's errors should contain the string Traceback, and they do:
```yml
install:
- [ "echo import nonexistent > test.py "]
test:
assertStderrContains: "Traceback"
run:
- [ python3 test.py ]
```
---
#### assertStderrNotContains
| Input | Description |
|--------|-----------------------------------------
| String | Does the error not contain the String? |
- <span style="color:red">Example Fail Test</span>: the program's errors should not contain the string Traceback, but they do:
```yml
install:
- [ "echo import nonexistent > test.py "]
test:
assertStderrNotContains: "Traceback"
run:
- [ python3 test.py ]
```
---
#### assertStderrSHA256
| Input | Description |
|----------------|------------------------------------------
| SHA256Checksum | Is the error equal to this SHA256 hash? |
- <span style="color:red">Example Fail Test</span>: the error output should match the given SHA256 hash, but it does not:
```yml
install:
- [ "echo import nonexistent > test.py "]
test:
assertStderrSHA256: "69827a4c85154b891cae9c35d99887375d815ec676bb7ce86e1f7601f6fec3ad"
run:
- [ python3 test.py ]
```
---
#### assertStderrRegex
| Input | Description |
|-------|--------------------------------------
| Regex | Does the error match your regexp? |
- <span style="color:gray">Example Unknown Test</span>: the Python script my_script.py might throw a KeyError exception with 'unexpected_key' if a certain condition in the script isn't met:
```yml
RunPythonScriptTest:
assertStderrRegex: ".*KeyError: 'unexpected_key'.*"
run:
- [ python3, my_script.py ]
```
---
#### assertStderrNotRegex
| Input | Description |
|-------|----------------------------------------
| Regex | Does the error not match your regexp? |
- <span style="color:green">Example Pass Test</span>: the program's errors should not contain a Traceback, and they don't:
```yml
install:
- [ "echo import os > test.py "]
test:
    assertStderrNotRegex: ".*Traceback.*"
run:
- [ python3 test.py ]
```
---
#### assertReturnCode
| Input | Description |
|---------|-------------------------------------------
| Integer | Is the return code equal to the Integer? |
- <span style="color:green">Example Pass Test</span>: the program should return code 0, and it does:
```yml
test:
assertReturnCode: 0
run:
- [ echo This program is executed correctly ]
```
---
#### assertSoftwareExists
| Input | Description |
|-------|--------------
| Boolean | Does the software being executed exist? True by default
- <span style="color:red">Example Fail Test</span>: the program should exist, but it does not:
```yml
test:
assertSoftwareExists: True # by default
run:
- [ ./your_program ]
```
---
#### assertDifferent
| Input | Description |
|---------|----------------------------------------------------------------------
| Boolean | Does the execution behave differently when using different inputs? |
- <span style="color:red">Example Fail Test</span>: the production and staging environments should look the same, but they do not:
```yml
API:
- [ "www.example.com" ]
- [ "staging.example.com" ]
test:
assertDifferent: False
run:
- [ curl $API ]
```
---
#### assertKilled
| Input | Description |
|---------|------------------------------
| Boolean | Did the software time out? |
- <span style="color:red">Example Fail Test</span>: the software should finish execution within 10 seconds, but it does not:
```yml
settings:
software_timeout: 10
test:
assertKilled: False
run:
- [ sleep 20 ]
```
---
If you need any help, please reach out to us on [Discord](https://discord.gg/F6Uzz7fc2s) or via [Email](mailto:[email protected])
|
/satori_docs-1.3.1-py3-none-any.whl/docs/language_asserts.md
| 0.725746 | 0.943086 |
language_asserts.md
|
pypi
|
import os
import struct
from miniws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from miniws4py.compat import unicode, py3k
__all__ = ['Message', 'TextMessage', 'BinaryMessage', 'CloseControlMessage',
'PingControlMessage', 'PongControlMessage']
class Message(object):
def __init__(self, opcode, data=b'', encoding='utf-8'):
"""
        A message is an application-level entity. It's usually built
        from one or many frames. The protocol defines several kinds
        of messages which are grouped into two sets:
        * data messages which can be text or binary typed
        * control messages which provide a mechanism to perform
        in-band control communication between peers
        The ``opcode`` indicates the message type and ``data`` is
        the possible message payload.
        The payload is held internally as a :class:`bytearray` as it is
        faster than pure strings for append operations.
        Unicode data will be encoded using the provided ``encoding``.
"""
self.opcode = opcode
self._completed = False
self.encoding = encoding
if isinstance(data, unicode):
if not encoding:
raise TypeError("unicode data without an encoding")
data = data.encode(encoding)
elif isinstance(data, bytearray):
data = bytes(data)
elif not isinstance(data, bytes):
raise TypeError("%s is not a supported data type" % type(data))
self.data = data
def single(self, masked=False):
return Frame(body=self.data, opcode=self.opcode, fin=1, masked=masked).build()
def fragment(self, first=False, last=False, masked=False):
"""
Returns a :class:`miniws4py.framing.Frame` bytes.
The behavior depends on the given flags:
* ``first``: the frame uses ``self.opcode`` else a continuation opcode
* ``last``: the frame has its ``fin`` bit set
"""
fin = 1 if last is True else 0
opcode = self.opcode if first is True else OPCODE_CONTINUATION
return Frame(body=self.data,
opcode=opcode,
fin=fin,
masked=masked).build()
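    # Sketch: a text message split into two frames (hypothetical payloads):
    #   Message(OPCODE_TEXT, b'Hello ').fragment(first=True)  # TEXT opcode, fin=0
    #   Message(OPCODE_TEXT, b'world').fragment(last=True)    # CONTINUATION, fin=1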
@property
def completed(self):
"""
        Indicates that the message is complete, meaning
the frame's ``fin`` bit was set.
"""
return self._completed
@completed.setter
def completed(self, state):
"""
Sets the state for this message. Usually
set by the stream's parser.
"""
self._completed = state
def extend(self, data):
"""
Add more ``data`` to the message.
"""
if isinstance(data, bytes):
self.data += data
elif isinstance(data, bytearray):
self.data += bytes(data)
elif isinstance(data, unicode):
self.data += data.encode(self.encoding)
else:
raise TypeError("%s is not a supported data type" % type(data))
def __len__(self):
return len(self.__unicode__())
def __str__(self):
if py3k:
if self.encoding:
return self.data.decode(self.encoding)
else:
return '{0}'.format(self.data)
return self.data
def __unicode__(self):
return self.data.decode(self.encoding)
class TextMessage(Message):
def __init__(self, text=None):
Message.__init__(self, OPCODE_TEXT, text)
@property
def is_binary(self):
return False
@property
def is_text(self):
return True
class BinaryMessage(Message):
def __init__(self, bytes=None):
Message.__init__(self, OPCODE_BINARY, bytes, encoding=None)
@property
def is_binary(self):
return True
@property
def is_text(self):
return False
def __len__(self):
return len(self.data)
class CloseControlMessage(Message):
def __init__(self, code=1000, reason=''):
data = b""
if code:
data += struct.pack("!H", code)
if reason is not None:
if isinstance(reason, unicode):
reason = reason.encode('utf-8')
data += reason
Message.__init__(self, OPCODE_CLOSE, data, 'utf-8')
self.code = code
self.reason = reason
def __str__(self):
if py3k:
return self.reason.decode('utf-8')
return self.reason
def __unicode__(self):
return self.reason.decode(self.encoding)
class PingControlMessage(Message):
def __init__(self, data=None):
Message.__init__(self, OPCODE_PING, data)
class PongControlMessage(Message):
def __init__(self, data):
Message.__init__(self, OPCODE_PONG, data)
|
/satori-rtm-sdk-1.5.0.tar.gz/satori-rtm-sdk-1.5.0/miniws4py/messaging.py
| 0.750553 | 0.280875 |
messaging.py
|
pypi
|
import struct
from struct import unpack
from miniws4py.utf8validator import Utf8Validator
from miniws4py.messaging import TextMessage, BinaryMessage, CloseControlMessage,\
PingControlMessage, PongControlMessage
from miniws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from miniws4py.exc import FrameTooLargeException, ProtocolException
from miniws4py.compat import py3k
VALID_CLOSING_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011]
class Stream(object):
def __init__(self):
""" Represents a websocket stream of bytes flowing in and out.
The stream doesn't know about the data provider itself and
doesn't even know about sockets. Instead the stream simply
yields for more bytes whenever it requires them. The stream owner
is responsible to provide the stream with those bytes until
a frame can be interpreted.
.. code-block:: python
:linenos:
>>> s = Stream()
>>> s.parser.send(BYTES)
>>> s.has_messages
False
>>> s.parser.send(MORE_BYTES)
>>> s.has_messages
True
>>> s.message
<TextMessage ... >
"""
self.message = None
"""
        Parsed text or binary messages. Whenever the parser
reads more bytes from a fragment message, those bytes
are appended to the most recent message.
"""
self.pings = []
"""
Parsed ping control messages. They are instances of
:class:`miniws4py.messaging.PingControlMessage`
"""
self.pongs = []
"""
Parsed pong control messages. They are instances of
:class:`miniws4py.messaging.PongControlMessage`
"""
self.closing = None
"""
        Parsed close control message. Instance of
:class:`miniws4py.messaging.CloseControlMessage`
"""
self.errors = []
"""
Detected errors while parsing. Instances of
:class:`miniws4py.messaging.CloseControlMessage`
"""
self._parser = None
"""
Parser in charge to process bytes it is fed with.
"""
@property
def parser(self):
if self._parser is None:
self._parser = self.receiver()
# Python generators must be initialized once.
next(self.parser)
return self._parser
def _cleanup(self):
"""
Frees the stream's resources rendering it unusable.
"""
self.message = None
if self._parser is not None:
if not self._parser.gi_running:
self._parser.close()
self._parser = None
self.errors = None
self.pings = None
self.pongs = None
self.closing = None
def text_message(self, text):
"""
Returns a :class:`miniws4py.messaging.TextMessage` instance
ready to be built. Convenience method so
that the caller doesn't need to import the
:class:`miniws4py.messaging.TextMessage` class itself.
"""
return TextMessage(text=text)
def binary_message(self, bytes):
"""
Returns a :class:`miniws4py.messaging.BinaryMessage` instance
ready to be built. Convenience method so
that the caller doesn't need to import the
:class:`miniws4py.messaging.BinaryMessage` class itself.
"""
return BinaryMessage(bytes)
@property
def has_message(self):
"""
Checks if the stream has received any message
which, if fragmented, is now completed.
"""
if self.message is not None:
return self.message.completed
return False
def close(self, code=1000, reason=''):
"""
Returns a close control message built from
a :class:`miniws4py.messaging.CloseControlMessage` instance,
using the given status ``code`` and ``reason`` message.
"""
return CloseControlMessage(code=code, reason=reason)
def ping(self, data=''):
"""
Returns a ping control message built from
a :class:`miniws4py.messaging.PingControlMessage` instance.
"""
return PingControlMessage(data).single()
def pong(self, data=''):
"""
Returns a ping control message built from
a :class:`miniws4py.messaging.PongControlMessage` instance.
"""
return PongControlMessage(data).single()
def receiver(self):
"""
Parser that keeps trying to interpret bytes it is fed with as
incoming frames part of a message.
        Control messages are single frames only, while data messages, like text
        and binary, may be fragmented across frames.
        The way it works is by instantiating a :class:`miniws4py.framing.Frame` object,
        then running its parser generator which yields how many bytes
        it requires to perform its task. The stream parser yields this value
        to its caller and feeds the frame parser.
        When the frame parser raises :exc:`StopIteration`, the stream parser
        tries to make sense of the parsed frame. It dispatches the frame's bytes
        to the most appropriate message type based on the frame's opcode.
        Overall this makes the stream parser totally agnostic to
        the data provider.
"""
utf8validator = Utf8Validator()
running = True
frame = None
while running:
frame = Frame()
while 1:
try:
some_bytes = (yield next(frame.parser))
frame.parser.send(some_bytes)
except GeneratorExit:
running = False
break
except StopIteration:
frame._cleanup()
some_bytes = frame.body
if some_bytes:
some_bytes = bytearray(some_bytes)
if frame.opcode == OPCODE_TEXT:
if self.message and not self.message.completed:
# We got a text frame before we completed the previous one
msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
self.errors.append(msg)
break
m = TextMessage(some_bytes)
m.completed = (frame.fin == 1)
self.message = m
if some_bytes:
is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)
if not is_valid or (m.completed and not end_on_code_point):
self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
break
elif frame.opcode == OPCODE_BINARY:
if self.message and not self.message.completed:
                            # We got a binary frame before we completed the previous one
msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
self.errors.append(msg)
break
m = BinaryMessage(some_bytes)
m.completed = (frame.fin == 1)
self.message = m
elif frame.opcode == OPCODE_CONTINUATION:
m = self.message
if m is None:
self.errors.append(CloseControlMessage(code=1002, reason='Message not started yet'))
break
m.extend(some_bytes)
m.completed = (frame.fin == 1)
if m.opcode == OPCODE_TEXT:
if some_bytes:
is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)
if not is_valid or (m.completed and not end_on_code_point):
self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
break
elif frame.opcode == OPCODE_CLOSE:
code = 1000
reason = ""
if frame.payload_length == 0:
self.closing = CloseControlMessage(code=1000)
elif frame.payload_length == 1:
self.closing = CloseControlMessage(code=1002, reason='Payload has invalid length')
else:
try:
# at this stage, some_bytes have been unmasked
# so actually are held in a bytearray
code = int(unpack("!H", bytes(some_bytes[0:2]))[0])
except struct.error:
code = 1002
reason = 'Failed at decoding closing code'
else:
# Those codes are reserved or plainly forbidden
if code not in VALID_CLOSING_CODES and not (2999 < code < 5000):
reason = 'Invalid Closing Frame Code: %d' % code
code = 1002
elif frame.payload_length > 1:
reason = frame.body[2:]
if not py3k: reason = bytearray(reason)
is_valid, end_on_code_point, _, _ = utf8validator.validate(reason)
if not is_valid or not end_on_code_point:
self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
break
reason = bytes(reason)
self.closing = CloseControlMessage(code=code, reason=reason)
elif frame.opcode == OPCODE_PING:
self.pings.append(PingControlMessage(some_bytes))
elif frame.opcode == OPCODE_PONG:
self.pongs.append(PongControlMessage(some_bytes))
else:
self.errors.append(CloseControlMessage(code=1003))
break
except ProtocolException:
self.errors.append(CloseControlMessage(code=1002))
break
except FrameTooLargeException:
self.errors.append(CloseControlMessage(code=1002, reason="Frame was too large"))
break
frame._cleanup()
frame.body = None
frame = None
if self.message is not None and self.message.completed:
utf8validator.reset()
utf8validator.reset()
utf8validator = None
self._cleanup()
|
/satori-rtm-sdk-1.5.0.tar.gz/satori-rtm-sdk-1.5.0/miniws4py/streaming.py
| 0.726134 | 0.229136 |
streaming.py
|
pypi
|
from __future__ import print_function
from contextlib import contextmanager
import satori.rtm.auth as auth
from satori.rtm.exceptions import AuthError
import satori.rtm.internal_queue as queue
import satori.rtm.internal_json as json
import threading
import satori.rtm.internal_client_action as a
from satori.rtm.internal_client import InternalClient
import satori.rtm.internal_subscription as s
from satori.rtm.internal_logger import logger
SubscriptionMode = s.SubscriptionMode
Full = queue.Full
class Client(object):
"""
    Manages the WebSocket connection to RTM and provides asynchronous
    publish, read/write, and subscribe operations.
"""
def __init__(
self, endpoint, appkey,
fail_count_threshold=float('inf'),
reconnect_interval=1, max_reconnect_interval=300,
observer=None, restore_auth_on_reconnect=True,
max_queue_size=20000, https_proxy=None, protocol='json'):
r"""
Description
Constructor for the Client.
Parameters
* endpoint {string} [required] - RTM endpoint as a string. Example:
"wss://rtm:8443/foo/bar". If port number is omitted, it defaults to 80 for
ws:// and 443 for wss://. Available from the Dev Portal.
* appkey {string} [required] - Appkey used to access RTM.
Available from the Dev Portal.
* reconnect_interval {int} [optional] - Time period, in seconds, between
reconnection attempts. The timeout period between each successive
connection attempt increases, but starts with this value. Use
max_reconnect_interval to specify the maximum number of seconds between
reconnection attempts. Default is 1.
* max_reconnect_interval {int} [optional] - Maximum period of time, in
seconds, to wait between reconnection attempts. Default is 300.
* fail_count_threshold {int} [optional] - Number of times the SDK should
attempt to reconnect if the connection disconnects. Specify any value
that resolves to an integer. Default is inf (infinity).
* observer {client_observer} [optional] - Instance of a client observer
class, used to define functionality based on the state changes of a
Client.
Set this property with client.observer or in the `make_client(*args,
**kwargs)` or `Client(*args, **kwargs)` methods.
* restore_auth_on_reconnect {boolean} optional - Whether to restore
authentication after reconnects. Default is True.
        * max_queue_size {int} optional - this parameter limits the amount of
        concurrent requests in order to avoid an 'out of memory' situation.
        For example, if max_queue_size is 10 and the client code sends 11
        publish requests so fast that by the time it sends the 11th one the reply
        for the first one has not yet arrived, this 11th call to `client.publish`
        will throw the `satori.rtm.client.Full` exception.
* https_proxy (string, int) [optional] - (host, port) tuple for https proxy
* protocol {string} [optional] - one of 'cbor' or 'json' (default)
"""
assert endpoint
assert endpoint.startswith('ws://') or endpoint.startswith('wss://'),\
'Endpoint must start with "ws(s)://" but "%s" does not' % endpoint
self._queue = queue.Queue(maxsize=max_queue_size)
self._internal = InternalClient(
self._queue,
endpoint, appkey,
fail_count_threshold,
reconnect_interval, max_reconnect_interval,
observer, restore_auth_on_reconnect, https_proxy,
protocol)
self._disposed = False
self._protocol = protocol
self._thread = threading.Thread(
target=self._internal_event_loop,
name='ClientLoop')
self._thread.daemon = True
self._thread.start()
if protocol == 'cbor':
import cbor2
self._dumps = cbor2.dumps
else:
self._dumps = json.dumps
def last_connecting_error(self):
"""
Description
If there were unsuccessful connection attempts, this function returns
the exception for the last such attempt. Otherwise returns None.
"""
return self._internal.last_connecting_error
def _enqueue(self, msg, timeout=0.1):
if not self._disposed:
self._queue.put(msg, block=True, timeout=timeout)
else:
raise RuntimeError(
'Trying to use a disposed satori.rtm.client.Client')
def start(self):
"""
Description
Starts a WebSocket connection to RTM for the Client object. You
must call the start() method before you subscribe to a channel using the
Client object methods.
If you publish any messages before calling this method, the SDK queues the
messages to publish after establishing the WebSocket connection.
"""
self._enqueue(a.Start())
def stop(self):
"""
Description
Closes a WebSocket connection to RTM for the Client object.
Use this method if you want to explicitly stop all interaction with RTM.
After you use this method, if you call publish or subscribe methods
while the client is stopped, the SDK queues the requests and sends them when
the client reconnects.
"""
self._enqueue(a.Stop())
def authenticate(self, auth_delegate, callback):
"""
Description
Validates the identity of an application user after connecting to RTM
with the Client class. After the user authenticates with RTM, the operations
that the client can perform depends on the role.
Since the authentication process is an asynchronous operation, the callback
function is required. The callback function processes the PDU response from
RTM.
For more information about authentication, see
*Authentication and Authorization* in the online docs.
Parameters
* auth_delegate {AuthDelegate | RoleSecretAuthDelegate} [required] - An
authentication delegate object. Use a
satori.rtm.auth.RoleSecretAuthDelegate class for the role-based
authentication process.
* callback {function} [required] - Function to execute after RTM
returns a response.
"""
self._enqueue(a.Authenticate(auth_delegate, callback))
def publish(self, channel, message, callback=None):
"""
Description
Publishes a message to the specified channel.
The channel and message parameters are required. The `message` parameter can
be any JSON-supported value. For more information, see www.json.org.
By default, this method does not acknowledge the completion of the publish
operation. Optionally, you can specify a callback function to process the
response from RTM. If you specify a callback, RTM
returns an object that represents the Protocol Data Unit (PDU) response to
        the publish request. For more information about PDUs, see the *RTM API*
        reference in the online docs.
Since this is an asynchronous method, you can also use the Python threading
module to create an event to track completion of the publish operation in
the callback function.
Parameters
* message {string} [required] - JSON value to publish as message. It must be
serializable using `json.dumps` from the Python standard `JSON` module.
* channel {string} [required] - Name of the channel to which you want to
publish.
* callback {function} [optional] - Callback function to execute on the PDU
response returned by RTM to the publish request.
"""
self._enqueue(a.Publish(channel, self._dumps(message), callback))
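    # Sketch (from the docstring above): track completion of a publish with a
    # threading.Event (channel name is hypothetical):
    #   done = threading.Event()
    #   client.publish('my-channel', {'temp': 21}, callback=lambda pdu: done.set())
    #   done.wait(timeout=10)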
def read(self, channel, args=None, callback=None):
"""
Description
Asynchronously reads a value from the specified channel. This function
has no return value, but you can inspect
the reply PDU in the callback function.
You can also use the `args` parameter to add additional JSON key-value pairs
to the PDU in the read request that the SDK sends
to RTM. For more information about PDUs, see *RTM API* in the online docs.
Parameters
* channel {string} [required] - Name of the channel to read from.
* args {object} [optional] - Any JSON key-value pairs to send in the
read request. To create a filter, use the desired fSQL query as a string
value for `filter` key.
* callback {function} [optional] - Callback function to execute on the PDU
response returned to the subscribe request by RTM.
"""
self._enqueue(a.Read(channel, args, callback))
def write(self, channel, value, callback=None):
"""
Description
Asynchronously writes the given value to the specified channel.
Parameters
* channel {string} [required] - Channel name.
* value {json value} [required] - JSON that represents the message payload
to publish.
* callback {function} [optional] - Callback passed the response PDU from
RTM.
"""
self._enqueue(a.Write(channel, self._dumps(value), callback))
def delete(self, channel, callback=None):
"""
Description
Asynchronously deletes any value from the specified channel.
Parameters
* channel {string} [required] - Channel name.
* callback {function} [optional] - Callback passed the response PDU from
RTM.
"""
self._enqueue(a.Delete(channel, callback))
def subscribe(
self, channel_or_subscription_id, mode,
subscription_observer, args=None):
"""
Description
Subscribes to the specified channel.
Optionally, you can also use an observer that implements the subscription
callback functions and pass the observer as the `subscription_observer`
parameter. The callback functions represent each possible state for the
channel subscription. See *Subscription Observer*.
You can also use the `args` parameter to add additional JSON key-value pairs
to the PDU in the subscribe request that the SDK sends
to RTM. For more information about PDUs, see *RTM API* in the online docs.
.. note:: To receive data published to a channel after you subscribe to it,
use the `on_subscription_data()` callback function in a
subscription observer.
Parameters
* channel_or_subscription_id {string} [required] - String that identifies
the channel. If you do not use the `filter` parameter, it is the channel
name. Otherwise, it is a unique identifier for the channel (subscription
id).
* subscription_mode {SubscriptionMode} [required] - this mode determines the
behaviour of the Python SDK and RTM when resubscribing after a
reconnection. Use SubscriptionMode.ADVANCED, SubscriptionMode.RELIABLE, or
SubscriptionMode.SIMPLE.
* subscription_observer {object} [optional] - Instance of an observer class
that implements the subscription observer callback functions.
* args {object} [optional] - Any JSON key-value pairs to send in the
subscribe request. To include a filter, put the desired fSQL query
as a string value for the `filter` key. See *Subscribe PDU* in the
online docs.
"""
self._enqueue(
a.Subscribe(
channel_or_subscription_id, mode,
subscription_observer, args))
def unsubscribe(self, channel_or_subscription_id):
"""
Description
Unsubscribes from a channel.
After you unsubscribe, the application no longer receives messages for the
channel. To identify when the unsubscribe operation has completed, use the
`on_leave_subscribed()` callback function of a subscription observer class.
Parameters
* channel {string} [required] - Name of the channel from which you want to
unsubscribe.
"""
self._enqueue(a.Unsubscribe(channel_or_subscription_id))
def dispose(self):
"""
Description
Client finishes all work, release all resources and becomes unusable.
Upon completion, `client.observer.on_enter_disposed()` is called.
"""
if not self._disposed:
self._enqueue(a.Dispose(), timeout=None)
self._disposed = True
if self._thread != threading.current_thread():
self._thread.join()
@property
def observer(self):
return self._internal.observer
@observer.setter
def observer(self, o):
self._internal.observer = o
def is_connected(self):
"""
Description
Returns `True` if the Client object is connected via a
WebSocket connection to RTM and `False` otherwise.
Returns
Boolean
"""
return self._internal.is_connected()
def _internal_event_loop(self):
while True:
if self._internal.process_one_message(timeout=None):
break
class ClientStateObserver(object):
def on_enter_stopped(self):
logger.info('on_enter_stopped')
def on_leave_stopped(self):
logger.info('on_leave_stopped')
def on_enter_connecting(self):
logger.info('on_enter_connecting')
def on_leave_connecting(self):
logger.info('on_leave_connecting')
def on_enter_awaiting(self):
logger.info('on_enter_awaiting')
def on_leave_awaiting(self):
logger.info('on_leave_awaiting')
def on_enter_connected(self):
logger.info('on_enter_connected')
def on_leave_connected(self):
logger.info('on_leave_connected')
def on_enter_disposed(self):
logger.info('on_enter_disposed')
def on_enter_stopping(self):
logger.info('on_enter_stopping')
def on_leave_stopping(self):
logger.info('on_leave_stopping')
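# ClientStateObserver above logs every client state transition. make_client
# below subclasses it to also signal connection readiness; an application can
# likewise assign an instance (or a subclass) to a client, e.g. (sketch):
#
#     client.observer = ClientStateObserver()
#     client.start()  # transitions are now logged via `logger`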
@contextmanager
def make_client(*args, **kwargs):
r"""
make_client(\*args, \*\*kwargs)
-------------------------------
Description
The `make_client()` function is a context manager. Call `make_client()`
using a `with` statement and the SDK automatically starts the WebSocket
connection. The SDK stops and then closes the WebSocket connection when the
statement completes or terminates due to an error.
This function takes the same parameters as the Client constructor plus
optional `auth_delegate`.
To use this function, import it from the client module::
`from satori.rtm.client import make_client`
Parameters
* endpoint {string} [required] - RTM endpoint as a string. Example:
"wss://rtm:8443/foo/bar". If port number is omitted, it defaults to 80 for
ws:// and 443 for wss://. Available from the Dev Portal.
* appkey {string} [required] - Appkey used to access RTM.
Available from the Dev Portal.
* reconnect_interval {int} [optional] - Time period, in seconds, between
reconnection attempts. The timeout period between each successive
connection attempt increases, but starts with this value. Use
max_reconnect_interval to specify the maximum number of seconds between
reconnection attempts. Default is 1.
* max_reconnect_interval {int} [optional] - Maximum period of time, in
seconds, to wait between reconnection attempts. Default is 300.
* fail_count_threshold {int} [optional] - Number of times the SDK should
attempt to reconnect if the connection disconnects. Specify any value
that resolves to an integer. Default is inf (infinity).
* observer {client_observer} [optional] - Instance of a client observer
class, used to define functionality based on the state changes of a
Client.
Set this property with client.observer or in the `make_client(*args,
**kwargs)` or `Client(*args, **kwargs)` methods.
* restore_auth_on_reconnect {boolean} [optional] - Whether to restore
authentication after reconnects. Default is True.
* max_queue_size {int} [optional] - Limits the number of concurrent
requests in order to avoid out-of-memory situations.
For example, if max_queue_size is 10 and the client code sends an 11th
publish request before the reply to the first one has arrived, that
11th call to `client.publish` throws the
`satori.rtm.client.Full` exception.
* auth_delegate {AuthDelegate} [optional] - If the auth_delegate parameter
is present, the client yielded by make_client is already authenticated.
Client Observer
---------------
Use the client observer callback functions in an observer to implement
functionality based on the Client object state changes.
Set this observer with the `client.observer` property on the Client.
The following table lists the Client object states and the associated
callback functions:
============ ====================== =====================
Client State Enter Callback Exit Callback
============ ====================== =====================
Awaiting on_enter_awaiting() on_leave_awaiting()
Connecting on_enter_connecting() on_leave_connecting()
Connected on_enter_connected() on_leave_connected()
Stopped on_enter_stopped() on_leave_stopped()
Disposed on_enter_disposed() n/a
============ ====================== =====================
The following example shows a client observer with an implemented callback
function::
class ClientObserver(object):
def __init__(self):
self.connection_attempt_count = 0
def on_enter_connecting(self):
self.connection_attempt_count += 1
print('Establishing connection #{0}'.format(
self.connection_attempt_count))
client = Client(endpoint='<ENDPOINT>', appkey='<APP_KEY>')
client.observer = ClientObserver()
client.start()
client.stop()
client.start()
Subscription Observer
---------------------
Use callback functions in a subscription observer to implement functionality
based on the state changes for a channel subscription. The subscribe(channel,
SubscriptionMode.RELIABLE, subscription_observer, args) method takes
a subscription observer for the subscription_observer parameter.
.. note:: Depending on your application, these callbacks are optional, except
`on_subscription_data`. To process received messages, you must
implement the `on_subscription_data(data)` callback.
The following table lists the subscription states and the associated
callback functions for a subscription observer:
============= ======================== ========================
State Enter Callback Exit Callback
============= ======================== ========================
Subscribing on_enter_subscribing() on_leave_subscribing()
Subscribed on_enter_subscribed() on_leave_subscribed()
Unsubscribing on_enter_unsubscribing() on_leave_unsubscribing()
Unsubscribed on_enter_unsubscribed() on_leave_unsubscribed()
Failed on_enter_failed() on_leave_failed()
Deleted on_deleted() n/a
============= ======================== ========================
Other Callbacks
=================== ======================
Event Callback
=================== ======================
Created on_created()
Message(s) Received on_subscription_data()
=================== ======================
The following example shows a subscription observer with an implemented
callback function::
class SubscriptionObserver(object):
def __init__(self, channel):
self.message_count = 0
self.channel = channel
def on_subscription_data(self, data):
for message in data['messages']:
print('Got message {0}'.format(message))
self.message_count += len(data['messages'])
def on_enter_subscribed(self):
print('Subscription is now active')
def on_deleted(self):
print('Received {0} messages from channel "{1}"'.format(
self.message_count, self.channel))
subscription_observer = SubscriptionObserver(channel)
client.subscribe(
channel,
SubscriptionMode.RELIABLE,
subscription_observer)
# wait for some time
client.unsubscribe(channel)
"""
observer = kwargs.get('observer')
auth_delegate = kwargs.get('auth_delegate')
if 'auth_delegate' in kwargs:
del kwargs['auth_delegate']
client = Client(*args, **kwargs)
ready_event = threading.Event()
class Observer(ClientStateObserver):
def on_enter_connected(self):
ClientStateObserver.on_enter_connected(self)
ready_event.set()
def on_enter_stopped(self):
ClientStateObserver.on_enter_stopped(self)
ready_event.set()
client.observer = Observer()
client.start()
if not ready_event.wait(70):
if client.last_connecting_error():
client.dispose()
raise RuntimeError(
"Client connection timeout, last connection error: {0}".format(
client.last_connecting_error()))
else:
raise RuntimeError("Client connection timeout")
ready_event.clear()
if not client.is_connected():
client.dispose()
raise RuntimeError(
"Client connection error: {0}".format(
client.last_connecting_error()))
auth_mailbox = []
def auth_callback(auth_result):
auth_mailbox.append(auth_result)
ready_event.set()
if auth_delegate:
client.authenticate(auth_delegate, callback=auth_callback)
if not ready_event.wait(20):
client.dispose()
raise AuthError('Authentication process has timed out')
auth_result = auth_mailbox[0]
if type(auth_result) == auth.Error:
raise AuthError(auth_result.message)
logger.debug('Auth success in make_client')
try:
client.observer = observer
yield client
finally:
logger.info('make_client.finally')
client.dispose()
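# A minimal end-to-end sketch of make_client usage ('<ENDPOINT>' and
# '<APP_KEY>' are placeholders from the Dev Portal; the channel name is
# illustrative):
#
#     from satori.rtm.client import make_client
#
#     with make_client(endpoint='<ENDPOINT>', appkey='<APP_KEY>') as client:
#         client.publish('my_channel', {'answer': 42})
#     # on leaving the `with` block the client is stopped and disposed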
|
/satori-rtm-sdk-1.5.0.tar.gz/satori-rtm-sdk-1.5.0/satori/rtm/client.py
| 0.931236 | 0.227834 |
client.py
|
pypi
|
r'''
satori.rtm.auth
===============
You can perform role-based authentication with the Python SDK. This method
uses a role and role secret key from the Dev Portal and authenticates a
client session with that role.
The operations that the client can perform depend
on the permissions for the role.
Role-based authentication is a two-step process based on
HMAC with the MD5 hashing routine:
* The client obtains a nonce from the server in a handshake request.
* The client then sends an authorization request with its role secret key
hashed with the received nonce.
Use the provided class `satori.rtm.auth.RoleSecretAuthDelegate` to
create a delegate (that knows the authentication process) and use the
delegate with the authenticate(role_auth_delegate, auth_callback) method of the
`satori.rtm.client.Client` or `satori.rtm.connection.Connection` class. The SDK
calls `auth_callback` on the response from RTM.
Alternatively, you can use custom authentication, in which case
you must manually create the delegate to use with this method.
For more information, see
*Authentication and Authorization* in the online docs.
.. note:: Automatic reauthentication can be disabled by passing
'restore_auth_on_reconnect=False' to the Client constructor or
to make_client.
Use the client or connection authenticate method with the authentication
delegate and a callback to process the RTM response to the authentication
request::
secret_key = '<ROLE_SECRET_KEY>'
with sc.make_client(
endpoint=endpoint,
appkey=platform_appkey) as client:
role_auth_delegate = auth.RoleSecretAuthDelegate(\
'<USER_ROLE>', secret_key)
auth_ack = threading.Event()
def auth_callback(auth_result):
if type(auth_result) == auth.Done:
print('Auth success')
auth_ack.set()
else:
print('Auth failure: {0}'.format(auth_result))
auth_ack.set()
client.authenticate(role_auth_delegate, auth_callback)
if not auth_ack.wait(10):
raise RuntimeError('No authentication reply in reasonable time')
'''
from __future__ import print_function
from collections import namedtuple as t
import base64
import hashlib
import hmac
Authenticate = t('Authenticate', ['method', 'credentials', 'callback'])
AuthenticateOK = t('AuthenticateOK', [])
Handshake = t('Handshake', ['method', 'data', 'callback'])
HandshakeOK = t('HandshakeOK', ['data'])
Done = t('Done', [])
Error = t('Error', ['message'])
class AuthDelegate(object):
def start(self):
return Done()
class RoleSecretAuthDelegate(AuthDelegate):
def __init__(self, role, role_secret):
self.role = role
if isinstance(role_secret, bytes):
self.role_secret = role_secret
else:
self.role_secret = role_secret.encode('utf8')
def start(self):
method = u'role_secret'
def after_handshake(reply):
if type(reply) == Error:
return reply
assert type(reply) == HandshakeOK
if 'nonce' not in reply.data:
return Error('No nonce in handshake reply')
nonce = reply.data[u'nonce'].encode('utf8')
binary_hash = hmac.new(
self.role_secret, nonce, hashlib.md5).digest()
ascii_hash = base64.b64encode(binary_hash)
return Authenticate(
method,
{u'hash': ascii_hash.decode('ascii')},
after_authenticate)
def after_authenticate(reply):
if type(reply) == Error:
return reply
assert type(reply) == AuthenticateOK
return Done()
return Handshake(method, {u'role': self.role}, after_handshake)
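# For reference, the hash sent in the authenticate request above is
# base64(HMAC-MD5(role_secret, nonce)). A standalone sketch with hypothetical
# secret and nonce values:
#
#     import base64, hashlib, hmac
#     secret = b'my_role_secret'
#     nonce = b'nonce_from_handshake_reply'
#     digest = hmac.new(secret, nonce, hashlib.md5).digest()
#     ascii_hash = base64.b64encode(digest).decode('ascii')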
|
/satori-rtm-sdk-1.5.0.tar.gz/satori-rtm-sdk-1.5.0/satori/rtm/auth.py
| 0.824603 | 0.386445 |
auth.py
|
pypi
|
import os
import struct
from miniws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from miniws4py.compat import unicode, py3k
__all__ = ['Message', 'TextMessage', 'BinaryMessage', 'CloseControlMessage',
'PingControlMessage', 'PongControlMessage']
class Message(object):
def __init__(self, opcode, data=b'', encoding='utf-8'):
"""
A message is an application-level entity. It is usually built
from one or many frames. The protocol defines several kinds
of messages, which are grouped into two sets:
* data messages, which can be text or binary typed
* control messages, which provide a mechanism to perform
in-band control communication between peers
The ``opcode`` indicates the message type and ``data`` is
the possible message payload.
The payload is held internally as a :func:`bytearray`, since bytearrays
are faster than pure strings for append operations.
Unicode data will be encoded using the provided ``encoding``.
"""
self.opcode = opcode
self._completed = False
self.encoding = encoding
if isinstance(data, unicode):
if not encoding:
raise TypeError("unicode data without an encoding")
data = data.encode(encoding)
elif isinstance(data, bytearray):
data = bytes(data)
elif not isinstance(data, bytes):
raise TypeError("%s is not a supported data type" % type(data))
self.data = data
def single(self, masked=False):
return Frame(body=self.data, opcode=self.opcode, fin=1, masked=masked).build()
def fragment(self, first=False, last=False, masked=False):
"""
Returns a :class:`miniws4py.framing.Frame` bytes.
The behavior depends on the given flags:
* ``first``: the frame uses ``self.opcode`` else a continuation opcode
* ``last``: the frame has its ``fin`` bit set
"""
fin = 1 if last is True else 0
opcode = self.opcode if first is True else OPCODE_CONTINUATION
return Frame(body=self.data,
opcode=opcode,
fin=fin,
masked=masked).build()
@property
def completed(self):
"""
Indicates that the message is complete, meaning
the frame's ``fin`` bit was set.
"""
return self._completed
@completed.setter
def completed(self, state):
"""
Sets the state for this message. Usually
set by the stream's parser.
"""
self._completed = state
def extend(self, data):
"""
Add more ``data`` to the message.
"""
if isinstance(data, bytes):
self.data += data
elif isinstance(data, bytearray):
self.data += bytes(data)
elif isinstance(data, unicode):
self.data += data.encode(self.encoding)
else:
raise TypeError("%s is not a supported data type" % type(data))
def __len__(self):
return len(self.__unicode__())
def __str__(self):
if py3k:
return self.data.decode(self.encoding)
return self.data
def __unicode__(self):
return self.data.decode(self.encoding)
class TextMessage(Message):
def __init__(self, text=None):
Message.__init__(self, OPCODE_TEXT, text)
@property
def is_binary(self):
return False
@property
def is_text(self):
return True
class BinaryMessage(Message):
def __init__(self, bytes=None):
Message.__init__(self, OPCODE_BINARY, bytes, encoding=None)
@property
def is_binary(self):
return True
@property
def is_text(self):
return False
def __len__(self):
return len(self.data)
class CloseControlMessage(Message):
def __init__(self, code=1000, reason=''):
data = b""
if code:
data += struct.pack("!H", code)
if reason is not None:
if isinstance(reason, unicode):
reason = reason.encode('utf-8')
data += reason
Message.__init__(self, OPCODE_CLOSE, data, 'utf-8')
self.code = code
self.reason = reason
def __str__(self):
if py3k:
return self.reason.decode('utf-8')
return self.reason
def __unicode__(self):
return self.reason.decode(self.encoding)
class PingControlMessage(Message):
def __init__(self, data=None):
Message.__init__(self, OPCODE_PING, data)
class PongControlMessage(Message):
def __init__(self, data):
Message.__init__(self, OPCODE_PONG, data)
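# Illustrative sketch: build wire bytes for a text message, either as a single
# frame or as two fragments (client-to-server frames are masked):
#
#     msg = TextMessage(u'hello world')
#     wire = msg.single(masked=True)
#     first = TextMessage(u'hello ').fragment(first=True, masked=True)
#     rest = TextMessage(u'world').fragment(last=True, masked=True)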
|
/satori_sdk_python-1.0.3-py3-none-any.whl/miniws4py/messaging.py
| 0.772273 | 0.283698 |
messaging.py
|
pypi
|
import struct
from struct import unpack
from miniws4py.utf8validator import Utf8Validator
from miniws4py.messaging import TextMessage, BinaryMessage, CloseControlMessage,\
PingControlMessage, PongControlMessage
from miniws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from miniws4py.exc import FrameTooLargeException, ProtocolException
from miniws4py.compat import py3k
VALID_CLOSING_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011]
class Stream(object):
def __init__(self):
""" Represents a websocket stream of bytes flowing in and out.
The stream doesn't know about the data provider itself and
doesn't even know about sockets. Instead the stream simply
yields for more bytes whenever it requires them. The stream owner
is responsible for providing the stream with those bytes until
a frame can be interpreted.
.. code-block:: python
:linenos:
>>> s = Stream()
>>> s.parser.send(BYTES)
>>> s.has_messages
False
>>> s.parser.send(MORE_BYTES)
>>> s.has_messages
True
>>> s.message
<TextMessage ... >
"""
self.message = None
"""
Parsed text or binary messages. Whenever the parser
reads more bytes from a fragment message, those bytes
are appended to the most recent message.
"""
self.pings = []
"""
Parsed ping control messages. They are instances of
:class:`miniws4py.messaging.PingControlMessage`
"""
self.pongs = []
"""
Parsed pong control messages. They are instances of
:class:`miniws4py.messaging.PongControlMessage`
"""
self.closing = None
"""
Parsed close control message. Instance of
:class:`miniws4py.messaging.CloseControlMessage`
"""
self.errors = []
"""
Detected errors while parsing. Instances of
:class:`miniws4py.messaging.CloseControlMessage`
"""
self._parser = None
"""
Parser in charge of processing the bytes it is fed.
"""
@property
def parser(self):
if self._parser is None:
self._parser = self.receiver()
# Python generators must be initialized once.
next(self.parser)
return self._parser
def _cleanup(self):
"""
Frees the stream's resources rendering it unusable.
"""
self.message = None
if self._parser is not None:
if not self._parser.gi_running:
self._parser.close()
self._parser = None
self.errors = None
self.pings = None
self.pongs = None
self.closing = None
def text_message(self, text):
"""
Returns a :class:`miniws4py.messaging.TextMessage` instance
ready to be built. Convenience method so
that the caller doesn't need to import the
:class:`miniws4py.messaging.TextMessage` class itself.
"""
return TextMessage(text=text)
def binary_message(self, bytes):
"""
Returns a :class:`miniws4py.messaging.BinaryMessage` instance
ready to be built. Convenience method so
that the caller doesn't need to import the
:class:`miniws4py.messaging.BinaryMessage` class itself.
"""
return BinaryMessage(bytes)
@property
def has_message(self):
"""
Checks if the stream has received any message
which, if fragmented, is now completed.
"""
if self.message is not None:
return self.message.completed
return False
def close(self, code=1000, reason=''):
"""
Returns a close control message built from
a :class:`miniws4py.messaging.CloseControlMessage` instance,
using the given status ``code`` and ``reason`` message.
"""
return CloseControlMessage(code=code, reason=reason)
def ping(self, data=''):
"""
Returns the bytes of a single-frame ping control message built
from a :class:`miniws4py.messaging.PingControlMessage` instance.
"""
return PingControlMessage(data).single()
def pong(self, data=''):
"""
Returns the bytes of a single-frame pong control message built
from a :class:`miniws4py.messaging.PongControlMessage` instance.
"""
return PongControlMessage(data).single()
def receiver(self):
"""
Parser that keeps trying to interpret the bytes it is fed as
incoming frames that are part of a message.
Control messages are single frames only, while data messages, like text
and binary, may be fragmented across frames.
It works by instantiating a :class:`miniws4py.framing.Frame` object,
then running its parser generator, which yields how many bytes
it requires to perform its task. The stream parser yields this value
to its caller and feeds the frame parser.
When the frame parser raises :exc:`StopIteration`, the stream parser
tries to make sense of the parsed frame. It dispatches the frame's bytes
to the most appropriate message type based on the frame's opcode.
Overall this makes the stream parser totally agnostic to
the data provider.
"""
utf8validator = Utf8Validator()
running = True
frame = None
while running:
frame = Frame()
while 1:
try:
some_bytes = (yield next(frame.parser))
frame.parser.send(some_bytes)
except GeneratorExit:
running = False
break
except StopIteration:
frame._cleanup()
some_bytes = frame.body
if some_bytes:
some_bytes = bytearray(some_bytes)
if frame.opcode == OPCODE_TEXT:
if self.message and not self.message.completed:
# We got a text frame before we completed the previous one
msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
self.errors.append(msg)
break
m = TextMessage(some_bytes)
m.completed = (frame.fin == 1)
self.message = m
if some_bytes:
is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)
if not is_valid or (m.completed and not end_on_code_point):
self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
break
elif frame.opcode == OPCODE_BINARY:
if self.message and not self.message.completed:
# We got a binary frame before we completed the previous one
msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
self.errors.append(msg)
break
m = BinaryMessage(some_bytes)
m.completed = (frame.fin == 1)
self.message = m
elif frame.opcode == OPCODE_CONTINUATION:
m = self.message
if m is None:
self.errors.append(CloseControlMessage(code=1002, reason='Message not started yet'))
break
m.extend(some_bytes)
m.completed = (frame.fin == 1)
if m.opcode == OPCODE_TEXT:
if some_bytes:
is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)
if not is_valid or (m.completed and not end_on_code_point):
self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
break
elif frame.opcode == OPCODE_CLOSE:
code = 1000
reason = ""
if frame.payload_length == 0:
self.closing = CloseControlMessage(code=1000)
elif frame.payload_length == 1:
self.closing = CloseControlMessage(code=1002, reason='Payload has invalid length')
else:
try:
# at this stage, some_bytes have been unmasked
# so actually are held in a bytearray
code = int(unpack("!H", bytes(some_bytes[0:2]))[0])
except struct.error:
code = 1002
reason = 'Failed at decoding closing code'
else:
# Those codes are reserved or plainly forbidden
if code not in VALID_CLOSING_CODES and not (2999 < code < 5000):
reason = 'Invalid Closing Frame Code: %d' % code
code = 1002
elif frame.payload_length > 1:
reason = frame.body[2:]
if not py3k: reason = bytearray(reason)
is_valid, end_on_code_point, _, _ = utf8validator.validate(reason)
if not is_valid or not end_on_code_point:
self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
break
reason = bytes(reason)
self.closing = CloseControlMessage(code=code, reason=reason)
elif frame.opcode == OPCODE_PING:
self.pings.append(PingControlMessage(some_bytes))
elif frame.opcode == OPCODE_PONG:
self.pongs.append(PongControlMessage(some_bytes))
else:
self.errors.append(CloseControlMessage(code=1003))
break
except ProtocolException:
self.errors.append(CloseControlMessage(code=1002))
break
except FrameTooLargeException:
self.errors.append(CloseControlMessage(code=1002, reason="Frame was too large"))
break
frame._cleanup()
frame.body = None
frame = None
if self.message is not None and self.message.completed:
utf8validator.reset()
utf8validator.reset()
utf8validator = None
self._cleanup()
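# Illustrative sketch of driving a Stream by hand, mirroring the class
# docstring: feed it the bytes of a complete unmasked text frame and the
# parsed message becomes available.
#
#     from miniws4py.messaging import TextMessage
#     s = Stream()
#     s.parser.send(TextMessage(u'hi').single())
#     assert s.has_message and str(s.message) == 'hi'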
|
/satori_sdk_python-1.0.3-py3-none-any.whl/miniws4py/streaming.py
| 0.726134 | 0.229136 |
streaming.py
|
pypi
|
from __future__ import print_function
import itertools
import posixpath
try:
import rapidjson as json
except ImportError:
import json
import re
import sys
import threading
import time
import satori.rtm.logger
from satori.rtm.internal_connection_miniws4py import RtmWsClient
import satori.rtm.auth as auth
ping_interval_in_seconds = 60
high_ack_count_watermark = 20000
# FIXME: *_sync functions are very similar
class Connection(object):
"""
You can use the Connection object as long as it stays connected to RTM.
If a disconnect occurs, you must create a new Connection object,
resubscribe to all channels, and perform authentication again, if necessary.
.. note:: The `satori.rtm.client` module includes a default implementation
that handles disconnects automatically, reconnecting and resubscribing
as necessary.
"""
def __init__(self, endpoint, appkey, delegate=None):
"""
Description
Constructor for the Connection class. Creates and returns an instance of the
Connection class. Use this function to create an instance from which you can
subscribe and publish, authenticate an application user, and manage the
WebSocket connection to RTM. The Connection class allows you to
publish and subscribe synchronously and asynchronously.
The `endpoint` and `appkey` parameters are required. Optionally, you can
choose to create a delegate to process received messages and handle
connection and channel errors. To set the delegate property, specify it in
the constructor or use `connection.delegate = MyCustomDelegate()`.
Returns
Connection
Parameters
* endpoint {string} [required] - RTM endpoint as a string.
* appkey {string} [required] - Appkey used to access RTM. Available from the
Dev Portal.
* delegate {object} [optional] - Delegate object to handle received
messages, channel errors, internal errors, and closed connections.
Syntax
::
...
connection = Connection(endpoint, appkey, delegate=None)
after_receive = threading.Event()
class ConnectionDelegate(object):
def on_connection_closed(self):
print('connection closed')
def on_internal_error(error):
print('internal error', error)
def on_subscription_data(data):
print('data:', data)
after_receive.set()
connection.delegate = ConnectionDelegate()
connection.start()
"""
assert endpoint
assert appkey
assert endpoint.startswith('ws://') or endpoint.startswith('wss://'),\
'Endpoint must start with "ws(s)://" but "%s" does not' % endpoint
self.logger = satori.rtm.logger.logger
re_version = re.compile(r'/v(\d+)$')
version_match = re_version.search(endpoint)
if version_match:
warning = (
'Specifying a version as a part of the endpoint is deprecated.'
' Please remove the {0} from {1}.'.format(
version_match.group(), endpoint))
print(warning, file=sys.stderr)
endpoint = re_version.sub('', endpoint)
self.url = posixpath.join(endpoint, 'v2')
self.url += '?appkey={0}'.format(appkey)
self.delegate = delegate
self.ack_callbacks_by_id = {}
self.action_id_iterator = itertools.count()
self._auth_lock = threading.RLock()
self._next_auth_action = None
self.ws = None
self._last_ping_time = None
self._last_ponged_time = None
self._time_to_stop_pinging = False
self._auth_callback = None
self._ping_thread = None
self._ws_thread = None
def __del__(self):
try:
self.stop()
except Exception:
pass
def start(self):
"""
Description
Starts a WebSocket connection to RTM for the Connection object.
You must call the `start()` method before publish or subscribe requests
made with the Connection object methods can complete successfully.
"""
if self.ws:
raise RuntimeError('Connection is already open')
self.logger.debug('connection.start %s', self.url)
self.ws = RtmWsClient(self.url)
self.ws.delegate = self
try:
self.ws.connect()
except Exception:
self.ws.delegate = None
self.ws = None
raise
self._ws_thread = threading.Thread(target=self.ws.run)
self._ws_thread.name = 'WebSocketReader'
self._ws_thread.daemon = True
self._ws_thread.start()
def stop(self):
"""
Description
Closes a WebSocket connection to RTM for the Connection object.
Use this method if you want to explicitly stop all interaction with RTM.
After you use this method, you can no longer publish or subscribe
to any channels for the Connection object. You must use `start()` to restart
the WebSocket connection and then publish or subscribe.
"""
self._time_to_stop_pinging = True
if self.ws:
try:
self.ws.close()
self.logger.debug('Waiting for WS thread')
self._ws_thread.join()
self.logger.debug('WS thread finished normally')
except OSError as e:
# we could be trying to write a goodbye
# into already closed socket
self.logger.exception(e)
else:
raise RuntimeError('Connection is not open yet')
def send(self, payload):
"""
Description
Synchronously sends the specified message to RTM.
This is a lower-level method suitable for manually performing
PDU serialization.
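Syntax
::
# Illustrative sketch: send a hand-serialized publish PDU; all field
# values are examples only.
connection.start()
connection.send(
'{"action": "rtm/publish",'
' "body": {"channel": "my_channel", "message": "hi"}, "id": 0}')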
"""
if not self.ws:
raise RuntimeError(
'Attempting to send data, but connection is not open yet')
self.logger.debug('Sending payload %s', payload)
try:
self.ws.send(payload)
except Exception as e:
self.logger.exception(e)
self.on_ws_closed()
raise
def action(self, name, body, callback=None):
"""
Description
Synchronously sends a PDU created with the specified `action` and `body` to
RTM. This is a lower-level method that can be used, for example, to take
advantage of changes to PDU specifications by Satori without requiring an
updated SDK.
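Syntax
::
# Illustrative sketch: publish via a hand-built PDU body; the channel
# name and callback are examples only.
def ack_callback(pdu):
print('ack:', pdu)
connection.action(
'rtm/publish',
{'channel': 'my_channel', 'message': 'hi'},
ack_callback)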
"""
payload = {'action': name, 'body': body}
if callback:
# throttle if waiting for many acks already
if len(self.ack_callbacks_by_id) >= high_ack_count_watermark:
self.logger.debug('Throttling %s request', name)
time.sleep(0.001)
action_id = next(self.action_id_iterator)
payload['id'] = action_id
self.ack_callbacks_by_id[action_id] = callback
self.send(json.dumps(payload))
def publish(self, channel, message, callback=None):
"""
Description
Publishes a message to the specified channel.
The channel and message parameters are required. The `message` parameter can
be any JSON-supported value. For more information, see www.json.org.
By default, this method does not acknowledge the completion of the publish
operation. Optionally, you can specify a callback function to process the
response from RTM. If you specify a callback, RTM
returns an object that represents the PDU response to
the publish request. For more information about PDUs, see *RTM API* in the
online docs.
Because this is an asynchronous method, you can also use the Python
`threading` module to create an event to track completion of the publish
operation in the callback function.
Parameters
* message {string} [required] - JSON value to publish as a message. It must
be serializable using `json.dumps` from the Python standard `JSON` module.
* channel {string} [required] - Name of the channel to which you want to
publish.
* callback {function} [optional] - Callback function to execute on the PDU
returned by RTM as a response to the publish request.
Syntax
::
connection.start()
connection.publish("My Channel", "Message text to publish")
"""
self.action(
'rtm/publish',
{'channel': channel, 'message': message},
callback)
def read(self, channel, args=None, callback=None):
"""
Description
Asynchronously reads a value from the specified channel. This function
has no return value, but you can inspect
the response PDU in the callback function.
You can also use the `args` parameter to add additional JSON key-value pairs
to the PDU in the read request that the SDK sends
to RTM. For more information about PDUs, see *RTM API* in the online docs.
By default, this method does not acknowledge the completion of the read
operation. Optionally, you can specify a callback function to process the
response from RTM. If you specify a callback, RTM
returns an object that represents the response to
the read request as a PDU.
Parameters
* channel {string} [required] - Name of the channel to read from.
* callback {function} [optional] - Callback function to execute on the
response returned to the subscribe request as a PDU.
* args {object} [optional] - Any JSON key-value pairs to send in the
subscribe request. See *Subscribe PDU* in the online docs.
Syntax
::
connection.start()
def read_callback(reply):
print('Read reply:', reply)
connection.read(channel, callback=read_callback)
"""
body = args or {}
body['channel'] = channel
self.action('rtm/read', body, callback)
def read_sync(self, channel, args=None, timeout=60):
"""
Description
Synchronously reads a message from the specified channel.
This method generates a `RuntimeError` if the read operation does not
complete within the timeout period.
Returns
JSON value
Parameters
* channel {string} [required] - Name of the channel to read from.
* timeout {int} [optional] - Amount of time, in seconds, to allow RTM
to complete the read operation before it generates an error.
Default is 60.
Syntax
::
connection.start()
message = 'hello'
connection.publish_sync(channel, message)
value = connection.read_sync(channel)
# value should be "hello"
...
"""
mailbox = []
time_to_return = threading.Event()
def callback(ack):
mailbox.append(ack)
time_to_return.set()
body = args or {}
body['channel'] = channel
self.action('rtm/read', body, callback)
if not time_to_return.wait(timeout):
raise RuntimeError('Timeout in read_sync')
ack = mailbox[0]
if ack['action'] == 'rtm/read/ok':
return ack['body']['message']
raise RuntimeError(ack)
def write(self, channel, value, callback=None):
"""
Description
Asynchronously writes a value into the specified channel.
The `channel` and `value` parameters are required. The `value` parameter can
be any JSON-supported value. For more information, see www.json.org.
By default, this method does not acknowledge the completion of the publish
operation. Optionally, you can specify a callback function to process the
response from RTM. If you specify a callback, RTM returns an object that
represents the response to the publish request as a PDU. For more
information about PDUs, see the RTM API Reference.
Because this is an asynchronous method, you can also use the Python
`threading` module to create an event to track completion of the write
operation in the callback function.
Parameters
* message {string} [required] - JSON value to publish as a message. It must be
serializable using `json.dumps` from the Python standard `JSON` module.
* channel {string} [required] - Name of the channel.
* callback {function} [optional] - Callback function to execute on the
response to the publish request, returned by RTM as a PDU.
Syntax
::
connection.start()
connection.write("my_dog", {"latitude": 52.52, "longitude":13.405})
"""
self.action(
'rtm/write',
{'channel': channel, 'message': value},
callback)
def delete(self, key, callback=None):
"""
Description
Asynchronously deletes any value from the specified channel.
Parameters
* channel {string} [required] - Name of the channel.
* callback {function} [optional] - Callback to execute on the response
PDU from RTM. The response PDU is passed as a parameter to this function.
RTM does not send a response PDU if a callback is not specified.
Syntax
::
connection.start()
mailbox = []
event = threading.Event()
def delete_callback(reply):
mailbox.append(reply)
event.set()
connection.delete("old_stuff", callback=delete_callback)
if not event.wait(5):
print('Delete request timed out')
else:
print('Delete request returned {0}'.format(mailbox[0]))
"""
self.action('rtm/delete', {'channel': key}, callback)
def publish_sync(self, channel, message, timeout=60):
"""
Description
Synchronously publishes a message to the specified channel and returns the
`position` property for the message stream position to which the message was
published. For more information about the position value, see *RTM API*
in the online docs.
This method generates a `RuntimeError` if the publish operation does not
complete within the timeout period.
The message parameter can be any JSON-supported value. For more information,
see www.json.org.
.. note:: To send a publish request asynchronously for a Connection object,
use publish(channel, message, callback).
Returns
position
Parameters
* message {string} [required] - JSON value to publish as a message. It must be
serializable using `json.dumps` from the Python standard `JSON` module.
* channel {string} [required] - Name of the channel.
* timeout {int} [optional] - Amount of time, in seconds, to allow RTM
to complete the publish operation before it generates an error.
Default is 60.
Syntax
::
connection.start()
position = connection.publish_sync(channel, message)
connection.subscribe_sync(channel, {'position': position})
...
"""
error = []
position = []
time_to_return = threading.Event()
def callback(ack):
if ack['action'] != 'rtm/publish/ok':
error.append(ack)
else:
position.append(ack['body']['position'])
time_to_return.set()
self.publish(channel, message, callback)
if not time_to_return.wait(timeout):
raise RuntimeError('Timeout in publish_sync')
if error:
raise RuntimeError(error[0])
return position[0]
def subscribe(
self, channel_or_subscription_id,
args=None, callback=None):
"""
Description
Subscribes to the specified channel.
You can use the `args` parameter to add additional JSON values to the
Protocol Data Unit (PDU) in the subscribe request that the SDK sends to RTM.
For more information about PDUs, see *RTM API* in the online docs.
By default, this method does not acknowledge the completion of the subscribe
operation. Optionally, you can specify a callback function to process the
response from RTM. If you specify a callback, RTM
returns an object that represents the PDU response to
the subscribe request.
.. note:: To receive data published to a channel after you subscribe to it,
use the `on_subscription_data()` callback function in a
subscription observer class.
Parameters
* channel {string} [required] - Name of the channel.
* callback {function} [optional] - Callback function to execute on the
response to the subscribe request, returned by RTM as a PDU.
* args {object} [optional] - Any JSON key-value pairs to send in the
subscribe request. See *Subscribe PDU* in the online docs.
Syntax
::
connection.start()
position = connection.publish_sync(channel, message)
connection.subscribe(channel, {'position': position})
"""
if args is not None and args.get('filter'):
body = {'subscription_id': channel_or_subscription_id}
else:
body = {'channel': channel_or_subscription_id}
if args:
body.update(args)
self.action('rtm/subscribe', body, callback)
def subscribe_sync(self, channel, args=None, timeout=60):
"""
Description
Subscribes to the specified channel and generates a `RuntimeError` if the
request does not complete within the timeout period.
You can use the `args` parameter to add additional JSON values to the PDU
in the subscribe request that the SDK sends to RTM.
For more information about PDUs, see *RTM API* in the online docs.
Parameters
* channel {string} [required] - Name of the channel.
* args {object} [optional] - Any additional JSON values to send in the
subscribe request.
* timeout {int} [optional] - Amount of time, in seconds, to allow RTM
to complete the subscribe operation before it generates an error.
Default is 60.
Syntax
::
...
connection.start()
position = connection.publish_sync(channel, message)
connection.subscribe_sync(channel, {'position': position})
...
"""
error = []
time_to_return = threading.Event()
def callback(ack):
if ack['action'] != 'rtm/subscribe/ok':
error.append(ack)
time_to_return.set()
self.subscribe(channel, args, callback=callback)
if not time_to_return.wait(timeout):
raise RuntimeError('Timeout in subscribe_sync')
if error:
raise RuntimeError(error[0])
def unsubscribe(self, channel, callback=None):
"""
Description
Unsubscribes from the specified channel.
After you unsubscribe, the application no longer receives messages for the
channel until after RTM completes the unsubscribe operation.
By default, this method does not acknowledge the completion of the
unsubscribe operation. Optionally, you can specify a callback function to
process the response from RTM. If you specify a callback, RTM
returns an object that represents the PDU response to
the unsubscribe request. For more information about PDUs, see *RTM API*
in the online docs.
Parameters
* channel {string} [required] - Name of the channel.
* callback {function} [optional] - Callback function to execute on the
response to the unsubscribe request, returned by RTM as a PDU.
Syntax
::
...
connection.start()
position = connection.publish_sync(channel, message)
connection.subscribe(channel, {'position': position})
...
connection.unsubscribe(channel)
...
"""
self.action('rtm/unsubscribe', {'subscription_id': channel}, callback)
def unsubscribe_sync(self, channel, timeout=60):
"""
unsubscribe_sync(channel, timeout)
----------------------------------
Description
Unsubscribes from all messages for a channel and generates a `RuntimeError`
if the unsubscribe operation does not complete within the timeout period.
Parameters
* channel {string} [required] - Name of the channel.
* timeout {int} [optional] - Amount of time, in seconds, to allow RTM
to complete the unsubscribe operation before it generates an
error. Default is 60.
Syntax
::
...
connection.start()
position = connection.publish_sync(channel, message)
connection.subscribe_sync(channel, {'position': position})
...
unsubscribe_sync(channel)
...
"""
error = []
time_to_return = threading.Event()
def callback(ack):
if ack['action'] != 'rtm/unsubscribe/ok':
error.append(ack)
time_to_return.set()
self.unsubscribe(channel, callback)
if not time_to_return.wait(timeout):
raise RuntimeError('Timeout in unsubscribe_sync')
if error:
raise RuntimeError(error[0])
def search(self, prefix, callback):
"""
Description
Asynchronously performs a channel search for a given user-defined prefix.
This method passes RTM replies to the callback. RTM may send multiple
responses to the same search request: zero or more search result PDUs with
an action of `rtm/search/data` (depending on the results of the search).
Each channel found is only sent once.
After the search result PDUs, RTM follows with a positive response PDU:
`rtm/search/ok`. The callback must inspect each reply object it receives
for the reply['body']['channels'] list. The callback is called on
each response.
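Syntax
::
# Illustrative sketch: collect channel names from all search replies;
# channel lists may arrive in 'rtm/search/data' replies and in the
# final 'rtm/search/ok' reply.
found = []
def search_callback(reply):
found.extend(reply.get('body', {}).get('channels', []))
connection.search('my_prefix', search_callback)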
"""
self.action('rtm/search', {'prefix': prefix}, callback)
def authenticate(self, auth_delegate, callback):
"""
authenticate(auth_delegate, callback)
-------------------------------------
Description
Validates the identity of a client after connecting to RTM
with the Connection module. After the user authenticates with
RTM, the operations that the client can perform depend on the role.
Since the authentication process is an asynchronous operation, the callback
function is required. The callback function processes the PDU response from
RTM.
For more information about authentication, see *Authentication and
Authorization* in the online docs.
Parameters
* auth_delegate {AuthDelegate | RoleSecretAuthDelegate} [required] - An
authentication delegate object created with
the `RoleSecretAuthDelegate(role, role_key)` method for
the role-based authentication process.
* callback {function} [required] - Function to execute after RTM
returns a response.
Syntax
::
secret_key = '<ROLE_SECRET_KEY>'
auth_delegate = auth.RoleSecretAuthDelegate('<ROLE>', secret_key)
auth_event = threading.Event()
def auth_callback(auth_result):
if type(auth_result) == auth.Done:
auth_event.set()
connection.authenticate(auth_delegate, auth_callback)
auth_event.wait()
"""
with self._auth_lock:
if self._next_auth_action:
return callback(
auth.Error('Authentication is already in progress'))
self._next_auth_action = auth_delegate.start()
self._auth_callback = callback
return self._handle_next_auth_action()
def _handle_next_auth_action(self):
with self._auth_lock:
if type(self._next_auth_action) in [auth.Done, auth.Error]:
self._auth_callback(self._next_auth_action)
self._auth_callback = None
self._next_auth_action = None
return
if type(self._next_auth_action) == auth.Handshake:
action_id = next(self.action_id_iterator)
payload = json.dumps({
'action': 'auth/handshake',
'body': {
'method': self._next_auth_action.method,
'data': self._next_auth_action.data},
'id': action_id
})
return self.send(payload)
elif type(self._next_auth_action) == auth.Authenticate:
action_id = next(self.action_id_iterator)
payload = json.dumps({
'action': 'auth/authenticate',
'body': {
'method': self._next_auth_action.method,
'credentials': self._next_auth_action.credentials},
'id': action_id
})
return self.send(payload)
self._auth_callback(auth.Error(
'auth_delegate returned {0} instead of an auth action'.format(
self._next_auth_action)))
self._auth_callback = None
def on_ws_opened(self):
self.logger.debug('on_ws_opened')
self._ping_thread = threading.Thread(
target=self._ping_until_the_end,
name='Pinger')
self._ping_thread.daemon = True
self._ping_thread.start()
def _ping_until_the_end(self):
self.logger.debug('Starting ping thread')
try:
while not self._time_to_stop_pinging:
time.sleep(ping_interval_in_seconds)
self.logger.debug('send ping')
self.ws.send_ping()
if self._last_ping_time:
if not self._last_ponged_time or\
self._last_ping_time > self._last_ponged_time:
self.logger.error(
'Server has not responded to WS ping')
try:
ws = self.ws
self.on_ws_closed()
ws.delegate = None
ws.close()
except Exception as e:
self.logger.exception(e)
self._last_ping_time = time.time()
self.logger.debug('pinging')
except Exception:
pass
self.logger.debug('Finishing ping thread')
def on_ws_closed(self):
self._time_to_stop_pinging = True
if self.delegate:
self.delegate.on_connection_closed()
if self.ws:
self.ws.delegate = None
try:
self.ws.close()
except Exception as e:
self.logger.exception(e)
self.ws = None
def on_ws_ponged(self):
self._last_ponged_time = time.time()
def on_auth_reply(self, reply):
self.logger.debug('on_auth_reply: %s', reply)
with self._auth_lock:
if self._next_auth_action:
continuation = getattr(self._next_auth_action, 'callback')
if continuation:
self._next_auth_action = continuation(reply)
self._handle_next_auth_action()
else:
self._auth_callback(reply)
else:
self.logger.error(
'Unexpected auth reply %s while not doing auth',
reply)
def on_subscription_data(self, data):
if self.delegate:
self.delegate.on_subscription_data(data)
def on_subscription_error(self, payload):
channel = payload.get('subscription_id')
if self.delegate:
self.delegate.on_subscription_error(channel, payload)
def on_fast_forward(self, payload):
channel = payload.get('subscription_id')
if self.delegate:
self.delegate.on_fast_forward(channel, payload)
def on_internal_error(self, message):
if self.delegate:
self.delegate.on_internal_error(message)
def on_incoming_text_frame(self, incoming_text):
self.logger.debug('incoming text: %s', incoming_text)
self.on_ws_ponged()
try:
if isinstance(incoming_text, bytes):
incoming_text = incoming_text.decode('utf-8')
incoming_json = json.loads(incoming_text)
except ValueError as e:
self.logger.exception(e)
message = '"{0}" is not valid JSON'.format(incoming_text)
return self.on_internal_error(message)
action = incoming_json.get('action')
if not action:
message = '"{0}" has no "action" field'.format(incoming_text)
return self.on_internal_error(message)
body = incoming_json.get('body')
maybe_bodyless_actions = ['rtm/delete/ok', 'rtm/publish/ok']
if body is None and action not in maybe_bodyless_actions:
message = '"{0}" has no "body" field'.format(incoming_text)
self.logger.error(message)
return self.on_internal_error(message)
if action == 'rtm/subscription/data':
return self.on_subscription_data(body)
elif action == 'rtm/subscription/error':
return self.on_subscription_error(body)
elif action == 'rtm/subscription/info'\
and body.get('info') == 'fast_forward':
return self.on_fast_forward(body)
if action == '/error':
return self.on_internal_error(
'General error: {0}'.format(incoming_json))
id_ = incoming_json.get('id')
if id_ is None:
message = '"{0}" has no "id" field'.format(incoming_text)
return self.on_internal_error(message)
if action.startswith('auth/'):
def convert(pdu):
if pdu['action'] == 'auth/handshake/ok':
return auth.HandshakeOK(pdu['body']['data'])
if pdu['action'] == 'auth/authenticate/ok':
return auth.AuthenticateOK()
return auth.Error(pdu['body']['reason'])
return self.on_auth_reply(convert(incoming_json))
callback = self.ack_callbacks_by_id.get(id_)
if callback:
callback(incoming_json)
if not incoming_json.get('action').endswith('/data'):
del self.ack_callbacks_by_id[id_]
def enable_wsaccel():
"""
Use optimized Cython versions of CPU-intensive routines
provided by the `wsaccel` package.
"""
import wsaccel.utf8validator
import wsaccel.xormask
import miniws4py.streaming
import miniws4py.framing
miniws4py.streaming.Utf8Validator = wsaccel.utf8validator.Utf8Validator
def fast_mask(data):
masker = wsaccel.xormask.XorMaskerSimple(b'\xFF\xFF\xFF\xFF')
return masker.process(data)
miniws4py.framing.mask = fast_mask
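# Illustrative end-to-end sketch of the synchronous Connection API (endpoint,
# appkey and channel name are placeholders):
#
#     # enable_wsaccel()  # optional, requires the `wsaccel` package
#     connection = Connection('wss://<HOST>', '<APP_KEY>')
#     connection.start()
#     position = connection.publish_sync('my_channel', 'hello')
#     connection.subscribe_sync('my_channel', {'position': position})
#     print(connection.read_sync('my_channel'))
#     connection.stop()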
|
/satori_sdk_python-1.0.3-py3-none-any.whl/satori/rtm/connection.py
| 0.567457 | 0.221025 |
connection.py
|
pypi
|
from __future__ import print_function
from contextlib import contextmanager
import satori.rtm.auth as auth
from satori.rtm.exceptions import AuthError
import satori.rtm.internal_queue as queue
import threading
import satori.rtm.internal_client_action as a
from satori.rtm.internal_client import InternalClient
import satori.rtm.internal_subscription as s
from satori.rtm.logger import logger
SubscriptionMode = s.SubscriptionMode
Full = queue.Full
class Client(object):
"""
Client is the main entry point for interacting with RTM: it maintains the
WebSocket connection, reconnecting and resubscribing automatically as needed.
"""
def __init__(
self, endpoint, appkey,
fail_count_threshold=float('inf'),
reconnect_interval=1, max_reconnect_interval=300,
observer=None, restore_auth_on_reconnect=True,
max_queue_size=20000):
r"""
Description
Constructor for the Client.
Parameters
* endpoint {string} [required] - RTM endpoint as a string. Example:
"wss://rtm:8443/foo/bar". If port number is omitted, it defaults to 80 for
ws:// and 443 for wss://. Available from the Dev Portal.
* appkey {string} [required] - Appkey used to access RTM.
Available from the Dev Portal.
* reconnect_interval {int} [optional] - Time period, in seconds, between
reconnection attempts. The timeout period between each successive
connection attempt increases, but starts with this value. Use
max_reconnect_interval to specify the maximum number of seconds between
reconnection attempts. Default is 1.
* max_reconnect_interval {int} [optional] - Maximum period of time, in
seconds, to wait between reconnection attempts. Default is 300.
* fail_count_threshold {int} [optional] - Number of times the SDK should
attempt to reconnect if the connection disconnects. Specify any value
that resolves to an integer. Default is inf (infinity).
* observer {client_observer} [optional] - Instance of a client observer
class, used to define functionality based on the state changes of a
Client.
Set this property with client.observer or in the `make_client(*args,
**kwargs)` or `Client(*args, **kwargs)` methods.
* restore_auth_on_reconnect {boolean} [optional] - Whether to restore
authentication after reconnects. Default is True.
* max_queue_size {int} [optional] - Limits the number of concurrent
requests in order to avoid out-of-memory situations.
For example, if max_queue_size is 10 and the client code sends an 11th
publish request before the reply to the first one has arrived, that
11th call to `client.publish` throws the
`satori.rtm.client.Full` exception.
Syntax
::
from satori.rtm.client import Client
client = Client(endpoint='<ENDPOINT>', appkey=<APP_KEY>)
...
"""
assert endpoint
assert endpoint.startswith('ws://') or endpoint.startswith('wss://'),\
'Endpoint must start with "ws(s)://" but "%s" does not' % endpoint
self._queue = queue.Queue(maxsize=max_queue_size)
self._internal = InternalClient(
self._queue,
endpoint, appkey,
fail_count_threshold,
reconnect_interval, max_reconnect_interval,
observer, restore_auth_on_reconnect)
self._disposed = False
self._thread = threading.Thread(
target=self._internal_event_loop,
name='ClientLoop')
self._thread.daemon = True
self._thread.start()
def last_connecting_error(self):
"""
Description
If there were unsuccessful connection attempts, this function returns
the exception for the last such attempt. Otherwise returns None.
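Syntax
::
# Illustrative sketch: report why the most recent connection attempt
# failed.
if not client.is_connected():
print('Last connection error:', client.last_connecting_error())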
"""
return self._internal.last_connecting_error
def _enqueue(self, msg, timeout=0.1):
if not self._disposed:
self._queue.put(msg, block=True, timeout=timeout)
else:
raise RuntimeError(
'Trying to use a disposed satori.rtm.client.Client')
def start(self):
"""
Description
Starts a WebSocket connection to RTM for the Client object. You
must call the start() method before you subscribe to a channel using the
Client object methods.
If you publish any messages before calling this method, the SDK queues the
messages to publish after establishing the WebSocket connection.
Syntax
::
with sc.make_client(
endpoint=endpoint, appkey=appkey) as client:
client.stop()
...
client.start()
...
"""
self._enqueue(a.Start())
def stop(self):
"""
Description
Closes a WebSocket connection to RTM for the Client object.
Use this method if you want to explicitly stop all interaction with RTM.
After you use this method, if you call publish or subscribe methods
while the client is stopped, the SDK queues the requests and sends them when
the client reconnects.
Syntax
::
with make_client(
endpoint=endpoint, appkey=appkey) as client:
...
client.stop()
...
"""
self._enqueue(a.Stop())
def authenticate(self, auth_delegate, callback):
"""
Description
Validates the identity of an application user after connecting to RTM
with the Client class. After the user authenticates with RTM, the operations
that the client can perform depend on the role.
Since the authentication process is an asynchronous operation, the callback
function is required. The callback function processes the PDU response from
RTM.
For more information about authentication, see
*Authentication and Authorization* in the online docs.
Parameters
* auth_delegate {AuthDelegate | RoleSecretAuthDelegate} [required] - An
authentication delegate object. Use a
satori.rtm.auth.RoleSecretAuthDelegate class for the role-based
authentication process.
* callback {function} [required] - Function to execute after RTM
returns a response.
Syntax
::
secret_key = '<ROLE_SECRET_KEY>'
auth_delegate = auth.RoleSecretAuthDelegate('<ROLE>', secret_key)
auth_event = threading.Event()
def auth_callback(auth_result):
if type(auth_result) == auth.Done:
auth_event.set()
client.authenticate(auth_delegate, auth_callback)
auth_event.wait()
"""
self._enqueue(a.Authenticate(auth_delegate, callback))
def publish(self, channel, message, callback=None):
"""
Description
Publishes a message to the specified channel.
The channel and message parameters are required. The `message` parameter can
be any JSON-supported value. For more information, see www.json.org.
By default, this method does not acknowledge the completion of the publish
operation. Optionally, you can specify a callback function to process the
response from RTM. If you specify a callback, RTM
returns an object that represents the Protocol Data Unit (PDU) response to
the publish request. For more information about PDUs, see *RTM API* in the
online docs.
Since this is an asynchronous method, you can also use the Python threading
module to create an event to track completion of the publish operation in
the callback function.
Parameters
* message {string} [required] - JSON value to publish as a message. It must be
serializable using `json.dumps` from the Python standard `JSON` module.
* channel {string} [required] - Name of the channel to which you want to
publish.
* callback {function} [optional] - Callback function to execute on the PDU
response returned by RTM to the publish request.
Syntax
::
with sc.make_client(
endpoint=endpoint, appkey=appkey) as client:
...
print('Publishing a message')
client.publish(channel=channel, message=message)
"""
self._enqueue(a.Publish(channel, message, callback))
def read(self, channel, args=None, callback=None):
"""
Description
Asynchronously reads a value from the specified channel. This function
has no return value, but you can inspect
the reply PDU in the callback function.
You can also use the `args` parameter to add additional JSON key-value pairs
to the PDU in the read request that the SDK sends
to RTM. For more information about PDUs, see *RTM API* in the online docs.
Parameters
* channel {string} [required] - Name of the channel to read from.
* args {object} [optional] - Any JSON key-value pairs to send in the
read request. To create a filter, use the desired fSQL query as a string
value for the `filter` key.
* callback {function} [optional] - Callback function to execute on the PDU
response returned to the subscribe request by RTM.
Syntax
::
with make_client(endpoint=endpoint, appkey=appkey) as client:
mailbox = []
event = threading.Event()
def read_callback(reply):
mailbox.append(reply)
event.set()
client.read(channel, callback=read_callback)
if not event.wait(5):
print('Read request timed out')
else:
print('Read request returned {0}'.format(mailbox[0]))
"""
self._enqueue(a.Read(channel, args, callback))
def write(self, channel, value, callback=None):
"""
Description
Asynchronously writes the given value to the specified channel.
Parameters
* channel {string} [required] - Channel name.
* value {json value} [required] - JSON that represents the message payload
to publish.
* callback {function} [optional] - Callback passed the response PDU from
RTM.
Syntax
::
with make_client(endpoint=endpoint, appkey=appkey) as client:
mailbox = []
event = threading.Event()
def write_callback(reply):
mailbox.append(reply)
event.set()
client.write("answer", 42, callback=write_callback)
if not event.wait(5):
print('Write request timed out')
else:
print('Write request returned {0}'.format(mailbox[0]))
"""
self._enqueue(a.Write(channel, value, callback))
def delete(self, channel, callback=None):
"""
Description
Asynchronously deletes any value from the specified channel.
Parameters
* channel {string} [required] - Channel name.
* callback {function} [optional] - Callback passed the response PDU from
RTM.
Syntax
::
with make_client(endpoint=endpoint, appkey=appkey) as client:
mailbox = []
event = threading.Event()
def delete_callback(reply):
mailbox.append(reply)
event.set()
client.delete("old_stuff", callback=delete_callback)
if not event.wait(5):
print('Delete request timed out')
else:
print('Delete request returned {0}'.format(mailbox[0]))
"""
self._enqueue(a.Delete(channel, callback))
def subscribe(
self, channel_or_subscription_id, mode,
subscription_observer, args=None):
"""
Description
Subscribes to the specified channel.
Optionally, you can also use an observer that implements the subscription
callback functions and pass the observer as the `subscription_observer`
parameter. The callback functions represent each possible state for the
channel subscription. See *Subscription Observer*.
You can also use the `args` parameter to add additional JSON key-value pairs
to the PDU in the subscribe request that the SDK sends
to RTM. For more information about PDUs, see *RTM API* in the online docs.
.. note:: To receive data published to a channel after you subscribe to it,
use the `on_subscription_data()` callback function in a
subscription observer.
Parameters
* channel_or_subscription_id {string} [required] - String that identifies
the channel. If you do not use the `filter` parameter, it is the channel
name. Otherwise, it is a unique identifier for the channel (subscription
id).
        * mode {SubscriptionMode} [required] - Mode that determines the
          behaviour of the Python SDK and RTM when resubscribing after a
          reconnection. Use SubscriptionMode.ADVANCED, SubscriptionMode.RELIABLE, or
          SubscriptionMode.SIMPLE.
* subscription_observer {object} [optional] - Instance of an observer class
that implements the subscription observer callback functions.
* args {object} [optional] - Any JSON key-value pairs to send in the
subscribe request. To include a filter, put the desired fSQL query
as a string value for the `filter` key. See *Subscribe PDU* in the
online docs.
Syntax
::
with make_client(
endpoint=endpoint, appkey=appkey) as client:
class SubscriptionObserver(object):
def on_subscription_data(self, data):
for message in data['messages']:
print('Client got message {0}'.format(message))
subscription_observer = SubscriptionObserver()
client.subscribe(
channel,
SubscriptionMode.RELIABLE,
subscription_observer)
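        To add a server-side filter, pass the fSQL query in `args` (a sketch;
        the subscription id and query are illustrative)::
            client.subscribe(
                'my-filtered-subscription',
                SubscriptionMode.SIMPLE,
                subscription_observer,
                args={'filter': 'SELECT * FROM `my-channel`'})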
"""
self._enqueue(
a.Subscribe(
channel_or_subscription_id, mode,
subscription_observer, args))
def unsubscribe(self, channel_or_subscription_id):
"""
Description
Unsubscribes from a channel.
After you unsubscribe, the application no longer receives messages for the
channel. To identify when the unsubscribe operation has completed, use the
`on_leave_subscribed()` callback function of a subscription observer class.
Parameters
        * channel_or_subscription_id {string} [required] - Name of the channel, or
          the subscription id, from which you want to unsubscribe.
Syntax
::
with make_client(
endpoint=endpoint, appkey=appkey) as client:
...
client.subscribe(
"My Channel",
SubscriptionMode.RELIABLE,
subscription_observer)
...
client.unsubscribe("My Channel")
"""
self._enqueue(a.Unsubscribe(channel_or_subscription_id))
def search(self, prefix, callback):
"""
Description
Asynchronously performs a channel search for a given user-defined prefix.
This method passes RTM replies to the callback. RTM may send multiple
responses to the same search request: zero or more search result PDUs with
an action of `rtm/search/data` (depending on the results of the search).
Each channel found is only sent once.
After the search result PDUs, RTM follows with a positive response PDU:
        `rtm/search/ok`. The callback is invoked once per response and must
        inspect the reply object it receives for the `reply['body']['channels']`
        list.
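        Syntax
        ::
            # A usage sketch. The prefix is illustrative, and checking the PDU
            # 'action' field to detect the final 'rtm/search/ok' reply is an
            # assumption about the reply shape.
            channels = []
            search_event = threading.Event()
            def search_callback(reply):
                channels.extend(reply['body'].get('channels', []))
                if reply.get('action') == 'rtm/search/ok':
                    search_event.set()
            client.search('my-prefix', search_callback)
            search_event.wait(5)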
"""
self._enqueue(a.Search(prefix, callback))
def dispose(self):
"""
Description
        The client finishes all outstanding work, releases all resources, and
        becomes unusable.
Upon completion, `client.observer.on_enter_disposed()` is called.
"""
if not self._disposed:
self._enqueue(a.Dispose(), timeout=None)
self._disposed = True
if self._thread != threading.current_thread():
self._thread.join()
@property
def observer(self):
return self._internal.observer
@observer.setter
def observer(self, o):
self._internal.observer = o
def is_connected(self):
"""
Description
Returns `True` if the Client object is connected via a
WebSocket connection to RTM and `False` otherwise.
Returns
Boolean
Syntax
::
with sc.make_client(
endpoint=platform_endpoint,
appkey=platform_appkey) as client:
...
                if client.is_connected():
# do something
else:
# do something else
"""
return self._internal.is_connected()
def _internal_event_loop(self):
while True:
if self._internal.process_one_message(timeout=None):
break
class ClientStateObserver(object):
def on_enter_stopped(self):
logger.info('on_enter_stopped')
def on_leave_stopped(self):
logger.info('on_leave_stopped')
def on_enter_connecting(self):
logger.info('on_enter_connecting')
def on_leave_connecting(self):
logger.info('on_leave_connecting')
def on_enter_awaiting(self):
logger.info('on_enter_awaiting')
def on_leave_awaiting(self):
logger.info('on_leave_awaiting')
def on_enter_connected(self):
logger.info('on_enter_connected')
def on_leave_connected(self):
logger.info('on_leave_connected')
def on_enter_disposed(self):
logger.info('on_enter_disposed')
def on_enter_stopping(self):
logger.info('on_enter_stopping')
def on_leave_stopping(self):
logger.info('on_leave_stopping')
@contextmanager
def make_client(*args, **kwargs):
r"""
make_client(\*args, \*\*kwargs)
-------------------------------
Description
The `make_client()` function is a context manager. Call `make_client()`
using a `with` statement and the SDK automatically starts the WebSocket
connection. The SDK stops and then closes the WebSocket connection when the
statement completes or terminates due to an error.
    This function takes the same parameters as the Client constructor, plus an
    optional `auth_delegate`.
To use this function, import it from the client module::
        from satori.rtm.client import make_client
Parameters
* endpoint {string} [required] - RTM endpoint as a string. Example:
"wss://rtm:8443/foo/bar". If port number is omitted, it defaults to 80 for
ws:// and 443 for wss://. Available from the Dev Portal.
* appkey {string} [required] - Appkey used to access RTM.
Available from the Dev Portal.
* reconnect_interval {int} [optional] - Time period, in seconds, between
reconnection attempts. The timeout period between each successive
connection attempt increases, but starts with this value. Use
max_reconnect_interval to specify the maximum number of seconds between
reconnection attempts. Default is 1.
* max_reconnect_interval {int} [optional] - Maximum period of time, in
seconds, to wait between reconnection attempts. Default is 300.
* fail_count_threshold {int} [optional] - Number of times the SDK should
attempt to reconnect if the connection disconnects. Specify any value
that resolves to an integer. Default is inf (infinity).
* observer {client_observer} [optional] - Instance of a client observer
class, used to define functionality based on the state changes of a
Client.
Set this property with client.observer or in the `make_client(*args,
**kwargs)` or `Client(*args, **kwargs)` methods.
    * restore_auth_on_reconnect {boolean} [optional] - Whether to restore
      authentication after reconnects. Default is True.
    * max_queue_size {int} [optional] - Limits the number of concurrent
      requests in order to avoid an out-of-memory situation.
      For example, if max_queue_size is 10 and the client code sends 11
      publish requests so quickly that by the time it sends the 11th one the
      reply to the first has not yet arrived, the 11th call to `client.publish`
      will throw the `satori.rtm.client.Full` exception.
    * auth_delegate {AuthDelegate} [optional] - If the auth_delegate parameter is
      present, the client yielded by make_client is already authenticated.
Syntax
::
import satori.rtm.client as sc
endpoint = 'ENDPOINT'
appkey = 'APPKEY'
        with sc.make_client(endpoint=endpoint, appkey=appkey) as client:
            ...  # the client is connected and ready to use here
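    To authenticate while connecting, pass an auth delegate (a sketch; the
    role and secret key values are placeholders)::
        from satori.rtm.auth import RoleSecretAuthDelegate
        auth_delegate = RoleSecretAuthDelegate('<ROLE>', '<ROLE_SECRET_KEY>')
        with sc.make_client(
                endpoint=endpoint, appkey=appkey,
                auth_delegate=auth_delegate) as client:
            ...  # the client is connected and authenticated here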
Client Observer
---------------
Use the client observer callback functions in an observer to implement
functionality based on the Client object state changes.
Set this observer with the `client.observer` property on the Client.
The following table lists the Client object states and the associated
callback functions:
============ ====================== =====================
Client State Enter Callback Exit Callback
============ ====================== =====================
Awaiting on_enter_awaiting() on_leave_awaiting()
Connecting on_enter_connecting() on_leave_connecting()
Connected on_enter_connected() on_leave_connected()
Stopped on_enter_stopped() on_leave_stopped()
Disposed on_enter_disposed() n/a
============ ====================== =====================
    The following code shows an example client observer with an implemented
    callback function::
class ClientObserver(object):
def __init__(self):
self.connection_attempt_count = 0
def on_enter_connecting(self):
self.connection_attempt_count += 1
print('Establishing connection #{0}'.format(
self.connection_attempt_count))
client = Client(endpoint='<ENDPOINT>', appkey=None)
client.observer = ClientObserver()
client.start()
client.stop()
client.start()
Subscription Observer
---------------------
Use callback functions in a subscription observer to implement functionality
based on the state changes for a channel subscription. The subscribe(channel,
SubscriptionMode.RELIABLE, subscription_observer, args) method takes
a subscription observer for the subscription_observer parameter.
.. note:: Depending on your application, these callbacks are optional, except
`on_subscription_data`. To process received messages, you must
              implement the `on_subscription_data(data)` callback.
    The following table lists the subscription states and the associated
    callback functions:
============= ======================== ========================
State Enter Callback Exit Callback
============= ======================== ========================
Subscribing on_enter_subscribing() on_leave_subscribing()
Subscribed on_enter_subscribed() on_leave_subscribed()
Unsubscribing on_enter_unsubscribing() on_leave_unsubscribing()
Unsubscribed on_enter_unsubscribed() on_leave_unsubscribed()
Failed on_enter_failed() on_leave_failed()
Deleted on_deleted() n/a
============= ======================== ========================
Other Callbacks
=================== ======================
Event Callback
=================== ======================
Created on_created()
Message(s) Received on_subscription_data()
=================== ======================
    The following code shows an example subscription observer with an
    implemented callback function::
class SubscriptionObserver(object):
def __init__(self, channel):
self.message_count = 0
self.channel = channel
def on_subscription_data(self, data):
for message in data['messages']:
print('Got message {0}'.format(message))
self.message_count += len(data['messages'])
def on_enter_subscribed(self):
print('Subscription is now active')
def on_deleted(self):
                print('Received {0} messages from channel "{1}"'.format(
self.message_count, self.channel))
        subscription_observer = SubscriptionObserver(channel)
        client.subscribe(
            channel,
            SubscriptionMode.RELIABLE,
            subscription_observer)
# wait for some time
client.unsubscribe(channel)
"""
observer = kwargs.get('observer')
auth_delegate = kwargs.get('auth_delegate')
if 'auth_delegate' in kwargs:
del kwargs['auth_delegate']
client = Client(*args, **kwargs)
ready_event = threading.Event()
class Observer(ClientStateObserver):
def on_enter_connected(self):
ClientStateObserver.on_enter_connected(self)
ready_event.set()
def on_enter_stopped(self):
ClientStateObserver.on_enter_stopped(self)
ready_event.set()
client.observer = Observer()
client.start()
if not ready_event.wait(70):
if client.last_connecting_error():
client.dispose()
raise RuntimeError(
"Client connection timeout, last connection error: {0}".format(
client.last_connecting_error()))
else:
raise RuntimeError("Client connection timeout")
ready_event.clear()
if not client.is_connected():
client.dispose()
raise RuntimeError(
"Client connection error: {0}".format(
client.last_connecting_error()))
auth_mailbox = []
def auth_callback(auth_result):
auth_mailbox.append(auth_result)
ready_event.set()
if auth_delegate:
client.authenticate(auth_delegate, callback=auth_callback)
if not ready_event.wait(20):
client.dispose()
raise AuthError('Authentication process has timed out')
auth_result = auth_mailbox[0]
if type(auth_result) == auth.Error:
raise AuthError(auth_result.message)
logger.debug('Auth success in make_client')
try:
client.observer = observer
yield client
finally:
logger.info('make_client.finally')
client.dispose()
/satori_sdk_python-1.0.3-py3-none-any.whl/satori/rtm/client.py
r'''
satori.rtm.auth
===============
You can perform role-based authentication with the Python SDK. This method
uses a role and role secret key from the Dev Portal and authenticates a
client session with that role.
The operations that the client can perform depend
on the permissions for the role.
The role-based authentication method is a two-step authentication process
based on the HMAC process, using the MD5 hashing routine:
* The client obtains a nonce from the server in a handshake request.
* The client then sends an authorization request with its role secret key
hashed with the received nonce.
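The hash sent in the second step is computed essentially as follows (a sketch
mirroring the `RoleSecretAuthDelegate` implementation below)::
    import base64, hashlib, hmac
    binary_hash = hmac.new(role_secret, nonce, hashlib.md5).digest()
    ascii_hash = base64.b64encode(binary_hash).decode('ascii')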
Use the provided class `satori.rtm.auth.RoleSecretAuthDelegate` to
create a delegate (that knows the authentication process) and use the
delegate with the authenticate(role_auth_delegate, auth_callback) method of the
`satori.rtm.client.Client` or `satori.rtm.connection.Connection` class. The SDK
calls `auth_callback` on the response from RTM.
Alternatively, you can use custom authentication, for which you must manually
create the delegate yourself.
For more information, see
*Authentication and Authorization* in the online docs.
.. note:: Automatic reauthentication can be disabled by passing
          'restore_auth_on_reconnect=False' to the Client constructor or
          to make_client.
Use the client or connection authenticate method with the authentication
delegate and a callback to process the RTM response to the authentication
request::
secret_key = '<ROLE_SECRET_KEY>'
with sc.make_client(
endpoint=endpoint,
appkey=platform_appkey) as client:
role_auth_delegate = auth.RoleSecretAuthDelegate(\
'<USER_ROLE>', secret_key)
auth_ack = threading.Event()
def auth_callback(auth_result):
if type(auth_result) == auth.Done:
print('Auth success')
auth_ack.set()
else:
print('Auth failure: {0}'.format(auth_result))
auth_ack.set()
client.authenticate(role_auth_delegate, auth_callback)
if not auth_ack.wait(10):
raise RuntimeError('No authentication reply in reasonable time')
'''
from __future__ import print_function
from collections import namedtuple as t
import base64
import hashlib
import hmac
Authenticate = t('Authenticate', ['method', 'credentials', 'callback'])
AuthenticateOK = t('AuthenticateOK', [])
Handshake = t('Handshake', ['method', 'data', 'callback'])
HandshakeOK = t('HandshakeOK', ['data'])
Done = t('Done', [])
Error = t('Error', ['message'])
class AuthDelegate(object):
def start(self):
return Done()
class RoleSecretAuthDelegate(AuthDelegate):
def __init__(self, role, role_secret):
self.role = role
if isinstance(role_secret, bytes):
self.role_secret = role_secret
else:
self.role_secret = role_secret.encode('utf8')
def start(self):
method = 'role_secret'
def after_handshake(reply):
if type(reply) == Error:
return reply
assert type(reply) == HandshakeOK
if 'nonce' not in reply.data:
return Error('No nonce in handshake reply')
nonce = reply.data['nonce'].encode('utf8')
binary_hash = hmac.new(
self.role_secret, nonce, hashlib.md5).digest()
ascii_hash = base64.b64encode(binary_hash)
return Authenticate(
method,
{'hash': ascii_hash.decode('ascii')},
after_authenticate)
def after_authenticate(reply):
if type(reply) == Error:
return reply
assert type(reply) == AuthenticateOK
return Done()
return Handshake(method, {'role': self.role}, after_handshake)
/satori_sdk_python-1.0.3-py3-none-any.whl/satori/rtm/auth.py
import asyncio
import concurrent
import uuid
from typing import List
from uuid import uuid1
import networkx as nx
from serpentarium.engine.CompositionNode import CompositionNode
from serpentarium.engine.ModContext import ModContext
from serpentarium.monitoring.Monitor import Monitor
from serpentarium.util.helpers import get_class
class ExecutionContext(ModContext):
"""
An execution context of the system.
Encapsulates Mod composition graph, event loop and thread pool for blocking tasks.
"""
def __init__(self, mods: List[dict], monitor: Monitor, monitor_polling: int) -> None:
"""
        Builds the execution graph and creates the event loop and thread pool.
:param mods: mod configuration
:param monitor: execution monitor
"""
self.monitor_polling = monitor_polling
self._monitor = monitor
self._graph = nx.MultiDiGraph()
self._mods_instances = {}
self._loop = asyncio.get_event_loop()
self._exec = concurrent.futures.ThreadPoolExecutor(max_workers=3)
mod_map = {}
for mod in mods:
mod_class = get_class(mod['class'])
instance_id = uuid1()
mod_instance = mod_class(name=mod['name'], id=instance_id, settings=mod['settings'], context=self)
execution_node = ExecutionContext.map_mod_instance(mod, instance_id)
self._mods_instances[instance_id] = (mod_instance, execution_node)
mod_map[mod['name']] = execution_node
self._graph.add_nodes_from(mod_map.values())
for (name, mod) in mod_map.items():
for connector in mod.connectors:
connect_with = mod_map[connector['name']]
self._graph.add_edge(connect_with, mod)
def start(self) -> None:
"""
Starts the mods and monitoring coroutine.
"""
for (instance, node) in self._mods_instances.values():
instance.on_start()
if self.monitor_polling > 0:
asyncio.ensure_future(self.async_monitor(self.monitor_polling), loop=self._loop)
self._loop.run_forever()
def shutdown(self) -> None:
"""
Shuts down the event loop and thread pool.
"""
self._loop.stop()
self._exec.shutdown()
def emit(self, id: uuid, message: dict) -> None:
"""
        Creates a coroutine to pass the message downstream.
:param id: unique mod id
:param message: message to pass
"""
asyncio.ensure_future(self.async_emit(id, message), loop=self._loop)
def execute_blocking(self, callback, *args) -> None:
"""
Executes a piece of blocking code in a thread pool
:param callback: function to execute
:param args: parameters to pass
"""
self._loop.run_in_executor(self._exec, callback, *args)
def draw(self, label: str, path_to_save: str) -> None:
"""
Draws a visualisation of the execution graph
:param label: description to add to the image
:param path_to_save: path to save file
:return:
"""
pydot = nx.drawing.nx_pydot.to_pydot(self._graph)
pydot.write_png(path_to_save)
async def async_emit(self, id: uuid, message: dict):
"""
        Finds the neighbours of the current mod and passes a copy of the message
        to each of them.
:param id: unique mod id
:param message: message to pass
"""
(instance, execution_node) = self._mods_instances[id]
neighbours = list(self._graph.neighbors(execution_node))
for neighbour in neighbours:
(neighbour_instance, neighbour_composition) = self._mods_instances[neighbour.id]
await neighbour_instance.on_message(message.copy())
async def async_monitor(self, polling_interval: int):
"""
A coroutine which collects metrics periodically
"""
while True:
await asyncio.sleep(polling_interval)
for (instance, node) in self._mods_instances.values():
instance.on_stats(self._monitor)
@staticmethod
def map_mod_instance(mod: dict, id: uuid) -> CompositionNode:
"""
Creates a composition node object from mod
:param mod: mod
:param id: mod id
:return:
"""
if 'connectors' in mod:
return CompositionNode(name=mod['name'], id=id, connectors=mod['connectors'])
else:
return CompositionNode(name=mod['name'], id=id)
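# A minimal usage sketch (illustrative only; the mod class paths, settings and
# monitor object are assumptions, not part of this package):
#
#   mods = [
#       {'name': 'source', 'class': 'my_mods.Source', 'settings': {},
#        'connectors': [{'name': 'sink'}]},
#       {'name': 'sink', 'class': 'my_mods.Sink', 'settings': {}},
#   ]
#   context = ExecutionContext(mods, monitor=my_monitor, monitor_polling=10)
#   context.start()  # blocks, running the event loop until shutdown() is called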
/satori-serpentarium-0.0.2a3.tar.gz/satori-serpentarium-0.0.2a3/serpentarium/engine/ExecutionContext.py
import logging
from satosa.context import Context
from satosa.internal import InternalData
from satosa.micro_services.base import ResponseMicroService
logger = logging.getLogger(__name__)
class SessionStartedWith(ResponseMicroService):
"""
    This SATOSA microservice checks whether the configured attribute's value is
    present in the list of persisted auth parameters' "session_started_with"
    values. The attribute typing microservice is expected to run after this
    microservice to convert the string value to a boolean.
"""
def __init__(self, config, *args, **kwargs):
super().__init__(*args, **kwargs)
self.LOG_PREFIX = "SessionStartedWith: "
logger.info("SessionStartedWith is active")
self.attribute_to_check = config.get("attribute_to_check", "firstrpinsession")
self.list_name = "session_started_with"
self.persisted_attrs_name = "persisted_auth_params"
self.result_attr_name = config.get(
"result_attr_name", "sessionstartedwithchecked"
)
def process(self, context: Context, data: InternalData):
attr_to_check = data.attributes.get(self.attribute_to_check)
if attr_to_check is None:
logger.debug(
self.LOG_PREFIX + f"Missing {self.attribute_to_check} in user data."
)
return super().process(context, data)
persisted_auth_params = context.state.get(self.persisted_attrs_name)
if (
persisted_auth_params is None
or persisted_auth_params.get(self.list_name) is None
):
logger.debug(
self.LOG_PREFIX + f"Missing {self.persisted_attrs_name} in state data."
)
return super().process(context, data)
persisted_attr_values = persisted_auth_params[self.list_name].split(" ")
if attr_to_check in persisted_attr_values:
data.attributes[self.result_attr_name] = ["true"]
logger.info(
self.LOG_PREFIX + f"{attr_to_check} found in {persisted_attr_values}"
)
return super().process(context, data)
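# An illustrative SATOSA microservice config (the keys match the defaults read
# in __init__ above):
#
#   config:
#       attribute_to_check: firstrpinsession
#       result_attr_name: sessionstartedwithchecked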
/satosacontrib.perun-4.1.0-py3-none-any.whl/satosacontrib/perun/micro_services/session_started_with_microservice.py
import logging
from typing import Dict
from satosa.context import Context
from satosa.internal import InternalData
from satosa.micro_services.base import RequestMicroService
logger = logging.getLogger(__name__)
class ForwardAuthorizationParams(RequestMicroService):
"""
    This SATOSA microservice picks configured properties from the request
    and forwards them via the context's auth request parameters. Optionally,
    it can add default params with preconfigured values for certain IdPs and SPs.
    Params passed via HTTP GET and POST requests take priority over the default
    ones from the config in case of a conflict.
"""
def __init__(self, config, *args, **kwargs):
super().__init__(*args, **kwargs)
logger.info("ForwardAuthorizationParams is active")
self.__param_names_to_forward = config.get(
"param_names_to_forward", ["session_started_with"]
)
self.__default_values = config.get("default_values", [])
def apply_default_values(
self, data: InternalData, params_to_forward: Dict[str, str]
) -> None:
current_idp = data.auth_info["issuer"]
current_sp = data.requester
idps_to_forward = ["", current_idp]
sps_to_forward = ["", current_sp]
for default_value in self.__default_values:
if (
default_value["idp"] in idps_to_forward
and default_value["sp"] in sps_to_forward
):
                param_to_forward = default_value["param"]
params_to_forward.update(param_to_forward)
def extract_params_to_forward(
self, context: Context, context_params_to_forward: Dict[str, str]
) -> None:
for param_name_to_forward in self.__param_names_to_forward:
# For GET methods
if context.qs_params:
param_value_to_forward = context.qs_params.get(param_name_to_forward)
# For POST methods
elif context.request:
param_value_to_forward = context.request.get(param_name_to_forward)
else:
param_value_to_forward = None
# Overwriting of default values can happen here
if param_value_to_forward:
context_params_to_forward[
param_name_to_forward
] = param_value_to_forward
def add_params_to_context(
self, context: Context, context_params_to_forward: Dict[str, str]
):
state_auth_req_params = context.state.get(Context.KEY_AUTH_REQ_PARAMS) or {}
context_auth_req_params = (
context.get_decoration(Context.KEY_AUTH_REQ_PARAMS) or {}
)
for k, v in context_params_to_forward.items():
state_auth_req_params[k] = v
context_auth_req_params[k] = v
context.state[Context.KEY_AUTH_REQ_PARAMS] = state_auth_req_params
context.decorate(Context.KEY_AUTH_REQ_PARAMS, context_auth_req_params)
def process(self, context: Context, data: InternalData):
# Prevent forwarding of default values in case of unsupported HTTP method
if context.request_method not in ["GET", "POST"]:
return super().process(context, data)
context_params_to_forward = {}
self.apply_default_values(data, context_params_to_forward)
self.extract_params_to_forward(context, context_params_to_forward)
self.add_params_to_context(context, context_params_to_forward)
return super().process(context, data)
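# An illustrative config (assumed structure, matching how the class reads it;
# an empty 'idp' or 'sp' value matches any entity):
#
#   config:
#       param_names_to_forward:
#           - session_started_with
#       default_values:
#           - idp: "https://idp.example.org"
#             sp: ""
#             param:
#                 session_started_with: "mfa"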
/satosacontrib.perun-4.1.0-py3-none-any.whl/satosacontrib/perun/micro_services/forward_authorization_params_microservice.py
from perun.connector.utils.Logger import Logger
from satosa.context import Context
from satosa.internal import InternalData
from satosa.micro_services.base import ResponseMicroService
import importlib
logger = Logger.get_logger(__name__)
class ComputeEligibility(ResponseMicroService):
def __init__(self, config, *args, **kwargs):
super().__init__(*args, **kwargs)
logger.info("ComputeEligibility is active")
self.__ELIGIBILITY_TIMESTAMPS_DICT_ATTRIBUTE = "internal_eligibility_attribute"
self.__SUPPORTED_ELIGIBILITY_TYPES = "supported_eligibility_types"
self.__eligibility_timestamps_dict_attribute = config.get(
self.__ELIGIBILITY_TIMESTAMPS_DICT_ATTRIBUTE
)
self.__supported_eligibility_types = config.get(
self.__SUPPORTED_ELIGIBILITY_TYPES
)
def process(self, context: Context, data: InternalData):
"""
        Obtains a dict in the format { eligibility_type: <unix_timestamp> }
        from the internal data and runs the function configured for the
        given eligibility type. The function either returns False or
        a new timestamp, in which case the time in the dictionary is
        updated. It strongly relies on PerunAttributes to fill the dict
        beforehand.
@param context: object for sharing proxy data through the current
request
@param data: data carried between frontend and backend,
namely dict of services and timestamps of the last
eligible accesses of user within said service
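        An illustrative shape of the configured mapping (values are dotted
        paths to callables that are imported at runtime)::
            {"goodStanding": "my_package.eligibility.check_good_standing"}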
"""
last_seen_eligible_timestamps_dict = data.attributes.get(
self.__eligibility_timestamps_dict_attribute, {}
)
if not last_seen_eligible_timestamps_dict:
logger.info(
"No eligibility timestamps found. Skipping eligibility computation. "
"Attribute {self.__eligibility_timestamps_dict_attribute} might"
" be missing in provided data"
)
for (
eligibility_type,
function_path,
) in self.__supported_eligibility_types.items():
provided_timestamp = last_seen_eligible_timestamps_dict.get(
eligibility_type, None
)
if provided_timestamp:
mod_name, func_name = function_path.rsplit(".", 1)
mod = importlib.import_module(mod_name)
function = getattr(mod, func_name, None)
if function:
kwargs = {}
try:
new_timestamp = function(data, **kwargs)
logger.info(
f"Function {func_name} calculated new timestamp"
f" {new_timestamp}, "
f"provided timestamp is {provided_timestamp}."
)
if new_timestamp > provided_timestamp:
last_seen_eligible_timestamps_dict[
eligibility_type
] = new_timestamp
except Exception as e:
logger.error(
f"Function {function} failed with an exception {e}."
)
else:
logger.error("Function {} not found".format(function_path))
return super().process(context, data)
/satosacontrib.perun-4.1.0-py3-none-any.whl/satosacontrib/perun/micro_services/compute_eligibility.py
from datetime import datetime
from perun.connector.utils.Logger import Logger
from satosa.context import Context
from satosa.internal import InternalData
from satosa.micro_services.base import ResponseMicroService
logger = Logger.get_logger(__name__)
class IsEligible(ResponseMicroService):
def __init__(self, config, *args, **kwargs):
super().__init__(*args, **kwargs)
logger.info("IsEligible is active")
self.__TARGET_ATTRIBUTES = "target_attributes"
self.__DEFAULT_TARGET_ATTRIBUTES = ["eduPersonAssurance"]
self.__ELIGIBILITY_TIMESTAMPS_DICT_ATTRIBUTE = (
"eligibility_timestamps_dict_attribute"
)
self.__SUPPORTED_ELIGIBILITY_TYPES = "supported_eligibility_types"
self.__target_attributes = config.get(self.__TARGET_ATTRIBUTES)
self.__eligibility_timestamps_dict_attribute = config.get(
self.__ELIGIBILITY_TIMESTAMPS_DICT_ATTRIBUTE
)
self.__supported_eligibility_types = config.get(
self.__SUPPORTED_ELIGIBILITY_TYPES
)
def process(self, context: Context, data: InternalData):
"""
        Obtains a dict in the format { eligibility_type: <unix_timestamp> }
and compares the eligibility timestamp of each type with preconfigured
thresholds. All applicable eligibility intervals for each
eligibility_type are included in the result.
For example, if the input includes an eligibility type T with a
2-day-old timestamp and the microservice config includes possible
eligibility intervals of 1d, 1m and 1y for eligibility type T,
this type T will be granted eligibility values of 1m and 1y in the
result.
@param context: object for sharing proxy data through the current
request
@param data: data carried between frontend and backend,
namely dict of services and timestamps of the last
eligible accesses of user within said service
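        An illustrative shape of `supported_eligibility_types` (assumed from
        how it is read below; suffix values are interval lengths in days)::
            {"goodStanding": {"prefixes": ["https://example.org/assurance"],
                              "suffixes": {"1m": 31, "1y": 366}}}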
"""
if not self.__target_attributes:
self.__target_attributes = self.__DEFAULT_TARGET_ATTRIBUTES
last_seen_eligible_timestamps_dict = data.attributes.get(
self.__eligibility_timestamps_dict_attribute, {}
)
if not last_seen_eligible_timestamps_dict:
            logger.info(
                "No eligibility timestamps found. Skipping this service. "
                f"Attribute {self.__eligibility_timestamps_dict_attribute} might"
                " be missing in provided data"
            )
            return super().process(context, data)
all_assurances = {}
for (
eligibility_type,
details,
) in self.__supported_eligibility_types.items():
provided_timestamp = last_seen_eligible_timestamps_dict.get(
eligibility_type
)
if provided_timestamp:
days_since_last_eligible_timestamp = (
datetime.now() - datetime.fromtimestamp(provided_timestamp)
).days
assurance_prefixes = details["prefixes"]
for period_suffix, period_duration_days in details["suffixes"].items():
if days_since_last_eligible_timestamp <= period_duration_days:
if all_assurances.get(eligibility_type) is None:
all_assurances[eligibility_type] = []
for prefix in assurance_prefixes:
all_assurances[eligibility_type].append(
f"{prefix}-{period_suffix}"
)
for target_attr in self.__target_attributes:
data.attributes.setdefault(target_attr, {}).update(all_assurances)
return super().process(context, data)
/satosacontrib.perun-4.1.0-py3-none-any.whl/satosacontrib/perun/micro_services/is_eligible_microservice.py
import hashlib
import hmac
import json
import logging
import random
import string
import time
import requests
from jwcrypto import jwk, jwt
from jwcrypto.jwk import JWKSet, JWK
from perun.connector.utils.Logger import Logger
from satosa.context import Context
from satosa.internal import InternalData
from satosa.response import Redirect
class Utils:
@staticmethod
def generate_nonce() -> str:
letters = string.ascii_lowercase
actual_time = str(int(time.time()))
rand = random.SystemRandom()
return actual_time + "".join(rand.choice(letters) for _ in range(54))
@staticmethod
def __import_keys(file_path: str) -> JWKSet:
jwk_set = jwk.JWKSet()
with open(file_path, "r") as keystore:
jwk_set.import_keyset(keystore.read())
return jwk_set
@staticmethod
def __get_signing_jwk(keystore: str, key_id: str) -> JWK:
jwk_set = Utils.__import_keys(keystore)
return jwk_set.get_key(key_id)
@staticmethod
def __get_jwt(data: dict[str, str], jwk_key: JWK, token_alg: str):
token = jwt.JWT(header={"alg": token_alg, "typ": "JWT"}, claims=data)
token.make_signed_token(jwk_key)
return token.serialize()
@staticmethod
def sign_data(
data: dict[str, str], keystore: str, key_id: str, token_alg: str
) -> str:
token_signing_key = Utils.__get_signing_jwk(keystore, key_id)
return Utils.__get_jwt(data, token_signing_key, token_alg)
@staticmethod
def secure_redirect_with_nonce(
context: Context,
data: InternalData,
request_data: dict[str, str],
url: str,
signing_cfg: dict[str, str],
caller_name: str,
) -> Redirect:
"""
Performs secure redirect to given url using signed data with nonce
@param caller_name: name of invoking microservice
@param signing_cfg: config with data necessary for signing
@param context: object for sharing proxy data through the current
request
@param data: data carried between frontend and backend
@param request_data: data to be signed, it also carries nonce
@param url: url where secure redirect should be performed
@return: secure redirect to the desired url using signed request
with nonce
"""
nonce = Utils.generate_nonce()
request_data["nonce"] = nonce
request_data["time"] = str(int(time.time()))
signed_request_data = Utils.sign_data(
request_data,
signing_cfg["keystore"],
signing_cfg["key_id"],
signing_cfg["token_alg"],
)
data["nonce"] = nonce
context.state[caller_name] = data.to_dict()
return Redirect(f"{url}/{signed_request_data}")
@staticmethod
def handle_registration_response(
context: Context,
signing_cfg: dict[str, str],
registration_result_url: str,
caller_name: str,
) -> tuple[Context, InternalData]:
"""
Handles response from external service with the result of registration
@param caller_name: name of invoking microservice
@param registration_result_url: url where result of registration is
sent
@param signing_cfg: config with data necessary for signing
@param context: request context
@return: loaded newly registered group if registration was successful
"""
saved_state = context.state[caller_name]
internal_response = InternalData.from_dict(saved_state)
request_data = {
"nonce": internal_response["nonce"],
"time": str(int(time.time())),
}
signed_data = Utils.sign_data(
request_data,
signing_cfg["keystore"],
signing_cfg["key_id"],
signing_cfg["token_alg"],
)
request = f"{registration_result_url}/{signed_data}"
response = requests.get(request)
response_dict = json.loads(response.text)
if response_dict["result"] != "okay" or not hmac.compare_digest(
response_dict["nonce"], internal_response["nonce"]
):
logger = Logger.get_logger(__name__)
logger.info("Registration was unsuccessful.")
return context, internal_response
@staticmethod
def allow_by_requester(
context: Context,
data: InternalData,
allowed_cfg: dict[str, dict[str, list[str]]],
) -> bool:
"""
        Checks whether the requester for the target entity is allowed to use Perun.
The rules are defined by either allow or deny list. All requesters not present
in the allow (deny) list are implicitly denied (allowed).
@param data: the Internal Data
@param context: the request context
@param allowed_cfg: the dictionary of either deny or allow requesters for
given entity
@return: True if allowed False otherwise
"""
logger = logging.getLogger()
target_entity_id = (
context.get_decoration(Context.KEY_TARGET_ENTITYID)
if context.get_decoration(Context.KEY_TARGET_ENTITYID)
else ""
)
target_specific_rules = allowed_cfg.get(target_entity_id, allowed_cfg.get(""))
allow_rules = target_specific_rules.get("allow")
if allow_rules:
logger.debug(
"Requester '{0}' is {2} allowed for '{1}' due to allow rules".format(
data.requester,
target_entity_id,
"" if data.requester in allow_rules else "not",
)
)
return data.requester in allow_rules
deny_rules = target_specific_rules.get("deny")
if deny_rules:
logger.debug(
"Requester '{0}' is {2} allowed for '{1}' due to deny rules".format(
data.requester,
target_entity_id,
"not" if data.requester in deny_rules else "",
)
)
return data.requester not in deny_rules
logger.debug(
"Requester '{}' is not allowed for '{}' due to final deny all rule".format(
data.requester, target_entity_id
)
)
return False
@staticmethod
def get_hash_function(function_name):
hashlib_algs = hashlib.algorithms_available
if function_name in hashlib_algs:
hash_func = getattr(hashlib, function_name)
return hash_func
else:
raise ValueError(f"Invalid hashing algorithm, supported: {hashlib_algs}")
/satosacontrib.perun-4.1.0-py3-none-any.whl/satosacontrib/perun/utils/Utils.py
from sqlalchemy import (
Column,
String,
ForeignKey,
Integer,
)
from sqlalchemy.dialects.postgresql import TIMESTAMP
from sqlalchemy.orm import declarative_base
Base = declarative_base()
class AuthEventLoggingTable(Base):
__tablename__ = "auth_event_logging"
id = Column(Integer, primary_key=True)
day = Column(TIMESTAMP)
user = Column(String)
idp_id = Column(Integer, ForeignKey("logging_idp.id"))
sp_id = Column(Integer, ForeignKey("logging_sp.id"))
ip_address = Column(String)
geolocation_city = Column(String)
geolocation_country = Column(String)
session_id = Column(Integer, ForeignKey("session_id_values.id"))
requested_acrs_id = Column(Integer, ForeignKey("requested_acrs_values.id"))
upstream_acrs_id = Column(Integer, ForeignKey("upstream_acrs_values.id"))
user_agent_raw_id = Column(Integer, ForeignKey("user_agent_raw_values.id"))
user_agent_id = Column(Integer, ForeignKey("user_agent_values.id"))
class LoggingIdpTable(Base):
__tablename__ = "logging_idp"
id = Column(Integer, primary_key=True)
identifier = Column(String, unique=True)
name = Column(String)
class LoggingSpTable(Base):
__tablename__ = "logging_sp"
id = Column(Integer, primary_key=True)
identifier = Column(String, unique=True)
name = Column(String)
class SessionIdTable(Base):
__tablename__ = "session_id_values"
id = Column(Integer, primary_key=True)
value = Column(String)
class RequestedAcrsTable(Base):
__tablename__ = "requested_acrs_values"
id = Column(Integer, primary_key=True)
value = Column(String)
class UpstreamAcrsTable(Base):
__tablename__ = "upstream_acrs_values"
id = Column(Integer, primary_key=True)
value = Column(String)
class UserAgentRawTable(Base):
__tablename__ = "user_agent_raw_values"
id = Column(Integer, primary_key=True)
value = Column(String)
class UserAgentTable(Base):
__tablename__ = "user_agent_values"
id = Column(Integer, primary_key=True)
value = Column(String)
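# A minimal sketch of creating these tables against a database (illustrative;
# the connection URL is an assumption):
#
#   from sqlalchemy import create_engine
#   engine = create_engine("postgresql:///auth_event_logging")
#   Base.metadata.create_all(engine)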
/satosacontrib.perun-4.1.0-py3-none-any.whl/satosacontrib/perun/utils/AuthEventLoggingDbModels.py
=======
Readers
=======
.. todo::
How to read cloud products from NWCSAF software. (separate document?)
Satpy supports reading and loading data from many input file formats and
schemes. The :class:`~satpy.scene.Scene` object provides a simple interface
around all the complexity of these various formats through its ``load``
method. The following sections describe the different way data can be loaded,
requested, or added to a Scene object.
Available Readers
=================
For readers currently available in Satpy see :ref:`reader_table`.
Additionally, to get a list of available readers you can use the
`available_readers` function. By default, it returns the names of available
readers. To return additional reader information, use
`available_readers(as_dict=True)`::
>>> from satpy import available_readers
>>> available_readers()
Filter loaded files
===================
Coming soon...
Load data
=========
Datasets in Satpy are identified by certain pieces of metadata set during
data loading. These include `name`, `wavelength`, `calibration`,
`resolution`, `polarization`, and `modifiers`. Normally, once a ``Scene``
is created requesting datasets by `name` or `wavelength` is all that is
needed::
>>> from satpy import Scene
>>> scn = Scene(reader="seviri_l1b_hrit", filenames=filenames)
>>> scn.load([0.6, 0.8, 10.8])
>>> scn.load(['IR_120', 'IR_134'])
However, in many cases datasets are available in multiple spatial resolutions,
multiple calibrations (``brightness_temperature``, ``reflectance``,
``radiance``, etc),
multiple polarizations, or have corrections or other modifiers already applied
to them. By default Satpy will provide the version of the dataset with the
highest resolution and the highest level of calibration (brightness
temperature or reflectance over radiance). It is also possible to request one
of these exact versions of a dataset by using the
:class:`~satpy.dataset.DataQuery` class::
>>> from satpy import DataQuery
>>> my_channel_id = DataQuery(name='IR_016', calibration='radiance')
>>> scn.load([my_channel_id])
>>> print(scn['IR_016'])
Or request multiple datasets at a specific calibration, resolution, or
polarization::
>>> scn.load([0.6, 0.8], resolution=1000)
Or multiple calibrations::
>>> scn.load([0.6, 10.8], calibration=['brightness_temperature', 'radiance'])
In the above case Satpy will load whatever dataset is available and matches
the specified parameters. So the above ``load`` call would load the ``0.6``
(a visible/reflectance band) radiance data and ``10.8`` (an IR band)
brightness temperature data.
For geostationary satellites that have the individual channel data
separated into several files (segments), the missing segments are padded
by default to the full disk area. This is done to simplify caching of
resampling look-up tables (see :doc:`resample` for more information).
To disable this, the user can pass ``pad_data`` keyword argument when
loading datasets::
>>> scn.load([0.6, 10.8], pad_data=False)
For geostationary products, where the imagery is stored in the files in an unconventional orientation
(e.g. MSG SEVIRI L1.5 data are stored with the southwest corner in the upper right), the keyword argument
``upper_right_corner`` can be passed into the load call to automatically flip the datasets to the
desired orientation. Accepted argument values are ``'NE'``, ``'NW'``, ``'SE'``, ``'SW'``,
and ``'native'``.
By default, no flipping is applied (corresponding to ``upper_right_corner='native'``) and
the data are delivered in the original format. To get the data in the common upright orientation,
load the datasets using e.g.::
>>> scn.load(['VIS008'], upper_right_corner='NE')
.. note::
    If a dataset could not be loaded, no exception is raised. You must
check the
:meth:`scn.missing_datasets <satpy.scene.Scene.missing_datasets>`
property for any ``DataID`` that could not be loaded.
To find out what datasets are available from a reader from the files that were
provided to the ``Scene`` use
:meth:`~satpy.scene.Scene.available_dataset_ids`::
>>> scn.available_dataset_ids()
Or :meth:`~satpy.scene.Scene.available_dataset_names` for just the string
names of Datasets::
>>> scn.available_dataset_names()
Load remote data
================
Starting with Satpy version 0.25.1, supported readers can load data from
remote file systems via ``fsspec``-compatible libraries such as ``s3fs``.
For example:
::
>>> from satpy import Scene
>>> from satpy.readers import FSFile
>>> import fsspec
>>> filename = 'noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*'
>>> the_files = fsspec.open_files("simplecache::s3://" + filename, s3={'anon': True})
>>> fs_files = [FSFile(open_file) for open_file in the_files]
>>> scn = Scene(filenames=fs_files, reader='abi_l1b')
>>> scn.load(['true_color_raw'])
Check the list of :ref:`reader_table` to see which reader supports remote
files. For the usage of ``fsspec`` and advanced features like caching files
locally see the `fsspec Documentation <https://filesystem-spec.readthedocs.io/en/latest>`_ .
.. _search_for_files:
Search for local/remote files
=============================
Satpy provides a utility
:func:`~satpy.readers.find_files_and_readers` for searching for files in
a base directory matching various search parameters. This function discovers
files based on filename patterns. It returns a dictionary mapping reader name
to a list of filenames supported. This dictionary can be passed directly to
the :class:`~satpy.scene.Scene` initialization.
::
>>> from satpy import find_files_and_readers, Scene
>>> from datetime import datetime
>>> my_files = find_files_and_readers(base_dir='/data/viirs_sdrs',
... reader='viirs_sdr',
... start_time=datetime(2017, 5, 1, 18, 1, 0),
... end_time=datetime(2017, 5, 1, 18, 30, 0))
>>> scn = Scene(filenames=my_files)
See the :func:`~satpy.readers.find_files_and_readers` documentation for
more information on the possible parameters as well as for searching on
remote file systems.
.. _dataset_metadata:
Metadata
========
The datasets held by a scene also provide vital metadata such as dataset name, units, observation
time etc. The following attributes are standardized across all readers:
* ``name``, and other identifying metadata keys: See :doc:`dev_guide/satpy_internals`.
* ``start_time``: Left boundary of the time interval covered by the dataset.
For more information see the :ref:`time_metadata` section below.
* ``end_time``: Right boundary of the time interval covered by the dataset.
For more information see the :ref:`time_metadata` section below.
* ``area``: :class:`~pyresample.geometry.AreaDefinition` or
:class:`~pyresample.geometry.SwathDefinition` if data is geolocated. Areas are used for gridded
projected data and Swaths when data must be described by individual longitude/latitude
coordinates. See the Coordinates section below.
* ``reader``: The name of the Satpy reader that produced the dataset.
* ``orbital_parameters``: Dictionary of orbital parameters describing the satellite's position.
See the :ref:`orbital_parameters` section below for more information.
* ``time_parameters``: Dictionary of additional time parameters describing the
time ranges related to the requests or schedules for when observations
should happen and when they actually do. See :ref:`time_metadata` below for
details.
* ``raw_metadata``: Raw, unprocessed metadata from the reader.
Note that the above attributes are not necessarily available for each dataset.
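For example, once a dataset has been loaded, the standardized metadata can be
inspected through its ``attrs`` dictionary (the dataset name here is
illustrative)::
    >>> scn.load(['IR_108'])
    >>> scn['IR_108'].attrs['start_time']
    >>> scn['IR_108'].attrs['reader']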
.. _time_metadata:
Time Metadata
-------------
In addition to the generic ``start_time`` and ``end_time`` pieces of metadata
there are other time fields that may be provided if the reader supports them.
These items are stored in a ``time_parameters`` sub-dictionary and they include
values like:
* ``observation_start_time``: The point in time when a sensor began recording
for the current data.
* ``observation_end_time``: Same as ``observation_start_time``, but when data
has stopped being recorded.
* ``nominal_start_time``: The "human friendly" time describing the start of
the data observation interval or repeat cycle. This time is often on a round
minute (seconds=0). Along with the nominal end time, these times define the
regular interval of the data collection. For example, GOES-16 ABI full disk
images are collected every 10 minutes (in the common configuration) so
``nominal_start_time`` and ``nominal_end_time`` would be 10 minutes apart
regardless of when the instrument recorded data inside that interval.
This time may also be referred to as the repeat cycle, repeat slot, or time
slot.
* ``nominal_end_time``: Same as ``nominal_start_time``, but the end of the
interval.
In general, ``start_time`` and ``end_time`` will be set to the "nominal"
time by the reader. This ensures that other Satpy components get a
consistent time for calculations (ex. generation of solar zenith angles)
and can be reused between bands.
See the :ref:`data_array_coordinates` section below for more information on
time information that may show up as a per-element/row "coordinate" on the
DataArray (ex. acquisition time) instead of as metadata.
.. _orbital_parameters:
Orbital Parameters
------------------
Orbital parameters describe the position of the satellite. As such they
typically come in a few "flavors" for the common types of orbits a satellite
may have.
For *geostationary* satellites it is described using the following scalar attributes:
* ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the
time of observation in geodetic coordinates (i.e. altitude is relative and normal to the
surface of the ellipsoid). The longitude and latitude are given in degrees, the altitude in meters.
* ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a
  confined area in which the satellite is actively maintained using maneuvers). In between
major maneuvers, when the satellite is permanently moved, the nominal position is constant.
The longitude and latitude are given in degrees, the altitude in meters.
* ``nadir_longitude/latitude``: Intersection of the instrument's Nadir with the surface of the
earth. May differ from the actual satellite position, if the instrument is pointing slightly
off the axis (satellite, earth-center). If available, this should be used to compute viewing
angles etc. Otherwise, use the actual satellite position. The values are given in degrees.
* ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This
should be used to compute lat/lon coordinates. Note that the projection center can differ
considerably from the actual satellite position. For example MSG-1 was at times positioned
at 3.4 degrees west, while the image data was re-projected to 0 degrees.
The longitude and latitude are given in degrees, the altitude in meters.
.. note:: For use in pyorbital, the altitude has to be converted to kilometers, see for example
:func:`pyorbital.orbital.get_observer_look`.
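For example, the nominal sub-satellite longitude of a geostationary dataset can
be read as follows (the dataset name is illustrative)::
    >>> orb = scn['IR_108'].attrs['orbital_parameters']
    >>> orb['satellite_nominal_longitude']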
For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of
the swath as ancillary datasets. Additional metadata related to the satellite position includes:
* ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit
.. _data_array_coordinates:
Coordinates
===========
Each :class:`~xarray.DataArray` produced by Satpy has several Xarray
coordinate variables added to them.
* ``x`` and ``y``: Projection coordinates for gridded and projected data.
By default `y` and `x` are the preferred **dimensions** for all 2D data, but
these **coordinates** are only added for gridded (non-swath) data. For 1D
data only the ``y`` dimension may be specified.
* ``crs``: A :class:`~pyproj.crs.CRS` object defining the Coordinate Reference
System for the data. Requires pyproj 2.0 or later to be installed. This is
stored as a scalar array by Xarray so it must be accessed by doing
``crs = my_data_arr.attrs['crs'].item()``. For swath data this defaults
to a ``longlat`` CRS using the WGS84 datum.
* ``longitude``: Array of longitude coordinates for swath data.
* ``latitude``: Array of latitude coordinates for swath data.
Readers are free to define any coordinates in addition to the ones above that
are automatically added. Other possible coordinates you may see:
* ``acq_time``: Instrument data acquisition time per scan or row of data.
Adding a Reader to Satpy
========================
This is described in the developer guide, see :doc:`dev_guide/custom_reader`.
Implemented readers
===================
SEVIRI L1.5 data readers
------------------------
.. automodule:: satpy.readers.seviri_base
:noindex:
SEVIRI HRIT format reader
^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.seviri_l1b_hrit
:noindex:
SEVIRI Native format reader
^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.seviri_l1b_native
:noindex:
SEVIRI netCDF format reader
^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.seviri_l1b_nc
:noindex:
Other xRIT-based readers
------------------------
.. automodule:: satpy.readers.hrit_base
:noindex:
JMA HRIT format reader
^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.hrit_jma
:noindex:
GOES HRIT format reader
^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.goes_imager_hrit
:noindex:
Electro-L HRIT format reader
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.electrol_hrit
:noindex:
hdf-eos based readers
---------------------
.. automodule:: satpy.readers.modis_l1b
:noindex:
.. automodule:: satpy.readers.modis_l2
:noindex:
satpy cf nc readers
---------------------
.. automodule:: satpy.readers.satpy_cf_nc
:noindex:
hdf5 based readers
------------------
.. automodule:: satpy.readers.agri_l1
:noindex:
.. automodule:: satpy.readers.ghi_l1
:noindex:
Arctica-M N1 HDF5 format reader
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: satpy.readers.msu_gsa_l1b
:noindex:
/satpy-0.43.0.tar.gz/satpy-0.43.0/doc/source/readers.rst
MultiScene (Experimental)
=========================
Scene objects in Satpy are meant to represent a single geographic region at
a specific single instant in time or range of time. This means they are not
suited for handling multiple orbits of polar-orbiting satellite data,
multiple time steps of geostationary satellite data, or other special data
cases. To handle these cases Satpy provides the `MultiScene` class. The below
examples will walk through some basic use cases of the MultiScene.
.. warning::
    These features are still early in development and may change over time as
    more user feedback is received and more features are added.
MultiScene Creation
-------------------
There are two ways to create a ``MultiScene``. Either by manually creating and
providing the scene objects,
>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> scenes = [
... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])
or by using the :meth:`MultiScene.from_files <satpy.multiscene.MultiScene.from_files>`
class method to create a ``MultiScene`` from a series of files. This uses the
:func:`~satpy.readers.group_files` utility function to group files by start
time or other filenames parameters.
>>> from satpy import MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
.. versionadded:: 0.12
The ``from_files`` and ``group_files`` functions were added in Satpy 0.12.
See below for an alternative solution.
For older versions of Satpy we can manually create the `Scene` objects used.
The :func:`~glob.glob` function and for loops are used to group files into
Scene objects that, if used individually, could load the data we want. The
code below is equivalent to the ``from_files`` code above:
>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> scene_files = []
>>> for time_step in ['1800', '1810', '1820', '1830']:
... scene_files.append(glob('/data/abi/day_1/*C0[12]*s???????{}*.nc'.format(time_step)))
>>> scenes = [
... Scene(reader='abi_l1b', filenames=files) for files in sorted(scene_files)
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['C01', 'C02'])
Blending Scenes in MultiScene
-----------------------------
Scenes contained in a MultiScene can be combined in different ways.
Stacking scenes
***************
The code below uses the :meth:`~satpy.multiscene.MultiScene.blend` method of
the ``MultiScene`` object to stack two separate orbits from a VIIRS sensor. By
default the ``blend`` method will use the :func:`~satpy.multiscene.stack`
function which uses the first dataset as the base of the image and then
iteratively overlays the remaining datasets on top.
>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> scenes = [
... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])
>>> new_mscn = mscn.resample(my_area)
>>> blended_scene = new_mscn.blend()
>>> blended_scene.save_datasets()
Stacking scenes using weights
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It is also possible to blend scenes together in a bit more sophisticated manner
using pixel based weighting instead of just stacking the scenes on top of each
other as described above. This can, for instance, be useful to make a cloud
parameter (cover, height, etc.) composite combining cloud parameters derived
from both geostationary and polar orbiting satellite data close in time and
over a given area. This is particularly useful at high latitudes, where
geostationary data degrade quickly with latitude and polar data are more
frequent.
This weighted blending can be accomplished via the use of the builtin
:func:`~functools.partial` function (see `Partial
<https://docs.python.org/3/library/functools.html#partial-objects>`_) and the
default :func:`~satpy.multiscene.stack` function. The
:func:`~satpy.multiscene.stack` function can take the optional argument
`weights` (`None` on default) which should be a sequence (of length equal to
the number of scenes being blended) of arrays with pixel weights.
The code below gives an example of how two cloud scenes can be blended using
the satellite zenith angles to weight which pixels to take from each of the two
scenes, the idea being that the reliability of the cloud parameter is higher
when the satellite zenith angle is small.
>>> from glob import glob
>>> from functools import partial
>>> from satpy import Scene, MultiScene, DataQuery
>>> from satpy.multiscene import stack
>>> from satpy.resample import get_area_def
>>> areaid = get_area_def("myarea")
>>> geo_scene = Scene(filenames=glob('/data/to/nwcsaf/geo/files/*nc'), reader='nwcsaf-geo')
>>> geo_scene.load(['ct'])
>>> polar_scene = Scene(filenames=glob('/data/to/nwcsaf/pps/noaa18/files/*nc'), reader='nwcsaf-pps_nc')
>>> polar_scene.load(['cma', 'ct'])
>>> mscn = MultiScene([geo_scene, polar_scene])
>>> groups = {DataQuery(name='CTY_group'): ['ct']}
>>> mscn.group(groups)
>>> resampled = mscn.resample(areaid, reduce_data=False)
>>> # geo_satz and n18_satz are satellite zenith angle arrays matching the
>>> # two scenes; one way to compute them is sketched after this example
>>> weights = [1./geo_satz, 1./n18_satz]
>>> stack_with_weights = partial(stack, weights=weights)
>>> blended = resampled.blend(blend_function=stack_with_weights)
>>> blended.save_dataset('CTY_group', filename='./blended_stack_weighted_geo_polar.nc')
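The satellite zenith angle arrays used as weights above can, for example, be
derived with Satpy's angle utilities. A sketch, assuming the resampled
datasets carry the orbital parameters needed for the calculation:

>>> from satpy.modifiers.angles import get_satellite_zenith_angle
>>> geo_satz = get_satellite_zenith_angle(resampled.scenes[0]['ct'])
>>> n18_satz = get_satellite_zenith_angle(resampled.scenes[1]['ct'])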
Grouping Similar Datasets
^^^^^^^^^^^^^^^^^^^^^^^^^
By default, ``MultiScene`` only operates on datasets shared by all scenes.
Use the :meth:`~satpy.multiscene.MultiScene.group` method to specify groups
of datasets that shall be treated equally by ``MultiScene``, even if their
names or wavelengths are different.
Example: Stacking scenes from multiple geostationary satellites acquired at
roughly the same time. First, create scenes and load datasets individually:
>>> from satpy import Scene
>>> from glob import glob
>>> h8_scene = Scene(filenames=glob('/data/HS_H08_20200101_1200*'),
... reader='ahi_hsd')
>>> h8_scene.load(['B13'])
>>> g16_scene = Scene(filenames=glob('/data/OR_ABI*s20200011200*.nc'),
... reader='abi_l1b')
>>> g16_scene.load(['C13'])
>>> met10_scene = Scene(filenames=glob('/data/H-000-MSG4*-202001011200-__'),
... reader='seviri_l1b_hrit')
>>> met10_scene.load(['IR_108'])
Now create a ``MultiScene`` and group the three similar IR channels together:
>>> from satpy import MultiScene, DataQuery
>>> mscn = MultiScene([h8_scene, g16_scene, met10_scene])
>>> groups = {DataQuery(name='IR_group', wavelength=(10, 11, 12)): ['B13', 'C13', 'IR_108']}
>>> mscn.group(groups)
Finally, resample the datasets to a common grid and blend them together:
>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> resampled = mscn.resample(my_area, reduce_data=False)
>>> blended = resampled.blend() # you can also use a custom blend function
You can access the results via ``blended['IR_group']``.
Timeseries
**********
Using the :meth:`~satpy.multiscene.MultiScene.blend` method with the
:func:`~satpy.multiscene.timeseries` function will combine
multiple scenes from different time slots by time. A single `Scene` with each
dataset/channel extended by the time dimension will be returned. If used
together with the :meth:`~satpy.scene.Scene.to_geoviews` method, creation of
interactive timeseries Bokeh plots is possible.
>>> from satpy import Scene, MultiScene
>>> from satpy.multiscene import timeseries
>>> from glob import glob
>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> scenes = [
... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])
>>> new_mscn = mscn.resample(my_area)
>>> blended_scene = new_mscn.blend(blend_function=timeseries)
>>> blended_scene['I04']
<xarray.DataArray (time: 2, y: 1536, x: 6400)>
dask.array<shape=(2, 1536, 6400), dtype=float64, chunksize=(1, 1536, 4096)>
Coordinates:
* time (time) datetime64[ns] 2012-02-25T18:01:24.570942 2012-02-25T18:02:49.975797
Dimensions without coordinates: y, x
Saving frames of an animation
-----------------------------
The MultiScene can take "frames" of data and join them together in a single
animation movie file. Saving animations requires the `imageio` python library
and for most available formats the ``ffmpeg`` command line tool suite should
also be installed. The below example saves a series of GOES-EAST ABI channel
1 and channel 2 frames to MP4 movie files.
>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)
This will compute one video frame (image) at a time and write it to the MPEG-4
video file. For users with more powerful systems it is possible to use
the ``client`` and ``batch_size`` keyword arguments to compute multiple frames
in parallel using the dask ``distributed`` library (if installed).
See the :doc:`dask distributed <dask:deploying-python>` documentation
for information on creating a ``Client`` object. If working on a cluster
you may want to use :doc:`dask jobqueue <jobqueue:index>` to take advantage
of multiple nodes at a time.
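For example, a minimal sketch using a local cluster (this assumes
``distributed`` is installed; the ``batch_size`` value is arbitrary):

>>> from dask.distributed import Client
>>> client = Client()  # starts a local cluster
>>> mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2,
... client=client, batch_size=4)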
It is possible to add an overlay or decoration to each frame of an
animation. For text added as a decoration, string substitution will be
applied based on the attributes of the dataset, for example:
>>> mscn.save_animation(
... "{name:s}_{start_time:%Y%m%d_%H%M}.mp4",
... enh_args={
... "decorate": {
... "decorate": [
... {"text": {
... "txt": "time {start_time:%Y-%m-%d %H:%M}",
... "align": {
... "top_bottom": "bottom",
... "left_right": "right"},
... "font": '/usr/share/fonts/truetype/arial.ttf',
... "font_size": 20,
... "height": 30,
... "bg": "black",
... "bg_opacity": 255,
... "line": "white"}}]}})
If your data cover an hour of ABI MESO channel 2 imagery lasting
from 2020-04-12 01:00-01:59, then the output file will be called
``C02_20200412_0100.mp4`` (because the first dataset/frame corresponds to
an image that started to be taken at 01:00), consist of sixty frames (one
per minute for MESO data), and each frame will have the start time for
that frame floored to the minute blended into the frame. Note that this
text is "burned" into the video and cannot be switched on or off later.
.. warning::
GIF images, although supported, are not recommended due to the large file
sizes that can be produced from only a few frames.
Saving multiple scenes
----------------------
The ``MultiScene`` object includes a
:meth:`~satpy.multiscene.MultiScene.save_datasets` method for saving the
data from multiple Scenes to disk. By default this will operate on one Scene
at a time, but similar to the ``save_animation`` method above this method can
accept a dask distributed ``Client`` object via the ``client`` keyword
argument to compute scenes in parallel (see documentation above). Note however
that some writers, like the ``geotiff`` writer, do not support multi-process
operations at this time and will fail when used with dask distributed. To save
multiple Scenes use:
>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_datasets(base_dir='/path/for/output')
Combining multiple readers
--------------------------
.. versionadded:: 0.23
The :meth:`~satpy.multiscene.MultiScene.from_files` constructor makes it
possible to combine multiple readers into a single MultiScene automatically.
It is no longer necessary for the user to create the
:class:`~satpy.scene.Scene` objects themselves. For example, you can combine
Advanced Baseline Imager (ABI) and Global Lightning Mapper (GLM) measurements.
Constructing a multi-reader MultiScene requires more parameters than a
single-reader one, because Satpy cannot reliably guess how to group
files belonging to different instruments. Consider the example of creating
a video with lightning superimposed on ABI channel 14 (11.2 µm)
using the built-in composite ``C14_flash_extent_density``.
This composite superimposes flash extent density from GLM (read with the
:class:`~satpy.readers.glm_l2.NCGriddedGLML2` or ``glm_l2`` reader) on ABI
channel 14 data (read with the :class:`~satpy.readers.abi_l1b.NC_ABI_L1B`
or ``abi_l1b`` reader), and therefore needs Scene objects that combine
both readers:
>>> import glob
>>> import satpy
>>> glm_dir = "/path/to/GLMC/"
>>> abi_dir = "/path/to/ABI/"
>>> ms = satpy.MultiScene.from_files(
... glob.glob(glm_dir + "OR_GLM-L2-GLMC-M3_G16_s202010418*.nc") +
... glob.glob(abi_dir + "C*/OR_ABI-L1b-RadC-M6C*_G16_s202010418*_e*_c*.nc"),
... reader=["glm_l2", "abi_l1b"],
... ensure_all_readers=True,
... group_keys=["start_time"],
... time_threshold=30)
>>> ms.load(["C14_flash_extent_density"])
>>> ms = ms.resample(ms.first_scene["C14"].attrs["area"])
>>> ms.save_animation("/path/for/output/{name:s}_{start_time:%Y%m%d_%H%M}.mp4")
In this example, we pass to
:meth:`~satpy.multiscene.MultiScene.from_files` the additional parameters
``ensure_all_readers=True, group_keys=["start_time"], time_threshold=30``
so we only get scenes at times that both ABI and GLM have a file starting
within 30 seconds from each other, and ignore all other differences for
the purposes of grouping the two. For this example, the ABI files occur
every 5 minutes but the GLM files (processed with glmtools) every minute.
Scenes where there is a GLM file without an ABI file starting within at
most ±30 seconds are skipped. The ``group_keys`` and ``time_threshold``
keyword arguments are processed by the :func:`~satpy.readers.group_files`
function. The heavy work of blending the two instruments together is
performed by the :class:`~satpy.composites.BackgroundCompositor` class
through the `"C14_flash_extent_density"` composite.
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/doc/source/multiscene.rst
| 0.903198 | 0.797399 |
multiscene.rst
|
pypi
|
Modifiers
=========
Modifiers are filters applied to datasets prior to computing composites.
They take at least one input (a dataset) and have exactly one output
(the same dataset, modified). They can take additional input datasets
or parameters.
Modifiers are defined in composites files in ``etc/composites`` within
``$SATPY_CONFIG_PATH``.
The instruction to use a certain modifier can be contained in a composite
definition or in a reader definition. If it is defined in a composite
definition, it is applied upon constructing the composite.
When using built-in composites, Satpy users do not need to understand
the mechanics of modifiers, as they are applied automatically.
The :doc:`composites` documentation contains information on how to apply
modifiers when creating new composites.
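For illustration, a hypothetical, simplified entry in a composites YAML file
could declare a modifier and then apply it to the solar prerequisites of a
composite like this (the composite name is made up; the class paths match the
table below):

.. code-block:: yaml

    modifiers:
      sunz_corrected:
        modifier: !!python/name:satpy.modifiers.geometry.SunZenithCorrector

    composites:
      my_overview:
        compositor: !!python/name:satpy.composites.GenericCompositor
        prerequisites:
          - wavelength: 0.6
            modifiers: [sunz_corrected]
          - wavelength: 0.8
            modifiers: [sunz_corrected]
          - 10.8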
Some readers read data where certain modifiers are already applied. Here,
the reader definition will refer to the Satpy modifier. This marking
adds the modifier to the metadata to prevent it from being applied again
upon composite calculation.
Commonly used modifiers are listed in the table below. Further details
on those modifiers can be found in the linked API documentation.
.. list-table:: Commonly used modifiers
:header-rows: 1
* - Label
- Class
- Description
* - ``sunz_corrected``
- :class:`~satpy.modifiers.geometry.SunZenithCorrector`
- Modifies solar channels for the solar zenith angle to provide
smoother images.
* - ``effective_solar_pathlength_corrected``
- :class:`~satpy.modifiers.geometry.EffectiveSolarPathLengthCorrector`
- Modifies solar channels for atmospheric path length of solar radiation.
* - ``nir_reflectance``
- :class:`~satpy.modifiers.spectral.NIRReflectance`
- Calculates reflective part of channels at the edge of solar and
terrestrial radiation (3.7 µm or 3.9 µm).
* - ``nir_emissive``
- :class:`~satpy.modifiers.spectral.NIREmissivePartFromReflectance`
- Calculates emissive part of channels at the edge of solar and terrestrial
radiation (3.7 µm or 3.9 µm).
* - ``rayleigh_corrected``
- :class:`~satpy.modifiers.atmosphere.PSPRayleighReflectance`
- Modifies solar channels to filter out the visual impact of Rayleigh
scattering.
A complete list can be found in the `etc/composites
<https://github.com/pytroll/satpy/tree/main/satpy/etc/composites>`_
source code and in the :mod:`~satpy.modifiers` module documentation.
Parallax correction
-------------------
.. warning::
The Satpy parallax correction is experimental and subject to change.
Since version 0.37 (mid 2022), Satpy has included a
modifier for parallax correction, implemented in the
:class:`~satpy.modifiers.parallax.ParallaxCorrectionModifier` class.
This modifier is important for some applications, but not applied
by default to any Satpy datasets or composites, because it can be
applied to any input dataset and used with any source of (cloud top)
height. Therefore, users wishing to apply the parallax correction
semi-automagically have to define their own modifier and then apply
that modifier for their datasets. An example is included
with the :class:`~satpy.modifiers.parallax.ParallaxCorrectionModifier`
API documentation. Note that Satpy cannot apply modifiers to
composites, so users wishing to apply parallax correction to a composite
will have to use a lower level API or duplicate an existing composite
recipe to use modified inputs.
The parallax correction is directly calculated from the cloud top height.
Information on satellite position is obtained from cloud top height
metadata. If no orbital parameters are present in the cloud top height
metadata, Satpy will attempt to calculate orbital parameters from the
platform name and start time. The backup calculation requires skyfield
and astropy to be installed. If the metadata include neither orbital
parameters nor platform name and start time, parallax calculation will
fail. Because the cloud top height metadata are used, it is essential
that the cloud top height data are derived from the same platform as
the measurements to be corrected.
The parallax error moves clouds away from the observer. Therefore, the
parallax correction shifts clouds in the direction of the observer. The
space left behind by the cloud will be filled with fill values. As the
cloud is shifted toward the observer, it may occupy fewer pixels than before,
because pixels closer to the observer have a smaller surface area. It can
also be deformed (a "rectangular" cloud may get the shape of a parallelogram).
.. figure:: https://figshare.com/ndownloader/files/36422616/preview/36422616/preview.jpg
:width: 512
:height: 512
:alt: Satellite image without parallax correction.
SEVIRI view of southern Sweden, 2021-11-30 12:15Z, without parallax correction.
This is the ``natural_color`` composite as built into Satpy.
.. figure:: https://figshare.com/ndownloader/files/36422613/preview/36422613/preview.jpg
:width: 512
:height: 512
:alt: Satellite image with parallax correction.
The same satellite view with parallax correction. The most obvious change
are the gaps left behind by the parallax correction, shown as black pixels.
Otherwise it shows that clouds have "moved" south-south-west in the direction
of the satellite. To view the images side-by-side or alternating, look at
`the figshare page <https://figshare.com/articles/figure/20211130121510-Meteosat-11-seviri-sswe-parallax_corrected_natural_color_jpg/20377203>`_
The utility function :func:`~satpy.modifiers.parallax.get_surface_parallax_displacement` can be used to calculate the magnitude of the parallax error. For a cloud with a cloud top height of 10 km:
.. figure:: https://figshare.com/ndownloader/files/36462435/preview/36462435/preview.jpg
:width: 512
:height: 512
:alt: Figure showing magnitude of parallax effect.
Magnitude of the parallax error for a fictitious cloud with a cloud top
height of 10 km for the GOES-East (GOES-16) full disc.
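As a rough sketch of how this utility might be called: the argument order
follows the API documentation, but the satellite position and the units of
the altitude and height arguments are assumptions here and should be checked
against the API docs.

.. code-block:: python

    import numpy as np
    from satpy.modifiers.parallax import get_surface_parallax_displacement

    # Hypothetical geostationary satellite at 0° lon/lat; altitude and
    # cloud top height are assumed to be in metres here.
    disp = get_surface_parallax_displacement(
        0.0, 0.0, 35_785_831.0,              # satellite lon, lat, altitude
        np.array([10.0]), np.array([55.0]),  # surface lon/lat of the cloud
        np.array([10_000.0]))                # cloud top height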
The parallax correction is currently experimental and subject to change.
Although it is covered by tests, there may be cases that yield unexpected
or incorrect results. It does not yet perform any checks that the
provided (cloud top) height covers the area of the dataset for which
the parallax correction shall be applied.
For more general background information and web routines related to the
parallax effect, see also `this collection at the CIMSS website <https://cimss.ssec.wisc.edu/goes/webapps/parallax/>`_.
.. versionadded:: 0.37
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/doc/source/modifiers.rst
| 0.91365 | 0.742982 |
modifiers.rst
|
pypi
|
============================
Migrating to xarray and dask
============================
Many python developers dealing with meteorological satellite data begin by
using NumPy arrays directly. This work usually involves masked arrays,
boolean masks, index arrays, and reshaping. Due to the libraries used by
Satpy these operations can't always be done in the same way. This guide acts
as a starting point for new Satpy developers in transitioning from NumPy's
array operations to Satpy's operations, although they are very similar.
To provide the most functionality for users,
Satpy uses the `xarray <http://xarray.pydata.org/en/stable/>`_ library's
:class:`~xarray.DataArray` object as the main representation for its data.
DataArray objects can also benefit from the
`dask <https://dask.pydata.org/en/latest/>`_ library. The combination of
these libraries allows Satpy to easily distribute operations over multiple
workers, lazily evaluate operations, and keep track of additional metadata and
coordinate information.
XArray
------
.. code-block:: python
import xarray as xr
:class:`XArray's DataArray <xarray.DataArray>` is now the standard data
structure for arrays in satpy. It allows the array to define dimensions,
coordinates, and attributes (that we use for metadata).
To create such an array, you can do for example
.. code-block:: python
my_dataarray = xr.DataArray(my_data, dims=['y', 'x'],
coords={'x': np.arange(...)},
attrs={'sensor': 'olci'})
where ``my_data`` can be a regular numpy array, a numpy memmap, or, if you
want to keep things lazy, a dask array (more on dask later). Satpy uses dask
arrays with all of its DataArrays.
Dimensions
**********
In satpy, the dimensions of the arrays should include:
- `x` for the x or column or pixel dimension
- `y` for the y or row or line dimension
- `bands` for composites
- `time` can also be provided, but we have limited support for it at the
moment. Use metadata for common cases (`start_time`, `end_time`)
Dimensions are accessible through
:attr:`my_dataarray.dims <xarray.DataArray.dims>`. To get the size of a
given dimension, use :attr:`~xarray.DataArray.sizes`:
.. code-block:: python
my_dataarray.sizes['x']
Coordinates
***********
Coordinates can be defined for those dimensions when it makes sense:
- `x` and `y`: Usually defined when the data's area is an
:class:`~pyresample.geometry.AreaDefinition`, and they contain
the projection coordinates in x and y.
- `bands`: Contains the letters of the colors they represent, eg
``['R', 'G', 'B']`` for an RGB composite.
This then allows selecting, for example, a single band like this:
.. code-block:: python
red = my_composite.sel(bands='R')
or even multiple bands:
.. code-block:: python
red_and_blue = my_composite.sel(bands=['R', 'B'])
To access the coordinates of the data array, use the following syntax:
.. code-block:: python
x_coords = my_dataarray['x']
my_dataarray['y'] = np.arange(...)
Most of the time, satpy will fill the coordinates for you, so you just need to provide the dimension names.
Attributes
**********
To save metadata, we use the :attr:`~xarray.DataArray.attrs` dictionary.
.. code-block:: python
my_dataarray.attrs['platform_name'] = 'Sentinel-3A'
Some metadata that should always be present in our dataarrays:
- ``area``: the area of the dataset. This should be handled in the reader.
- ``start_time``, ``end_time``
- ``sensor``
Operations on DataArrays
************************
DataArrays work with regular arithmetic operations as one would expect of eg
numpy arrays, with the exception that using an operator on two DataArrays
requires both arrays to share the same dimensions, and coordinates if those
are defined.
For mathematical functions like cos or log, you can use numpy functions
directly and they will return a DataArray object:
.. code-block:: python
import numpy as np
cos_zen = np.cos(zen_xarray)
Masking data
************
In DataArrays, masked data is represented with NaN values. Hence the default
type is ``float64``, but ``float32`` also works in this case. XArray can't
handle masked data for integer data, but in satpy we try to use the special
``_FillValue`` attribute (in ``.attrs``) to handle this case. If you come
across a case where this isn't handled properly, contact us.
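For instance, an integer array might carry its fill value like this (a
sketch with made-up values):

.. code-block:: python

    import numpy as np
    import xarray as xr

    counts = xr.DataArray(
        np.array([[0, 1], [2, 65535]], dtype=np.uint16),
        dims=['y', 'x'],
        attrs={'_FillValue': 65535})  # 65535 marks missing samples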
Masking data from a condition can be done with:
.. code-block:: python
result = my_dataarray.where(my_dataarray > 5)
The result is then analogous to ``my_dataarray``, with values lower than or equal to 5 replaced by NaNs.
Further reading
***************
http://xarray.pydata.org/en/stable/generated/xarray.DataArray.html#xarray.DataArray
Dask
----
.. code-block:: python
import dask.array as da
The data part of the DataArrays we use in satpy consists mostly of dask Arrays. That allows lazy and chunked operations for efficient processing.
Creation
********
From a numpy array
++++++++++++++++++
To create a dask array from a numpy array, one can call the
:func:`~dask.array.from_array` function:
.. code-block:: python
darr = da.from_array(my_numpy_array, chunks=4096)
The *chunks* keyword tells dask the size of a chunk of data. If the numpy
array is 3-dimensional, the chunk size provided above means that one chunk
will be 4096x4096x4096 elements. To prevent this, one can provide a tuple:
.. code-block:: python
darr = da.from_array(my_numpy_array, chunks=(4096, 1024, 2))
meaning a chunk will be 4096x1024x2 elements in size.
Even more detailed sizes for the chunks can be provided if needed, see the
:doc:`dask documentation <dask:array-chunks>`.
From memmaps or other lazy objects
++++++++++++++++++++++++++++++++++
To avoid loading the data into memory when creating a dask array, other kinds
of arrays can be passed to :func:`~dask.array.from_array`. For example, a
numpy memmap allows dask to know where the data is, and will only be loaded
when the actual values need to be computed. Another example is an hdf5
variable read with h5py.
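For instance, a sketch wrapping a numpy memmap (the file name and shape are
hypothetical):

.. code-block:: python

    import dask.array as da
    import numpy as np

    # Map the file into memory without reading it; values are only loaded
    # when chunks are actually computed.
    mm = np.memmap("my_data.dat", dtype=np.float32, mode="r",
                   shape=(10000, 10000))
    darr = da.from_array(mm, chunks=4096)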
Procedural generation of data
+++++++++++++++++++++++++++++
Some procedural generation functions are available in dask, eg
:func:`~dask.array.meshgrid`, :func:`~dask.array.arange`, or
:func:`random.random <dask.array.random.random>`.
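A minimal sketch generating lazy data directly as dask arrays:

.. code-block:: python

    import dask.array as da

    x = da.arange(0, 4096, chunks=1024)                   # lazy 1-D ramp
    yy, xx = da.meshgrid(x, x, indexing='ij')             # lazy coordinate grids
    noise = da.random.random((4096, 4096), chunks=1024)   # lazy random field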
From XArray to Dask and back
****************************
Certain operations are easiest to perform on dask arrays by themselves,
especially when certain functions are only available from the dask library.
In these cases you can operate on the dask array beneath the DataArray and
create a new DataArray when done. Note dask arrays do not support in-place
operations. In-place operations on xarray DataArrays will reassign the dask
array automatically.
.. code-block:: python
dask_arr = my_dataarray.data
dask_arr = dask_arr + 1
# ... other non-xarray operations ...
new_dataarr = xr.DataArray(dask_arr, dims=my_dataarray.dims, attrs=my_dataarray.attrs.copy())
Or if the operation should be assigned back to the original DataArray (if and
only if the data is the same size):
.. code-block:: python
my_dataarray.data = dask_arr
Operations and how to get actual results
****************************************
Regular arithmetic operations are provided, and generate another dask array.
>>> arr1 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100)
>>> arr2 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100)
>>> arr1 + arr2
dask.array<add, shape=(1000, 1000), dtype=float64, chunksize=(100, 100)>
In order to compute the actual data during testing, use the
:func:`~dask.compute` method.
In normal Satpy operations you will want the data to be evaluated as late as
possible to improve performance so `compute` should only be used when needed.
>>> (arr1 + arr2).compute()
array([[ 898.08811639, 1236.96107629, 1154.40255292, ...,
1537.50752674, 1563.89278664, 433.92598566],
[ 1657.43843608, 1063.82390257, 1265.08687916, ...,
1103.90421234, 1721.73564104, 1276.5424228 ],
[ 1620.11393216, 212.45816261, 771.99348555, ...,
1675.6561068 , 585.89123159, 935.04366354],
...,
[ 1533.93265862, 1103.33725432, 191.30794159, ...,
520.00434673, 426.49238283, 1090.61323471],
[ 816.6108554 , 1526.36292498, 412.91953023, ...,
982.71285721, 699.087645 , 1511.67447362],
[ 1354.6127365 , 1671.24591983, 1144.64848757, ...,
1247.37586051, 1656.50487092, 978.28184726]])
Dask also provides `cos`, `log` and other mathematical functions, which you
can use with :func:`da.cos <dask.array.cos>` and
:func:`da.log <dask.array.log>`. However, since satpy uses xarrays as its
standard data structure, prefer the xarray functions when possible (they in
turn call the dask counterparts where applicable).
Wrapping non-dask friendly functions
************************************
Some operations are not supported by dask yet or are difficult to convert to
take full advantage of dask's multithreaded operations. In these cases you
can wrap a function to run on an entire dask array when it is being computed
and pass on the result. Note that this requires fully computing all of the
dask inputs to the function; they are passed as numpy arrays or, in the case
of an XArray DataArray, as a DataArray with a numpy array
underneath. You should *NOT* use dask functions inside the delayed function.
.. code-block:: python
import dask
import dask.array as da
def _complex_operation(my_arr1, my_arr2):
return my_arr1 + my_arr2
delayed_result = dask.delayed(_complex_operation)(my_dask_arr1, my_dask_arr2)
# to create a dask array to use in the future
my_new_arr = da.from_delayed(delayed_result, dtype=my_dask_arr1.dtype, shape=my_dask_arr1.shape)
Dask Delayed objects can also be computed with ``delayed_result.compute()`` if
the array is not needed or if the function doesn't return an array.
http://dask.pydata.org/en/latest/array-api.html#dask.array.from_delayed
Map dask blocks to non-dask friendly functions
**********************************************
If the complicated operation you need to perform can be vectorized and does
not need the entire data array to do its operations you can use
:func:`da.map_blocks <dask.array.core.map_blocks>` to get better performance
than creating a delayed function. Similar to delayed functions, the inputs to
the function are fully computed DataArrays or numpy arrays, but only the
individual chunks of the dask array at a time. Note that ``map_blocks`` must
be provided dask arrays and won't function properly on XArray DataArrays.
It is recommended that the function object passed to ``map_blocks`` **not**
be an internal function (a function defined inside another function) or it
may be unserializable and can cause issues in some environments.
.. code-block:: python
my_new_arr = da.map_blocks(_complex_operation, my_dask_arr1, my_dask_arr2, dtype=my_dask_arr1.dtype)
Helpful functions
*****************
- :func:`~dask.array.core.map_blocks`
- :func:`~dask.array.map_overlap`
- :func:`~dask.array.core.atop`
- :func:`~dask.array.store`
- :func:`~dask.array.tokenize`
- :func:`~dask.compute`
- :doc:`dask:delayed`
- :func:`~dask.array.rechunk`
- :attr:`~dask.array.Array.vindex`
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/doc/source/dev_guide/xarray_migration.rst
| 0.93729 | 0.875681 |
xarray_migration.rst
|
pypi
|
======================================================
Satpy internal workings: having a look under the hood
======================================================
Querying and identifying data arrays
====================================
DataQuery
---------
The loading of data in Satpy is usually done through giving the name or the wavelength of the data arrays we are interested
in. This way, the highest-resolution, best-calibrated data array available is often returned.
However, in some cases, we need more control over the loading of the data arrays. The way to accomplish this is to load
data arrays using queries, eg::
scn.load([DataQuery(name='channel1', resolution=400)])
Here a data array with name `channel1` and of resolution `400` will be loaded if available.
Note that None is not a valid value, and keys having a value set to None will simply be ignored.
If one wants to use wildcards to query data, just provide `'*'`, eg::
scn.load([DataQuery(name='channel1', resolution=400, calibration='*')])
Alternatively, one can provide a list as parameter to query data, like this::
scn.load([DataQuery(name='channel1', resolution=[400, 800])])
DataID
------
Satpy stores loaded data arrays in a special dictionary (`DatasetDict`) inside scene objects.
In order to identify each data array uniquely, Satpy assigns an ID to each data array, which is then used as the key in
the scene object. These IDs are of type `DataID` and are immutable. They are not supposed to be used by regular users and should only be
created in special circumstances. Satpy should take care of creating and assigning these automatically. They are also stored in the
`attrs` of each data array as `_satpy_id`.
Default and custom metadata keys
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
One thing however that the user has control over is which metadata keys are relevant to which datasets. Satpy provides two default sets
of metadata keys (or ID keys), one for regular imager bands, and the other for composites.
The first one contains: name, wavelength, resolution, calibration, modifiers.
The second one contains: name, resolution.
As an example here is the definition of the first one in yaml:
.. code-block:: yaml
data_identification_keys:
name:
required: true
wavelength:
type: !!python/name:satpy.dataset.WavelengthRange
resolution:
calibration:
enum:
- reflectance
- brightness_temperature
- radiance
- counts
transitive: true
modifiers:
required: true
default: []
type: !!python/name:satpy.dataset.ModifierTuple
To create a new set, the user can provide indications in the relevant yaml file.
It has to be provided in the header of the reader configuration file, under the `reader`
section, as `data_identification_keys`. Each key under this is the name of a relevant
metadata key that will be used to find relevant information in the attributes of the data
arrays. Under each of these, a few options are available:
- `required`: if the item is required, False by default
- `type`: the type to use. More on this further down.
- `enum`: if the item has to be limited to a finite number of options, an enum can be used.
Be sure to place the options in the order of preference, with the most desirable option on top.
- `default`: the default value to assign to the item if nothing (or None) is provided. If this
option isn't provided, the key will simply be omitted if it is not present in the attrs or if it
is None. It will be passed to the type's `convert` method if available.
- `transitive`: whether the key is to be passed when looking for dependencies of composites/modifiers.
For example, a composite that has a given calibration type will pass this calibration
type requirement on to its dependencies.
If the definition of the metadata keys needs to be done in python rather than in a yaml file, it will
be a dictionary very similar to the yaml code. Here is the same example as above in python:
.. code-block:: python
from satpy.dataset import WavelengthRange, ModifierTuple
id_keys_config = {'name': {
'required': True,
},
'wavelength': {
'type': WavelengthRange,
},
'resolution': None,
'calibration': {
'enum': [
'reflectance',
'brightness_temperature',
'radiance',
'counts'
],
'transitive': True,
},
'modifiers': {
'required': True,
'default': ModifierTuple(),
'type': ModifierTuple,
},
}
Types
~~~~~
Types are classes that implement a type to be used as value for metadata in the `DataID`. They have
to implement a few methods:
- a `convert` class method that returns its argument as an instance of the class
- `__hash__`, `__eq__` and `__ne__` methods
- a `distance` method that tells how "far" an instance of this class is from its argument.
An example of such a class is the :class:`WavelengthRange <satpy.dataset.WavelengthRange>` class.
Through its implementation, it allows us, for example, to use the wavelength in a query to find the
DataID in a list whose central wavelength is closest to that query.
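As an illustration, a minimal (hypothetical) value type satisfying this
interface could look like the following; subclassing ``float`` provides the
hashing and equality methods for free:

.. code-block:: python

    class Resolution(float):
        """Hypothetical DataID value type for a resolution in metres."""

        @classmethod
        def convert(cls, value):
            """Return *value* as an instance of this class."""
            return cls(value)

        def distance(self, value):
            """Return how "far" *value* is from this resolution."""
            try:
                return abs(self - float(value))
            except (TypeError, ValueError):
                return float("inf")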
DataID and DataQuery interactions
=================================
Different DataIDs and DataQuerys can have different metadata items defined. As such
we define equality between different instances of these classes, and across the classes
as equality between the sorted key/value pairs shared between the instances.
If a DataQuery has one or more values set to `'*'`, the corresponding key/value pair will be omitted from the comparison.
Instances sharing no keys will not be equal.
Breaking changes from DatasetIDs
================================
- The way to access values from the DataID and DataQuery is through getitem: `my_dataid['resolution']`
- For checking if a dataset is loaded, use `'mydataset' in scene`, as `'mydataset' in scene.keys()` will always return `False`:
the `DatasetDict` instance only supports `DataID` as key type.
Creating DataID for tests
=========================
Sometimes, it is useful to create `DataID` instances for testing purposes. For these cases, the `satpy.tests.utils` module
now has a `make_dataid` function that can be used just for this::
from satpy.tests.utils import make_dataid
did = make_dataid(name='camembert', modifiers=('runny',))
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/doc/source/dev_guide/satpy_internals.rst
| 0.931641 | 0.846895 |
satpy_internals.rst
|
pypi
|
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
from urllib.request import urlopen
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urlopen(url).read().decode('utf-8')
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/utils/fetch_avhrr_calcoeffs.py
| 0.48121 | 0.212692 |
fetch_avhrr_calcoeffs.py
|
pypi
|
"""Benchmark VIIRS SDR operations.."""
from __future__ import annotations
import glob
import os
from pyspectral.rayleigh import check_and_download as download_luts
from pyspectral.rsr_reader import check_and_download as download_rsr
class VIIRSSDRBenchmarkBase:
"""Shared methods for working with VIIRS SDR data."""
timeout = 600
data_files: list[str] = []
def setup_cache(self):
"""Fetch the data files."""
try:
from satpy.demo import get_viirs_sdr_20170128_1229
get_viirs_sdr_20170128_1229(
channels=("I01", "M03", "M04", "M05"),
granules=(2, 3, 4))
except ImportError:
assert len(self.get_filenames()) == 6 * 3 # nosec
download_rsr()
download_luts(aerosol_type='rayleigh_only')
def setup(self, name):
"""Set up the benchmarks."""
import dask.config
self.data_files = self.get_filenames()
dask.config.set({"array.chunk-size": "32MiB"})
def get_filenames(self):
"""Get the data filenames manually."""
base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".")
return glob.glob(os.path.join(base_dir, "viirs_sdr", "20170128_1229", "*.h5"))
def load(self, composite):
"""Load one composite."""
from satpy import Scene
scn = Scene(filenames=self.data_files, reader='viirs_sdr')
scn.load([composite])
return scn
def load_and_native_resample(self, composite):
"""Load and native resample a composite."""
scn = self.load(composite)
lscn = scn.resample(resampler='native')
return lscn
class VIIRSSDRReaderBenchmarks(VIIRSSDRBenchmarkBase):
"""Benchmark reading and writing VIIRS SDR data."""
params = ["I01", "M03"]
param_names = ["name"]
def time_load_one_channel(self, name):
"""Time the loading of one channel."""
self.compute_product(name)
def peakmem_load_one_channel(self, name):
"""Check peak memory usage of loading one channel."""
self.compute_product(name)
def compute_product(self, name):
"""Load and compute one channel."""
scn = self.load(name)
scn[name].compute()
class VIIRSSDRCompositeBenchmarks(VIIRSSDRBenchmarkBase):
"""Benchmark generating and writing composites from VIIRS SDR data."""
params = ["true_color", "true_color_crefl", "true_color_raw"]
param_names = ["name"]
def time_load_composite(self, name):
"""Time the loading of the generation of a composite."""
self.compute_composite(name)
def peakmem_load_composite(self, name):
"""Check peak memory usage of the generation of a composite."""
self.compute_composite(name)
def time_save_composite_to_geotiff(self, name):
"""Time the generation and saving of a composite."""
self.save_composite_as_geotiff(name)
def peakmem_save_composite_raw_to_geotiff(self, name):
"""Check peak memory usage of the generation and saving of a composite."""
self.save_composite_as_geotiff(name)
def compute_composite(self, name):
"""Compute a composite."""
lscn = self.load_and_native_resample(name)
lscn[name].compute()
def save_composite_as_geotiff(self, name):
"""Save a composite to disk as geotiff."""
lscn = self.load_and_native_resample(name)
lscn.save_dataset(name, filename='test.tif', tiled=True)
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/benchmarks/viirs_sdr_benchmarks.py
| 0.85376 | 0.427337 |
viirs_sdr_benchmarks.py
|
pypi
|
"""Benchmark AHI HSD operations.."""
from __future__ import annotations
import os
from pyspectral.rayleigh import check_and_download as download_luts
from pyspectral.rsr_reader import check_and_download as download_rsr
from benchmarks.utils import GeoBenchmarks, get_filenames
class HimawariHSD(GeoBenchmarks):
"""Benchmark Himawari HSD reading."""
timeout = 600
data_files: list[str] = []
subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae")
reader = 'ahi_hsd'
def setup_cache(self):
"""Fetch the data files."""
try:
from satpy.demo import download_typhoon_surigae_ahi
download_typhoon_surigae_ahi(channels=[1, 2, 3, 4], segments=[4])
except ImportError:
assert len(get_filenames(self.subdir)) == 4 # nosec
download_rsr()
download_luts(aerosol_type='rayleigh_only')
def setup(self):
"""Set up the benchmarks."""
import dask.config
self.data_files = get_filenames(self.subdir)
dask.config.set({"array.chunk-size": "32MiB"})
def time_load_one_channel(self):
"""Time the loading of one channel."""
self.compute_channel("B01")
def peakmem_load_one_channel(self):
"""Check peak memory usage of loading one channel."""
self.compute_channel("B01")
def time_load_true_color(self):
"""Time the loading of the generation of true_color."""
self.compute_composite("true_color")
def peakmem_load_true_color(self):
"""Check peak memory usage of the generation of true_color."""
self.compute_composite("true_color")
def time_save_true_color_nocorr_to_geotiff(self):
"""Time the generation and saving of true_color_nocorr."""
self.save_composite_as_geotiff("true_color_nocorr")
def peakmem_save_true_color_to_geotiff(self):
"""Check peak memory usage of the generation and saving of true_color_nocorr."""
self.save_composite_as_geotiff("true_color_nocorr")
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/benchmarks/ahi_hsd_benchmarks.py
| 0.782538 | 0.527682 |
ahi_hsd_benchmarks.py
|
pypi
|
"""Benchmark FCI FDHSI operations."""
from __future__ import annotations
import fnmatch
import os
import satpy
import satpy.demo.fci
from .utils import GeoBenchmarks
class FCI(GeoBenchmarks):
"""Benchmark FCI FDHSI test data reading."""
timeout = 600
region = "eurol"
reader = "fci_l1c_nc"
filenames: list[str] = []
def setup_cache(self, *args):
"""Fetch the data files."""
fns = self.get_filenames()
cnt = len(fns)
if cnt > 40:
raise ValueError(f"Expected 41 files, found {cnt:d}")
if cnt < 40:
fns = satpy.demo.download_fci_test_data()
def setup(self, *args):
"""Set location of data files."""
self.filenames = self.get_filenames()
def get_filenames(self):
"""Get filenames of FCI test data as already available."""
p = satpy.demo.fci.get_fci_test_data_dir()
g = p.glob("UNCOMPRESSED/NOMINAL/*-CHK-BODY-*.nc")
return [os.fspath(fn) for fn in g]
def time_create_scene(self, chunk):
"""Time to create a scene."""
names = self._get_filename_selection(chunk)
self.create_scene(names)
time_create_scene.params = ["some", "all"] # type: ignore
time_create_scene.param_names = ["channel subset"] # type: ignore
def peakmem_create_scene(self, chunk):
"""Peak RAM to create a scene."""
names = self._get_filename_selection(chunk)
self.create_scene(names)
peakmem_create_scene.params = time_create_scene.params # type: ignore
peakmem_create_scene.param_names = time_create_scene.param_names # type: ignore
def time_load(self, chunk, loadable):
"""Time to create a scene and load one channel or composite."""
names = self._get_filename_selection(chunk)
self.load_no_padding(loadable, names)
time_load.params = (time_create_scene.params, # type: ignore
["ir_105", "natural_color_raw"])
time_load.param_names = time_create_scene.param_names + ["dataset"] # type: ignore
def peakmem_load(self, chunk, loadable):
"""Peak RAM to create a scene and load one channel or composite."""
names = self._get_filename_selection(chunk)
self.load_no_padding(loadable, names)
peakmem_load.params = time_load.params # type: ignore
peakmem_load.param_names = time_load.param_names # type: ignore
def time_compute(self, chunk, loadable):
"""Time to create a scene and load and compute one channel."""
names = self._get_filename_selection(chunk)
self.compute_channel(loadable, names)
time_compute.params = time_load.params # type: ignore
time_compute.param_names = time_load.param_names # type: ignore
def peakmem_compute(self, chunk, loadable):
"""Peak memory for creating a scene and loading and computing one channel."""
names = self._get_filename_selection(chunk)
self.compute_channel(loadable, names)
peakmem_compute.params = time_compute.params # type: ignore
peakmem_compute.param_names = time_compute.param_names # type: ignore
def time_load_resample_compute(self, chunk, loadable, mode):
"""Time to load all chunks, resample, and compute."""
names = self._get_filename_selection(chunk)
self.compute_composite(loadable, mode, self.region, names)
time_load_resample_compute.params = time_load.params + ( # type: ignore
["nearest", "bilinear", "gradient_search"],)
time_load_resample_compute.param_names = time_load.param_names + ["resampler"] # type: ignore
def peakmem_load_resample_compute(self, chunk, loadable, mode):
"""Peak memory to load all chunks, resample, and compute."""
names = self._get_filename_selection(chunk)
self.compute_composite(loadable, mode, self.region, names)
peakmem_load_resample_compute.params = time_load_resample_compute.params # type: ignore
peakmem_load_resample_compute.param_names = time_load_resample_compute.param_names # type: ignore
def time_load_resample_save(self, chunk, loadable, mode):
"""Time to load all chunks, resample, and save."""
names = self._get_filename_selection(chunk)
self.save_composite_as_geotiff(loadable, mode, self.region, names)
time_load_resample_save.params = time_load_resample_compute.params # type: ignore
time_load_resample_save.param_names = time_load_resample_compute.param_names # type: ignore
def peakmem_load_resample_save(self, chunk, loadable, mode):
"""Peak memory to load all chunks, resample, and save."""
names = self._get_filename_selection(chunk)
self.save_composite_as_geotiff(loadable, mode, self.region, names)
peakmem_load_resample_save.params = time_load_resample_save.params # type: ignore
peakmem_load_resample_save.param_names = time_load_resample_save.param_names # type: ignore
def _get_filename_selection(self, selection):
if selection == "some":
return fnmatch.filter(self.filenames, "*3[0123].nc")
if selection == "all":
return self.filenames
raise ValueError("Expected selection some or all, got " +
selection)
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/benchmarks/fci_benchmarks.py
| 0.800887 | 0.242116 |
fci_benchmarks.py
|
pypi
|
"""Benchmark utilities."""
import os
def get_filenames(subdir):
"""Get the data filenames manually."""
import glob
base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".")
return glob.glob(os.path.join(base_dir, subdir, "*"))
class GeoBenchmarks:
"""Class for geo benchmarks."""
def create_scene(self, filenames=None):
"""Create a scene."""
from satpy import Scene
scn = Scene(filenames=filenames or self.data_files, reader=self.reader)
return scn
def load_no_padding(self, composite, filenames=None):
"""Load one composite or channel."""
scn = self.create_scene(filenames=filenames)
scn.load([composite], pad_data=False)
return scn
def load_and_native_resample(self, composite):
"""Load and native resample a composite or channel."""
return self.load_and_resample(composite, "native")
def load_and_resample(self, composite, resampler, area=None, filenames=None):
"""Load and resample a composite or channel with resampler and area."""
scn = self.load_no_padding(composite, filenames=filenames)
ls = scn.resample(area, resampler=resampler)
ls._readers = scn._readers # workaround for GH#1861
return ls
def compute_composite(self, composite, resampler="native",
area=None, filenames=None):
"""Compute a true color image."""
lscn = self.load_and_resample(
composite, resampler, area, filenames)
lscn[composite].compute()
def save_composite_as_geotiff(self, composite, resampler="native",
area=None, filenames=None):
"""Save a composite to disk as geotiff."""
lscn = self.load_and_resample(composite, resampler, area, filenames)
lscn.save_dataset(composite, filename='test.tif', tiled=True)
def compute_channel(self, channel, filenames=None):
"""Load and compute one channel."""
scn = self.load_no_padding(channel, filenames=filenames)
scn[channel].compute()
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/benchmarks/utils.py
| 0.728941 | 0.263454 |
utils.py
|
pypi
|
"""Benchmark ABI L1B operations."""
from __future__ import annotations
import os
from pyspectral.rayleigh import check_and_download as download_luts
from pyspectral.rsr_reader import check_and_download as download_rsr
from benchmarks.utils import GeoBenchmarks, get_filenames
class ABIL1B(GeoBenchmarks):
"""Benchmark ABI L1B reading."""
timeout = 600
data_files: list[str] = []
subdir = os.path.join("abi_l1b", "20190314_us_midlatitude_cyclone")
reader = "abi_l1b"
def setup_cache(self):
"""Fetch the data files."""
try:
from satpy.demo import get_us_midlatitude_cyclone_abi
get_us_midlatitude_cyclone_abi()
except ImportError:
if len(get_filenames(self.subdir)) != 16:
raise RuntimeError("Existing data files do not match the expected number of files.")
download_rsr()
download_luts(aerosol_type='rayleigh_only')
def setup(self):
"""Set up the benchmarks."""
import dask.config
self.data_files = get_filenames(self.subdir)
dask.config.set({"array.chunk-size": "32MiB"})
def time_load_one_channel(self):
"""Time the loading of one channel."""
self.compute_channel("C01")
def peakmem_load_one_channel(self):
"""Check peak memory usage of loading one channel."""
self.compute_channel("C01")
def time_load_true_color(self):
"""Time the loading of the generation of true_color."""
self.compute_composite("true_color")
def peakmem_load_true_color(self):
"""Check peak memory usage of the generation of true_color."""
self.compute_composite("true_color")
def time_save_true_color_nocorr_to_geotiff(self):
"""Time the generation and saving of true_color_nocorr."""
self.save_composite_as_geotiff("true_color_nocorr")
def peakmem_save_true_color_to_geotiff(self):
"""Check peak memory usage of the generation and saving of true_color_nocorr."""
self.save_composite_as_geotiff("true_color_nocorr")
|
/satpy-0.43.0.tar.gz/satpy-0.43.0/benchmarks/abi_l1b_benchmarks.py
| 0.816736 | 0.520984 |
abi_l1b_benchmarks.py
|
pypi
|
# Sats Receiver
Satellite data receiver based on GNURadio
<!-- TOC -->
* [About](#About)
* [Requirements](#Requirements)
* [Installation](#Installation)
* [From source](#From-source)
* [From PYPI](#From-PYPI)
* [Usage](#Usage)
* [Configure](#Configure)
* [observer](#observer)
* [tle](#tle)
* [receivers](#receivers)
* [sats](#sats)
* [frequencies](#frequencies)
* [modulations](#modulations)
* [decoders](#decoders)
* [gr-satellites](#gr-satellites)
* [Map Shapes](#Map-Shapes)
* [shapes](#shapes)
* [points](#points)
<!-- TOC -->
### About
This program is written to automate the process of receiving signals from
various orbiting satellites on your SDR. The basis for digital signal
processing is GNU Radio - a free software development toolkit that provides
signal processing blocks to implement software-defined radios and
signal-processing systems. [[wikipedia](https://en.wikipedia.org/wiki/GNU_Radio)]
For example, this program is perfect for receiving weather
satellites like NOAA.
If you have ideas or knowledge on how to improve this project, feel free to submit issues or pull requests.

### Requirements
The program has only been tested on Linux. Work on Windows is not guaranteed!
* Python>=3.10 (or lower, see below)
* GNURadio>=3.10 (or lower if gr-soapy installed); GUI modules are not required
* librtlsdr (if you use RTL-SDR)
### Installation
I recommend using miniconda. So, first of all,
[install it.](https://docs.conda.io/en/latest/miniconda.html#linux-installers)
#### From source
```commandline
cd sats-receiver
conda create -n sats-receiver-env
conda activate sats-receiver-env
conda config --env --add channels conda-forge
conda config --env --set channel_priority strict
conda env update -f environment.yml
pip install -r requirements.txt
```
#### From PYPI
```commandline
conda create -n sats-receiver-env python
conda activate sats-receiver-env
conda config --env --add channels conda-forge
conda config --env --set channel_priority strict
conda install gnuradio gnuradio-satellites
pip install sats-receiver
```
### Usage
First, activate conda environment:
`conda activate sats-receiver-env`
`python -u -m sats_receiver [-h, --help] [--log LOG] [--sysu SYSU] config`
* `config` Config file path. See [Configure](#Configure)
* `-h, --help` Help message
* `--log LOG` Logging level, INFO by default
* `--sysu SYSU` System usage debug info period in seconds, 1 hour by default
For example, a simple command line to launch the program:
`python -u -m sats_receiver /path/to/config.json`
You can copy the `default.json` config file from the root of the repository to a
location of your choice.
The program home directory is `~/sats_receiver`.
Logfiles are saved to the program home directory (`~/sats_receiver/logs`).
TLE files are stored in the program home directory (`~/sats_receiver/tle`).
### Configure
The configuration file is in JSON format.
You can copy the `default.json` file from the root of the repository to a
location of your choice and edit it.
| Field | Type | Description |
|:----------|:----------------|:-----------------------------------------------------------|
| observer | Object | Observer/receiver parameters (see [observer](#observer)) |
| tle | Object | TLE data parameters (see [tle](#tle)) |
| receivers | Array of Object | List of receivers parameters (see [receivers](#receivers)) |
#### observer
| Field | Type | Description |
|:----------|:---------------|:--------------------------------------------------------------------------------------------------------------------|
| latitude | Number | Receiver Latitude, degrees |
| longitude | Number | Receiver Longitude, degrees |
| elevation | Number or null | Receiver Elevation, meters. `null` means that the height will be obtained from the weather information or set to 0 |
| weather | Boolean | Whether to receive weather information from the Internet. The weather will be taken from the service open-meteo.com |
#### tle
| Field | Type | Description |
|:--------------|:-------|:--------------------------|
| url | String | URL to TLE file |
| update_period | Number | TLE Update period, hours. |
#### receivers
Each receiver object contains:
| Field | Type | Description |
|:-----------------|:----------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| name | String | Name of the Receiver |
| source | String | String value for gr-soapy driver key, e.g. `rtlsdr`, `lime`, `uhd`, `remote` |
| tune | Number | Receiver tune frequency, Hz |
| samp_rate | Number | Receiver sample rate, Hz |
| output_directory | String | Directory to save received files. You also might specify `~` symbol to specify User home directory |
| sats | Array of Object | List of Satellites configurations (see [sats](#sats)) |
| enabled | Boolean | _Optional._ Enable or Disable this Receiver. `true` by default |
| serial | String | _Optional._ Serial number of the receiver. Empty by default |
| biast | Boolean | _Optional._ Bias-T enable/disable (only for RTL-SDR at this time). `false` by default. **WARNING! Be careful when enabling this option! Use only if you know what it is and why!** |
| gain | Number | _Optional._ Receiver gain, dB. `0` by default |
| freq_correction | Number | _Optional._ Receiver frequency correction. `0.0` by default |
#### sats
Each satellite object contains:
| Field | Type | Description |
|:--------------|:----------------|:-----------------------------------------------------------------------------------------------------------|
| name | String | Name or NORAD number of the satellite. Note: name/norad-number must be contained in the above TLE file |
| frequencies | Array of Object | List of frequency configuration (see [frequencies](#frequencies)) |
| enabled | Boolean | _Optional._ Enable/Disable this satellite. `true` by default |
| min_elevation | Number | _Optional._ Minimum elevation angle above the horizon, degrees. `0` by default. Negative numbers are equivalent to 0 |
| doppler | Boolean | _Optional._ Enable/Disable doppler correction. `true` by default |
#### frequencies
Each frequency object contains:
| Field | Type | Description |
|:----------------|:----------------|:---------------------------------------------------------------------------------------|
| freq | Number | Basic signal frequency, Hz |
| bandwidth | Number | Received signal bandwidth, Hz |
| enabled | Boolean | _Optional._ Enable/Disable this frequency. `true` by default |
| subname | String | _Optional._ Subname added to result filename. Empty by default |
| freq_correction | Number | _Optional._ Correction for the basic frequency, Hz. `0` by default |
| mode | String | _Optional._ Modulation option (see [modulations](#modulations)). `RAW` by default |
| decode | String | _Optional._ Decoder option (see [decoders](#decoders)). `RAW` by default |
| channels | Array of Number | _Required only for **GMSK** mode._ Demodulation baudrates, bps |
| grs_file | String | _Optional. Only for **SATS** decoder._ See [gr-satellites](#gr-satellites) for details |
| grs_name | String | _Optional. Only for **SATS** decoder._ See [gr-satellites](#gr-satellites) for details |
| grs_norad | Integer | _Optional. Only for **SATS** decoder._ See [gr-satellites](#gr-satellites) for details |
| grs_tlm_decode | Boolean | _Optional. Only for **SATS** decoder._ Save decoded telemetry. `true` by default |
| qpsk_baudrate | Number | _Required only for **(O)QPSK** mode._ (O)QPSK Baudrate, bps |
| qpsk_excess_bw | Number | _Optional. Only for **(O)QPSK** mode._ (O)QPSK Excess bandwidth. `0.35` by default |
| qpsk_ntaps | Integer | _Optional. Only for **(O)QPSK** mode._ (O)QPSK number of taps. `33` by default |
| qpsk_costas_bw | Number | _Optional. Only for **(O)QPSK** mode._ (O)QPSK Costas bandwidth. `0.005` by default |
| sstv_wsr | Number | _Optional. Only for **SSTV** decoder._ SSTV work samplerate. `16000` by default |
| sstv_sync | Boolean | _Optional. Only for **SSTV** decoder._ Enable/Disable SSTV line syncing. `true` by default |
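Putting it together, a single entry from `receivers` with one satellite and one frequency might look like this sketch (names, frequencies and rates are illustrative only):

```json
{
    "name": "NOAA APT receiver",
    "source": "rtlsdr",
    "tune": 137500000,
    "samp_rate": 1024000,
    "output_directory": "~/sats_receiver/records",
    "sats": [
        {
            "name": "NOAA 19",
            "min_elevation": 20,
            "frequencies": [
                {
                    "freq": 137100000,
                    "bandwidth": 44000,
                    "mode": "WFM",
                    "decode": "APT"
                }
            ]
        }
    ]
}
```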
#### modulations
* `RAW`
* `AM`
* `FM`
* `WFM`
* `WFM_STEREO`
* `QUAD`
* `QPSK`
* `OQPSK`
* `GMSK`
#### decoders
* `RAW` Saved to 2-channel float32 WAV file with `bandwidth` sample rate
* `CSOFT` Constellation Soft Decoder - 1-channel binary int8. Suitable for further processing, for example, in SatDump
* `APT` Sats-Receiver APT binary file format. See [APT](sats_receiver/systems/README.md#APT)
* `SSTV` SSTV saved to PNG image with EXIF. Supported modes:
* Robot (24, 24, 72)
* Martin (M1, M2, M3, M4)
* PD (50, 90, 120, 160, 180, 240, 290)
* Scottie (S1, S2, S3, S4)
* `SATS` See [gr-satellites](#gr-satellites) for details
* ~~`LRPT`~~ Not implemented yet
##### gr-satellites
See [gr-satellites Documentation][grs-doc]
**IMPORTANT:** This decoder requires the `mode` option to be left at `RAW`.
This decoder requires one of the following parameters to identify the satellite (see the example below):
* grs_file - Path to your own [SatYAML-file][grs-satyaml]
* grs_name - Satellite name (may differ from the [sats name](#sats))
* grs_norad - Satellite NORAD ID
[List of builtin supported satellites][grs-satlist]
Additional supported satellites can be found in the [satyaml](satyaml) directory of this repository
[grs-doc]: https://gr-satellites.readthedocs.io/en/latest/
[grs-satyaml]: https://gr-satellites.readthedocs.io/en/latest/satyaml.html
[grs-satlist]: https://gr-satellites.readthedocs.io/en/latest/supported_satellites.html
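For example, a frequency entry that hands samples to gr-satellites might look like this sketch (frequency, bandwidth and satellite name are illustrative; the name must be one known to gr-satellites):

```json
{
    "freq": 437000000,
    "bandwidth": 48000,
    "mode": "RAW",
    "decode": "SATS",
    "grs_name": "GOMX-3"
}
```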
### Map Shapes
Map shapes config file `map_shapes.json` can be found at the root of this repository.
Shapefiles can be downloaded from [Natural Earth](https://www.naturalearthdata.com/downloads/)
| Field | Type | Description |
|:-----------|:-----------------|:-----------------------------------------------------------------------------------|
| shapes | Array of Array | _Optional._ List of shapes data (see [shapes](#shapes)) |
| shapes_dir | String | _Optional. Only when `shapes` is specified._ Path to the directory containing the shapefiles |
| points | Object of Object | _Optional._ Additional points to draw on map (see [points](#points)) |
| line_width | Number | _Optional._ Overlay lines width, pixels. `1` by default |
#### shapes
Each shape entry contains:
| Offset | Field | Type | Description |
|:-------|:----------|:---------------------------|:-------------------------------------------------------------------------------------------------------------------|
| 0 | order | Number | Drawing order. Higher values are drawn later |
| 1 | shapefile | String | Filename of the shapefile in the shapes dir. Can be a separate file or a ZIP archive |
| 2 | color | String or Array of Integer | Color. Can be a color name (e.g. `red`), a web hex string (e.g. `#abcdef`), or a 3- or 4-element array of 0-255 values (e.g. `[0, 127, 255]`) |
#### points
Each point object has a name.
If the name is `observer`, the `lonlat` field is filled with the observer location from the APT file.
Each point object contains:
| Field | Type | Description |
|:-------|:----------------------------|:-------------------------------------------------------------------------------------------------------------------------------|
| color | String or Array of Integer | Color. Can be a color name (e.g. `red`), a web hex string (e.g. `#abcdef`), or a 3- or 4-element array of 0-255 values (e.g. `[0, 127, 255]`) |
| type | String | Type of marker view. Can be `+` or `o` |
| size | Integer or Array of Integer | If `type` is `+`, a 2-element array of line width and line length, pixels. If `type` is `o`, an integer circle radius, pixels |
| lonlat | Array of Number | _Optional. **Only for non-observer names.**_ 2-element array of point longitude and latitude, degrees |
| order | Number | _Optional._ Same as in `shapes`. Defaults to last |
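A small `map_shapes.json` might look like the following sketch (the shapefile names are illustrative Natural Earth downloads):

```json
{
    "shapes_dir": "~/sats_receiver/shapes",
    "line_width": 1,
    "shapes": [
        [0, "ne_50m_coastline.zip", "#00ff00"],
        [1, "ne_50m_rivers_lake_centerlines.zip", [0, 127, 255]]
    ],
    "points": {
        "observer": {
            "color": "red",
            "type": "+",
            "size": [3, 21]
        }
    }
}
```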
|
/sats_receiver-0.1.76.tar.gz/sats_receiver-0.1.76/README.md
| 0.402627 | 0.820972 |
README.md
|
pypi
|
import datetime as dt
import logging
import pathlib
import shutil
import urllib.error
import urllib.parse
import urllib.request
from typing import Mapping, Optional, Union
import ephem
from sats_receiver import TLEDIR
class Tle:
TD_ERR_DEF = dt.timedelta(seconds=5)
def __init__(self, config: Mapping):
self.prefix = self.__class__.__name__
self.log = logging.getLogger(self.prefix)
self.config = {}
self.tle_file = pathlib.Path(TLEDIR / 'dummy')
self.last_update_tle = dt.datetime.fromtimestamp(0, dt.timezone.utc)
self.objects: dict[str, tuple[ephem.EarthSatellite, tuple[str, str, str]]] = {}
self.t_err = self.last_update_tle
self.td_err = self.TD_ERR_DEF
if not self.update_config(config):
raise ValueError(f'{self.prefix}: Invalid config!')
self.t_next = self.last_update_tle + dt.timedelta(days=self.update_period)
@staticmethod
def calc_checksum(full_line: str):
checksum = 0
for c in full_line[:-1]:
if c.isnumeric():
checksum += int(c)
elif c == '-':
checksum += 1
return str(checksum)[-1]
def fill_objects(self, tle_f: pathlib.Path, t: dt.datetime):
if tle_f is None:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
self.log.error('TLE file failed')
return
objects = {}
with tle_f.open() as f:
for line in f:
names = set()
while 0 < len(line) <= 69:
names.add(line.strip())
line = f.readline()
try:
names.add(int(line[2:7]))
except ValueError:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
self.log.error('Not TLE. Break')
return
l1 = line.rstrip()
l2 = f.readline().rstrip()
for name in names:
try:
objects[name] = ephem.readtle(str(name), l1, l2), (str(name), l1, l2)
except ValueError as e:
if str(e).startswith('incorrect TLE checksum'):
self.log.warning('%s: for `%s` expect %s:%s, got %s:%s',
e, name,
self.calc_checksum(l1), l1[-1],
self.calc_checksum(l2), l2[-1])
else:
raise e
self.objects = objects
shutil.move(tle_f, self.tle_file)
self.td_err = self.TD_ERR_DEF
self.t_err = t
return 1
def fetch_tle(self, t: dt.datetime):
try:
x = urllib.request.urlretrieve(self.url)
except urllib.error.HTTPError as e:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
msg = f'Tle not fetched: {e}'
if e.code == 400:
msg = f'{msg}: "{e.url}"'
self.log.error('%s', msg)
return
except (urllib.error.URLError, ValueError) as e:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
self.log.error('Tle not fetched: %s', e)
return
if self.fill_objects(x and pathlib.Path(x[0]) or None, t):
self.last_update_tle = t
self.log.info('Tle updated')
return 1
def update_config(self, config: Mapping):
"""
:return: True if config update success
"""
if config != self.config:
if not self._validate_config(config):
self.log.warning('invalid new config!')
return
self.log.debug('reconf')
self.config = config
fn = pathlib.Path(urllib.parse.urlparse(self.url).path).name
self.tle_file = pathlib.Path(TLEDIR / fn)
if self.tle_file.is_file():
self.last_update_tle = dt.datetime.fromtimestamp(self.tle_file.stat().st_mtime, dt.timezone.utc)
else:
if self.tle_file.is_dir():
shutil.rmtree(self.tle_file, True)
else:
self.tle_file.unlink(True)
self.tle_file.touch()
self.last_update_tle = dt.datetime.fromtimestamp(0, dt.timezone.utc)
self.fill_objects(self.tle_file, dt.datetime.now(dt.timezone.utc))
return 1
@staticmethod
def _validate_config(config: Mapping) -> bool:
return all(map(lambda x: x in config, [
'url',
'update_period',
]))
@property
def url(self) -> str:
return self.config['url']
@property
def update_period(self) -> Union[int, float]:
"""
Period of TLE update, days
"""
return self.config['update_period']
def action(self, t: dt.datetime):
if t >= self.t_next and self.fetch_tle(t):
self.t_next = self.last_update_tle + dt.timedelta(days=self.update_period)
return 1
def get(self, name: str) -> Optional[tuple[ephem.EarthSatellite, tuple[str, str, str]]]:
"""
Get TLE info by satellite name or NORAD number
:return: Tuple of EarthSatellite object and 3 lines of TLE. Or None
"""
return self.objects.get(name, None)
def get_ephem(self, name: str) -> Optional[ephem.EarthSatellite]:
"""
Get TLE object by satellite name or NORAD number
"""
x = self.objects.get(name, None)
return x and x[0]
def get_tle(self, name: str) -> Optional[tuple[str, str, str]]:
"""
Get raw TLE lines by satellite name or NORAD number
"""
x = self.objects.get(name, None)
return x and x[1]
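# A minimal usage sketch (illustrative; the URL is an example, not a project
# default): build a Tle from a config mapping, trigger an update, then query
# TLE data by satellite name or NORAD number.
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    tle = Tle({
        'url': 'https://celestrak.org/NORAD/elements/gp.php?GROUP=active&FORMAT=tle',
        'update_period': 1,
    })
    tle.action(dt.datetime.now(dt.timezone.utc))
    print(tle.get_tle('ISS (ZARYA)'))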
|
/sats_receiver-0.1.76.tar.gz/sats_receiver-0.1.76/sats_receiver/tle.py
| 0.696681 | 0.177668 |
tle.py
|
pypi
|
import datetime as dt
import json
import logging
import urllib.error
import urllib.parse
import urllib.request
from typing import Mapping, Optional, Union
import ephem
from sats_receiver import HOMEDIR
class Observer:
TD_ERR_DEF = dt.timedelta(seconds=5)
def __init__(self, config: Mapping):
self.prefix = self.__class__.__name__
self.log = logging.getLogger(self.prefix)
self.config = {}
self.last_weather_time = dt.datetime.fromtimestamp(0, dt.timezone.utc)
self.update_period = 1 # hours
self.t_next = self.last_weather_time + dt.timedelta(hours=self.update_period, minutes=1)
self._observer = ephem.Observer()
self.t_err = self.last_weather_time
self.td_err = self.TD_ERR_DEF
self.weather_fp = HOMEDIR / 'weather.json'
if not self.update_config(config):
raise ValueError(f'{self.prefix}: Invalid config!')
@property
def with_weather(self) -> bool:
return self.config['weather']
@property
def fetch_elev(self) -> bool:
return self.config['elevation'] is None
@property
def lon(self) -> Union[int, float]:
return self.config['longitude']
@property
def lat(self) -> Union[int, float]:
return self.config['latitude']
@property
def elev(self) -> Union[int, float]:
return self.config['elevation'] or 0
@property
def lonlat(self) -> tuple[Union[int, float], Union[int, float]]:
return self.lon, self.lat
def update_config(self, config: Mapping) -> Optional[int]:
"""
:return: True if config update success
"""
if self.config != config:
if not self._validate_config(config):
self.log.warning('invalid new config!')
return
self.log.debug('reconf')
self.config = config
self._observer = ephem.Observer()
self._observer.lat = str(self.lat)
self._observer.lon = str(self.lon)
self._observer.elev = self.elev
self._observer.compute_pressure()
if self.with_weather:
try:
self.set_weather(json.loads(self.weather_fp.read_bytes()))
except (json.JSONDecodeError, FileNotFoundError) as e:
self.log.warning('Failed to load weather from file: %s', e)
return 1
@staticmethod
def _validate_config(config: Mapping) -> bool:
return all(map(lambda x: x in config, [
'latitude',
'longitude',
'elevation',
'weather',
]))
def fetch_weather(self, t: dt.datetime) -> Optional[int]:
q = urllib.parse.urlencode({
'latitude': self._observer.lat / ephem.degree,
'longitude': self._observer.lon / ephem.degree,
'hourly': 'temperature_2m,surface_pressure',
'current_weather': 'true',
'windspeed_unit': 'ms',
'start_date': dt.datetime.utcnow().date(),
'end_date': dt.datetime.utcnow().date(),
}, safe=',')
try:
with urllib.request.urlopen('https://api.open-meteo.com/v1/forecast?' + q) as r:
j_raw = r.read()
j = json.loads(j_raw)
self.weather_fp.write_bytes(j_raw)
self.td_err = self.TD_ERR_DEF
self.t_err = t
except urllib.error.HTTPError as e:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
msg = f'Weather not fetched!\n{e}'
if e.code == 400:
msg = f'{msg}:\n"{e.url}"'
self.log.error('%s', msg)
return
except urllib.error.URLError as e:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
self.log.error('Weather not fetched: %s', e)
return
except json.JSONDecodeError as e:
if t >= self.t_err:
self.t_err = t + self.td_err
self.td_err *= 2
self.log.error('JSON error: %s', e)
return
self.set_weather(j)
self.log.info('weather updated: %s°C %shPa', self._observer.temp, self._observer.pressure)
return 1
def set_weather(self, j):
self.last_weather_time = dt.datetime.fromisoformat(j['current_weather']['time']).replace(tzinfo=dt.timezone.utc)
self._observer.temp = float(j['current_weather']['temperature'])
if self.fetch_elev:
self._observer.elev = j.get('elevation', self._observer.elev)
press = None
for i, val in enumerate(j['hourly']['time']):
if dt.datetime.fromisoformat(val).replace(tzinfo=dt.timezone.utc) == self.last_weather_time:
try:
press = float(j['hourly']['surface_pressure'][i])
except TypeError:
pass
break
if press is None:
self._observer.compute_pressure()
else:
self._observer.pressure = press
def action(self, t: dt.datetime) -> Optional[int]:
self.set_date(t)
if self.with_weather and t >= self.t_next and self.fetch_weather(t):
self.t_next = self.last_weather_time + dt.timedelta(hours=self.update_period, minutes=1)
return 1
def next_pass(self,
body: ephem.EarthSatellite,
start_time: dt.datetime = None) -> tuple[dt.datetime, float,
dt.datetime, float,
dt.datetime, float]:
"""
Calculate next pass of the `body` from `start_time`
:return: rise_t, rise_az, culm_t, culm_alt, set_t, set_az
"""
o = self._observer.copy()
if start_time is not None:
o.date = start_time
rise_t, rise_az, culm_t, culm_alt, set_t, set_az = o.next_pass(body, False)
return (ephem.to_timezone(rise_t, dt.timezone.utc), rise_az / ephem.degree,
ephem.to_timezone(culm_t, dt.timezone.utc), culm_alt / ephem.degree,
ephem.to_timezone(set_t, dt.timezone.utc), set_az / ephem.degree)
def set_date(self, t: dt.datetime):
self._observer.date = t
def get_obj(self) -> ephem.Observer:
return self._observer
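# A minimal usage sketch (illustrative coordinates): create an Observer and
# advance its clock; `next_pass` can then be called with any
# ephem.EarthSatellite, e.g. one obtained from the Tle helper.
if __name__ == '__main__':
    obs = Observer({
        'latitude': 51.48,
        'longitude': 0.0,
        'elevation': None,
        'weather': False,
    })
    obs.action(dt.datetime.now(dt.timezone.utc))
    print(obs.lonlat, obs.elev)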
|
/sats_receiver-0.1.76.tar.gz/sats_receiver-0.1.76/sats_receiver/observer.py
| 0.753104 | 0.179261 |
observer.py
|
pypi
|
import collections
import datetime as dt
import enum
import errno
import gc
import heapq
import itertools
import logging
import math
import os
import pathlib
import sys
import tempfile
import threading
import time
from typing import Any, Callable, Iterable, Mapping, Union
import ephem
import numpy as np
import psutil
import shapefile
from PIL import Image, ImageColor, ImageOps, ExifTags
THIRD_PI = math.pi / 3
class Mode(enum.Enum):
RAW = 'RAW'
AM = 'AM'
FM = 'FM'
WFM = 'WFM'
WFM_STEREO = 'WFM_STEREO'
QUAD = 'QUAD'
QPSK = 'QPSK'
OQPSK = 'OQPSK'
GMSK = 'GMSK'
class Decode(enum.Enum):
RAW = 'RAW'
CSOFT = 'CSOFT'
APT = 'APT'
LRPT = 'LRPT'
SSTV = 'SSTV'
SATS = 'SATS'
Event = collections.namedtuple('Event', 't, prior, seq, fn, a, kw')
class Scheduler:
"""
The scheduler idea is taken from the python stdlib
and adapted to my needs
https://github.com/python/cpython/blob/main/Lib/sched.py
"""
def __init__(self):
self._queue = []
self._lock = threading.RLock()
self._sequence_generator = itertools.count()
def plan(self, t: dt.datetime, fn: Callable, *a: Any, prior: int = 0, **kw: Any) -> Event:
with self._lock:
event = Event(t, prior, next(self._sequence_generator), fn, a, kw)
heapq.heappush(self._queue, event)
return event
def cancel(self, *evt: Event):
if not evt:
return
with self._lock:
for e in evt:
try:
self._queue.remove(e)
heapq.heapify(self._queue)
except ValueError:
pass
def clear(self):
with self._lock:
self._queue.clear()
heapq.heapify(self._queue)
def empty(self) -> bool:
with self._lock:
return not self._queue
def action(self):
pop = heapq.heappop
while True:
with self._lock:
if not self._queue:
break
t, prior, seq, fn, a, kw = self._queue[0]
now = dt.datetime.now(dt.timezone.utc)
if t > now:
delay = True
else:
delay = False
pop(self._queue)
if delay:
return t - now
fn(*a, **kw)
# time.sleep(0)
class SysUsage:
DEFAULT_INTV = 3600
def __init__(self, ctx: str, intv: Union[int, float] = DEFAULT_INTV):
self.prefix = f'{self.__class__.__name__}: {ctx}'
self.log = logging.getLogger(self.prefix)
self.proc = psutil.Process()
gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
self.now = 0
self.intv = intv
self.next = self.t + intv
self.ctx = ctx
def collect(self):
if self.t >= self.next:
self.next = self.now + self.intv
gc.collect()
with self.proc.oneshot():
mi = self.proc.memory_info()
ct = self.proc.cpu_times()
self.log.debug('%s rss %s utime %s stime %s',
numbi_disp(sum(sys.getsizeof(i) for i in gc.get_objects())),
numbi_disp(mi.rss),
sec(ct.user),
sec(ct.system))
@property
def t(self) -> float:
self.now = time.monotonic()
return self.now
class MapShapes:
def __init__(self, config: Mapping):
self.config = config
self.shapes = []
for i, shf, col in sorted(config.get('shapes', []), key=lambda x: x[0]):
self.shapes.append((shf, self._gen_color(col)))
for name, v in config.get('points', {}).items():
if name != 'observer':
v['lonlat'] = np.radians(v['lonlat'])
v['name'] = name
v['color'] = self._gen_color(v['color'])
if v['type'] == '+':
assert len(v['size']) == 2
v['size'] = [*map(int, v['size'])]
elif v['type'] == 'o':
v['size'] = int(v['size'])
else:
raise ValueError(f'Invalid point type: `{v["type"]}`')
order = int(v.get('order', len(self.shapes)))
self.shapes.insert(order, (v, v['color']))
@property
def shapes_dir(self) -> pathlib.Path:
return pathlib.Path(self.config['shapes_dir']).expanduser()
@property
def line_width(self) -> int:
return self.config.get('line_width', 1)
def iter(self) -> tuple[Union[Iterable, Mapping], tuple]:
for shf, color in self.shapes:
if isinstance(shf, Mapping):
yield shf, color
continue
for i in shapefile.Reader(self.shapes_dir / shf).iterShapes():
pts = np.radians(i.points)
if len(i.parts) <= 1:
yield pts, color
else:
for j, k in itertools.pairwise(i.parts):
yield pts[j:k], color
@staticmethod
def _gen_color(col) -> Iterable[int]:
alpha = 255
if isinstance(col, (tuple, list)):
if len(col) == 4:
alpha = col[3]
col = tuple(col[:3]) + (alpha,)
elif isinstance(col, str):
col = ImageColor.getcolor(col, 'RGBA')
elif isinstance(col, int):
col = col, alpha
else:
raise TypeError('Invalid color value type')
return col
def numbi_disp(number, zero=None):
"""
Actual for data sizes in bytes
"""
try:
number = len(number)
except TypeError:
pass
if not number or number <= 0:
if zero is not None:
return zero
number = 0
# rememberings on BinkleyTerm
rgch_size = 'bKMGTPEZY'
i = 0
oldq = 0
quotient = number
while quotient >= 1024:
oldq = quotient
quotient = oldq >> 10
i += 1
intq = quotient
if intq > 999:
# If more than 999 but less than 1024, it's a big fraction of
# the next power of 1024. Get top two significant digits
# (so 1023 would come out .99K, for example)
intq = (intq * 25) >> 8 # 100/1024
e_stuff = '.%2d%s' % (intq, rgch_size[i + 1])
elif intq < 10 and i:
# If less than 10 and not small units, then get some decimal
# places (e.g. 1.2M)
intq = (oldq * 5) >> 9 # 10/1024
tempstr = '%02d' % intq
e_stuff = '%s.%s%s' % (tempstr[0], tempstr[1], rgch_size[i])
else:
# Simple case. Just do it.
e_stuff = '%d%s' % (intq, rgch_size[i])
return e_stuff
def num_disp(num, res=2):
mag = 0
while abs(num) >= 1000:
mag += 1
num /= 1000
return f"{num:.{res}f}".rstrip('0').rstrip('.') + ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')[mag]
def sec(t, res=2):
return ('%.*f' % (res, t)).rstrip('0').rstrip('.') + 's'
def doppler_shift(freq: Union[int, float], vel: Union[int, float]):
"""
Calculate Doppler shift by relative velocity
:param freq: base signal frequency
:param vel: relative velocity, m/s
:return: Result frequency with doppler shift. NOTE: if vel is negative, result match for UPLINK, else for DOWNLINK
"""
return freq * ephem.c / (ephem.c + vel)
def azimuth(a_lonlat: [float, float], b_lonlat: [float, float]) -> float:
"""
Calculate azimuth between two points
:param a_lonlat: Point A lonlat, radians
:param b_lonlat: Point B lonlat, radians
:return: azimuth in radians
"""
lon_a, lat_a = a_lonlat
lon_b, lat_b = b_lonlat
if lon_b - lon_a < -math.pi:
delta_lon = math.tau + lon_b - lon_a
elif lon_b - lon_a > math.pi:
delta_lon = lon_b - lon_a - math.tau
else: # abs(lon_b - lon_a) <= math.pi
delta_lon = lon_b - lon_a
return math.atan2(
math.sin(delta_lon),
math.cos(lat_a) * math.tan(lat_b) - math.sin(lat_a) * math.cos(delta_lon)
)
def mktmp(dir: pathlib.Path = None, prefix: str = None, suffix='.tmp') -> pathlib.Path:
if dir:
dir.mkdir(parents=True, exist_ok=True)
f = tempfile.NamedTemporaryFile(dir=dir, prefix=prefix, suffix=suffix, delete=False)
f.close()
return pathlib.Path(f.name)
def mktmp2(mode='w+b', buffering=-1, dir: pathlib.Path = None, prefix: str = None, suffix='.tmp'):
if dir:
dir.mkdir(parents=True, exist_ok=True)
return tempfile.NamedTemporaryFile(mode=mode,
buffering=buffering,
dir=dir,
prefix=prefix,
suffix=suffix,
delete=False)
def close(*ff) -> None:
for f in ff:
try:
if hasattr(f, 'close'):
f.close()
elif f is not None and f >= 0:
os.close(f)
except OSError:
pass
def unlink(*pp: pathlib.Path) -> None:
for p in pp:
try:
p.unlink(True)
except OSError as e:
if e.errno == errno.EISDIR:
try:
p.rmdir()
except:
pass
def img_add_exif(img: Image.Image,
d: dt.datetime = None,
observer: ephem.Observer = None,
comment='') -> Image.Image:
exif = img.getexif()
exif[ExifTags.Base.Software] = 'SatsReceiver' # TODO: add version
if d is not None:
exif[ExifTags.Base.DateTime] = d.strftime('%Y:%m:%d %H:%M:%S')
if observer is not None:
img.info['exif'] = exif.tobytes()
img = ImageOps.exif_transpose(img)
exif = img.getexif()
gps = exif.get_ifd(ExifTags.IFD.GPSInfo)
gps[ExifTags.GPS.GPSLatitudeRef] = 'S' if observer.lat < 0 else 'N'
gps[ExifTags.GPS.GPSLatitude] = list(map(lambda x: abs(float(x)), str(observer.lat).split(':')))
gps[ExifTags.GPS.GPSLongitudeRef] = 'W' if observer.lon < 0 else 'E'
gps[ExifTags.GPS.GPSLongitude] = list(map(lambda x: abs(float(x)), str(observer.lon).split(':')))
gps[ExifTags.GPS.GPSAltitudeRef] = int(observer.elev < 0)
gps[ExifTags.GPS.GPSAltitude] = abs(observer.elev)
exif[ExifTags.IFD.GPSInfo] = gps
if comment:
img.info['exif'] = exif.tobytes()
img = ImageOps.exif_transpose(img)
exif = img.getexif()
ee = exif.get_ifd(ExifTags.IFD.Exif)
ee[ExifTags.Base.UserComment] = comment
exif[ExifTags.IFD.Exif] = ee
img.info['exif'] = exif.tobytes()
return ImageOps.exif_transpose(img)
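# A minimal usage sketch for the math helpers (illustrative values): the
# Doppler-shifted frequency of a 137.1 MHz downlink for a target approaching
# at 7 km/s, and the azimuth in degrees from lon/lat (0, 0) to (10, 10).
if __name__ == '__main__':
    print(num_disp(doppler_shift(137.1e6, -7000)))
    print(math.degrees(azimuth(np.radians((0.0, 0.0)), np.radians((10.0, 10.0)))))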
|
/sats_receiver-0.1.76.tar.gz/sats_receiver-0.1.76/sats_receiver/utils.py
| 0.539226 | 0.158891 |
utils.py
|
pypi
|
import datetime as dt
import dateutil.tz
import logging
import math
import pathlib
from typing import Optional, Union
import numpy as np
import scipy as sp
import scipy.signal
from PIL import Image
from sats_receiver import utils
class Sstv:
MODE = 'L'
HSYNC_GAP_MS = 0
HDR_PIX_S = 0.01
SYNC_PIX_S = 0.001
"""
SSTV Header Sync Word
blocks per 10 ms
_______ _______
x30 |1| x30 | x30
|_| |_______
"""
HDR_SYNC_WORD = np.array([1] * 30 + [-1] + [1] * 30 + [-1] * 30)
def __init__(self,
sat_name: str,
out_dir: pathlib.Path,
srate: Union[int, float],
do_sync=True):
self.name = self.__class__.__name__
self.prefix = f'{self.name}: {sat_name}'
self.log = logging.getLogger(self.prefix)
self.sat_name = sat_name
self.out_dir = out_dir
self.srate = srate
self.do_sync = do_sync
self.line_len_fp = self.LINE_S * srate
self.line_len = int(self.LINE_S * srate)
# horizontal sync
self.sync_pix_width = int(srate * self.SYNC_PIX_S)
self.img = None
self.img_data_max_size = int(self.line_len_fp * (self.IMG_H + 1)) * np.float32().itemsize
self.img_data_file = utils.mktmp2(mode='wb',
buffering=0,
dir=out_dir,
prefix='_'.join(self.name.lower().split()),
suffix='.tmp')
self.img_data_file.truncate(self.img_data_max_size)
self.img_data_fp = pathlib.Path(self.img_data_file.name)
def stop(self):
utils.close(self.img_data_file)
self.img_data_file = None
def feed(self, data: np.ndarray) -> int:
self.img_data_file.write(data.tobytes())
if self.img_data_file.tell() >= self.img_data_max_size:
utils.close(self.img_data_file)
# self.image_process()
return 1
def image_process(self):
self.log.debug('image process...')
data = np.fromfile(self.img_data_fp, dtype=np.float32, count=self.img_data_max_size)
data.resize(self.img_data_max_size, refcheck=False) # resize in-place with zero-filling
data = self._sync_process(data) if self.do_sync else self._no_sync_process(data)
data = (data - SstvRecognizer._1500) / (SstvRecognizer._2300 - SstvRecognizer._1500)
data = self._image_process(data)
img = Image.fromarray((data * 255).clip(0, 255).astype(np.uint8), self.MODE)
if self.MODE != 'RGB':
img = img.convert('RGB')
self.log.debug('add EXIF')
self.img = utils.img_add_exif(
img,
d=dt.datetime.fromtimestamp(self.img_data_fp.stat().st_mtime, dateutil.tz.tzutc()),
comment=self.name,
)
utils.unlink(self.img_data_fp)
self.log.debug('image process done')
def get_image(self) -> Image.Image:
if not self.img:
self.image_process()
return self.img
def _no_sync_process(self, data: np.ndarray) -> np.ndarray:
hsync_len = int(self.HSYNC_MS * self.sync_pix_width)
return np.resize(data, (self.IMG_H, self.line_len))[:, hsync_len:]
def _sync_process(self, data: np.ndarray) -> np.ndarray:
self.log.debug('syncing...')
hsync_len = int(self.HSYNC_MS * self.sync_pix_width)
line_len = self.line_len - hsync_len
sync_word = np.array([-1] * hsync_len)
corrs = np.correlate(data - np.mean(data), sync_word, 'valid')
corrs_mean = np.mean(corrs)
corrs_up = corrs > corrs_mean
k = 0
peaks = np.empty(self.IMG_H, int)
for i in range(peaks.size):
k += np.argmax(corrs_up[k:k + line_len])
peaks[i] = np.argmax(corrs[k:k + line_len]) + k + hsync_len
k += line_len
img_raw = np.zeros((self.IMG_H, line_len), data.dtype)
for i, line in enumerate(img_raw):
line[:] = data[peaks[i]:peaks[i] + line_len]
return img_raw
def _image_process(self, data: np.ndarray) -> np.ndarray:
return data
class _Robot(Sstv):
MODE = 'YCbCr'
HSYNC_GAP_MS = 3
C_GAP_MS = 1.5
def _420(self, data: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
y, c = np.hsplit(data, [int(self.srate * self.Y_MS / 1000)])
indices = [int(self.srate * self.CSYNC_MS / 1000)]
c0_s, c0 = np.hsplit(c[::2], indices)
c1_s, c1 = np.hsplit(c[1::2], indices)
cr = c0
cb = c1
if np.median(c0_s) > np.median(c1_s):
cr, cb = cb, cr
return y, np.repeat(cb, 2, axis=0), np.repeat(cr, 2, axis=0)
def _422(self, data: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
indices = [0]
for i in self.Y_MS, self.CSYNC_MS, self.C_MS, self.CSYNC_MS:
indices.append(indices[-1] + i)
indices = np.array(indices[1:]) / 1000
y, c0_s, c0, c1_s, c1 = np.hsplit(data, (indices * self.srate).astype(int))
# csync_gap_len = int(self.srate * self.CSYNC_GAP_MS / 1000)
# c0_m = np.median(c0_s[:, csync_gap_len:])
# c1_m = np.median(c1_s[:, csync_gap_len:])
cr = c0
cb = c1
return y, cb, cr
def _image_process(self, data: np.ndarray) -> np.ndarray:
y, cb, cr = self._color_method(data)
c_gap_len = int(self.srate * self.C_GAP_MS / 1000)
# Image.fromarray((data * 255).clip(0, 255).astype(np.uint8), 'L').save('/home/baskiton/pretest.png')
# Image.fromarray((y * 255).clip(0, 255).astype(np.uint8), 'L').save('/home/baskiton/pretest_y.png')
# Image.fromarray((cb * 255).clip(0, 255).astype(np.uint8), 'L').save('/home/baskiton/pretest_cb.png')
# Image.fromarray((cr * 255).clip(0, 255).astype(np.uint8), 'L').save('/home/baskiton/pretest_cr.png')
return np.dstack((
sp.signal.resample(y[:, c_gap_len:], self.IMG_W, axis=1),
sp.signal.resample(cb[:, c_gap_len:], self.IMG_W, axis=1),
sp.signal.resample(cr[:, c_gap_len:], self.IMG_W, axis=1)
))
class Robot24(_Robot):
VIS = 0x04
HSYNC_MS = 12
CSYNC_MS = 6
Y_MS = 88
C_MS = 44
LINE_S = (HSYNC_MS + Y_MS + CSYNC_MS + C_MS + CSYNC_MS + C_MS) / 1000
IMG_W = 160
IMG_H = 120
_color_method = _Robot._422
class Robot36(_Robot):
VIS = 0x08
HSYNC_MS = 10.5
CSYNC_MS = 4.5
Y_MS = 90
C_MS = 45
LINE_S = (HSYNC_MS + Y_MS + CSYNC_MS + C_MS) / 1000
IMG_W = 320
IMG_H = 240
_color_method = _Robot._420
class Robot72(_Robot):
VIS = 0x0c
HSYNC_MS = 12
CSYNC_MS = 6
Y_MS = 138
C_MS = 69
LINE_S = (HSYNC_MS + Y_MS + CSYNC_MS + C_MS + CSYNC_MS + C_MS) / 1000
IMG_W = 320
IMG_H = 240
_color_method = _Robot._422
class _Martin(Sstv):
MODE = 'RGB'
HSYNC_MS = 4.862
CSYNC_MS = 0.572
C_MS = CSYNC_MS * 256
LINE_S = (HSYNC_MS + CSYNC_MS + C_MS + CSYNC_MS + C_MS + CSYNC_MS + C_MS + CSYNC_MS) / 1000
def _image_process(self, data: np.ndarray) -> np.ndarray:
indices = [0]
for i in self.CSYNC_MS, self.C_MS, self.CSYNC_MS, self.C_MS, self.CSYNC_MS, self.C_MS:
indices.append(indices[-1] + i)
indices = np.array(indices[1:]) / 1000
_, g, _, b, _, r, _ = np.hsplit(data, (indices * self.srate).astype(int))
return np.dstack((
sp.signal.resample(r, self.IMG_W, axis=1),
sp.signal.resample(g, self.IMG_W, axis=1),
sp.signal.resample(b, self.IMG_W, axis=1)
))
class MartinM1(_Martin):
VIS = 0x2c
IMG_W = 320
IMG_H = 256
class MartinM2(_Martin):
VIS = 0x28
C_MS = _Martin.CSYNC_MS * 128
LINE_S = (_Martin.HSYNC_MS + _Martin.CSYNC_MS
+ C_MS + _Martin.CSYNC_MS
+ C_MS + _Martin.CSYNC_MS
+ C_MS + _Martin.CSYNC_MS) / 1000
IMG_W = 160
IMG_H = 256
class MartinM3(_Martin):
VIS = 0x24
IMG_W = 320
IMG_H = 128
class MartinM4(_Martin):
VIS = 0x20
C_MS = _Martin.CSYNC_MS * 128
LINE_S = (_Martin.HSYNC_MS + _Martin.CSYNC_MS
+ C_MS + _Martin.CSYNC_MS
+ C_MS + _Martin.CSYNC_MS
+ C_MS + _Martin.CSYNC_MS) / 1000
IMG_W = 160
IMG_H = 128
class _PD(Sstv):
MODE = 'YCbCr'
HSYNC_MS = 20
HSYNC_GAP_MS = 2.08
def _image_process(self, data: np.ndarray) -> np.ndarray:
indices = [0]
for i in self.HSYNC_GAP_MS, self.C_MS, self.C_MS, self.C_MS, self.C_MS:
indices.append(indices[-1] + i)
indices = np.array(indices[1:]) / 1000
_, y1, cr, cb, y2, _ = np.hsplit(data, (indices * self.srate).astype(int))
y = np.stack((y1, y2), axis=1).reshape((self.IMG_H * 2, -1))
return np.dstack((
sp.signal.resample(y, self.IMG_W, axis=1),
sp.signal.resample(cb.repeat(2, axis=0), self.IMG_W, axis=1),
sp.signal.resample(cr.repeat(2, axis=0), self.IMG_W, axis=1)
))
class PD50(_PD):
VIS = 0x5d
C_MS = 91.52
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 320
IMG_H = 256 // 2
class PD90(_PD):
VIS = 0x63
C_MS = 170.24
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 320
IMG_H = 256 // 2
class PD120(_PD):
VIS = 0x5f
C_MS = 121.6
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 640
IMG_H = 496 // 2
class PD160(_PD):
VIS = 0x62
C_MS = 195.854
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 512
IMG_H = 400 // 2
class PD180(_PD):
VIS = 0x60
C_MS = 183.04
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 640
IMG_H = 496 // 2
class PD240(_PD):
VIS = 0x61
C_MS = 244.48
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 640
IMG_H = 496 // 2
class PD290(_PD):
VIS = 0x5e
C_MS = 228.8
LINE_S = (_PD.HSYNC_MS + _PD.HSYNC_GAP_MS + C_MS * 4) / 1000
IMG_W = 800
IMG_H = 616 // 2
class _Scottie(Sstv):
MODE = 'RGB'
HSYNC_MS = 9.0
CSYNC_MS = 1.5
C_MS = 138.24
LINE_S = (CSYNC_MS + C_MS + CSYNC_MS + C_MS + HSYNC_MS + CSYNC_MS + C_MS) / 1000
IMG_W = 320
IMG_H = 256
def _image_process(self, data: np.ndarray) -> np.ndarray:
indices = [0]
for i in self.CSYNC_MS, self.C_MS, self.CSYNC_MS, self.C_MS, self.CSYNC_MS:
indices.append(indices[-1] + i)
indices = np.array(indices[1:]) / 1000
_, r, _, g, _, b = np.hsplit(data, (indices * self.srate).astype(int))
g = np.concatenate((np.zeros((1, g.shape[1]), dtype=g.dtype), g[:-1]), axis=0)
b = np.concatenate((np.zeros((1, b.shape[1]), dtype=b.dtype), b[:-1]), axis=0)
return np.dstack((
sp.signal.resample(r, self.IMG_W, axis=1),
sp.signal.resample(g, self.IMG_W, axis=1),
sp.signal.resample(b, self.IMG_W, axis=1)
))
class ScottieS1(_Scottie):
VIS = 0x3c
class ScottieS2(_Scottie):
VIS = 0x38
C_MS = 88.064
LINE_S = (_Scottie.CSYNC_MS + C_MS + _Scottie.CSYNC_MS + C_MS + _Scottie.HSYNC_MS + _Scottie.CSYNC_MS + C_MS) / 1000
class ScottieS3(ScottieS1):
VIS = 0x34
IMG_W = 160
IMG_H = 128
class ScottieS4(ScottieS2):
VIS = 0x30
IMG_W = 160
IMG_H = 128
class ScottieDX(_Scottie):
VIS = 0x4c
C_MS = 345.6
LINE_S = (_Scottie.CSYNC_MS + C_MS + _Scottie.CSYNC_MS + C_MS + _Scottie.HSYNC_MS + _Scottie.CSYNC_MS + C_MS) / 1000
class SstvRecognizer:
STATUS_OK = 0
STATUS_CALIB_FAIL = 1
STATUS_VIS_FAIL = 2
STATUS_VIS_UNKNOWN = 3
STATUS_FOUND = 4
STATUS_DONE = 5
_STATE_0 = 0
_STATE_GET_PEAKS = 1
_STATE_GET_HDR = 2
_STATE_GET_VIS = 3
_STATE_GET_LINE = 4
SIGNAL_FREQ_SHIFT = 2300 - 1100
SIGNAL_TOLERANCE = 50 / SIGNAL_FREQ_SHIFT
CALIB_LEADER_S = 0.3
CALIB_BREAK_S = 0.01
CALIB_AVG_TOLERANCE = 100 / SIGNAL_FREQ_SHIFT
VIS_BITS = 10
VIS_BIT_S = 0.03
VIS_S = VIS_BITS * VIS_BIT_S
_1100 = (1100 - 1100) / SIGNAL_FREQ_SHIFT
_1200 = (1200 - 1100) / SIGNAL_FREQ_SHIFT
_1300 = (1300 - 1100) / SIGNAL_FREQ_SHIFT
_1400 = (1400 - 1100) / SIGNAL_FREQ_SHIFT
_1500 = (1500 - 1100) / SIGNAL_FREQ_SHIFT
_1900 = (1900 - 1100) / SIGNAL_FREQ_SHIFT
_2300 = (2300 - 1100) / SIGNAL_FREQ_SHIFT
CODES = {
Robot24.VIS: Robot24,
Robot36.VIS: Robot36,
Robot72.VIS: Robot72,
MartinM1.VIS: MartinM1,
MartinM2.VIS: MartinM2,
MartinM3.VIS: MartinM3,
MartinM4.VIS: MartinM4,
PD50.VIS: PD50,
PD90.VIS: PD90,
PD120.VIS: PD120,
PD160.VIS: PD160,
PD180.VIS: PD180,
PD240.VIS: PD240,
PD290.VIS: PD290,
ScottieS1.VIS: ScottieS1,
ScottieS2.VIS: ScottieS2,
ScottieS3.VIS: ScottieS3,
ScottieS4.VIS: ScottieS4,
ScottieDX.VIS: ScottieDX,
}
def __init__(self,
sat_name: str,
out_dir: pathlib.Path,
srate: Union[int, float],
start_peak: int,
do_sync=True):
self.prefix = f'{self.__class__.__name__}: {sat_name}'
self.log = logging.getLogger(self.prefix)
self.sat_name = sat_name
self.out_dir = out_dir
self.srate = srate
self.start_peak = start_peak
self.do_sync = do_sync
self.state = self._STATE_GET_PEAKS
# calibration header setup
self.calib_leader_len = int(self.CALIB_LEADER_S * srate)
self.calib_break_len = int(self.CALIB_BREAK_S * srate)
self.calib_len = self.calib_leader_len * 2 + self.calib_break_len
self.calib_hdr = np.full(self.calib_len, np.nan, np.float32)
self.calib_remained_sz = self.calib_len
# vis code setup
self.vis_len = int(self.VIS_S * srate)
self.vis = np.full(self.vis_len, np.nan, np.float32)
self.vis_remained_sz = self.vis_len
self.vis_code = 0
self.sstv = None
def feed(self, input_data: np.ndarray) -> int:
data = input_data
res = self.STATUS_OK
# just in case
if not self.state:
return self.STATUS_DONE
while data.size:
if self.state == self._STATE_GET_PEAKS:
data = data[self.start_peak:]
self.calib_hdr.fill(np.nan)
self.calib_remained_sz = self.calib_len
self.vis.fill(np.nan)
self.vis_remained_sz = self.vis_len
self.state = self._STATE_GET_HDR
elif self.state == self._STATE_GET_HDR:
if not np.isnan(np.amin(self.calib_hdr)):
self.calib_hdr.fill(np.nan)
self.calib_remained_sz = self.calib_len
i = np.argmin(self.calib_hdr)
x = data[:self.calib_remained_sz]
data = data[self.calib_remained_sz:]
self.calib_hdr[i:i + x.size] = x
self.calib_remained_sz -= x.size
if not self.calib_remained_sz:
# hdr is full. check it
leaders = np.append(
self.calib_hdr[:self.calib_leader_len],
self.calib_hdr[self.calib_leader_len + self.calib_break_len
:self.calib_len - self.calib_break_len]
)
breaker = self.calib_hdr[self.calib_leader_len:self.calib_leader_len + self.calib_break_len]
leaders_avg = np.median(leaders)
breaker_avg = np.median(breaker)
if (math.fabs(leaders_avg - self._1900) < self.CALIB_AVG_TOLERANCE
and math.fabs(breaker_avg - self._1200) < self.CALIB_AVG_TOLERANCE):
# print(f' {leaders_avg=} d={math.fabs(leaders_avg - self._1900)}\n'
# f' {breaker_avg=} d={math.fabs(breaker_avg - self._1200)}')
self.state = self._STATE_GET_VIS
else:
self.state = self._STATE_0
return self.STATUS_CALIB_FAIL
elif self.state == self._STATE_GET_VIS:
if not np.isnan(np.amin(self.vis)):
self.vis.fill(np.nan)
self.vis_remained_sz = self.vis_len
i = np.argmin(self.vis)
x = data[:self.vis_remained_sz]
data = data[self.vis_remained_sz:]
self.vis[i:i + x.size] = x
self.vis_remained_sz -= x.size
if not self.vis_remained_sz:
# VIS is full. check it
vis = np.median(np.resize(self.vis, (10, self.vis_len // 10)), axis=1)
vis_bits = [int(bit < self._1200) for bit in vis[8:0:-1]]
code = 0
for bit in vis_bits[1:]:
code = (code << 1) | bit
# check parity
if sum(vis_bits[1:]) % 2 != vis_bits[0]:
if code:
self.log.debug('Parity failed VIS<0x%02x>', code)
self.state = self._STATE_0
return self.STATUS_VIS_FAIL
self.vis_code = code
sstv = self.CODES.get(code)
if not sstv:
if code:
self.log.debug('Unknown VIS<0x%02x>', code)
self.state = self._STATE_0
return self.STATUS_VIS_UNKNOWN
self.sstv = sstv(sat_name=self.sat_name,
out_dir=self.out_dir,
srate=self.srate,
do_sync=self.do_sync)
self.state = self._STATE_GET_LINE
res = self.STATUS_FOUND
elif self.state == self._STATE_GET_LINE:
if self.sstv.feed(data):
self.stop()
return self.STATUS_DONE
break
return res
def get_image(self) -> Optional[Image.Image]:
if self.sstv:
return self.sstv.get_image()
def stop(self):
self.state = self._STATE_0
if self.sstv:
self.sstv.stop()
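# A minimal usage sketch (illustrative; not part of the original module).
# The input to `feed` must be the instantaneous frequency of the demodulated
# signal normalized to the SSTV band, i.e.
# (f_hz - 1100) / SstvRecognizer.SIGNAL_FREQ_SHIFT, as float32:
#
#   rec = SstvRecognizer('TEST-SAT', pathlib.Path('/tmp'), 16000, start_peak=0)
#   if rec.feed(samples) == SstvRecognizer.STATUS_DONE:
#       rec.get_image().save('/tmp/sstv.png')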
|
/sats_receiver-0.1.76.tar.gz/sats_receiver-0.1.76/sats_receiver/systems/sstv.py
| 0.661923 | 0.213849 |
sstv.py
|
pypi
|
import construct
from satellites.telemetry.ax25 import Header as ax25_hdr
from satellites.adapters import UNIXTimestampAdapter
# GEOSCAN Telemetry Protocol
# https://download.geoscan.aero/site-files/%D0%9F%D1%80%D0%BE%D1%82%D0%BE%D0%BA%D0%BE%D0%BB%20%D0%BF%D0%B5%D1%80%D0%B5%D0%B4%D0%B0%D1%87%D0%B8%20%D1%82%D0%B5%D0%BB%D0%B5%D0%BC%D0%B5%D1%82%D1%80%D0%B8%D0%B8.pdf
class SubAdapter(construct.Adapter):
def __init__(self, v, *args, **kwargs):
self.v = v
construct.Adapter.__init__(self, *args, **kwargs)
def _encode(self, obj, context, path=None):
return int(obj + self.v)
def _decode(self, obj, context, path=None):
return obj - self.v
class MulAdapter(construct.Adapter):
def __init__(self, v, *args, **kwargs):
self.v = v
construct.Adapter.__init__(self, *args, **kwargs)
def _encode(self, obj, context, path=None):
return int(round(obj / self.v))
def _decode(self, obj, context, path=None):
return float(obj) * self.v
Frame = construct.Struct(
'time' / UNIXTimestampAdapter(construct.Int32ul),
'Iab' / MulAdapter(0.0766, construct.Int16ul), # mA
'Isp' / MulAdapter(0.03076, construct.Int16ul), # mA
'Uab_per' / MulAdapter(0.00006928, construct.Int16ul), # V
'Uab_sum' / MulAdapter(0.00013856, construct.Int16ul), # V
'Tx_plus' / construct.Int8ul, # deg C
'Tx_minus' / construct.Int8ul, # deg C
'Ty_plus' / construct.Int8ul, # deg C
'Ty_minus' / construct.Int8ul, # deg C
'Tz_plus' / construct.Int8ul, # undef
'Tz_minus' / construct.Int8ul, # deg C
'Tab1' / construct.Int8ul, # deg C
'Tab2' / construct.Int8ul, # deg C
'CPU_load' / construct.Int8ul, # %
'Nres_obc' / SubAdapter(7476, construct.Int16ul),
'Nres_CommU' / SubAdapter(1505, construct.Int16ul),
'RSSI' / SubAdapter(99, construct.Int8ul), # dBm
'pad' / construct.GreedyBytes
)
geoscan = construct.Struct(
'ax25' / construct.Peek(ax25_hdr),
'ax25' / construct.If(lambda this: (bool(this.ax25) and this.ax25.addresses[0].callsign == u'BEACON'), ax25_hdr),
'geoscan' / construct.If(lambda this: (bool(this.ax25) and this.ax25.pid == 0xF0), Frame),
)
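# Usage sketch (illustrative helper, not part of the original module): parse
# a raw AX.25 frame; the `geoscan` telemetry field comes back as None when
# the frame is not a GEOSCAN 'BEACON' beacon with PID 0xF0.
def parse_geoscan_frame(raw: bytes):
    return geoscan.parse(raw)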
|
/sats_receiver-0.1.76.tar.gz/sats_receiver-0.1.76/sats_receiver/systems/satellites/telemetry/geoscan_tlm.py
| 0.601477 | 0.296171 |
geoscan_tlm.py
|
pypi
|
Feature Extraction Example
--------------------------
In this example we will extract the Histogram of Gradients (HoG),
Normalized Difference Vegetation Index (NDVI) and the Pantex features
from a test satellite image.
- The HoG feature captures the distribution of structure orientations.
- The NDVI feature captures the level of vegetation.
- The Pantex feature captures the level of built-up structures.
The image will be split into blocks, in this example 20 by 20 pixels,
and each feature is calculated for this block using a certain amount of
context information called a window. A feature can be calculated on
multiple windows to allow for context at different scales.
In this example
~~~~~~~~~~~~~~~
- First we will define the Features we would like to extract and with
which window shapes.
- We will then load the image using the ``Image`` class.
- Then we will split the image into blocks using the ``FullGenerator``
Class.
- Then we will extract the features using the ``extract_features``
function.
Live iPython Notebook
^^^^^^^^^^^^^^^^^^^^^
If you are reading this example on readthedocs.io a notebook of this
example is available `in the
repository <https://github.com/DynaSlum/satsense/blob/master/notebooks/FeatureExtraction/feature_extraction.ipynb>`__
.. code:: ipython3
# General imports
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
%matplotlib inline
# Satsense imports
from satsense import Image
from satsense.generators import FullGenerator
from satsense.extract import extract_features
from satsense.features import NirNDVI, HistogramOfGradients, Pantex
Define the features to calculate
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
First we define a list of windows for each of the features to use.
HoG and Pantex will be calculated on 2 windows of 25x25 pixels and 23x37
pixels. NDVI will be calculated on one window of 37x37 pixels.
These window shapes are chosen arbitrarily to show the capabilities of
satsense; for your own feature extraction you should experiment with
these window shapes to get the best results.
N.B. The NDVI feature here is called NirNDVI because that implementation
uses the near-infrared band of the image. Several other implementations
of NDVI are available in satsense, see `the
documentation <https://satsense.readthedocs.io/en/latest/api/satsense.features.html>`__
.. code:: ipython3
# Multiple windows
two_windows = [(25, 25), (23, 37)]
# Single window
one_window = [(37, 37),]
features = [
HistogramOfGradients(two_windows),
NirNDVI(one_window),
Pantex(two_windows),
]
Load the image
~~~~~~~~~~~~~~
Here we load the image and normalize it to values between 0 and 1.
Normalization by default is performed per band using the 2nd and 98th
percentiles.
The image class can provide the individual bands, or a number of useful
derivatives such as the RGB image or Grayscale; we call these base
images. More advanced base images are also available, for instance Canny
Edge.
.. code:: ipython3
image = Image('../../test/data/source/section_2_sentinel.tif',
'quickbird')
image.precompute_normalization()
fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(24, 8), sharey=True)
ax1.axis('off')
ax1.imshow(image['rgb'])
ax1.set_title('RGB image')
ax2.axis('off')
ax2.imshow(image['grayscale'], cmap="gray")
ax2.set_title('Grayscale image')
ax3.axis('off')
ax3.imshow(image['canny_edge'], cmap="gray")
ax3.set_title('Canny Edge Image')
plt.show()
.. image:: feature_extraction_files/feature_extraction_5_0.png
Generator
~~~~~~~~~
Next we create a FullGenerator which creates patches of the image in
steps of 20x20 pixels.
In this cell we also show the images, therefore we load the rgb base
image into the generator. This is only needed here so we can show the
blocks using matplotlib. In the next section we will use the
``extract_features`` function, which loads the correct base images for
you based on the features to be calculated.
The patch sizes are determined by the list of window shapes that you
supply to the ``load_image`` function. This is normally done for you by
the ``extract_features`` function.
.. code:: ipython3
generator = FullGenerator(image, (20, 20))
print("The generator is {} by {}".format(*generator.shape), " blocks")
# Create a gridspec to show the images
gs = gridspec.GridSpec(*generator.shape)
gs.update(wspace=0.05, hspace=0.05)
# Load a baseimage into the generator.
# The window is the same as the block size to show the blocks used
generator.load_image('rgb', ((20, 20),))
fig = plt.figure(figsize=(8, 8))
for i, img in enumerate(generator):
ax = plt.subplot(gs[i])
ax.imshow(img.filled(0.5))
ax.axis('off')
.. parsed-literal::
The generator is 8 by 8 blocks
.. image:: feature_extraction_files/feature_extraction_7_1.png
Calculate all the features and append them to a vector
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In this cell we use the ``extract_features`` function from satsense to
extract all features.
``extract_features`` returns a python generator that we can loop over.
Each invocation of this generator returns the feature vector for one
feature in the order of the features list. The shape of this vector is
(x, y, w, v) where:
- x is the number of blocks of the generator in the x direction
- y is the number of blocks of the generator in the y direction
- w is the number of windows the feature is calculated on
- v is the length of the feature per window
We use a little numpy reshaping to merge these feature vectors into a
single feature vector of shape (x, y, n) where n is the total length of
all features over all windows. In this example it will be (8, 8, 13)
because:
- HoG has 5 numbers per window and 2 windows: 10
- NirNDVI has 1 number per window and 1 window: 1
- Pantex has 1 number per window and 2 windows: 2
- Total: 13
.. code:: ipython3
vector = []
for feature_vector in extract_features(features, generator):
# The shape returned is (x, y, w, v)
# Reshape the resulting vector so it is (x, y, w * v)
# e.g. flattened along the windows and features
data = feature_vector.vector.reshape(
*feature_vector.vector.shape[0:2], -1)
vector.append(data)
# dstack reshapes the vector into and (x, y, n)
# where n is the total length of all features
featureset = np.dstack(vector)
print("Feature set has shape:", featureset.shape)
.. parsed-literal::
Feature set has shape: (8, 8, 13)
Showing the resulting features
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Below we show the results for the calculated features.
In the result images you can see the edges of the feature vector have
been masked as the windows at the edge of the original image contain
masked values. Furthermore, please keep in mind that the value for the
feature in each block depends on an area around the block.
HoG
^^^
Here is the result of the HoG feature, we display the first value for
each window.
Histogram of Gradients is a feature that first calculates a histogram of
the gradient orientations in the window. Using this histogram 5 values
are calculated. The first value is the 1st heaved central shift moment.
Heaved central shift moments are a measure of spikiness of a histogram.
The other values are: the 2nd heaved central shift moment, the
orientation of the highest and second highest peaks and the sine of the
absolute difference between the highest and second highest peak (this is
1 for right angles).
.. code:: ipython3
fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(24, 8))
ax1.axis('off')
ax1.imshow(image['rgb'])
ax1.set_title('Input image')
ax2.axis('off')
ax2.imshow(featureset[:, :, 0], cmap="PRGn")
ax2.set_title('Hog Feature for window {}'.format(two_windows[0]))
ax3.axis('off')
ax3.imshow(featureset[:, :, 5], cmap="PRGn")
ax3.set_title('Hog Feature for window {}'.format(two_windows[1]))
plt.show()
.. image:: feature_extraction_files/feature_extraction_11_0.png
Normalized Difference Vegetation Index
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Here we show the result for the NDVI feature. The NDVI feature captures
the level of vegetation: purple means very little vegetation, green
means a lot of vegetation.
.. code:: ipython3
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 8))
ax1.axis('off')
ax1.imshow(image['rgb'])
ax1.set_title('Input image')
ax2.axis('off')
ax2.imshow(featureset[:, :, 10], cmap="PRGn")
ax2.set_title('NirNDVI for window {}'.format(one_window[0]))
plt.show()
.. image:: feature_extraction_files/feature_extraction_13_0.png
Pantex
^^^^^^
Here we show the results for the Pantex feature. The Pantex feature
captures the level of built-up structures: purple means very little
built-up area while green means heavily built-up.
.. code:: ipython3
fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(24, 8))
ax1.axis('off')
ax1.imshow(image['rgb'])
ax1.set_title('Input image')
ax2.axis('off')
ax2.imshow(featureset[:, :, 11], cmap="PRGn")
ax2.set_title('Pantex for window {}'.format(two_windows[0]))
ax3.axis('off')
ax3.imshow(featureset[:, :, 12], cmap="PRGn")
ax3.set_title('Texton for window {}'.format(two_windows[1]))
plt.show()
.. image:: feature_extraction_files/feature_extraction_15_0.png
|
/satsense-0.9.tar.gz/satsense-0.9/doc/notebooks/feature_extraction.rst
| 0.957774 | 0.980562 |
feature_extraction.rst
|
pypi
|
```
%load_ext autoreload
%autoreload 2
```
# Define training and test data
```
from pathlib import Path
sampling_step_size = 10, 10
windows = (
(50, 50),
(100, 100),
(200, 200),
)
home = Path.home()
data = home / 'DynaSlum' / 'Work'
train_files = (
data / 'section_1_multiband.tif',
data / 'section_2_multiband.tif',
)
test_files = (
data / 'section_3_multiband.tif',
)
ground_truth_file = data / 'slum_approved.shp'
# Path where temporary files are saved
work = home / 'satsense_notebook'
```
# Define the set of features for classification
```
from satsense.features import (NirNDVI, HistogramOfGradients, Pantex, Sift,
Lacunarity, Texton)
from satsense import Image
train_images = [Image(file, 'worldview3') for file in train_files]
ndvi = NirNDVI(windows)
hog = HistogramOfGradients(windows)
pantex = Pantex(windows)
lacunarity = Lacunarity(windows)
sift = Sift.from_images(windows, train_images)
texton = Texton.from_images(windows, train_images)
features = [
ndvi,
hog,
pantex,
lacunarity,
sift,
texton,
]
```
# Compute and save features
```
import os
from pathlib import Path
from satsense import extract_features
from satsense.generators import FullGenerator
def compute_features(filenames):
paths = []
for filename in filenames:
image = Image(filename, 'worldview3')
path = str(work / Path(filename).stem) + os.sep
paths.append(path)
if not os.path.exists(path):
os.makedirs(path)
generator = FullGenerator(image, sampling_step_size)
for feature_vector in extract_features(features, generator):
feature_vector.save(path)
return paths
train_data_paths = compute_features(train_files)
```
# Load training data
```
import numpy as np
from satsense import Image, FeatureVector
from satsense.util.mask import get_ndxi_mask, load_mask_from_shapefile, resample, save_mask2file
from satsense.features import NirNDVI, WVSI
from satsense.generators import FullGenerator
def load_feature_vector(features, path):
"""Load feature values from file."""
feature_vector = []
for feature in features:
vector = FeatureVector.from_file(feature, path).vector
# flatten window/feature_size dimensions
vector.shape = (vector.shape[0], vector.shape[1], -1)
feature_vector.append(vector)
feature_vector = np.ma.dstack(feature_vector)
return feature_vector
def load_ground_truth(filename, sampling_step_size, path, shape, crs, transform):
ground_truth = load_mask_from_shapefile(filename, shape, transform)
mask_file = path / 'ground_truth_mask.tif'
ground_truth_mask = save_mask2file(ground_truth, mask_file, crs, transform)
ground_truth_image = Image(mask_file, 'monochrome', normalization_parameters=False)
ground_truth = resample(FullGenerator(ground_truth_image, sampling_step_size))
return ground_truth
labels = {
'other': 0,
'deprived_neighbourhood': 1,
'vegetation': 2,
}
x_train = []
y_train = []
for path, image in zip(train_data_paths, train_images):
print("Processing", image.filename)
# Load feature vector
feature_vector = load_feature_vector(features, path)
label_vector = np.zeros(feature_vector.shape[:2], dtype=np.uint8)
# Create deprived neighbourhood labels
ground_truth = load_ground_truth(
ground_truth_file, sampling_step_size, path, image.shape, image.crs, image.transform)
label_vector[ground_truth] = labels['deprived_neighbourhood']
# Create vegetation labels
generator = FullGenerator(image, sampling_step_size)
vegetation_mask = get_ndxi_mask(generator, NirNDVI)
label_vector[vegetation_mask] = labels['vegetation']
# Create x_train and y_train
feature_vector.shape = (-1, feature_vector.shape[2])
label_vector.shape = (-1, )
x_train.append(feature_vector)
y_train.append(label_vector)
x_train = np.concatenate(x_train)
y_train = np.concatenate(y_train)
```
# Train a classifier
```
from sklearn.ensemble import GradientBoostingClassifier
classifier = GradientBoostingClassifier(verbose=True)
classifier.fit(x_train, y_train)
```
# Load test data and assess performance
```
from sklearn.metrics import classification_report, matthews_corrcoef, confusion_matrix
test_data_paths = compute_features(test_files)
test_images = [Image(f, 'worldview3') for f in test_files]
for path, image in zip(test_data_paths, test_images):
print('Performance on', image.filename)
# Create x_test
x_test = load_feature_vector(features, path)
shape = x_test.shape
x_test.shape = (-1, shape[2])
# Predict the labels
y_pred = classifier.predict(x_test)
# Create y_test
y_test = np.zeros(shape[:2], dtype=np.uint8)
# Create deprived neighbourhood labels
ground_truth = load_ground_truth(
ground_truth_file, sampling_step_size, path, image.shape, image.crs, image.transform)
y_test[ground_truth] = labels['deprived_neighbourhood']
# Create vegetation labels
generator = FullGenerator(image, sampling_step_size)
vegetation_mask = get_ndxi_mask(generator, NirNDVI)
y_test[vegetation_mask] = labels['vegetation']
y_test.shape = (-1, )
# Assess performance
# Optionally merge the vegetation class into 'other' for a more representative performance measure
# y_pred[y_pred == labels['vegetation']] = labels['other']
# y_test[y_test == labels['vegetation']] = labels['other']
print(matthews_corrcoef(y_test, y_pred))
print(classification_report(y_test, y_pred, labels=list(labels.values()), target_names=list(labels.keys())))
print(confusion_matrix(y_test, y_pred))
```
|
/satsense-0.9.tar.gz/satsense-0.9/notebooks/Classification/Classification_Example.ipynb
| 0.721253 | 0.835551 |
Classification_Example.ipynb
|
pypi
|
# Jaccard index between two multi-polygons
## Simple example
This notebook illustrates the computation of Jaccard similarity index between two simple multi-polygons using `shapely` and `satsense` python libraries.
### Creating two simple multi-polygons
```
# Python, shapely and satsense package imports
from matplotlib import pyplot
from shapely.geometry import MultiPolygon
from satsense.util.shapefile import show_multipolygon as shmp # visualization of multipolygons
# define the nodes of valid multi-polygons
a = [(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)]
b = [(1, 1), (1, 2), (2, 2), (2, 1), (1, 1)]
c = [(2,3), (4,3), (4,4), (2,4), (2,3)]
multi1 = MultiPolygon([[a, []], [b, []] , [c, []]])
d = [(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]
e = [(3, 3), (3, 4), (4, 4), (4, 3), (3, 3)]
multi2 = MultiPolygon([[d, []], [e, []]])
print("Multi-polygon 1 valid?", multi1.is_valid)
print("Multi-polygon 2 valid?", multi2.is_valid)
```
### Visualizing the multi-polygons
```
# Visualization parameters
RED = '#FF0000'
YOLK = '#FFE600'
al = 0.8
al_over = al - 0.2
show_vertices = True
extent = [-1, -1, 5, 5] # format of extent is [xmin, ymin, xmax, ymax]
# Visualize the multi-polygons
fig = pyplot.figure(1, dpi=90)
ax = fig.add_subplot(131)
shmp(multi1, ax, show_vertices, extent, RED, al, 'multi-polygon 1')
ax = fig.add_subplot(132)
shmp(multi2, ax, show_vertices, extent, YOLK, al, 'multi-polygon 2')
ax = fig.add_subplot(133)
shmp(multi1, ax, show_vertices, extent, RED, al_over, '')
shmp(multi2, ax, show_vertices, extent, YOLK, al_over, 'overlay multi-\n polygons 1 and 2')
pyplot.show()
```
### Jaccard Index between the two multi-polygons
```
# Satsense package import
from satsense.performance.jaccard_similarity import jaccard_index_multipolygons as jim # jaccard index computation
# intersections between the multi-polygons
intersec = multi1.intersection(multi2).area
print("The area of the intersection between the 2 multi-polygons is ",intersec)
# union
union = multi1.union(multi2).area
print("The area of the uinion between the 2 multi-polygons is ",union)
# compute the Jaccard index (defined as intersection/union)
print("The Jaccard index between the 2 multi-polygons is ", jim(multi1, multi2))
```
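Since the Jaccard index is defined as the intersection area divided by the union area, the result can be sanity-checked directly from the two areas computed above:
```
# Direct computation of the Jaccard index from the areas above
print("Direct computation:", intersec / union)  # 3.0 / 6.0 = 0.5 for these polygons
```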
|
/satsense-0.9.tar.gz/satsense-0.9/notebooks/Performance/JaccardIndex_Multipolygons.ipynb
| 0.579876 | 0.982955 |
JaccardIndex_Multipolygons.ipynb
|
pypi
|
History
=======
0.17.0
---------------------
* Add JSON schema definitions. See `schema/v1/Document.json` for root schema file.
* Add observation object type.
* Add support for cropped sensors.
* Add altitude to site object. Set site `alt` parameter in km. Default is 0.
0.16.0
---------------------
* Add option to annotate stars. Set sim option `star_annotation_threshold` to the minimum star brightness magnitude or `false` to disable. Disabled by default.
* Add option to show annotated stars in annotated images. Set sim option `show_star_boxes` to `true` to enable.
0.15.1
---------------------
* Remove clipping of negative values in ground truth files by default.
* Fix missing dependencies for ground truth file generation.
0.15.0
---------------------
* Add support to save ground truth image data to the Annotations directory. Set sim option `save_ground_truth` to `true`.
* Add support for running on CPU with no GPU acceleration.
* Add CZML options for sensor visualization and object billboard image.
0.14.0
---------------------
* Add vector math library.
* Add CZML output for sensor visualization.
* Fix objects not updating properly when image renderer is off.
0.13.1
---------------------
* Add argument to set folder name in `gen_multi`.
* Add environment variable, `SATSIM_SKYFIELD_LOAD_DIR`, to specify location of Skyfield ephemeris files.
* Fix incorrect CZML output when image renderer is off.
0.13.0
---------------------
* Add ephemeris objects that are propagated with the Lagrange interpolator.
* Add Cesium CZML output. Set sim option `save_czml` to `false` to disable.
* Add CSV text file star catalog loader. This feature is useful for small catalogs such as Hipparcos and simulating wide FOV sensors.
* Add multiplier and clipping for radial cosine.
* Add option to skip image rendering. Set sim option `mode` to `none` to bypass image rendering.
* Update interfaces for newest version of Skyfield, Poliastro, POPPY, and AstroPy.
* Fix star renderer issue removing stars in field of view for non-square arrays.
0.12.0
---------------------
* Add augmentation of SatNet `tf.data.Dataset`. This feature allows injecting synthetic targets into real data during training.
* Add FFT convolution to `add_patch` sprite render and `scatter_shift` image augmenter for speed improvement.
* Add cache last PSF FFT to `fftconv2p` for speed improvement for static PSFs.
* Add two-body state vector as a trackable target.
* Add moon and sun model and misc methods to calculate phase angle and target brightness.
0.11.0
---------------------
* Add support to render star motion with FFT. Set sim option `star_render_mode` to `fft`.
* Add option to sample photon noise multiple times. Set sim option `num_shot_noise_samples` to integer number.
* Add support to render a satellite as a sprite. Set `model` option in obs.
* Add support to load and augment sprite model with `$pipeline` operator.
* Add cropped POPPY PSF generation.
* Fix GreatCircle propagator tracking offset.
* Fix runtime exception when site and track_mode are not defined.
* Add a TensorFlow 2.6 Docker build file and update the TensorFlow 2.2 and 2.4 build files.
0.10.0
---------------------
* Add support for piecewise rendering. Set sim option `render_size` to enable. For example, [256, 256].
* Add `fixed` tracking mode with mount azimuth and elevation.
* Add great circle propagator for targets.
* Add in-memory image generation. See generator function `image_generator`.
* Fix missing stars when FOV crosses zero degree RA.
* Add curved targets using bezier curve raster. Enabled by default. Set sim option `num_target_samples` to 2 to enable linear raster.
* Add LRU cache to star catalog reader.
* Add option to turn off SNR calculation. Set sim option `calculate_snr` to false will render targets and stars together.
* Handle unstable SGP4 TLEs.
* Add TensorFlow 2.4 Docker build file.
* Add debug output for pristine images of targets and stars.
0.9.1
---------------------
* Calculate POPPY input wavefront resolution to avoid PSF aliasing.
* Add support for additional FITS image data types (`int16`, `uint16`, `int32`, `uint32`, `float32`).
* Add batch processing to `transform_and_add_counts` to support batch processing of stars.
* Add `auto` option to calculate temporal oversample factor based on star velocities.
* Add option to turn off serializing config data to pickle file (`save_pickle`).
* Add option to turn off png movie output (`save_movie`).
* Add `crop_and_resize` and `flip` image augmentation.
* Set pixels with values beyond the pixel data type's capacity to the maximum value for that data type.
* Add `lognormal` function to generate a distribution with a true target mean.
* Fix issue with sidereal track.
* Fix issue with fragment velocity not being randomly sampled.
0.9.0
---------------------
* Add Physical Optics Propagation in Python (POPPY) PSF generation.
* Add PSF augmentation with `$pipeline` replacement key.
* Add `$function` and `$compound` replacement key.
* Add ability to generate stray light from a `$function` replacement key.
* Add built-in 2D polynomial image generator for stray light, `polygrid2d`.
* Add built-in cosine fourth image generator for irradiance falloff, `radial_cos2d`.
* Add built-in sine wave image generator for fix pattern noise, `sin2d`.
* Add built-in image generator from AstroPy model, `astropy_model2d`.
* Add built-in image augmentation, `scatter_shift` and `scatter_shift_polar`.
* Add `$cache` replacement key (caching works for PSF and `$function`).
0.8.3
---------------------
* Fix new Skyfield incompatibility.
0.8.2
---------------------
* Prefix replacement keys with `$` in SatSim configuration file.
* Add option to scale collision fragments by cosine of the exit angle.
0.8.1
---------------------
* Add astrometric metadata into FITS header
* Refactor WCS library
* Add option to flip images about x or y axis
* Add option to refresh stars for each frame
* Add RPO from TLE generator
0.8.0
---------------------
* Add two body propagator
* Add object `create`, `delete`, and `update` events
* Add collision generator
* Add breakup generator
* Add `ref` keyword to configuration
* Add `key` keyword to `import` configuration
* Refactor astrometric library
0.7.2
---------------------
* Add option to specify star and obs velocity in polar coordinates
0.7.1
---------------------
* Add option to turn off shot noise: `sim.enable_shot_noise` (default `true`; set to `false` to disable)
* Add option to turn off annotation boxes in image: `sim.show_obs_boxes` (default `true`; set to `false` to disable)
* Add option to specify velocity in arcseconds: `sim.velocity_units: arcsec`
* Fix PNG output threading issue
0.7.0
---------------------
* Add function pipelines to support variable target brightness
0.6.1
---------------------
* Fix built-in generators not included in distribution
* Add dockerfile
0.6.0
---------------------
* Add configuration import.
* Add configuration generator functions.
* Add built-in generator for breakups.
* Add built-in generator for CSOs.
* Add built-in generator for loading TLE files.
0.5.0
---------------------
* Runtime optimization.
* Add parallel processing and multi-gpu utilization.
* Add option to limit gpu memory usage.
0.4.0
---------------------
* Add signal to noise calculation for target pixels.
0.3.0
---------------------
* Add support for two line element set SGP4 satellite propagator.
* Add support for rate and sidereal track from topocentric site.
0.2.0
---------------------
* Add support for SSTR7 star catalog.
0.1.1
---------------------
* Add target position to annotation file.
* Updates to run GitLab CI.
0.1.0
---------------------
* First release.
|
/satsim-0.17.0.tar.gz/satsim-0.17.0/HISTORY.md
| 0.947088 | 0.70562 |
HISTORY.md
|
pypi
|
import os
import json
from subprocess import Popen, PIPE
from .aws_config_file import AWSConfig
class GetS3Handler(AWSConfig):
"""Returns an object with methods to interact with aws S3 storage service.
This module allows the user to interact with S3 storage service.
The module contains the following functions:
- `list_buckets()` - Returns List of all S3 buckets of an aws account.
- `list_objects()` - Returns List of all the objects in the bucket/bucket_path recursively.
- `upload_objects()` - Upload files/folders to s3 bucket.
- `download_objects()` - Download files/folders from s3 bucket.
Example :
```
>> from satsure_cloud_utils import GetS3Handler
>> s3_handler = GetS3Handler(
config_file_path="*****"
)
>> output = s3_handler.list_buckets()
>> print(output)
```
"""
AWS_PROFILE_NAME = ""
def __init__(self,config_file_path:str):
AWSConfig.__init__(self,config_file_path)
def _list_buckets(self):
command = f'aws s3api list-buckets --profile {self.AWS_PROFILE_NAME} --query "Buckets[].Name" --output text'
process = Popen(command,shell=True,stdout=PIPE)
stdout, _ = process.communicate()
command_output_str = stdout.decode("utf-8")
try:
bucket_names_list = command_output_str.strip("\n").strip("\r").split("\t")
return bucket_names_list
except Exception as E:
print("No output found")
return [command_output_str]
def list_buckets(self):
"""Lists all s3 buckets of an aws account
Returns:
list: output/error list
"""
return self._list_buckets()
def _list_objects(self,
bucket_name: str,
obj_path: str = "",
include_filters: list = [],
exclude_filters: list = []):
include_pattern_str = ""
exclude_pattern_str = ""
if len(include_filters) > 0:
include_filters_str = "&&".join( [f"contains(Key,'{include_filter}')" for include_filter in include_filters ] )
include_pattern_str = f""" --query "Contents[? {include_filters_str} ].Key" """
if len(exclude_filters) > 0:
exclude_filters_str = "&&".join( [f"!contains(Key,'{exclude_filter}')" for exclude_filter in exclude_filters ] )
exclude_pattern_str = f""" --query "Contents[? {exclude_filters_str} ].Key" """
obj_path_command = ""
if obj_path:
if obj_path[-1] != "/":
obj_path += "/"
obj_path_command = f'--prefix "{obj_path}"'
command = f"""aws s3api list-objects --bucket "{bucket_name}" {obj_path_command} --delimiter "/" {include_pattern_str} {exclude_pattern_str} --request-payer "requester" --profile {self.AWS_PROFILE_NAME} --output json"""
process = Popen(command,shell=True,stdout=PIPE)
stdout, _ = process.communicate()
command_output_str = stdout.decode("utf-8")
try:
object_names_list = []
output_list = json.loads(command_output_str)
if include_filters or exclude_filters:
return output_list
else:
if "Contents" in output_list:
for content in output_list["Contents"]:
object_names_list.append(content["Key"])
if "CommonPrefixes" in output_list:
for prefix in output_list["CommonPrefixes"]:
object_names_list.append(prefix["Prefix"])
return object_names_list
except Exception as E:
print("No output found")
return []
def list_objects(self,
bucket_name: str,
obj_path: str = "",
include_filters: list = [],
exclude_filters: list = []):
"""Lists all the objects in the bucket/bucket_path recursively
Args:
bucket_name (string): Name of the bucket
obj_path (string): Path of files in bucket (Default: '')
include_filters (list): list of sub strings to include in filtering
Eg: ["20220101", ".tif"] (no wildcard characters allowed, only sub-strings)
exclude_filters (list): list of sub strings to exclude in filtering
Eg: ["20220101", ".tif"] (no wildcard characters allowed, only sub-strings)
Returns:
list: output/error list
"""
return self._list_objects(bucket_name,obj_path,include_filters,exclude_filters)
def upload_objects(self,
bucket_name: str,
s3_path: str,
local_path: str,
include_filters: list = [],
exclude_filters: list = [],
dryrun: bool = False):
"""Upload files/folders to s3 bucket
Args:
bucket_name (string): Name of bucket
s3_path (string): Path on s3 bucket
local_path (string): Local path on your machine
include_filters (list): list of sub strings to include in filtering
Eg: ["20220101", ".tif"] (no wildcard characters allowed, only sub-strings)
exclude_filters (list): list of sub strings to exclude in filtering
Eg: ["20220101", ".tif"] (no wildcard characters allowed, only sub-strings)
dryrun (bool): Displays the operations that would be performed using the specified command without actually running them
Returns:
string: output/error string
"""
s3_path = s3_path.strip("/")
include_filter_pattern = ""
exclude_filter_pattern = ""
dryrun_pattern = ""
if len(include_filters) > 0:
include_filter_pattern += " ".join( [f'--include "{include_filter}"' for include_filter in include_filters ] )
include_filter_pattern = '--exclude "*" ' + include_filter_pattern
if len(exclude_filters) > 0:
exclude_filter_pattern += " ".join( [f'--exclude "{exclude_filter}"' for exclude_filter in exclude_filters ] )
if dryrun:
dryrun_pattern = f"--dryrun"
if os.path.isdir(local_path):
command = f"""aws s3 cp "{local_path}" "s3://{bucket_name}/{s3_path}" {exclude_filter_pattern} {include_filter_pattern} {dryrun_pattern} --recursive --request-payer "requester" --profile {self.AWS_PROFILE_NAME} --output json"""
else:
command = f"""aws s3 cp "{local_path}" "s3://{bucket_name}/{s3_path}" {exclude_filter_pattern} {include_filter_pattern} {dryrun_pattern} --request-payer "requester" --profile {self.AWS_PROFILE_NAME} --output json"""
process = Popen(command,shell=True,stdout=PIPE)
stdout, _ = process.communicate()
return stdout.decode("utf-8")
def _download_objects(self,
bucket_name: str,
s3_path: str ,
local_path: str,
bulk: bool,
include_filters: list = [],
exclude_filters: list = [],
dryrun: bool = False):
include_filter_pattern = ""
exclude_filter_pattern = ""
dryrun_pattern = ""
if len(include_filters) > 0:
include_filter_pattern += " ".join( [f'--include "{include_filter}"' for include_filter in include_filters ] )
include_filter_pattern = '--exclude "*" ' + include_filter_pattern
if len(exclude_filters) > 0:
exclude_filter_pattern += " ".join( [f'--exclude "{exclude_filter}"' for exclude_filter in exclude_filters ] )
if dryrun:
dryrun_pattern = f"--dryrun"
if not os.path.exists(local_path):
os.makedirs(local_path)
if bulk:
command = f"""aws s3 cp {dryrun_pattern} "s3://{bucket_name}/{s3_path}" "{local_path}" {include_filter_pattern} {exclude_filter_pattern} --recursive --request-payer "requester" --profile {self.AWS_PROFILE_NAME} """
else:
command = f"""aws s3 cp {dryrun_pattern} "s3://{bucket_name}/{s3_path}" "{local_path}" {include_filter_pattern} {exclude_filter_pattern} --request-payer "requester" --profile {self.AWS_PROFILE_NAME} """
print(command)
try:
process = Popen(command,shell=True,stdout=PIPE)
stdout, stderr = process.communicate()
return stdout.decode("utf-8")
except Exception as E:
print("No output found")
return ""  # keep the return type consistent with the success path (a string)
def download_objects(self,
bucket_name: str,
s3_path: str ,
local_path: str=".",
bulk: bool = False,
include_filters: list = [],
exclude_filters:list = [],
dryrun: bool = False):
"""Download files/folders from s3 bucket
Args:
bucket_name (string): Name of bucket
s3_path (string): path on s3 bucket
local_path (string): Path on your machine
bulk (bool): This allows to download files in bulk from bucket
include_filters (list): list of strings to include in filtering
Eg: ["91021032*010_K021.tif","91021032*010_K019.tif"]
exclude_filters (list): list of sub strings to exclude in filtering
Eg: ["XX"]
dryrun (bool): Displays the operations that would be performed using the specified command without actually running them
Returns:
string: output/error string
"""
return self._download_objects(bucket_name,
s3_path,
local_path,
bulk,
include_filters,
exclude_filters,
dryrun)
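# Example usage (a minimal sketch; the config path, bucket name and prefixes are placeholders):
#
#   from satsure_cloud_utils import GetS3Handler
#
#   s3_handler = GetS3Handler(config_file_path="config.ini")
#   print(s3_handler.list_buckets())
#   s3_handler.download_objects(
#       bucket_name="my-bucket",
#       s3_path="tiles/",
#       local_path="./downloads",
#       bulk=True,
#       dryrun=True,  # preview the copy operations without running them
#   )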
|
/satsure_cloud_utils-0.0.26.tar.gz/satsure_cloud_utils-0.0.26/satsure_cloud_utils/s3_handler_file.py
| 0.755276 | 0.374991 |
s3_handler_file.py
|
pypi
|
import json
import logging
from time import sleep
from datetime import datetime
from typing import Any, Dict, List, Optional
from urllib.parse import urljoin
import requests
from requests.exceptions import HTTPError
from .settings import Settings
log = logging.getLogger("saturn-client")
if log.level == logging.NOTSET:
logging.basicConfig()
log.setLevel(logging.INFO)
class SaturnConnection:
"""
Create a ``SaturnConnection`` to interact with the API.
:param url: URL for the SaturnCloud instance.
:param api_token: API token for authenticating the request to Saturn API.
Get from `/api/user/token`
"""
_options = None
def __init__(
self,
url: Optional[str] = None,
api_token: Optional[str] = None,
):
"""
Create a ``SaturnConnection`` to interact with the API.
:param url: URL for the SaturnCloud instance.
Example: "https://app.community.saturnenterprise.io"
:param api_token: API token for authenticating the request to Saturn API.
Get from ``/api/user/token``
"""
self.settings = Settings(url, api_token)
# test connection to raise errors early
self._saturn_version = self._get_saturn_version()
@property
def url(self) -> str:
"""URL of Saturn instance"""
return self.settings.url
def _get_saturn_version(self) -> str:
"""Get version of Saturn"""
url = urljoin(self.url, "api/status")
response = requests.get(url, headers=self.settings.headers)
if not response.ok:
raise ValueError(response.reason)
return response.json()["version"]
@property
def options(self) -> Dict[str, Any]:
"""Options for various settings"""
if self._options is None:
url = urljoin(self.url, "api/info/servers")
response = requests.get(url, headers=self.settings.headers)
if not response.ok:
raise ValueError(response.reason)
self._options = response.json()
return self._options
def list_projects(self) -> List[Dict[str, Any]]:
"""List all projects that you have access to."""
url = urljoin(self.url, "api/projects")
response = requests.get(url, headers=self.settings.headers)
try:
response.raise_for_status()
except HTTPError as err:
raise HTTPError(response.status_code, response.json()["message"]) from err
return response.json()["projects"]
def get_project(self, project_id: str) -> Dict[str, Any]:
"""Get project by id"""
url = urljoin(self.url, f"api/projects/{project_id}")
response = requests.get(url, headers=self.settings.headers)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, project_id) from err
return response.json()
def delete_project(self, project_id: str) -> None:
"""Delete project by id"""
url = urljoin(self.url, f"api/projects/{project_id}")
response = requests.delete(url, headers=self.settings.headers)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, project_id) from err
def create_project(
self,
name: str,
description: Optional[str] = None,
image_uri: Optional[str] = None,
start_script: Optional[str] = None,
environment_variables: Optional[Dict] = None,
working_dir: Optional[str] = None,
jupyter_size: Optional[str] = None,
jupyter_disk_space: Optional[str] = None,
jupyter_auto_shutoff: Optional[str] = None,
jupyter_start_ssh: Optional[bool] = None,
) -> Dict[str, Any]:
"""
Create a project from scratch
:param name: Name of project. This is the only field that is required.
:param description: Short description of the project (less than 250 characters).
:param image_uri: Location of the image. Example:
485185227295.dkr.ecr.us-east-1.amazonaws.com/saturn-dask:2020.12.01.21.10
:param start_script: Script that runs on start up. Examples: "pip install dask"
:param environment_variables: Env vars expressed as a dict. The names will be
coerced to uppercase.
:param working_dir: Location to use as working directory. Example: /home/jovyan/project
:param jupyter_size: Size for the jupyter associated with the project.
The options for these are available from ``conn.options["sizes"]``.
:param jupyter_disk_space: Disk space for the jupyter associated with the project.
The options for these are available from ``conn.options["disk_space"]``.
:param jupyter_auto_shutoff: Auto shutoff interval for the jupyter associated with the
project. The options for these are available from ``conn.options["auto_shutoff"]``.
:param jupyter_start_ssh: Whether to start ssh for the jupyter associated with the project.
This is used for accessing the workspace from outside of Saturn.
"""
if environment_variables:
environment_variables = json.dumps(
{k.upper(): v for k, v in environment_variables.items()}
)
self._validate_workspace_settings(
size=jupyter_size,
disk_space=jupyter_disk_space,
auto_shutoff=jupyter_auto_shutoff,
start_ssh=jupyter_start_ssh,
)
project_config = {
"name": name,
"description": description,
"image": image_uri,
"start_script": start_script,
"environment_variables": environment_variables,
"working_dir": working_dir,
"jupyter_size": jupyter_size,
"jupyter_disk_space": jupyter_disk_space,
"jupyter_auto_shutoff": jupyter_auto_shutoff,
"jupyter_start_ssh": jupyter_start_ssh,
}
# only send kwargs that are explicitly set by user
project_config = {k: v for k, v in project_config.items() if v is not None}
url = urljoin(self.url, "api/projects")
response = requests.post(
url,
data=json.dumps(project_config),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise HTTPError(response.status_code, response.json()["message"]) from err
return response.json()
def update_project(
self,
project_id: str,
description: Optional[str] = None,
image_uri: Optional[str] = None,
start_script: Optional[str] = None,
environment_variables: Optional[Dict] = None,
working_dir: Optional[str] = None,
jupyter_size: Optional[str] = None,
jupyter_disk_space: Optional[str] = None,
jupyter_auto_shutoff: Optional[str] = None,
jupyter_start_ssh: Optional[bool] = None,
update_jupyter_server: Optional[bool] = True,
) -> Dict[str, Any]:
"""
Update an existing project
:param project_id: ID of project. This is the only field that is required.
:param description: Short description of the project (less than 250 characters).
:param image_uri: Location of the image. Example:
485185227295.dkr.ecr.us-east-1.amazonaws.com/saturn-dask:2020.12.01.21.10.
If this does not include a registry URL, Saturn will assume the image is
publicly-available on Docker Hub.
:param start_script: Script that runs on start up. Examples: "pip install dask".
This can be any valid code that can be run with ``sh``, and can be multiple lines.
:param environment_variables: Env vars expressed as a dict. The names will be
coerced to uppercase.
:param working_dir: Location to use as working directory. Example: /home/jovyan/project
:param jupyter_size: Size for the jupyter associated with the project.
The options for these are available from ``conn.options["sizes"]``.
:param jupyter_disk_space: Disk space for the jupyter associated with the project.
The options for these are available from ``conn.options["disk_space"]``.
:param jupyter_auto_shutoff: Auto shutoff interval for the jupyter associated with the
project. The options for these are available from ``conn.options["auto_shutoff"]``.
:param jupyter_start_ssh: Whether to start ssh for the jupyter associated with the project.
This is used for accessing the workspace from outside of Saturn.
:param update_jupyter_server: Whether to update the jupyter server associated with the
project. This will stop the jupyter server if it is running.
"""
if environment_variables:
environment_variables = json.dumps(
{k.upper(): v for k, v in environment_variables.items()}
)
self._validate_workspace_settings(
size=jupyter_size,
disk_space=jupyter_disk_space,
auto_shutoff=jupyter_auto_shutoff,
start_ssh=jupyter_start_ssh,
)
project_config = {
"description": description,
"image": image_uri,
"start_script": start_script,
"environment_variables": environment_variables,
"working_dir": working_dir,
"jupyter_size": jupyter_size,
"jupyter_disk_space": jupyter_disk_space,
"jupyter_auto_shutoff": jupyter_auto_shutoff,
"jupyter_start_ssh": jupyter_start_ssh,
}
# only send kwargs that are explicitly set by user
project_config = {k: v for k, v in project_config.items() if v is not None}
project_url = urljoin(self.url, f"api/projects/{project_id}")
response = requests.patch(
project_url,
data=json.dumps(project_config),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, project_id) from err
project = response.json()
if not (project["jupyter_server_id"] and update_jupyter_server):
return project
jupyter_config = {
"image": image_uri,
"start_script": start_script,
"environment_variables": environment_variables,
"working_dir": working_dir,
"size": jupyter_size,
"disk_space": jupyter_disk_space,
"auto_shutoff": jupyter_auto_shutoff,
"start_ssh": jupyter_start_ssh,
}
# only send kwargs that are explicitly set by user
jupyter_config = {k: v for k, v in jupyter_config.items() if v is not None}
if len(jupyter_config) == 0:
return project
self.stop_jupyter_server(project["jupyter_server_id"])
jupyter_url = urljoin(self.url, f"api/jupyter_servers/{project['jupyter_server_id']}")
response = requests.patch(
jupyter_url,
data=json.dumps(jupyter_config),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise HTTPError(response.status_code, response.json()["message"]) from err
return project
def get_jupyter_server(self, jupyter_server_id) -> Dict[str, Any]:
"""Get a particular jupyter server"""
url = urljoin(self.url, f"api/jupyter_servers/{jupyter_server_id}")
response = requests.get(
url,
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, jupyter_server_id) from err
return response.json()
def wait_for_jupyter_server(self, jupyter_server_id: str, timeout: int = 360) -> None:
"""Wait for jupyter server to be running
:param jupyter_server_id: ID of the jupyter_server to wait for.
:param timeout: Maximum time in seconds to wait. Default is 360 (6 minutes).
"""
target_status = "running"
sleep_interval = 5
start_time = datetime.utcnow()
time_passed = 0
log.info(f"Waiting for Jupyter to be {target_status}...")
old_status = ""
while time_passed < timeout:
status = self.get_jupyter_server(jupyter_server_id)["status"]
if old_status and status != old_status:
# New status line on change
print()
old_status = status
if status == target_status:
log.info(f"Jupyter server is {status}")
return
if status == "error":
raise AssertionError(
f"Jupyter server has status: {status}. See logs in Saturn User Interface."
)
time_passed = (datetime.utcnow() - start_time).total_seconds()
print(
f"\rChecking jupyter status: {status} "
f"(seconds passed: {time_passed:.0f}/{timeout})",
end="",
)
sleep(sleep_interval)
raise TimeoutError("Timed out waiting for jupyter server")
def stop_jupyter_server(self, jupyter_server_id: str) -> None:
"""Stop a particular jupyter server.
This method will return as soon as the stop process has been triggered. It'll take
longer for the jupyter server to shut off, but you can check the status using
``get_jupyter_server``
"""
url = urljoin(self.url, f"api/jupyter_servers/{jupyter_server_id}/stop")
response = requests.post(
url,
data=json.dumps({}),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, jupyter_server_id) from err
def start_jupyter_server(self, jupyter_server_id: str) -> None:
"""Start a particular jupyter server.
This method will return as soon as the start process has been triggered. It'll take
longer for the jupyter server to be up, but you can check the status using
``get_jupyter_server``
"""
url = urljoin(self.url, f"api/jupyter_servers/{jupyter_server_id}/start")
response = requests.post(
url,
data=json.dumps({}),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, jupyter_server_id) from err
def stop_dask_cluster(self, dask_cluster_id: str) -> None:
"""Stop a particular dask cluster.
This method will return as soon as the stop process has been triggered. It'll take
longer for the dask cluster to actually shut down.
"""
url = urljoin(self.url, f"api/dask_clusters/{dask_cluster_id}/close")
response = requests.post(
url,
data=json.dumps({}),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, dask_cluster_id) from err
def start_dask_cluster(self, dask_cluster_id: str) -> None:
"""Start a particular dask cluster.
This method will return as soon as the start process has been triggered.
It'll take longer for the dask cluster to be up. This is primarily
useful when the dask cluster has been stopped as a side-effect of
stopping a jupyter server or updating a project. For more fine-grain
control over the dask cluster see dask-saturn.
"""
url = urljoin(self.url, f"api/dask_clusters/{dask_cluster_id}/start")
response = requests.post(
url,
data=json.dumps({}),
headers=self.settings.headers,
)
try:
response.raise_for_status()
except HTTPError as err:
raise _http_error(response, dask_cluster_id) from err
def _validate_workspace_settings(
self,
size: Optional[str] = None,
disk_space: Optional[str] = None,
auto_shutoff: Optional[str] = None,
start_ssh: Optional[bool] = None,
):
"""Validate the options provided"""
errors = []
if size is not None:
options = list(self.options["sizes"].keys())
if size not in options:
errors.append(
f"Proposed size: {size} is not a valid option. " f"Options are: {options}."
)
if disk_space is not None:
options = self.options["disk_space"]
if disk_space not in options:
errors.append(
f"Proposed disk_space: {disk_space} is not a valid option. "
f"Options are: {options}."
)
if auto_shutoff is not None:
options = self.options["auto_shutoff"]
if auto_shutoff not in options:
errors.append(
f"Proposed auto_shutoff: {auto_shutoff} is not a valid option. "
f"Options are: {options}."
)
if start_ssh is not None and not isinstance(start_ssh, bool):
errors.append("start_ssh must be set to a boolean if defined.")
if len(errors) > 0:
raise ValueError(" ".join(errors))
def _maybe_name(_id):
"""Return message if len of id does not match expectation (32)"""
if len(_id) == 32:
return ""
return "Maybe you used name rather than id?"
def _http_error(response: requests.Response, resource_id: str):
"""Return HTTPError from response for a resource"""
response_message = response.json().get(
"message", "saturn-client encountered an unexpected error."
)
return HTTPError(response.status_code, f"{response_message} {_maybe_name(resource_id)}")
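# Example usage (a minimal sketch; the URL and token are placeholders):
#
#   conn = SaturnConnection(
#       url="https://app.community.saturnenterprise.io",
#       api_token="<token from /api/user/token>",
#   )
#   for project in conn.list_projects():
#       print(project)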
|
/saturn_client-0.0.3-py3-none-any.whl/saturn_client/core.py
| 0.827654 | 0.163212 |
core.py
|
pypi
|
import cv2
import torch
import numpy as np
from .abstractBase import detection
class VggClassify(detection):
def __init__(self, section, cfg_path, gpu_id, model_path=None):
super(VggClassify, self).__init__(section=section, cfg_path=cfg_path)
self.section = section
self.gpu_id = gpu_id
self.model_path = model_path
self.classes = tuple(self.classes.strip(',').split(','))
self.device = self.select_device(self.gpu_id)
def model_restore(self):
"""加载模型"""
try:
if not self.model_path:
raise ValueError("model path is None")
self.model = torch.load(self.model_path)
self.model.to(self.device)
self.model.eval()
self.warmUp()
print("* load vgg model success : {0}".format(self.section))
except Exception as e:
print(e)
raise ValueError("* load vgg model failed : {0}".format(self.section))
def warmUp(self):
im = 123 * np.ones((224, 224, 3), dtype=np.uint8)
self.detect(im)
@torch.no_grad()
def detect(self, im, image_name='test.jpg'):
"""进行检测"""
if im is None:
return None, 0
else:
src_img = cv2.resize(im, (224, 224))
img = cv2.cvtColor(src_img, cv2.COLOR_BGR2RGB)
img_tensor = torch.from_numpy(img / 255.).permute(2, 0, 1).float().to(self.device)  # use the configured device rather than the default GPU
img_tensor = torch.unsqueeze(img_tensor, 0)
out = self.model(img_tensor)
if hasattr(out, "data"):
# softmax
out = torch.nn.functional.softmax(out, 1)
proba, pred = out.data.max(1, keepdim=True)
pre = pred.data.item()
proba = proba.data.item()
# release cached GPU memory
torch.cuda.empty_cache()
return self.classes[int(pre)], proba
else:
return None, 0
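# Example usage (a minimal sketch; the section name, config path and model path are placeholders):
#
#   detector = VggClassify(section="vgg", cfg_path="detect.cfg", gpu_id=0, model_path="vgg.pth")
#   detector.model_restore()
#   label, prob = detector.detect(cv2.imread("sample.jpg"))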
|
/saturn_lib-0.0.11-py3-none-any.whl/saturn_lib/vggClassify.py
| 0.460774 | 0.154026 |
vggClassify.py
|
pypi
|
from functools import partial
from collections import OrderedDict
import torch
import torch.nn as nn
def drop_path(x, drop_prob: float = 0., training: bool = False):
"""
Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
This is the same as the DropConnect impl I created for EfficientNet, etc networks, however,
the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for
changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use
'survival rate' as the argument.
"""
if drop_prob == 0. or not training:
return x
keep_prob = 1 - drop_prob
shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets
random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device)
random_tensor.floor_() # binarize
output = x.div(keep_prob) * random_tensor
return output
class DropPath(nn.Module):
"""
Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
"""
def __init__(self, drop_prob=None):
super(DropPath, self).__init__()
self.drop_prob = drop_prob
def forward(self, x):
return drop_path(x, self.drop_prob, self.training)
class PatchEmbed(nn.Module):
"""
2D Image to Patch Embedding
"""
def __init__(self, img_size=224, patch_size=16, in_c=3, embed_dim=768, norm_layer=None):  # embed_dim varies with the model variant
super().__init__()
img_size = (img_size, img_size)
patch_size = (patch_size, patch_size)
self.img_size = img_size
self.patch_size = patch_size
self.grid_size = (img_size[0] // patch_size[0], img_size[1] // patch_size[1])  # 224 // 16 = 14, i.e. a 14x14 grid
self.num_patches = self.grid_size[0] * self.grid_size[1]
self.proj = nn.Conv2d(in_c, embed_dim, kernel_size=patch_size, stride=patch_size)
self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity()  # Identity is a no-op
def forward(self, x):
B, C, H, W = x.shape
assert H == self.img_size[0] and W == self.img_size[1], \
f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})."
# flatten: [B, C, H, W] -> [B, C, HW]
# transpose: [B, C, HW] -> [B, HW, C]
x = self.proj(x).flatten(2).transpose(1, 2)
x = self.norm(x)
return x
class Attention(nn.Module):  # the multi-head self-attention module
def __init__(self,
dim,  # dimension of the input tokens
num_heads=8,  # configurable per model variant
qkv_bias=False,  # whether the qkv projection uses a bias (off by default here)
qk_scale=None,
attn_drop_ratio=0.,
proj_drop_ratio=0.):
super(Attention, self).__init__()
self.num_heads = num_heads
head_dim = dim // num_heads  # the token dimension is split across the heads, so each head gets dim // num_heads channels
self.scale = qk_scale or head_dim ** -0.5  # the 1/sqrt(d) scaling from the attention formula
self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)  # one FC produces q, k and v at once (hence dim * 3); three separate FCs would also work
self.attn_drop = nn.Dropout(attn_drop_ratio)  # randomly drops attention weights
self.proj = nn.Linear(dim, dim)  # the output projection W_o, also implemented as an FC layer
self.proj_drop = nn.Dropout(proj_drop_ratio)
def forward(self, x):
# [batch_size, num_patches + 1, total_embed_dim]
# batch size, sequence length after patch embedding plus one class token, embedding dim
B, N, C = x.shape
# qkv(): -> [batch_size, num_patches + 1, 3 * total_embed_dim]
# reshape: -> [batch_size, num_patches + 1, 3, num_heads, embed_dim_per_head]
# permute: -> [3, batch_size, num_heads, num_patches + 1, embed_dim_per_head]; the leading 3 separates q, k and v
qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)
# the (2, 0, 3, 1, 4) permutation moves the qkv axis to the front and the head axis before
# the sequence axis, so each head's attention can be computed with one batched matmul
# [batch_size, num_heads, num_patches + 1, embed_dim_per_head]
q, k, v = qkv[0], qkv[1], qkv[2]  # make torchscript happy (cannot use tensor as tuple); slices out q, k and v
# transpose: -> [batch_size, num_heads, embed_dim_per_head, num_patches + 1]
# @: multiply -> [batch_size, num_heads, num_patches + 1, num_patches + 1]
attn = (q @ k.transpose(-2, -1)) * self.scale  # @ is matrix multiplication: (N, d) @ (d, N) -> an (N, N) score matrix
attn = attn.softmax(dim=-1)  # dim=-1 applies softmax over each row
attn = self.attn_drop(attn)
# @: multiply -> [batch_size, num_heads, num_patches + 1, embed_dim_per_head]
# transpose: -> [batch_size, num_patches + 1, num_heads, embed_dim_per_head]
# reshape: -> [batch_size, num_patches + 1, total_embed_dim]
x = (attn @ v).transpose(1, 2).reshape(B, N, C)  # weighted sum over v, then the heads are merged back into one embedding
x = self.proj(x)
x = self.proj_drop(x)
return x
class Mlp(nn.Module):
"""
MLP as used in Vision Transformer, MLP-Mixer and related networks
"""
def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
super().__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.fc1 = nn.Linear(in_features, hidden_features)
self.act = act_layer()
self.fc2 = nn.Linear(hidden_features, out_features)
self.drop = nn.Dropout(drop)
def forward(self, x):
x = self.fc1(x)
x = self.act(x)
x = self.drop(x)
x = self.fc2(x)
x = self.drop(x)
return x
class Block(nn.Module):  # the Transformer encoder block
def __init__(self,
dim,
num_heads,
mlp_ratio=4.,  # the first FC layer of the MLP is 4x the input width (an empirical choice)
qkv_bias=False,
qk_scale=None,
drop_ratio=0.,
attn_drop_ratio=0.,
drop_path_ratio=0.,  # the original paper uses dropout; this implementation uses drop path instead
act_layer=nn.GELU,
norm_layer=nn.LayerNorm):
super(Block, self).__init__()
self.norm1 = norm_layer(dim)
self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale,
attn_drop_ratio=attn_drop_ratio, proj_drop_ratio=drop_ratio)  # instantiate the multi-head attention module
# NOTE: drop path for stochastic depth, we shall see if this is better than dropout here
self.drop_path = DropPath(drop_path_ratio) if drop_path_ratio > 0. else nn.Identity()
self.norm2 = norm_layer(dim)
mlp_hidden_dim = int(dim * mlp_ratio)  # hidden width of the MLP (4x input by default)
self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop_ratio)
def forward(self, x):  # the `x +` terms are the residual connections
x = x + self.drop_path(self.attn(self.norm1(x)))
x = x + self.drop_path(self.mlp(self.norm2(x)))
return x
class VisionTransformer(nn.Module):
def __init__(self, img_size=224, patch_size=16, in_c=3, num_classes=1000,
embed_dim=768, depth=12, num_heads=12, mlp_ratio=4.0, qkv_bias=True,
qk_scale=None, representation_size=None, distilled=False, drop_ratio=0.,
attn_drop_ratio=0., drop_path_ratio=0., embed_layer=PatchEmbed, norm_layer=None,
act_layer=None):
# Note: with representation_size=None, no pre-logits layer is built in the MLP head
"""
Args:
img_size (int, tuple): input image size
patch_size (int, tuple): patch size
in_c (int): number of input channels
num_classes (int): number of classes for classification head
embed_dim (int): embedding dimension
depth (int): depth of transformer (how many times the encoder block is repeated)
num_heads (int): number of attention heads
mlp_ratio (int): ratio of mlp hidden dim to embedding dim
qkv_bias (bool): enable bias for qkv if True
qk_scale (float): override default qk scale of head_dim ** -0.5 if set
representation_size (Optional[int]): enable and set representation layer (pre-logits) to this value if set
distilled (bool): model includes a distillation token and head as in DeiT models
drop_ratio (float): dropout rate
attn_drop_ratio (float): attention dropout rate
drop_path_ratio (float): stochastic depth rate
embed_layer (nn.Module): patch embedding layer
norm_layer: (nn.Module): normalization layer
"""
super(VisionTransformer, self).__init__()
self.num_classes = num_classes
self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models
self.num_tokens = 2 if distilled else 1  # `distilled` exists only for DeiT compatibility; ignore it for plain ViT
norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6)
act_layer = act_layer or nn.GELU
self.patch_embed = embed_layer(img_size=img_size, patch_size=patch_size, in_c=in_c, embed_dim=embed_dim)
num_patches = self.patch_embed.num_patches  # number of patches
self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))  # trainable class token of shape (1, 1, embed_dim), expanded to the batch size later
self.dist_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) if distilled else None  # unused by plain ViT, defaults to None
self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + self.num_tokens, embed_dim))
self.pos_drop = nn.Dropout(p=drop_ratio)
dpr = [x.item() for x in torch.linspace(0, drop_path_ratio, depth)]  # stochastic depth decay rule
# a list is built here because drop_path_ratio increases linearly across the `depth` blocks
self.blocks = nn.Sequential(*[
Block(dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale,
drop_ratio=drop_ratio, attn_drop_ratio=attn_drop_ratio, drop_path_ratio=dpr[i],
norm_layer=norm_layer, act_layer=act_layer)
for i in range(depth)
])  # one Block per iteration, collected into self.blocks
self.norm = norm_layer(embed_dim)
# Representation layer
if representation_size and not distilled:  # again, `distilled` can be ignored for plain ViT
self.has_logits = True
self.num_features = representation_size
self.pre_logits = nn.Sequential(OrderedDict([
("fc", nn.Linear(embed_dim, representation_size)),
("act", nn.Tanh())
]))
else:
self.has_logits = False
self.pre_logits = nn.Identity()  # this layer is optional
# Classifier head(s): the final FC layer
self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()
self.head_dist = None
if distilled:  # not used by plain ViT
self.head_dist = nn.Linear(self.embed_dim, self.num_classes) if num_classes > 0 else nn.Identity()
# Weight init
nn.init.trunc_normal_(self.pos_embed, std=0.02)
if self.dist_token is not None:
nn.init.trunc_normal_(self.dist_token, std=0.02)
nn.init.trunc_normal_(self.cls_token, std=0.02)
self.apply(_init_vit_weights)
def forward_features(self, x):
# [B, C, H, W] -> [B, num_patches, embed_dim]
x = self.patch_embed(x) # [B, 196, 768]
# [1, 1, 768] -> [B, 1, 768]
cls_token = self.cls_token.expand(x.shape[0], -1, -1)  # replicate the class token B times to match the batch size
if self.dist_token is None:
x = torch.cat((cls_token, x), dim=1)  # [B, 197, 768]: 196 patch tokens plus the class token
else:
x = torch.cat((cls_token, self.dist_token.expand(x.shape[0], -1, -1), x), dim=1)
x = self.pos_drop(x + self.pos_embed)
x = self.blocks(x)
x = self.norm(x)
if self.dist_token is None:
return self.pre_logits(x[:, 0])
else:
return x[:, 0], x[:, 1]
def forward(self, x):
x = self.forward_features(x)
if self.head_dist is not None:
x, x_dist = self.head(x[0]), self.head_dist(x[1])
if self.training and not torch.jit.is_scripting():
return x, x_dist
else:
# during inference, return the average of both classifier predictions
return (x + x_dist) / 2
else:
x = self.head(x)
return x
def _init_vit_weights(m):
"""
ViT weight initialization
:param m: module
"""
if isinstance(m, nn.Linear):
nn.init.trunc_normal_(m.weight, std=.01)
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode="fan_out")
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, nn.LayerNorm):
nn.init.zeros_(m.bias)
nn.init.ones_(m.weight)
# The functions below correspond to the different ViT model variants
def vit_base_patch16_224_in21k(num_classes: int = 21843, has_logits: bool = True):
"""
ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer.
weights ported from official Google JAX impl:
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch16_224_in21k-e5005f0a.pth
"""
model = VisionTransformer(img_size=224,
patch_size=16,
embed_dim=768,
depth=12,
num_heads=12,
representation_size=768 if has_logits else None,
num_classes=num_classes)
return model
# The original paper does not mention a patch size of 32, but the official source code includes it
def vit_base_patch32_224_in21k(num_classes: int = 21843, has_logits: bool = True):
"""
ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929).
ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer.
weights ported from official Google JAX impl:
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch32_224_in21k-8db57226.pth
"""
model = VisionTransformer(img_size=224,
patch_size=32,
embed_dim=768,
depth=12,
num_heads=12,
representation_size=768 if has_logits else None,
num_classes=num_classes)
return model
def vit_large_patch16_224_in21k(num_classes: int = 21843, has_logits: bool = True):
"""
ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929).
ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer.
weights ported from official Google JAX impl:
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch16_224_in21k-606da67d.pth
"""
model = VisionTransformer(img_size=224,
patch_size=16,
embed_dim=1024,
depth=24,
num_heads=16,
representation_size=1024 if has_logits else None,
num_classes=num_classes)
return model
def vit_large_patch32_224_in21k(num_classes: int = 21843, has_logits: bool = True):
"""
ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929).
ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer.
weights ported from official Google JAX impl:
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth
"""
model = VisionTransformer(img_size=224,
patch_size=32,
embed_dim=1024,
depth=24,
num_heads=16,
representation_size=1024 if has_logits else None,
num_classes=num_classes)
return model
# Better avoided: the pretrained weights are extremely large
def vit_huge_patch14_224_in21k(num_classes: int = 21843, has_logits: bool = True):
"""
ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929).
ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer.
NOTE: converted weights not currently available, too large for github release hosting.
"""
model = VisionTransformer(img_size=224,
patch_size=14,
embed_dim=1280,
depth=32,
num_heads=16,
representation_size=1280 if has_logits else None,
num_classes=num_classes)
return model
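# Example usage (a minimal sketch, not part of the original module):
#
#   model = vit_base_patch16_224_in21k(num_classes=5, has_logits=False)
#   x = torch.randn(1, 3, 224, 224)
#   logits = model(x)  # shape: [1, 5]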
|
/saturn_lib-0.0.11-py3-none-any.whl/saturn_lib/viT_libs/vit_model.py
| 0.862641 | 0.527803 |
vit_model.py
|
pypi
|
import re, nltk, string, TurkishStemmer
import pandas as pd
import numpy as np
import fuzzywuzzy.fuzz
import fuzzywuzzy.process  # import the submodules explicitly; `import fuzzywuzzy` alone does not expose them
from sklearn.model_selection import train_test_split
# Performance Metrics
from sklearn.metrics import mean_squared_error
# PLOT
import matplotlib.pyplot as plt
import seaborn as sns
pd.set_option("display.max_columns", None)
"""
DOCUMENTATION
This script aims to automate repetitive text processing jobs that might be useful in Machine Learning processes.
"""
class TestringProcessor:
def getNumbersfromString(self, string, flag = False):
"""[Find all the number from string with regestring]
Inputs :
string : [String]
flag : [Boolean] to select return all numbers in list or select first number as integer
True : Returns all the integers in the string
False : Returns first consecutive integer
Returns:
[List or Integer]: [According to flag]
Usage:
a = t.getNumbersfromString("abc123d") --> 123
"""
if flag :
if type(string) == str :
number = re.findall('[0-9]+', string)
return number
else :
# this handles missing values (None, NaN, etc.)
return string
else :
if type(string) == str :
number = re.findall('[0-9]+', string)
return int(number[0])
else :
# this handles missing values (None, NaN, etc.)
return string
def replace_matches_in_column(self, df, column, string_to_match, min_ratio = 53):
"""[Helps to match the words looks similar]
Inputs :
df : [pandas.DataFrame]
column : Column that you want to work with
string_to_match : [String]
min_ratio : [Integer] How much similarity is enough for you
Usage:
replace_matches_in_column(df = df , column = 'Country', string_to_match = 'australia')
* australia
* australie
* australiEs
To match misspelled words in the dataframe
"""
# get a list of unique strings
strings = df[column].unique()
# get the top 10 closest matches to our input string
matches = fuzzywuzzy.process.extract(string_to_match, strings, limit=10,
scorer=fuzzywuzzy.fuzz.token_sort_ratio)
# only keep matches with a ratio > min_ratio
close_matches = [match[0] for match in matches if match[1] >= min_ratio]
# get the rows of all the close matches in our dataframe
rows_with_matches = df[column].isin(close_matches)
# replace all rows with close matches with the input matches
df.loc[rows_with_matches, column] = string_to_match
class analyzeModel:
def plot_learning_curves(self, model, X, y):
"""
Objective: decide whether the model is overfitting or underfitting.
Plots learning curves :
y - axis : RMSE
x - axis : Training Set Size
"""
X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.2, random_state=10)
train_errors, val_errors = [], []
for m in range(1, len(X_train) + 1):
model.fit(X_train[:m], y_train[:m])
y_train_predict = model.predict(X_train[:m])
y_val_predict = model.predict(X_val)
train_errors.append(mean_squared_error(y_train[:m], y_train_predict))
val_errors.append(mean_squared_error(y_val, y_val_predict))
plt.plot(np.sqrt(train_errors), "r-+", linewidth=2, label="train")
plt.plot(np.sqrt(val_errors), "b-", linewidth=3, label="val")
plt.legend(loc="upper right", fontsize=14)
plt.xlabel("Training set size", fontsize=14)
plt.ylabel("RMSE", fontsize=14)
class TextProcessor:
"""
This class is prepared for NLP purposes
"""
def __init__(self) -> None:
self.stopwordsAll = ["acaba","ama","aslında","az","bazı","belki","biri","birkaç","birşey","biz","bu","çok","çünkü","da","daha",
"de","defa","diye","eğer","en","gibi","hem","hep","hepsi","her","hiç","için","ile","ise","kez","ki","kim","mı","mu","mi","nasıl",
"ne","be","neden","nerde","nerede","nereye","niçin","nasıl","o","sanki","şey","siz","ben","şu","tüm","ve","veya","ya","yani"]
def preprocess(self,text):
"""String Stripper
- Strip the words
- Remove Punctuations
- Remove numbers
"""
text = text.lower()
text = text.strip()
text = re.compile('<.*?>').sub(' ', text)
text = re.compile('[%s]' % re.escape(string.punctuation)).sub(' ', text)
text = re.sub('\s+', ' ', text)
text = re.sub(r'\[[0-9]*\]', ' ', text)
text = re.sub(r'[^\w\s]', '', str(text).lower().strip())
text = re.sub(r'\d', ' ', text)
text = re.sub(r'\s+', ' ', text)
return text
def turkishStemmerGit(self, text):
stemmer = TurkishStemmer.TurkishStemmer()
stemmedword = [stemmer.stem(word) for word in nltk.word_tokenize(text)]
return ' '.join(stemmedword)
def stopword(self,string):
a = [i for i in string.split() if i not in self.stopwordsAll]
return ' '.join(a)
def finalStep(self, string):
return self.turkishStemmerGit(self.stopword(self.preprocess(string)))
class TimeReg:
def __init__(self) -> None:
pass
def laginvestigate(self, df, target_column):
df["Lag_1"] = df["target_column"].shift(1)
df = df.reindex(columns = [target_column, "Lag_1"])
fig, ax = plt.subplots()
ax = sns.regplot(x = 'Lag_1', y = target_column, data = df, ci = None, scatter_kws= dict(color = '0.25'))
ax.set_aspect('equal')
ax.set_title('Lag plot of target column')
plt.show()
def makelags(self, ts, lags, lead_time):
return pd.concat(
{
f'y_lag_{i}' : ts.shift(i) for i in range(lead_time, lags + lead_time)
},
axis = 1
)
#### TEST ####
# t = TestringProcessor()
# a = t.getNumbersfromString("abc123d")
# print(a)
# t = TestringProcessor()
# a = t.replace_matches_in_column()
#### TEST ####
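# Example usage of TextProcessor (a minimal sketch; the sample sentence is a placeholder):
# tp = TextProcessor()
# print(tp.finalStep("Bu bir deneme cümlesidir 123!"))  # lowercased, digit/punctuation-free, stopword-free, stemmed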
|
/saturn_ml-1.0.1.tar.gz/saturn_ml-1.0.1/saturn_ml/processData.py
| 0.561816 | 0.460956 |
processData.py
|
pypi
|
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV
from xgboost import XGBRegressor
class autoTuning:
def __init__(self, model) -> None:
self.model = model
def autoTuningRFr(self, train, labels):
# n_jobs = -1 uses all the cores available
rf = RandomForestRegressor(random_state= 42, n_jobs= -1 , verbose = 1)
# number of trees in random forests
n_estimators = [int(x) for x in np.linspace(start = 200 , stop = 2000, num = 10)]
# number of features to consider at every split
max_features = ['auto', 'sqrt']
# maximum number of levels in tree
max_depth = [int(x) for x in np.linspace(10, 110, num = 11)]
max_depth.append(None)
# min number of samples required to split a node
min_samples_split = [2, 5, 10]
# min number of samples required at each leaf node
min_samples_leaf = [1, 2, 4]
# method of selecting samples for training each tree
bootstrap = [True, False]
# create a random grid
random_grid = {
'n_estimators' : n_estimators,
'max_features' : max_features,
'max_depth' : max_depth,
'min_samples_split' : min_samples_split,
'min_samples_leaf' : min_samples_leaf,
'bootstrap' : bootstrap
}
rf_random = RandomizedSearchCV(estimator= rf, param_distributions= random_grid,
n_iter= 100, cv = 3, verbose = 2, random_state=42,
n_jobs= -1)
rf_random.fit(train, labels)
return rf_random
def autoTuningXGBr(self, train, labels):
"""
Fine-tuning for XGBRegressor; you may increase cv if you have a
smaller dataset
"""
model = XGBRegressor()
parameters = {
'nthread' : [4],
'objective' : ['reg:linear'],
'learning_rate' : [0.3, 0.05, .07],
'max_depth' : [5,6,7],
'min_child_weight' : [4],
'silent' : [1],
'subsample' : [0.7],
'colsample_bytree' : [0.7],
'n_estimators' : [350, 500, 700]
}
grid = GridSearchCV(model, parameters, cv = 2, n_jobs=-1, verbose = True)
grid.fit(train, labels)
return grid
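# Example usage (a minimal sketch; X_train and y_train are placeholders):
#
#   tuner = autoTuning(model=None)
#   search = tuner.autoTuningXGBr(X_train, y_train)
#   print(search.best_params_, search.best_score_)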
|
/saturn_ml-1.0.1.tar.gz/saturn_ml-1.0.1/saturn_ml/fineTuning.py
| 0.561215 | 0.448366 |
fineTuning.py
|
pypi
|
# Saturn
[Screenshots](#screenshots)
## Features
* Plain-text format. Notebooks are regular Python files. Different types of
cells are comments with special formatting. Markdown rendering and syntax
highlighting in the terminal thanks to [rich](https://github.com/Textualize/rich).
* Checkpoints. Special checkpoint cells allow saving the state of the session
or individual variables.
* Terminal graphics support. When using
[kitty](https://sw.kovidgoyal.net/kitty/) terminal (or in principle anything
that supports its [graphics protocol](https://sw.kovidgoyal.net/kitty/graphics-protocol.html)),
matplotlib figures are rendered inline in the terminal.
* MPI awareness. When running under MPI, only rank 0 will write out the
modified notebook. The REPL will take input on rank 0 and broadcast to other
ranks. It's also possible to suppress output from all ranks other than 0.
* Ability to convert from Jupyter to Saturn notebooks. This also allows viewing
Jupyter notebooks in the terminal.
## Installation
```
pip install saturn-notebook
```
## Commands and options
* `saturn show notebook.py`
Display the notebook in the terminal. No computation is performed. Optional
`--html OUTPUT.html` flag will produce HTML output. Use `-k, --katex` flag to
embed [KaTeX](https://katex.org/) header into the HTML.
`saturn show notebook.py --html notebook.html -k`
* `saturn run [notebook.py [output.py]]`
Execute a Python notebook, either modifying it in place, or saving the result
into a new notebook `output.py`. If no input `notebook.py` is provided, drop
into REPL (`-i` is implied). When leaving, the REPL will ask whether to save
the resulting notebook.
* `-c, --clean`: run from scratch, ignoring the checkpoints.
* `-a, --auto-capture`: automatically capture matplotlib figures, without `show()`.
* `-i`, `--interactive`:
drop into REPL (using [ptpython](https://github.com/prompt-toolkit/ptpython))
after all the cells are processed; the results of the REPL interaction will
be added to the notebook.
* `--no-mpi`: disable MPI awareness.
* `-n, --dry-run`: don't save the result.
* `--only-root-output`: under MPI, suppress output from all ranks other than 0.
* `-e`, `--external notebook.zip`: use external zip archive `notebook.zip`
to store binary content (instead of embedding it inline).
Any arguments passed after `--` will be passed as `sys.argv` to the notebook.
`saturn run notebook.py -- arguments --to notebook`
* `saturn clean notebook.py [output.py]`
Remove all binary data from the notebook. Useful for getting rid of large
checkpoints.
* `saturn image notebook.py [i out.png]`
Save `i`-th image from `notebook.py` into `out.png`. If the last two
arguments are omitted, show all the images in the notebook together with
their indices.
* `saturn version`
Show version of saturn and its dependencies.
* `saturn convert notebook.ipynb [notebook.py]`
Convert a Jupyter notebook into a Saturn notebook. If the output name
`notebook.py` is missing, only show the Jupyter notebook. Optional
`--html OUTPUT.html` flag will produce HTML output.
* `saturn extract notebook.py notebook.zip`
Extract inline binary content into external archive.
* `saturn embed notebook.py notebook.zip`
Embed binary content from external archive into the notebook.
## Cell types
* Markdown cells, prefix `#m>`
```
#m> # Sample notebook
#m>
#m> Description using markdown **formatting**.
```
* Output cells `#o>`
There is usually no reason to modify these by hand; they are filled in by
Saturn with the output of code cells. If they contain PNG data, it's
base64-encoded and wrapped in `{{{` and `}}}` to allow automatic folding.
```
#o> <matplotlib.image.AxesImage object at 0x114217550>
#o> png{{{
#o> pngiVBORw0KGgoAAAANSUhEUgAAA8AAAAHgCAYAAABq5QSEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAP
...
#o> pngGAAAgBQEMAAAACkIYAAAAFL4v5JTyvRQ4v1eAAAAAElFTkSuQmCC
#o> png}}}
```
In Vim with `foldmethod=marker`:
```
#o> <matplotlib.image.AxesImage object at 0x114217550>
+--135 lines: o> png--------------------------------------------------
```
* Checkpoint cells `#chk>`
These indicate locations where the code should checkpoint. Checkpointing
serializes the session, which is stored base64-encoded in the same cell. The
cell also stores the hash of the previous code blocks, and the checkpoint is
valid only if those code blocks haven't changed. By default, Saturn resumes
from the last valid checkpoint. The same folding markers (`{{{` and `}}}`) are used.
```
#chk>{{{
#chk>gANDIJZCQePiVH9SX7wVtBfgrDpcgWu5HUFFiFEeyNF9sVjFcQB9cQEoWAwAAABfX2J1aWx0aW5zX19x
...
#chk>wAyP55wdmz+qIkdBjBrYP3EjdHEkYnWGcSUu
#chk>}}}
```
In Vim with `foldmethod=marker`:
```
+-- 36 lines: chk>----------------------------------------------------
```
* Variable cells `#var> x,y,z`
These cells save only the value of the specified variables (which is useful
if the full checkpoint is too big). If none of the previous code cells have
changed, the cell's saved content is loaded into the specified variables and
the preceding code cell is not evaluated.
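For example (hypothetical code; after a run, Saturn stores the saved values in the cell itself):
```
x, y = expensive_setup()   # hypothetical long-running computation
#var> x,y
```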
* Break cells `#---#`
These are used to break up code cells that don't have any other type of cell
between them.
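For example, the following source is parsed as two separate code cells:
```
a = 1
#---#
b = 2
```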
* REPL cells `#-REPL-#`
These instruct Saturn to drop into an interactive REPL loop, just like the
`-i` option. All the cells from the REPL will be inserted after this cell in
the notebook. Afterwards, execution proceeds as normal.
* Code cells
All contiguous lines, not marked as one of the above, are grouped together
into code cells.
* Non-skippable code cells `#no-skip#`
Adding this line anywhere within the code cell will indicate that it
shouldn't be skipped, even if we are restarting from a checkpoint. This is
useful, for example, if a cell is modifying `sys.path`, which won't be
captured in a checkpoint.
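A sketch of that `sys.path` case (hypothetical path):
```
#no-skip#
import sys
sys.path.append('./lib')   # side effect that a checkpoint won't capture
```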
* Non-hashable code cells `#no-hash#`
Adding this line anywhere within the code cell will indicate that it
shouldn't be hashed, meaning that changing this cell (or removing it
entirely) won't invalidate the checkpoints below. This should be used only
with cells that don't change any variables, e.g., purely output or plotting
cells.
* Saturn cells `#saturn> external=out.zip`
These provide metadata. For now, the only option is to provide the name of
the external zip archive to store the binary content.
## Vim support
All the binary (non-human-readable) cell content is wrapped in `{{{`, `}}}`
markers. Adding the following comment to the notebook ensures that Vim starts
with all the binary content folded away.
```
# vim: foldmethod=marker foldlevel=0
```
It's also helpful to have Vim recognize the `#m>` prefix to correctly re-format
markdown cells with the `gq` command. This can be done, for example, by adding
the following to `~/.vim/after/ftplugin/python.vim`:
```
setlocal comments=b:#,fb:-,b:#m>
```
## REPL
REPL supports the following keyboard shortcuts:
* `Ctrl-d`: exits the REPL.
* `Ctrl-k`: inserts a checkpoint cell. Equivalent to typing in `#chk>` manually.
* `Ctrl-w`: exits the REPL and doesn't drop into it again, even if there are more
REPL cells or `-i` is provided on the command line.
* `Ctrl-q`: exits the REPL and terminates execution of the entire notebook.
* `F10`: aborts execution of the entire notebook and doesn't save it out, even if we are not in `--dry-run` mode.
## Screenshots
Running [samples/simple.py](https://github.com/mrzv/saturn/blob/master/samples/simple.py):
* First run performs full computation and saves the checkpoint, as well as the figure output.

* Second run resumes from the checkpoint, since no code before it has changed.

* Vim folds the binary content.

|
/saturn_notebook-1.2.2.tar.gz/saturn_notebook-1.2.2/README.md
| 0.806205 | 0.979056 |
README.md
|
pypi
|
from __future__ import annotations
from typing import BinaryIO, cast
from struct import unpack
from saturnin.base import StopError, MIME, Channel
from saturnin.lib.data.onepipe import DataProviderMicro, ErrorCode, FBDPSession, FBDPMessage
from .api import BinaryReaderConfig
# Classes
class BinaryReaderMicro(DataProviderMicro):
"""Text file reader microservice.
"""
SYSIO = ('stdin', 'stdout', 'stderr')
def _open_file(self) -> None:
"Open the input file."
self._close_file()
if self.filename.lower() in self.SYSIO:
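# Standard streams are referenced by their numeric file descriptors (stdin=0, stdout=1, stderr=2)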
fspec = self.SYSIO.index(self.filename.lower())
else:
fspec = self.filename
try:
self.file = open(fspec, mode='rb',
closefd=self.filename.lower() not in self.SYSIO)
except Exception as exc:
raise StopError("Failed to open input file", code = ErrorCode.ERROR) from exc
def _close_file(self) -> None:
"Close the input file if necessary"
if self.file:
self.file.close()
self.file = None
def initialize(self, config: BinaryReaderConfig) -> None:
"""Verify configuration and assemble component structural parts.
"""
super().initialize(config)
self.log_context = 'main'
# Configuration
self.fmt: MIME = config.pipe_format.value
self.file: BinaryIO = None
self.filename: str = config.filename.value
self.block_size: int = config.block_size.value
def handle_accept_client(self, channel: Channel, session: FBDPSession) -> None:
"""Event handler executed when client connects to the data pipe via OPEN message.
Arguments:
session: Session associated with client.
The session attributes `data_pipe`, `pipe_socket`, `data_format` and `params`
contain information sent by client, and the event handler validates the request.
If request should be rejected, it raises the `StopError` exception with `code`
attribute containing the `ErrorCode` to be returned in CLOSE message.
"""
super().handle_accept_client(channel, session)
if self.fmt is not None and session.data_format != self.fmt:
raise StopError(f"MIME type '{cast(MIME, session.data_format).mime_type}' is not a valid input format",
code = ErrorCode.DATA_FORMAT_NOT_SUPPORTED)
# Client request is OK; open the file we are configured to work with.
self._open_file()
def handle_produce_data(self, channel: Channel, session: FBDPSession, msg: FBDPMessage) -> None:
"""Handler is executed to store data into outgoing DATA message.
Arguments:
channel: Channel associated with data pipe.
session: Session associated with client.
msg: DATA message that will be sent to client.
The event handler must store the data in `msg.data_frame` attribute. It may also
set ACK-REQUEST flag and `type_data` attribute.
The event handler may cancel the transmission by raising the `StopError` exception
with `code` attribute containing the `ErrorCode` to be returned in CLOSE message.
Note:
To indicate end of data, raise StopError with ErrorCode.OK code.
Note:
Exceptions are handled by protocol, but only StopError is checked for protocol
ErrorCode. As we want to report INVALID_DATA properly, we have to convert
exceptions into StopError.
"""
if self.file is None:
self._open_file()
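# A block_size of -1 means each block is length-prefixed with a 4-byte big-endian size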
if self.block_size == -1:
if buf := self.file.read(4):
size = unpack('!I', buf)[0]
else:
raise StopError('OK', code=ErrorCode.OK)
else:
size = self.block_size
if buf := self.file.read(size):
msg.data_frame = buf
else:
raise StopError('OK', code=ErrorCode.OK)
def handle_pipe_closed(self, channel: Channel, session: FBDPSession, msg: FBDPMessage,
exc: Exception=None) -> None:
"""Event handler executed when CLOSE message is received or sent, to release any
resources associated with current transmission.
Arguments:
channel: Channel associated with data pipe.
session: Session associated with peer.
msg: Received/sent CLOSE message.
exc: Exception that caused the error.
"""
super().handle_pipe_closed(channel, session, msg, exc)
self._close_file()
|
/saturnin_core-0.8.0-py3-none-any.whl/saturnin/core/binary_reader/service.py
| 0.833019 | 0.175132 |
service.py
|
pypi
|
import json
import pandas
import requests
class Series:
"""Instancia a classe Series para obtenção dos perfis temporais dos índices vegetativos NDVI ou EVI.
Args:
api_token (str): Token de autenticação da API SATVeg.
profile_type (str, optional): tipo de índice a ser utilizado (ndvi ou evi). Defaults to 'ndvi'.
satellite (str, optional): tipo de satélite a ser utilizado (terra, aqua ou comb). Defaults to 'terra'.
pre_filter (int, optional): tipo de pré-filtro a ser utilizado. Domínio: 0 = sem pre-filtragem, 1 = correção nodata, 2 = correção nuvem, 3 = correção nuvem/nodata. Defaults to 3.
filter (int, optional): tipo de filtro a ser utilizado. Domínio: flt = Filtro FlatBottom, wav = Filtro Wavelet, sav = Filtro Savitsky Golay. Defaults to None.
filter_parameter (int, optional): parâmetro do filtro a ser utilizado. Este parâmetro é obrigatório para todos os filtros, exceto do tipo wav. Domínio: 0, 10, 20 ou 30 para o filtro FlatBottom; 2, 3, 4, 5 ou 6 para o filtro Savitsky Golay. Defaults to None.
Returns:
None
"""
def __init__(self, api_token:str, profile_type:str='ndvi', satellite:str='terra', pre_filter:int=3, filter:str=None, filter_parameter:int=None) -> None:
self.api_token = api_token
self.profile_type = profile_type
self.satellite = satellite
self.pre_filter = pre_filter
self.filter = filter
self.filter_parameter = filter_parameter
self.url = 'https://api.cnptia.embrapa.br/satveg/v1/series'
def get_json(self, latitude:float, longitude:float) -> dict:
"""Retorna os perfis temporais dos índices vegetativos NDVI ou EVI para um ponto informado.
Args:
latitude (float): latitude decimal do ponto no sistema de referência EPSG 4326.
longitude (float): longitude decimal do ponto no sistema de referência EPSG 4326.
Returns:
dict:
{
'success': True,
'status_code': 200,
'message': 'Sucesso.',
'data':
{
'listaSerie': [0.607, ... , 0.7939],
'listaDatas': ['2000-02-18', ... , '2000-03-21']
}
}
"""
headers = {'Authorization': f'Bearer {self.api_token}'}
parameters = {
'tipoPerfil': self.profile_type,
'satelite': self.satellite,
'latitude': latitude,
'longitude': longitude,
'preFiltro': self.pre_filter,
'filtro': self.filter,
'parametroFiltro': self.filter_parameter
}
try:
response = requests.get(self.url, headers=headers, params=parameters)
except requests.exceptions.RequestException:
return {
'success': False,
'status_code': 408,
'message': 'Connection error.',
'data': {}
}
if response.status_code == 401:
return {
'success': False,
'status_code': 401,
'message': 'Invalid credentials. Check that you provided the correct authentication token.',
'data': {}
}
elif response.status_code == 200:
return {
'success': True,
'status_code': 200,
'message': 'Success.',
'data': json.loads(response.text)
}
else:
return {
'success': False,
'status_code': response.status_code,
'message': 'The request could not be processed.',
'data': {}
}
def get(self, latitude:float, longitude:float, label:str=None) -> pandas.DataFrame:
"""Retorna os perfis temporais dos índices vegetativos NDVI ou EVI para um ponto informado no formato Pandas.DataFrame.
Args:
latitude (float): latitude decimal do ponto no sistema de referência EPSG 4326.
longitude (float): longitude decimal do ponto no sistema de referência EPSG 4326.
label (str): label da cultura existente no terreno.
Returns:
pandas.DataFrame
"""
if label is None:
input = {
'latitude': [latitude],
'longitude': [longitude],
}
else:
input = {
'label': [label],
'latitude': [latitude],
'longitude': [longitude],
}
input_df = pandas.DataFrame(input)
response = []
for coordinates in input_df.itertuples():
response.append(self.get_json(coordinates.latitude, coordinates.longitude))
response_data = pandas.DataFrame(response)
response_data = response_data.join(response_data['data'].apply(pandas.Series))
response_data = response_data.drop('data', axis=1)
return input_df.join(response_data)
def from_csv(self, file:str, delimiter:str=';') -> pandas.DataFrame:
"""Retorna os perfis temporais dos índices vegetativos NDVI ou EVI para os pontos informados no arquivo csv.
Args:
file (str): Caminho do arquivo csv.
delimiter (str, optional): Caractere separador de colunas do csv. Defaults to ';'.
Returns:
pandas.DataFrame
"""
input_data = pandas.read_csv(file, delimiter=delimiter)
response = []
for coordinates in input_data.itertuples():
response.append(self.get_json(coordinates.latitude, coordinates.longitude))
response_data = pandas.DataFrame(response)
response_data = response_data.join(response_data['data'].apply(pandas.Series))
response_data = response_data.drop('data', axis=1)
return input_data.join(response_data)
def to_learn(data:pandas.DataFrame) -> pandas.DataFrame:
"""Converte o DataFrame de entrada para o formato adequado de teste/treinamento de modelos de machine learning.
Args:
data (pandas.DataFrame)
Returns:
pandas.DataFrame
"""
learn_data = data.loc[data['success'] == True]
if len(learn_data.index) == 0:
raise Exception('None of the series contain valid data.')
else:
response_data = pandas.DataFrame(learn_data['listaSerie'].to_list(), columns=learn_data.iloc[0]['listaDatas'])
if 'label' in learn_data.columns:
response_data.insert(loc=0, column='label', value=learn_data['label'].to_list())
return response_data
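# Minimal usage sketch (hypothetical token and coordinates; requires network access):
# series = Series(api_token='YOUR-TOKEN')
# df = series.get(latitude=-15.79, longitude=-47.88, label='maize')
# learn_df = to_learn(df)  # to_learn takes no self, so call it as a plain function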
|
/satveg_api-2.0.1.tar.gz/satveg_api-2.0.1/satveg_api/satveg_api.py
| 0.726037 | 0.487002 |
satveg_api.py
|
pypi
|
# **satvis**: A satellite visibility calculator.
## Description
*satvis* is a small library of functions used to calculate line-of-sight (LOS) visibility between spacecraft and plot access windows.
The core functions that the library is based on are implementations of algorithms developed by J. A. Lawton and by Salvatore Alfano et al. Visibility windows are represented as `IntervalTree`s.
Access windows are plotted using *matplotlib*.
## Install
```
pip install satvis
```
## Visibility Function Examples
The module `visibility_func.py` contains the basic building blocks of the module, including the visibility function algorithm developed by Lawton and Alfano.
Import the functions used in the following examples with:
```python
from numpy import array

from visibility_func import visibilityFunc, isVis, zeroCrossingFit
```
### Example 1
To calculate the visibility between two Earth-centered-inertial (ECI) points:
```python
earth_radius = 6378 # km
extra_height = 0 # km
r1 = array([[earth_radius + 400, 0, 0]]).transpose() # position of object 1
r2 = array([[earth_radius, 0, 0]]).transpose() # position of object 2
[vis, phi, a1, a2] = visibilityFunc(r1, r2, earth_radius, extra_height)
print(vis)
print(phi)
print(a1)
print(a2)
# Prints:
# 0.3451182504723773
# 0.00014753614577624565
# 0.34526578661815355
# 0.0
```
where `vis` is the value of the visibility function, `phi` is the angle (in radians) drawn between the two Earth-centered-inertial points, and `a1` and `a2` are intermediate construction angles.
A value of `vis`>0 means that the two points have a direct LOS to each other.
### Example 2
If you just want to know if two points are visible to each other in a binary fashion, use `isVis`:
```python
[vis_bool] = isVis(r1, r2, earth_radius, extra_height)
print(vis_bool)
# True
```
### Example 3
A series of visibility function values can be represented as a couple of `ndarray`s or an `IntervalTree` via the `zeroCrossingFit` function.
This is handy if you want to calculate visibility windows between two objects.
```python
t = array([0, 1, 2, 3, 4]) # time vector
vis = array([-1, -0.1, 0.5, 4, 2]) # objects become visible to each other between t[1] and t[2]
[crossings, rise_set, vis_tree] = zeroCrossingFit(vis, t)
print(crossings)
print(rise_set)
print(vis_tree)
# Prints:
# [1.40896106]
# [1.]
# tree=IntervalTree([Interval(1.4089610649024726, 4)])
```
where `crossings` is a list of times at which the visibility function value crosses zero, `rise_set` indicates the direction of the crossing (1=rise, -1=set), and `vis_tree` is an `IntervalTree` indicating time windows during which the visibility function value is positive.
See [the IntervalTree package](https://github.com/chaimleib/intervaltree) on GitHub for details on its structure.
### Example 4
If the two objects never see each other, the returned arrays and `IntervalTree` are empty.
```python
vis = array([-1, -0.1, -0.5, -4, -2])
[crossings, rise_set, vis_tree] = zeroCrossingFit(vis, t)
print(crossings)
print(rise_set)
print(vis_tree)
# Prints:
# []
# []
# IntervalTree()
```
### Example 5
You can assign an identifier to `Interval`s within an `IntervalTree`.
This is useful if you combine multiple `IntervalTree`s representing more than two objects.
```python
vis1 = array([-1, -0.1, 0.5, 4, 2])
vis2 = array([-2, -1, -0.5, 1, 1.1])
[_, _, vis_tree1] = zeroCrossingFit(vis1, t, "pair1")
[_, _, vis_tree2] = zeroCrossingFit(vis2, t, "pair2")
combined_tree = vis_tree1 | vis_tree2
print(vis_tree1)
print(vis_tree2)
print(combined_tree)
# Prints:
# tree=IntervalTree([Interval(1.4089610649024726, 4, 'pair1')])
# tree=IntervalTree([Interval(2.328702338492417, 4, 'pair2')])
# IntervalTree([Interval(1.4089610649024726, 4, 'pair1'), Interval(2.328702338492417, 4, 'pair2')])
```
## Visibility History Examples
The `vis_history.py` module contains functions to calculate the visibility function value as a time history for multiple sensors and targets.
The functions in these examples can be imported with:
```python
from numpy import array, zeros

from vis_history import getVisHist
```
### Example 1
To get an `IntervalTree` and `ndarray` of the visibility history between a single sensor and target, define the target and sensor ids, their state histories, a time vector, and the radius of the planetoid.
```python
RE = 6371 # Earth radius, km
time = [0, 1, 2, 3] # units don't matter
# Having the ids in lists may seem redundant for now, but will make sense in later examples
target_id = [{"id": "T1"}]
sensor_id = [{"id": "S1"}]
# The third dimension is trivial in this example, but will be expanded in later examples
states_target = zeros([len(time), 6, 1]) # (T, 6, 1) array, ECI frame
states_sensor = zeros([len(time), 6, 1]) # (T, 6, 1) array, ECI frame
# The sensor and target are moving in the +I direction over time, with the sensor always being further away from the Earth
states_target[:, 0, 0] = array([8000, 9000, 10000, 11000]) # km
states_sensor[:, 0, 0] = 1.1 * array([8000, 9000, 10000, 11000]) # km
tree, vis = getVisHist(
targets=target_id,
sensors=sensor_id,
x_targets=states_target,
x_sensors=states_sensor,
time=time,
planet_radius=RE,
)
print(tree)
print(vis)
# Prints:
# IntervalTree([Interval(0, 3, {'target_id': 'T1', 'sensor_id': 'S1'})])
# [[[1.41076435 1.6559796 1.83313801 1.96935546]]]
```
Note that the 2nd dimension of the sensor and target states is 6.
The first 3 elements of this dimension are position, the last 3 elements are velocity, both in the ECI frame.
Velocity is irrelevant for the calculation, but we leave it in the argument to be consistent with the definition of a *state vector* in orbital dynamics.
Also note that the dimensions of both the states and time array are arbitrary.
As long as you are consistent and the states are in the ECI frame, units don't matter.
Also note that the target and sensor ids are just items in `dict`s; you can have other entries in the target/sensor `dict`s, just as long as one of the keys is `"id"`.
How to interpret these outputs?
- `tree (IntervalTree)`: In the interval from 0-3 along `time`, target `T1` and sensor `S1` can see each other.
- `vis (ndarray)`: The value of the visibility function is increasing over time, and is greater than 0 the entire length of `time`.
### Example 2
Now for a more interesting example.
This time we have 2 sensors and 3 targets.
```python
# time vector
time = [0, 1, 2, 3]
# create dummy target/sensor dicts
sensor_dicts = [
{"id": "A"}, # ids can be `str`...
{"id": "B"},
]
target_dicts = [
{"id": 1}, # ... or any format.
{"id": 2},
{"id": 3},
]
# create dummy state history
states_targets = zeros([len(time), 6, 3])
states_sensors = zeros([len(time), 6, 2])
# Build state histories for the following:
# Visible to each other:
# # Target 1 / Sensor A
# # Target 2 / Sensor B
# # Target 3 / Sensor B
# Not visible to each other:
# # Target 1 / Sensor B
# # Target 2 / Sensor A
# # Target 3 / Sensor A
# Positions must be greater than Earth radius to get through
# visibilityFunc error check. Velocities aren't used so set to zero.
states_targets[:, 0, 0] = array([8000, 9000, 10000, 11000])
states_targets[:, 0, 1] = -1 * array([8000, 9000, 10000, 11000])
states_targets[:, 0, 2] = -1 * array([8000, 9000, 10000, 11000])
states_sensors[:, 0, 0] = 1.1 * array([8000, 9000, 10000, 11000])
states_sensors[:, 0, 1] = -1.1 * array([8000, 9000, 10000, 11000])
tree, vis = getVisHist(
targets=target_dicts,
sensors=sensor_dicts,
x_targets=states_targets,
x_sensors=states_sensors,
time=time,
planet_radius=RE,
)
print(f"tree ={tree})
print(f"vis = {vis})
# Prints:
# tree = IntervalTree([Interval(0, 3, {'target_id': 3, 'sensor_id': 'B'}), Interval(0, 3, {'target_id': 1, 'sensor_id': 'A'}), Interval(0, 3, {'target_id': 2, 'sensor_id': 'B'})])
# vis =
# [[[ 1.41076435 1.6559796 1.83313801 1.96935546]
# [-1.7308283 -1.48561305 -1.30845464 -1.17223719]
# [-1.7308283 -1.48561305 -1.30845464 -1.17223719]]
#
# [[-1.7308283 -1.48561305 -1.30845464 -1.17223719]
# [ 1.41076435 1.6559796 1.83313801 1.96935546]
# [ 1.41076435 1.6559796 1.83313801 1.96935546]]]
```
Before we examine the outputs, note the format of the sensor/target ids: the value of "id" can be of any type; here we are using `str`s and `int`s, but you can use anything.
Now onto the outputs.
First let's look at `tree`:
- Target `3`/sensor `B`, target `1`/sensor `A`, and target `2`/sensor `B` can see each other from 0-3.
- There are no `Interval`s in the `IntervalTree` for target `1`/sensor `B`, target `2`/sensor `A`, or target `3`/sensor `A`; none of these target/sensor pairs can see each other over `time`.
- Note that the `Interval`s in `tree` are not time-ordered; this is because `IntervalTree`s do *not* preserve order.
- The order of arrays in `vis` corresponds to the order of inputs in `targets` and `sensors`.
Now we examine `vis`:
- The output `vis` array is (M, N, T), where M is the number of sensors, N is the number of targets, and T is the length of the time array.
- Looking at the upper 3x4 array block, we see that all the entries in row 0 (1.41, 1.65, ...) are positive.
This corresponds to target `1`/sensor `A` being visible to each other.
- Conversely, all of the entries in row 1 of the upper block (-1.73, -1.48, ...) are negative.
This corresponds to target `2`/sensor `A` ***not*** being visible to each other.
- The lower array block corresponds to sensor `B`.
## Schedule Plots Examples
Access windows between sensors and targets can be plotted using `plotSchedule`.
Import the functions used in the following examples with:
```python
# local imports
from schedule_plots import plotSchedule
from int_tree_converter import intTree2WindowList
# 3rd-party imports
from intervaltree import Interval, IntervalTree
import matplotlib.pyplot as plt
```
### Example 1
`plotSchedule` requires a specifically-formatted `ndarray` that is not easily human-readable.
To convert from the easyish-to-read output of `getVisHist` to something that `plotSchedule` can interpret, we use the converter function `intTree2WindowList`.
```python
# Build a simple IntervalTree
tree = IntervalTree([Interval(0, 3, {'target_id': 3, 'sensor_id': 'B'}), Interval(0, 3, {'target_id': 1, 'sensor_id': 'A'}), Interval(0, 3, {'target_id': 2, 'sensor_id': 'B'})])
# Convert IntervalTree
[windows, sensor_ids, target_ids] = intTree2WindowList(schedule_tree=tree)
print(windows)
print(sensor_ids)
print(target_ids)
# Prints:
# [[[(0, 3)], [], []], [[], [(0, 3)], [(0, 3)]]]
# ['A', 'B']
# [1, 2, 3]
```
The sensor and target ids are useful for debugging and are generally ignored when using `intTree2WindowList`.
`windows` is formatted such that `plotSchedule` can accept it as an argument.
### Example 2: Basic Plot
Now to generate a schedule plot.
Before calling `plotSchedule`, create a *matplotlib* figure, which is passed in as an argument.
```python
f = plt.figure()
avail = [
[[(2, 1)], [(4, 1)]], # access windows for Sensor A
[[], [(2, 3)]] # access windows for Sensor B
]
target_labels = ['1', '2']
sensor_labels = ['A', 'B']
f = plotSchedule(
availability=avail,
target_labels=target_labels,
sensor_labels=sensor_labels,
fig=f
)
plt.show()
```
The above code outputs this figure:

### Example 3: Scheduled Plot
There are optional arguments to `plotSchedule` that show "scheduled" sensor-target pairs as well as availability.
```python
f = plt.figure()
avail = [
[[(2, 1)], [(4, 1)]], # access windows for Sensor A
[[], [(2, 3)]] # access windows for Sensor B
]
sched = [
[[(2, 0.5)], []], # schedule for Sensor A
[[], [(3, 1)]], # schedule for Sensor B
]
target_labels = ['1', '2']
sensor_labels = ['A', 'B']
f = plotSchedule(
availability=avail,
target_labels=target_labels,
sensor_labels=sensor_labels,
fig=f,
scheduled=sched,
scheduled_targ_labels=target_labels,
scheduled_sensor_labels=sensor_labels,
)
plt.show()
```
The above code outputs this figure:

## Citations:
- Alfano, Salvatore & Jr, Negron, & Moore, Jennifer. (1992). Rapid Determination of Satellite Visibility Periods. Journal of The Astronautical Sciences. Vol. 40, April-June, pp 281-296.
- Lawton, J. A.. (1987). Numerical Method for Rapidly Determining Satellite-Satellite and Satellite-Ground Station In-View Periods. Journal of Guidance, Navigation and Control. Vol. 10, January-February, pp. 32-36
- Chaim Leib Halbert's IntervalTree package on GitHub, https://pypi.org/project/intervaltree/#description
|
/satvis-0.2.0.tar.gz/satvis-0.2.0/README.md
| 0.630116 | 0.982707 |
README.md
|
pypi
|
import { indexOf, lst } from "../util/misc.js"
import { cmp } from "./pos.js"
import { sawCollapsedSpans } from "./saw_special_spans.js"
import { getLine, isLine, lineNo } from "./utils_line.js"
// TEXTMARKER SPANS
export function MarkedSpan(marker, from, to) {
this.marker = marker
this.from = from; this.to = to
}
// Search an array of spans for a span matching the given marker.
export function getMarkedSpanFor(spans, marker) {
if (spans) for (let i = 0; i < spans.length; ++i) {
let span = spans[i]
if (span.marker == marker) return span
}
}
// Remove a span from an array, returning undefined if no spans are
// left (we don't store arrays for lines without spans).
export function removeMarkedSpan(spans, span) {
let r
for (let i = 0; i < spans.length; ++i)
if (spans[i] != span) (r || (r = [])).push(spans[i])
return r
}
// Add a span to a line.
export function addMarkedSpan(line, span) {
line.markedSpans = line.markedSpans ? line.markedSpans.concat([span]) : [span]
span.marker.attachLine(line)
}
// Used for the algorithm that adjusts markers for a change in the
// document. These functions cut an array of spans at a given
// character position, returning an array of remaining chunks (or
// undefined if nothing remains).
function markedSpansBefore(old, startCh, isInsert) {
let nw
if (old) for (let i = 0; i < old.length; ++i) {
let span = old[i], marker = span.marker
let startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= startCh : span.from < startCh)
if (startsBefore || span.from == startCh && marker.type == "bookmark" && (!isInsert || !span.marker.insertLeft)) {
let endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= startCh : span.to > startCh)
;(nw || (nw = [])).push(new MarkedSpan(marker, span.from, endsAfter ? null : span.to))
}
}
return nw
}
function markedSpansAfter(old, endCh, isInsert) {
let nw
if (old) for (let i = 0; i < old.length; ++i) {
let span = old[i], marker = span.marker
let endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= endCh : span.to > endCh)
if (endsAfter || span.from == endCh && marker.type == "bookmark" && (!isInsert || span.marker.insertLeft)) {
let startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= endCh : span.from < endCh)
;(nw || (nw = [])).push(new MarkedSpan(marker, startsBefore ? null : span.from - endCh,
span.to == null ? null : span.to - endCh))
}
}
return nw
}
// Given a change object, compute the new set of marker spans that
// cover the line in which the change took place. Removes spans
// entirely within the change, reconnects spans belonging to the
// same marker that appear on both sides of the change, and cuts off
// spans partially within the change. Returns an array of span
// arrays with one element for each line in (after) the change.
export function stretchSpansOverChange(doc, change) {
if (change.full) return null
let oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans
let oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans
if (!oldFirst && !oldLast) return null
let startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0
// Get the spans that 'stick out' on both sides
let first = markedSpansBefore(oldFirst, startCh, isInsert)
let last = markedSpansAfter(oldLast, endCh, isInsert)
// Next, merge those two ends
let sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0)
if (first) {
// Fix up .to properties of first
for (let i = 0; i < first.length; ++i) {
let span = first[i]
if (span.to == null) {
let found = getMarkedSpanFor(last, span.marker)
if (!found) span.to = startCh
else if (sameLine) span.to = found.to == null ? null : found.to + offset
}
}
}
if (last) {
// Fix up .from in last (or move them into first in case of sameLine)
for (let i = 0; i < last.length; ++i) {
let span = last[i]
if (span.to != null) span.to += offset
if (span.from == null) {
let found = getMarkedSpanFor(first, span.marker)
if (!found) {
span.from = offset
if (sameLine) (first || (first = [])).push(span)
}
} else {
span.from += offset
if (sameLine) (first || (first = [])).push(span)
}
}
}
// Make sure we didn't create any zero-length spans
if (first) first = clearEmptySpans(first)
if (last && last != first) last = clearEmptySpans(last)
let newMarkers = [first]
if (!sameLine) {
// Fill gap with whole-line-spans
let gap = change.text.length - 2, gapMarkers
if (gap > 0 && first)
for (let i = 0; i < first.length; ++i)
if (first[i].to == null)
(gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i].marker, null, null))
for (let i = 0; i < gap; ++i)
newMarkers.push(gapMarkers)
newMarkers.push(last)
}
return newMarkers
}
// Remove spans that are empty and don't have a clearWhenEmpty
// option of false.
function clearEmptySpans(spans) {
for (let i = 0; i < spans.length; ++i) {
let span = spans[i]
if (span.from != null && span.from == span.to && span.marker.clearWhenEmpty !== false)
spans.splice(i--, 1)
}
if (!spans.length) return null
return spans
}
// Used to 'clip' out readOnly ranges when making a change.
export function removeReadOnlyRanges(doc, from, to) {
let markers = null
doc.iter(from.line, to.line + 1, line => {
if (line.markedSpans) for (let i = 0; i < line.markedSpans.length; ++i) {
let mark = line.markedSpans[i].marker
if (mark.readOnly && (!markers || indexOf(markers, mark) == -1))
(markers || (markers = [])).push(mark)
}
})
if (!markers) return null
let parts = [{from: from, to: to}]
for (let i = 0; i < markers.length; ++i) {
let mk = markers[i], m = mk.find(0)
for (let j = 0; j < parts.length; ++j) {
let p = parts[j]
if (cmp(p.to, m.from) < 0 || cmp(p.from, m.to) > 0) continue
let newParts = [j, 1], dfrom = cmp(p.from, m.from), dto = cmp(p.to, m.to)
if (dfrom < 0 || !mk.inclusiveLeft && !dfrom)
newParts.push({from: p.from, to: m.from})
if (dto > 0 || !mk.inclusiveRight && !dto)
newParts.push({from: m.to, to: p.to})
parts.splice.apply(parts, newParts)
j += newParts.length - 3
}
}
return parts
}
// Connect or disconnect spans from a line.
export function detachMarkedSpans(line) {
let spans = line.markedSpans
if (!spans) return
for (let i = 0; i < spans.length; ++i)
spans[i].marker.detachLine(line)
line.markedSpans = null
}
export function attachMarkedSpans(line, spans) {
if (!spans) return
for (let i = 0; i < spans.length; ++i)
spans[i].marker.attachLine(line)
line.markedSpans = spans
}
// Helpers used when computing which overlapping collapsed span
// counts as the larger one.
function extraLeft(marker) { return marker.inclusiveLeft ? -1 : 0 }
function extraRight(marker) { return marker.inclusiveRight ? 1 : 0 }
// Returns a number indicating which of two overlapping collapsed
// spans is larger (and thus includes the other). Falls back to
// comparing ids when the spans cover exactly the same range.
export function compareCollapsedMarkers(a, b) {
let lenDiff = a.lines.length - b.lines.length
if (lenDiff != 0) return lenDiff
let aPos = a.find(), bPos = b.find()
let fromCmp = cmp(aPos.from, bPos.from) || extraLeft(a) - extraLeft(b)
if (fromCmp) return -fromCmp
let toCmp = cmp(aPos.to, bPos.to) || extraRight(a) - extraRight(b)
if (toCmp) return toCmp
return b.id - a.id
}
// Find out whether a line ends or starts in a collapsed span. If
// so, return the marker for that span.
function collapsedSpanAtSide(line, start) {
let sps = sawCollapsedSpans && line.markedSpans, found
if (sps) for (let sp, i = 0; i < sps.length; ++i) {
sp = sps[i]
if (sp.marker.collapsed && (start ? sp.from : sp.to) == null &&
(!found || compareCollapsedMarkers(found, sp.marker) < 0))
found = sp.marker
}
return found
}
export function collapsedSpanAtStart(line) { return collapsedSpanAtSide(line, true) }
export function collapsedSpanAtEnd(line) { return collapsedSpanAtSide(line, false) }
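// Find the largest collapsed span that spans across the given character position, if any.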
export function collapsedSpanAround(line, ch) {
let sps = sawCollapsedSpans && line.markedSpans, found
if (sps) for (let i = 0; i < sps.length; ++i) {
let sp = sps[i]
if (sp.marker.collapsed && (sp.from == null || sp.from < ch) && (sp.to == null || sp.to > ch) &&
(!found || compareCollapsedMarkers(found, sp.marker) < 0)) found = sp.marker
}
return found
}
// Test whether there exists a collapsed span that partially
// overlaps (covers the start or end, but not both) of a new span.
// Such overlap is not allowed.
export function conflictingCollapsedRange(doc, lineNo, from, to, marker) {
let line = getLine(doc, lineNo)
let sps = sawCollapsedSpans && line.markedSpans
if (sps) for (let i = 0; i < sps.length; ++i) {
let sp = sps[i]
if (!sp.marker.collapsed) continue
let found = sp.marker.find(0)
let fromCmp = cmp(found.from, from) || extraLeft(sp.marker) - extraLeft(marker)
let toCmp = cmp(found.to, to) || extraRight(sp.marker) - extraRight(marker)
if (fromCmp >= 0 && toCmp <= 0 || fromCmp <= 0 && toCmp >= 0) continue
if (fromCmp <= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.to, from) >= 0 : cmp(found.to, from) > 0) ||
fromCmp >= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.from, to) <= 0 : cmp(found.from, to) < 0))
return true
}
}
// A visual line is a line as drawn on the screen. Folding, for
// example, can cause multiple logical lines to appear on the same
// visual line. This finds the start of the visual line that the
// given line is part of (usually that is the line itself).
export function visualLine(line) {
let merged
while (merged = collapsedSpanAtStart(line))
line = merged.find(-1, true).line
return line
}
export function visualLineEnd(line) {
let merged
while (merged = collapsedSpanAtEnd(line))
line = merged.find(1, true).line
return line
}
// Returns an array of logical lines that continue the visual line
// started by the argument, or undefined if there are no such lines.
export function visualLineContinued(line) {
let merged, lines
while (merged = collapsedSpanAtEnd(line)) {
line = merged.find(1, true).line
;(lines || (lines = [])).push(line)
}
return lines
}
// Get the line number of the start of the visual line that the
// given line number is part of.
export function visualLineNo(doc, lineN) {
let line = getLine(doc, lineN), vis = visualLine(line)
if (line == vis) return lineN
return lineNo(vis)
}
// Get the line number of the start of the next visual line after
// the given line.
export function visualLineEndNo(doc, lineN) {
if (lineN > doc.lastLine()) return lineN
let line = getLine(doc, lineN), merged
if (!lineIsHidden(doc, line)) return lineN
while (merged = collapsedSpanAtEnd(line))
line = merged.find(1, true).line
return lineNo(line) + 1
}
// Compute whether a line is hidden. Lines count as hidden when they
// are part of a visual line that starts with another line, or when
// they are entirely covered by collapsed, non-widget span.
export function lineIsHidden(doc, line) {
let sps = sawCollapsedSpans && line.markedSpans
if (sps) for (let sp, i = 0; i < sps.length; ++i) {
sp = sps[i]
if (!sp.marker.collapsed) continue
if (sp.from == null) return true
if (sp.marker.widgetNode) continue
if (sp.from == 0 && sp.marker.inclusiveLeft && lineIsHiddenInner(doc, line, sp))
return true
}
}
function lineIsHiddenInner(doc, line, span) {
if (span.to == null) {
let end = span.marker.find(1, true)
return lineIsHiddenInner(doc, end.line, getMarkedSpanFor(end.line.markedSpans, span.marker))
}
if (span.marker.inclusiveRight && span.to == line.text.length)
return true
for (let sp, i = 0; i < line.markedSpans.length; ++i) {
sp = line.markedSpans[i]
if (sp.marker.collapsed && !sp.marker.widgetNode && sp.from == span.to &&
(sp.to == null || sp.to != span.from) &&
(sp.marker.inclusiveLeft || span.marker.inclusiveRight) &&
lineIsHiddenInner(doc, line, sp)) return true
}
}
// Find the height above the given line.
export function heightAtLine(lineObj) {
lineObj = visualLine(lineObj)
let h = 0, chunk = lineObj.parent
for (let i = 0; i < chunk.lines.length; ++i) {
let line = chunk.lines[i]
if (line == lineObj) break
else h += line.height
}
for (let p = chunk.parent; p; chunk = p, p = chunk.parent) {
for (let i = 0; i < p.children.length; ++i) {
let cur = p.children[i]
if (cur == chunk) break
else h += cur.height
}
}
return h
}
// Compute the character length of a line, taking into account
// collapsed ranges (see markText) that might hide parts, and join
// other lines onto it.
export function lineLength(line) {
if (line.height == 0) return 0
let len = line.text.length, merged, cur = line
while (merged = collapsedSpanAtStart(cur)) {
let found = merged.find(0, true)
cur = found.from.line
len += found.from.ch - found.to.ch
}
cur = line
while (merged = collapsedSpanAtEnd(cur)) {
let found = merged.find(0, true)
len -= cur.text.length - found.from.ch
cur = found.to.line
len += cur.text.length - found.to.ch
}
return len
}
// Find the longest line in the document.
export function findMaxLine(cm) {
let d = cm.display, doc = cm.doc
d.maxLine = getLine(doc, doc.first)
d.maxLineLength = lineLength(d.maxLine)
d.maxLineChanged = true
doc.iter(line => {
let len = lineLength(line)
if (len > d.maxLineLength) {
d.maxLineLength = len
d.maxLine = line
}
})
}
|
/satyrn_python-0.10.2-py3-none-any.whl/satyrn_python/static/codemirror/src/line/spans.js
| 0.553264 | 0.528108 |
spans.js
|
pypi
|
import { countColumn } from "./misc.js"
// STRING STREAM
// Fed to the mode parsers, provides helper functions to make
// parsers more succinct.
class StringStream {
constructor(string, tabSize, lineOracle) {
this.pos = this.start = 0
this.string = string
this.tabSize = tabSize || 8
this.lastColumnPos = this.lastColumnValue = 0
this.lineStart = 0
this.lineOracle = lineOracle
}
eol() {return this.pos >= this.string.length}
sol() {return this.pos == this.lineStart}
peek() {return this.string.charAt(this.pos) || undefined}
next() {
if (this.pos < this.string.length)
return this.string.charAt(this.pos++)
}
eat(match) {
let ch = this.string.charAt(this.pos)
let ok
if (typeof match == "string") ok = ch == match
else ok = ch && (match.test ? match.test(ch) : match(ch))
if (ok) {++this.pos; return ch}
}
eatWhile(match) {
let start = this.pos
while (this.eat(match)){}
return this.pos > start
}
eatSpace() {
let start = this.pos
while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) ++this.pos
return this.pos > start
}
skipToEnd() {this.pos = this.string.length}
skipTo(ch) {
let found = this.string.indexOf(ch, this.pos)
if (found > -1) {this.pos = found; return true}
}
backUp(n) {this.pos -= n}
column() {
if (this.lastColumnPos < this.start) {
this.lastColumnValue = countColumn(this.string, this.start, this.tabSize, this.lastColumnPos, this.lastColumnValue)
this.lastColumnPos = this.start
}
return this.lastColumnValue - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0)
}
indentation() {
return countColumn(this.string, null, this.tabSize) -
(this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0)
}
match(pattern, consume, caseInsensitive) {
if (typeof pattern == "string") {
let cased = str => caseInsensitive ? str.toLowerCase() : str
let substr = this.string.substr(this.pos, pattern.length)
if (cased(substr) == cased(pattern)) {
if (consume !== false) this.pos += pattern.length
return true
}
} else {
let match = this.string.slice(this.pos).match(pattern)
if (match && match.index > 0) return null
if (match && consume !== false) this.pos += match[0].length
return match
}
}
current(){return this.string.slice(this.start, this.pos)}
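// Run inner() with the first n characters of the line hidden (the effective line start is temporarily shifted).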
hideFirstChars(n, inner) {
this.lineStart += n
try { return inner() }
finally { this.lineStart -= n }
}
lookAhead(n) {
let oracle = this.lineOracle
return oracle && oracle.lookAhead(n)
}
baseToken() {
let oracle = this.lineOracle
return oracle && oracle.baseToken(this.pos)
}
}
export default StringStream
|
/satyrn_python-0.10.2-py3-none-any.whl/satyrn_python/static/codemirror/src/util/StringStream.js
| 0.403332 | 0.561876 |
StringStream.js
|
pypi
|
import re
import requests
def get_match(url):
data = {'file': '(binary)', 'url': url}
response = requests.post('https://danbooru.iqdb.org/', data).text
return _match_api(response)
def _match_api(response):
if re.search(r'No relevant matches', response):
ret = {
"type": "possible",
"found": []
}
similarity = re.findall(r'([0-9]{1,3})% similarity', response)
url = re.findall(r'(?:https?://)?danbooru.donmai.us/posts/[0-9]+',
response)
url = [f"https://{x}" if not x.startswith("http") else x for x in url]
size = re.findall(r'([0-9]+)×([0-9]+)', response)
rating = re.findall(r'\[.*\]', response)
for i, url in enumerate(url):
ret["found"].append({
"link": url,
"similarity": similarity[i],
"rating": rating[i + 1],
"size": {
"width": size[i + 1][0],
"height": size[i + 1][1]
}
})
return ret
else:
ret = {
"type": "definite"
}
similarity = re.search(r'([0-9]{1,3})% similarity', response)
if similarity:
similarity = similarity.group(1)
url = re.search(r'(?:https?://)?danbooru.donmai.us/posts/[0-9]+',
response).group()
if not url.startswith("http"):
url = f'https://{url}'
size = re.findall(r'([0-9]+)×([0-9]+)', response)
rating = re.findall(r'\[.*\]', response)[1]
ret["found"] = {
"link": url,
"similarity": similarity,
"rating": rating,
"size": {
"width": size[1][0],
"height": size[1][1]
}
}
return ret
def check_url(value: str) -> bool:
"""
Check that the url is direct link to a image
:param value: URL to check
:type value: str
:return: True only if 'image' is anywhere in the content-type
of the URL headers.
:rtype: bool
"""
return "image" in requests.head(value).headers["content-type"]
|
/sauce_finder-2.2.4.tar.gz/sauce_finder-2.2.4/sauce_finder/sauce_finder.py
| 0.441673 | 0.253411 |
sauce_finder.py
|
pypi
|
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
""" Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
"""
def __init__(self, mu=0, sigma=1):
Distribution.__init__(self, mu, sigma)
def calculate_mean(self):
"""Function to calculate the mean of the data set.
Args:
None
Returns:
float: mean of the data set
"""
avg = 1.0 * sum(self.data) / len(self.data)
self.mean = avg
return self.mean
def calculate_stdev(self, sample=True):
"""Function to calculate the standard deviation of the data set.
Args:
sample (bool): whether the data represents a sample or population
Returns:
float: standard deviation of the data set
"""
if sample:
n = len(self.data) - 1
else:
n = len(self.data)
mean = self.calculate_mean()
sigma = 0
for d in self.data:
sigma += (d - mean) ** 2
sigma = math.sqrt(sigma / n)
self.stdev = sigma
return self.stdev
def plot_histogram(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.hist(self.data)
plt.title('Histogram of Data')
plt.xlabel('data')
plt.ylabel('count')
def pdf(self, x):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
return (1.0 / (self.stdev * math.sqrt(2 * math.pi))) * math.exp(-0.5 * ((x - self.mean) / self.stdev) ** 2)
def plot_histogram_pdf(self, n_spaces=50):
"""Function to plot the normalized histogram of the data and a plot of the
probability density function along the same range
Args:
n_spaces (int): number of data points
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
mu = self.mean
sigma = self.stdev
min_range = min(self.data)
max_range = max(self.data)
# calculates the interval between x values
interval = 1.0 * (max_range - min_range) / n_spaces
x = []
y = []
# calculate the x values to visualize
for i in range(n_spaces):
tmp = min_range + interval * i
x.append(tmp)
y.append(self.pdf(tmp))
# make the plots
fig, axes = plt.subplots(2, sharex=True)
fig.subplots_adjust(hspace=.5)
axes[0].hist(self.data, density=True)
axes[0].set_title('Normed Histogram of Data')
axes[0].set_ylabel('Density')
axes[1].plot(x, y)
axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
axes[1].set_ylabel('Density')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Gaussian distributions
Args:
other (Gaussian): Gaussian instance
Returns:
Gaussian: Gaussian distribution
"""
result = Gaussian()
result.mean = self.mean + other.mean
result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
return result
def __repr__(self):
"""Function to output the characteristics of the Gaussian instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}".format(self.mean, self.stdev)
|
/sauce_probability-0.1.tar.gz/sauce_probability-0.1/sauce_probability/Gaussiandistribution.py
| 0.904368 | 0.904102 |
Gaussiandistribution.py
|
pypi
|
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
import math
class Binomial(Distribution):
""" Binomial distribution class for calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats to be extracted from the data file
p (float) representing the probability of an event occurring
"""
def __init__(self, prob=.5, n=20):
self.p = prob
self.n = n
Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())
def calculate_mean(self):
"""Function to calculate the mean from p and n
Args:
None
Returns:
float: mean of the data set
"""
mean = self.n * self.p
return mean
def calculate_stdev(self):
"""Function to calculate the standard deviation from p and n.
Args:
None
Returns:
float: standard deviation of the data set
"""
stdev = math.sqrt(self.n * self.p * (1 - self.p))
return stdev
def replace_stats_with_data(self):
"""Function to calculate p and n from the data set. The function updates the p and n variables of the object.
Args:
None
Returns:
float: the p value
float: the n value
"""
self.n = len(self.data)
self.p = sum(self.data) / len(self.data)
self.mean = self.calculate_mean()
self.stdev = self.calculate_stdev()
return self.p, self.n
def plot_bar(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.bar(x=['0', '1'], height=[(1 - self.p) * self.n, self.p * self.n])
plt.xlabel('Outcome')
plt.ylabel('Count')
plt.title('Count of Outcomes in Data')
plt.show()
def pdf(self, k):
"""Probability density function calculator for the binomial distribution.
Args:
k (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
exp = math.factorial(self.n) / (math.factorial(self.n - k) * math.factorial(k))
x = exp * (self.p ** k) * ((1 - self.p) ** (self.n - k))
return x
def plot_bar_pdf(self):
"""Function to plot the pdf of the binomial distribution
Args:
None
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
x = []
y = []
for i in range(self.n + 1):
x.append(i)
y.append(round(self.pdf(k=i), 2))
plt.bar(x, y)
plt.xlabel('Outcome')
plt.ylabel('Probability of Outcome')
plt.title('PDF of Binomial Distribution for all Possible Outcomes')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Binomial distributions with equal p
Args:
other (Binomial): Binomial instance
Returns:
Binomial: Binomial distribution
"""
assert self.p == other.p, 'p values are not equal'
result = Binomial()
result.n = self.n + other.n
result.p = self.p
return result
def __repr__(self):
"""Function to output the characteristics of the Binomial instance
Args:
None
Returns:
string: characteristics of the Binomial object
"""
return "Mean: {}, Standard Deviation: {}, p: {}, n: {}.".format(self.mean, self.stdev, self.p, self.n)
|
/sauce_probability-0.1.tar.gz/sauce_probability-0.1/sauce_probability/Binomialdistribution.py
| 0.89848 | 0.885434 |
Binomialdistribution.py
|
pypi
|