code
stringlengths
501
5.19M
package
stringlengths
2
81
path
stringlengths
9
304
filename
stringlengths
4
145
from __future__ import annotations from typing import List, Optional, Generator from zepben.evolve.model.cim.iec61968.assetinfo.power_transformer_info import PowerTransformerInfo from zepben.evolve.model.cim.iec61968.infiec61968.infassetinfo.transformer_construction_kind import TransformerConstructionKind from zepben.evolve.model.cim.iec61968.infiec61968.infassetinfo.transformer_function_kind import TransformerFunctionKind from zepben.evolve.model.cim.iec61970.base.core.base_voltage import BaseVoltage from zepben.evolve.model.cim.iec61970.base.core.conducting_equipment import ConductingEquipment from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.model.cim.iec61970.base.core.power_system_resource import PowerSystemResource from zepben.evolve.model.cim.iec61970.base.core.terminal import Terminal from zepben.evolve.model.cim.iec61970.base.wires.transformer_star_impedance import TransformerStarImpedance from zepben.evolve.model.cim.iec61970.base.wires.vector_group import VectorGroup from zepben.evolve.model.cim.iec61970.base.wires.winding_connection import WindingConnection from zepben.evolve.util import require, nlen, get_by_mrid, ngen, safe_remove __all__ = ["TapChanger", "RatioTapChanger", "PowerTransformer", "PowerTransformerEnd", "TransformerEnd"] class TapChanger(PowerSystemResource): """ Mechanism for changing transformer winding tap positions. """ control_enabled: bool = True """Specifies the regulation status of the equipment. 
True is regulating, false is not regulating.""" neutral_u: Optional[int] = None """Voltage at which the winding operates at the neutral tap setting.""" _high_step: Optional[int] = None _low_step: Optional[int] = None _neutral_step: Optional[int] = None _normal_step: Optional[int] = None _step: Optional[float] = None def __init__(self, high_step: int = None, low_step: int = None, neutral_step: int = None, normal_step: int = None, step: float = None, **kwargs): super(TapChanger, self).__init__(**kwargs) if high_step is not None: self._high_step = high_step if low_step is not None: self._low_step = low_step if neutral_step is not None: self._neutral_step = neutral_step if normal_step is not None: self._normal_step = normal_step if step is not None: self._step = step self._validate_steps() @property def high_step(self): """Highest possible tap step position, advance from neutral. The attribute shall be greater than lowStep.""" return self._high_step @high_step.setter def high_step(self, val): require((val is None) or (self._low_step is None) or (val > self._low_step), lambda: f"High step [{val}] must be greater than low step [{self._low_step}]") self._check_steps(self.low_step, val) self._high_step = val @property def low_step(self): """Lowest possible tap step position, retard from neutral""" return self._low_step @low_step.setter def low_step(self, val): require((val is None) or (self._high_step is None) or (val < self._high_step), lambda: f"Low step [{val}] must be less than high step [{self._high_step}]") self._check_steps(val, self.high_step) self._low_step = val @property def neutral_step(self): """The neutral tap step position for this winding. 
The attribute shall be equal or greater than lowStep and equal or less than highStep.""" return self._neutral_step @neutral_step.setter def neutral_step(self, val): require(self._is_in_range(val), lambda: f"Neutral step [{val}] must be between high step [{self._high_step}] and low step [{self._low_step}]") self._neutral_step = val @property def normal_step(self): """ The tap step position used in "normal" network operation for this winding. For a "Fixed" tap changer indicates the current physical tap setting. The attribute shall be equal or greater than lowStep and equal or less than highStep. """ return self._normal_step @normal_step.setter def normal_step(self, val): require(self._is_in_range(val), lambda: f"Normal step [{val}] must be between high step [{self._high_step}] and low step [{self._low_step}]") self._normal_step = val @property def step(self): """ Tap changer position. Starting step for a steady state solution. Non integer values are allowed to support continuous tap variables. The reasons for continuous value are to support study cases where no discrete tap changers has yet been designed, a solutions where a narrow voltage band force the tap step to oscillate or accommodate for a continuous solution as input. The attribute shall be equal or greater than lowStep and equal or less than highStep. 
""" return self._step @step.setter def step(self, val): require(self._is_in_range(val), lambda: f"Step [{val}] must be between high step [{self._high_step}] and low step [{self._low_step}]") self._step = val def _check_steps(self, low, high): if low is not None: require((self.step is None) or (low <= self.step), lambda: f"New value would invalidate current step of [{self.step}]") require((self.normal_step is None) or (low <= self.normal_step), lambda: f"New value would invalidate current normal_step of [{self.normal_step}]") require((self.neutral_step is None) or (low <= self.neutral_step), lambda: f"New value would invalidate current neutral_step of [{self.neutral_step}]") if high is not None: require((self.step is None) or (self.step <= high), lambda: f"New value would invalidate current step of [{self.step}]") require((self.normal_step is None) or (self.normal_step <= high), lambda: f"New value would invalidate current normal_step of [{self.normal_step}]") require((self.neutral_step is None) or (self.neutral_step <= high), lambda: f"New value would invalidate current neutral_step of [{self.neutral_step}]") def _validate_steps(self): require((self._high_step is None) or (self._low_step is None) or (self._high_step > self._low_step), lambda: f"High step [{self._high_step}] must be greater than low step [{self._low_step}]") require(self._is_in_range(self._neutral_step), lambda: f"Neutral step [{self.neutral_step}] must be between high step [{self._high_step}] and low step [{self._low_step}]") require(self._is_in_range(self._normal_step), lambda: f"Normal step [{self.normal_step}] must be between high step [{self._high_step}] and low step [{self._low_step}]") require(self._is_in_range(self._step), lambda: f"Step [{self._step}] must be between high step [{self._high_step}] and low step [{self._low_step}]") def _is_in_range(self, val) -> bool: if val is None: return True if self._low_step is not None: if val < self._low_step: return False if self._high_step is not 
None: if val > self._high_step: return False return True class RatioTapChanger(TapChanger): """ A tap changer that changes the voltage ratio impacting the voltage magnitude but not the phase angle across the transformer. Angle sign convention (general): Positive value indicates a positive phase shift from the winding where the tap is located to the other winding (for a two-winding transformer). """ transformer_end: Optional[TransformerEnd] = None """`TransformerEnd` to which this ratio tap changer belongs.""" step_voltage_increment: Optional[float] = None """Tap step increment, in per cent of neutral voltage, per step position.""" class TransformerEnd(IdentifiedObject): """ A conducting connection point of a power transformer. It corresponds to a physical transformer winding terminal. In earlier CIM versions, the TransformerWinding class served a similar purpose, but this class is more flexible because it associates to terminal but is not a specialization of ConductingEquipment. """ grounded: bool = False """(for Yn and Zn connections) True if the neutral is solidly grounded.""" r_ground: Optional[float] = None """(for Yn and Zn connections) Resistance part of neutral impedance where 'grounded' is true""" x_ground: Optional[float] = None """(for Yn and Zn connections) Reactive part of neutral impedance where 'grounded' is true""" ratio_tap_changer: Optional[RatioTapChanger] = None """Ratio tap changer associated with this transformer end.""" terminal: Optional[Terminal] = None """The terminal of the transformer that this end is associated with""" base_voltage: Optional[BaseVoltage] = None """Base voltage of the transformer end. This is essential for PU calculation.""" end_number: int = 0 """Number for this transformer end, corresponding to the end’s order in the power transformer vector group or phase angle clock number. Highest voltage winding should be 1. Each end within a power transformer should have a unique subsequent end number. 
Note the transformer end number need not match the terminal sequence number.""" star_impedance: Optional[TransformerStarImpedance] = None """(accurate for 2- or 3-winding transformers only) Pi-model impedances of this transformer end. By convention, for a two winding transformer, the full values of the transformer should be entered on the high voltage end (endNumber=1).""" class PowerTransformerEnd(TransformerEnd): """ A PowerTransformerEnd is associated with each Terminal of a PowerTransformer. The impedance values r, r0, x, and x0 of a PowerTransformerEnd represents a star equivalent as follows 1) for a two Terminal PowerTransformer the high voltage PowerTransformerEnd has non zero values on r, r0, x, and x0 while the low voltage PowerTransformerEnd has zero values for r, r0, x, and x0. 2) for a three Terminal PowerTransformer the three PowerTransformerEnds represents a star equivalent with each leg in the star represented by r, r0, x, and x0 values. 3) For a three Terminal transformer each PowerTransformerEnd shall have g, g0, b and b0 values corresponding the no load losses distributed on the three PowerTransformerEnds. The total no load loss shunt impedances may also be placed at one of the PowerTransformerEnds, preferably the end numbered 1, having the shunt values on end 1 is the preferred way. 4) for a PowerTransformer with more than three Terminals the PowerTransformerEnd impedance values cannot be used. Instead use the TransformerMeshImpedance or split the transformer into multiple PowerTransformers. """ _power_transformer: Optional[PowerTransformer] = None """The power transformer of this power transformer end.""" rated_s: Optional[int] = None """Normal apparent power rating. The attribute shall be a positive value. 
For a two-winding transformer the values for the high and low voltage sides shall be identical.""" rated_u: Optional[int] = None """Rated voltage: phase-phase for three-phase windings, and either phase-phase or phase-neutral for single-phase windings. A high voltage side, as given by TransformerEnd.endNumber, shall have a ratedU that is greater or equal than ratedU for the lower voltage sides.""" r: Optional[float] = None """Resistance (star-phases) of the transformer end. The attribute shall be equal or greater than zero for non-equivalent transformers.""" x: Optional[float] = None """Positive sequence series reactance (star-phases) of the transformer end.""" r0: Optional[float] = None """Zero sequence series resistance (star-phases) of the transformer end.""" x0: Optional[float] = None """Zero sequence series reactance of the transformer end.""" g: Optional[float] = None """Magnetizing branch conductance.""" g0: Optional[float] = None """Zero sequence magnetizing branch conductance (star-phases).""" b: Optional[float] = None """Magnetizing branch susceptance (B mag). The value can be positive or negative.""" b0: Optional[float] = None """Zero sequence magnetizing branch susceptance.""" connection_kind: WindingConnection = WindingConnection.UNKNOWN_WINDING """Kind of `zepben.protobuf.cim.iec61970.base.wires.winding_connection.WindingConnection` for this end.""" phase_angle_clock: Optional[int] = None """Terminal voltage phase angle displacement where 360 degrees are represented with clock hours. The valid values are 0 to 11. For example, for the secondary side end of a transformer with vector group code of 'Dyn11', specify the connection kind as wye with neutral and specify the phase angle of the clock as 11. 
The clock value of the transformer end number specified as 1, is assumed to be zero.""" def __init__(self, power_transformer: PowerTransformer = None, **kwargs): super(PowerTransformerEnd, self).__init__(**kwargs) if power_transformer: self.power_transformer = power_transformer @property def power_transformer(self): """The power transformer of this power transformer end.""" return self._power_transformer @power_transformer.setter def power_transformer(self, pt): if self._power_transformer is None or self._power_transformer is pt: self._power_transformer = pt else: raise ValueError(f"power_transformer for {str(self)} has already been set to {self._power_transformer}, cannot reset this field to {pt}") @property def nominal_voltage(self): return self.base_voltage.nominal_voltage if self.base_voltage else self.rated_u class PowerTransformer(ConductingEquipment): """ An electrical device consisting of two or more coupled windings, with or without a magnetic core, for introducing mutual coupling between electric circuits. Transformers can be used to control voltage and phase shift (active power flow). A power transformer may be composed of separate transformer tanks that need not be identical. A power transformer can be modeled with or without tanks and is intended for use in both balanced and unbalanced representations. A power transformer typically has two terminals, but may have one (grounding), three or more terminals. The inherited association ConductingEquipment.BaseVoltage should not be used. The association from TransformerEnd to BaseVoltage should be used instead. Attributes - vector_group : `zepben.protobuf.cim.iec61970.base.wires.VectorGroup` of the transformer for protective relaying. power_transformer_ends : """ vector_group: VectorGroup = VectorGroup.UNKNOWN """ Vector group of the transformer for protective relaying, e.g., Dyn1. 
For unbalanced transformers, this may not be simply determined from the constituent winding connections and phase angle displacements. The vectorGroup string consists of the following components in the order listed: high voltage winding connection, mid voltage winding connection(for three winding transformers), phase displacement clock number from 0 to 11, low voltage winding connection phase displacement clock number from 0 to 11. The winding connections are D(delta), Y(wye), YN(wye with neutral), Z(zigzag), ZN(zigzag with neutral), A(auto transformer). Upper case means the high voltage, lower case mid or low.The high voltage winding always has clock position 0 and is not included in the vector group string. Some examples: YNy0(two winding wye to wye with no phase displacement), YNd11(two winding wye to delta with 330 degrees phase displacement), YNyn0d5(three winding transformer wye with neutral high voltage, wye with neutral mid voltage and no phase displacement, delta low voltage with 150 degrees displacement). Phase displacement is defined as the angular difference between the phasors representing the voltages between the neutral point(real or imaginary) and the corresponding terminals of two windings, a positive sequence voltage system being applied to the high-voltage terminals, following each other in alphabetical sequence if they are lettered, or in numerical sequence if they are numbered: the phasors are assumed to rotate in a counter-clockwise sense. """ _power_transformer_ends: Optional[List[PowerTransformerEnd]] = None transformer_utilisation: Optional[float] = None """ The fraction of the transformer’s normal capacity (nameplate rating) that is in use. It may be expressed as the result of the calculation S/Sn, where S = Load on Transformer (in VA), Sn = Transformer Nameplate Rating (in VA). """ construction_kind: TransformerConstructionKind = TransformerConstructionKind.unknown """ The construction kind of this transformer. 
""" function: TransformerFunctionKind = TransformerFunctionKind.other """ The function of this transformer. """ def __init__(self, power_transformer_ends: List[PowerTransformerEnd] = None, **kwargs): super(PowerTransformer, self).__init__(**kwargs) if power_transformer_ends: for end in power_transformer_ends: if end.power_transformer is None: end.power_transformer = self self.add_end(end) def num_ends(self): """ Get the number of `PowerTransformerEnd`s for this `PowerTransformer`. """ return nlen(self._power_transformer_ends) @property def ends(self) -> Generator[PowerTransformerEnd, None, None]: """The `PowerTransformerEnd`s for this `PowerTransformer`.""" return ngen(self._power_transformer_ends) @property def power_transformer_info(self) -> Optional[PowerTransformerInfo]: """The `zepben.evolve.cim.iec61968.assetinfo.power_transformer_info.PowerTransformerInfo` for this `PowerTransformer`""" return self.asset_info @power_transformer_info.setter def power_transformer_info(self, pti: Optional[PowerTransformerInfo]): """ Set the `zepben.evolve.cim.iec61968.assetinfo.power_transformer_info.PowerTransformerInfo` for this `PowerTransformer` `pti` The `PowerTransformerInfo` to associate with this `PowerTransformer` """ self.asset_info = pti def get_base_voltage(self, terminal: Terminal = None): if terminal is None: return self.base_voltage for end in self.ends: if end.terminal is terminal: return end.base_voltage else: return None def get_end_by_mrid(self, mrid: str) -> PowerTransformerEnd: """ Get the `PowerTransformerEnd` for this `PowerTransformer` identified by `mrid` `mrid` the mRID of the required `PowerTransformerEnd` Returns The `PowerTransformerEnd` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._power_transformer_ends, mrid) def get_end_by_num(self, end_number: int): """ Get the `PowerTransformerEnd` on this `PowerTransformer` by its `end_number`. 
`end_number` The `end_number` of the `PowerTransformerEnd` in relation to this `PowerTransformer`s VectorGroup. Returns The `PowerTransformerEnd` referred to by `end_number` Raises IndexError if no `PowerTransformerEnd` was found with end_number `end_number`. """ if self._power_transformer_ends: for end in self._power_transformer_ends: if end.end_number == end_number: return end raise IndexError(f"No TransformerEnd with end_number {end_number} was found in PowerTransformer {str(self)}") def add_end(self, end: PowerTransformerEnd) -> PowerTransformer: """ Associate a `PowerTransformerEnd` with this `PowerTransformer`. If `end.end_number` == 0, the end will be assigned an end_number of `self.num_ends() + 1`. `end` the `PowerTransformerEnd` to associate with this `PowerTransformer`. Returns A reference to this `PowerTransformer` to allow fluent use. Raises `ValueError` if another `PowerTransformerEnd` with the same `mrid` already exists for this `PowerTransformer`. """ if self._validate_end(end): return self if end.end_number == 0: end.end_number = self.num_ends() + 1 self._power_transformer_ends = list() if self._power_transformer_ends is None else self._power_transformer_ends self._power_transformer_ends.append(end) self._power_transformer_ends.sort(key=lambda t: t.end_number) return self def remove_end(self, end: PowerTransformerEnd) -> PowerTransformer: """ `end` the `PowerTransformerEnd` to disassociate from this `PowerTransformer`. Raises `ValueError` if `end` was not associated with this `PowerTransformer`. Returns A reference to this `PowerTransformer` to allow fluent use. """ self._power_transformer_ends = safe_remove(self._power_transformer_ends, end) return self def clear_ends(self) -> PowerTransformer: """ Clear all `PowerTransformerEnd`s. Returns A reference to this `PowerTransformer` to allow fluent use. 
""" self._power_transformer_ends.clear() return self def _validate_end(self, end: PowerTransformerEnd) -> bool: """ Validate an end against this `PowerTransformer`'s `PowerTransformerEnd`s. `end` The `PowerTransformerEnd` to validate. Returns True if `end` is already associated with this `PowerTransformer`, otherwise False. Raises `ValueError` if `end.power_transformer` is not this `PowerTransformer`, or if this `PowerTransformer` has a different `PowerTransformerEnd` with the same mRID. """ if self._validate_reference(end, self.get_end_by_mrid, "A PowerTransformerEnd"): return True if self._validate_reference_by_sn(end.end_number, end, self.get_end_by_num, "A PowerTransformerEnd", "end_number"): return True if not end.power_transformer: end.power_transformer = self require(end.power_transformer is self, lambda: f"PowerTransformerEnd {end} references another PowerTransformer {end.power_transformer}, expected {str(self)}.") return False
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/power_transformer.py
power_transformer.py
from __future__ import annotations from typing import List, Optional, Generator from zepben.evolve.model.cim.iec61970.base.wires.energy_connection import EnergyConnection from zepben.evolve.model.cim.iec61970.base.wires.energy_source_phase import EnergySourcePhase from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove __all__ = ["EnergySource"] class EnergySource(EnergyConnection): """ A generic equivalent for an energy supplier on a transmission or distribution voltage level. """ _energy_source_phases: Optional[List[EnergySourcePhase]] = None active_power: Optional[float] = None """ High voltage source active injection. Load sign convention is used, i.e. positive sign means flow out from a node. Starting value for steady state solutions """ reactive_power: Optional[float] = None """High voltage source reactive injection. Load sign convention is used, i.e. positive sign means flow out from a node. Starting value for steady state solutions.""" voltage_angle: Optional[float] = None """Phase angle of a-phase open circuit.""" voltage_magnitude: Optional[float] = None """Phase-to-phase open circuit voltage magnitude.""" p_max: Optional[float] = None """ This is the maximum active power that can be produced by the source. Load sign convention is used, i.e. positive sign means flow out from a TopologicalNode (bus) into the conducting equipment. """ p_min: Optional[float] = None """ This is the minimum active power that can be produced by the source. Load sign convention is used, i.e. positive sign means flow out from a TopologicalNode (bus) into the conducting equipment. 
""" r: Optional[float] = None """Positive sequence Thevenin resistance.""" r0: Optional[float] = None """Zero sequence Thevenin resistance.""" rn: Optional[float] = None """Negative sequence Thevenin resistance.""" x: Optional[float] = None """Positive sequence Thevenin reactance.""" x0: Optional[float] = None """Zero sequence Thevenin reactance.""" xn: Optional[float] = None """Negative sequence Thevenin reactance.""" is_external_grid: bool = False """ True if this energy source represents the higher-level power grid connection to an external grid that normally is modelled as the slack bus for power flow calculations. """ r_min: Optional[float] = None """Minimum positive sequence Thevenin resistance.""" rn_min: Optional[float] = None """Minimum negative sequence Thevenin resistance.""" r0_min: Optional[float] = None """Minimum zero sequence Thevenin resistance.""" x_min: Optional[float] = None """Minimum positive sequence Thevenin reactance.""" xn_min: Optional[float] = None """Minimum negative sequence Thevenin reactance.""" x0_min: Optional[float] = None """Minimum zero sequence Thevenin reactance.""" r_max: Optional[float] = None """Maximum positive sequence Thevenin resistance.""" rn_max: Optional[float] = None """Maximum negative sequence Thevenin resistance.""" r0_max: Optional[float] = None """Maximum zero sequence Thevenin resistance.""" x_max: Optional[float] = None """Maximum positive sequence Thevenin reactance.""" xn_max: Optional[float] = None """Maximum negative sequence Thevenin reactance.""" x0_max: Optional[float] = None """Maximum zero sequence Thevenin reactance.""" def __init__(self, energy_source_phases: List[EnergySourcePhase] = None, **kwargs): super(EnergySource, self).__init__(**kwargs) if energy_source_phases: for phase in energy_source_phases: self.add_phase(phase) @property def phases(self) -> Generator[EnergySourcePhase, None, None]: """ The `EnergySourcePhase`s for this `EnergySource`. 
""" return ngen(self._energy_source_phases) def has_phases(self): """ Check if this source has any associated `EnergySourcePhase`s Returns True if there is at least one `EnergySourcePhase`, otherwise False """ return nlen(self._energy_source_phases) > 0 def num_phases(self): """Return the number of `EnergySourcePhase`s associated with this `EnergySource`""" return nlen(self._energy_source_phases) def get_phase(self, mrid: str) -> EnergySourcePhase: """ Get the `zepben.evolve.cim.iec61970.base.wires.energy_source_phase.EnergySourcePhase` for this `EnergySource` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.wires.energy_source_phase.EnergySourcePhase` Returns The `zepben.evolve.cim.iec61970.base.wires.energy_source_phase.EnergySourcePhase` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._energy_source_phases, mrid) def add_phase(self, phase: EnergySourcePhase) -> EnergySource: """ Associate an `zepben.evolve.cim.iec61970.base.wires.energy_source_phase.EnergySourcePhase` with this `EnergySource` `phase` the `EnergySourcePhase` to associate with this `EnergySource`. Returns A reference to this `EnergySource` to allow fluent use. Raises `ValueError` if another `EnergySourcePhase` with the same `mrid` already exists for this `EnergySource`. """ if self._validate_reference(phase, self.get_phase, "An EnergySourcePhase"): return self self._energy_source_phases = list() if self._energy_source_phases is None else self._energy_source_phases self._energy_source_phases.append(phase) return self def remove_phase(self, phase: EnergySourcePhase) -> EnergySource: """ Disassociate an `phase` from this `EnergySource` `phase` the `EnergySourcePhase` to disassociate from this `EnergySource`. Returns A reference to this `EnergySource` to allow fluent use. Raises `ValueError` if `phase` was not associated with this `EnergySource`. 
""" self._energy_source_phases = safe_remove(self._energy_source_phases, phase) return self def clear_phases(self) -> EnergySource: """ Clear all phases. Returns A reference to this `EnergySource` to allow fluent use. """ self._energy_source_phases = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/energy_source.py
energy_source.py
from __future__ import annotations

from typing import Optional, Generator, List

from zepben.evolve.model.cim.iec61970.base.core.power_system_resource import PowerSystemResource
from zepben.evolve.model.cim.iec61970.base.wires.energy_connection import EnergyConnection
from zepben.evolve.model.cim.iec61970.base.wires.phase_shunt_connection_kind import PhaseShuntConnectionKind
from zepben.evolve.model.cim.iec61970.base.wires.single_phase_kind import SinglePhaseKind

__all__ = ["EnergyConsumer", "EnergyConsumerPhase"]

from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove


class EnergyConsumerPhase(PowerSystemResource):
    """A single phase of an energy consumer."""

    _energy_consumer: Optional[EnergyConsumer] = None

    phase: SinglePhaseKind = SinglePhaseKind.X
    """Phase of this energy consumer component. If the energy consumer is wye connected, the connection is from the indicated phase to the central ground
    or neutral point. If the energy consumer is delta connected, the phase indicates an energy consumer connected from the indicated phase to the next
    logical non-neutral phase."""

    p: Optional[float] = None
    """Active power of the load. Load sign convention is used, i.e. positive sign means flow out from a node. For voltage dependent loads the value is at
    rated voltage. Starting value for a steady state solution."""

    q: Optional[float] = None
    """Reactive power of the load. Load sign convention is used, i.e. positive sign means flow out from a node. For voltage dependent loads the value is
    at rated voltage. Starting value for a steady state solution."""

    p_fixed: Optional[float] = None
    """Active power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node."""

    q_fixed: Optional[float] = None
    """Reactive power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node."""

    def __init__(self, energy_consumer: EnergyConsumer = None, **kwargs):
        super(EnergyConsumerPhase, self).__init__(**kwargs)
        if energy_consumer:
            self.energy_consumer = energy_consumer

    @property
    def energy_consumer(self):
        """The `zepben.evolve.cim.iec61970.base.wires.EnergyConsumer` that has this phase."""
        return self._energy_consumer

    @energy_consumer.setter
    def energy_consumer(self, ec):
        # Write-once association: may only be assigned when unset, or re-assigned to the identical object.
        if self._energy_consumer is None or self._energy_consumer is ec:
            self._energy_consumer = ec
        else:
            raise ValueError(f"energy_consumer for {str(self)} has already been set to {self._energy_consumer}, cannot reset this field to {ec}")


class EnergyConsumer(EnergyConnection):
    """Generic user of energy - a point of consumption on the power system phases. May also represent a pro-sumer with negative p/q values."""

    _energy_consumer_phases: Optional[List[EnergyConsumerPhase]] = None
    """The individual phase models for this energy consumer."""

    customer_count: Optional[int] = None
    """Number of individual customers represented by this demand."""

    grounded: bool = False
    """Used for Yn and Zn connections. True if the neutral is solidly grounded."""

    phase_connection: PhaseShuntConnectionKind = PhaseShuntConnectionKind.D
    """`zepben.protobuf.cim.iec61970.base.wires.phase_shunt_connection_kind.PhaseShuntConnectionKind` - The type of phase connection, such as wye, delta,
    I (single phase)."""

    p: Optional[float] = None
    """Active power of the load. Load sign convention is used, i.e. positive sign means flow out from a node. For voltage dependent loads the value is at
    rated voltage. Starting value for a steady state solution."""

    p_fixed: Optional[float] = None
    """Active power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node."""

    q: Optional[float] = None
    """Reactive power of the load. Load sign convention is used, i.e. positive sign means flow out from a node. For voltage dependent loads the value is
    at rated voltage. Starting value for a steady state solution."""

    q_fixed: Optional[float] = None
    """Reactive power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node."""

    def __init__(self, energy_consumer_phases: List[EnergyConsumerPhase] = None, **kwargs):
        super(EnergyConsumer, self).__init__(**kwargs)
        if energy_consumer_phases:
            for phase in energy_consumer_phases:
                self.add_phase(phase)

    def has_phases(self):
        """
        Check if this consumer has any associated `EnergyConsumerPhase`s

        Returns True if there is at least one `EnergyConsumerPhase`, otherwise False
        """
        return nlen(self._energy_consumer_phases) > 0

    def num_phases(self):
        """Get the number of `EnergyConsumerPhase`s for this `EnergyConsumer`."""
        return nlen(self._energy_consumer_phases)

    @property
    def phases(self) -> Generator[EnergyConsumerPhase, None, None]:
        """The individual phase models for this energy consumer."""
        return ngen(self._energy_consumer_phases)

    def get_phase(self, mrid: str) -> EnergyConsumerPhase:
        """
        Get the `EnergyConsumerPhase` for this `EnergyConsumer` identified by `mrid`

        `mrid` The mRID of the required `EnergyConsumerPhase`
        Returns The `EnergyConsumerPhase` with the specified `mrid` if it exists
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._energy_consumer_phases, mrid)

    def add_phase(self, phase: EnergyConsumerPhase) -> EnergyConsumer:
        """
        Associate an `EnergyConsumerPhase` with this `EnergyConsumer`

        `phase` the `EnergyConsumerPhase` to associate with this `EnergyConsumer`.
        Returns A reference to this `EnergyConsumer` to allow fluent use.
        Raises `ValueError` if another `EnergyConsumerPhase` with the same `mrid` already exists for this `EnergyConsumer`.
        """
        if self._validate_reference(phase, self.get_phase, "An EnergyConsumerPhase"):
            # The phase is already associated with this consumer; nothing to add.
            return self
        # The backing list is created lazily so an unpopulated consumer carries no list at all.
        self._energy_consumer_phases = list() if self._energy_consumer_phases is None else self._energy_consumer_phases
        self._energy_consumer_phases.append(phase)
        return self

    def remove_phase(self, phase: EnergyConsumerPhase) -> EnergyConsumer:
        """
        Disassociate `phase` from this `EnergyConsumer`.

        `phase` the `EnergyConsumerPhase` to disassociate from this `EnergyConsumer`.
        Returns A reference to this `EnergyConsumer` to allow fluent use.
        Raises `ValueError` if `phase` was not associated with this `EnergyConsumer`.
        """
        self._energy_consumer_phases = safe_remove(self._energy_consumer_phases, phase)
        return self

    def clear_phases(self) -> EnergyConsumer:
        """
        Clear all phases.

        Returns A reference to this `EnergyConsumer` to allow fluent use.
        """
        self._energy_consumer_phases = None
        return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/energy_consumer.py
energy_consumer.py
from typing import Optional

from zepben.evolve import ShuntCompensatorInfo
from zepben.evolve.model.cim.iec61970.base.wires.energy_connection import RegulatingCondEq
from zepben.evolve.model.cim.iec61970.base.wires.phase_shunt_connection_kind import PhaseShuntConnectionKind

__all__ = ["ShuntCompensator", "LinearShuntCompensator"]


class ShuntCompensator(RegulatingCondEq):
    """
    A shunt capacitor or reactor or switchable bank of shunt capacitors or reactors. A section of a shunt compensator
    is an individual capacitor or reactor. A negative value for reactivePerSection indicates that the compensator is
    a reactor. ShuntCompensator is a single terminal device. Ground is implied.
    """

    grounded: bool = False
    """Used for Yn and Zn connections. True if the neutral is solidly grounded."""

    nom_u: Optional[int] = None
    """The voltage at which the nominal reactive power may be calculated. This should normally be within 10% of the voltage at which the capacitor
    is connected to the network."""

    phase_connection: PhaseShuntConnectionKind = PhaseShuntConnectionKind.UNKNOWN
    """The type of phase connection, such as wye or delta."""

    sections: Optional[float] = None
    """
    Shunt compensator sections in use. Starting value for steady state solution. Non integer values are allowed to support continuous variables.
    The reasons for continuous values are to support study cases where no discrete shunt compensator has yet been designed, a solution where a narrow
    voltage band forces the sections to oscillate, or to accommodate a continuous solution as input.

    For `LinearShuntCompensator` the value shall be between zero and `ShuntCompensator.maximumSections`. At value zero the shunt compensator
    conductance and admittance is zero. Linear interpolation of conductance and admittance between the previous and next integer section is
    applied in case of non-integer values.

    For `NonlinearShuntCompensator`s shall only be set to one of the NonlinearShuntCompensatorPoint.sectionNumber. There is no interpolation
    between NonlinearShuntCompensatorPoint-s.
    """

    @property
    def shunt_compensator_info(self) -> Optional[ShuntCompensatorInfo]:
        """The `zepben.evolve.cim.iec61968.assetinfo.shunt_compensator_info.ShuntCompensatorInfo` for this `ShuntCompensator`"""
        return self.asset_info

    @shunt_compensator_info.setter
    def shunt_compensator_info(self, sci: Optional[ShuntCompensatorInfo]):
        """
        Set the `zepben.evolve.cim.iec61968.assetinfo.shunt_compensator_info.ShuntCompensatorInfo` for this `ShuntCompensator`

        `sci` The `ShuntCompensatorInfo` for this `ShuntCompensator`
        """
        self.asset_info = sci


class LinearShuntCompensator(ShuntCompensator):
    """A linear shunt compensator has banks or sections with equal admittance values."""

    b0_per_section: Optional[float] = None
    """Zero sequence shunt (charging) susceptance per section"""

    b_per_section: Optional[float] = None
    """Positive sequence shunt (charging) susceptance per section"""

    g0_per_section: Optional[float] = None
    """Zero sequence shunt (charging) conductance per section"""

    g_per_section: Optional[float] = None
    """Positive sequence shunt (charging) conductance per section"""
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/shunt_compensator.py
shunt_compensator.py
from typing import Optional

from zepben.evolve.model.cim.iec61968.assetinfo.wire_info import CableInfo, WireInfo
from zepben.evolve.model.cim.iec61970.base.core.conducting_equipment import ConductingEquipment
from zepben.evolve.model.cim.iec61970.base.wires.per_length import PerLengthSequenceImpedance

__all__ = ["AcLineSegment", "Conductor"]


class Conductor(ConductingEquipment):
    """
    Combination of conducting material with consistent electrical characteristics, building a single electrical
    system, used to carry current between points in the power system.
    """

    length: Optional[float] = None
    """Segment length for calculating line section capabilities."""

    @property
    def wire_info(self):
        """The `zepben.evolve.cim.iec61968.assetinfo.wire_info.WireInfo` for this `Conductor`"""
        # Stored on the inherited `asset_info` slot rather than a dedicated attribute.
        return self.asset_info

    @wire_info.setter
    def wire_info(self, wi: Optional[WireInfo]):
        """
        Set the `zepben.evolve.cim.iec61968.assetinfo.wire_info.WireInfo` for this `Conductor`

        `wi` The `WireInfo` for this `Conductor`
        """
        self.asset_info = wi

    def is_underground(self):
        """
        Returns True if this `Conductor` is underground.
        """
        # Underground is inferred from the catalogue data: cable info (vs overhead wire info) means underground.
        return isinstance(self.wire_info, CableInfo)


class AcLineSegment(Conductor):
    """
    A wire or combination of wires, with consistent electrical characteristics, building a single electrical system,
    used to carry alternating current between points in the power system.

    For symmetrical, transposed 3ph lines, it is sufficient to use attributes of the line segment, which describe
    impedances and admittances for the entire length of the segment.

    Additionally impedances can be computed by using length and associated per length impedances.

    The BaseVoltage at the two ends of ACLineSegments in a Line shall have the same BaseVoltage.nominalVoltage. However,
    boundary lines may have slightly different BaseVoltage.nominalVoltages and variation is allowed. Larger voltage
    difference in general requires use of an equivalent branch.
    """

    per_length_sequence_impedance: Optional[PerLengthSequenceImpedance] = None
    """A `zepben.evolve.PerLengthSequenceImpedance` describing this ACLineSegment"""
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/aclinesegment.py
aclinesegment.py
from __future__ import annotations

from zepben.evolve.model.cim.iec61970.base.core.conducting_equipment import ConductingEquipment
from zepben.evolve.model.cim.iec61970.base.wires.single_phase_kind import SinglePhaseKind

__all__ = ["Switch", "Breaker", "Disconnector", "Jumper", "Fuse", "ProtectedSwitch", "Recloser", "LoadBreakSwitch"]

from zepben.evolve.util import require


def _calculate_open_state(current_state: int, is_open: bool, phase: SinglePhaseKind = None) -> int:
    """
    Produce a new 4-bit open-state bitmask derived from `current_state`.

    `current_state` The existing per-phase open-state bits.
    `is_open` The desired status: True for open, False for closed.
    `phase` The phase whose bit should change. None updates all four phase bits at once.
    Returns The recalculated open-state bitmask.
    """
    require(phase != SinglePhaseKind.NONE and phase != SinglePhaseKind.INVALID, lambda: f"Invalid phase {phase} specified")
    if phase is None:
        # Ganged operation: every phase bit is set or cleared together.
        return 0b1111 if is_open else 0
    if is_open:
        return current_state | phase.bit_mask
    return current_state & ~phase.bit_mask


def _check_open(current_state: int, phase: SinglePhaseKind = None) -> bool:
    """
    Test an open-state bitmask.

    `current_state` The per-phase open-state bits to inspect.
    `phase` The phase whose bit should be tested. None tests whether any phase at all is open.
    Returns True if the requested phase (or any phase, when None) is open.
    """
    require(phase != SinglePhaseKind.NONE and phase != SinglePhaseKind.INVALID, lambda: f"Invalid phase {phase} specified")
    if phase is None:
        return current_state != 0
    return (current_state & phase.bit_mask) != 0


class Switch(ConductingEquipment):
    """
    A generic device designed to close, or open, or both, one or more electric circuits. All switches are two terminal
    devices including grounding switches.

    NOTE: The normal and currently open properties are implemented as an integer rather than a boolean to allow for
    the caching of measurement values if the switch is operating un-ganged. These values will cache the latest values
    from the measurement value for each phase of the switch.
    """

    _open: int = 0
    """Tells if the switch is considered open when used as input to topology processing."""

    _normally_open: int = 0
    """The attribute is used in cases when no Measurement for the status value is present. If the Switch has a status measurement the Discrete.normalValue
    is expected to match with the Switch.normalOpen."""

    def is_normally_open(self, phase: SinglePhaseKind = None):
        """
        Check whether this switch is normally open on `phase`.

        `phase` The `single_phase_kind.SinglePhaseKind` to check the normal status. A `phase` of `None` (default) checks if any phase is open.
        Returns True if `phase` is open in its normal state, False if it is closed
        """
        return _check_open(self._normally_open, phase)

    def get_normal_state(self) -> int:
        """
        Get the underlying normal open states. Stored as 4 bits, 1 per phase.
        """
        return self._normally_open

    def is_open(self, phase: SinglePhaseKind = None):
        """
        Check whether this switch is currently open on `phase`.

        `phase` The `zepben.evolve.cim.iec61970.base.wires.single_phase_kind.SinglePhaseKind` to check the current status. A `phase` of `None` (default)
        checks if any phase is open.
        Returns True if `phase` is open in its current state, False if it is closed
        """
        return _check_open(self._open, phase)

    def get_state(self) -> int:
        """
        Get the underlying open states, used as input to topology processing. Stored as 4 bits, 1 per phase.
        """
        return self._open

    def set_normally_open(self, is_normally_open: bool, phase: SinglePhaseKind = None) -> Switch:
        """
        Update the normal open state of one phase (or of every phase).

        `is_normally_open` indicates if the phase(s) should be opened.
        `phase` the phase to set the normal status. If set to None will default to all phases.
        Returns This `Switch` to be used fluently.
        """
        self._normally_open = _calculate_open_state(self._normally_open, is_normally_open, phase)
        return self

    def set_open(self, is_open: bool, phase: SinglePhaseKind = None) -> Switch:
        """
        Update the current open state of one phase (or of every phase).

        `is_open` indicates if the phase(s) should be opened.
        `phase` the phase to set the current status. If set to None will default to all phases.
        Returns This `Switch` to be used fluently.
        """
        self._open = _calculate_open_state(self._open, is_open, phase)
        return self


class ProtectedSwitch(Switch):
    """
    A ProtectedSwitch is a switching device that can be operated by ProtectionEquipment.
    """
    pass


class Breaker(ProtectedSwitch):
    """
    A mechanical switching device capable of making, carrying, and breaking currents under normal circuit conditions
    and also making, carrying for a specified time, and breaking currents under specified abnormal circuit conditions
    e.g. those of short circuit.
    """

    def is_substation_breaker(self):
        """Convenience function for detecting if this breaker is part of a substation. Returns true if this Breaker is associated with a Substation."""
        return self.num_substations() > 0


class Disconnector(Switch):
    """
    A manually operated or motor operated mechanical switching device used for changing the connections in a circuit,
    or for isolating a circuit or equipment from a source of power. It is required to open or close circuits when
    negligible current is broken or made.
    """
    pass


class Fuse(Switch):
    """
    An overcurrent protective device with a circuit opening fusible part that is heated and severed by the passage of
    overcurrent through it. A fuse is considered a switching device because it breaks current.
    """
    pass


class Jumper(Switch):
    """
    A short section of conductor with negligible impedance which can be manually removed and replaced if the circuit
    is de-energized. Note that zero-impedance branches can potentially be modeled by other equipment types.
    """
    pass


class Recloser(ProtectedSwitch):
    """
    Pole-mounted fault interrupter with built-in phase and ground relays, current transformer (CT), and supplemental
    controls.
    """
    pass


class LoadBreakSwitch(ProtectedSwitch):
    """A mechanical switching device capable of making, carrying, and breaking currents under normal operating conditions."""
    pass
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/switch.py
switch.py
from __future__ import annotations

from typing import Optional, List, Generator, TYPE_CHECKING

if TYPE_CHECKING:
    from zepben.evolve import PowerElectronicsUnit

from zepben.evolve.model.cim.iec61970.base.core.power_system_resource import PowerSystemResource
from zepben.evolve.model.cim.iec61970.base.wires.energy_connection import RegulatingCondEq
from zepben.evolve.model.cim.iec61970.base.wires.single_phase_kind import SinglePhaseKind
from zepben.evolve.util import ngen, nlen, get_by_mrid, safe_remove

__all__ = ["PowerElectronicsConnection", "PowerElectronicsConnectionPhase"]


class PowerElectronicsConnectionPhase(PowerSystemResource):
    """A single phase of a power electronics connection."""

    # NOTE(review): unlike EnergyConsumerPhase.energy_consumer, this back-reference is a plain attribute with no
    # write-once guard — confirm whether reassignment should raise as it does for the other *Phase classes.
    power_electronics_connection: Optional[PowerElectronicsConnection] = None
    """The power electronics connection to which the phase belongs."""

    p: Optional[float] = None
    """Active power injection. Load sign convention is used, i.e. positive sign means flow into the equipment from the network."""

    phase: SinglePhaseKind = SinglePhaseKind.X
    """
    Phase of this energy producer component. If the energy producer is wye connected, the connection is from the indicated phase to the central
    ground or neutral point. If the energy producer is delta connected, the phase indicates an energy producer connected from the indicated phase
    to the next logical non-neutral phase.
    """

    q: Optional[float] = None
    """Reactive power injection. Load sign convention is used, i.e. positive sign means flow into the equipment from the network."""


class PowerElectronicsConnection(RegulatingCondEq):
    """
    A connection to the AC network for energy production or consumption that uses power electronics rather than rotating machines.
    """

    max_i_fault: Optional[int] = None
    """Maximum fault current this device will contribute, in per-unit of rated current, before the converter protection will trip or bypass."""

    p: Optional[float] = None
    """Active power injection. Load sign convention is used, i.e. positive sign means flow out from a node. Starting value for a steady state solution."""

    q: Optional[float] = None
    """Reactive power injection. Load sign convention is used, i.e. positive sign means flow out from a node. Starting value for a steady state solution."""

    max_q: Optional[float] = None
    """Maximum reactive power limit. This is the maximum (nameplate) limit for the unit."""

    min_q: Optional[float] = None
    """Minimum reactive power limit for the unit. This is the minimum (nameplate) limit for the unit."""

    rated_s: Optional[int] = None
    """Nameplate apparent power rating for the unit. The attribute shall have a positive value."""

    rated_u: Optional[int] = None
    """Rated voltage (nameplate data, Ur in IEC 60909-0). It is primarily used for short circuit data exchange according to IEC 60909.
    The attribute shall be a positive value."""

    _power_electronics_units: Optional[List[PowerElectronicsUnit]] = None
    """An AC network connection may have several power electronics units connecting through it."""

    _power_electronics_connection_phases: Optional[List[PowerElectronicsConnectionPhase]] = None
    """The individual units models for the power electronics connection."""

    def __init__(self, power_electronics_units: List[PowerElectronicsUnit] = None,
                 power_electronics_connection_phases: List[PowerElectronicsConnectionPhase] = None, **kwargs):
        # Units and phases are routed through add_unit/add_phase so duplicate-mRID validation applies to
        # constructor arguments exactly as it does to later additions.
        super(PowerElectronicsConnection, self).__init__(**kwargs)
        if power_electronics_units:
            for unit in power_electronics_units:
                self.add_unit(unit)

        if power_electronics_connection_phases:
            for phase in power_electronics_connection_phases:
                self.add_phase(phase)

    @property
    def units(self) -> Generator[PowerElectronicsUnit, None, None]:
        """
        The `PowerElectronicsUnit`s for this `PowerElectronicsConnection`.
        """
        return ngen(self._power_electronics_units)

    @property
    def phases(self) -> Generator[PowerElectronicsConnectionPhase, None, None]:
        """
        The `PowerElectronicsConnectionPhase`s for this `PowerElectronicsConnection`.
        """
        return ngen(self._power_electronics_connection_phases)

    def has_units(self):
        """
        Check if this connection has any associated `PowerElectronicsUnit`s

        Returns True if there is at least one `PowerElectronicsUnit`, otherwise False
        """
        return nlen(self._power_electronics_units) > 0

    def num_units(self):
        """Return the number of `PowerElectronicsUnit`s associated with this `PowerElectronicsConnection`"""
        return nlen(self._power_electronics_units)

    def get_unit(self, mrid: str) -> PowerElectronicsUnit:
        """
        Get the `zepben.evolve.cim.iec61970.base.wires.generation.production.power_electronics_unit.PowerElectronicsUnit` for this
        `PowerElectronicsConnection` identified by `mrid`

        `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.wires.generation.production.power_electronics_unit.PowerElectronicsUnit`
        Returns The `zepben.evolve.cim.iec61970.base.wires.generation.production.power_electronics_unit.PowerElectronicsUnit` with the specified
        `mrid` if it exists
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._power_electronics_units, mrid)

    def add_unit(self, unit: PowerElectronicsUnit) -> PowerElectronicsConnection:
        """
        Associate an `zepben.evolve.cim.iec61970.base.wires.generation.production.power_electronics_unit.PowerElectronicsUnit` with this
        `PowerElectronicsConnection`

        `unit` the `PowerElectronicsUnit` to associate with this `PowerElectronicsConnection`.
        Returns A reference to this `PowerElectronicsConnection` to allow fluent use.
        Raises `ValueError` if another `PowerElectronicsUnit` with the same `mrid` already exists for this `PowerElectronicsConnection`.
        """
        if self._validate_reference(unit, self.get_unit, "A PowerElectronicsUnit"):
            # Already associated with this connection; nothing to add.
            return self
        # Backing list is created lazily; an unpopulated connection carries no list at all.
        self._power_electronics_units = list() if self._power_electronics_units is None else self._power_electronics_units
        self._power_electronics_units.append(unit)
        return self

    def remove_unit(self, unit: PowerElectronicsUnit) -> PowerElectronicsConnection:
        """
        Disassociate `unit` from this `PowerElectronicsConnection`

        `unit` the `PowerElectronicsUnit` to disassociate from this `PowerElectronicsConnection`.
        Returns A reference to this `PowerElectronicsConnection` to allow fluent use.
        Raises `ValueError` if `unit` was not associated with this `PowerElectronicsConnection`.
        """
        self._power_electronics_units = safe_remove(self._power_electronics_units, unit)
        return self

    def clear_units(self) -> PowerElectronicsConnection:
        """
        Clear all units.
        Returns A reference to this `PowerElectronicsConnection` to allow fluent use.
        """
        self._power_electronics_units = None
        return self

    def has_phases(self):
        """
        Check if this connection has any associated `PowerElectronicsConnectionPhase`s

        Returns True if there is at least one `PowerElectronicsConnectionPhase`, otherwise False
        """
        return nlen(self._power_electronics_connection_phases) > 0

    def num_phases(self):
        """Return the number of `PowerElectronicsConnectionPhase`s associated with this `PowerElectronicsConnection`"""
        return nlen(self._power_electronics_connection_phases)

    def get_phase(self, mrid: str) -> PowerElectronicsConnectionPhase:
        """
        Get the `PowerElectronicsConnectionPhase` for this `PowerElectronicsConnection` identified by `mrid`

        `mrid` the mRID of the required `PowerElectronicsConnectionPhase`
        Returns The `PowerElectronicsConnectionPhase` with the specified `mrid` if it exists
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._power_electronics_connection_phases, mrid)

    def add_phase(self, phase: PowerElectronicsConnectionPhase) -> PowerElectronicsConnection:
        """
        Associate a `PowerElectronicsConnectionPhase` with this `PowerElectronicsConnection`

        `phase` the `PowerElectronicsConnectionPhase` to associate with this `PowerElectronicsConnection`.
        Returns A reference to this `PowerElectronicsConnection` to allow fluent use.
        Raises `ValueError` if another `PowerElectronicsConnectionPhase` with the same `mrid` already exists for this `PowerElectronicsConnection`.
        """
        if self._validate_reference(phase, self.get_phase, "A PowerElectronicsConnectionPhase"):
            # Already associated with this connection; nothing to add.
            return self
        # Backing list is created lazily; an unpopulated connection carries no list at all.
        self._power_electronics_connection_phases = list() if self._power_electronics_connection_phases is None else self._power_electronics_connection_phases
        self._power_electronics_connection_phases.append(phase)
        return self

    def remove_phase(self, phase: PowerElectronicsConnectionPhase) -> PowerElectronicsConnection:
        """
        Disassociate `phase` from this `PowerElectronicsConnection`

        `phase` the `PowerElectronicsConnectionPhase` to disassociate from this `PowerElectronicsConnection`.
        Returns A reference to this `PowerElectronicsConnection` to allow fluent use.
        Raises `ValueError` if `phase` was not associated with this `PowerElectronicsConnection`.
        """
        self._power_electronics_connection_phases = safe_remove(self._power_electronics_connection_phases, phase)
        return self

    def clear_phases(self) -> PowerElectronicsConnection:
        """
        Clear all phases.
        Returns A reference to this `PowerElectronicsConnection` to allow fluent use.
        """
        self._power_electronics_connection_phases = None
        return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/wires/power_electronics_connection.py
power_electronics_connection.py
from __future__ import annotations from typing import List, Optional, Dict, Generator from dataclassy import dataclass from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.model.cim.iec61970.base.diagramlayout.diagram_style import DiagramStyle from zepben.evolve.model.cim.iec61970.base.diagramlayout.orientation_kind import OrientationKind from zepben.evolve.util import nlen, require, ngen, safe_remove, safe_remove_by_id __all__ = ["DiagramObjectPoint", "Diagram", "DiagramObject"] @dataclass(slots=True) class DiagramObjectPoint(object): """ A point in a given space defined by 3 coordinates and associated to a diagram object. The coordinates may be positive or negative as the origin does not have to be in the corner of a diagram. """ x_position: float """The X coordinate of this point.""" y_position: float """The Y coordinate of this point.""" def __str__(self): return f"x:{self.x_position}|y:{self.y_position}" class DiagramObject(IdentifiedObject): """ An object that defines one or more points in a given space. This object can be associated with anything that specializes IdentifiedObject. For single line diagrams such objects typically include such items as analog values, breakers, disconnectors, power transformers, and transmission lines. """ _diagram: Optional[Diagram] = None """A diagram object is part of a diagram.""" identified_object_mrid: Optional[str] = None """The domain object to which this diagram object is associated.""" style: Optional[str] = None """A diagram object has a style associated that provides a reference for the style used in the originating system.""" rotation: float = 0.0 """Sets the angle of rotation of the diagram object. Zero degrees is pointing to the top of the diagram. 
Rotation is clockwise.""" _diagram_object_points: Optional[List[DiagramObjectPoint]] = None def __init__(self, diagram: Diagram = None, diagram_object_points: List[DiagramObjectPoint] = None, **kwargs): super(DiagramObject, self).__init__(**kwargs) if diagram: self.diagram = diagram if diagram_object_points: for point in diagram_object_points: self.add_point(point) @property def diagram(self): return self._diagram @diagram.setter def diagram(self, diag): if self._diagram is None or self._diagram is diag: self._diagram = diag else: raise ValueError(f"diagram for {str(self)} has already been set to {self._diagram}, cannot reset this field to {diag}") def num_points(self): """ Returns the number of `DiagramObjectPoint`s associated with this `DiagramObject` """ return nlen(self._diagram_object_points) @property def points(self) -> Generator[DiagramObjectPoint, None, None]: """ The `DiagramObjectPoint`s for this `DiagramObject`. """ return ngen(self._diagram_object_points) def get_point(self, sequence_number: int) -> DiagramObjectPoint: """ Get the `DiagramObjectPoint` for this `DiagramObject` represented by `sequence_number` . A diagram object can have 0 or more points to reflect its layout position, routing (for polylines) or boundary (for polygons). Index in the underlying points collection corresponds to the sequence number `sequence_number` The sequence number of the `DiagramObjectPoint` to get. Returns The `DiagramObjectPoint` identified by `sequence_number` Raises IndexError if this `DiagramObject` didn't contain `sequence_number` points. """ if self._diagram_object_points is not None: return self._diagram_object_points[sequence_number] else: raise IndexError(sequence_number) def __getitem__(self, item: int) -> DiagramObjectPoint: return self.get_point(item) def add_point(self, point: DiagramObjectPoint) -> DiagramObject: """ Associate a `DiagramObjectPoint` with this `DiagramObject`, assigning it a sequence_number of `num_points`. 
`point` The `DiagramObjectPoint` to associate with this `DiagramObject`. Returns A reference to this `DiagramObject` to allow fluent use. """ return self.insert_point(point) def insert_point(self, point: DiagramObjectPoint, sequence_number: int = None) -> DiagramObject: """ Associate a `DiagramObjectPoint` with this `DiagramObject` `point` The `DiagramObjectPoint` to associate with this `DiagramObject`. `sequence_number` The sequence number of the `DiagramObjectPoint`. Returns A reference to this `DiagramObject` to allow fluent use. Raises `ValueError` if `sequence_number` < 0 or > `num_points()`. """ if sequence_number is None: sequence_number = self.num_points() require(0 <= sequence_number <= self.num_points(), lambda: f"Unable to add DiagramObjectPoint to {str(self)}. Sequence number {sequence_number}" f" is invalid. Expected a value between 0 and {self.num_points()}. Make sure you are " f"adding the points in the correct order and there are no gaps in the numbering.") self._diagram_object_points = list() if self._diagram_object_points is None else self._diagram_object_points self._diagram_object_points.insert(sequence_number, point) return self def __setitem__(self, key, value): self.insert_point(value, key) def remove_point(self, point: DiagramObjectPoint) -> DiagramObject: """ Disassociate `point` from this `DiagramObject` `point` The `DiagramObjectPoint` to disassociate from this `DiagramObject`. Returns A reference to this `DiagramObject` to allow fluent use. Raises `ValueError` if `point` was not associated with this `DiagramObject`. """ self._diagram_object_points = safe_remove(self._diagram_object_points, point) return self def clear_points(self) -> DiagramObject: """ Clear all points. Returns A reference to this `DiagramObject` to allow fluent use. """ self._diagram_object_points = None return self class Diagram(IdentifiedObject): """ The diagram being exchanged. 
The coordinate system is a standard Cartesian coordinate system and the orientation attribute defines the orientation. """ diagram_style: DiagramStyle = DiagramStyle.SCHEMATIC """A Diagram may have a DiagramStyle.""" orientation_kind: OrientationKind = OrientationKind.POSITIVE """Coordinate system orientation of the diagram.""" _diagram_objects: Optional[Dict[str, DiagramObject]] = None def __init__(self, diagram_objects: List[DiagramObject] = None, **kwargs): super(Diagram, self).__init__(**kwargs) if diagram_objects: for obj in diagram_objects: self.add_diagram_object(obj) def num_diagram_objects(self): """ Returns The number of `DiagramObject`s associated with this `Diagram` """ return nlen(self._diagram_objects) @property def diagram_objects(self) -> Generator[DiagramObject, None, None]: """ The diagram objects belonging to this diagram. """ return ngen(self._diagram_objects.values() if self._diagram_objects is not None else None) def get_diagram_object(self, mrid: str) -> DiagramObject: """ Get the `DiagramObject` for this `Diagram` identified by `mrid` `mrid` the mRID of the required `DiagramObject` Returns The `DiagramObject` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ if not self._diagram_objects: raise KeyError(mrid) try: return self._diagram_objects[mrid] except AttributeError: raise KeyError(mrid) def add_diagram_object(self, diagram_object: DiagramObject) -> Diagram: """ Associate a `DiagramObject` with this `Diagram`. `diagram_object` the `DiagramObject` to associate with this `Diagram`. Returns The previous `DiagramObject` stored by `diagram_object`s mrid, otherwise `diagram_object` is returned if there was no previous value. Raises `ValueError` if another `DiagramObject` with the same `mrid` already exists for this `Diagram`, or if `diagram_object.diagram` is not this `Diagram`. 
""" if not diagram_object.diagram: diagram_object.diagram = self require(diagram_object.diagram is self, lambda: f"{str(diagram_object)} references another Diagram " f"{str(diagram_object.diagram)}, expected {str(self)}.") if self._validate_reference(diagram_object, self.get_diagram_object, "A DiagramObject"): return self self._diagram_objects = dict() if self._diagram_objects is None else self._diagram_objects self._diagram_objects[diagram_object.mrid] = diagram_object return self def remove_diagram_object(self, diagram_object: DiagramObject) -> Diagram: """ Disassociate `diagram_object` from this `Diagram` `diagram_object` the `DiagramObject` to disassociate with this `Diagram`. Returns A reference to this `Diagram` to allow fluent use. Raises `KeyError` if `diagram_object` was not associated with this `Diagram`. """ self._diagram_objects = safe_remove_by_id(self._diagram_objects, diagram_object) return self def clear_diagram_objects(self) -> Diagram: """ Clear all `DiagramObject`s. Returns A reference to this `Diagram` to allow fluent use. """ self._diagram_objects = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/diagramlayout/diagram_layout.py
diagram_layout.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING, TypeVar, Type if TYPE_CHECKING: from zepben.evolve import UsagePoint, EquipmentContainer, OperationalRestriction # Bound TypeVar used only in annotations (lazy via __future__ annotations), so it is safe under TYPE_CHECKING. TEquipmentContainer = TypeVar("TEquipmentContainer", bound=EquipmentContainer) from zepben.evolve.model.cim.iec61970.base.core.equipment_container import Feeder, Site from zepben.evolve.model.cim.iec61970.base.core.power_system_resource import PowerSystemResource from zepben.evolve.model.cim.iec61970.base.core.substation import Substation from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove __all__ = ['Equipment'] class Equipment(PowerSystemResource): """ Abstract class, should only be used through subclasses. Any part of a power system that is a physical device, electronic or mechanical. """ in_service: bool = True """If True, the equipment is in service.""" normally_in_service: bool = True """If True, the equipment is _normally_ in service.""" # All collections below are lazily initialised to None to keep instances lightweight; # accessors go through nlen/ngen/get_by_mrid/safe_remove, which all accept None. _usage_points: Optional[List[UsagePoint]] = None _equipment_containers: Optional[List[EquipmentContainer]] = None _operational_restrictions: Optional[List[OperationalRestriction]] = None _current_feeders: Optional[List[Feeder]] = None def __init__(self, usage_points: List[UsagePoint] = None, equipment_containers: List[EquipmentContainer] = None, operational_restrictions: List[OperationalRestriction] = None, current_feeders: List[Feeder] = None, **kwargs): """ Create an `Equipment`, optionally pre-populating its associations. Each provided item is added via the corresponding `add_*` method so duplicate-mRID validation applies. """ super(Equipment, self).__init__(**kwargs) if usage_points: for up in usage_points: self.add_usage_point(up) if equipment_containers: for container in equipment_containers: self.add_container(container) if operational_restrictions: for restriction in operational_restrictions: self.add_operational_restriction(restriction) if current_feeders: for cf in current_feeders: self.add_current_feeder(cf) @property def containers(self) -> Generator[EquipmentContainer, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer`s this equipment belongs to. """ return ngen(self._equipment_containers) @property def current_feeders(self) -> Generator[Feeder, None, None]: """ The current `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder`s this equipment belongs to. """ return ngen(self._current_feeders) @property def normal_feeders(self) -> Generator[Feeder, None, None]: """ The normal `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder`s this equipment belongs to. """ # Normal feeders are not stored separately: they are the Feeder-typed entries of the containers list. return ngen(self._equipment_containers_of_type(Feeder)) @property def sites(self) -> Generator[Site, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.equipment_container.Site`s this equipment belongs to. """ return ngen(self._equipment_containers_of_type(Site)) @property def substations(self) -> Generator[Substation, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.substation.Substation`s this equipment belongs to. """ return ngen(self._equipment_containers_of_type(Substation)) @property def usage_points(self) -> Generator[UsagePoint, None, None]: """ The `zepben.evolve.cim.iec61968.metering.metering.UsagePoint`s for this equipment. """ return ngen(self._usage_points) @property def operational_restrictions(self) -> Generator[OperationalRestriction, None, None]: """ The `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction`s that this equipment is associated with. """ return ngen(self._operational_restrictions) def num_containers(self) -> int: """ Returns The number of `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer`s associated with this `Equipment` """ return nlen(self._equipment_containers) def num_substations(self) -> int: """ Returns The number of `zepben.evolve.cim.iec61970.base.core.substation.Substation`s associated with this `Equipment` """ return len(self._equipment_containers_of_type(Substation)) def num_sites(self) -> int: """ Returns The number of `zepben.evolve.cim.iec61970.base.core.equipment_container.Site`s associated with this `Equipment` """ return len(self._equipment_containers_of_type(Site)) def num_normal_feeders(self) -> int: """ Returns The number of normal `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder`s associated with this `Equipment` """ return len(self._equipment_containers_of_type(Feeder)) def num_usage_points(self) -> int: """ Returns The number of `zepben.evolve.cim.iec61968.metering.metering.UsagePoint`s associated with this `Equipment` """ return nlen(self._usage_points) def num_current_feeders(self) -> int: """ Returns The number of `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder`s associated with this `Equipment` """ return nlen(self._current_feeders) def num_operational_restrictions(self) -> int: """ Returns The number of `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction`s associated with this `Equipment` """ return nlen(self._operational_restrictions) def get_container(self, mrid: str) -> EquipmentContainer: """ Get the `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer` for this `Equipment` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer` Returns The `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._equipment_containers, mrid) def add_container(self, ec: EquipmentContainer) -> Equipment: """ Associate an `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer` with this `Equipment` `ec` The `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer` to associate with this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if another `EquipmentContainer` with the same `mrid` already exists for this `Equipment`. """ if self._validate_reference(ec, self.get_container, "An EquipmentContainer"): return self self._equipment_containers = list() if self._equipment_containers is None else self._equipment_containers self._equipment_containers.append(ec) return self def remove_container(self, ec: EquipmentContainer) -> Equipment: """ Disassociate `ec` from this `Equipment`. `ec` The `zepben.evolve.cim.iec61970.base.core.equipment_container.EquipmentContainer` to disassociate from this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if `ec` was not associated with this `Equipment`. """ self._equipment_containers = safe_remove(self._equipment_containers, ec) return self def clear_containers(self) -> Equipment: """ Clear all equipment. Returns A reference to this `Equipment` to allow fluent use. """ self._equipment_containers = None return self def get_current_feeder(self, mrid: str) -> Feeder: """ Get the `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` for this `Equipment` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` Returns The `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._current_feeders, mrid) def add_current_feeder(self, feeder: Feeder) -> Equipment: """ Associate `feeder` with this `Equipment`. `feeder` The `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` to associate with this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if another `Feeder` with the same `mrid` already exists for this `Equipment`. """ if self._validate_reference(feeder, self.get_current_feeder, "A Feeder"): return self self._current_feeders = list() if self._current_feeders is None else self._current_feeders self._current_feeders.append(feeder) return self def remove_current_feeder(self, feeder: Feeder) -> Equipment: """ Disassociate `feeder` from this `Equipment` `feeder` The `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` to disassociate from this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if `feeder` was not associated with this `Equipment`. """ self._current_feeders = safe_remove(self._current_feeders, feeder) return self def clear_current_feeders(self) -> Equipment: """ Clear all current `Feeder`s. Returns A reference to this `Equipment` to allow fluent use. """ self._current_feeders = None return self def get_usage_point(self, mrid: str) -> UsagePoint: """ Get the `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` for this `Equipment` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` Returns The `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._usage_points, mrid) def add_usage_point(self, up: UsagePoint) -> Equipment: """ Associate `up` with this `Equipment`. `up` the `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` to associate with this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if another `UsagePoint` with the same `mrid` already exists for this `Equipment`. """ if self._validate_reference(up, self.get_usage_point, "A UsagePoint"): return self self._usage_points = list() if self._usage_points is None else self._usage_points self._usage_points.append(up) return self def remove_usage_point(self, up: UsagePoint) -> Equipment: """ Disassociate `up` from this `Equipment`. `up` The `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` to disassociate from this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if `up` was not associated with this `Equipment`. """ self._usage_points = safe_remove(self._usage_points, up) return self def clear_usage_points(self) -> Equipment: """ Clear all usage_points. Returns A reference to this `Equipment` to allow fluent use. """ self._usage_points = None return self def get_operational_restriction(self, mrid: str) -> OperationalRestriction: """ Get the `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction` for this `Equipment` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction` Returns The `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._operational_restrictions, mrid) def add_operational_restriction(self, op: OperationalRestriction) -> Equipment: """ Associate `op` with this `Equipment`. `op` The `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction` to associate with this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if another `OperationalRestriction` with the same `mrid` already exists for this `Equipment`. """ if self._validate_reference(op, self.get_operational_restriction, "An OperationalRestriction"): return self self._operational_restrictions = list() if self._operational_restrictions is None else self._operational_restrictions self._operational_restrictions.append(op) return self def remove_operational_restriction(self, op: OperationalRestriction) -> Equipment: """ Disassociate `op` from this `Equipment`. `op` The `zepben.evolve.cim.iec61968.operations.operational_restriction.OperationalRestriction` to disassociate from this `Equipment`. Returns A reference to this `Equipment` to allow fluent use. Raises `ValueError` if `op` was not associated with this `Equipment`. """ self._operational_restrictions = safe_remove(self._operational_restrictions, op) return self def clear_operational_restrictions(self) -> Equipment: """ Clear all `OperationalRestrictions`. Returns A reference to this `Equipment` to allow fluent use. """ self._operational_restrictions = None return self def _equipment_containers_of_type(self, ectype: Type[TEquipmentContainer]) -> List[TEquipmentContainer]: """Get the `EquipmentContainer`s for this `Equipment` of type `ectype`""" if self._equipment_containers: return [ec for ec in self._equipment_containers if isinstance(ec, ectype)] else: return [] 
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/equipment.py
equipment.py
from __future__ import annotations from typing import Optional from typing import TYPE_CHECKING from weakref import ref, ReferenceType from zepben.evolve.services.network.tracing.phases.phase_status import NormalPhases, CurrentPhases from zepben.evolve.services.network.tracing.feeder.feeder_direction import FeederDirection if TYPE_CHECKING: from zepben.evolve import ConnectivityNode, ConductingEquipment, PhaseStatus from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.model.cim.iec61970.base.core.phase_code import PhaseCode from zepben.evolve.model.phases import TracedPhases __all__ = ["AcDcTerminal", "Terminal"] class AcDcTerminal(IdentifiedObject): """ An electrical connection point (AC or DC) to a piece of conducting equipment. Terminals are connected at physical connection points called connectivity nodes. """ pass class Terminal(AcDcTerminal): """ An AC electrical connection point to a piece of conducting equipment. Terminals are connected at physical connection points called connectivity nodes. """ _conducting_equipment: Optional[ConductingEquipment] = None """The conducting equipment of the terminal. Conducting equipment have terminals that may be connected to other conducting equipment terminals via connectivity nodes.""" phases: PhaseCode = PhaseCode.ABC """Represents the normal network phasing condition. If the attribute is missing three phases (ABC) shall be assumed.""" sequence_number: int = 0 """The orientation of the terminal connections for a multiple terminal conducting equipment. The sequence numbering starts with 1 and additional terminals should follow in increasing order. The first terminal is the "starting point" for a two terminal branch.""" normal_feeder_direction: FeederDirection = FeederDirection.NONE """ Stores the direction of the feeder head relative to this [Terminal] in the normal state of the network. """ current_feeder_direction: FeederDirection = FeederDirection.NONE """ Stores the direction of the feeder head relative to this [Terminal] in the current state of the network. """ # NOTE(review): class-level default instance shared by the class declaration; presumably the # dataclassy-based metaclass copies mutable defaults per instance — confirm, otherwise all # Terminals would share one TracedPhases object. traced_phases: TracedPhases = TracedPhases() """the phase object representing the traced phases in both the normal and current network. If properly configured you would expect the normal state phases to match those in `phases`""" _cn: Optional[ReferenceType] = None """This is a weak reference to the connectivity node so if a Network object goes out of scope, holding a single conducting equipment reference does not cause everything connected to it in the network to stay in memory.""" def __init__(self, conducting_equipment: ConductingEquipment = None, connectivity_node: ConnectivityNode = None, **kwargs): super(Terminal, self).__init__(**kwargs) if conducting_equipment: self.conducting_equipment = conducting_equipment # We set the connectivity node to itself if the name parameter is not used to make sure the positional argument is wrapped in a reference. if connectivity_node: self.connectivity_node = connectivity_node else: # Re-assigning through the property ensures any value the base class stored directly # on _cn is wrapped in a weakref (the setter wraps non-None values in ref()). self.connectivity_node = self._cn @property def conducting_equipment(self): """ The conducting equipment of the terminal. Conducting equipment have terminals that may be connected to other conducting equipment terminals via connectivity nodes. """ return self._conducting_equipment @conducting_equipment.setter def conducting_equipment(self, ce): # Write-once: only allow setting when unset, or re-setting to the identical object. if self._conducting_equipment is None or self._conducting_equipment is ce: self._conducting_equipment = ce else: raise ValueError(f"conducting_equipment for {str(self)} has already been set to {self._conducting_equipment}, cannot reset this field to {ce}") @property def connectivity_node(self) -> Optional[ConnectivityNode]: # Dereference the weakref; returns None if never connected or if the node was garbage collected. if self._cn: return self._cn() else: return None @connectivity_node.setter def connectivity_node(self, cn: Optional[ConnectivityNode]): if cn: self._cn = ref(cn) else: self._cn = None @property def connected(self) -> bool: # True when this terminal currently resolves to a live ConnectivityNode. if self.connectivity_node: return True return False @property def connectivity_node_id(self) -> Optional[str]: return self.connectivity_node.mrid if self.connectivity_node is not None else None def __repr__(self): return f"Terminal{{{self.mrid}}}" def get_switch(self): """ Get any associated switch for this Terminal Returns Switch if present in this terminals ConnectivityNode, else None """ return self.connectivity_node.get_switch() @property def base_voltage(self): # Delegates to the owning equipment; assumes conducting_equipment has been set — TODO confirm callers guarantee this. return self.conducting_equipment.get_base_voltage(self) def get_other_terminals(self): """Return every other terminal on this terminal's conducting equipment (excluding this one).""" return [t for t in self.conducting_equipment.terminals if t is not self] @property def normal_phases(self) -> PhaseStatus: """ Convenience method for accessing the normal phases. :return: The [PhaseStatus] for the terminal in the normal state of the network. """ return NormalPhases(self) @property def current_phases(self) -> PhaseStatus: """ Convenience method for accessing the current phases. :return: The `PhaseStatus` for the terminal in the current state of the network. """ return CurrentPhases(self) def connect(self, connectivity_node: ConnectivityNode): """Connect this terminal to `connectivity_node` (stored as a weak reference).""" self.connectivity_node = connectivity_node def disconnect(self): """Disconnect this terminal from its connectivity node.""" self.connectivity_node = None 
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/terminal.py
terminal.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Substation from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove __all__ = ["GeographicalRegion", "SubGeographicalRegion"] class GeographicalRegion(IdentifiedObject): """ A geographical region of a power system network phases. """ _sub_geographical_regions: Optional[List[SubGeographicalRegion]] = None def __init__(self, sub_geographical_regions: List[SubGeographicalRegion] = None, **kwargs): super(GeographicalRegion, self).__init__(**kwargs) if sub_geographical_regions: for sgr in sub_geographical_regions: self.add_sub_geographical_region(sgr) def num_sub_geographical_regions(self) -> int: """ Returns The number of `SubGeographicalRegion`s associated with this `GeographicalRegion` """ return nlen(self._sub_geographical_regions) @property def sub_geographical_regions(self) -> Generator[SubGeographicalRegion, None, None]: """ The `SubGeographicalRegion`s of this `GeographicalRegion`. """ return ngen(self._sub_geographical_regions) def get_sub_geographical_region(self, mrid: str) -> SubGeographicalRegion: """ Get the `SubGeographicalRegion` for this `GeographicalRegion` identified by `mrid` `mrid` The mRID of the required `SubGeographicalRegion` Returns The `SubGeographicalRegion` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._sub_geographical_regions, mrid) def add_sub_geographical_region(self, sub_geographical_region: SubGeographicalRegion) -> GeographicalRegion: """ Associate a `SubGeographicalRegion` with this `GeographicalRegion` `sub_geographical_region` The `SubGeographicalRegion` to associate with this `GeographicalRegion`. Returns A reference to this `GeographicalRegion` to allow fluent use. 
Raises `ValueError` if another `SubGeographicalRegion` with the same `mrid` already exists for this `GeographicalRegion`. """ if self._validate_reference(sub_geographical_region, self.get_sub_geographical_region, "A SubGeographicalRegion"): return self self._sub_geographical_regions = list() if self._sub_geographical_regions is None else self._sub_geographical_regions self._sub_geographical_regions.append(sub_geographical_region) return self def remove_sub_geographical_region(self, sub_geographical_region: SubGeographicalRegion) -> GeographicalRegion: """ Disassociate `sub_geographical_region` from this `GeographicalRegion` `sub_geographical_region` The `SubGeographicalRegion` to disassociate from this `GeographicalRegion`. Returns A reference to this `GeographicalRegion` to allow fluent use. Raises `ValueError` if `sub_geographical_region` was not associated with this `GeographicalRegion`. """ self._sub_geographical_regions = safe_remove(self._sub_geographical_regions, sub_geographical_region) return self def clear_sub_geographical_regions(self) -> GeographicalRegion: """ Clear all SubGeographicalRegions. Returns A reference to this `GeographicalRegion` to allow fluent use. """ self._sub_geographical_regions = None return self class SubGeographicalRegion(IdentifiedObject): """ A subset of a geographical region of a power system network model. 
""" geographical_region: Optional[GeographicalRegion] = None """The geographical region to which this sub-geographical region is within.""" _substations: Optional[List[Substation]] = None def __init__(self, substations: List[Substation] = None, **kwargs): super(SubGeographicalRegion, self).__init__(**kwargs) if substations: for sub in substations: self.add_substation(sub) def num_substations(self) -> int: """ Returns The number of `zepben.evolve.iec61970.base.core.substation.Substation`s associated with this `SubGeographicalRegion` """ return nlen(self._substations) @property def substations(self) -> Generator[Substation, None, None]: """ All substations belonging to this sub geographical region. """ return ngen(self._substations) def get_substation(self, mrid: str) -> Substation: """ Get the `zepben.evolve.iec61970.base.core.substation.Substation` for this `SubGeographicalRegion` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.iec61970.base.core.substation.Substation` Returns The `zepben.evolve.iec61970.base.core.substation.Substation` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._substations, mrid) def add_substation(self, substation: Substation) -> SubGeographicalRegion: """ Associate a `Substation` with this `GeographicalRegion` `substation` the `zepben.evolve.iec61970.base.core.substation.Substation` to associate with this `SubGeographicalRegion`. Returns A reference to this `SubGeographicalRegion` to allow fluent use. Raises `ValueError` if another `zepben.evolve.iec61970.base.core.substation.Substation` with the same `mrid` already exists for this `GeographicalRegion`. 
""" if self._validate_reference(substation, self.get_substation, "A Substation"): return self self._substations = list() if self._substations is None else self._substations self._substations.append(substation) return self def remove_substation(self, substation: Substation) -> SubGeographicalRegion: """ Disassociate `substation` from this `GeographicalRegion` `substation` The `zepben.evolve.iec61970.base.core.substation.Substation` to disassociate from this `SubGeographicalRegion`. Returns A reference to this `SubGeographicalRegion` to allow fluent use. Raises `ValueError` if `substation` was not associated with this `SubGeographicalRegion`. """ self._substations = safe_remove(self._substations, substation) return self def clear_substations(self) -> SubGeographicalRegion: """ Clear all `Substations`. Returns A reference to this `SubGeographicalRegion` to allow fluent use. """ self._substations = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/regions.py
regions.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Loop, Circuit, Feeder from zepben.evolve.model.cim.iec61970.base.core.equipment_container import EquipmentContainer from zepben.evolve.model.cim.iec61970.base.core.regions import SubGeographicalRegion from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove __all__ = ["Substation"] class Substation(EquipmentContainer): """ A collection of equipment for purposes other than generation or utilization, through which electric energy in bulk is passed for the purposes of switching or modifying its characteristics. """ sub_geographical_region: Optional[SubGeographicalRegion] = None """The SubGeographicalRegion containing the substation.""" _normal_energized_feeders: Optional[List[Feeder]] = None _loops: Optional[List[Loop]] = None _energized_loops: Optional[List[Loop]] = None _circuits: Optional[List[Circuit]] = None def __init__(self, normal_energized_feeders: List[Feeder] = None, loops: List[Loop] = None, energized_loops: List[Loop] = None, circuits: List[Circuit] = None, **kwargs): super(Substation, self).__init__(**kwargs) if normal_energized_feeders: for feeder in normal_energized_feeders: self.add_feeder(feeder) if loops: for loop in loops: self.add_loop(loop) if energized_loops: for loop in energized_loops: self.add_energized_loop(loop) if circuits: for circuit in circuits: self.add_circuit(circuit) @property def circuits(self) -> Generator[Circuit, None, None]: """ The `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit`s originating from this substation. """ return ngen(self._circuits) @property def loops(self) -> Generator[Loop, None, None]: """ The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` originating from this substation. 
""" return ngen(self._loops) @property def energized_loops(self) -> Generator[Loop, None, None]: """ The `zepben.evolve.cim.infiec61970.feeder.loop.Loop`s originating from this substation that are energised. """ return ngen(self._energized_loops) @property def feeders(self) -> Generator[Feeder, None, None]: """ The normal energized feeders of the substation. Also used for naming purposes. """ return ngen(self._normal_energized_feeders) def num_feeders(self): """ Returns The number of `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder`s associated with this `Substation` """ return nlen(self._normal_energized_feeders) def get_feeder(self, mrid: str) -> Feeder: """ Get the `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` for this `Substation` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` Returns The `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._normal_energized_feeders, mrid) def add_feeder(self, feeder: Feeder) -> Substation: """ Associate a `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` with this `Substation` `feeder` The `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` to associate with this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if another `Feeder` with the same `mrid` already exists for this `Substation`. 
""" if self._validate_reference(feeder, self.get_feeder, "A Feeder"): return self self._normal_energized_feeders = list() if self._normal_energized_feeders is None else self._normal_energized_feeders self._normal_energized_feeders.append(feeder) return self def remove_feeder(self, feeder: Feeder) -> Substation: """ Disassociate `feeder` from this `Substation` `feeder` The `zepben.evolve.cim.iec61970.base.core.equipment_container.Feeder` to disassociate from this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if `feeder` was not associated with this `Substation`. """ self._normal_energized_feeders = safe_remove(self._normal_energized_feeders, feeder) return self def clear_feeders(self) -> Substation: """ Clear all current `Feeder`s. Returns A reference to this `Substation` to allow fluent use. """ self._normal_energized_feeders = None return self def num_loops(self): """ Returns The number of `zepben.evolve.cim.infiec61970.feeder.loop.Loop`s associated with this `Substation` """ return nlen(self._loops) def get_loop(self, mrid: str) -> Loop: """ Get the `zepben.evolve.cim.infiec61970.feeder.loop.Loop` for this `Substation` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.infiec61970.feeder.loop.Loop` Returns The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._loops, mrid) def add_loop(self, loop: Loop) -> Substation: """ Associate a `zepben.evolve.cim.infiec61970.feeder.loop.Loop` with this `Substation` `loop` The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` to associate with this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if another `Loop` with the same `mrid` already exists for this `Substation`. 
""" if self._validate_reference(loop, self.get_loop, "A Loop"): return self self._loops = list() if self._loops is None else self._loops self._loops.append(loop) return self def remove_loop(self, loop: Loop) -> Substation: """ Disassociate `loop` from this `Substation` `loop` The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` to disassociate from this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if `loop` was not associated with this `Substation`. """ self._loops = safe_remove(self._loops, loop) return self def clear_loops(self) -> Substation: """ Clear all current `Loop`s. Returns A reference to this `Substation` to allow fluent use. """ self._loops = None return self def num_energized_loops(self): """ Returns The number of `zepben.evolve.cim.infiec61970.feeder.loop.Loop`s associated with this `Substation` """ return nlen(self._energized_loops) def get_energized_loop(self, mrid: str) -> Loop: """ Get the `zepben.evolve.cim.infiec61970.feeder.loop.Loop` for this `Substation` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.infiec61970.feeder.loop.Loop` Returns The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._energized_loops, mrid) def add_energized_loop(self, loop: Loop) -> Substation: """ Associate a `zepben.evolve.cim.infiec61970.feeder.loop.Loop` with this `Substation` `loop` The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` to associate with this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if another `Loop` with the same `mrid` already exists for this `Substation`. 
""" if self._validate_reference(loop, self.get_energized_loop, "A Loop"): return self self._energized_loops = list() if self._energized_loops is None else self._energized_loops self._energized_loops.append(loop) return self def remove_energized_loop(self, loop: Loop) -> Substation: """ Disassociate `loop` from this `Substation` `loop` The `zepben.evolve.cim.infiec61970.feeder.loop.Loop` to disassociate from this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if `loop` was not associated with this `Substation`. """ self._energized_loops = safe_remove(self._energized_loops, loop) return self def clear_energized_loops(self) -> Substation: """ Clear all current `Loop`s. Returns A reference to this `Substation` to allow fluent use. """ self._energized_loops = None return self def num_circuits(self): """ Returns The number of `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit`s associated with this `Substation` """ return nlen(self._circuits) def get_circuit(self, mrid: str) -> Circuit: """ Get the `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit` for this `Substation` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit` Returns The `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._circuits, mrid) def add_circuit(self, circuit: Circuit) -> Substation: """ Associate a `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit` with this `Substation` `circuit` The `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit` to associate with this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if another `Circuit` with the same `mrid` already exists for this `Substation`. 
""" if self._validate_reference(circuit, self.get_circuit, "A Circuit"): return self self._circuits = list() if self._circuits is None else self._circuits self._circuits.append(circuit) return self def remove_circuit(self, circuit: Circuit) -> Substation: """ Disassociate `circuit` from this `Substation` `circuit` The `zepben.evolve.cim.infiec61970.feeder.circuit.Circuit` to disassociate from this `Substation`. Returns A reference to this `Substation` to allow fluent use. Raises `ValueError` if `circuit` was not associated with this `Substation`. """ self._circuits = safe_remove(self._circuits, circuit) return self def clear_circuits(self) -> Substation: """ Clear all current `Circuit`s. Returns A reference to this `Substation` to allow fluent use. """ self._circuits = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/substation.py
substation.py
from enum import Enum, unique from typing import List, Set from zepben.evolve.model.cim.iec61970.base.wires.single_phase_kind import SinglePhaseKind __all__ = ["PhaseCode", "phase_code_by_id", "phase_code_from_single_phases"] def phase_code_by_id(value: int): """ Get a PhaseCode by its value `value` ID of the PhaseCode from 0 as per the order of definition Returns The PhaseCode """ return _PHASE_CODE_VALUES[value] @unique class PhaseCode(Enum): """ An unordered enumeration of phase identifiers. Allows designation of phases for both transmission and distribution equipment, circuits and loads. The enumeration, by itself, does not describe how the phases are connected together or connected to ground. Ground is not explicitly denoted as a phase. Residential and small commercial loads are often served from single-phase, or split-phase, secondary circuits. For example of s12N, phases 1 and 2 refer to hot wires that are 180 degrees out of phase, while N refers to the neutral wire. Through single-phase transformer connections, these secondary circuits may be served from one or two of the primary phases A, B, and C. For three-phase loads, use the A, B, C phase codes instead of s12N. 
""" NONE = (0, [SinglePhaseKind.NONE]) """No phases specified""" A = (1, [SinglePhaseKind.A]) """Phase A""" B = (2, [SinglePhaseKind.B]) """Phase B""" C = (3, [SinglePhaseKind.C]) """Phase C""" N = (4, [SinglePhaseKind.N]) """Neutral Phase""" AB = (5, [SinglePhaseKind.A, SinglePhaseKind.B]) """Phases A and B""" AC = (6, [SinglePhaseKind.A, SinglePhaseKind.C]) """Phases A and C""" AN = (7, [SinglePhaseKind.A, SinglePhaseKind.N]) """Phases A and N""" BC = (8, [SinglePhaseKind.B, SinglePhaseKind.C]) """Phases B and C""" BN = (9, [SinglePhaseKind.B, SinglePhaseKind.N]) """Phases B and N""" CN = (10, [SinglePhaseKind.C, SinglePhaseKind.N]) """Phases C and N""" ABC = (11, [SinglePhaseKind.A, SinglePhaseKind.B, SinglePhaseKind.C]) """Phases A, B and C""" ABN = (12, [SinglePhaseKind.A, SinglePhaseKind.B, SinglePhaseKind.N]) """Phases A, B and neutral""" ACN = (13, [SinglePhaseKind.A, SinglePhaseKind.C, SinglePhaseKind.N]) """Phases A, C and neutral""" BCN = (14, [SinglePhaseKind.B, SinglePhaseKind.C, SinglePhaseKind.N]) """Phases B, C and neutral""" ABCN = (15, [SinglePhaseKind.A, SinglePhaseKind.B, SinglePhaseKind.C, SinglePhaseKind.N]) """Phases A, B, C and neutral""" X = (16, [SinglePhaseKind.X]) """Unknown non-neutral phase""" XN = (17, [SinglePhaseKind.X, SinglePhaseKind.N]) """Unknown non-neutral phase plus neutral""" XY = (18, [SinglePhaseKind.X, SinglePhaseKind.Y]) """Two Unknown non-neutral phases""" XYN = (19, [SinglePhaseKind.X, SinglePhaseKind.Y, SinglePhaseKind.N]) """Two Unknown non-neutral phases plus neutral""" Y = (20, [SinglePhaseKind.Y]) """Unknown non-neutral phase""" YN = (21, [SinglePhaseKind.Y, SinglePhaseKind.N]) """Unknown non-neutral phase plus neutral""" s1 = (22, [SinglePhaseKind.s1]) """Secondary phase 1""" s1N = (23, [SinglePhaseKind.s1, SinglePhaseKind.N]) """Secondary phase 1 plus neutral""" s12 = (24, [SinglePhaseKind.s1, SinglePhaseKind.s2]) """Secondary phase 1 and 2""" s12N = (25, [SinglePhaseKind.s1, SinglePhaseKind.s2, 
SinglePhaseKind.N]) """Secondary phases 1, 2, and neutral""" s2 = (26, [SinglePhaseKind.s2]) """Secondary phase 2""" s2N = (27, [SinglePhaseKind.s2, SinglePhaseKind.N]) """Secondary phase 2 plus neutral""" @property def short_name(self) -> str: return str(self)[10:] @property def single_phases(self) -> List[SinglePhaseKind]: return self.value[1] @property def num_phases(self) -> int: return len(self.value) @property def without_neutral(self) -> 'PhaseCode': if SinglePhaseKind.N not in self: return self else: return phase_code_from_single_phases({it for it in self.single_phases if it != SinglePhaseKind.N}) def __iter__(self): return PhaseCodeIter(self.single_phases) def __contains__(self, item): return item in self.single_phases class PhaseCodeIter: def __init__(self, single_phases: List[SinglePhaseKind]): self._index = -1 self._single_phases = single_phases def __iter__(self): return self def __next__(self): self._index += 1 if self._index < len(self._single_phases): return self._single_phases[self._index] raise StopIteration def phase_code_from_single_phases(single_phases: Set[SinglePhaseKind]) -> PhaseCode: return _PHASE_CODE_BY_PHASES.get(frozenset(single_phases), PhaseCode.NONE) _PHASE_CODE_VALUES = list(PhaseCode.__members__.values()) _PHASE_CODE_BY_PHASES = {frozenset(it.single_phases): it for it in PhaseCode}
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/phase_code.py
phase_code.py
from __future__ import annotations from typing import List, Optional, Generator, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Terminal from zepben.evolve.model.cim.iec61970.base.core.base_voltage import BaseVoltage from zepben.evolve.model.cim.iec61970.base.core.equipment import Equipment __all__ = ['ConductingEquipment'] from zepben.evolve.util import get_by_mrid, require, ngen class ConductingEquipment(Equipment): """ Abstract class, should only be used through subclasses. The parts of the AC power system that are designed to carry current or that are conductively connected through terminals. ConductingEquipment are connected by `zepben.evolve.cim.iec61970.base.core.Terminal`'s which are in turn associated with `zepben.evolve.cim.iec61970.base.connectivity_node.ConnectivityNode`'s. Each `zepben.evolve.iec61970.base.core.terminal.Terminal` is associated with _exactly one_ `ConnectivityNode`, and through that `ConnectivityNode` can be linked with many other `Terminals` and `ConductingEquipment`. """ base_voltage: Optional[BaseVoltage] = None """`zepben.evolve.iec61970.base.core.base_voltage.BaseVoltage` of this `ConductingEquipment`. Use only when there is no voltage level container used and only one base voltage applies. For example, not used for transformers.""" _terminals: List[Terminal] = [] def __init__(self, terminals: List[Terminal] = None, **kwargs): super(ConductingEquipment, self).__init__(**kwargs) if terminals: for term in terminals: if term.conducting_equipment is None: term.conducting_equipment = self self.add_terminal(term) def get_base_voltage(self, terminal: Terminal = None): """ Get the `zepben.evolve.iec61970.base.core.base_voltage.BaseVoltage` of this `ConductingEquipment`. Note `terminal` is not used here, but this method can be overridden in child classes (e.g PowerTransformer). `terminal` The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` to get the voltage at. 
Returns thee BaseVoltage of this `ConductingEquipment` at `terminal` """ return self.base_voltage @property def terminals(self) -> Generator[Terminal, None, None]: """ `ConductingEquipment` have `zepben.evolve.cim.iec61970.base.core.terminal.Terminal`s that may be connected to other `ConductingEquipment` `zepben.evolve.cim.iec61970.base.core.terminal.Terminal`s via `ConnectivityNode`s. """ return ngen(self._terminals) def num_terminals(self): """ Get the number of `zepben.evolve.cim.iec61970.base.core.terminal.Terminal`s for this `ConductingEquipment`. """ return len(self._terminals) def get_terminal_by_mrid(self, mrid: str) -> Terminal: """ Get the `zepben.evolve.iec61970.base.core.terminal.Terminal` for this `ConductingEquipment` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` Returns The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._terminals, mrid) def get_terminal_by_sn(self, sequence_number: int): """ Get the `zepben.evolve.iec61970.base.core.terminal.Terminal` on this `ConductingEquipment` by its `sequence_number`. `sequence_number` The `sequence_number` of the `zepben.evolve.iec61970.base.core.terminal.Terminal` in relation to this `ConductingEquipment`. Returns The `zepben.evolve.iec61970.base.core.terminal.Terminal` on this `ConductingEquipment` with sequence number `sequence_number` Raises IndexError if no `zepben.evolve.iec61970.base.core.terminal.Terminal` was found with sequence_number `sequence_number`. 
""" for term in self._terminals: if term.sequence_number == sequence_number: return term raise IndexError(f"No Terminal with sequence_number {sequence_number} was found in ConductingEquipment {str(self)}") def __getitem__(self, item: int): return self.get_terminal_by_sn(item) def add_terminal(self, terminal: Terminal) -> ConductingEquipment: """ Associate `terminal` with this `ConductingEquipment`. If `terminal.sequence_number` == 0, the terminal will be assigned a sequence_number of `self.num_terminals() + 1`. `terminal` The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` to associate with this `ConductingEquipment`. Returns A reference to this `ConductingEquipment` to allow fluent use. Raises `ValueError` if another `zepben.evolve.iec61970.base.core.terminal.Terminal` with the same `mrid` already exists for this `ConductingEquipment`. """ if self._validate_terminal(terminal): return self if terminal.sequence_number == 0: terminal.sequence_number = self.num_terminals() + 1 self._terminals.append(terminal) self._terminals.sort(key=lambda t: t.sequence_number) return self def remove_terminal(self, terminal: Terminal) -> ConductingEquipment: """ Disassociate `terminal` from this `ConductingEquipment` `terminal` the `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` to disassociate from this `ConductingEquipment`. Returns A reference to this `ConductingEquipment` to allow fluent use. Raises `ValueError` if `terminal` was not associated with this `ConductingEquipment`. """ self._terminals.remove(terminal) return self def clear_terminals(self) -> ConductingEquipment: """ Clear all terminals. Returns A reference to this `ConductingEquipment` to allow fluent use. 
""" self._terminals.clear() return self def __repr__(self): return (f"{super(ConductingEquipment, self).__repr__()}, in_service={self.in_service}, " f"normally_in_service={self.normally_in_service}, location={self.location}" ) def _validate_terminal(self, terminal: Terminal) -> bool: """ Validate a terminal against this `ConductingEquipment`'s `zepben.evolve.iec61970.base.core.terminal.Terminal`s. `terminal` The `zepben.evolve.iec61970.base.core.terminal.Terminal` to validate. Returns True if `zepben.evolve.iec61970.base.core.terminal.Terminal`` is already associated with this `ConductingEquipment`, otherwise False. Raises `ValueError` if `zepben.evolve.iec61970.base.core.terminal.Terminal`s `conducting_equipment` is not this `ConductingEquipment`, or if this `ConductingEquipment` has a different `zepben.evolve.iec61970.base.core.terminal.Terminal` with the same mRID. """ if self._validate_reference(terminal, self.get_terminal_by_mrid, "A Terminal"): return True if self._validate_reference_by_sn(terminal.sequence_number, terminal, self.get_terminal_by_sn, "A Terminal"): return True if not terminal.conducting_equipment: terminal.conducting_equipment = self require(terminal.conducting_equipment is self, lambda: f"Terminal {terminal} references another piece of conducting equipment {terminal.conducting_equipment}, expected {str(self)}.") return False
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/conducting_equipment.py
conducting_equipment.py
from __future__ import annotations import logging from abc import ABCMeta from typing import Callable, Any, List, Generator, Optional from dataclassy import dataclass from zepben.evolve.model.cim.iec61970.base.core.name import Name from zepben.evolve.util import require, CopyableUUID, nlen, ngen, safe_remove __all__ = ["IdentifiedObject"] logger = logging.getLogger(__name__) @dataclass(slots=True) class IdentifiedObject(object, metaclass=ABCMeta): """ Root class to provide common identification for all classes needing identification and naming attributes. Everything should extend this class, however it's not mandated that every subclass must use all the fields defined here. All names of attributes of classes extending this class *must* directly reflect CIM properties if they have a direct relation, however must be in snake case to keep the phases PEP compliant. """ mrid: str = CopyableUUID() """Master resource identifier issued by a model authority. The mRID is unique within an exchange context. Global uniqueness is easily achieved by using a UUID, as specified in RFC 4122, for the mRID. The use of UUID is strongly recommended.""" name: str = "" """The name is any free human readable and possibly non unique text naming the object.""" description: str = "" """a free human readable text describing or naming the object. It may be non unique and may not correlate to a naming hierarchy.""" _names: Optional[List[Name]] = None def __init__(self, names: Optional[List[Name]] = None, **kwargs): super(IdentifiedObject, self).__init__(**kwargs) if names: for name in names: self.add_name(name) def __str__(self): return f"{self.__class__.__name__}{{{'|'.join(a for a in (str(self.mrid), str(self.name)) if a)}}}" @property def names(self) -> Generator[Name, None, None]: """All names of this identified object. 
The returned collection is read only.""" return ngen(self._names) def num_names(self) -> int: """Get the number of entries in the `Name` collection.""" return nlen(self._names) def get_name(self, name_type: str, name: str) -> Optional[Name]: """ Find the `Name` with the matching `name_type` and `name` :return: The matched Name or None """ if self._names: for name_ in self._names: if name_.type.name == name_type and name_.name == name: return name_ return None def add_name(self, name: Name) -> IdentifiedObject: """ Associate a `Name` with this `IdentifiedObject` :param name: The `Name` to associate with this `IdentifiedObject`. :return: A reference to this `IdentifiedObject` to allow fluent use. :raise ValueError: If `name` references another `IdentifiedObject`, or another `Name` already exists with the matching `type` and `name`. """ if not name.identified_object: name.identified_object = self require(name.identified_object is self, lambda: f"Attempting to add a Name to {str(self)} that does not reference this identified object") existing = self.get_name(name.type.name, name.name) if existing: if existing is self: return self else: raise ValueError(f"Failed to add duplicate name {str(name)} to {str(self)}.") self._names = list() if not self._names else self._names self._names.append(name) return self def remove_name(self, name: Name) -> IdentifiedObject: """ Disassociate a `Name` from this `IdentifiedObject`. :param name: The `Name` to disassociate from this `IdentifiedObject`. :return: A reference to this `IdentifiedObject` to allow fluent use. :raises ValueError: Iif `name` was not associated with this `IdentifiedObject`. """ self._names = safe_remove(self._names, name) return self def clear_names(self) -> IdentifiedObject: """ Clear all names. :return: A reference to this `IdentifiedObject` to allow fluent use. 
""" self._names = None return self def _validate_reference(self, other: IdentifiedObject, getter: Callable[[str], IdentifiedObject], type_descr: str) -> bool: """ Validate whether a given reference exists to `other` using the provided getter function. :param other: The object to look up with the getter using its mRID. :param getter: A function that takes an mRID and returns an `IdentifiedObject`, and throws a `KeyError` if it couldn't be found. :param type_descr: The type description to use for the lazily generated error message. Should be of the form "A[n] type(other)" :return: True if `other` was retrieved with `getter` and was equivalent, False otherwise. :raises ValueError: If the object retrieved from `getter` is not `other`. """ try: get_result = getter(other.mrid) require(get_result is other, lambda: f"{type_descr} with mRID {other.mrid} already exists in {str(self)}") return True except (KeyError, AttributeError): return False def _validate_reference_by_sn(self, field: Any, other: IdentifiedObject, getter: Callable[[Any], IdentifiedObject], type_descr: str, field_name: str = "sequence_number") -> bool: """ Validate whether a given reference exists to `other` using the provided getter function called with `field`. :param other: The object to look up with the getter using its mRID. :param getter: A function that takes takes `field` and returns an `IdentifiedObject`, and throws an `IndexError` if it couldn't be found. :param type_descr: The type description to use for the lazily generated error message. Should be of the form "A[n] type(other)" :return: True if `other` was retrieved with `getter` and was equivalent, False otherwise. :raises ValueError: If the object retrieved from `getter` is not `other`. """ try: get_result = getter(field) require(get_result is other, lambda: f"{type_descr} with {field_name} {field} already exists in {str(self)}") return True except IndexError: return False
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/identified_object.py
identified_object.py
from __future__ import annotations from typing import Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Terminal from dataclassy import dataclass from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.util import get_by_mrid, ngen __all__ = ["ConnectivityNode"] @dataclass(slots=False) class ConnectivityNode(IdentifiedObject): """ Connectivity nodes are points where terminals of AC conducting equipment are connected together with zero impedance. """ # noinspection PyDunderSlots __slots__ = ["_terminals", "__weakref__"] _terminals: List[Terminal] = [] def __init__(self, terminals: List[Terminal] = None, **kwargs): super(ConnectivityNode, self).__init__(**kwargs) if terminals: for term in terminals: self.add_terminal(term) def __iter__(self): return iter(self._terminals) def num_terminals(self): """ Get the number of `zepben.evolve.cim.iec61970.base.core.terminal.Terminal`s for this `ConnectivityNode`. """ return len(self._terminals) @property def terminals(self) -> Generator[Terminal, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal`s attached to this `ConnectivityNode` """ return ngen(self._terminals) def get_terminal(self, mrid: str) -> Terminal: """ Get the `zepben.evolve.iec61970.base.core.terminal.Terminal` for this `ConnectivityNode` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` Returns The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._terminals, mrid) def add_terminal(self, terminal: Terminal) -> ConnectivityNode: """ Associate a `terminal.Terminal` with this `ConnectivityNode` `terminal` The `zepben.evolve.iec61970.base.core.terminal.Terminal` to add. Will only add to this object if it is not already associated. 
Returns A reference to this `ConnectivityNode` to allow fluent use. Raises `ValueError` if another `Terminal` with the same `mrid` already exists for this `ConnectivityNode`. """ if self._validate_reference(terminal, self.get_terminal, "A Terminal"): return self self._terminals.append(terminal) return self def remove_terminal(self, terminal: Terminal) -> ConnectivityNode: """ Disassociate `terminal` from this `ConnectivityNode`. `terminal` The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` to disassociate from this `ConnectivityNode`. Returns A reference to this `ConnectivityNode` to allow fluent use. Raises `ValueError` if `terminal` was not associated with this `ConnectivityNode`. """ self._terminals.remove(terminal) return self def clear_terminals(self) -> ConnectivityNode: """ Clear all terminals. Returns A reference to this `ConnectivityNode` to allow fluent use. """ self._terminals.clear() return self def is_switched(self): return self.get_switch() is not None def get_switch(self): for term in self._terminals: try: # All switches should implement is_open _ = term.conducting_equipment.is_open() return term.conducting_equipment except AttributeError: pass return None
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/connectivity_node.py
connectivity_node.py
from __future__ import annotations from typing import Dict, List, Generator from dataclassy import dataclass from zepben.evolve.model.cim.iec61970.base.core.name import Name __all__ = ["NameType"] @dataclass(slots=True) class NameType: """ Type of name. Possible values for attribute 'name' are implementation dependent but standard profiles may specify types. An enterprise may have multiple IT systems each having its own local name for the same object, e.g. a planning system may have different names from an EMS. An object may also have different names within the same IT system, e.g. localName as defined in CIM version 14. The definition from CIM14 is: The localName is a human readable name of the object. It is a free text name local to a node in a naming hierarchy similar to a file directory structure. A power system related naming hierarchy may be: Substation, VoltageLevel, Equipment etc. Children of the same parent in such a hierarchy have names that typically are unique among them. """ name: str """Name of the name type.""" description: str = "" """Description of the name type.""" _names_index: Dict[str, Name] = dict() _names_multi_index: Dict[str, List[Name]] = dict() def __str__(self): return f"NameType(name='{self.name}', description='{self.description}')" @property def names(self) -> Generator[Name, None, None]: """All names of this type.""" for names_ in self._names_multi_index.values(): for name in names_: yield name for name_ in self._names_index.values(): yield name_ def has_name(self, name: str): """Indicates if this :class:`NameType` contains `name`.""" return name in self._names_index or name in self._names_multi_index def get_names(self, name) -> Generator[Name, None, None]: """Get all the :class:`Name` instances for the provided `name`. 
:return: A `Generator` of `Name` """ try: yield self._names_index[name] except KeyError: try: for name_ in self._names_multi_index[name]: yield name_ except KeyError: pass def get_or_add_name(self, name, identified_object): """ Gets a :class:`Name` for the given `name` and `identifiedObject` combination or adds a new :class:`Name` to this :class:`NameType` with the combination and returns the new instance. """ if name in self._names_index: existing = self._names_index[name] if existing.identified_object == identified_object: return existing else: # noinspection PyArgumentList name_obj = Name(name, self, identified_object) self._names_multi_index[name] = [existing, name_obj] del self._names_index[name] return name_obj elif name in self._names_multi_index: for n in self._names_multi_index[name]: if n.identified_object == identified_object: return n # noinspection PyArgumentList name_obj = Name(name, self, identified_object) self._names_multi_index[name].append(name_obj) return name_obj else: # noinspection PyArgumentList name_obj = Name(name, self, identified_object) self._names_index[name] = name_obj return name_obj def remove_name(self, name: Name): """ Removes the `name` from this name type. :return: True if the name instance was successfully removed """ if name.type is not self: return False try: del self._names_index[name.name] return True except KeyError: try: names = self._names_multi_index[name.name] names.remove(name) if not names: del self._names_multi_index[name.name] return True except KeyError: return False def remove_names(self, name: str): """ Removes all :class:`Name` instances associated with name `name`. :return: True if a matching name was removed. """ try: del self._names_index[name] return True except KeyError: try: del self._names_multi_index[name] return True except KeyError: return False def clear_names(self) -> NameType: self._names_index = dict() self._names_multi_index = dict() return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/name_type.py
name_type.py
from __future__ import annotations from typing import Optional, Dict, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Equipment, Terminal, Substation from zepben.evolve.model.cim.iec61970.base.core.connectivity_node_container import ConnectivityNodeContainer from zepben.evolve.util import nlen, ngen, safe_remove_by_id __all__ = ['EquipmentContainer', 'Feeder', 'Site'] class EquipmentContainer(ConnectivityNodeContainer): """ A modeling construct to provide a root class for containing equipment. """ _equipment: Optional[Dict[str, Equipment]] = None """Map of Equipment in this EquipmentContainer by their mRID""" def __init__(self, equipment: List[Equipment] = None, **kwargs): super(EquipmentContainer, self).__init__(**kwargs) if equipment: for eq in equipment: self.add_equipment(eq) def num_equipment(self): """ Returns The number of `zepben.evolve.iec61970.base.core.equipment.Equipment` associated with this `EquipmentContainer` """ return nlen(self._equipment) @property def equipment(self) -> Generator[Equipment, None, None]: """ The `zepben.evolve.iec61970.base.core.equipment.Equipment` contained in this `EquipmentContainer` """ return ngen(self._equipment.values() if self._equipment is not None else None) def get_equipment(self, mrid: str) -> Equipment: """ Get the `zepben.evolve.iec61970.base.core.equipment.Equipment` for this `EquipmentContainer` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.iec61970.base.core.equipment.Equipment` Returns The `zepben.evolve.iec61970.base.core.equipment.Equipment` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ if not self._equipment: raise KeyError(mrid) try: return self._equipment[mrid] except AttributeError: raise KeyError(mrid) def add_equipment(self, equipment: Equipment) -> EquipmentContainer: """ Associate `equipment` with this `EquipmentContainer`. 
`equipment` The `zepben.evolve.iec61970.base.core.equipment.Equipment` to associate with this `EquipmentContainer`. Returns A reference to this `EquipmentContainer` to allow fluent use. Raises `ValueError` if another `Equipment` with the same `mrid` already exists for this `EquipmentContainer`. """ if self._validate_reference(equipment, self.get_equipment, "An Equipment"): return self self._equipment = dict() if self._equipment is None else self._equipment self._equipment[equipment.mrid] = equipment return self def remove_equipment(self, equipment: Equipment) -> EquipmentContainer: """ Disassociate `equipment` from this `EquipmentContainer` `equipment` The `zepben.evolve.iec61970.base.core.equipment.Equipment` to disassociate with this `EquipmentContainer`. Returns A reference to this `EquipmentContainer` to allow fluent use. Raises `KeyError` if `equipment` was not associated with this `EquipmentContainer`. """ self._equipment = safe_remove_by_id(self._equipment, equipment) return self def clear_equipment(self) -> EquipmentContainer: """ Clear all equipment. Returns A reference to this `EquipmentContainer` to allow fluent use. """ self._equipment = None return self def current_feeders(self) -> Generator[Feeder, None, None]: """ Convenience function to find all of the current feeders of the equipment associated with this equipment container. Returns the current feeders for all associated feeders """ seen = set() for equip in self._equipment.values(): for f in equip.current_feeders: if f not in seen: seen.add(f.mrid) yield f def normal_feeders(self) -> Generator[Feeder, None, None]: """ Convenience function to find all of the normal feeders of the equipment associated with this equipment container. 
Returns the normal feeders for all associated feeders """ seen = set() for equip in self._equipment.values(): for f in equip.normal_feeders: if f not in seen: seen.add(f.mrid) yield f class Feeder(EquipmentContainer): """ A collection of equipment for organizational purposes, used for grouping distribution resources. The organization of a feeder does not necessarily reflect connectivity or current operation state. """ _normal_head_terminal: Optional[Terminal] = None """The normal head terminal or terminals of the feeder.""" normal_energizing_substation: Optional[Substation] = None """The substation that nominally energizes the feeder. Also used for naming purposes.""" _current_equipment: Optional[Dict[str, Equipment]] = None def __init__(self, normal_head_terminal: Terminal = None, current_equipment: List[Equipment] = None, **kwargs): super(Feeder, self).__init__(**kwargs) if normal_head_terminal: self.normal_head_terminal = normal_head_terminal if current_equipment: for eq in current_equipment: self.add_current_equipment(eq) @property def normal_head_terminal(self) -> Optional[Terminal]: """The normal head terminal or terminals of the feeder.""" return self._normal_head_terminal @normal_head_terminal.setter def normal_head_terminal(self, term: Optional[Terminal]): if self._normal_head_terminal is None or self._normal_head_terminal is term: self._normal_head_terminal = term else: raise ValueError(f"normal_head_terminal for {str(self)} has already been set to {self._normal_head_terminal}, cannot reset this field to {term}") @property def current_equipment(self) -> Generator[Equipment, None, None]: """ Contained `zepben.evolve.iec61970.base.core.equipment.Equipment` using the current state of the network. 
""" return ngen(self._current_equipment.values() if self._current_equipment is not None else None) def num_current_equipment(self): """ Returns The number of `zepben.evolve.iec61970.base.core.equipment.Equipment` associated with this `Feeder` """ return nlen(self._current_equipment) def get_current_equipment(self, mrid: str) -> Equipment: """ Get the `zepben.evolve.iec61970.base.core.equipment.Equipment` for this `Feeder` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.iec61970.base.core.equipment.Equipment` Returns The `zepben.evolve.iec61970.base.core.equipment.Equipment` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ if not self._current_equipment: raise KeyError(mrid) try: return self._current_equipment[mrid] except AttributeError: raise KeyError(mrid) def add_current_equipment(self, equipment: Equipment) -> Feeder: """ Associate `equipment` with this `Feeder`. `equipment` the `zepben.evolve.iec61970.base.core.equipment.Equipment` to associate with this `Feeder`. Returns A reference to this `Feeder` to allow fluent use. Raises `ValueError` if another `Equipment` with the same `mrid` already exists for this `Feeder`. """ if self._validate_reference(equipment, self.get_current_equipment, "An Equipment"): return self self._current_equipment = dict() if self._current_equipment is None else self._current_equipment self._current_equipment[equipment.mrid] = equipment return self def remove_current_equipment(self, equipment: Equipment) -> Feeder: """ Disassociate `equipment` from this `Feeder` `equipment` The `equipment.Equipment` to disassociate from this `Feeder`. Returns A reference to this `Feeder` to allow fluent use. Raises `KeyError` if `equipment` was not associated with this `Feeder`. """ self._current_equipment = safe_remove_by_id(self._current_equipment, equipment) return self def clear_current_equipment(self) -> Feeder: """ Clear all equipment. 
Returns A reference to this `Feeder` to allow fluent use. """ self._current_equipment = None return self class Site(EquipmentContainer): """ A collection of equipment for organizational purposes, used for grouping distribution resources located at a site. Note this is not a CIM concept - however represents an `EquipmentContainer` in CIM. This is to avoid the use of `EquipmentContainer` as a concrete class. """ pass
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/core/equipment_container.py
equipment_container.py
from __future__ import annotations from typing import Optional, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import RemoteSource from zepben.evolve.model.cim.iec61970.base.core.phase_code import PhaseCode from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.model.cim.iec61970.base.domain.unit_symbol import UnitSymbol __all__ = ["Measurement", "Accumulator", "Analog", "Discrete"] class Measurement(IdentifiedObject): """ A Measurement represents any measured, calculated or non-measured non-calculated quantity. Any piece of equipment may contain Measurements, e.g. a substation may have temperature measurements and door open indications, a transformer may have oil temperature and tank pressure measurements, a bay may contain a number of power flow measurements and a Breaker may contain a switch status measurement. The PSR - Measurement association is intended to capture this use of Measurement and is included in the naming hierarchy based on EquipmentContainer. The naming hierarchy typically has Measurements as leafs, e.g. Substation-VoltageLevel-Bay-Switch-Measurement. Some Measurements represent quantities related to a particular sensor location in the network, e.g. a voltage transformer (PT) at a busbar or a current transformer (CT) at the bar between a breaker and an isolator. The sensing position is not captured in the PSR - Measurement association. Instead it is captured by the Measurement - Terminal association that is used to define the sensing location in the network topology. The location is defined by the connection of the Terminal to ConductingEquipment. If both a Terminal and PSR are associated, and the PSR is of type ConductingEquipment, the associated Terminal should belong to that ConductingEquipment instance. When the sensor location is needed both Measurement-PSR and Measurement-Terminal are used. The Measurement-Terminal association is never used alone. """ # Associations to PSR and Terminal are stored as mRID strings, not object references. power_system_resource_mrid: Optional[str] = None """The MRID of the power system resource that contains the measurement.""" remote_source: Optional[RemoteSource] = None """The `zepben.evolve.cim.iec61970.base.scada.remote_source.RemoteSource` taking the `Measurement`""" terminal_mrid: Optional[str] = None """A measurement may be associated with a terminal in the network.""" phases: PhaseCode = PhaseCode.ABC """Indicates to which phases the measurement applies and avoids the need to use 'measurementType' to also encode phase information (which would explode the types). The phase information in Measurement, along with 'measurementType' and 'phases' uniquely defines a Measurement for a device, based on normal network phase. Their meaning will not change when the computed energizing phasing is changed due to jumpers or other reasons. If the attribute is missing three phases (ABC) shall be assumed.""" unit_symbol: UnitSymbol = UnitSymbol.NONE """The unit of measure of the measured quantity, e.g. volts, amperes, watts. Defaults to `UnitSymbol.NONE`.""" class Accumulator(Measurement): """Accumulator represents an accumulated (counted) Measurement, e.g. an energy value.""" pass class Analog(Measurement): """Analog represents an analog Measurement.""" positive_flow_in: bool = False """If true then this measurement is an active power, reactive power or current with the convention that a positive value measured at the Terminal means power is flowing into the related PowerSystemResource.""" class Discrete(Measurement): """Discrete represents a discrete Measurement, i.e. a Measurement representing discrete values, e.g. a Breaker position.""" pass 
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/meas/measurement.py
measurement.py
from __future__ import annotations from typing import Optional from zepben.evolve.model.cim.iec61970.base.equivalents.equivalent_equipment import EquivalentEquipment __all__ = ["EquivalentBranch"] class EquivalentBranch(EquivalentEquipment): """ The class represents equivalent branches. In cases where a transformer phase shift is modelled and the EquivalentBranch is spanning the same nodes, the impedance quantities for the EquivalentBranch shall consider the needed phase shift. """ negative_r12: Optional[float] = None """ Negative sequence series resistance from terminal sequence 1 to terminal sequence 2. Used for short circuit data exchange according to IEC 60909. EquivalentBranch is a result of network reduction prior to the data exchange. """ negative_r21: Optional[float] = None """ Negative sequence series resistance from terminal sequence 2 to terminal sequence 1. Used for short circuit data exchange according to IEC 60909. EquivalentBranch is a result of network reduction prior to the data exchange. """ negative_x12: Optional[float] = None """ Negative sequence series reactance from terminal sequence 1 to terminal sequence 2. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ negative_x21: Optional[float] = None """ Negative sequence series reactance from terminal sequence 2 to terminal sequence 1. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ positive_r12: Optional[float] = None """ Positive sequence series resistance from terminal sequence 1 to terminal sequence 2. Used for short circuit data exchange according to IEC 60909. EquivalentBranch is a result of network reduction prior to the data exchange. """ positive_r21: Optional[float] = None """ Positive sequence series resistance from terminal sequence 2 to terminal sequence 1. Used for short circuit data exchange according to IEC 60909. EquivalentBranch is a result of network reduction prior to the data exchange. """ positive_x12: Optional[float] = None """ Positive sequence series reactance from terminal sequence 1 to terminal sequence 2. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ positive_x21: Optional[float] = None """ Positive sequence series reactance from terminal sequence 2 to terminal sequence 1. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ r: Optional[float] = None """ Positive sequence series resistance of the reduced branch. """ r21: Optional[float] = None """ Resistance from terminal sequence 2 to terminal sequence 1. Used for steady state power flow. This attribute is optional and represents an unbalanced network such as off-nominal phase shifter. If only EquivalentBranch.r is given, then EquivalentBranch.r21 is assumed equal to EquivalentBranch.r. Usage rule: EquivalentBranch is a result of network reduction prior to the data exchange. """ x: Optional[float] = None """ Positive sequence series reactance of the reduced branch. """ x21: Optional[float] = None """ Reactance from terminal sequence 2 to terminal sequence 1. Used for steady state power flow. This attribute is optional and represents an unbalanced network such as off-nominal phase shifter. If only EquivalentBranch.x is given, then EquivalentBranch.x21 is assumed equal to EquivalentBranch.x. Usage rule: EquivalentBranch is a result of network reduction prior to the data exchange. """ zero_r12: Optional[float] = None """ Zero sequence series resistance from terminal sequence 1 to terminal sequence 2. Used for short circuit data exchange according to IEC 60909. EquivalentBranch is a result of network reduction prior to the data exchange. """ zero_r21: Optional[float] = None """ Zero sequence series resistance from terminal sequence 2 to terminal sequence 1. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ zero_x12: Optional[float] = None """ Zero sequence series reactance from terminal sequence 1 to terminal sequence 2. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ zero_x21: Optional[float] = None """ Zero sequence series reactance from terminal sequence 2 to terminal sequence 1. Used for short circuit data exchange according to IEC 60909. Usage: EquivalentBranch is a result of network reduction prior to the data exchange. """ 
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/base/equivalents/equivalent_branch.py
equivalent_branch.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Substation, Terminal, Loop from zepben.evolve.model.cim.iec61970.base.wires.line import Line from zepben.evolve.util import ngen, get_by_mrid, safe_remove, nlen __all__ = ["Circuit"] class Circuit(Line): """Missing description""" loop: Optional[Loop] = None _end_terminals: Optional[List[Terminal]] = None _end_substations: Optional[List[Substation]] = None def __init__(self, end_terminals: List[Terminal] = None, end_substations: List[Substation] = None, **kwargs): super(Circuit, self).__init__(**kwargs) if end_terminals: for term in end_terminals: self.add_end_terminal(term) if end_substations: for sub in end_substations: self.add_end_substation(sub) @property def end_terminals(self) -> Generator[Terminal, None, None]: """ The `Terminal`s representing the ends for this `Circuit`. """ return ngen(self._end_terminals) @property def end_substations(self) -> Generator[Substation, None, None]: """ The `Substations`s representing the ends for this `Circuit`. """ return ngen(self._end_substations) def num_end_terminals(self): """Return the number of end `Terminal`s associated with this `Circuit`""" return nlen(self._end_terminals) def get_end_terminal(self, mrid: str) -> Terminal: """ Get the `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` for this `Circuit` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` Returns The `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._end_terminals, mrid) def add_end_terminal(self, terminal: Terminal) -> Circuit: """ Associate an `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` with this `Circuit` `terminal` the `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` to associate with this `Circuit`. 
Returns A reference to this `Circuit` to allow fluent use. Raises `ValueError` if another `Terminal` with the same `mrid` already exists for this `Circuit`. """ if self._validate_reference(terminal, self.get_end_terminal, "An Terminal"): return self self._end_terminals = list() if self._end_terminals is None else self._end_terminals self._end_terminals.append(terminal) return self def remove_end_terminal(self, terminal: Terminal) -> Circuit: """ Disassociate `terminal` from this `Circuit` `terminal` the `zepben.evolve.cim.iec61970.base.core.terminal.Terminal` to disassociate from this `Circuit`. Returns A reference to this `Circuit` to allow fluent use. Raises `ValueError` if `terminal` was not associated with this `Circuit`. """ self._end_terminals = safe_remove(self._end_terminals, terminal) return self def clear_end_terminals(self) -> Circuit: """ Clear all end terminals. Returns A reference to this `Circuit` to allow fluent use. """ self._end_terminals = None return self def num_end_substations(self): """Return the number of end `Substation`s associated with this `Circuit`""" return nlen(self._end_substations) def get_end_substation(self, mrid: str) -> Substation: """ Get the `zepben.evolve.cim.iec61970.base.core.substation.Substation` for this `Circuit` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.core.substation.Substation` Returns The `zepben.evolve.cim.iec61970.base.core.substation.Substation` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._end_substations, mrid) def add_end_substation(self, substation: Substation) -> Circuit: """ Associate an `zepben.evolve.cim.iec61970.base.core.substation.Substation` with this `Circuit` `substation` the `zepben.evolve.cim.iec61970.base.core.substation.Substation` to associate with this `Circuit`. Returns A reference to this `Circuit` to allow fluent use. 
Raises `ValueError` if another `Substation` with the same `mrid` already exists for this `Circuit`. """ if self._validate_reference(substation, self.get_end_substation, "An Substation"): return self self._end_substations = list() if self._end_substations is None else self._end_substations self._end_substations.append(substation) return self def remove_end_substation(self, substation: Substation) -> Circuit: """ Disassociate `substation` from this `Circuit` `substation` the `zepben.evolve.cim.iec61970.base.core.substation.Substation` to disassociate from this `Circuit`. Returns A reference to this `Circuit` to allow fluent use. Raises `ValueError` if `substation` was not associated with this `Circuit`. """ self._end_substations = safe_remove(self._end_substations, substation) return self def clear_end_substations(self) -> Circuit: """ Clear all end substations. Returns A reference to this `Circuit` to allow fluent use. """ self._end_substations = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/infiec61970/feeder/circuit.py
circuit.py
from __future__ import annotations from typing import Optional, List, Generator, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Circuit, Substation __all__ = ["Loop"] from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.util import safe_remove, ngen, nlen, get_by_mrid class Loop(IdentifiedObject): """Missing description""" _circuits: Optional[List[Circuit]] = None _substations: Optional[List[Substation]] = None _energizing_substations: Optional[List[Substation]] = None def __init__(self, circuits: List[Circuit] = None, substations: List[Substation] = None, energizing_substations: List[Substation] = None, **kwargs): super(Loop, self).__init__(**kwargs) if circuits: for term in circuits: self.add_circuit(term) if substations: for sub in substations: self.add_substation(sub) if energizing_substations: for sub in energizing_substations: self.add_energizing_substation(sub) @property def circuits(self) -> Generator[Circuit, None, None]: """ Sub-transmission `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit`s that form part of this loop. """ return ngen(self._circuits) @property def substations(self) -> Generator[Substation, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.substation.Substation`s that are powered by this `Loop`. """ return ngen(self._substations) @property def energizing_substations(self) -> Generator[Substation, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.substation.Substation`s that normally energize this `Loop`. 
""" return ngen(self._energizing_substations) def num_circuits(self): """Return the number of end `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit`s associated with this `Loop`""" return nlen(self._circuits) def get_circuit(self, mrid: str) -> Circuit: """ Get the `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit` for this `Loop` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit` Returns The `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._circuits, mrid) def add_circuit(self, circuit: Circuit) -> Loop: """ Associate an `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit` with this `Loop` `circuit` the `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit` to associate with this `Loop`. Returns A reference to this `Loop` to allow fluent use. Raises `ValueError` if another `Circuit` with the same `mrid` already exists for this `Loop`. """ if self._validate_reference(circuit, self.get_circuit, "An Circuit"): return self self._circuits = list() if self._circuits is None else self._circuits self._circuits.append(circuit) return self def remove_circuit(self, circuit: Circuit) -> Loop: """ Disassociate `circuit` from this `Loop` `circuit` the `zepben.evolve.cim.infiec61970.base.core.circuit.Circuit` to disassociate from this `Loop`. Returns A reference to this `Loop` to allow fluent use. Raises `ValueError` if `circuit` was not associated with this `Loop`. """ self._circuits = safe_remove(self._circuits, circuit) return self def clear_circuits(self) -> Loop: """ Clear all end circuits. Returns A reference to this `Loop` to allow fluent use. 
""" self._circuits = None return self def num_substations(self): """Return the number of end `zepben.evolve.cim.iec61970.base.core.substation.Substation`s associated with this `Loop`""" return nlen(self._substations) def get_substation(self, mrid: str) -> Substation: """ Get the `zepben.evolve.cim.iec61970.base.core.substation.Substation` for this `Loop` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.core.substation.Substation` Returns The `zepben.evolve.cim.iec61970.base.core.substation.Substation` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._substations, mrid) def add_substation(self, substation: Substation) -> Loop: """ Associate an `zepben.evolve.cim.iec61970.base.core.substation.Substation` with this `Loop` `substation` the `zepben.evolve.cim.iec61970.base.core.substation.Substation` to associate with this `Loop`. Returns A reference to this `Loop` to allow fluent use. Raises `ValueError` if another `Substation` with the same `mrid` already exists for this `Loop`. """ if self._validate_reference(substation, self.get_substation, "An Substation"): return self self._substations = list() if self._substations is None else self._substations self._substations.append(substation) return self def remove_substation(self, substation: Substation) -> Loop: """ Disassociate `substation` from this `Loop` `substation` the `zepben.evolve.cim.iec61970.base.core.substation.Substation` to disassociate from this `Loop`. Returns A reference to this `Loop` to allow fluent use. Raises `ValueError` if `substation` was not associated with this `Loop`. """ self._substations = safe_remove(self._substations, substation) return self def clear_substations(self) -> Loop: """ Clear all end substations. Returns A reference to this `Loop` to allow fluent use. 
""" self._substations = None return self def num_energizing_substations(self): """Return the number of end `zepben.evolve.cim.iec61970.base.core.substation.Substation`s associated with this `Loop`""" return nlen(self._energizing_substations) def get_energizing_substation(self, mrid: str) -> Substation: """ Get the `zepben.evolve.cim.iec61970.base.core.substation.Substation` for this `Loop` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61970.base.core.substation.Substation` Returns The `zepben.evolve.cim.iec61970.base.core.substation.Substation` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._energizing_substations, mrid) def add_energizing_substation(self, substation: Substation) -> Loop: """ Associate an `zepben.evolve.cim.iec61970.base.core.substation.Substation` with this `Loop` `substation` the `zepben.evolve.cim.iec61970.base.core.substation.Substation` to associate with this `Loop`. Returns A reference to this `Loop` to allow fluent use. Raises `ValueError` if another `Substation` with the same `mrid` already exists for this `Loop`. """ if self._validate_reference(substation, self.get_energizing_substation, "An Substation"): return self self._energizing_substations = list() if self._energizing_substations is None else self._energizing_substations self._energizing_substations.append(substation) return self def remove_energizing_substation(self, substation: Substation) -> Loop: """ Disassociate `substation` from this `Loop` `substation` the `zepben.evolve.cim.iec61970.base.core.substation.Substation` to disassociate from this `Loop`. Returns A reference to this `Loop` to allow fluent use. Raises `ValueError` if `substation` was not associated with this `Loop`. """ self._energizing_substations = safe_remove(self._energizing_substations, substation) return self def clear_energizing_substations(self) -> Loop: """ Clear all end energizing_substations. 
Returns A reference to this `Loop` to allow fluent use. """ self._energizing_substations = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61970/infiec61970/feeder/loop.py
loop.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import CustomerAgreement from zepben.evolve.model.cim.iec61968.common.organisation_role import OrganisationRole from zepben.evolve.model.cim.iec61968.customers.customer_kind import CustomerKind from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove __all__ = ["Customer"] class Customer(OrganisationRole): """ Organisation receiving services from service supplier. """ kind: CustomerKind = CustomerKind.UNKNOWN """Kind of customer""" _customer_agreements: Optional[List[CustomerAgreement]] = None def __init__(self, customer_agreements: List[CustomerAgreement] = None, **kwargs): super(Customer, self).__init__(**kwargs) if customer_agreements: for agreement in customer_agreements: self.add_agreement(agreement) def num_agreements(self) -> int: """ Get the number of `zepben.evolve.iec61968.customers.customer_agreement.CustomerAgreement`s associated with this `Customer`. """ return nlen(self._customer_agreements) @property def agreements(self) -> Generator[CustomerAgreement, None, None]: """ The `zepben.evolve.cim.iec61968.customers.customer_agreement.CustomerAgreement`s for this `Customer`. """ return ngen(self._customer_agreements) def get_agreement(self, mrid: str) -> CustomerAgreement: """ Get the `zepben.evolve.cim.iec61968.customers.customer_agreement.CustomerAgreement` for this `Customer` identified by `mrid`. `mrid` the mRID of the required `customer_agreement.CustomerAgreement` Returns the `zepben.evolve.cim.iec61968.customers.customer_agreement.CustomerAgreement` with the specified `mrid`. Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._customer_agreements, mrid) def add_agreement(self, customer_agreement: CustomerAgreement) -> Customer: """ Associate a `CustomerAgreement` with this `Customer`. `customer_agreement` The `customer_agreement.CustomerAgreement` to associate with this `Customer`. 
Returns A reference to this `Customer` to allow fluent use. Raises `ValueError` if another `CustomerAgreement` with the same `mrid` already exists for this `Customer` """ if self._validate_reference(customer_agreement, self.get_agreement, "A CustomerAgreement"): return self self._customer_agreements = list() if self._customer_agreements is None else self._customer_agreements self._customer_agreements.append(customer_agreement) return self def remove_agreement(self, customer_agreement: CustomerAgreement) -> Customer: """ Disassociate `customer_agreement` from this `Customer`. `customer_agreement` the `customer_agreement.CustomerAgreement` to disassociate with this `Customer`. Returns A reference to this `Customer` to allow fluent use. Raises `ValueError` if `customer_agreement` was not associated with this `Customer`. """ self._customer_agreements = safe_remove(self._customer_agreements, customer_agreement) return self def clear_agreements(self) -> Customer: """ Clear all customer agreements. Returns self """ self._customer_agreements = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/customers/customer.py
customer.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import PricingStructure, Customer from zepben.evolve.model.cim.iec61968.common.document import Agreement from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove __all__ = ["CustomerAgreement"] class CustomerAgreement(Agreement): """ Agreement between the customer and the service supplier to pay for service at a specific service location. It records certain billing information about the type of service provided at the service location and is used during charge creation to determine the type of service. """ _customer: Optional[Customer] = None """The `zepben.evolve.cim.iec61968.customers.customer.Customer` that has this `CustomerAgreement`.""" _pricing_structures: Optional[List[PricingStructure]] = None def __init__(self, customer: Customer = None, pricing_structures: List[PricingStructure] = None, **kwargs): super(CustomerAgreement, self).__init__(**kwargs) if customer: self.customer = customer if pricing_structures: for ps in pricing_structures: self.add_pricing_structure(ps) @property def customer(self): """The `zepben.evolve.cim.iec61968.customers.customer.Customer` that has this `CustomerAgreement`.""" return self._customer @customer.setter def customer(self, cust): if self._customer is None or self._customer is cust: self._customer = cust else: raise ValueError(f"customer for {str(self)} has already been set to {self._customer}, cannot reset this field to {cust}") def num_pricing_structures(self): """ The number of `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure`s associated with this `CustomerAgreement` """ return nlen(self._pricing_structures) @property def pricing_structures(self) -> Generator[PricingStructure, None, None]: """ The `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure`s of this `CustomerAgreement`. 
""" return ngen(self._pricing_structures) def get_pricing_structure(self, mrid: str) -> PricingStructure: """ Get the `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure` for this `CustomerAgreement` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure` Returns the `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._pricing_structures, mrid) def add_pricing_structure(self, ps: PricingStructure) -> CustomerAgreement: """ Associate `ps` with this `CustomerAgreement` `ps` the `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure` to associate with this `CustomerAgreement`. Returns A reference to this `CustomerAgreement` to allow fluent use. Raises `ValueError` if another `PricingStructure` with the same `mrid` already exists for this `CustomerAgreement` """ if self._validate_reference(ps, self.get_pricing_structure, "A PricingStructure"): return self self._pricing_structures = list() if self._pricing_structures is None else self._pricing_structures self._pricing_structures.append(ps) return self def remove_pricing_structure(self, ps: PricingStructure) -> CustomerAgreement: """ Disassociate `ps` from this `CustomerAgreement` `ps` the `zepben.evolve.cim.iec61968.customers.pricing_structure.PricingStructure` to disassociate from this `CustomerAgreement`. Returns A reference to this `CustomerAgreement` to allow fluent use. Raises `ValueError` if `ps` was not associated with this `CustomerAgreement`. """ self._pricing_structures = safe_remove(self._pricing_structures, ps) return self def clear_pricing_structures(self) -> CustomerAgreement: """ Clear all pricing structures. Returns a reference to this `CustomerAgreement` to allow fluent use. """ self._pricing_structures = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/customers/customer_agreement.py
customer_agreement.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Tariff from zepben.evolve.model.cim.iec61968.common.document import Document from zepben.evolve.util import get_by_mrid, nlen, ngen, safe_remove __all__ = ["PricingStructure"] class PricingStructure(Document): """ Grouping of pricing components and prices used in the creation of customer charges and the eligibility criteria under which these terms may be offered to a customer. The reasons for grouping include state, customer classification, site characteristics, classification (i.e. fee price structure, deposit price structure, electric service price structure, etc.) and accounting requirements. """ _tariffs: Optional[List[Tariff]] = None def __init__(self, tariffs: List[Tariff] = None, **kwargs): super(PricingStructure, self).__init__(**kwargs) if tariffs: for tariff in tariffs: self.add_tariff(tariff) def num_tariffs(self): """ Returns The number of `zepben.evolve.cim.iec61968.customers.tariff.Tariff`s associated with this `PricingStructure` """ return nlen(self._tariffs) @property def tariffs(self) -> Generator[Tariff, None, None]: """ The `zepben.evolve.cim.iec61968.customers.tariff.Tariff`s of this `PricingStructure`. """ return ngen(self._tariffs) def get_tariff(self, mrid: str) -> Tariff: """ Get the `zepben.evolve.cim.iec61968.customers.tariff.Tariff` for this `PricingStructure` identified by `mrid` `mrid` the mRID of the required `zepben.evolve.cim.iec61968.customers.tariff.Tariff` Returns The `zepben.evolve.cim.iec61968.customers.tariff.Tariff` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._tariffs, mrid) def add_tariff(self, tariff: Tariff) -> PricingStructure: """ Associate a `zepben.evolve.cim.iec61968.customers.tariff.Tariff` with this `PricingStructure`. 
`tariff` the `zepben.evolve.cim.iec61968.customers.tariff.Tariff` to associate with this `PricingStructure`. Returns A reference to this `PricingStructure` to allow fluent use. Raises `ValueError` if another `Tariff` with the same `mrid` already exists for this `PricingStructure`. """ if self._validate_reference(tariff, self.get_tariff, "A Tariff"): return self self._tariffs = list() if self._tariffs is None else self._tariffs self._tariffs.append(tariff) return self def remove_tariff(self, tariff: Tariff) -> PricingStructure: """ Disassociate `tariff` from this `PricingStructure`. `tariff` the `zepben.evolve.cim.iec61968.customers.tariff.Tariff` to disassociate from this `PricingStructure`. Returns A reference to this `PricingStructure` to allow fluent use. Raises `ValueError` if `tariff` was not associated with this `PricingStructure`. """ self._tariffs = safe_remove(self._tariffs, tariff) return self def clear_tariffs(self) -> PricingStructure: """ Clear all tariffs. Returns A reference to this `PricingStructure` to allow fluent use. """ self._tariffs = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/customers/pricing_structure.py
pricing_structure.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import AssetOrganisationRole from zepben.evolve.model.cim.iec61968.common.location import Location from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject from zepben.evolve.util import get_by_mrid, nlen, ngen, safe_remove __all__ = ["Asset", "AssetContainer"] class Asset(IdentifiedObject): """ Tangible resource of the utility, including power system equipment, various end devices, cabinets, buildings, etc. For electrical network equipment, the role of the asset is defined through PowerSystemResource and its subclasses, defined mainly in the Wires model (refer to IEC61970-301 and model package IEC61970::Wires). Asset description places emphasis on the physical characteristics of the equipment fulfilling that role. """ location: Optional[Location] = None """`zepben.evolve.cim.iec61968.common.location.Location` of this asset""" _organisation_roles: Optional[List[AssetOrganisationRole]] = None def __init__(self, organisation_roles: List[AssetOrganisationRole] = None, **kwargs): super(Asset, self).__init__(**kwargs) if organisation_roles: for role in organisation_roles: self.add_organisation_role(role) def num_organisation_roles(self) -> int: """ Get the number of `zepben.evolve.cim.iec61968.assets.asset_organisation_role.AssetOrganisationRole`s associated with this `Asset`. """ return nlen(self._organisation_roles) @property def organisation_roles(self) -> Generator[AssetOrganisationRole, None, None]: """ The `zepben.evolve.cim.iec61968.assets.asset_organisation_role.AssetOrganisationRole`s of this `Asset`. """ return ngen(self._organisation_roles) def get_organisation_role(self, mrid: str) -> AssetOrganisationRole: """ Get the `AssetOrganisationRole` for this asset identified by `mrid`. 
`mrid` the mRID of the required `zepben.evolve.cim.iec61968.assets.asset_organisation_role.AssetOrganisationRole` Returns The `zepben.evolve.cim.iec61968.assets.asset_organisation_role.AssetOrganisationRole` with the specified `mrid`. Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._organisation_roles, mrid) def add_organisation_role(self, role: AssetOrganisationRole) -> Asset: """ `role` The `zepben.evolve.cim.iec61968.assets.asset_organisation_role.AssetOrganisationRole` to associate with this `Asset`. Returns A reference to this `Asset` to allow fluent use. Raises `ValueError` if another `AssetOrganisationRole` with the same `mrid` already exists in this `Asset` """ if self._validate_reference(role, self.get_organisation_role, "An AssetOrganisationRole"): return self self._organisation_roles = list() if self._organisation_roles is None else self._organisation_roles self._organisation_roles.append(role) return self def remove_organisation_role(self, role: AssetOrganisationRole) -> Asset: """ Disassociate an `AssetOrganisationRole` from this `Asset`. `role` the `zepben.evolve.cim.iec61968.assets.asset_organisation_role.AssetOrganisationRole` to disassociate with this `Asset`. Raises `ValueError` if `role` was not associated with this `Asset`. Returns A reference to this `Asset` to allow fluent use. """ self._organisation_roles = safe_remove(self._organisation_roles, role) return self def clear_organisation_roles(self) -> Asset: """ Clear all organisation roles. Returns self """ self._organisation_roles = None return self class AssetContainer(Asset): """ Asset that is aggregation of other assets such as conductors, transformers, switchgear, land, fences, buildings, equipment, vehicles, etc. """ pass
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/assets/asset.py
asset.py
from __future__ import annotations from typing import List, Optional, Generator, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Streetlight from zepben.evolve.model.cim.iec61968.assets.structure import Structure from zepben.evolve.util import get_by_mrid, ngen, nlen, safe_remove __all__ = ["Pole"] class Pole(Structure): """A Pole Asset""" classification: str = "" """Pole class: 1, 2, 3, 4, 5, 6, 7, H1, H2, Other, Unknown.""" _streetlights: Optional[List[Streetlight]] = None def __init__(self, streetlights: List[Streetlight] = None, **kwargs): super(Pole, self).__init__(**kwargs) if streetlights: for light in streetlights: self.add_streetlight(light) def num_streetlights(self) -> int: """ Get the number of `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight`s associated with this `Pole`. """ return nlen(self._streetlights) @property def streetlights(self) -> Generator[Streetlight, None, None]: """ The `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight`s of this `Pole`. """ return ngen(self._streetlights) def get_streetlight(self, mrid: str) -> Streetlight: """ Get the `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight` for this asset identified by `mrid`. `mrid` the mRID of the required `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight` Returns The `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight` with the specified `mrid`. Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._streetlights, mrid) def add_streetlight(self, streetlight: Streetlight) -> Pole: """ Associate a `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight` with this `Pole` `streetlight` the `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight` to associate with this `Pole`. Returns A reference to this `Pole` to allow fluent use. 
Raises `ValueError` if another `Streetlight` with the same `mrid` already exists in this `Pole` """ if self._validate_reference(streetlight, self.get_streetlight, "A Streetlight"): return self self._streetlights = list() if self._streetlights is None else self._streetlights self._streetlights.append(streetlight) return self def remove_streetlight(self, streetlight: Streetlight) -> Pole: """ Disassociate `streetlight` from this `Pole` `streetlight` the `zepben.evolve.cim.iec61968.assets.streetlight.Streetlight` to disassociate from this `Pole`. Raises `ValueError` if `streetlight` was not associated with this `Pole`. Returns A reference to this `Pole` to allow fluent use. """ self._streetlights = safe_remove(self._streetlights, streetlight) return self def clear_streetlights(self) -> Pole: """ Clear all Streetlights. Returns self """ self._streetlights = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/assets/pole.py
pole.py
from __future__ import annotations from typing import Optional, Generator, List, TYPE_CHECKING if TYPE_CHECKING: from zepben.evolve import Equipment from zepben.evolve.model.cim.iec61968.common.document import Document from zepben.evolve.util import get_by_mrid, nlen, ngen, safe_remove __all__ = ["OperationalRestriction"] class OperationalRestriction(Document): """ A document that can be associated with equipment to describe any sort of restrictions compared with the original manufacturer's specification or with the usual operational practice e.g. temporary maximum loadings, maximum switching current, do not operate if bus couplers are open, etc. In the UK, for example, if a breaker or switch ever mal-operates, this is reported centrally and utilities use their asset systems to identify all the installed devices of the same manufacturer's type. They then apply operational restrictions in the operational systems to warn operators of potential problems. After appropriate inspection and maintenance, the operational restrictions may be removed. """ _equipment: Optional[List[Equipment]] = None def __init__(self, equipment: List[Equipment] = None, **kwargs): super(OperationalRestriction, self).__init__(**kwargs) if equipment: for eq in equipment: self.add_equipment(eq) def num_equipment(self): """ Returns the number of `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` associated with this `OperationalRestriction` """ return nlen(self._equipment) @property def equipment(self) -> Generator[Equipment, None, None]: """ The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` to which this `OperationalRestriction` applies. 
""" return ngen(self._equipment) def get_equipment(self, mrid: str) -> Equipment: """ Get the `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` for this `OperationalRestriction` identified by `mrid` `mrid` The mRID of the required `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` Returns The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` with the specified `mrid` if it exists Raises `KeyError` if `mrid` wasn't present. """ return get_by_mrid(self._equipment, mrid) def add_equipment(self, equipment: Equipment) -> OperationalRestriction: """ Associate an `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` with this `OperationalRestriction` `equipment` The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` to associate with this `OperationalRestriction`. Returns A reference to this `OperationalRestriction` to allow fluent use. Raises `ValueError` if another `Equipment` with the same `mrid` already exists for this `OperationalRestriction`. """ if self._validate_reference(equipment, self.get_equipment, "An Equipment"): return self self._equipment = list() if self._equipment is None else self._equipment self._equipment.append(equipment) return self def remove_equipment(self, equipment: Equipment) -> OperationalRestriction: """ Disassociate `equipment` from this `OperationalRestriction`. `equipment` The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` to disassociate from this `OperationalRestriction`. Returns A reference to this `OperationalRestriction` to allow fluent use. Raises `ValueError` if `equipment` was not associated with this `OperationalRestriction`. """ self._equipment = safe_remove(self._equipment, equipment) return self def clear_equipment(self) -> OperationalRestriction: """ Clear all equipment. Returns A reference to this `OperationalRestriction` to allow fluent use. """ self._equipment = None return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/operations/operational_restriction.py
operational_restriction.py
from __future__ import annotations

import logging
from typing import Optional, Generator, List, TYPE_CHECKING

if TYPE_CHECKING:
    from zepben.evolve import Equipment

from zepben.evolve.model.cim.iec61968.assets.asset import AssetContainer
from zepben.evolve.model.cim.iec61968.common.location import Location
from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject
from zepben.evolve.util import nlen, get_by_mrid, ngen, safe_remove

__all__ = ["Meter", "EndDevice", "UsagePoint"]

logger = logging.getLogger(__name__)


class EndDevice(AssetContainer):
    """
    Asset container that performs one or more end device functions. One type of end device is a meter which can perform metering, load management,
    connect/disconnect, accounting functions, etc. Some end devices, such as ones monitoring and controlling air conditioners, refrigerators, pool pumps
    may be connected to a meter. All end devices may have communication capability defined by the associated communication function(s).

    An end device may be owned by a consumer, a service provider, utility or otherwise.

    There may be a related end device function that identifies a sensor or control point within a metering application or communications systems
    (e.g., water, gas, electricity).

    Some devices may use an optical port that conforms to the ANSI C12.18 standard for communications.
    """

    customer_mrid: Optional[str] = None
    """The `zepben.evolve.cim.iec61968.customers.customer.Customer` owning this `EndDevice`."""

    service_location: Optional[Location] = None
    """Service `zepben.evolve.cim.iec61968.common.location.Location` whose service delivery is measured by this `EndDevice`."""

    # Backing list for the UsagePoint association; kept as None until the first
    # usage point is added so empty collections cost nothing.
    _usage_points: Optional[List[UsagePoint]] = None

    def __init__(self, usage_points: List[UsagePoint] = None, **kwargs):
        """
        `usage_points` An optional list of `UsagePoint`s to associate with this `EndDevice` on construction.
        Remaining keyword arguments are forwarded to `AssetContainer`.
        """
        super(EndDevice, self).__init__(**kwargs)
        if usage_points:
            for up in usage_points:
                # Route through add_usage_point so duplicate-mRID validation applies.
                self.add_usage_point(up)

    def num_usage_points(self) -> int:
        """
        Returns The number of `zepben.evolve.cim.iec61968.metering.metering.UsagePoint`s associated with this `EndDevice`
        """
        return nlen(self._usage_points)

    @property
    def usage_points(self) -> Generator[UsagePoint, None, None]:
        """
        The `zepben.evolve.cim.iec61968.metering.metering.UsagePoint`s associated with this `EndDevice`
        """
        return ngen(self._usage_points)

    def get_usage_point(self, mrid: str) -> UsagePoint:
        """
        Get the `UsagePoint` for this `EndDevice` identified by `mrid`

        `mrid` the mRID of the required `zepben.evolve.cim.iec61968.metering.metering.UsagePoint`
        Returns The `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` with the specified `mrid` if it exists
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._usage_points, mrid)

    def add_usage_point(self, up: UsagePoint) -> EndDevice:
        """
        Associate `up` to this `zepben.evolve.cim.iec61968.metering.metering.EndDevice`.

        `up` the `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` to associate with this `EndDevice`.
        Returns A reference to this `EndDevice` to allow fluent use.
        Raises `ValueError` if another `UsagePoint` with the same `mrid` already exists for this `EndDevice`.
        """
        # NOTE(review): _validate_reference appears to return True when this exact
        # object is already associated, making a repeat add a no-op — confirm against
        # IdentifiedObject._validate_reference.
        if self._validate_reference(up, self.get_usage_point, "A UsagePoint"):
            return self
        # Lazily create the backing list on first association.
        self._usage_points = list() if self._usage_points is None else self._usage_points
        self._usage_points.append(up)
        return self

    def remove_usage_point(self, up: UsagePoint) -> EndDevice:
        """
        Disassociate `up` from this `EndDevice`

        `up` the `zepben.evolve.cim.iec61968.metering.metering.UsagePoint` to disassociate from this `EndDevice`.
        Returns A reference to this `EndDevice` to allow fluent use.
        Raises `ValueError` if `up` was not associated with this `EndDevice`.
        """
        self._usage_points = safe_remove(self._usage_points, up)
        return self

    def clear_usage_points(self) -> EndDevice:
        """
        Clear all usage_points.
        Returns A reference to this `EndDevice` to allow fluent use.
        """
        # Reset to None rather than an empty list to match the lazy-init convention.
        self._usage_points = None
        return self


class UsagePoint(IdentifiedObject):
    """
    Logical or physical point in the network to which readings or events may be attributed.
    Used at the place where a physical or virtual meter may be located; however, it is not required that a meter be present.
    """

    usage_point_location: Optional[Location] = None
    """Service `zepben.evolve.cim.iec61968.common.location.Location` where the service delivered by this `UsagePoint` is consumed."""

    is_virtual: bool = False
    """
    If true, this usage point is virtual, i.e., no physical location exists in the network where a meter could be located to
    collect the meter readings. For example, one may define a virtual usage point to serve as an aggregation of usage for all
    of a company's premises distributed widely across the distribution territory. Otherwise, the usage point is physical,
    i.e., there is a logical point in the network where a meter could be located to collect meter readings.
    """

    connection_category: Optional[str] = None
    """A code used to specify the connection category, e.g., low voltage or low pressure, where the usage point is defined."""

    # Backing lists for the Equipment and EndDevice associations; None until first use.
    _equipment: Optional[List[Equipment]] = None
    _end_devices: Optional[List[EndDevice]] = None

    def __init__(self, equipment: List[Equipment] = None, end_devices: List[EndDevice] = None, **kwargs):
        """
        `equipment` An optional list of `Equipment` to associate with this `UsagePoint` on construction.
        `end_devices` An optional list of `EndDevice`s to associate with this `UsagePoint` on construction.
        Remaining keyword arguments are forwarded to `IdentifiedObject`.
        """
        super(UsagePoint, self).__init__(**kwargs)
        if equipment:
            for eq in equipment:
                self.add_equipment(eq)
        if end_devices:
            for ed in end_devices:
                self.add_end_device(ed)

    def num_equipment(self) -> int:
        """
        Returns The number of `zepben.evolve.cim.iec61970.base.core.equipment.Equipment`s associated with this `UsagePoint`
        """
        return nlen(self._equipment)

    def num_end_devices(self) -> int:
        """
        Returns The number of `zepben.evolve.cim.iec61968.metering.metering.EndDevice`s associated with this `UsagePoint`
        """
        return nlen(self._end_devices)

    @property
    def end_devices(self) -> Generator[EndDevice, None, None]:
        """
        The `EndDevice`'s (Meter's) associated with this `UsagePoint`.
        """
        return ngen(self._end_devices)

    @property
    def equipment(self) -> Generator[Equipment, None, None]:
        """
        The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` associated with this `UsagePoint`.
        """
        return ngen(self._equipment)

    def get_equipment(self, mrid: str) -> Equipment:
        """
        Get the `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` for this `UsagePoint` identified by `mrid`

        `mrid` The mRID of the required `zepben.evolve.cim.iec61970.base.core.equipment.Equipment`
        Returns The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` with the specified `mrid` if it exists
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._equipment, mrid)

    def add_equipment(self, equipment: Equipment) -> UsagePoint:
        """
        Associate an `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` with this `UsagePoint`

        `equipment` The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` to associate with this `UsagePoint`.
        Returns A reference to this `UsagePoint` to allow fluent use.
        Raises `ValueError` if another `Equipment` with the same `mrid` already exists for this `UsagePoint`.
        """
        if self._validate_reference(equipment, self.get_equipment, "An Equipment"):
            return self
        self._equipment = list() if self._equipment is None else self._equipment
        self._equipment.append(equipment)
        return self

    def remove_equipment(self, equipment: Equipment) -> UsagePoint:
        """
        Disassociate an `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` from this `UsagePoint`

        `equipment` The `zepben.evolve.cim.iec61970.base.core.equipment.Equipment` to disassociate from this `UsagePoint`.
        Returns A reference to this `UsagePoint` to allow fluent use.
        Raises `ValueError` if `equipment` was not associated with this `UsagePoint`.
        """
        self._equipment = safe_remove(self._equipment, equipment)
        return self

    def clear_equipment(self) -> UsagePoint:
        """
        Clear all equipment.
        Returns A reference to this `UsagePoint` to allow fluent use.
        """
        self._equipment = None
        return self

    def get_end_device(self, mrid: str) -> EndDevice:
        """
        Get the `EndDevice` for this `UsagePoint` identified by `mrid`

        `mrid` The mRID of the required `zepben.evolve.cim.iec61968.metering.metering.EndDevice`
        Returns The `zepben.evolve.cim.iec61968.metering.metering.EndDevice` with the specified `mrid` if it exists
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._end_devices, mrid)

    def add_end_device(self, end_device: EndDevice) -> UsagePoint:
        """
        Associate an `EndDevice` with this `UsagePoint`

        `end_device` The `zepben.evolve.cim.iec61968.metering.metering.EndDevice` to associate with this `UsagePoint`.
        Returns A reference to this `UsagePoint` to allow fluent use.
        Raises `ValueError` if another `EndDevice` with the same `mrid` already exists for this `UsagePoint`.
        """
        if self._validate_reference(end_device, self.get_end_device, "An EndDevice"):
            return self
        self._end_devices = list() if self._end_devices is None else self._end_devices
        self._end_devices.append(end_device)
        return self

    def remove_end_device(self, end_device: EndDevice) -> UsagePoint:
        """
        Disassociate `end_device` from this `UsagePoint`.

        `end_device` The `zepben.evolve.cim.iec61968.metering.metering.EndDevice` to disassociate from this `UsagePoint`.
        Returns A reference to this `UsagePoint` to allow fluent use.
        Raises `ValueError` if `end_device` was not associated with this `UsagePoint`.
        """
        self._end_devices = safe_remove(self._end_devices, end_device)
        return self

    def clear_end_devices(self) -> UsagePoint:
        """
        Clear all end_devices.
        Returns A reference to this `UsagePoint` to allow fluent use.
        """
        self._end_devices = None
        return self

    def is_metered(self) -> bool:
        """
        Check whether this `UsagePoint` is metered. A `UsagePoint` is metered if it's associated with at least one `EndDevice`.
        Returns True if this `UsagePoint` has an `EndDevice`, False otherwise.
        """
        return nlen(self._end_devices) > 0


class Meter(EndDevice):
    """
    Physical asset that performs the metering role of the usage point. Used for measuring consumption and detection of events.
    """

    @property
    def company_meter_id(self):
        """
        Returns this `Meter`s ID. Currently stored in `zepben.evolve.cim.iec61970.base.core.identified_object.IdentifiedObject.name`
        """
        return self.name

    @company_meter_id.setter
    def company_meter_id(self, meter_id):
        """
        `meter_id` The ID to set for this Meter. Will use `zepben.evolve.cim.iec61970.base.core.identified_object.IdentifiedObject.name`
        as a backing field.
        """
        self.name = meter_id
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/metering/metering.py
metering.py
from __future__ import annotations

from typing import Optional, List, Generator, TYPE_CHECKING

if TYPE_CHECKING:
    from zepben.evolve import PowerTransformerInfo

from zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info import TransformerEndInfo
from zepben.evolve.model.cim.iec61968.assets.asset_info import AssetInfo
from zepben.evolve.util import nlen, ngen, safe_remove, get_by_mrid

__all__ = ["TransformerTankInfo"]


class TransformerTankInfo(AssetInfo):
    """Set of transformer tank data, from an equipment library."""

    power_transformer_info: Optional[PowerTransformerInfo] = None
    """Power transformer data that this tank description is part of."""

    # Backing list for the TransformerEndInfo association; None until first use.
    _transformer_end_infos: Optional[List[TransformerEndInfo]] = None
    """Data for all the ends described by this transformer tank data."""

    def __init__(self, transformer_end_infos: List[TransformerEndInfo] = None, **kwargs):
        """
        `transformer_end_infos` An optional list of `TransformerEndInfo`s to associate with this `TransformerTankInfo` on construction.
        Remaining keyword arguments are forwarded to `AssetInfo`.
        """
        super(TransformerTankInfo, self).__init__(**kwargs)
        if transformer_end_infos:
            for tei in transformer_end_infos:
                # Route through add_transformer_end_info so duplicate-mRID validation applies.
                self.add_transformer_end_info(tei)

    def num_transformer_end_infos(self) -> int:
        """
        Get the number of `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo`s associated
        with this `TransformerTankInfo`.
        """
        return nlen(self._transformer_end_infos)

    @property
    def transformer_end_infos(self) -> Generator[TransformerEndInfo, None, None]:
        """
        The `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo`s of this `TransformerTankInfo`.
        """
        return ngen(self._transformer_end_infos)

    def get_transformer_end_info(self, mrid: str) -> TransformerEndInfo:
        """
        Get the `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo` for this
        `TransformerTankInfo` identified by `mrid`.

        `mrid` the mRID of the required `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo`
        Returns The `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo` with the specified `mrid`.
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._transformer_end_infos, mrid)

    def add_transformer_end_info(self, tei: TransformerEndInfo) -> TransformerTankInfo:
        """
        Associate a `TransformerEndInfo` with this `TransformerTankInfo`.

        `tei` The `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo` to associate with this `TransformerTankInfo`.
        Returns A reference to this `TransformerTankInfo` to allow fluent use.
        Raises `ValueError` if another `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo` with
        the same `mrid` already exists in this `TransformerTankInfo`
        """
        if self._validate_reference(tei, self.get_transformer_end_info, "A TransformerEndInfo"):
            return self
        # Lazily create the backing list on first association.
        self._transformer_end_infos = list() if self._transformer_end_infos is None else self._transformer_end_infos
        self._transformer_end_infos.append(tei)
        return self

    def remove_transformer_end_info(self, tei: TransformerEndInfo) -> TransformerTankInfo:
        """
        Disassociate an `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo` from this
        `TransformerTankInfo`.

        `tei` the `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo` to disassociate from this `TransformerTankInfo`.
        Raises `ValueError` if `tei` was not associated with this `TransformerTankInfo`.
        Returns A reference to this `TransformerTankInfo` to allow fluent use.
        """
        self._transformer_end_infos = safe_remove(self._transformer_end_infos, tei)
        return self

    def clear_transformer_end_infos(self) -> TransformerTankInfo:
        """
        Clears all `zepben.evolve.model.cim.iec61968.assetinfo.transformer_end_info.TransformerEndInfo`.
        Returns self
        """
        # Reset to None rather than an empty list to match the lazy-init convention.
        self._transformer_end_infos = None
        return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/assetinfo/transformer_tank_info.py
transformer_tank_info.py
from __future__ import annotations

import math
from typing import Optional, TYPE_CHECKING, Tuple

if TYPE_CHECKING:
    from zepben.evolve import TransformerTankInfo, NoLoadTest, OpenCircuitTest, ShortCircuitTest

from zepben.evolve.model.cim.iec61968.assets.asset_info import AssetInfo
from zepben.evolve.model.cim.iec61970.base.wires.transformer_star_impedance import TransformerStarImpedance
from zepben.evolve.model.resistance_reactance import ResistanceReactance
from zepben.evolve.model.cim.iec61970.base.wires.winding_connection import WindingConnection

__all__ = ["TransformerEndInfo"]


class TransformerEndInfo(AssetInfo):
    """Transformer end data."""

    connection_kind: WindingConnection = WindingConnection.UNKNOWN_WINDING
    """Kind of connection."""

    emergency_s: Optional[int] = None
    """Apparent power that the winding can carry under emergency conditions (also called long-term emergency power). Unit: VA"""

    end_number: int = 0
    """Number for this transformer end, corresponding to the end's order in the PowerTransformer.vectorGroup attribute. Highest voltage winding
    should be 1."""

    insulation_u: Optional[int] = None
    """Basic insulation level voltage rating. Unit: Volts"""

    phase_angle_clock: Optional[int] = None
    """Winding phase angle where 360 degrees are represented with clock hours, so the valid values are {0, ..., 11}. For example, to express the
    second winding in code 'Dyn11', set attributes as follows: 'endNumber'=2, 'connectionKind' = Yn and 'phaseAngleClock' = 11."""

    r: Optional[float] = None
    """DC resistance. Unit: Ohms"""

    rated_s: Optional[int] = None
    """Normal apparent power rating. Unit: VA"""

    rated_u: Optional[int] = None
    """Rated voltage: phase-phase for three-phase windings, and either phase-phase or phase-neutral for single-phase windings. Unit: Volts"""

    short_term_s: Optional[int] = None
    """Apparent power that this winding can carry for a short period of time (in emergency). Unit: VA"""

    transformer_tank_info: Optional[TransformerTankInfo] = None
    """Transformer tank data that this end description is part of."""

    transformer_star_impedance: Optional[TransformerStarImpedance] = None
    """Transformer star impedance calculated from this transformer end datasheet."""

    energised_end_no_load_tests: Optional[NoLoadTest] = None
    """
    All no-load test measurements in which this transformer end was energised.
    """

    energised_end_short_circuit_tests: Optional[ShortCircuitTest] = None
    """
    All short-circuit test measurements in which this transformer end was short-circuited.
    """

    grounded_end_short_circuit_tests: Optional[ShortCircuitTest] = None
    """
    All short-circuit test measurements in which this transformer end was energised.
    """

    open_end_open_circuit_tests: Optional[OpenCircuitTest] = None
    """
    All open-circuit test measurements in which this transformer end was not excited.
    """

    energised_end_open_circuit_tests: Optional[OpenCircuitTest] = None
    """
    All open-circuit test measurements in which this transformer end was excited.
    """

    def resistance_reactance(self) -> Optional[ResistanceReactance]:
        """
        Get the `ResistanceReactance` for this `TransformerEndInfo` from either the pre-calculated `transformer_star_impedance` or
        calculated from the associated test data.

        Returns the `ResistanceReactance` for this `TransformerEndInfo` or None if it could not be calculated
        """
        if self.transformer_star_impedance is not None:
            # Prefer the datasheet star impedance, filling in any missing values from the test data.
            return self.transformer_star_impedance.resistance_reactance().merge_if_incomplete(lambda: self.calculate_resistance_reactance_from_tests())
        else:
            return self.calculate_resistance_reactance_from_tests()

    def calculate_resistance_reactance_from_tests(self) -> Optional[ResistanceReactance]:
        """
        Get the `ResistanceReactance` for this `TransformerEndInfo` calculated from the associated test data.

        Returns the `ResistanceReactance` for this `TransformerEndInfo` or None if it could not be calculated

        Calculation of r0 and x0 from Test data is not supported. r0 and x0 must be populated directly in the
        associated TransformerStarImpedance
        """
        # rated_u and rated_s are used as divisors below, so bail out when either is
        # missing or zero (`not` covers both None and 0).
        if not self.rated_u or not self.rated_s:
            return None

        def calculate_x(voltage: Optional[float], r: Optional[float]) -> Optional[float]:
            # Derive reactance from the test voltage (a percentage) and the resistance,
            # rounded to 2 decimal places.
            if voltage is None or r is None:
                return None

            return round(math.sqrt((((voltage / 100) * (self.rated_u ** 2) / self.rated_s) ** 2) - (r ** 2)), 2)

        def calculate_r_x_from_test(short_circuit_test: ShortCircuitTest) -> Optional[Tuple[float, float]]:
            # Resistance comes from the ohmic part of the test voltage when available,
            # otherwise from the measured loss; both rounded to 2 decimal places.
            if short_circuit_test is None:
                return None
            elif short_circuit_test.voltage_ohmic_part is not None:
                r = round((short_circuit_test.voltage_ohmic_part * (self.rated_u ** 2)) / (self.rated_s * 100), 2)
            elif short_circuit_test.loss is not None:
                r = round(short_circuit_test.loss * ((self.rated_u / self.rated_s) ** 2), 2)
            else:
                return None
            return r, calculate_x(short_circuit_test.voltage, r)

        rr = ResistanceReactance()

        # Positive-sequence values from the energised-end test.
        rx = calculate_r_x_from_test(self.energised_end_short_circuit_tests)
        if rx is not None:
            rr.r, rr.x = rx

        # Zero-sequence values from the grounded-end test.
        r0x0 = calculate_r_x_from_test(self.grounded_end_short_circuit_tests)
        if r0x0 is not None:
            rr.r0, rr.x0 = r0x0

        return rr if not rr.is_empty() else None
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/assetinfo/transformer_end_info.py
transformer_end_info.py
from __future__ import annotations

from typing import List, Optional, Generator

from zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info import TransformerTankInfo
from zepben.evolve.model.cim.iec61968.assets.asset_info import AssetInfo
from zepben.evolve.util import nlen, ngen, get_by_mrid, safe_remove

__all__ = ["PowerTransformerInfo"]


class PowerTransformerInfo(AssetInfo):
    """Set of power transformer data, from an equipment library."""

    # Backing list for the TransformerTankInfo association; None until first use.
    _transformer_tank_infos: Optional[List[TransformerTankInfo]] = None
    """Data for all the tanks described by this power transformer data."""

    def __init__(self, transformer_tank_infos: List[TransformerTankInfo] = None, **kwargs):
        """
        `transformer_tank_infos` An optional list of `TransformerTankInfo`s to associate with this `PowerTransformerInfo` on construction.
        Remaining keyword arguments are forwarded to `AssetInfo`.
        """
        super(PowerTransformerInfo, self).__init__(**kwargs)
        if transformer_tank_infos:
            for ti in transformer_tank_infos:
                # Route through add_transformer_tank_info so duplicate-mRID validation applies.
                self.add_transformer_tank_info(ti)

    def num_transformer_tank_infos(self) -> int:
        """
        Get the number of `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo`s associated
        with this `PowerTransformerInfo`.
        """
        return nlen(self._transformer_tank_infos)

    @property
    def transformer_tank_infos(self) -> Generator[TransformerTankInfo, None, None]:
        """
        The `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo`s of this `PowerTransformerInfo`.
        """
        return ngen(self._transformer_tank_infos)

    def get_transformer_tank_info(self, mrid: str) -> TransformerTankInfo:
        """
        Get the `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo` for this
        `PowerTransformerInfo` identified by `mrid`.

        `mrid` the mRID of the required `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo`
        Returns The `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo` with the specified `mrid`.
        Raises `KeyError` if `mrid` wasn't present.
        """
        return get_by_mrid(self._transformer_tank_infos, mrid)

    def add_transformer_tank_info(self, tti: TransformerTankInfo) -> PowerTransformerInfo:
        """
        Associate a `TransformerTankInfo` with this `PowerTransformerInfo`.

        `tti` The `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo` to associate with this `PowerTransformerInfo`.
        Returns A reference to this `PowerTransformerInfo` to allow fluent use.
        Raises `ValueError` if another `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo` with
        the same `mrid` already exists in this `PowerTransformerInfo`
        """
        if self._validate_reference(tti, self.get_transformer_tank_info, "A TransformerTankInfo"):
            return self
        # Lazily create the backing list on first association.
        self._transformer_tank_infos = list() if self._transformer_tank_infos is None else self._transformer_tank_infos
        self._transformer_tank_infos.append(tti)
        return self

    def remove_transformer_tank_info(self, tti: TransformerTankInfo) -> PowerTransformerInfo:
        """
        Disassociate an `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo` from this
        `PowerTransformerInfo`.

        `tti` the `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo` to disassociate from this `PowerTransformerInfo`.
        Raises `ValueError` if `tti` was not associated with this `PowerTransformerInfo`.
        Returns A reference to this `Asset` to allow fluent use.
        """
        self._transformer_tank_infos = safe_remove(self._transformer_tank_infos, tti)
        return self

    def clear_transformer_tank_infos(self) -> PowerTransformerInfo:
        """
        Clears all `zepben.evolve.model.cim.iec61968.assetinfo.transformer_tank_info.TransformerTankInfo`.
        Returns self
        """
        # Reset to None rather than an empty list to match the lazy-init convention.
        self._transformer_tank_infos = None
        return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/assetinfo/power_transformer_info.py
power_transformer_info.py
from __future__ import annotations

from typing import List, Optional, Generator

from dataclassy import dataclass

from zepben.evolve.model.cim.iec61970.base.core.identified_object import IdentifiedObject
from zepben.evolve.util import require, nlen, ngen, safe_remove

__all__ = ["PositionPoint", "Location", "StreetAddress", "TownDetail", "StreetDetail"]


@dataclass(slots=True, frozen=True)
class PositionPoint(object):
    """
    Set of spatial coordinates that determine a point, defined in WGS84 (latitudes and longitudes).

    Use a single position point instance to describe a point-oriented location.
    Use a sequence of position points to describe a line-oriented object (physical location of non-point oriented objects like cables or lines),
    or area of an object (like a substation or a geographical zone - in this case, have first and last position point with the same values).
    """

    x_position: float
    """X axis position - longitude"""
    y_position: float
    """Y axis position - latitude"""

    def __init__(self):
        # NOTE(review): dataclassy appears to invoke this after field assignment
        # (post-init style), so it only validates the already-set coordinates — confirm
        # against the dataclassy documentation.
        require(-90.0 <= self.y_position <= 90.0, lambda: f"Latitude is out of range. Expected -90 to 90, got {self.y_position}.")
        require(-180.0 <= self.x_position <= 180.0, lambda: f"Longitude is out of range. Expected -180 to 180, got {self.x_position}.")

    def __str__(self):
        return f"{self.x_position}:{self.y_position}"

    @property
    def longitude(self):
        return self.x_position

    @property
    def latitude(self):
        return self.y_position


@dataclass(slots=True)
class TownDetail(object):
    """
    Town details, in the context of address.
    """

    name: Optional[str] = None
    """Town name."""
    state_or_province: Optional[str] = None
    """Name of the state or province."""

    def all_fields_null_or_empty(self) -> bool:
        """Check to see if all fields of this `TownDetail` are null or empty."""
        return not (self.name or self.state_or_province)


@dataclass(slots=True)
class StreetDetail(object):
    """
    Street details, in the context of address.
    """

    building_name: str = ""
    """
    (if applicable) In certain cases the physical location of the place of interest does not have a direct point of entry from the street,
    but may be located inside a larger structure such as a building, complex, office block, apartment, etc.
    """
    floor_identification: str = ""
    """The identification by name or number, expressed as text, of the floor in the building as part of this address."""
    name: str = ""
    """Name of the street."""
    number: str = ""
    """Designator of the specific location on the street."""
    suite_number: str = ""
    """Number of the apartment or suite."""
    type: str = ""
    """Type of street. Examples include: street, circle, boulevard, avenue, road, drive, etc."""
    display_address: str = ""
    """The address as it should be displayed to a user."""

    def all_fields_empty(self) -> bool:
        """Check to see if all fields of this `StreetDetail` are empty."""
        return not (
            self.building_name or
            self.floor_identification or
            self.name or
            self.number or
            self.suite_number or
            self.type or
            self.display_address
        )


@dataclass(slots=True)
class StreetAddress(object):
    """
    General purpose street and postal address information.
    """

    postal_code: str = ""
    """Postal code for the address."""
    town_detail: Optional[TownDetail] = None
    """Optional `TownDetail` for this address."""
    po_box: str = ""
    """Post office box for the address."""
    street_detail: Optional[StreetDetail] = None
    """Optional `StreetDetail` for this address."""


class Location(IdentifiedObject):
    """
    The place, scene, or point of something where someone or something has been, is, and/or will be at a given moment in time.
    It can be defined with one or more `PositionPoint`'s.
    """

    main_address: Optional[StreetAddress] = None
    """Main address of the location."""

    # Ordered backing list of PositionPoints; None until the first point is added.
    _position_points: Optional[List[PositionPoint]] = None

    def __init__(self, position_points: List[PositionPoint] = None, **kwargs):
        """
        `position_points` A list of `PositionPoint`s to associate with this `Location`.
        """
        super(Location, self).__init__(**kwargs)
        if position_points:
            for point in position_points:
                self.add_point(point)

    def num_points(self) -> int:
        """
        Returns The number of `PositionPoint`s in this `Location`
        """
        return nlen(self._position_points)

    @property
    def points(self) -> Generator[PositionPoint, None, None]:
        """
        Returns Generator over the `PositionPoint`s of this `Location`.
        """
        for point in ngen(self._position_points):
            yield point

    def get_point(self, sequence_number: int) -> Optional[PositionPoint]:
        """
        Get the `sequence_number` `PositionPoint` for this `Location`.

        `sequence_number` The sequence number of the `PositionPoint` to get.
        Returns The `PositionPoint` identified by `sequence_number`, or None if `sequence_number` is out of range for this
        `Location`'s points.
        """
        return self._position_points[sequence_number] if 0 <= sequence_number < nlen(self._position_points) else None

    def __getitem__(self, item):
        # Index access delegates to get_point, so out-of-range returns None rather than raising.
        return self.get_point(item)

    def add_point(self, point: PositionPoint) -> Location:
        """
        Associate a `PositionPoint` with this `Location`, assigning it a sequence_number of `num_points`.

        `point` The `PositionPoint` to associate with this `Location`.
        Returns A reference to this `Location` to allow fluent use.
        """
        # Appending is inserting at the end.
        return self.insert_point(point)

    def insert_point(self, point: PositionPoint, sequence_number: int = None) -> Location:
        """
        Associate a `PositionPoint` with this `Location`

        `point` The `PositionPoint` to associate with this `Location`.
        `sequence_number` The sequence number of the `PositionPoint`. Defaults to `num_points()` (append).
        Returns A reference to this `Location` to allow fluent use.
        Raises `ValueError` if `sequence_number` < 0 or > `num_points()`.
        """
        if sequence_number is None:
            sequence_number = self.num_points()
        require(0 <= sequence_number <= self.num_points(),
                lambda: f"Unable to add PositionPoint to {str(self)}. Sequence number {sequence_number} "
                        f"is invalid. Expected a value between 0 and {self.num_points()}. Make sure you are "
                        f"adding the points in the correct order and there are no gaps in the numbering.")
        # Lazily create the backing list on first insertion.
        self._position_points = list() if self._position_points is None else self._position_points
        self._position_points.insert(sequence_number, point)
        return self

    def __setitem__(self, key, value):
        # Item assignment inserts at the given sequence number (it does not replace).
        return self.insert_point(value, key)

    def remove_point(self, point: PositionPoint) -> Location:
        """
        Remove a `PositionPoint` from this `Location`

        `point` The `PositionPoint` to remove.
        Raises `ValueError` if `point` was not part of this `Location`
        Returns A reference to this `Location` to allow fluent use.
        """
        self._position_points = safe_remove(self._position_points, point)
        return self

    def clear_points(self) -> Location:
        """
        Clear all points.
        Returns A reference to this `Location` to allow fluent use.
        """
        self._position_points = None
        return self
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/cim/iec61968/common/location.py
location.py
import abc from collections import Counter from dataclasses import dataclass, field from functools import reduce from typing import Set, Tuple, FrozenSet, Dict, Callable, Union, TypeVar, Any, List, Generic, Optional, Iterable from zepben.evolve import Traversal, LifoQueue, Junction, BusbarSection, EquivalentBranch from zepben.evolve.model.cim.iec61970.base.core.conducting_equipment import ConductingEquipment from zepben.evolve.model.cim.iec61970.base.core.terminal import Terminal from zepben.evolve.model.cim.iec61970.base.wires.aclinesegment import AcLineSegment from zepben.evolve.model.cim.iec61970.base.wires.energy_consumer import EnergyConsumer from zepben.evolve.model.cim.iec61970.base.wires.energy_source import EnergySource from zepben.evolve.model.cim.iec61970.base.wires.power_electronics_connection import PowerElectronicsConnection from zepben.evolve.model.cim.iec61970.base.wires.power_transformer import PowerTransformer, PowerTransformerEnd from zepben.evolve.model.cim.iec61970.base.wires.switch import Switch from zepben.evolve.services.network.network_service import NetworkService __all__ = [ "BusBranchNetworkCreationValidator", "BusBranchNetworkCreator", "BusBranchNetworkCreationMappings", "BusBranchNetworkCreationResult", "TerminalGrouping" ] BBN = TypeVar('BBN') # Bus-Branch Network TN = TypeVar('TN') # Topological Node TB = TypeVar('TB') # Topological Branch EB = TypeVar('EB') # Equivalent Branch PT = TypeVar('PT') # Power Transformer ES = TypeVar('ES') # Energy Source EC = TypeVar('EC') # Energy Consumer PEC = TypeVar('PEC') # Power Electronics Connection class BusBranchNetworkCreationValidator(Generic[BBN, TN, TB, EB, PT, ES, EC, PEC], metaclass=abc.ABCMeta): """ Validator used to determine if node-breaker network data is fit for the creation of a bus-branch network. 
""" @classmethod def __subclasshook__(cls, subclass): return (hasattr(subclass, "is_valid_network_data") and callable(subclass.is_valid_network_data) and hasattr(subclass, "is_valid_topological_node_data") and callable(subclass.is_valid_topological_node_data) and hasattr(subclass, "is_valid_topological_branch_data") and callable(subclass.is_valid_topological_branch_data) and hasattr(subclass, "is_valid_equivalent_branch_data") and callable(subclass.is_valid_topological_branch_data) and hasattr(subclass, "is_valid_power_transformer_data") and callable(subclass.is_valid_power_transformer_data) and hasattr(subclass, "is_valid_energy_source_data") and callable(subclass.is_valid_energy_source_data) and hasattr(subclass, "is_valid_energy_consumer_data") and callable(subclass.is_valid_energy_consumer_data) and hasattr(subclass, "is_valid_power_electronics_connection_data") and callable(subclass.is_valid_power_electronics_connection_data) or NotImplemented) @abc.abstractmethod def is_valid_network_data(self, node_breaker_network: NetworkService) -> bool: """ Validates if provided data is fit for the creation of a bus-branch network. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. """ raise NotImplementedError @abc.abstractmethod def is_valid_topological_node_data( self, bus_branch_network: BBN, base_voltage: Optional[int], collapsed_conducting_equipment: FrozenSet[ConductingEquipment], border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal], node_breaker_network: NetworkService ) -> bool: """ Validates if provided data is fit for the creation of a topological node. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. 
""" raise NotImplementedError @abc.abstractmethod def is_valid_topological_branch_data( self, bus_branch_network: BBN, connected_topological_nodes: Tuple[TN, TN], length: Optional[float], collapsed_ac_line_segments: FrozenSet[AcLineSegment], border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal], node_breaker_network: NetworkService ) -> bool: """ Validates if provided data is fit for the creation of a topological branch. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. """ raise NotImplementedError @abc.abstractmethod def is_valid_equivalent_branch_data( self, bus_branch_network: BBN, connected_topological_nodes: List[TN], equivalent_branch: EquivalentBranch, node_breaker_network: NetworkService ) -> bool: """ Validates if provided data is fit for the creation of an equivalent branch. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. """ raise NotImplementedError @abc.abstractmethod def is_valid_power_transformer_data( self, bus_branch_network: BBN, power_transformer: PowerTransformer, ends_to_topological_nodes: List[Tuple[PowerTransformerEnd, Optional[TN]]], node_breaker_network: NetworkService ) -> bool: """ Validates if provided data is fit for the creation of a power transformer. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. """ raise NotImplementedError @abc.abstractmethod def is_valid_energy_source_data( self, bus_branch_network: BBN, energy_source: EnergySource, connected_topological_node: TN, node_breaker_network: NetworkService ) -> bool: """ Validates if provided data is fit for the creation of an energy source. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. 
""" raise NotImplementedError @abc.abstractmethod def is_valid_energy_consumer_data( self, bus_branch_network: BBN, energy_consumer: EnergyConsumer, connected_topological_node: TN, node_breaker_network: NetworkService, ) -> bool: """ Validates if provided data is fit for the creation of an energy consumer. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. """ raise NotImplementedError @abc.abstractmethod def is_valid_power_electronics_connection_data( self, bus_branch_network: BBN, power_electronics_connection: PowerElectronicsConnection, connected_topological_node: TN, node_breaker_network: NetworkService ) -> bool: """ Validates if provided data is fit for the creation of a power electronics connection. NOTE: Refer to class `BusBranchNetworkCreator` for parameter information. :return: Whether data is valid or not. """ raise NotImplementedError BNV = TypeVar('BNV', bound=BusBranchNetworkCreationValidator) # Subtype of BusBranchNetworkCreationValidator class BusBranchNetworkCreator(Generic[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], metaclass=abc.ABCMeta): """Contains the logic needed to generate a target bus-branch network from a source `NetworkService`. NOTE: All bus-branch network elements returned from the creators must have a uuid (universally unique identifier). This is needed to prevent collisions when generating the mappings object between the source `NetworkService` and the target bus-branch network. Generic Types: - BBN := Type for the object used to represent the bus-branch network. - TN := Type for the object used to represent a topological node in the bus-branch network. - TB := Type for the object used to represent a topological branch in the bus-branch network. - EB := Type for the object used to represent an equivalent branch in the bus-branch network. - PT := Type for the object used to represent a power transformer in the bus-branch network. 
- ES := Type for the object used to represent an energy source in the bus-branch network. - EC := Type for the object used to represent an energy consumer in the bus-branch network. - PEC := Type for the object used to represent a power electronics connection in the bus-branch network. - BNV := Type for the validator instance used in the creation of the bus-branch network. """ @classmethod def __subclasshook__(cls, subclass): return (hasattr(subclass, "bus_branch_network_creator") and callable(subclass.bus_branch_network_creator) and hasattr(subclass, "topological_node_creator") and callable(subclass.topological_node_creator) and hasattr(subclass, "topological_branch_creator") and callable(subclass.topological_branch_creator) and hasattr(subclass, "equivalent_branch_creator") and callable(subclass.topological_branch_creator) and hasattr(subclass, "power_transformer_creator") and callable(subclass.power_transformer_creator) and hasattr(subclass, "energy_source_creator") and callable(subclass.energy_source_creator) and hasattr(subclass, "energy_consumer_creator") and callable(subclass.energy_consumer_creator) and hasattr(subclass, "power_electronics_connection_creator") and callable(subclass.power_electronics_connection_creator) and hasattr(subclass, "validator_creator") and callable(subclass.validator_creator) or NotImplemented) @abc.abstractmethod def bus_branch_network_creator(self, node_breaker_network: NetworkService) -> BBN: """ Creates an empty target bus-branch network instance of type BBN. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: Target bus-branch network of type BBN. 
""" raise NotImplementedError @abc.abstractmethod def topological_node_creator( self, bus_branch_network: BBN, base_voltage: Optional[int], collapsed_conducting_equipment: FrozenSet[ConductingEquipment], border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal], node_breaker_network: NetworkService ) -> Tuple[Any, TN]: """ Callback used to create a topological node instance of type TN. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. :param base_voltage: Base voltage value to be used for the topological node in Volts. :param collapsed_conducting_equipment: Set that contains all instances of `ConductingEquipment` being collapsed in this topological node. :param border_terminals: Set that contains all instances of `Terminal` that connect this topological node to other equipment. :param inner_terminals: Set that contains all instances of `Terminal` collapsed in this topological node. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A 2-tuple with the first element being an id for the topological node and the second element being an instance of type TN that represents a topological node in the target bus-branch network. This instance will be passed into the appropriate bus-branch model element creators for the elements that are connected to this topological node. """ raise NotImplementedError @abc.abstractmethod def topological_branch_creator( self, bus_branch_network: BBN, connected_topological_nodes: Tuple[TN, TN], length: Optional[float], collapsed_ac_line_segments: FrozenSet[AcLineSegment], border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal], node_breaker_network: NetworkService ) -> Tuple[Any, TB]: """ Callback used to create a topological branch instance in target bus-branch network. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. 
:param connected_topological_nodes: Instances of type TN connected to this topological branch sorted by `FeederDirection`. :param length: Length of the topological branch in meters. :param collapsed_ac_line_segments: Set that contains all instances of `AcLineSegment` being collapsed in this topological branch. e.g. connected lines with the same impedance values. :param border_terminals: Set that contains all instances of `Terminal` that connect this topological branch to other equipment. :param inner_terminals: Set that contains all instances of `Terminal` collapsed in this topological branch. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A 2-tuple with the first element being an id for the topological branch and the second element being an instance of type TB that represents a topological branch in the target bus-branch network. """ raise NotImplementedError @abc.abstractmethod def equivalent_branch_creator( self, bus_branch_network: BBN, connected_topological_nodes: List[TN], equivalent_branch: EquivalentBranch, node_breaker_network: NetworkService ) -> Tuple[Any, EB]: """ Callback used to create an equivalent branch instance in target bus-branch network. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. :param connected_topological_nodes: Instances of type TN connected to this topological branch sorted by `FeederDirection`. :param equivalent_branch: Instance of `EquivalentBranch` used to generate the equivalent branch in target bus-branch network. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A 2-tuple with the first element being an id for the topological branch and the second element being an instance of type TB that represents a topological branch in the target bus-branch network. 
""" raise NotImplementedError @abc.abstractmethod def power_transformer_creator( self, bus_branch_network: BBN, power_transformer: PowerTransformer, ends_to_topological_nodes: List[Tuple[PowerTransformerEnd, Optional[TN]]], node_breaker_network: NetworkService ) -> Dict[Any, PT]: """ Callback used to create a power transformer instance in target bus-branch network. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. :param power_transformer: Instance of `PowerTransformer` used to generate power transformer in target bus-branch network. :param ends_to_topological_nodes: List holding power transformer ends with the topological nodes they are connected to sorted by `FeederDirection`. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A dictionary with keys being uuids for the instance/s of type PT that represents a power transformer in the target bus-branch network. """ raise NotImplementedError @abc.abstractmethod def energy_source_creator( self, bus_branch_network: BBN, energy_source: EnergySource, connected_topological_node: TN, node_breaker_network: NetworkService ) -> Dict[Any, ES]: """ Callback used to create an energy source instance in target bus-branch network. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. :param energy_source: Instance of `EnergySource` used to generate energy source in target bus-branch network. :param connected_topological_node: Topological node of type TN that is connected to this energy source. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A dictionary with keys being uuids for the instance/s of type ES that represents an energy source in the target bus-branch network. 
""" raise NotImplementedError @abc.abstractmethod def energy_consumer_creator( self, bus_branch_network: BBN, energy_consumer: EnergyConsumer, connected_topological_node: TN, node_breaker_network: NetworkService, ) -> Dict[Any, EC]: """ Callback used to pass all the required values to generate an energy consumer object. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. :param energy_consumer: Instance of `EnergyConsumer` used to generate energy consumer in target bus-branch network. :param connected_topological_node: Topological node of type TN that is connected to this energy consumer. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A dictionary with keys being uuids for the instance/s of type EC that represents an energy consumer in the target bus-branch network. """ raise NotImplementedError @abc.abstractmethod def power_electronics_connection_creator( self, bus_branch_network: BBN, power_electronics_connection: PEC, connected_topological_node: TN, node_breaker_network: NetworkService ) -> Dict[Any, PEC]: """ Callback used to pass all the required values to generate a power electronics connection object. :param bus_branch_network: Instance of type BBN being used as a target bus-branch network. :param power_electronics_connection: Instance of `PowerElectronicsConnection` used to generate power electronics connection in target bus-branch network. :param connected_topological_node: Topological node of type TN that is connected to this power electronics connection. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. :return: A dictionary with keys being uuids for the instance/s of type PEC that represents a power electronics connection in the target bus-branch network. 
""" raise NotImplementedError @abc.abstractmethod def validator_creator(self) -> BNV: """ Callback used to create 'BusBranchNetworkCreationValidator' instance used for validation while creating a bus-branch network. :return: Instance of type 'BusBranchNetworkCreationValidator'. """ raise NotImplementedError # noinspection PyMethodMayBeStatic def has_negligible_impedance(self, ce: ConductingEquipment) -> bool: """ Callback used to evaluate if an instance of `ConductingEquipment` has negligible impedance. :param ce: `ConductingEquipment` instance whose impedance is being evaluated. :return: True if 'ce' has negligible impedance, False otherwise. """ if isinstance(ce, AcLineSegment): return ce.length == 0 if isinstance(ce, Switch): return not ce.is_open() if isinstance(ce, Junction) or isinstance(ce, BusbarSection): return True if isinstance(ce, EquivalentBranch): return _is_no_impedance_branch(ce) return False async def create(self, node_breaker_network: NetworkService) -> 'BusBranchNetworkCreationResult[BBN, BNV]': return await _create_bus_branch_network(self, node_breaker_network) CE = TypeVar("CE", bound=ConductingEquipment) @dataclass() class TerminalGrouping(Generic[CE]): border_terminals: Set[Terminal] = field(default_factory=set) inner_terminals: Set[Terminal] = field(default_factory=set) conducting_equipment_group: Set[CE] = field(default_factory=set) def terminals(self) -> Set[Terminal]: return {*self.border_terminals, *self.inner_terminals} class BusBranchToNodeBreakerMappings: def __init__(self): self.topological_nodes: Dict[Any, TerminalGrouping[ConductingEquipment]] = {} self.topological_branches: Dict[Any, TerminalGrouping[AcLineSegment]] = {} self.equivalent_branches: Dict[Any, Set[EquivalentBranch]] = {} self.power_transformers: Dict[Any, Set[PowerTransformer]] = {} self.energy_sources: Dict[Any, Set[EnergySource]] = {} self.energy_consumers: Dict[Any, Set[EnergyConsumer]] = {} self.power_electronics_connections: Dict[Any, 
Set[PowerElectronicsConnection]] = {} class NodeBreakerToBusBranchMappings: def __init__(self): self.objects: Dict[str, Set[Any]] = {} class BusBranchNetworkCreationMappings: """ Holds mappings between a bus-branch network (bbn) and a node-breaker network (nbn) of type `NetworkService`. """ def __init__(self): self.to_nbn = BusBranchToNodeBreakerMappings() self.to_bbn = NodeBreakerToBusBranchMappings() def _add_to_mapping(mapping: Dict[Any, Set[Any]], uuid: Any, obj_to_add: Any) -> None: if uuid not in mapping: mapping[uuid] = set() mapping[uuid].add(obj_to_add) class BusBranchNetworkCreationResult(Generic[BBN, BNV]): """ Represents the results of creating a bus-branch network from a node-breaker network. """ def __init__(self, validator: BNV): self.validator: BNV = validator self.mappings: BusBranchNetworkCreationMappings = BusBranchNetworkCreationMappings() self.network: BBN = None self.was_successful: bool = False async def _create_bus_branch_network( bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], node_breaker_network: NetworkService ) -> BusBranchNetworkCreationResult[BBN, BNV]: """ Creates bus-branch network. :param bus_branch_network_creator: Instance of type `BusBranchNetworkCreator` used to generate the target bus-branch network. :param node_breaker_network: Instance of type `NetworkService` being used as a source node-breaker network. 
:return: `CreationResult` """ _validate_number_of_terminals(node_breaker_network) validator = bus_branch_network_creator.validator_creator() result: BusBranchNetworkCreationResult[BBN, BNV] = BusBranchNetworkCreationResult(validator) if not validator.is_valid_network_data(node_breaker_network): return result bus_branch_network = bus_branch_network_creator.bus_branch_network_creator(node_breaker_network) terminals_to_tns = {} # create topological branches tbs_creation_success = await _create_topological_branches(node_breaker_network, bus_branch_network, bus_branch_network_creator, result, terminals_to_tns, validator) if not tbs_creation_success: return result # create equivalent branches ebs_creation_success = await _create_equivalent_branches(node_breaker_network, bus_branch_network, bus_branch_network_creator, result, terminals_to_tns, validator) if not ebs_creation_success: return result # create power transformers pt_creation_success = await _create_power_transformers(node_breaker_network, bus_branch_network, bus_branch_network_creator, result, terminals_to_tns, validator) if not pt_creation_success: return result # create energy sources es_creation_success = await _create_energy_sources(node_breaker_network, bus_branch_network, bus_branch_network_creator, result, terminals_to_tns, validator) if not es_creation_success: return result # create energy consumers ec_creation_success = await _create_energy_consumers(node_breaker_network, bus_branch_network, bus_branch_network_creator, result, terminals_to_tns, validator) if not ec_creation_success: return result # create power electronics connections pec_creation_success = await _create_power_electronics_connections(node_breaker_network, bus_branch_network, bus_branch_network_creator, result, terminals_to_tns, validator) if not pec_creation_success: return result result.network = bus_branch_network result.was_successful = True return result async def _get_or_create_topological_node( terminal: Terminal, 
terminals_to_tns: Dict[str, TN], node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], validator: BNV ) -> (bool, TN): cached_tn = terminals_to_tns.get(terminal.mrid) if cached_tn is not None: return True, cached_tn # group terminals connected by negligible impedance equipment terminals_grouping = await _group_negligible_impedance_terminals(terminal, bus_branch_network_creator.has_negligible_impedance) negligible_impedance_equipment = frozenset(terminals_grouping.conducting_equipment_group) inner_terms = frozenset(terminals_grouping.inner_terminals) border_terms = frozenset(terminals_grouping.border_terminals) rated_u = _get_base_voltage(border_terms) # create topological node if not validator.is_valid_topological_node_data(bus_branch_network, rated_u, negligible_impedance_equipment, border_terms, inner_terms, node_breaker_network): return False, None tn_id, tn = bus_branch_network_creator.topological_node_creator( bus_branch_network, rated_u, negligible_impedance_equipment, border_terms, inner_terms, node_breaker_network ) # populate result mappings result.mappings.to_nbn.topological_nodes[tn_id] = terminals_grouping for t in terminals_grouping.terminals(): _add_to_mapping(result.mappings.to_bbn.objects, t.mrid, tn) if t.connectivity_node is not None: _add_to_mapping(result.mappings.to_bbn.objects, t.connectivity_node.mrid, tn) for ce in terminals_grouping.conducting_equipment_group: _add_to_mapping(result.mappings.to_bbn.objects, ce.mrid, tn) # map terminals to associated topological nodes for easy lookup when creating connected equipment for t in border_terms: terminals_to_tns[t.mrid] = tn for t in inner_terms: terminals_to_tns[t.mrid] = tn return True, tn async def _create_topological_branches( node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, 
TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], terminals_to_tns: Dict[str, TN], validator: BNV ) -> bool: processed_acls_ids = set() for acls in node_breaker_network.objects(AcLineSegment): if not (acls.mrid in processed_acls_ids or bus_branch_network_creator.has_negligible_impedance(acls)): lines_grouping = await _group_common_ac_line_segment_terminals(acls) border_terms = frozenset(lines_grouping.border_terminals) common_acls = frozenset(lines_grouping.conducting_equipment_group) inner_terms = frozenset(lines_grouping.inner_terminals) # get/create connected topological nodes acls_tns = [] for t in _sort_terminals_by_feeder_direction(border_terms): tn_creation_success, tn = await _get_or_create_topological_node(t, terminals_to_tns, node_breaker_network, bus_branch_network, bus_branch_network_creator, result, validator) if not tn_creation_success: return False acls_tns.append(tn) total_length = reduce(lambda s, l: l.length + s, (common_acl for common_acl in common_acls), 0.0) # create topological branch if not validator.is_valid_topological_branch_data(bus_branch_network, (acls_tns[0], acls_tns[1]), total_length, common_acls, border_terms, inner_terms, node_breaker_network): return False tb_id, tb = bus_branch_network_creator.topological_branch_creator( bus_branch_network, (acls_tns[0], acls_tns[1]), total_length, common_acls, border_terms, inner_terms, node_breaker_network ) # populate result mappings result.mappings.to_nbn.topological_branches[tb_id] = lines_grouping for line in lines_grouping.conducting_equipment_group: _add_to_mapping(result.mappings.to_bbn.objects, line.mrid, tb) for t in lines_grouping.inner_terminals: _add_to_mapping(result.mappings.to_bbn.objects, t.mrid, tb) if t.connectivity_node is not None: _add_to_mapping(result.mappings.to_bbn.objects, t.connectivity_node.mrid, tb) # flag processed ac-line-segments processed_acls_ids.update({acls.mrid for acls in common_acls}) return True async def 
_create_equivalent_branches( node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], terminals_to_tns: Dict[str, TN], validator: BNV ) -> bool: for eb in node_breaker_network.objects(EquivalentBranch): if eb.mrid in result.mappings.to_bbn.objects: # skip if already processed continue # get/create connected topological nodes eb_tns = [] for t in _sort_terminals_by_feeder_direction(eb.terminals): tn_creation_success, tn = await _get_or_create_topological_node(t, terminals_to_tns, node_breaker_network, bus_branch_network, bus_branch_network_creator, result, validator) if not tn_creation_success: return False eb_tns.append(tn) if bus_branch_network_creator.has_negligible_impedance(eb): continue # create equivalent branch if not validator.is_valid_equivalent_branch_data(bus_branch_network, eb_tns, eb, node_breaker_network): return False teb_id, teb = bus_branch_network_creator.equivalent_branch_creator(bus_branch_network, eb_tns, eb, node_breaker_network) # populate result mappings _add_to_mapping(result.mappings.to_nbn.equivalent_branches, teb_id, eb) _add_to_mapping(result.mappings.to_bbn.objects, eb.mrid, teb) return True async def _create_power_transformers( node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], terminals_to_tns: Dict[str, TN], validator: BNV ) -> bool: for pt in node_breaker_network.objects(PowerTransformer): # create list of ends with their connected topological nodes ends_to_topological_nodes = [] for end in _sort_ends_by_feeder_direction(pt.ends): if end.terminal is not None: tn_creation_success, tn = await _get_or_create_topological_node(end.terminal, terminals_to_tns, node_breaker_network, bus_branch_network, bus_branch_network_creator, result, 
validator) if not tn_creation_success: return False ends_to_topological_nodes.append((end, tn)) else: # End has no terminal/topological node to attach to. ends_to_topological_nodes.append((end, None)) # create power transformer if not validator.is_valid_power_transformer_data(bus_branch_network, pt, ends_to_topological_nodes, node_breaker_network): return False txs = bus_branch_network_creator.power_transformer_creator(bus_branch_network, pt, ends_to_topological_nodes, node_breaker_network) # populate result mappings for tx_id, tx in txs.items(): _add_to_mapping(result.mappings.to_nbn.power_transformers, tx_id, pt) _add_to_mapping(result.mappings.to_bbn.objects, pt.mrid, tx) return True async def _create_energy_sources( node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], terminals_to_tns: Dict[str, TN], validator: BNV ) -> bool: """ Create a bus-branch representation for every EnergySource in the node-breaker network. Returns False as soon as topological-node creation or validation fails for any source; returns True when all sources were created and recorded in `result.mappings`. """ for es in node_breaker_network.objects(EnergySource): # Energy sources are validated elsewhere to have exactly one terminal. es_terminal = next((t for t in es.terminals)) tn_creation_success, tn = await _get_or_create_topological_node(es_terminal, terminals_to_tns, node_breaker_network, bus_branch_network, bus_branch_network_creator, result, validator) if not tn_creation_success: return False if not validator.is_valid_energy_source_data(bus_branch_network, es, tn, node_breaker_network): return False # Creator returns a mapping of created-object id -> created object (presumably; see its use below). bb_ess = bus_branch_network_creator.energy_source_creator(bus_branch_network, es, tn, node_breaker_network) # populate result mappings for bb_es_id, bb_es in bb_ess.items(): _add_to_mapping(result.mappings.to_nbn.energy_sources, bb_es_id, es) _add_to_mapping(result.mappings.to_bbn.objects, es.mrid, bb_es) return True async def _create_energy_consumers( node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], terminals_to_tns: Dict[str, TN], validator: BNV ) -> bool: """ Create a bus-branch representation for every EnergyConsumer in the node-breaker network. Returns False on the first failed topological-node creation or validation, True otherwise. """ for ec in node_breaker_network.objects(EnergyConsumer): # Energy consumers are validated elsewhere to have exactly one terminal. ec_terminal = next((t for t in ec.terminals)) tn_creation_success, tn = await _get_or_create_topological_node(ec_terminal, terminals_to_tns, node_breaker_network, bus_branch_network, bus_branch_network_creator, result, validator) if not tn_creation_success: return False if not validator.is_valid_energy_consumer_data(bus_branch_network, ec, tn, node_breaker_network): return False bb_ecs = bus_branch_network_creator.energy_consumer_creator(bus_branch_network, ec, tn, node_breaker_network) # populate result mappings for bb_ec_id, bb_ec in bb_ecs.items(): _add_to_mapping(result.mappings.to_nbn.energy_consumers, bb_ec_id, ec) _add_to_mapping(result.mappings.to_bbn.objects, ec.mrid, bb_ec) return True async def _create_power_electronics_connections( node_breaker_network: NetworkService, bus_branch_network: BBN, bus_branch_network_creator: BusBranchNetworkCreator[BBN, TN, TB, EB, PT, ES, EC, PEC, BNV], result: BusBranchNetworkCreationResult[BBN, BNV], terminals_to_tns: Dict[str, TN], validator: BNV ) -> bool: """ Create a bus-branch representation for every PowerElectronicsConnection in the node-breaker network. Returns False on the first failed topological-node creation or validation, True otherwise. """ for pec in node_breaker_network.objects(PowerElectronicsConnection): # Power electronics connections are validated elsewhere to have exactly one terminal. pec_terminal = next((t for t in pec.terminals)) tn_creation_success, tn = await _get_or_create_topological_node(pec_terminal, terminals_to_tns, node_breaker_network, bus_branch_network, bus_branch_network_creator, result, validator) if not tn_creation_success: return False if not validator.is_valid_power_electronics_connection_data(bus_branch_network, pec, tn, node_breaker_network): return False bb_pecs = bus_branch_network_creator.power_electronics_connection_creator(bus_branch_network, pec, tn, node_breaker_network) # populate result mappings for bb_pec_id, bb_pec in bb_pecs.items(): _add_to_mapping(result.mappings.to_nbn.power_electronics_connections, bb_pec_id, pec) _add_to_mapping(result.mappings.to_bbn.objects, pec.mrid, bb_pec) return True def _get_base_voltage(border_terminals: FrozenSet[Terminal]) -> Union[int, None]: """ Derive a nominal voltage for a negligible-impedance equipment group from its border terminals. Switches are skipped, transformer ends contribute their rated_u, and all other equipment contributes its base voltage. Returns an arbitrary member of the collected voltages, or None when none were found. """ voltages = set() for t in border_terminals: ce = t.conducting_equipment # Open switches may have a different voltage rating from the negligible-impedance equipment group due to the equipment on the other side of it. if isinstance(ce, Switch): continue if isinstance(ce, PowerTransformer): # Use the rated voltage of the winding attached to this specific terminal. end_voltage = next((e.rated_u for e in ce.ends if e.terminal is t), None) if end_voltage is not None: voltages.add(end_voltage) else: if ce.base_voltage is not None: voltages.add(ce.base_voltage.nominal_voltage) return next(iter(voltages), None) def _validate_number_of_terminals(network: NetworkService) -> None: """ Validate terminal counts for the equipment types this module relies on: AcLineSegments must have exactly 2 terminals; EnergySources, EnergyConsumers and PowerElectronicsConnections exactly 1. Raises ValueError listing the offending mRIDs for the first violating type found. """ illegal_acls = [] for acl in network.objects(AcLineSegment): if acl.num_terminals() != 2: illegal_acls.append(acl.mrid) if len(illegal_acls) != 0: raise ValueError( f"NetworkService contains the following AcLineSegments with an invalid number of terminals: {illegal_acls}") illegal_es = [] for es in network.objects(EnergySource): if es.num_terminals() != 1: illegal_es.append(es.mrid) if len(illegal_es) != 0: raise ValueError( f"NetworkService contains the following EnergySources with an invalid number of terminals: {illegal_es}") illegal_ec = [] for ec in network.objects(EnergyConsumer): if ec.num_terminals() != 1: illegal_ec.append(ec.mrid) if len(illegal_ec) != 0: raise ValueError( f"NetworkService contains the following EnergyConsumers with an invalid number of terminals: {illegal_ec}") illegal_pec = [] for pec in network.objects(PowerElectronicsConnection): if pec.num_terminals() != 1: illegal_pec.append(pec.mrid) if len(illegal_pec) != 0: raise ValueError( f"NetworkService contains the following PowerElectronicsConnections with an invalid number of terminals: {illegal_pec}") async def _group_negligible_impedance_terminals( terminal: Terminal, has_negligible_impedance: Callable[[ConductingEquipment], bool] ) -> TerminalGrouping[ConductingEquipment]: """ Depth-first trace from `terminal` collecting the group of negligible-impedance equipment reachable from it, together with the inner and border terminals of that group. """ tg = TerminalGrouping[ConductingEquipment]() # noinspection PyArgumentList trace = Traversal( start_item=terminal, queue_next=_queue_terminals_across_negligible_impedance(has_negligible_impedance), process_queue=LifoQueue(), step_actions=[_process_terminal(tg, has_negligible_impedance)] ) await trace.trace() return tg def _process_terminal( tg: TerminalGrouping[ConductingEquipment], has_negligible_impedance: Callable[[ConductingEquipment], bool] ): """Build the step action that sorts each visited terminal into inner/border sets of `tg`.""" async def add_to_group(t: Terminal, _): if has_negligible_impedance(t.conducting_equipment): tg.conducting_equipment_group.add(t.conducting_equipment) tg.inner_terminals.add(t) else: # Terminal of non-negligible-impedance equipment marks the edge of the group. tg.border_terminals.add(t) return add_to_group def _queue_terminals_across_negligible_impedance( has_negligible_impedance: Callable[[ConductingEquipment], bool] ): """Build the queue-next function: follow connectivity nodes always, and cross equipment only when it has negligible impedance.""" def queue_next(terminal: Terminal, traversal: Traversal[Terminal]): if terminal.connectivity_node is not None: traversal.process_queue.extend(ot for ot in terminal.connectivity_node.terminals if ot != terminal) if has_negligible_impedance(terminal.conducting_equipment): traversal.process_queue.extend(ot for ot in terminal.conducting_equipment.terminals if ot != terminal) return queue_next async def _group_common_ac_line_segment_terminals(acls: AcLineSegment) -> TerminalGrouping[AcLineSegment]: """ Group `acls` with all directly chained AcLineSegments sharing the same per-length sequence impedance, then classify each terminal of the group as border (connectivity node seen once, or none) or inner (shared between group members). """ def has_common_impedance(line: AcLineSegment): return line.per_length_sequence_impedance.mrid == acls.per_length_sequence_impedance.mrid common_acls: TerminalGrouping[AcLineSegment] = TerminalGrouping() connectivity_node_counter = Counter() # noinspection PyArgumentList trace = Traversal( start_item=acls, queue_next=_queue_common_impedance_lines(common_acls, has_common_impedance), process_queue=LifoQueue(), step_actions=[_process_acls(common_acls, connectivity_node_counter)] ) await trace.trace() for t in (t for line in common_acls.conducting_equipment_group for t in line.terminals): if t.connectivity_node is None: common_acls.border_terminals.add(t) continue count = connectivity_node_counter.get(t.connectivity_node, 0) if count == 1: # Only one group member touches this node, so it faces outward. common_acls.border_terminals.add(t) else: common_acls.inner_terminals.add(t) return common_acls def _process_acls( common_acls: TerminalGrouping[AcLineSegment], connectivity_node_counter: Counter ): """Build the step action that records each newly visited line and counts its connectivity nodes.""" async def add_to_group(acls: AcLineSegment, _): if acls in common_acls.conducting_equipment_group: return common_acls.conducting_equipment_group.add(acls) connectivity_node_counter.update( (t.connectivity_node for t in acls.terminals if t.connectivity_node is not None)) return add_to_group def _queue_common_impedance_lines( common_acls: TerminalGrouping[AcLineSegment], has_common_impedance: Callable[[AcLineSegment], bool] ): """Build the queue-next function that enqueues adjacent lines with matching impedance.""" def queue_next(acls: AcLineSegment, traversal: Traversal[AcLineSegment]): traversal.process_queue.extend(_next_common_acls(acls, has_common_impedance, common_acls)) return queue_next def _next_common_acls( acls: AcLineSegment, has_common_impedance: Callable[[AcLineSegment], bool], common_acls: TerminalGrouping[AcLineSegment] ) -> Set[AcLineSegment]: """ Find the AcLineSegments adjacent to `acls` that can join its common-impedance group: reachable through a non-forking connectivity node (exactly two terminals), sharing the same impedance, and not already in the group. """ acls_terminals = {*acls.terminals} def can_process_ac_line(o: Terminal) -> bool: return o not in acls_terminals \ and isinstance(o.conducting_equipment, AcLineSegment) \ and has_common_impedance(o.conducting_equipment) \ and o.conducting_equipment not in common_acls.conducting_equipment_group def is_non_forking_ac_line(t: Terminal) -> bool: return t.connectivity_node is not None and len(list(t.connectivity_node.terminals)) == 2 return { o.conducting_equipment for t in acls.terminals if is_non_forking_ac_line(t) for o in t.connectivity_node.terminals if can_process_ac_line(o) } def _is_no_impedance_branch(eb: EquivalentBranch) -> bool: """True when the equivalent branch has a missing or zero resistance or reactance.""" return eb.r is None or eb.x is None or eb.r == 0.0 or eb.x == 0.0 def _sort_ends_by_feeder_direction(ends: Iterable[PowerTransformerEnd]) -> List[PowerTransformerEnd]: """Sort transformer ends by normal feeder direction; ends without a terminal sort last (sentinel 999).""" return sorted(iter(ends), key=lambda pte: 999 if pte.terminal is None else pte.terminal.normal_feeder_direction.value) def _sort_terminals_by_feeder_direction(terminals: Iterable[Terminal]) -> List[Terminal]: """Sort terminals by the numeric value of their normal feeder direction.""" return sorted(iter(terminals), key=lambda ter: ter.normal_feeder_direction.value) 
zepben.evolve.test-ci-central
/zepben.evolve.test_ci_central-0.32.0-py3-none-any.whl/zepben/evolve/model/busbranch/bus_branch.py
bus_branch.py
from zepben.evolve import Conductor, PowerTransformer, connect_with_password, SyncNetworkConsumerClient, ConductingEquipment, EnergyConsumer, Switch from zepben.protobuf.nc.nc_requests_pb2 import INCLUDE_ENERGIZED_LV_FEEDERS, INCLUDE_ENERGIZED_FEEDERS, INCLUDE_ENERGIZING_SUBSTATIONS, \ INCLUDE_ENERGIZING_FEEDERS def main(): # See connecting_to_grpc_service.py for examples of each connect function channel = connect_with_password(host="EWB hostname", rpc_port=1234, username="<username-or-email-address>", password="<your-password>", client_id="client ID") feeder_mrid = "NotARealFeeder123" print(f"Fetching {feeder_mrid}") # Note you should create a new client for each Feeder you retrieve # There is also a NetworkConsumerClient that is asyncio compatible, with the same API. client = SyncNetworkConsumerClient(channel=channel) network = client.service # Fetch feeder and all its LvFeeders client.get_equipment_container(feeder_mrid, include_energized_containers=INCLUDE_ENERGIZED_LV_FEEDERS).throw_on_error() print(f"Total Number of objects: {client.service.len_of()}") types = set(type(x) for x in network.objects(ConductingEquipment)) for t in types: print(f"Number of {t.__name__}'s = {len(list(network.objects(t)))}") total_length = 0 for conductor in network.objects(Conductor): total_length += conductor.length print(f"Total conductor length in {feeder_mrid}: {total_length:.3f}m") feeder = network.get(feeder_mrid) print(f"{feeder.mrid} Transformers:") for eq in feeder.equipment: if isinstance(eq, PowerTransformer): print(f" {eq} - Vector Group: {eq.vector_group.short_name}, Function: {eq.function.short_name}") print() print(f"{feeder_mrid} Energy Consumers:") for ec in network.objects(EnergyConsumer): print(f" {ec} - Real power draw: {ec.q}W, Reactive power draw: {ec.p}VAr") print() print(f"{feeder_mrid} Switches:") for switch in network.objects(Switch): print(f" {switch} - Open status: {switch.get_state():04b}") # === Some other examples of fetching containers === # Fetch 
substation equipment and include equipment from HV/MV feeders powered by it client.get_equipment_container("substation ID", include_energized_containers=INCLUDE_ENERGIZED_FEEDERS) # Same as above, but also fetch equipment from LV feeders powered by the HV/MV feeders client.get_equipment_container("substation ID", include_energized_containers=INCLUDE_ENERGIZED_LV_FEEDERS) # Fetch feeder equipment without fetching any additional equipment from powering/powered containers client.get_equipment_container("feeder ID") # Fetch HV/MV feeder equipment, the equipment from the substation powering it, and the equipment from the LV feeders it powers client.get_equipment_container("feeder ID", include_energizing_containers=INCLUDE_ENERGIZING_SUBSTATIONS, include_energized_containers=INCLUDE_ENERGIZED_LV_FEEDERS) # Fetch LV feeder equipment and include equipment from HV/MV feeders powering it client.get_equipment_container("LV feeder ID", include_energizing_containers=INCLUDE_ENERGIZING_FEEDERS) # Same as above, but also fetch equipment from the substations powering the HV/MV feeders client.get_equipment_container("LV feeder ID", include_energizing_containers=INCLUDE_ENERGIZING_SUBSTATIONS) if __name__ == "__main__": main()
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/fetching_network_model.py
fetching_network_model.py
from zepben.auth import AuthMethod from zepben.evolve import connect_insecure, NetworkConsumerClient, connect_tls, connect_with_password, connect_with_secret, SyncNetworkConsumerClient async def plaintext_connection(): """ Connects to an RPC server without TLS or authentication. This method should only be used in development and for demos. """ async with connect_insecure("hostname", 1234) as insecure_channel: client = NetworkConsumerClient(insecure_channel) grpc_result = await client.get_network_hierarchy() print(grpc_result.result) async def secure_connection(): """ Connects to an RPC server over TLS. No user/client credentials are used. """ async with connect_tls("hostname", 1234) as secure_channel: client = NetworkConsumerClient(secure_channel) grpc_result = await client.get_network_hierarchy() print(grpc_result.result) async def secure_connection_with_user_credentials(): """ Connects to an RPC server over TLS with user credentials. The authentication config will be fetched from https://hostname/auth or https://hostname/ewb/auth by default, which includes the domain of the OAuth token provider. """ async with connect_with_password("client ID", "username", "password", "hostname", 1234) as secure_channel: client = NetworkConsumerClient(secure_channel) grpc_result = await client.get_network_hierarchy() print(grpc_result.result) # Specify authentication config explicitly async with connect_with_password("client ID", "username", "password", "hostname", 1234, audience="https://fake_audience/", issuer_domain="fake.issuer.domain", auth_method=AuthMethod.AUTH0) as secure_channel: client = NetworkConsumerClient(secure_channel) grpc_result = await client.get_network_hierarchy() print(grpc_result.result) async def secure_connection_with_client_credentials(): """ Connects to an RPC server over TLS with client credentials. The authentication config will be fetched from https://hostname/auth or https://hostname/ewb/auth by default, which includes the domain of the OAuth token provider. """ async with connect_with_secret("client ID", "client secret", "hostname", 1234) as secure_channel: client = NetworkConsumerClient(secure_channel) grpc_result = await client.get_network_hierarchy() print(grpc_result.result) # Specify authentication config explicitly async with connect_with_secret("client ID", "client secret", "hostname", 1234, audience="https://fake_audience/", issuer_domain="fake.issuer.domain", auth_method=AuthMethod.AUTH0) as secure_channel: client = NetworkConsumerClient(secure_channel) grpc_result = await client.get_network_hierarchy() print(grpc_result.result) # You may use `SyncNetworkConsumerClient` if you prefer not to use asyncio. # The API calls are the same between `SyncNetworkConsumerClient` and `NetworkConsumerClient`. def connect_sync(): # NOTE(review): unlike the async examples, the channel here is used directly rather than as a # context manager - presumably connect_insecure's return value supports both; confirm against the SDK. channel = connect_insecure("hostname", 1234) client = SyncNetworkConsumerClient(channel) grpc_result = client.get_network_hierarchy() print(grpc_result.result) 
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/connecting_to_grpc_service.py
connecting_to_grpc_service.py
# A Traversal is used to iterate over graph-like structures. # The Evolve SDK contains several factory functions for traversals that cover common use cases. import asyncio from zepben.evolve import Switch, connected_equipment_trace, ConductingEquipmentStep, connected_equipment_breadth_trace, \ normal_connected_equipment_trace, current_connected_equipment_trace, connectivity_trace, ConnectivityResult, connected_equipment, \ connectivity_breadth_trace, SinglePhaseKind, normal_connectivity_trace, current_connectivity_trace, phase_trace, PhaseCode, PhaseStep, normal_phase_trace, \ current_phase_trace, assign_equipment_to_feeders, Feeder, LvFeeder, assign_equipment_to_lv_feeders, set_direction, Terminal, \ normal_limited_connected_equipment_trace, AcLineSegment, current_limited_connected_equipment_trace, FeederDirection, remove_direction, \ normal_downstream_trace, current_downstream_trace, TreeNode, Breaker from zepben.evolve.services.network.tracing.phases import phase_step from zepben.evolve.services.network.tracing.tracing import normal_upstream_trace, current_upstream_trace, normal_downstream_tree, current_downstream_tree # For the purposes of this example, we will use the IEEE 13 node feeder. from zepben.examples.ieee_13_node_test_feeder import network # Shared module-level handles into the test network; the examples below mutate switch state # and feeder direction on these objects, then restore them via reset_switch()/remove_direction(). feeder_head = network.get("br_650", Breaker) switch = network.get("sw_671_692", Switch) hv_feeder = network.get("hv_fdr", Feeder) lv_feeder = network.get("lv_fdr", LvFeeder) def reset_switch(): # Restore the shared switch to closed in both the normal and current network states. switch.set_normally_open(False) switch.set_open(False) print("Switch reset (normally and currently closed)") print() def print_heading(heading): # Print `heading` inside a +---+ box for readability of the example output. print("+" + "-" * (len(heading) + 2) + "+") print(f"| {heading} |") print("+" + "-" * (len(heading) + 2) + "+") print() async def equipment_traces(): # Equipment traces iterate over equipment connected in a network. print_heading("EQUIPMENT TRACING") # noinspection PyArgumentList start_item = ConductingEquipmentStep(conducting_equipment=feeder_head) visited = set() async def print_step(ces: ConductingEquipmentStep, _): visited.add(ces.conducting_equipment) print(f"\tDepth {ces.step:02d}: {ces.conducting_equipment}") # The connected equipment trace iterates through all connected equipment depth-first, and even through open switches. # Equipment will be revisited if a shorter path from the starting equipment is found. print("Connected Equipment Trace:") await connected_equipment_trace().add_step_action(print_step).run(start_item) print(f"Number of equipment visited: {len(visited)}") print() visited.clear() # There is also a breadth-first version, which guarantees that each equipment is visited at most once. print("Connected Equipment Breadth Trace:") await connected_equipment_breadth_trace().add_step_action(print_step).run(start_item) print(f"Number of equipment visited: {len(visited)}") print() visited.clear() # The normal connected equipment trace iterates through all equipment connected to the starting equipment in the network's normal state. # By setting the switch from node 671 to 692 to normally open on at least one phase, the traversal will not trace through the switch. # Even if a switch has closed phases, it will not be traced through if one or more of its phases are closed in the network's normal state. network.get("sw_671_692", Switch).set_normally_open(True, phase=SinglePhaseKind.A) print("Switch set to normally open on phase A") print() print("Normal Connected Equipment Trace:") await normal_connected_equipment_trace().add_step_action(print_step).run(start_item) print(f"Number of equipment visited: {len(visited)}") print() visited.clear() # The normal connected equipment trace iterates through all equipment connected to the starting equipment in the network's current state. # By setting the switch from node 671 to 692 to currently open on at least one phase, the traversal will not trace through the switch. # Even if a switch has closed phases, it will not be traced through if one or more of its phases are closed in the network's current state. switch.set_open(True, phase=SinglePhaseKind.B) print("Switch set to currently open on phase B") print() print("Current Connected Equipment Trace:") await current_connected_equipment_trace().add_step_action(print_step).run(start_item) print(f"Number of equipment visited: {len(visited)}") print() visited.clear() reset_switch() async def connectivity_traces(): # Connectivity traces iterate over the connectivity of equipment terminals, rather than the equipment themselves. # The tracker ensures that each equipment appears at most once as a destination in a connectivity. print_heading("CONNECTIVITY TRACING") start_item = connected_equipment(feeder_head)[0] visited = set() async def print_connectivity(cr: ConnectivityResult, _: bool): visited.add(cr) from_phases = "".join(phase_path.from_phase.short_name for phase_path in cr.nominal_phase_paths) to_phases = "".join(phase_path.to_phase.short_name for phase_path in cr.nominal_phase_paths) print(f"\t{cr.from_terminal.mrid:-<15}-{from_phases:->4}-{to_phases:-<4}-{cr.to_terminal.mrid:->15}") print("Connectivity Trace:") await connectivity_trace().add_step_action(print_connectivity).run(start_item) print(f"Number of connectivities visited: {len(visited)}") print() visited.clear() # A breadth-first connectivity trace is also available. print("Connectivity Breadth Trace:") await connectivity_breadth_trace().add_step_action(print_connectivity).run(start_item) print(f"Number of connectivities visited: {len(visited)}") print() visited.clear() # The normal connectivity trace is analogous to the normal connected equipment trace, # and likewise does not go through switches with at least one open phase. switch.set_normally_open(True, phase=SinglePhaseKind.A) print("Switch set to normally open on phase A") print() print("Normal Connectivity Trace:") await normal_connectivity_trace().add_step_action(print_connectivity).run(start_item) print(f"Number of connectivities visited: {len(visited)}") print() visited.clear() switch.set_open(True, phase=SinglePhaseKind.B) print("Switch set to currently open on phase B") print() print("Current Connectivity Trace:") await current_connectivity_trace().add_step_action(print_connectivity).run(start_item) print(f"Number of connectivities visited: {len(visited)}") print() visited.clear() reset_switch() async def limited_connected_equipment_traces(): # Limited connected equipment traces allow you to trace up to a number of steps, and optionally in a specified feeder direction. # Running the trace returns a dictionary from each visited equipment to the number of steps away it is from a starting equipment. # set_direction() must be run on a network before running directed traces. print_heading("LIMITED CONNECTED EQUIPMENT TRACES") switch.set_normally_open(True, phase=SinglePhaseKind.A) print(f"Switch set to normally open on phase A.") print() await set_direction().run(network) print(f"Feeder direction set for each terminal.") print() line = network.get("l_632_671", AcLineSegment) normal_distances = await normal_limited_connected_equipment_trace().run([line], maximum_steps=2, feeder_direction=FeederDirection.DOWNSTREAM) print("Normal limited connected downstream trace from line 632-671 with maximum steps of 2:") for eq, distance in normal_distances.items(): print(f"\tNumber of steps to {eq}: {distance}") print(f"Number of equipment traced: {len(normal_distances)}") print() current_distances = await current_limited_connected_equipment_trace().run([line], maximum_steps=2, feeder_direction=FeederDirection.DOWNSTREAM) print("Current limited connected downstream trace from line 632-671 with maximum steps of 2:") for eq, distance in current_distances.items(): print(f"\tNumber of steps to {eq}: {distance}") print(f"Number of equipment traced: {len(current_distances)}") print() remove_direction().run(network) print(f"Feeder direction removed for each terminal.") print() reset_switch() async def phase_traces(): # Phase traces account for which phases each terminal supports. print_heading("PHASE TRACING") feeder_head_phase_step = phase_step.start_at(feeder_head, PhaseCode.ABCN) switch_phase_step = phase_step.start_at(switch, PhaseCode.ABCN) visited = set() async def print_phase_step(step: PhaseStep, _: bool): visited.add(step) phases = "" for spk in PhaseCode.ABCN: if spk in step.phases: phases += spk.short_name else: phases += "-" print(f'\t{step.previous and step.previous.mrid or "(START)":-<15}-{phases: ^4}-{step.conducting_equipment.mrid:->15}') print("Phase Trace:") await phase_trace().add_step_action(print_phase_step).run(feeder_head_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() # For each normally open phase on a switch, the normal phase trace will not trace through that phase for the switch. switch.set_normally_open(True, SinglePhaseKind.B) print("Normal Phase Trace:") await normal_phase_trace().add_step_action(print_phase_step).run(feeder_head_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() # For each currently open phase on a switch, the current phase trace will not trace through that phase for the switch. switch.set_open(True, SinglePhaseKind.C) print("Current Phase Trace:") await current_phase_trace().add_step_action(print_phase_step).run(feeder_head_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() # There are also directed phase traces. # set_direction() must be run on a network before running directed traces. # Note that set_direction() does not trace through switches with at least one open phase, # meaning that terminals beyond such a switch are left with a feeder direction of NONE. await set_direction().run(network) print(f"Feeder direction set for each terminal.") print() print("Normal Downstream Phase Trace:") await normal_downstream_trace().add_step_action(print_phase_step).run(feeder_head_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() print("Current Downstream Phase Trace:") await current_downstream_trace().add_step_action(print_phase_step).run(feeder_head_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() print("Normal Upstream Phase Trace:") await normal_upstream_trace().add_step_action(print_phase_step).run(switch_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() print("Current Upstream Phase Trace:") await current_upstream_trace().add_step_action(print_phase_step).run(switch_phase_step) print(f"Number of phase steps visited: {len(visited)}") print() visited.clear() remove_direction().run(network) print(f"Feeder direction removed for each terminal.") print() reset_switch() async def assigning_equipment_to_feeders(): # Use assign_equipment_to_feeders() and assign_equipment_to_lv_feeders() to assign equipment to HV and LV feeders. # assign_equipment_to_feeders() also ensures that HV feeders that power LV feeders are associated. print_heading("ASSIGNING EQUIPMENT TO FEEDERS") print(f"Equipment in HV feeder: {[eq.mrid for eq in hv_feeder.equipment]}") print(f"Equipment in LV feeder: {[eq.mrid for eq in lv_feeder.equipment]}") print(f"LV feeders powered by HV feeder: {[lvf.mrid for lvf in hv_feeder.normal_energized_lv_feeders]}") print(f"HV feeders powering LV feeder: {[hvf.mrid for hvf in lv_feeder.normal_energizing_feeders]}") print() await assign_equipment_to_feeders().run(network) await assign_equipment_to_lv_feeders().run(network) print("Equipment assigned to feeders.") print() print(f"Equipment in HV feeder: {[eq.mrid for eq in hv_feeder.equipment]}") print(f"Equipment in LV feeder: {[eq.mrid for eq in lv_feeder.equipment]}") print(f"LV feeders powered by HV feeder: {[lvf.mrid for lvf in hv_feeder.normal_energized_lv_feeders]}") print(f"HV feeders powering LV feeder: {[hvf.mrid for hvf in lv_feeder.normal_energizing_feeders]}") print() async def set_and_remove_feeder_direction(): # Use set_direction().run(network) to evaluate the feeder direction of each terminal. print_heading("SETTING FEEDER DIRECTION") switch.set_normally_open(True, phase=SinglePhaseKind.A) print(f"Switch set to normally open on phase A. Switch is between feeder head and energy consumer 675.") consumer_terminal = network.get("ec_675_t", Terminal) print(f"Normal feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.normal_feeder_direction}") print(f"Current feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.current_feeder_direction}") print(f"Normal feeder direction of energy consumer 675 terminal: {consumer_terminal.normal_feeder_direction}") print(f"Current feeder direction of energy consumer 675 terminal: {consumer_terminal.current_feeder_direction}") print() await set_direction().run(network) print("Normal and current feeder direction set.") print() print(f"Normal feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.normal_feeder_direction}") print(f"Current feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.current_feeder_direction}") print(f"Normal feeder direction of energy consumer 675 terminal: {consumer_terminal.normal_feeder_direction}") print(f"Current feeder direction of energy consumer 675 terminal: {consumer_terminal.current_feeder_direction}") print() # Use remove_direction().run(network) to remove feeder directions. # While set_direction().run(network) must be awaited, remove_direction().run(network) does not, because it is not asynchronous. print_heading("REMOVING FEEDER DIRECTION") consumer_terminal = network.get("ec_675_t", Terminal) print(f"Normal feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.normal_feeder_direction}") print(f"Current feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.current_feeder_direction}") print(f"Normal feeder direction of energy consumer 675 terminal: {consumer_terminal.normal_feeder_direction}") print(f"Current feeder direction of energy consumer 675 terminal: {consumer_terminal.current_feeder_direction}") print() remove_direction().run(network) print("Normal and current feeder direction removed.") print() print(f"Normal feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.normal_feeder_direction}") print(f"Current feeder direction of HV feeder head terminal: {hv_feeder.normal_head_terminal.current_feeder_direction}") print(f"Normal feeder direction of energy consumer 675 terminal: {consumer_terminal.normal_feeder_direction}") print(f"Current feeder direction of energy consumer 675 terminal: {consumer_terminal.current_feeder_direction}") print() reset_switch() async def trees(): # A downstream tree contains all non-intersecting equipment paths starting from a common equipment and following downstream terminals. # The same equipment may appear multiple times in the tree if the network contains multiple downstream paths to the equipment, i.e. loops. # Similar to connected equipment traces, either the normal or current state of the network may be used to determine whether to trace through each switch. print_heading("DOWNSTREAM TREES") def desc_lines(node: TreeNode): # Recursively yield box-drawing lines describing the descendants of `node`. children = list(node.children) for i, child in enumerate(children): is_last_child = i == len(children) - 1 branch_char = "┗" if is_last_child else "┣" stem_char = " " if is_last_child else "┃" yield f"{branch_char}━{child.conducting_equipment}" for line in desc_lines(child): yield f"{stem_char} {line}" def print_tree(root_node: TreeNode): print(root_node.conducting_equipment) for line in desc_lines(root_node): print(line) switch.set_open(True, SinglePhaseKind.C) print("Switch set to currently open on phase C.") print() await set_direction().run(network) print("Feeder direction set.") print() print("Normal Downstream Tree:") ndt = await normal_downstream_tree().run(feeder_head) print_tree(ndt) print() print("Current Downstream Tree:") cdt = await current_downstream_tree().run(feeder_head) print_tree(cdt) print() remove_direction().run(network) print(f"Feeder direction removed for each terminal.") print() reset_switch() async def main(): # All examples are self-contained. Feel free to comment out any of the following lines to isolate specific examples. await assigning_equipment_to_feeders() await set_and_remove_feeder_direction() await equipment_traces() await limited_connected_equipment_traces() await connectivity_traces() await phase_traces() await trees() if __name__ == "__main__": asyncio.run(main()) 
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/tracing.py
tracing.py
from zepben.evolve import NetworkService, AcLineSegment, PerLengthSequenceImpedance, Switch, Breaker, ConductingEquipment, NameType, Meter, EnergySource, \
    Terminal

# A `NetworkService` is a mutable node breaker network model that implements a subset of IEC61968 and IEC61970 CIM classes.
# It is essentially a collection of `IdentifiedObject`s, and they may be added and removed as desired.
from zepben.evolve.services.common.resolver import per_length_sequence_impedance

network = NetworkService()

print("""
##################
# ADDING OBJECTS #
##################
""")

# We start by adding a line segment and a breaker to the network model.
# `add` returns a bool reporting whether the object was accepted.
line = AcLineSegment(mrid="acls_123")
breaker = Breaker(mrid="b_456")
print(f"{line} added? {network.add(line)}")
print(f"{breaker} added? {network.add(breaker)}")

# Objects with duplicate mRIDs are not added (this prints "added? False").
invalid = EnergySource(mrid="acls_123")
print(f"{invalid} added? {network.add(invalid)}")

print("""
####################
# QUERYING OBJECTS #
####################
""")

# Use the `get` method to query the network model for an object with the specified mRID.
print(f"Identified object with mrid b_456: {network.get('b_456')}")

# A `KeyError` is raised if no object with the specified mRID is in the network model.
try:
    network.get("not_in_network")
except KeyError as error:
    print(error)

# Narrow the desired type with the second parameter. It makes the intent clearer, and lets IDEs lint and autocomplete according to the requested type.
print(f"Switch with mrid b_456 is open? {network.get('b_456', Switch).is_open()}")

# A `TypeError` is raised if the object exists in the network model, but is not the correct type.
try:
    network.get("acls_123", Switch)
except TypeError as error:
    print(error)

print("""
##################
# QUERYING TYPES #
##################
""")

# You may use the `objects` method to iterate over all objects that inherit a specified type.
# Because the breaker is the only object in the network model that inherits from the `Switch` class, this will print "Switch: Breaker{b_456}".
for switch in network.objects(Switch):
    print(f"Switch: {switch}")

# However, both the line and the breaker inherit from `ConductingEquipment`.
# The following line prints "Conducting equipment: AcLineSegment{acls_123}" and "Conducting equipment: Breaker{b_456}".
for conducting_equipment in network.objects(ConductingEquipment):
    print(f"Conducting equipment: {conducting_equipment}")

# Remark: Objects generated by network.objects(BaseType) are ordered by the name of their leaf class, so all `AcLineSegment`s will appear before all `Breaker`s.

# The `len_of` method returns the number of objects that inherit a specified type.
print(f"Number of switches: {network.len_of(Switch)}")
print(f"Number of conducting equipment: {network.len_of(ConductingEquipment)}")

print("""
#############
# RESOLVERS #
#############
""")

# There may be times when you need to reconstruct a network model from an unordered collection of identified objects.
# `NetworkService` allows you to add reference resolvers, which complete associations when the remaining object in an association is added.
network.resolve_or_defer_reference(per_length_sequence_impedance(line), "plsi_789")
print(f"Network has unresolved references? {network.has_unresolved_references()}")
print(f"plsi_789 has unresolved references? {network.has_unresolved_references('plsi_789')}")
for unresolved_reference in network.get_unresolved_references_from("acls_123"):
    print(f"Unresolved reference from acls_123: {unresolved_reference}")
for unresolved_reference in network.get_unresolved_references_to("plsi_789"):
    print(f"Unresolved reference to plsi_789: {unresolved_reference}")
print(f"Number of unresolved references to plsi_789: {network.num_unresolved_references('plsi_789')}")
print(f"Total unresolved references: {network.num_unresolved_references()}")

# Adding the referenced object resolves the deferred association automatically.
print("Adding plsi_789 to the network...")
network.add(PerLengthSequenceImpedance(mrid="plsi_789"))
print(f"Total unresolved references: {network.num_unresolved_references()}")
print(f"PerLengthSequenceImpedance of acls_123: {line.per_length_sequence_impedance}")

print("""
########################
# CONNECTING TERMINALS #
########################
""")

# Terminals in a `NetworkService` may be connected using the `connect_terminals` method.
# This automatically creates a connectivity node between the terminals, unless one of the terminals is already assigned to one.
t1, t2, t3 = Terminal(mrid="t1"), Terminal(mrid="t2"), Terminal(mrid="t3")
network.add(t1)
network.add(t2)
network.add(t3)
network.connect_terminals(t1, t2)
cn = t1.connectivity_node
print(f"Connected to node {cn}:")
for terminal in cn.terminals:
    print(f"\t{terminal}")

# The mrid of the connectivity node may also be used to connect a terminal
network.connect_by_mrid(t3, cn.mrid)
print(f"Connected to node {cn}:")
for terminal in cn.terminals:
    print(f"\t{terminal}")

# BUG FIX: the original repeated the entire "CONNECTING TERMINALS" section verbatim
# (same banner, same terminal creation, same add/connect calls), re-adding t1/t2/t3
# and re-connecting already-connected terminals. The duplicate has been removed.

print("""
#########
# NAMES #
#########
""")

# Apart from identified objects, a `NetworkService` also supports names. Each identified object has exactly one mRID, but can have any number of names.
# Each name has a name type. In this example, we add two names of type "NMI" to the network model.
# NOTE(review): meter1/meter2 are never used below — the names are attached to `line` and
# `breaker` instead. Possibly the example intended to name the meters; confirm intent.
meter1 = Meter()
meter2 = Meter()
name_type = NameType(name="NMI", description="National Meter Identifier")
name_type.get_or_add_name("987654321", line)
name_type.get_or_add_name("546372819", breaker)
network.add_name_type(name_type)
for name in network.get_name_type("NMI").names:
    print(f"NMI name {name.name} is assigned to {name.identified_object}")
for name_type in network.name_types:
    print(f"Network has name type {name_type}")
# Remark: In practice, NMI names are not assigned to lines and breakers.

print("""
####################
# REMOVING OBJECTS #
####################
""")

# You may use the `remove` method to remove objects from the network model.
network.remove(line)
print(f"{line} removed successfully.")
network.remove(breaker)
print(f"{breaker} removed successfully.")

# The object does not need to be the one that was added. It just needs to match the type and mRID.
plsi = PerLengthSequenceImpedance(mrid="plsi_789")
network.remove(plsi)
print(f"{plsi} removed successfully.")

# KeyError is raised if no matching object is found.
try:
    network.remove(line)
except KeyError as error:
    print(error)
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/network_service_interactions.py
network_service_interactions.py
from zepben.evolve import EnergySource, AcLineSegment, Fuse, PowerTransformer, Breaker, EnergyConsumer, NetworkService, Terminal, connected_equipment, \
    ConductingEquipment, PhaseCode, connected_terminals, ConnectivityResult

# This example explores how to examine the immediate connectivity of equipment.
# We will build a simple, linear network to examine:
#
#      source       consumer
#        |             |
#       line          line
#        |             |
#       fuse        breaker
#        |             |
#   transformer      fuse
#        |             |
#        +------+------+

# HV side: source -> line -> fuse -> transformer (terminals default to 3-phase).
es_t = Terminal(mrid="es-t")
es = EnergySource(mrid="es", terminals=[es_t])
hv_line_t1, hv_line_t2 = Terminal(mrid="hv_line_t1"), Terminal(mrid="hv_line_t2")
hv_line = AcLineSegment(mrid="hv_line", terminals=[hv_line_t1, hv_line_t2])
hv_fuse_t1, hv_fuse_t2 = Terminal(mrid="hv_fuse_t1"), Terminal(mrid="hv_fuse_t2")
hv_fuse = Fuse(mrid="hv_fuse", terminals=[hv_fuse_t1, hv_fuse_t2])

# Transformer steps onto an ABCN LV side; the breaker then narrows to BN towards the consumer.
tx_t1, tx_t2 = Terminal(mrid="tx_t1"), Terminal(mrid="tx_t2", phases=PhaseCode.ABCN)
tx = PowerTransformer(mrid="tx", terminals=[tx_t1, tx_t2])
lv_fuse_t1, lv_fuse_t2 = Terminal(mrid="lv_fuse_t1", phases=PhaseCode.ABCN), Terminal(mrid="lv_fuse_t2", phases=PhaseCode.ABCN)
lv_fuse = Fuse(mrid="lv_fuse", terminals=[lv_fuse_t1, lv_fuse_t2])
breaker_t1, breaker_t2 = Terminal(mrid="breaker_t1", phases=PhaseCode.ABCN), Terminal(mrid="breaker_t2", phases=PhaseCode.BN)
breaker = Breaker(mrid="breaker", terminals=[breaker_t1, breaker_t2])
lv_line_t1, lv_line_t2 = Terminal(mrid="lv_line_t1", phases=PhaseCode.BN), Terminal(mrid="lv_line_t2", phases=PhaseCode.BN)
lv_line = AcLineSegment(mrid="lv_line", terminals=[lv_line_t1, lv_line_t2])
ec_t = Terminal(mrid="ec_t", phases=PhaseCode.BN)
ec = EnergyConsumer(mrid="ec", terminals=[ec_t])

# Register every identified object (terminals and equipment) with the service.
network = NetworkService()
for io in [es_t, es, hv_line_t1, hv_line_t2, hv_line, hv_fuse_t1, hv_fuse_t2, hv_fuse, tx_t1, tx_t2, tx, lv_fuse_t1, lv_fuse_t2, lv_fuse, breaker_t1,
           breaker_t2, breaker, lv_line_t1, lv_line_t2, lv_line, ec_t, ec]:
    network.add(io)

network.connect_terminals(es_t, hv_line_t1)
network.connect_terminals(hv_line_t2, hv_fuse_t1)
network.connect_terminals(hv_fuse_t2, tx_t1)
network.connect_terminals(tx_t2, lv_fuse_t1)
network.connect_terminals(lv_fuse_t2, breaker_t1)
network.connect_terminals(breaker_t2, lv_line_t1)
network.connect_terminals(lv_line_t2, ec_t)


def fancy_print_connectivity_result(connectivity_result: ConnectivityResult):
    """Print one terminal-to-terminal connection, then each phase path right-aligned under it."""
    print(f"\t{connectivity_result.from_terminal} to {connectivity_result.to_terminal}")
    # Pad the from-phase to the width of the terminal name so the phase arrows line up.
    terminal_str_len = len(str(connectivity_result.from_terminal))
    for core_path in connectivity_result.nominal_phase_paths:
        print(f"\t{core_path.from_phase.name:>{terminal_str_len}}----{core_path.to_phase.name}")


def fancy_print_connected_equipment(equipment: ConductingEquipment, phases=None):
    """Print every connectivity result for `equipment`, optionally restricted to `phases`."""
    if phases:
        print(f"Connectivity results for {equipment} on phases {phases}:")
    else:
        print(f"Connectivity results for {equipment}:")
    for connectivity_result in connected_equipment(equipment, phases):
        fancy_print_connectivity_result(connectivity_result)
    print()


# connected_equipment(equipment, phases) will get all connections between equipment cores matching one of the requested phases.
# The connected equipment does not need to connect via all specified phases to appear in the list of connectivity results.
fancy_print_connected_equipment(tx)
fancy_print_connected_equipment(tx, phases=PhaseCode.N)
fancy_print_connected_equipment(breaker, phases=PhaseCode.BC)

# connected_terminals is essentially connected_equipment where only one terminal is considered.
print(f"Connectivity results for terminal {lv_fuse_t2} on phases {PhaseCode.ACN}:")
for connectivity_result in connected_terminals(lv_fuse_t2, PhaseCode.ACN):
    fancy_print_connectivity_result(connectivity_result)
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/examining_connectivity.py
examining_connectivity.py
import asyncio
import logging

import pandapower as pp
from pp_creators.basic_creator import BasicPandaPowerNetworkCreator
from zepben.evolve import set_direction, NetworkService, Terminal, EnergySource

from zepben.examples.ieee_13_node_test_feeder import network

logger = logging.getLogger(__name__)


async def main():
    """Translate the IEEE 13-node test feeder to a pandapower model and run a load flow study."""
    # The translator needs an energy source and feeder directions before it can build buses/branches.
    add_energy_source(network, network["br_650_t1"])
    await set_direction().run(network)
    bbn_creator = BasicPandaPowerNetworkCreator(
        logger=logger,
        ec_load_provider=lambda ec: (5000, 0)  # Model each energy consumer with a 5kW nonreactive load
    )
    result = await bbn_creator.create(network)
    print(f"Translation successful: {result.was_successful}")
    print(result.network)
    print()
    # Dump the input tables of the translated pandapower network.
    print("bus table:")
    print(result.network["bus"])
    print()
    print("load table:")
    print(result.network["load"])
    print()
    print("ext_grid table:")
    print(result.network["ext_grid"])
    print()
    print("line table:")
    print(result.network["line"])
    print()
    print("trafo table:")
    print(result.network["trafo"])
    print()
    print("line_geodata table:")
    print(result.network["line_geodata"])
    print()
    print("Running load flow study...", end="")
    pp.runpp(result.network)
    print("done.")
    print()
    print(result.network)
    print()
    # Dump the result tables produced by the load flow run.
    print("res_bus table:")
    print(result.network["res_bus"])
    print()
    print("res_line table:")
    print(result.network["res_line"])
    print()
    print("res_trafo table:")
    print(result.network["res_trafo"])
    print()
    print("res_ext_grid table:")
    print(result.network["res_ext_grid"])
    print()
    print("res_load table:")
    print(result.network["res_load"])
    print()


def add_energy_source(network: NetworkService, connect_to_terminal: Terminal):
    """Create an EnergySource and connect it to `connect_to_terminal`, matching its phases and base voltage."""
    bv = connect_to_terminal.conducting_equipment.base_voltage
    es_t = Terminal(phases=connect_to_terminal.phases)
    es = EnergySource(terminals=[es_t], base_voltage=bv)
    network.add(es_t)
    network.add(es)
    network.connect_terminals(es_t, connect_to_terminal)


if __name__ == "__main__":
    asyncio.run(main())
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/translating_to_pandapower_model.py
translating_to_pandapower_model.py
import json
import os
from typing import List

import requests
from graphqlclient import GraphQLClient
from zepben.auth import get_token_fetcher

# This example utilises the EWB GraphQL APIs to fetch the network hierarchy from the server and
# then create a Powerfactory model by selecting components of the hierarchy to use.
# To use, populate the below variables with your desired targets plus the server and auth settings.

# Set of mRID/names of targets, leaving any target blank will exclude that level of hierarchy if it's the highest level
# Names are visible through the hierarchy viewer in the UI - or you can do a getNetworkHierarchy GraphQL query as per the below.
target_zone_substation = {"zonesub-mRID-or-name"}
target_feeder = {"feeder-mRID-or-name"}
target_lv = {"lvfeeder-mRID-or-name"}

# resulting PFD file name
file_name = "test_file"
output_dir = "path to output dir"

# graphQL endpoint access settings
network_endpoint = 'https://{url}/api/network/graphql'
api_endpoint = 'https://{url}/api/graphql'
audience = "https://{url}/"
issuer_domain = "issuer_domain"
client_id = 'client_id'
username = 'username'
password = 'password'

### EXAMPLE QUERY ONLY ###
# This is an example GraphQL query for the full network hierarchy. This is not used as part of this code, and is purely illustrative.
# See below functions for actual queries used.
'''
query network {
  getNetworkHierarchy {
    geographicalRegions {
      mRID
      name
      subGeographicalRegions {
        mRID
        name
        substations {
          mRID
          name
          feeders {
            mRID
            name
            normalEnergizedLvFeeders {
              mRID
              name
            }
          }
        }
      }
    }
  }
}
'''


def _fetch_token():
    """Authenticate against the configured issuer and return the bearer token used for all API calls."""
    token_fetcher = get_token_fetcher(audience=audience, issuer_domain=issuer_domain, client_id=client_id, username=username, password=password)
    return token_fetcher.fetch_token()


def request_pf_model_for_a_zone_with_hv_lv():
    """Request a Powerfactory model covering ZoneSub -> Feeder -> lvFeeder for the configured targets."""
    tft = _fetch_token()
    target = []
    # Request for ZoneSub -> Feeder -> lvFeeder
    body = '''
    query network {
      getNetworkHierarchy {
        substations {
          mRID
          name
          feeders {
            mRID
            name
            normalEnergizedLvFeeders {
              mRID
              name
            }
          }
        }
      }
    }
    '''
    if check_if_currently_generating_a_model(tft):
        result = retrieve_network_hierarchy(body, tft)
        target = get_target(target, result)
        model_id = request_pf_model(target, file_name, tft)
        print("Power factory model creation requested, model id: " + model_id)
    else:
        print("Warning: Still generating previous model, current model will not be generated.")


def request_pf_model_for_a_zone_with_hv_only():
    """Request a Powerfactory model covering ZoneSub -> Feeder only (no LV) for the configured targets."""
    tft = _fetch_token()
    target = []
    # Request for ZoneSub -> Feeder
    body = '''
    query network {
      getNetworkHierarchy {
        substations {
          mRID
          name
          feeders {
            mRID
            name
          }
        }
      }
    }
    '''
    if check_if_currently_generating_a_model(tft):
        result = retrieve_network_hierarchy(body, tft)
        target = get_target(target, result)
        model_id = request_pf_model(target, file_name, tft)
        print("Power factory model creation requested, model id: " + model_id)
    else:
        print("Warning: Still generating previous model, current model will not be generated.")


def retrieve_network_hierarchy(body, tft):
    """Execute the network hierarchy GraphQL query `body` and return the parsed JSON response."""
    client = GraphQLClient(network_endpoint)
    client.inject_token(tft)
    result = client.execute(body)
    return json.loads(result)


def get_target(target, result):
    """Accumulate into `target` the mRIDs of zone subs (and their feeders/LV feeders) matching the configured filters."""
    if len(target_zone_substation) == 0:
        # No Zone sub was specified thus no zone sub will be added to target
        for zone_sub in result['data']['getNetworkHierarchy']["substations"]:
            target = get_feeder(target, zone_sub)
    else:
        # Match zone subs by either mRID or display name.
        queried_zone_sub = [x for x in result['data']['getNetworkHierarchy']["substations"]
                            if x['mRID'] in target_zone_substation or x['name'] in target_zone_substation]
        for zone_sub in queried_zone_sub:
            target.append(zone_sub['mRID'])
            target = get_feeder(target, zone_sub)
    return target


def get_feeder(target, zone_sub):
    """Append to `target` the feeders of `zone_sub` that match the configured feeder filter, then recurse into LV."""
    if 'feeders' in zone_sub.keys():
        if len(target_feeder) == 0:
            for feeder in zone_sub['feeders']:
                # Only include the feeder itself when a zone sub was explicitly targeted.
                if len(target_zone_substation) != 0:
                    target.append(feeder['mRID'])
                target = get_lvfeeder(target, feeder)
        # Path to include only specific feeders
        else:
            queried_feeder = [x for x in zone_sub['feeders'] if x['mRID'] in target_feeder or x['name'] in target_feeder]
            for feeder in queried_feeder:
                target.append(feeder['mRID'])
                target = get_lvfeeder(target, feeder)
    return target


def get_lvfeeder(target, feeder):
    """Append to `target` the LV feeders of `feeder` that match the configured LV filter."""
    if 'normalEnergizedLvFeeders' in feeder.keys():
        # Path to include all lvFeeders
        if len(target_lv) == 0:
            for lv in feeder['normalEnergizedLvFeeders']:
                target.append(lv['mRID'])
        # Path to include only specific lvFeeders
        else:
            queried_lv = [x for x in feeder['normalEnergizedLvFeeders'] if x['mRID'] in target_lv or x['name'] in target_lv]
            for lv in queried_lv:
                target.append(lv['mRID'])
    return target


def request_pf_model(equipment_container_list: List[str], filename: str, tft: str):
    """
    Performs the GraphQL request to create the Powerfactory model for the provided list of equipment containers.

    :param equipment_container_list: List of EquipmentContainer mRIDs to include in the Powerfactory model.
    :param filename: Desired PFD filename
    :param tft: Bearer token to use for auth
    :return: The id of the model being created, as reported by the server.
    """
    client = GraphQLClient(api_endpoint)
    client.inject_token(tft)
    # Set isPublic to false if you only want the specific user to see the model
    body = '''
    mutation createNetModel($input: NetModelInput!) {
      createNetModel(input: $input)
    }
    '''
    variables = {'input': {
        'name': filename,
        'generationSpec': {'equipmentContainerMrids': equipment_container_list,
                           'distributionTransformerConfig': {
                               'rGround': 0.01,
                               'xGround': 0.01
                           }
                           },
        # NOTE(review): the string 'true' is sent rather than boolean True — confirm the API coerces it.
        'isPublic': 'true'}}
    result = client.execute(body, variables)
    return json.loads(result)['data']['createNetModel']


def check_if_currently_generating_a_model(tft):
    """
    Return True when it is safe to request a new model, i.e. no model in the most recent page is in the CREATION state.

    NOTE(review): only the 10 most recent models are inspected (limit: 10); an older in-flight model would be missed.
    """
    body = '''
    query pagedNetModels(
        $limit: Int!
        $offset: Long!
        $filter: GetNetModelsFilterInput
        $sort: GetNetModelsSortCriteriaInput
    ) {
        pagedNetModels(
            limit: $limit
            offset: $offset
            filter: $filter
            sort: $sort
        ) {
            totalCount
            offset
            netModels {
                id
                name
                createdAt
                state
                errors
            }
        }
    }
    '''
    variables = {
        "limit": 10,
        "offset": 0,
        "filter": {}
    }
    client = GraphQLClient(api_endpoint)
    client.inject_token(tft)
    result = client.execute(body, variables)
    entries = json.loads(result)
    for entry in entries['data']['pagedNetModels']['netModels']:
        if entry['state'] == 'CREATION':
            return False
    return True


def download_model(model_number):
    """Download the completed Powerfactory model with id `model_number` and save it as a .pfd file in `output_dir`."""
    tft = _fetch_token()
    # Request model
    model_url = api_endpoint.replace("graphql", "net-model/") + str(model_number)
    body = '''
    query netModelById($modelId: ID!) {
      netModelById(modelId: $modelId) {
        id
        name
        createdAt
        state
        generationSpec {
          equipmentContainerMrids
          distributionTransformerConfig {
            rGround
            xGround
          }
        }
        isPublic
        errors
      }
    }
    '''
    variables = {
        "modelId": model_number,
    }
    client = GraphQLClient(api_endpoint)
    client.inject_token(tft)
    result = json.loads(client.execute(body, variables))
    model_status = result['data']['netModelById']['state']
    if model_status == "COMPLETED":
        # NOTE(review): assumes fetch_token() returns the full Authorization header value — confirm "Bearer " prefix handling.
        model = requests.get(model_url, headers={'Authorization': tft})
        # BUG FIX: the original leaked the file handle via open(...).write(...); use a context manager.
        with open(os.path.join(output_dir, file_name) + ".pfd", 'wb') as pfd_file:
            pfd_file.write(model.content)
        print(file_name + ".pfd saved at " + output_dir)
    elif model_status == "CREATION":
        print("Model is still being created, please download at a later time")
    elif model_status == "FAILED":
        print("Model creation error: " + str(result['data']['netModelById']['errors']))


if __name__ == "__main__":
    # Generate model with lv
    request_pf_model_for_a_zone_with_hv_lv()
    # Generate model without lv
    request_pf_model_for_a_zone_with_hv_only()
    # Download a model via model number
    download_model(123)
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/request_power_factory_models.py
request_power_factory_models.py
import json

from geojson import Feature, LineString, FeatureCollection, Point
from zepben.eas import Study, Result, GeoJsonOverlay, EasClient
from zepben.evolve import connect_insecure, SyncNetworkConsumerClient, AcLineSegment, EnergyConsumer

# A study is a geographical visualisation of data that is drawn on top of the network.
# This data is typically the result of a load flow simulation.
# Each study may contain multiple results: different visualisations that the user may switch between.
# For example, the first result may display per-unit voltage data, while the second result highlights overloaded equipment.
# Two results are created in this example study: one makes a heatmap of energy consumers and the other highlights LV lines and displays their length.
# Both Evolve App Server and Energy Workbench must be running for this example.
from zepben.protobuf.nc.nc_requests_pb2 import INCLUDE_ENERGIZED_LV_FEEDERS


def main():
    """Build a two-result study from an EWB feeder and upload it to the Evolve App Server."""
    # Fetch network model from Energy Workbench's gRPC service (see ../connecting_to_grpc_service.py for examples on different connection functions)
    grpc_channel = connect_insecure("<EWB hostname>", 50052)
    grpc_client = SyncNetworkConsumerClient(grpc_channel)
    grpc_client.get_equipment_container("<feeder-id>", include_energized_containers=INCLUDE_ENERGIZED_LV_FEEDERS)
    network = grpc_client.service

    # Make result that displays a heatmap of energy consumers.
    ec_geojson = []
    for ec in network.objects(EnergyConsumer):
        if ec.location is not None:
            coord = list(ec.location.points)[0]
            ec_feature = Feature(
                id=ec.mrid,
                geometry=Point((coord.x_position, coord.y_position))
            )
            ec_geojson.append(ec_feature)
    ec_result = Result(
        name="Energy Consumers",
        geo_json_overlay=GeoJsonOverlay(
            data=FeatureCollection(ec_geojson),
            styles=["ec-heatmap"]  # Select which Mapbox layers to show for this result
        )
    )

    # Make result that highlights LV lines. Each result is a named GeoJSON overlay.
    lv_lines_geojson = []
    for line in network.objects(AcLineSegment):
        if line.base_voltage_value <= 1000 and line.location is not None:
            line_feature = Feature(
                id=line.mrid,
                geometry=LineString([(p.x_position, p.y_position) for p in line.location.points]),
                properties={
                    "length": line.length  # Numeric and textual data may be added here. It will be displayed and formatted according to the style(s) used.
                }
            )
            lv_lines_geojson.append(line_feature)
    lv_lines_result = Result(
        name="LV Lines",
        geo_json_overlay=GeoJsonOverlay(
            data=FeatureCollection(lv_lines_geojson),
            styles=["lv-lines", "lv-lengths"]  # Select which Mapbox layers to show for this result
        )
    )

    # BUG FIX: the original used json.load(open("style.json", "r")), leaking the file handle.
    with open("style.json", "r") as style_file:
        style_layers = json.load(style_file)

    # Create and upload the study.
    study = Study(
        name="Example Study",
        description="Example study with two results.",
        tags=["example"],  # Tags make it easy to search for studies in a large list of them.
        results=[ec_result, lv_lines_result],
        styles=style_layers  # This is the "layers" property of a Mapbox GL JS style.
        # Layers specify how features are rendered. For more information about layers, read https://docs.mapbox.com/mapbox-gl-js/style-spec/layers/.
        # Each layer may have an entry in the legend via the metadata["zb:legend"] field.
    )
    eas_client = EasClient(
        # Replace these values with the host/port and credentials for the instance of EAS you would like to upload the study to.
        host="<EAS hostname>",
        port=7654,
        client_id="<client-id>",
        username="<username or email>",
        password="<password>"
    )
    print("Uploading study...")
    eas_client.upload_study(study)
    print("Study uploaded! Please check the Evolve Web App.")
    eas_client.close()


if __name__ == "__main__":
    main()
zepben.examples
/zepben.examples-0.2.0b7-py3-none-any.whl/zepben/examples/studies/creating_and_uploading_study.py
creating_and_uploading_study.py
import json
from datetime import date
from typing import List, Dict

import aiohttp

__all__ = ["EwbLoadShapeInfoProvider"]

from aiohttp import ClientSession

from zepben.opendss import LoadShapeInfoProvider, LoadShapeInfo

_load_api_date_format = "%Y-%m-%d"


class EwbLoadShapeInfoProvider(LoadShapeInfoProvider):
    """
    Load shape provider backed by EWB's energy-profile HTTP API.

    Fetches kwNet readings for a piece of conducting equipment over a date range and
    normalises them into a `LoadShapeInfo`, patching out runs of zero readings.
    """

    def __init__(self, session: ClientSession = None, base_url=None, json_serialiser=None):
        """
        :param session: An existing aiohttp ClientSession to use; if omitted, one is created from `base_url`.
        :param base_url: Host and port of EWB only — required when `session` is not provided.
        :param json_serialiser: Optional JSON serialiser for the created session (defaults to json.dumps).
        """
        if not session:
            if not base_url:
                raise ValueError("base_url must be provided if not providing a session - it should be the host and port of EWB only")
            conn = aiohttp.TCPConnector(limit=200, limit_per_host=0)
            timeout = aiohttp.ClientTimeout(total=60)
            self.session = aiohttp.ClientSession(base_url=base_url, json_serialize=json_serialiser if json_serialiser is not None else json.dumps,
                                                 connector=conn, timeout=timeout)
        else:
            self.session = session

    async def get_load_shape_info(self, conducting_equipment_mrid: str, from_date: date, to_date: date) -> LoadShapeInfo:
        """
        Fetch the load profile for `conducting_equipment_mrid` between `from_date` and `to_date`
        and return a `LoadShapeInfo` of readings normalised by the maximum absolute value.

        Zero readings are treated as gaps and patched: full zero days are replaced by the
        previous day's readings, and shorter zero runs are linearly interpolated.
        NOTE(review): readings appear to be half-hourly — 48 per day, hence the 48/95/96
        constants below; confirm against the profile API.
        """
        load_result = await self._get_load_profile(conducting_equipment_mrid, from_date, to_date)
        max_abs_val = 0
        zero_count = 0
        days_replaced = 0  # NOTE(review): counted but never read — kept for debugging?
        values = []
        for result in load_result:
            for series in result["series"]:
                for series_item in series:
                    for reading in series_item['energy']["readings"]:
                        val = reading["values"]["kwNet"]
                        abs_val = abs(val)
                        if abs_val > max_abs_val:
                            max_abs_val = abs_val
                        values.append(val)
                        if val == 0:
                            zero_count += 1
                            # Once 48 reading of 0 is accumulated, it is replaced with previous day's readings
                            if zero_count == 48 and zero_count != len(values) and len(values) > 95:
                                for c in range(48):
                                    values[-(48 - c)] = values[-(96 - c)]
                                days_replaced += 1
                                zero_count = 0
                        else:
                            # This route will patch non full day zero readings just in case
                            # This route replace all 0 at the start of loadshape with first reading
                            if zero_count > 0 and zero_count == (len(values) - 1):
                                for i in range(zero_count):
                                    values[-(i+2)] = values[-1]
                            elif zero_count > 0:
                                # Linear interpolation between the last good reading before the
                                # zero run and the current (non-zero) reading.
                                difference = (float(values[-1]) - float(values[-(zero_count + 2)])) / (zero_count + 1)
                                for i in range(zero_count):
                                    values[-(zero_count + 1 - i)] = float(values[-(zero_count + 2 - i)]) + difference
                            zero_count = 0
        # Fix the rest of the zero values at the end of the load shape by duping the same amount of entries prior
        # Last condition is to avoid fixing 0's when we don't have at least one full day of data to copy from.
        # TODO: what a hack, but who cares. it's 95 because on the second day we should always have 96 values. this applies above as well.
        if zero_count > 0 and zero_count != len(values) and len(values) > 95:
            try:
                # NOTE(review): `i` here shadows the earlier loop variable, and the IndexError
                # is deliberately swallowed (best-effort patching).
                for i in range(zero_count):
                    values[-(i+1)] = values[-(zero_count+i+1)]
            except IndexError as i:
                pass
            zero_count = 0
        # Scale readings into [-1, 1] relative to the peak; a flat 1 is used when there is no peak.
        return LoadShapeInfo(max_abs_val, 1.0, [1 if max_abs_val == 0 else v / max_abs_val for v in values], 0.5)

    async def _get_load_profile(self, from_asset_mrid: str, from_date: date, to_date: date) -> List[Dict]:
        """GET the raw profile 'results' list from the EWB energy-profile API; empty list on any non-200 response."""
        url = f'/ewb/energy/profiles/api/v1/range/{from_asset_mrid}/from-date/{from_date.isoformat()}/to-date/{to_date.isoformat()}'
        async with self.session.get(url=url) as response:
            return (await response.json())["results"] if response.status == 200 else []
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/ewb/load/load_result.py
load_result.py
import logging
from functools import cmp_to_key
from random import choice
from typing import FrozenSet, Tuple, List, Optional, Callable, Dict

from zepben.evolve import Terminal, NetworkService, AcLineSegment, PowerTransformer, EnergyConsumer, \
    PowerTransformerEnd, ConductingEquipment, \
    PowerElectronicsConnection, BusBranchNetworkCreator, EnergySource, Switch, Junction, BusbarSection, PerLengthSequenceImpedance, EquivalentBranch, \
    TransformerFunctionKind, WireInfo

from zepben.opendss import BusConnection, Bus
from zepben.opendss import ConnectionPoint
from zepben.opendss import LineCode, Circuit, Line, Load, NetworkModel, Transformer, TransformerWinding
from zepben.opendss.creators.utils import id_from_identified_objects, get_bus_nodes, cmp_end_tn_by_t_direction, \
    get_voltage_kv, tx_rating, vreg_from_nominal_v, is_swer_tx, is_dist_tx, create_swer_tx, \
    create_tx
from zepben.opendss.creators.validators.validator import OpenDssNetworkValidator
from zepben.opendss.model.network.reg_control import RegControl

__all__ = ["OpenDssNetworkCreator", "id_from_identified_objects"]


class OpenDssNetworkCreator(
    BusBranchNetworkCreator[NetworkModel, Bus, Line, Line, Transformer, Circuit, Load, Load, OpenDssNetworkValidator]
):
    """
    A `BusBranchNetworkCreator` that maps a zepben node-breaker `NetworkService`
    onto an OpenDSS `NetworkModel` (buses, lines, transformers, source circuit,
    connection points).
    """

    def __init__(
            self, *,
            logger: logging.Logger,
            vm_pu: float = 1.0,
            load_provider: Callable[[ConductingEquipment], Tuple[float, float]] = lambda x: (0, 0),
            pec_load_provider: Callable[[ConductingEquipment], Tuple[float, float]] = lambda x: (0, 0),
            min_line_r_ohm: float = 0.001,
            min_line_x_ohm: float = 0.001
    ):
        """
        :param logger: Destination for creation/validation messages.
        :param vm_pu: Per-unit voltage magnitude used for the source circuit.
        :param load_provider: Maps equipment to a (P, Q) pair; defaults to no load.
        :param pec_load_provider: Maps power-electronics connections to a (P, Q) pair; defaults to no load.
        :param min_line_r_ohm: Lines whose total resistance is below this are treated as negligible impedance.
        :param min_line_x_ohm: Lines whose total reactance is below this are treated as negligible impedance.
        """
        # -- input --
        self.vm_pu = vm_pu
        self.logger = logger
        self.load_provider = load_provider
        self.pec_load_provider = pec_load_provider
        self.min_line_r_ohm = min_line_r_ohm
        self.min_line_x_ohm = min_line_x_ohm

    def bus_branch_network_creator(self, node_breaker_network: NetworkService) -> NetworkModel:
        """Create the empty target model (50 Hz default base frequency)."""
        network = NetworkModel(default_base_frequency=50)
        return network

    def topological_node_creator(
            self,
            bus_branch_network: NetworkModel,
            base_voltage: Optional[int],
            collapsed_conducting_equipment: FrozenSet[ConductingEquipment],
            border_terminals: FrozenSet[Terminal],
            inner_terminals: FrozenSet[Terminal],
            node_breaker_network: NetworkService
    ) -> Tuple[str, Bus]:
        """Create one Bus per topological node, taking its nodes from the border terminal with the most phases."""
        uid = id_from_identified_objects(border_terminals)
        max_phases_terminal = max((t for t in border_terminals), key=lambda t: len(t.phases.single_phases))
        bus = Bus(uid=uid, nodes=get_bus_nodes(max_phases_terminal))
        bus_branch_network.add_bus(bus)
        return uid, bus

    def topological_branch_creator(
            self,
            bus_branch_network: NetworkModel,
            connected_topological_nodes: Tuple[Bus, Bus],
            length: Optional[float],
            collapsed_ac_line_segments: FrozenSet[AcLineSegment],
            border_terminals: FrozenSet[Terminal],
            inner_terminals: FrozenSet[Terminal],
            node_breaker_network: NetworkService
    ) -> Tuple[str, Line]:
        """
        Create a Line for a collapsed run of AcLineSegments.

        Impedance comes from a shared LineCode keyed on wire info + per-length impedance;
        the first collapsed segment is taken as representative of the run.
        """
        ac_line = next(iter(collapsed_ac_line_segments))
        # Connect on the node set of the bus with the fewest phases.
        connected_nodes = min(connected_topological_nodes, key=lambda b: len(b.nodes)).nodes
        line_code = self._get_create_line_code(bus_branch_network, ac_line.per_length_sequence_impedance, ac_line.wire_info, len(connected_nodes))
        uid = id_from_identified_objects(collapsed_ac_line_segments)
        line = Line(
            uid=uid,
            units="m",
            length=0.5 if length is None else length,  # fallback length when unknown
            bus_conn1=BusConnection(connected_topological_nodes[0], connected_nodes),
            bus_conn2=BusConnection(connected_topological_nodes[1], connected_nodes),
            line_code=line_code
        )
        bus_branch_network.add_line(line)
        return uid, line

    @staticmethod
    def _get_create_line_code(
            bus_branch_network: NetworkModel,
            per_length_sequence_impedance: PerLengthSequenceImpedance,
            wire_info: WireInfo,
            nphases: int
    ) -> LineCode:
        """Return the cached LineCode for this impedance/wire-info/phase-count combination, creating it on first use."""
        uid = f"{wire_info.mrid}-{per_length_sequence_impedance.mrid}-{nphases}W"
        line_code = bus_branch_network.line_codes.get(uid)
        if line_code is not None:
            return line_code
        line_code = LineCode(
            uid=uid,
            units="m",
            nphases=nphases,
            r1=per_length_sequence_impedance.r,
            r0=per_length_sequence_impedance.r0,
            x1=per_length_sequence_impedance.x,
            x0=per_length_sequence_impedance.x0,
            # Susceptance scaled by 1e6 (presumably S -> uS - TODO confirm); 0 when absent.
            b1=0.0 if per_length_sequence_impedance.bch is None else per_length_sequence_impedance.bch * 1000000,
            b0=0.0 if per_length_sequence_impedance.b0ch is None else per_length_sequence_impedance.b0ch * 1000000,
            norm_amps=wire_info.rated_current,
            emerg_amps=wire_info.rated_current * 1.5  # emergency rating fixed at 150% of normal
        )
        bus_branch_network.add_line_code(line_code)
        return line_code

    def equivalent_branch_creator(self, bus_branch_network: NetworkModel, connected_topological_nodes: List[Bus], equivalent_branch: EquivalentBranch,
                                  node_breaker_network: NetworkService) -> Tuple[str, Line]:
        """EquivalentBranches are not representable in this model - always raises RuntimeError."""
        raise RuntimeError(
            f"The creation of EquivalentBranches is not supported by the OpenDssNetworkCreator."
            f" Tried to create EquivalentBranches {equivalent_branch.mrid}.")

    def power_transformer_creator(
            self,
            bus_branch_network: NetworkModel,
            power_transformer: PowerTransformer,
            ends_to_topological_nodes: List[Tuple[PowerTransformerEnd, Optional[Bus]]],
            node_breaker_network: NetworkService
    ) -> Dict[str, Transformer]:
        """
        Create Transformer element(s) for a PowerTransformer.

        Voltage regulators are expanded into one single-phase Transformer + RegControl pair
        per connected node; all other transformers become a single Transformer (SWER
        distribution transformers get special treatment via `create_swer_tx`).
        """
        uid = power_transformer.mrid
        # Rating in kVA; 234000.0 VA is the fallback when no rating is available on the ends or catalogue.
        rating_kva = tx_rating(power_transformer, 234000.0) / 1000.0
        if power_transformer.function is TransformerFunctionKind.voltageRegulator:
            # TODO: this is done to figure out the end to use for the reg_controller as the end number is non-deterministic
            #  for regulators with our current data processing, once we make the bus-branch creator functionality sort terminals
            #  from upstream to downstream this should not be needed anymore.
            ends_to_topological_nodes = sorted(ends_to_topological_nodes, key=cmp_to_key(cmp_end_tn_by_t_direction))
            transformers_and_reg_controllers = {}
            # NOTE(review): assumes every end has a non-None bus here - a disconnected end would raise.
            nodes = max((bus for end, bus in ends_to_topological_nodes), key=lambda b: len(b.nodes)).nodes
            rating_kva = 1500 if rating_kva < 1000 else rating_kva  # floor regulator ratings at 1500 kVA
            for node in nodes:
                # One single-phase transformer per node, wye-connected, with fixed loss/reactance figures.
                transformer = Transformer(
                    uid=f"{uid}_{str(node)}",
                    phases=1,
                    load_loss_percent=0.002,
                    xhl=0.007,
                    xht=None,
                    xlt=None,
                    windings=[TransformerWinding(
                        conn="wye",
                        kv=get_voltage_kv(end.rated_u, {node}, True),
                        kva=rating_kva,
                        bus_conn=BusConnection(bus, {node})
                    ) for end, bus in ends_to_topological_nodes]
                )
                bus_branch_network.add_transformer(transformer)
                transformers_and_reg_controllers[transformer.uid] = transformer

                # Controller regulates the last winding; setpoint derived from the first end's nominal voltage.
                reg_control = RegControl(
                    uid=f"{uid}_controller_{str(node)}",
                    transformer=transformer,
                    winding=len(transformer.windings),
                    vreg=vreg_from_nominal_v(list(power_transformer.ends)[0].nominal_voltage),
                    band=2,
                    ptratio=100,
                    ctprim=700,
                    r=2,
                    x=7
                )
                bus_branch_network.add_reg_control(reg_control)
                transformers_and_reg_controllers[reg_control.uid] = reg_control
            return transformers_and_reg_controllers
        else:
            is_swer = is_swer_tx(power_transformer)
            is_dist = is_dist_tx(power_transformer)
            # Phase count: smallest node count across connected ends, collapsed to 1 or 3.
            num_phases = min([len(get_bus_nodes(end.terminal)) for end, t in ends_to_topological_nodes if end.terminal is not None])
            num_phases = 1 if num_phases < 3 else 3
            ends_to_topological_nodes = sorted(ends_to_topological_nodes, key=lambda end_tn: end_tn[0].end_number)
            if is_swer and is_dist:
                transformer = create_swer_tx(power_transformer, num_phases, rating_kva, ends_to_topological_nodes)
            else:
                transformer = create_tx(power_transformer, num_phases, rating_kva, ends_to_topological_nodes)
            bus_branch_network.add_transformer(transformer)
            return {transformer.uid: transformer}

    def energy_source_creator(
            self,
            bus_branch_network: NetworkModel,
            energy_source: EnergySource,
            connected_topological_node: Bus,
            node_breaker_network: NetworkService
    ) -> Dict[str, Circuit]:
        """
        Create the single source Circuit for the model.

        :raise RuntimeError: If a Circuit already exists (only one EnergySource is supported).
        """
        if bus_branch_network.circuit is not None:
            raise RuntimeError("Found multiple EnergySources while trying to create OpenDss model. Only one energy source is supported.")
        uid = energy_source.name
        # Setting defaults if any of the value here is None
        es_rn = energy_source.rn if energy_source.rn else energy_source.r
        es_xn = energy_source.xn if energy_source.xn else energy_source.x
        es_r0 = energy_source.r0 if energy_source.r0 else 0.39
        es_x0 = energy_source.x0 if energy_source.x0 else 3.9
        circuit = Circuit(
            uid=uid,
            bus_conn=BusConnection(connected_topological_node, connected_topological_node.nodes),
            pu=self.vm_pu,
            base_kv=get_voltage_kv(energy_source.base_voltage.nominal_voltage, connected_topological_node.nodes),
            phases=len(connected_topological_node.nodes),
            rpos=energy_source.r,
            xpos=energy_source.x,
            rneg=es_rn,
            xneg=es_xn,
            rzero=es_r0,
            xzero=es_x0,
        )
        bus_branch_network.set_circuit(circuit)
        return {circuit.uid: circuit}

    def energy_consumer_creator(
            self, bus_branch_network: NetworkModel,
            energy_consumer: EnergyConsumer,
            connected_topological_node: Bus,
            node_breaker_network: NetworkService
    ) -> Dict[str, Load]:
        """
        Create a ConnectionPoint for an EnergyConsumer.

        NOTE(review): for two-node buses one node is picked at random (`choice`), making
        output non-deterministic; the declared Load return type does not match the
        ConnectionPoint actually returned - confirm against the base-class contract.
        """
        uid = energy_consumer.mrid
        nodes = {choice([n for n in connected_topological_node.nodes])} if len(connected_topological_node.nodes) == 2 else connected_topological_node.nodes
        connection_point = ConnectionPoint(
            uid=uid,
            bus_conn=BusConnection(connected_topological_node, nodes),
            kv=get_voltage_kv(energy_consumer.base_voltage.nominal_voltage, nodes),
            phases=len(nodes)
        )
        bus_branch_network.add_connection_point(connection_point)
        return {uid: connection_point}

    def power_electronics_connection_creator(
            self,
            bus_branch_network: NetworkModel,
            power_electronics_connection: PowerElectronicsConnection,
            connected_topological_node: Bus,
            node_breaker_network: NetworkService,
    ) -> Dict[str, Load]:
        """Power-electronics connections are not modelled; returns a placeholder None entry keyed by mrid."""
        uid = power_electronics_connection.mrid
        return {uid: None}

    def has_negligible_impedance(self, ce: ConductingEquipment) -> bool:
        """
        Decide whether `ce` should be collapsed into a topological node.

        Lines are negligible when zero-length, zero-resistance, or below the configured
        R/X thresholds; closed switches, junctions, busbars and equivalent branches are
        always negligible.
        """
        if isinstance(ce, AcLineSegment):
            if ce.length == 0 or ce.per_length_sequence_impedance.r == 0:
                return True
            if ce.length * ce.per_length_sequence_impedance.r < self.min_line_r_ohm \
                    or ce.length * ce.per_length_sequence_impedance.x < self.min_line_x_ohm:
                return True
            return False
        if isinstance(ce, Switch):
            return not ce.is_open()
        if isinstance(ce, Junction) or isinstance(ce, BusbarSection) or isinstance(ce, EquivalentBranch):
            return True
        return False

    def validator_creator(self) -> OpenDssNetworkValidator:
        """Create the validator used during bus-branch creation, sharing this creator's logger."""
        return OpenDssNetworkValidator(logger=self.logger)
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/creators/creator.py
creator.py
__all__ = ["transformer_end_connection_mapper", "id_from_identified_objects", "get_bus_nodes", "tx_rating", "vreg_from_nominal_v", "get_voltage_kv",
           "load_loss_percent", "tx_bus_connection", "cmp_end_tn_by_t_direction", "closest_connected_nodes", "is_swer_tx", "is_dist_tx", "create_tx",
           "create_swer_tx"]

import random
from math import sqrt, log
from typing import TypeVar, Collection, Set, Union, Tuple, List

from zepben.evolve import PowerTransformerEnd, SinglePhaseKind, WindingConnection, IdentifiedObject, Terminal, PowerTransformer, FeederDirection

from zepben.opendss import Node, Bus, BusConnection, Transformer, TransformerWinding

T = TypeVar("T")


def transformer_end_connection_mapper(transformer_end: PowerTransformerEnd) -> str:
    """Map a CIM WindingConnection onto the OpenDSS connection string ('delta' or 'wye')."""
    if transformer_end.connection_kind == WindingConnection.D:
        return "delta"
    elif transformer_end.connection_kind == WindingConnection.Y:
        return "wye"
    else:
        # TODO: There are tons of windings missing here, if we throw for anything other than D and Y then this won't run on anywhere
        # Fallback: first end delta, all others wye.
        return "delta" if transformer_end.end_number == 1 else "wye"
        # raise Exception(f'WindingConnection {transformer_end.connection_kind} is not supported for '
        #                 f'TransformerEnd: {transformer_end.mrid}')


def id_from_identified_objects(ios: Collection[IdentifiedObject], separator: str = "__") -> str:
    """Build a deterministic uid by joining the sorted mrids of `ios` with `separator`."""
    return separator.join(sorted(io.mrid for io in ios))


# Mapping from CIM single-phase kind to OpenDSS node; phases outside A/B/C have no node.
spk_to_node = {
    SinglePhaseKind.A: Node.A,
    SinglePhaseKind.B: Node.B,
    SinglePhaseKind.C: Node.C
}


def get_bus_nodes(t: Terminal) -> Set[Node]:
    """Return the OpenDSS nodes for a terminal's normally-traced phases (empty set for None or unmapped phases)."""
    if t is None:
        return set()
    return {n for n in {spk_to_node.get(t.traced_phases.normal(sp)) for sp in t.phases.single_phases} if n is not None}


def tx_rating(pt: PowerTransformer, default_rating: float = None):
    """
    Return a rating (VA) for `pt`: the last non-zero `rated_s` found on its ends, otherwise the
    last non-zero `rated_s` in its catalogue end infos, otherwise `default_rating` (may be None).
    """
    rating = None
    for end in pt.ends:
        if end.rated_s is not None and end.rated_s != 0:
            rating = end.rated_s
    if rating is None:
        if pt.power_transformer_info is not None:
            for tank_info in pt.power_transformer_info.transformer_tank_infos:
                for end_info in tank_info.transformer_end_infos:
                    if end_info.rated_s is not None and end_info.rated_s != 0:
                        rating = end_info.rated_s
    if rating is None and default_rating is not None:
        rating = default_rating
    return rating


# NOTE: Input Needs to be nominal voltage in Volts
def vreg_from_nominal_v(nominal_voltage: int):
    """
    Derive a regulator voltage setpoint from a nominal voltage (Volts).

    SWER line-to-ground voltages (19100/12700/6350) are first mapped to their
    line-to-line equivalents before scaling.
    """
    m = 0.0059090909
    ltg_ltl = {19100: 33000, 12700: 22000, 6350: 11000}
    return round(ltg_ltl.get(nominal_voltage, nominal_voltage) * m, 1)


def get_voltage_kv(base_voltage: Union[int, None], nodes: Union[Set[Node], None], force_line_to_ground: bool = False):
    """
    Convert a base voltage (Volts) to kV for OpenDSS.

    SWER voltages (19100/12700/6350) pass through unchanged; otherwise the voltage is
    divided by sqrt(3) when `force_line_to_ground` is set, or when a single-node LV
    (< 1000 V) connection is supplied. Returns 0.0 for a missing base voltage.
    """
    if base_voltage is None:
        return 0.0
    if base_voltage == 19100 or base_voltage == 12700 or base_voltage == 6350:
        return round(base_voltage / 1000, 3)
    if force_line_to_ground:
        return round(base_voltage / sqrt(3) / 1000.0, 3)
    return round((base_voltage / sqrt(3) if nodes is not None and len(list(nodes)) == 1 and base_voltage < 1000 else base_voltage) / 1000.0, 3)


def load_loss_percent(rating_kva: float):
    """
    Empirical load-loss percentage for a transformer rating (kVA), clamped to [0.5, 2.2];
    0.0 for an unrated (0 kVA) transformer.
    """
    if rating_kva == 0:
        return 0.0
    value = -0.288 * log(rating_kva) + 2.4293
    if value > 2.2:
        return 2.2
    elif value < 0.5:
        return 0.5
    else:
        return value


def tx_bus_connection(
        power_transformer: PowerTransformer,
        end: PowerTransformerEnd,
        num_phases: int,
        bus: Union[Bus, None]
):
    """
    Build the BusConnection for a transformer end, using a synthetic empty-node Bus when
    the end is disconnected (`bus` is None). Nodes come from the end's own terminal.
    """
    # The comprehension's `end` is scoped to the comprehension and does not clobber the parameter.
    end_to_nodes = {end.mrid: get_bus_nodes(end.terminal) for end in power_transformer.ends}
    nodes = end_to_nodes.get(end.mrid)
    return BusConnection(Bus(f"{power_transformer.mrid}-disconnected-end-{end.end_number}", nodes=set()) if bus is None else bus, nodes)


def cmp_end_tn_by_t_direction(end_tn1: Tuple[PowerTransformerEnd, Bus], end_tn2: Tuple[PowerTransformerEnd, Bus]):
    """
    Comparator ordering (end, bus) pairs so that an end whose terminal is normally
    UPSTREAM sorts first. For use with `functools.cmp_to_key`.
    """
    end1, tn1 = end_tn1
    end2, tn2 = end_tn2
    if tn1 is not None and end1 is not None:
        if end1.terminal.normal_feeder_direction.has(FeederDirection.UPSTREAM):
            return -1
    if tn2 is not None and end2 is not None:
        return 1
    return 0


def closest_connected_nodes(terminal: Terminal) -> Set[Node]:
    """
    Return the nodes of the last other terminal on `terminal`'s connectivity node,
    falling back to `terminal`'s own nodes when it is None, unconnected, or alone.
    """
    if terminal is None:
        return set()
    if terminal.connectivity_node is None:
        return get_bus_nodes(terminal)
    o_nodes = None
    for ot in terminal.connectivity_node.terminals:
        if ot != terminal:
            o_nodes = get_bus_nodes(ot)
    if o_nodes is None:
        return get_bus_nodes(terminal)
    else:
        return o_nodes


def is_swer_tx(pt: PowerTransformer) -> bool:
    """True if any end is at a SWER line-to-ground nominal voltage (19100/12700/6350 V)."""
    return any(end.nominal_voltage == 19100 or end.nominal_voltage == 12700 or end.nominal_voltage == 6350 for end in pt.ends)


def is_dist_tx(pt: PowerTransformer) -> bool:
    """True if any end is at LV (< 1000 V), i.e. the transformer supplies distribution customers."""
    return any(end.nominal_voltage < 1000 for end in pt.ends)


def create_tx(power_transformer: PowerTransformer, num_phases: int, rating_kva: float,
              ends_to_topological_nodes: List[Tuple[PowerTransformerEnd, Bus]]) -> Transformer:
    """Create a standard OpenDSS Transformer with one winding per end and fixed xhl of 4."""
    return Transformer(
        uid=power_transformer.mrid,
        phases=num_phases,
        load_loss_percent=load_loss_percent(rating_kva),
        xhl=4,
        xht=None,
        xlt=None,
        windings=[TransformerWinding(
            conn=transformer_end_connection_mapper(end),
            kv=get_voltage_kv(end.rated_u, bus.nodes if bus is not None else None),
            kva=rating_kva,
            bus_conn=tx_bus_connection(power_transformer, end, num_phases, bus)
        ) for end, bus in ends_to_topological_nodes]
    )


def create_swer_tx(power_transformer: PowerTransformer, num_phases: int, rating_kva: float,
                   ends_to_topological_nodes: List[Tuple[PowerTransformerEnd, Bus]]) -> Transformer:
    """
    Create a SWER distribution Transformer: the usual windings plus a duplicated secondary
    winding forming the split-phase LV side, both forced to 0.25 kV.
    """
    transformer = Transformer(
        uid=power_transformer.mrid,
        phases=num_phases,
        load_loss_percent=0.4,
        xhl=3.54,
        xht=3.54,
        xlt=2.36,
        windings=[TransformerWinding(
            conn=transformer_end_connection_mapper(end),
            kv=get_voltage_kv(end.rated_u, bus.nodes if bus is not None else None),
            kva=rating_kva,
            bus_conn=tx_bus_connection(power_transformer, end, num_phases, bus)
        ) for end, bus in ends_to_topological_nodes]
    )
    secondary = transformer.windings[len(transformer.windings) - 1]
    # TODO: We hard code 0.25kV for second and third winding on swer-dist transformer because essential wants this.
    #  This should be reviewed and removed in the future because hard-coding values this way independently of source data
    #  leads to non-reusable functionality between customers.
    secondary.kv = 0.25
    transformer.windings.append(
        TransformerWinding(
            secondary.conn,
            secondary.kv,
            secondary.kva,
            BusConnection(
                secondary.bus_conn.bus,
                # FIX: was `{}` (an empty dict) - the connections argument is a Set[Node] everywhere else.
                {Node.G, _get_other_node(next(iter(secondary.bus_conn.connections)))} if len(secondary.bus_conn.connections) != 0 else set(),
                validate=False
            )
        )
    )
    return transformer


def _get_other_node(node: Node):
    """Return a randomly chosen phase node (A/B/C) other than `node`."""
    nodes = {Node.A, Node.B, Node.C}
    nodes.remove(node)
    return random.choice([n for n in nodes])
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/creators/utils.py
utils.py
import logging
from typing import List, Tuple, Optional, FrozenSet

from zepben.evolve import BusBranchNetworkCreationValidator, NetworkService, EnergyConsumer, EnergySource, \
    PowerTransformer, PowerTransformerEnd, AcLineSegment, Terminal, \
    ConductingEquipment, PowerElectronicsConnection, EquivalentBranch

__all__ = ["OpenDssNetworkValidator"]

from zepben.opendss import Circuit, Line, Load, Transformer, Bus, NetworkModel


class OpenDssNetworkValidator(BusBranchNetworkCreationValidator[NetworkModel, Bus, Line, Line, Transformer, Circuit, Load, Load]):
    """
    Validation hooks applied while building an OpenDSS model from a node-breaker network.

    Most hooks accept everything; the only hard rejections are a topological node
    without a base voltage and a topological branch without a length.
    """

    logger: logging.Logger  # destination for validation messages

    def __init__(self, logger: logging.Logger):
        self.logger = logger

    def is_valid_network_data(self, node_breaker_network: NetworkService) -> bool:
        """The source network itself is always accepted."""
        return True

    def is_valid_topological_node_data(self, bus_branch_network: NetworkModel, base_voltage: Optional[int],
                                       collapsed_conducting_equipment: FrozenSet[ConductingEquipment],
                                       border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal],
                                       node_breaker_network: NetworkService) -> bool:
        """Reject (and log) a bus candidate that has no base voltage."""
        if base_voltage is not None:
            return True
        self.logger.error(
            f"Cannot create bus due to missing base voltage: {_format_topological_node(base_voltage, border_terminals, inner_terminals, collapsed_conducting_equipment)}")
        return False

    def is_valid_topological_branch_data(self, bus_branch_network: NetworkModel, connected_topological_nodes: Tuple[Bus, Bus],
                                         length: Optional[float], collapsed_ac_line_segments: FrozenSet[AcLineSegment],
                                         border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal],
                                         node_breaker_network: NetworkService) -> bool:
        """Reject branches with no length; zero-length branches are allowed but warned about."""
        segment_mrids = [acls.mrid for acls in collapsed_ac_line_segments]
        if length is None:
            self.logger.error(f"Cannot create branch due to missing length: {segment_mrids}")
            return False
        if length == 0:
            self.logger.warning(f"Branch with total length of 0: {segment_mrids}")
        return True

    def is_valid_equivalent_branch_data(self, bus_branch_network: NetworkModel, connected_topological_nodes: List[Bus],
                                        equivalent_branch: EquivalentBranch, node_breaker_network: NetworkService) -> bool:
        """Always accepted (creation itself rejects equivalent branches elsewhere)."""
        return True

    def is_valid_power_transformer_data(self, bus_branch_network: NetworkModel, power_transformer: PowerTransformer,
                                        ends_to_topological_nodes: List[Tuple[PowerTransformerEnd, Optional[Bus]]],
                                        node_breaker_network: NetworkService) -> bool:
        """Always accepted."""
        return True

    def is_valid_energy_source_data(self, bus_branch_network: NetworkModel, energy_source: EnergySource, connected_topological_node: Bus,
                                    node_breaker_network: NetworkService) -> bool:
        """Always accepted."""
        return True

    def is_valid_energy_consumer_data(self, bus_branch_network: NetworkModel, energy_consumer: EnergyConsumer, connected_topological_node: Bus,
                                      node_breaker_network: NetworkService) -> bool:
        """Always accepted."""
        return True

    def is_valid_power_electronics_connection_data(self, bus_branch_network: NetworkModel, power_electronics_connection: PowerElectronicsConnection,
                                                   connected_topological_node: Bus, node_breaker_network: NetworkService) -> bool:
        """Always accepted."""
        return True


def _format_topological_node(base_voltage: Optional[int], border_terminals: FrozenSet[Terminal], inner_terminals: FrozenSet[Terminal],
                             collapsed_conducting_equipment: FrozenSet[ConductingEquipment]) -> str:
    """Render the identifying parts of a topological node for log messages."""
    return (
        f"{{ base_voltage: {base_voltage}, "
        f"inner_terminals: {inner_terminals}, "
        f"border_terminals: {border_terminals}, "
        f"collapsed_equipment: {collapsed_conducting_equipment} }}"
    )
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/creators/validators/validator.py
validator.py
import os.path
import sys
from typing import List, Dict
import logging

import aiofiles as aiof
import aiohttp
import ujson
from zepben.auth.client import create_token_fetcher

logger = logging.getLogger(__name__)

__all__ = ["OpenDssLoadShapeWriter"]


class OpenDssLoadShapeWriter:
    """
    Fetches EWB energy profiles and writes them out as OpenDSS load-shape text files
    (one normalised value per line).
    """

    def __init__(self, output_dir: str, secure: bool = False, username: str = None, password: str = None, client_id: str = None, host: str = None):
        """
        :param output_dir: Root directory the load-shape files are written under.
        :param secure: When True, fetch an auth token from the EWB auth endpoint using the
                       password grant; the remaining parameters are then required.
        :param username: Auth username (secure mode only).
        :param password: Auth password (secure mode only).
        :param client_id: OAuth client id (secure mode only).
        :param host: EWB host used to build the auth URL (secure mode only).
        """
        self.secure = secure
        self.out_dir = output_dir
        if secure:
            authenticator = create_token_fetcher(f"https://{host}/ewb/auth")
            authenticator.token_request_data.update(
                {
                    "grant_type": "password",
                    "username": username,
                    "password": password,
                    "scope": "offline_access openid profile email",
                    "client_id": client_id
                }
            )
            authenticator.refresh_request_data.update({
                "grant_type": "refresh_token",
                "scope": "offline_access openid profile email",
                "client_id": client_id
            })
            self.token = authenticator.fetch_token()
        else:
            # Empty token => unauthenticated requests in get_load_profile.
            self.token = ''

    async def get_load_profile(self, from_asset: str, from_date: str, to_date: str, host: str, port: int) -> List[Dict]:
        """GET the energy-profile range for an asset; returns the `results` list, or [] on any non-200 response."""
        async with aiohttp.ClientSession(headers={'Authorization': self.token}, json_serialize=ujson.dumps) as session:
            async with session.get(url=
                                   f'{"https" if self.secure else "http"}://{host}:{port}/ewb/energy/profiles/api/v1/range/{from_asset}'
                                   f'/from-date/{from_date}'
                                   f'/to-date/{to_date}'
                                   ) as response:
                return (await response.json())["results"] if response.status == 200 else []

    @staticmethod
    def create_load_shape(load_profile):
        """
        Convert a load-profile payload into (load_shape, max_value) where load_shape is a
        list of newline-terminated strings, each the kwNet reading normalised by the
        maximum absolute reading.

        Zero-reading runs are patched: a full day of 48 zeros (appears to assume
        half-hourly data - TODO confirm) is replaced with the previous day's values, and
        shorter runs are linearly interpolated. Profiles longer than a year (17520
        entries) are trimmed from the front.

        NOTE(review): if every reading is 0, max_value stays at sys.float_info.min and
        the normalised values blow up. NOTE(review): the day-replacement loop has no
        "at least one prior day" guard; an early IndexError is swallowed by the outer
        handler and aborts the remaining processing.
        """
        max_value = sys.float_info.min
        load_shape = []
        zero_count = 0       # length of the current run of consecutive zero readings
        days_replaced = 0    # NOTE(review): incremented but never read
        try:
            # First pass: find the maximum absolute reading for normalisation.
            for s in load_profile[0]["series"][0]:
                for entry in s["energy"]["readings"]:
                    if abs(entry["values"]["kwNet"]) > max_value:
                        max_value = abs(entry["values"]["kwNet"])
            # Second pass: emit normalised values and patch zero runs as we go.
            for s in load_profile[0]["series"][0]:
                for entry in s["energy"]["readings"]:
                    load_shape.append(f'{entry["values"]["kwNet"] / max_value}\n')
                    if entry["values"]["kwNet"] == 0:
                        zero_count += 1
                        # Once 48 reading of 0 is accumulated, it is replaced with previous day's readings
                        if zero_count == 48:
                            for c in range(48):
                                load_shape[-(48 - c)] = load_shape[-(96 - c)]
                            days_replaced += 1
                            zero_count = 0
                    else:
                        # This route will patch non full day zero readings just in case
                        # Compensate for leading 2 0.0 readings
                        if zero_count > 0 and len(load_shape) > 3:
                            # Linear interpolation across the zero run between its bracketing readings.
                            difference = (float(load_shape[-1]) - float(load_shape[-(zero_count+2)]))/(zero_count+1)
                            for i in range(zero_count):
                                load_shape[-(zero_count+1-i)] = str(float(load_shape[-(zero_count+2-i)]) + difference) + '\n'
                            zero_count = 0
        except IndexError:
            # Empty Feeder
            pass
        # Normalize to 365 days a year
        if len(load_shape) > 17520:
            difference = len(load_shape)-17520
            del load_shape[:difference]
        return load_shape, max_value

    async def write_load_shape_to_txt(self, feeder: str, target: str, load_shape: List[str]):
        """Write a non-empty load shape to `<out_dir>/<feeder>/base/<target>.txt`, creating directories as needed."""
        if len(load_shape) != 0:
            base_folder = f'{self.out_dir}/{feeder}/base/'
            if not os.path.exists(base_folder):
                os.makedirs(base_folder)
            async with aiof.open(f'{base_folder}{target}.txt', 'w', encoding='ascii') as f:
                await f.writelines(load_shape)
                # NOTE(review): redundant - the async context manager already closes the file.
                await f.close()
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/writer/opendss_load_shape_writer.py
opendss_load_shape_writer.py
import os.path
import struct
from pathlib import Path
from typing import TYPE_CHECKING, Callable, Collection, List, Set, Tuple, TypeVar

# These names are used in type annotations only; keeping them behind
# TYPE_CHECKING (with string annotations below) avoids paying their import
# cost, and any import-cycle risk, at runtime.
if TYPE_CHECKING:
    from zepben.opendss import Line, LineCode, EnergyMeter, Transformer, TransformerWinding, Load, ConnectionPoint, Monitor, GrowthShape
    from zepben.opendss.model.load.generator import Generator
    from zepben.opendss.model.load.load_shape import LoadShape
    from zepben.opendss.model.load.power_conversion_element import PowerConversionElement
    from zepben.opendss.model.master import Master, PceEnableTarget
    from zepben.opendss.model.network.bus import Node, BusConnection
    from zepben.opendss.model.network.reg_control import RegControl

__all__ = ["OpenDssWriter"]


class OpenDssWriter:
    """Serialises a Master OpenDSS model into a directory of .dss files plus a Master.dss entry point."""

    @staticmethod
    async def write(dir_path_str: str, master: "Master"):
        """
        Write every non-empty section of `master` to `dir_path_str`, creating the
        directory if needed.

        :raises ValueError: if the path exists but is not a directory, or if the
                            model produces an empty Master.dss.
        """
        model_dir = Path(dir_path_str)
        if not os.path.exists(model_dir):
            os.makedirs(model_dir)

        if not model_dir.is_dir():
            raise ValueError(f"The argument '{dir_path_str}' for the dir_path_str parameter was not a directory")

        # -- Network Model --
        if OpenDssWriter.has_lines(master):
            await OpenDssWriter.write_lines_file(model_dir, master)
        if OpenDssWriter.has_line_codes(master):
            await OpenDssWriter.write_line_codes_file(model_dir, master)
        if OpenDssWriter.has_transformers(master):
            await OpenDssWriter.write_transformers_file(model_dir, master)
        if OpenDssWriter.has_reg_controls(master):
            await OpenDssWriter.write_reg_controls_file(model_dir, master)

        # -- Load Model --
        if OpenDssWriter.has_loads(master):
            await OpenDssWriter.write_loads_file(model_dir, master)
        if OpenDssWriter.has_generators(master):
            await OpenDssWriter.write_generators_file(model_dir, master)
        if OpenDssWriter.has_load_shapes(master):
            await OpenDssWriter.write_load_shape_files(model_dir, master)
        if OpenDssWriter.has_growth_shapes(master):
            await OpenDssWriter.write_growth_shape_file(model_dir, master)

        # -- Metering Model --
        if OpenDssWriter.has_energy_meters(master):
            await OpenDssWriter.write_energy_meter_file(model_dir, master)
        if OpenDssWriter.has_monitors(master):
            await OpenDssWriter.write_monitor_file(model_dir, master)

        if OpenDssWriter.has_yearly_config(master):
            await OpenDssWriter.write_yearly_config_files(model_dir, master)

        # -- Master --
        await OpenDssWriter.write_master_file(model_dir=model_dir, master=master)

    @staticmethod
    def has_lines(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.network_model.lines)

    @staticmethod
    def has_line_codes(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.network_model.line_codes)

    @staticmethod
    def has_transformers(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.network_model.transformers)

    @staticmethod
    def has_reg_controls(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.network_model.reg_controls)

    @staticmethod
    def has_loads(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.load_model.loads)

    @staticmethod
    def has_generators(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.load_model.generators)

    @staticmethod
    def has_load_shapes(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.load_model.load_shapes)

    @staticmethod
    def has_growth_shapes(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.load_model.growth_shapes)

    @staticmethod
    def has_energy_meters(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.metering_model.energy_meters)

    @staticmethod
    def has_monitors(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.metering_model.monitors)

    @staticmethod
    def has_yearly_config(master: "Master") -> bool:
        return OpenDssWriter.has_elements(master.yearly_config)

    @staticmethod
    def has_elements(collection: Collection) -> bool:
        # len() rather than collection.__len__(): same result, idiomatic.
        return len(collection) != 0

    @staticmethod
    async def write_lines_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'Lines.dss',
            lambda: master.network_model.lines.values(),
            OpenDssWriter.line_to_str
        )

    @staticmethod
    async def write_line_codes_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'LineCodes.dss',
            lambda: master.network_model.line_codes.values(),
            OpenDssWriter.line_code_to_str
        )

    @staticmethod
    async def write_transformers_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'Transformers.dss',
            lambda: master.network_model.transformers.values(),
            OpenDssWriter.transformer_to_str
        )

    @staticmethod
    async def write_reg_controls_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'RegControls.dss',
            lambda: master.network_model.reg_controls.values(),
            OpenDssWriter.reg_control_to_str
        )

    @staticmethod
    async def write_loads_file(model_dir: Path, master: "Master"):
        # Pair each connection point with every load attached to it.
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'Loads.dss',
            lambda: [(conn_point, load)
                     for conn_point in master.network_model.connection_points.values()
                     for load in master.load_model.get_loads_by_conn_point_uid(conn_point.uid)],
            OpenDssWriter.conn_point_load_to_str
        )

    @staticmethod
    async def write_generators_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'Generators.dss',
            lambda: [(conn_point, generator)
                     for conn_point in master.network_model.connection_points.values()
                     for generator in master.load_model.get_generators_by_cnn_point_uid(conn_point.uid)],
            OpenDssWriter.conn_point_generator_to_str
        )

    @staticmethod
    async def write_load_shape_files(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'LoadShapes.dss',
            lambda: master.load_model.load_shapes.values(),
            OpenDssWriter.load_shape_to_str
        )
        # Each non-empty shape also gets a binary .sng multiplier file.
        for load_shape in master.load_model.load_shapes.values():
            if len(load_shape.shape) != 0:
                await OpenDssWriter.write_mult_file(model_dir, load_shape)

    @staticmethod
    async def write_mult_file(model_dir: Path, load_shape: "LoadShape"):
        """Write the shape's multipliers as packed little 32-bit floats (OpenDSS sngfile format)."""
        import aiofiles as aiof

        packed_data = struct.pack(f'{len(load_shape.shape)}f', *load_shape.shape)
        async with aiof.open(f'{model_dir}{os.sep}{load_shape.uid}.sng', 'wb') as file:
            await file.write(packed_data)

    @staticmethod
    async def write_growth_shape_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'GrowthShapes.dss',
            lambda: master.load_model.growth_shapes.values(),
            OpenDssWriter.growth_shape_to_str
        )

    @staticmethod
    async def write_energy_meter_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'EnergyMeters.dss',
            lambda: master.metering_model.energy_meters.values(),
            OpenDssWriter.energy_meter_to_str
        )

    @staticmethod
    async def write_monitor_file(model_dir: Path, master: "Master"):
        await OpenDssWriter.write_elements_to_file(
            model_dir / 'Monitors.dss',
            lambda: master.metering_model.monitors.values(),
            OpenDssWriter.monitor_to_str
        )

    @staticmethod
    async def write_yearly_config_files(model_dir: Path, master: "Master"):
        # The lambdas close over the loop variable, but each one is consumed
        # before the next iteration (the write is awaited), so this is safe.
        for index, yc in master.yearly_config.items():
            if len(yc.enable_pce_targets) != 0:
                await OpenDssWriter.write_elements_to_file(
                    model_dir / f'Year{index}Setup.dss',
                    lambda: yc.enable_pce_targets,
                    OpenDssWriter.enable_command_to_str
                )
            if len(yc.disable_pce_targets) != 0:
                await OpenDssWriter.write_elements_to_file(
                    model_dir / f'Year{index}Cleanup.dss',
                    lambda: yc.disable_pce_targets,
                    OpenDssWriter.disable_command_to_str
                )

    @staticmethod
    async def write_master_file(model_dir: Path, master: "Master"):
        """:raises ValueError: when the model renders to an empty master script."""
        import aiofiles as aiof

        async with aiof.open((model_dir / 'Master.dss'), 'w') as file:
            master_str = OpenDssWriter.master_to_str(master)
            if not master_str:
                raise ValueError("Empty master object for OpenDss model.")
            await file.write(master_str)

    T = TypeVar('T')

    # noinspection PyArgumentList
    @staticmethod
    async def write_elements_to_file(
            file_path: Path,
            elements_provider: Callable[[], List[T]],
            to_str: Callable[[T], str]
    ):
        """Render each provided element with `to_str` and write the non-empty results, one per line."""
        import aiofiles as aiof

        async with aiof.open(str(file_path), 'w') as file:
            strings = []
            for element in elements_provider():
                as_string = to_str(element)
                if as_string:
                    strings.append(as_string)
            await file.write("\n".join(strings))

    @staticmethod
    def nodes_to_str(nodes: "Set[Node]") -> str:
        # Bus nodes render as ".1.2.3" (sorted); empty set renders as "".
        nodes_str = '.'.join(sorted(str(n.value) for n in nodes))
        return f".{nodes_str}" if nodes_str else ""

    @staticmethod
    def bus_conn_to_str(bus_conn: "BusConnection") -> str:
        return f"{bus_conn.bus.uid}{OpenDssWriter.nodes_to_str(bus_conn.connections)}"

    @staticmethod
    def line_to_str(line: "Line") -> str:
        return f"New Line.{line.uid} " \
               f"Units={line.units} " \
               f"Length={line.length} " \
               f"bus1={OpenDssWriter.bus_conn_to_str(line.bus_conn1)} bus2={OpenDssWriter.bus_conn_to_str(line.bus_conn2)} " \
               f"Linecode={line.line_code.uid}"

    @staticmethod
    def line_code_to_str(line_code: "LineCode") -> str:
        # Zero-sequence values fall back to the positive-sequence ones when absent
        # (note: a legitimate 0.0 also triggers the fallback via `or`).
        return f"New Linecode.{line_code.uid} " \
               f"units={line_code.units} " \
               f"nphases={line_code.nphases} " \
               f"Normamps={line_code.norm_amps} Emergamps={line_code.emerg_amps} " \
               f"R1={line_code.r1} R0={line_code.r0 or line_code.r1} " \
               f"X1={line_code.x1} X0={line_code.x0 or line_code.x1} " \
               f"B1={line_code.b1} B0={line_code.b0 or line_code.b1}"

    @staticmethod
    def conn_point_load_to_str(conn_point_to_load: "Tuple[ConnectionPoint, Load]") -> str:
        _, load = conn_point_to_load
        return f"New Load.{load.uid} {OpenDssWriter.conn_point_pce_to_str(conn_point_to_load)}"

    @staticmethod
    def conn_point_generator_to_str(conn_point_to_generator: "Tuple[ConnectionPoint, Generator]") -> str:
        _, generator = conn_point_to_generator
        return f"New Generator.{generator.uid} {OpenDssWriter.conn_point_pce_to_str(conn_point_to_generator)}"

    @staticmethod
    def conn_point_pce_to_str(conn_point_to_pce: "Tuple[ConnectionPoint, PowerConversionElement]") -> str:
        """Render the properties shared by loads and generators at a connection point."""
        cnn_point, pce = conn_point_to_pce
        pce_str = f"bus1={OpenDssWriter.bus_conn_to_str(cnn_point.bus_conn)} " \
                  f"kV={cnn_point.kv} Vminpu={cnn_point.v_min_pu} Vmaxpu={cnn_point.v_max_pu} " \
                  f"model={pce.model} " \
                  f"Phases={cnn_point.phases} " \
                  f"kW={pce.kw} PF={pce.pf} " \
                  f"enabled={pce.enabled}"
        if pce.load_shape is not None and pce.kw != 0:
            pce_str += f" {pce.load_shape.duration}={pce.load_shape.uid}"
        if pce.growth_shape is not None:
            pce_str += f" Growth={pce.growth_shape.uid}"
        return pce_str

    @staticmethod
    def reg_control_to_str(reg_control: "RegControl") -> str:
        tap_winding = f'tapwinding={reg_control.tap_winding}' if reg_control.tap_winding else ''
        return f"New regcontrol.{reg_control.uid} " \
               f"transformer={reg_control.transformer.uid} winding={reg_control.winding} " \
               f"vreg={reg_control.vreg} band={reg_control.band} ptratio={reg_control.ptratio} ctprim={reg_control.ctprim} " \
               f"R={reg_control.r} " \
               f"{tap_winding}"

    @staticmethod
    def energy_meter_to_str(energy_meter: "EnergyMeter") -> str:
        return f"New energymeter.{energy_meter.uid} " \
               f"element={energy_meter.element.element_type}.{energy_meter.element.uid} " \
               f"term={energy_meter.term} " \
               f"option={energy_meter.option} " \
               f"action={energy_meter.action} " \
               f"PhaseVolt={energy_meter.phasevolt}"

    @staticmethod
    def monitor_to_str(monitor: "Monitor") -> str:
        return f"New monitor.{monitor.uid} " \
               f"element={monitor.element.element_type}.{monitor.element.uid} " \
               f"mode={monitor.mode}"

    @staticmethod
    def transformer_to_str(transformer: "Transformer") -> str:
        tx_str = f"New Transformer.{transformer.uid} " \
                 f"phases={transformer.phases} " \
                 f"windings={len(transformer.windings)} " \
                 f"%loadloss={transformer.load_loss_percent} " \
                 f"XHL={transformer.xhl} "
        if transformer.xht is not None:
            tx_str += f"XHT={transformer.xht} "
        if transformer.xlt is not None:
            tx_str += f"XLT={transformer.xlt} "
        # Windings are numbered in descending kV order (HV winding first).
        tx_str += " ".join(OpenDssWriter.t_winding_to_str(tw, index + 1)
                           for index, tw in enumerate(sorted(transformer.windings, key=lambda w: w.kv, reverse=True)))
        return tx_str

    @staticmethod
    def t_winding_to_str(t_winding: "TransformerWinding", w_number: int) -> str:
        t_winding_str = f"wdg={w_number} conn={t_winding.conn} " \
                        f"Kv={t_winding.kv} kva={t_winding.kva} " \
                        f"bus={OpenDssWriter.bus_conn_to_str(t_winding.bus_conn)}"
        if t_winding.tap is not None:
            t_winding_str += f" Tap={t_winding.tap}"
        return t_winding_str

    @staticmethod
    def load_shape_to_str(load_shape: "LoadShape") -> str:
        action = f'action={load_shape.action}' if load_shape.action else ''
        return f"New Loadshape.{load_shape.uid} " \
               f"npts={len(load_shape.shape)} " \
               f"{'' if load_shape.interval is None else f'interval={load_shape.interval} '}" \
               f"mult=[sngfile={load_shape.uid}.sng] " \
               f"{action}"

    @staticmethod
    def growth_shape_to_str(growth_shape: "GrowthShape") -> str:
        # Returns None for an empty shape; write_elements_to_file drops falsy strings.
        years = []
        multi = []
        for index, value in enumerate(growth_shape.shape):
            years.append(str(index))
            multi.append(str(value))
        if years and multi:
            return f"New GrowthShape.{growth_shape.uid} " \
                   f"npts={len(years)} " \
                   f"Year=[{', '.join(years)}] " \
                   f"Mult=[{', '.join(multi)}]"

    @staticmethod
    def enable_command_to_str(pce_target: "PceEnableTarget") -> str:
        return f"Edit {pce_target.pce_type}.{pce_target.uid} enabled=True"

    @staticmethod
    def disable_command_to_str(pce_target: "PceEnableTarget") -> str:
        return f"Edit {pce_target.pce_type}.{pce_target.uid} enabled=False"

    @staticmethod
    def master_to_str(master: "Master") -> str:
        """Render Master.dss; returns "" when there is nothing to redirect to."""
        model_files_str = ""
        if OpenDssWriter.has_line_codes(master):
            model_files_str += "Redirect LineCodes.dss\n"
        if OpenDssWriter.has_lines(master):
            model_files_str += "Redirect Lines.dss\n"
        if OpenDssWriter.has_transformers(master):
            model_files_str += "Redirect Transformers.dss\n"
        if OpenDssWriter.has_reg_controls(master):
            model_files_str += "Redirect RegControls.dss\n"
        if OpenDssWriter.has_load_shapes(master):
            model_files_str += "Redirect LoadShapes.dss\n"
        if OpenDssWriter.has_growth_shapes(master):
            model_files_str += "Redirect GrowthShapes.dss\n"
        if OpenDssWriter.has_loads(master):
            model_files_str += "Redirect Loads.dss\n"
        if OpenDssWriter.has_generators(master):
            model_files_str += "Redirect Generators.dss\n"
        if OpenDssWriter.has_energy_meters(master):
            model_files_str += "Redirect EnergyMeters.dss\n"
        if OpenDssWriter.has_monitors(master):
            model_files_str += "Redirect Monitors.dss\n"
        if not model_files_str:
            return ""

        # Source impedance is emitted only when both rpos and xpos are present.
        if None in [master.network_model.circuit.rpos, master.network_model.circuit.xpos]:
            impedance = ""
        else:
            impedance = (f"Z1 = [{master.network_model.circuit.rpos}, {master.network_model.circuit.xpos}] "
                         f"Z2 = [{master.network_model.circuit.rneg}, {master.network_model.circuit.xneg}] "
                         f"Z0 = [{master.network_model.circuit.rzero}, {master.network_model.circuit.xzero}] ")

        return (
            "Clear\n" +
            "\n" +
            f"set defaultbasefreq={master.network_model.default_base_frequency}\n" +
            "\n" +
            f"New Circuit.{master.network_model.circuit.uid} "
            f"bus1={master.network_model.circuit.bus_conn.bus.uid} "
            f"pu={master.network_model.circuit.pu} "
            f"basekV={master.network_model.circuit.base_kv} " +
            impedance +
            f"phases={master.network_model.circuit.phases}\n" +
            "\n" +
            "Set normvminpu=0.9\n"
            "Set normvmaxpu=1.054\n" +
            "Set emergvminpu=0.8\n" +
            "Set emergvmaxpu=1.1\n" +
            "\n" +
            model_files_str +
            "\n" +
            f"Set Voltagebases=[{','.join(str(vb) for vb in master.network_model.voltage_bases)}]\n"
            "\n" +
            "Calcvoltagebases\n" +
            "\n" +
            "Set overloadreport=true\t! TURN OVERLOAD REPORT ON\n" +
            "Set voltexcept=true\t! voltage exception report\n" +
            "Set demand=true\t! demand interval ON\n" +
            "Set DIVerbose=true\t! verbose mode is ON\n" +
            "Set Maxiter=25\n" +
            "Set Maxcontroliter=20\n" +
            "set mode=yearly\n" +
            "\n" +
            OpenDssWriter.master_solve_str(master)
        )

    @staticmethod
    def master_solve_str(master: "Master") -> str:
        """Render the solve section: a plain Solve, or one Setup/Solve/Cleanup group per configured year."""
        if len(master.yearly_config) == 0:
            return "Solve"
        solve_str = ""
        for year, config in sorted(master.yearly_config.items(), key=lambda x: x[0]):
            if len(config.enable_pce_targets) != 0:
                solve_str += f"Redirect Year{year}Setup.dss\n"
            solve_str += f"set year={year}\n" \
                         f"Solve\n" \
                         f"CloseDI\n"
            if len(config.disable_pce_targets) != 0:
                solve_str += f"Redirect Year{year}Cleanup.dss\n"
            solve_str += "\n"
        return solve_str
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/writer/opendss_writer.py
opendss_writer.py
import logging
from typing import Any, Dict, TextIO

from dataclassy import dataclass

from zepben.opendss import Circuit, DssType, Line, LineCode, Load, Transformer, TransformerWinding, NetworkModel

logger = logging.getLogger(__name__)

__all__ = ["BaseDSSReader"]


def line(fields: Dict[str, Any]) -> Line:
    """Build a Line from the parsed fields of a `New Line.<id> ...` statement."""
    return Line(fields["id"], fields["units"], float(fields["length"]), fields["bus1"], fields["bus2"],
                int(fields["phases"]), fields["linecode"])


def line_code(fields: Dict[str, Any]) -> LineCode:
    """Build a LineCode from the parsed fields of a `New Linecode.<id> ...` statement."""
    r0 = convert_val(fields["r0"])
    x0 = convert_val(fields["x0"])
    r1 = convert_val(fields["r1"])
    x1 = convert_val(fields["x1"])
    return LineCode(fields["id"], fields["units"], r1, r0, x1, x0)


def convert_val(val: str) -> float:
    """
    Parse a numeric DSS value. Some files store "<base> <multiplier>" pairs
    (e.g. "4 0.102"), which are multiplied out.

    TODO: Fix this to do the right thing for values like: 4 0.102 *
    """
    try:
        return float(val)
    except ValueError:
        # Narrowed from a bare except: anything other than a parse failure
        # should propagate rather than be silently retried.
        s = val.split()
        return float(s[0]) * float(s[1])


def load(fields: Dict[str, Any]) -> Load:
    """Build a Load from the parsed fields of a `New Load.<id> ...` statement."""
    return Load(fields["id"], fields["bus1"], float(fields["kv"]), float(fields["kw"]), float(fields["kvar"]),
                int(fields["phases"]), float(fields["vminpu"]), float(fields["vmaxpu"]))


def transformer(fields: Dict[str, Any], windings: Dict[str, Dict[str, Any]]) -> Transformer:
    """Build a Transformer; `windings` maps the wdg number (as parsed, a string) to its field dict."""
    transformer_windings = []
    for winding in windings.values():
        transformer_windings.append(TransformerWinding(winding["conn"], winding["kv"], winding["kva"], winding["bus"]))
    return Transformer(fields["id"], fields["phases"], fields["%loadloss"], transformer_windings)


def circuit(fields: Dict[str, Any]) -> Circuit:
    """Build a Circuit from the parsed fields of a `New Circuit.<id> ...` statement."""
    return Circuit(fields["id"], fields["bus1"], fields["pu"], fields["base_kv"])


# Dispatch table from DSS object type to the factory that builds it.
type_map = {
    DssType.line: line,
    DssType.linecode: line_code,
    DssType.load: load,
    DssType.transformer: transformer,
    DssType.circuit: circuit,
}


@dataclass(slots=True)
class BaseDSSReader(object):
    """Parses OpenDSS script files into a NetworkModel."""
    network_model: NetworkModel = NetworkModel()

    def read_file(self, file: TextIO):
        """Feed every line of an open DSS file through process_line."""
        # Named file_line so the module-level line() factory is not shadowed.
        for file_line in file:
            self.process_line(file_line)

    def process_line(self, line: str):
        """Parse one DSS statement and register the resulting element on the network model."""
        # Skip blank lines and DSS comments (leading '!').
        if not line or line.startswith('!'):
            return

        keyed_values = line.split()
        fields = dict()
        windings = dict()
        reader = None
        dss_type = None
        winding = None
        grouped_vals = []
        grouped_key = None
        for kv in keyed_values:
            s = kv.split(sep='=', maxsplit=1)

            # If we've started a parenthesised group, accumulate until the closing ')'.
            if grouped_vals:
                grouped_vals.append(kv)
                if kv.endswith(')'):
                    grouped = ' '.join(grouped_vals).lstrip('(').rstrip(')')
                    fields[grouped_key.casefold()] = grouped
                    grouped_key = None
                    grouped_vals = []
                continue

            if s[0].casefold() == 'wdg':
                # A new winding section: subsequent key=value pairs belong to it.
                winding = s[1]
                windings[winding] = dict()
            elif len(s) > 1:
                # Deal with parenthesis-grouped results e.g. (3.02 4.75 *)
                if s[1].startswith('('):
                    grouped_key = s[0]
                    grouped_vals.append(s[1])
                    continue
                if winding is not None:
                    windings[winding][s[0].casefold()] = s[1]
                else:
                    # Lowercase all keys.
                    fields[s[0].casefold()] = s[1]
            else:
                if s[0].casefold() == 'new':
                    continue
                # TODO: Are other things required here?
                s2 = s[0].split(sep='.', maxsplit=1)
                # Skip fields with no . or = separators.
                if len(s2) < 2:
                    continue
                try:
                    dss_type = DssType[s2[0].casefold()]
                except KeyError:
                    logger.debug(f"Could not process type {s2[0]} - line in file was: {line}")
                    continue
                reader = type_map[dss_type]
                fields["id"] = s2[1]

        if dss_type is None:
            logger.warning(f"No type was detected for line: {line}")
            return
        if reader is None:
            logger.warning(f"Could not handle line: {line}")
            return
        if not fields:
            logger.warning(f"Fields could not be processed for line: {line}")
            return

        # BUG FIX: these previously wrote to self.master, which is not an
        # attribute of this class; parsed elements live on self.network_model.
        if dss_type == DssType.line:
            self.network_model.add_line(reader(fields))
        elif dss_type == DssType.load:
            # NOTE(review): NetworkModel does not define add_load — loads appear
            # to belong on a LoadModel. TODO confirm where parsed loads should go.
            self.network_model.add_load(reader(fields))
        elif dss_type == DssType.linecode:
            self.network_model.add_line_code(reader(fields))
        elif dss_type == DssType.transformer:
            # noinspection PyTypeChecker
            self.network_model.add_transformer(reader(fields, windings))
        elif dss_type == DssType.circuit:
            self.network_model.set_circuit(reader(fields))
        else:
            raise ValueError(f"Unhandled type {dss_type}")

    def read_line(self) -> Line:
        # Not yet implemented.
        pass
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/reader/opendss_reader.py
opendss_reader.py
from typing import Optional, Dict, Set

__all__ = ["NetworkModel"]

from zepben.opendss import ConnectionPoint, Circuit, Bus, Line, LineCode, Transformer, RegControl


class NetworkModel:
    """The electrical portion of an OpenDSS model: circuit, buses, lines, transformers and controls."""

    def __init__(
            self,
            default_base_frequency: int = 50,
            circuit: Optional[Circuit] = None,
            buses: Dict[str, Bus] = None,
            lines: Dict[str, Line] = None,
            line_codes: Dict[str, LineCode] = None,
            transformers: Dict[str, Transformer] = None,
            connection_points: Dict[str, ConnectionPoint] = None,
            reg_controls: Dict[str, RegControl] = None
    ):
        self.default_base_frequency = default_base_frequency
        self.circuit = circuit
        # Each collection falls back to a fresh dict per instance when not supplied.
        self.buses = buses if buses is not None else {}
        self.lines = lines if lines is not None else {}
        self.line_codes = line_codes if line_codes is not None else {}
        self.transformers = transformers if transformers is not None else {}
        self.connection_points = connection_points if connection_points is not None else {}
        self.reg_controls = reg_controls if reg_controls is not None else {}

    @property
    def voltage_bases(self) -> Set[float]:
        """
        The set of line-to-line voltage bases used by the circuit, transformer
        windings and connection points.

        TODO: this is a really poor way of making sure voltages are line-to-line,
         but the source data has no consistent convention for nominal voltages
         (a value may be line-to-line or line-to-ground and nothing records
         which), so this hard-coded map converts the known line-to-ground values
         in the mean time. Once the cim networks use a consistent voltage
         convention (with a convenience method to read it in either form),
         replace this map with that mechanism.
        """
        ltg_to_ltl = {
            0.24: 0.415,
            0.25: 0.415,
            6.351: 11,
            6.35: 11,
            12.7: 22,
            19.1: 33
        }
        candidate_kvs = [self.circuit.base_kv]
        candidate_kvs.extend(w.kv for tx in self.transformers.values() for w in tx.windings)
        candidate_kvs.extend(cp.kv for cp in self.connection_points.values())
        return {ltg_to_ltl.get(kv, kv) for kv in candidate_kvs}

    def set_default_base_frequency(self, default_base_frequency: int):
        """Replace the model's default base frequency."""
        self.default_base_frequency = default_base_frequency

    def set_circuit(self, circuit: Optional[Circuit] = None):
        """Replace (or clear) the model's circuit."""
        self.circuit = circuit

    def add_bus(self, bus: Bus):
        """Add or replace a bus, keyed by its uid."""
        self.buses[bus.uid] = bus

    def add_line(self, line: Line):
        """Add or replace a line, keyed by its uid."""
        self.lines[line.uid] = line

    def add_line_code(self, line_code: LineCode):
        """Add or replace a line code, keyed by its uid."""
        self.line_codes[line_code.uid] = line_code

    def add_transformer(self, transformer: Transformer):
        """Add or replace a transformer, keyed by its uid."""
        self.transformers[transformer.uid] = transformer

    def add_connection_point(self, connection_point: ConnectionPoint):
        """Add or replace a connection point, keyed by its uid."""
        self.connection_points[connection_point.uid] = connection_point

    def add_reg_control(self, reg_control: RegControl):
        """Add or replace a regulator control, keyed by its uid."""
        self.reg_controls[reg_control.uid] = reg_control

    def remove_line(self, uid: str):
        """Remove the line with the given uid (KeyError if absent)."""
        del self.lines[uid]

    def remove_line_code(self, uid: str):
        """Remove the line code with the given uid (KeyError if absent)."""
        del self.line_codes[uid]

    def remove_transformer(self, uid: str):
        """Remove the transformer with the given uid (KeyError if absent)."""
        del self.transformers[uid]

    def remove_connection_point(self, uid: str):
        """Remove the connection point with the given uid (KeyError if absent)."""
        del self.connection_points[uid]

    def remove_reg_control(self, uid: str):
        """Remove the regulator control with the given uid (KeyError if absent)."""
        del self.reg_controls[uid]

    def copy(self):
        raise NotImplementedError("Copy method is not implemented")
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/model/network/network_model.py
network_model.py
from datetime import date
from typing import Dict, Callable, Set

from zepben.opendss import NetworkModel
from zepben.opendss.model.load.generator import Generator
from zepben.opendss.model.load.growth_shape import GrowthShape
from zepben.opendss.model.load.load import LoadShapeInfoProvider, Load
from zepben.opendss.model.load.load_shape import LoadShape
from zepben.opendss.model.load.power_conversion_element import PowerConversionElement

__all__ = ["LoadModel", "single_point_load_model", "load_model_from_load_shape_info_provider", "update_loads_in_model"]


class LoadModel:
    """Loads, generators and their (load/growth) shapes for a NetworkModel."""

    def __init__(
            self,
            network: NetworkModel,
            loads: Dict[str, Load] = None,
            generators: Dict[str, Generator] = None,
            load_shapes: Dict[str, LoadShape] = None,
            growth_shapes: Dict[str, GrowthShape] = None
    ):
        self.network = network
        self.loads = {} if loads is None else loads
        self.generators = {} if generators is None else generators
        self.load_shapes = {} if load_shapes is None else load_shapes
        self.growth_shapes = {} if growth_shapes is None else growth_shapes
        # BUG FIX: these indexes previously mapped conn-point uid -> a single
        # Load/Generator object (dict comprehension) instead of a set of them,
        # which broke get_*_by_conn_point_uid for models constructed with
        # pre-populated dictionaries and silently dropped all but one element
        # per connection point. Build them with the same helper add_load uses.
        self._loads_by_conn_point_uid: Dict[str, Set[Load]] = {}
        for load in self.loads.values():
            LoadModel._add_refs_for_pce(self._loads_by_conn_point_uid, load)
        self._generators_by_conn_point_uid: Dict[str, Set[Generator]] = {}
        for generator in self.generators.values():
            LoadModel._add_refs_for_pce(self._generators_by_conn_point_uid, generator)

    def get_loads_by_conn_point_uid(self, conn_point_uid: str) -> Set[Load]:
        """Return the loads attached to the given connection point (empty set when none)."""
        return self._loads_by_conn_point_uid.get(conn_point_uid, set())

    def add_load(self, load: Load):
        """Add `load`, validating its connection-point/shape references first."""
        self._verify_power_conversion_element_refs(load)
        LoadModel._add_refs_for_pce(self._loads_by_conn_point_uid, load)
        self.loads[load.uid] = load

    def remove_load(self, uid: str):
        """Remove the load with the given uid (a no-op when absent)."""
        if uid in self.loads:
            LoadModel._remove_refs_for_pce(self._loads_by_conn_point_uid, self.loads[uid])
            del self.loads[uid]

    def get_generators_by_cnn_point_uid(self, conn_point_uid: str) -> Set[Generator]:
        """Return the generators attached to the given connection point (empty set when none)."""
        return self._generators_by_conn_point_uid.get(conn_point_uid, set())

    def add_generator(self, generator: Generator):
        """Add `generator`, validating its connection-point/shape references first."""
        self._verify_power_conversion_element_refs(generator)
        LoadModel._add_refs_for_pce(self._generators_by_conn_point_uid, generator)
        self.generators[generator.uid] = generator

    def remove_generator(self, uid: str):
        """Remove the generator with the given uid (a no-op when absent)."""
        if uid in self.generators:
            LoadModel._remove_refs_for_pce(self._generators_by_conn_point_uid, self.generators[uid])
            del self.generators[uid]

    def _verify_power_conversion_element_refs(self, pce: PowerConversionElement):
        """:raises ReferenceError: when the element references an unknown connection point or shape."""
        if pce.connection_point_uid not in self.network.connection_points:
            raise ReferenceError(f"No connection point found with uid {pce.connection_point_uid}.")
        if pce.load_shape is not None and pce.load_shape.uid not in self.load_shapes.keys():
            raise ReferenceError(f"No load shape found with uid {pce.load_shape.uid}.")
        if pce.growth_shape is not None and pce.growth_shape.uid not in self.growth_shapes.keys():
            raise ReferenceError(f"No growth shape found with uid {pce.growth_shape.uid}.")

    @staticmethod
    def _add_refs_for_pce(ref_dictionary: Dict[str, Set[PowerConversionElement]], element: PowerConversionElement):
        # Index `element` under its connection point, creating the set on demand.
        if element.connection_point_uid not in ref_dictionary:
            ref_dictionary[element.connection_point_uid] = set()
        ref_dictionary[element.connection_point_uid].add(element)

    @staticmethod
    def _remove_refs_for_pce(ref_dictionary: Dict[str, Set[PowerConversionElement]], element: PowerConversionElement):
        # Drop `element` from its connection-point index, removing empty sets.
        if element.connection_point_uid not in ref_dictionary:
            return
        elements = ref_dictionary[element.connection_point_uid]
        elements.remove(element)
        if len(elements) == 0:
            del ref_dictionary[element.connection_point_uid]

    def add_load_shape(self, load_shape: LoadShape):
        """Add or replace a load shape, keyed by its uid."""
        self.load_shapes[load_shape.uid] = load_shape

    def remove_load_shape(self, uid: str):
        """Remove the load shape with the given uid (KeyError if absent)."""
        del self.load_shapes[uid]

    def add_growth_shape(self, growth_shape: GrowthShape):
        """Add or replace a growth shape, keyed by its uid."""
        self.growth_shapes[growth_shape.uid] = growth_shape

    def remove_growth_shape(self, uid: str):
        """Remove the growth shape with the given uid (KeyError if absent)."""
        del self.growth_shapes[uid]

    def copy(self):
        raise NotImplementedError("Copy method is not implemented")


def single_point_load_model(network_model: NetworkModel, kw: float, pf: float) -> LoadModel:
    """Build a LoadModel with one constant (kw, pf) load per connection point."""
    load_model = LoadModel(network_model)
    for conn in network_model.connection_points.values():
        load_model.add_load(Load(uid=f"{conn.uid}_LOAD", connection_point_uid=conn.uid, kw=kw, pf=pf))
    return load_model


async def load_model_from_load_shape_info_provider(load_shape_info_provider: LoadShapeInfoProvider,
                                                   from_date: date, to_date: date,
                                                   network_model: NetworkModel) -> LoadModel:
    """Build a LoadModel by querying the provider for every connection point's shape over [from_date, to_date]."""
    load_model = LoadModel(network_model)
    for conn_point in network_model.connection_points.values():
        await _add_to_load_model(load_model, load_shape_info_provider, conn_point.uid, from_date, to_date)
    return load_model


async def _add_to_load_model(load_model: LoadModel, load_shape_info_provider: LoadShapeInfoProvider,
                             conn_point_uid: str, from_date: date, to_date: date):
    """Fetch shape info for one connection point and register its shape (when non-empty) and load."""
    load_shape_info = await load_shape_info_provider.get_load_shape_info(conn_point_uid, from_date, to_date)
    load_shape = None
    if len(load_shape_info.shape) != 0:
        load_shape = LoadShape(
            uid=f"{conn_point_uid}_SHAPE",
            shape=load_shape_info.shape,
            interval=load_shape_info.interval
        )
        load_model.add_load_shape(load_shape)
    load_model.add_load(
        Load(
            uid=f"{conn_point_uid}_LOAD",
            connection_point_uid=conn_point_uid,
            kw=load_shape_info.kw,
            pf=load_shape_info.pf,
            load_shape=load_shape
        )
    )


def update_loads_in_model(load_model: LoadModel,
                          load_updater: Callable[[Load], None],
                          load_filter: Callable[[Load], bool] = lambda _: True):
    """Apply `load_updater` to every load in `load_model` that passes `load_filter`."""
    for load in load_model.loads.values():
        if load_filter(load):
            load_updater(load)
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/model/load/load_model.py
load_model.py
__all__ = ["MeteringModel", "get_basic_metering_model"] from typing import Dict from zepben.opendss import EnergyMeter, NetworkModel, TargetElement, Monitor class MeteringModel: def __init__( self, network: NetworkModel, energy_meters: Dict[str, EnergyMeter] = None, monitors: Dict[str, Monitor] = None ): self.network = network self.energy_meters = {} if energy_meters is None else energy_meters self.monitors = {} if monitors is None else monitors def add_energy_meter(self, energy_meter: EnergyMeter): self._validate_element_ref(energy_meter.element) self.energy_meters[energy_meter.uid] = energy_meter def add_monitor(self, monitor: Monitor): self._validate_element_ref(monitor.element) self.monitors[monitor.uid] = monitor def _validate_element_ref(self, element: TargetElement): line_uids = self.network.lines.keys() transformer_uids = self.network.transformers.keys() load_connection_uids = self.network.connection_points.keys() if element.uid not in load_connection_uids \ and element.uid not in line_uids \ and element.uid not in transformer_uids: raise ReferenceError(f"No element found with uid {element.uid}.") def remove_energy_meter(self, uid: str): del self.energy_meters[uid] def remove_monitor(self, uid: str): del self.monitors[uid] def copy(self): raise NotImplementedError("Copy method is not implemented") def get_basic_metering_model(network_model: NetworkModel) -> MeteringModel: metering_model = MeteringModel(network_model) lines_connected_to_feeder_head = [ln for ln in network_model.lines.values() if "source" in ln.bus_conn1.bus.uid or "source" in ln.bus_conn2.bus.uid] for ln in lines_connected_to_feeder_head: t_element = TargetElement( uid=ln.uid, element_type="Line" ) metering_model.add_monitor(Monitor(uid=f"{t_element.uid}_monitor", element=t_element, mode=1)) metering_model.add_energy_meter(EnergyMeter(uid=f"{t_element.uid}_em", element=t_element, term=1)) for transformer in network_model.transformers.values(): if any(w.kv < 1 for w in transformer.windings): 
metering_model.add_energy_meter( EnergyMeter( uid=f"{transformer.uid}_em", element=TargetElement( uid=transformer.uid, element_type="Transformer" ), term=1 ) ) return metering_model
zepben.opendss
/zepben.opendss-0.4.0b1-py3-none-any.whl/zepben/opendss/model/metering/metering_model.py
metering_model.py
"""Client and server classes corresponding to protobuf-defined services.""" import grpc from zepben.protobuf.cc import cc_requests_pb2 as zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2 from zepben.protobuf.cc import cc_responses_pb2 as zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2 class CustomerConsumerStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.getIdentifiedObjects = channel.stream_stream( '/zepben.protobuf.cc.CustomerConsumer/getIdentifiedObjects', request_serializer=zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2.GetIdentifiedObjectsRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2.GetIdentifiedObjectsResponse.FromString, ) self.getCustomersForContainer = channel.stream_stream( '/zepben.protobuf.cc.CustomerConsumer/getCustomersForContainer', request_serializer=zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2.GetCustomersForContainerRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2.GetCustomersForContainerResponse.FromString, ) class CustomerConsumerServicer(object): """Missing associated documentation comment in .proto file.""" def getIdentifiedObjects(self, request_iterator, context): """Get identified objects """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getCustomersForContainer(self, request_iterator, context): """Get customers for a given EquipmentContainer """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_CustomerConsumerServicer_to_server(servicer, server): rpc_method_handlers = { 'getIdentifiedObjects': grpc.stream_stream_rpc_method_handler( servicer.getIdentifiedObjects, 
request_deserializer=zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2.GetIdentifiedObjectsRequest.FromString, response_serializer=zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2.GetIdentifiedObjectsResponse.SerializeToString, ), 'getCustomersForContainer': grpc.stream_stream_rpc_method_handler( servicer.getCustomersForContainer, request_deserializer=zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2.GetCustomersForContainerRequest.FromString, response_serializer=zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2.GetCustomersForContainerResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'zepben.protobuf.cc.CustomerConsumer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. class CustomerConsumer(object): """Missing associated documentation comment in .proto file.""" @staticmethod def getIdentifiedObjects(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.cc.CustomerConsumer/getIdentifiedObjects', zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2.GetIdentifiedObjectsRequest.SerializeToString, zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2.GetIdentifiedObjectsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getCustomersForContainer(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.cc.CustomerConsumer/getCustomersForContainer', zepben_dot_protobuf_dot_cc_dot_cc__requests__pb2.GetCustomersForContainerRequest.SerializeToString, 
zepben_dot_protobuf_dot_cc_dot_cc__responses__pb2.GetCustomersForContainerResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
zepben.protobuf
/zepben.protobuf-0.25.0b2-py3-none-any.whl/zepben/protobuf/cc/cc_pb2_grpc.py
cc_pb2_grpc.py
"""Client and server classes corresponding to protobuf-defined services.""" import grpc from zepben.protobuf.nm import set_switch_state_requests_pb2 as zepben_dot_protobuf_dot_nm_dot_set__switch__state__requests__pb2 from zepben.protobuf.nm import set_switch_state_responses_pb2 as zepben_dot_protobuf_dot_nm_dot_set__switch__state__responses__pb2 class SwitchStateServiceStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.setCurrentSwitchStates = channel.unary_unary( '/zepben.protobuf.nm.SwitchStateService/setCurrentSwitchStates', request_serializer=zepben_dot_protobuf_dot_nm_dot_set__switch__state__requests__pb2.SetCurrentSwitchStatesRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nm_dot_set__switch__state__responses__pb2.SetCurrentSwitchStatesResponse.FromString, ) class SwitchStateServiceServicer(object): """Missing associated documentation comment in .proto file.""" def setCurrentSwitchStates(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_SwitchStateServiceServicer_to_server(servicer, server): rpc_method_handlers = { 'setCurrentSwitchStates': grpc.unary_unary_rpc_method_handler( servicer.setCurrentSwitchStates, request_deserializer=zepben_dot_protobuf_dot_nm_dot_set__switch__state__requests__pb2.SetCurrentSwitchStatesRequest.FromString, response_serializer=zepben_dot_protobuf_dot_nm_dot_set__switch__state__responses__pb2.SetCurrentSwitchStatesResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'zepben.protobuf.nm.SwitchStateService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. 
class SwitchStateService(object): """Missing associated documentation comment in .proto file.""" @staticmethod def setCurrentSwitchStates(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.nm.SwitchStateService/setCurrentSwitchStates', zepben_dot_protobuf_dot_nm_dot_set__switch__state__requests__pb2.SetCurrentSwitchStatesRequest.SerializeToString, zepben_dot_protobuf_dot_nm_dot_set__switch__state__responses__pb2.SetCurrentSwitchStatesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
zepben.protobuf
/zepben.protobuf-0.25.0b2-py3-none-any.whl/zepben/protobuf/nm/set_switch_state_pb2_grpc.py
set_switch_state_pb2_grpc.py
"""Client and server classes corresponding to protobuf-defined services.""" import grpc from zepben.protobuf.nc import nc_requests_pb2 as zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2 from zepben.protobuf.nc import nc_responses_pb2 as zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2 class NetworkConsumerStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.getIdentifiedObjects = channel.stream_stream( '/zepben.protobuf.nc.NetworkConsumer/getIdentifiedObjects', request_serializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetIdentifiedObjectsRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetIdentifiedObjectsResponse.FromString, ) self.getNetworkHierarchy = channel.unary_unary( '/zepben.protobuf.nc.NetworkConsumer/getNetworkHierarchy', request_serializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetNetworkHierarchyRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetNetworkHierarchyResponse.FromString, ) self.getEquipmentForContainers = channel.stream_stream( '/zepben.protobuf.nc.NetworkConsumer/getEquipmentForContainers', request_serializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetEquipmentForContainersRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetEquipmentForContainersResponse.FromString, ) self.getCurrentEquipmentForFeeder = channel.unary_stream( '/zepben.protobuf.nc.NetworkConsumer/getCurrentEquipmentForFeeder', request_serializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetCurrentEquipmentForFeederRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetCurrentEquipmentForFeederResponse.FromString, ) self.getEquipmentForRestriction = channel.unary_stream( '/zepben.protobuf.nc.NetworkConsumer/getEquipmentForRestriction', 
request_serializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetEquipmentForRestrictionRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetEquipmentForRestrictionResponse.FromString, ) self.getTerminalsForNode = channel.unary_stream( '/zepben.protobuf.nc.NetworkConsumer/getTerminalsForNode', request_serializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetTerminalsForNodeRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetTerminalsForNodeResponse.FromString, ) class NetworkConsumerServicer(object): """Missing associated documentation comment in .proto file.""" def getIdentifiedObjects(self, request_iterator, context): """Get identified objects """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getNetworkHierarchy(self, request, context): """Get a simplified view of the network hierarchy from the geographical region down to the feeder level """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getEquipmentForContainers(self, request_iterator, context): """Get an EquipmentContainer's equipment """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getCurrentEquipmentForFeeder(self, request, context): """Get an EquipmentContainer's current equipment """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getEquipmentForRestriction(self, request, context): """Get an OperationalRestriction's equipment """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def 
getTerminalsForNode(self, request, context): """Get a ConnectivityNodes Terminals """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_NetworkConsumerServicer_to_server(servicer, server): rpc_method_handlers = { 'getIdentifiedObjects': grpc.stream_stream_rpc_method_handler( servicer.getIdentifiedObjects, request_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetIdentifiedObjectsRequest.FromString, response_serializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetIdentifiedObjectsResponse.SerializeToString, ), 'getNetworkHierarchy': grpc.unary_unary_rpc_method_handler( servicer.getNetworkHierarchy, request_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetNetworkHierarchyRequest.FromString, response_serializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetNetworkHierarchyResponse.SerializeToString, ), 'getEquipmentForContainers': grpc.stream_stream_rpc_method_handler( servicer.getEquipmentForContainers, request_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetEquipmentForContainersRequest.FromString, response_serializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetEquipmentForContainersResponse.SerializeToString, ), 'getCurrentEquipmentForFeeder': grpc.unary_stream_rpc_method_handler( servicer.getCurrentEquipmentForFeeder, request_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetCurrentEquipmentForFeederRequest.FromString, response_serializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetCurrentEquipmentForFeederResponse.SerializeToString, ), 'getEquipmentForRestriction': grpc.unary_stream_rpc_method_handler( servicer.getEquipmentForRestriction, request_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetEquipmentForRestrictionRequest.FromString, 
response_serializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetEquipmentForRestrictionResponse.SerializeToString, ), 'getTerminalsForNode': grpc.unary_stream_rpc_method_handler( servicer.getTerminalsForNode, request_deserializer=zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetTerminalsForNodeRequest.FromString, response_serializer=zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetTerminalsForNodeResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'zepben.protobuf.nc.NetworkConsumer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. class NetworkConsumer(object): """Missing associated documentation comment in .proto file.""" @staticmethod def getIdentifiedObjects(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.nc.NetworkConsumer/getIdentifiedObjects', zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetIdentifiedObjectsRequest.SerializeToString, zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetIdentifiedObjectsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getNetworkHierarchy(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.nc.NetworkConsumer/getNetworkHierarchy', zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetNetworkHierarchyRequest.SerializeToString, zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetNetworkHierarchyResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 
@staticmethod def getEquipmentForContainers(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.nc.NetworkConsumer/getEquipmentForContainers', zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetEquipmentForContainersRequest.SerializeToString, zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetEquipmentForContainersResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getCurrentEquipmentForFeeder(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_stream(request, target, '/zepben.protobuf.nc.NetworkConsumer/getCurrentEquipmentForFeeder', zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetCurrentEquipmentForFeederRequest.SerializeToString, zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetCurrentEquipmentForFeederResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getEquipmentForRestriction(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_stream(request, target, '/zepben.protobuf.nc.NetworkConsumer/getEquipmentForRestriction', zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetEquipmentForRestrictionRequest.SerializeToString, zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetEquipmentForRestrictionResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getTerminalsForNode(request, target, options=(), 
channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_stream(request, target, '/zepben.protobuf.nc.NetworkConsumer/getTerminalsForNode', zepben_dot_protobuf_dot_nc_dot_nc__requests__pb2.GetTerminalsForNodeRequest.SerializeToString, zepben_dot_protobuf_dot_nc_dot_nc__responses__pb2.GetTerminalsForNodeResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
zepben.protobuf
/zepben.protobuf-0.25.0b2-py3-none-any.whl/zepben/protobuf/nc/nc_pb2_grpc.py
nc_pb2_grpc.py
"""Client and server classes corresponding to protobuf-defined services.""" import grpc from zepben.protobuf.mc import mc_requests_pb2 as zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2 from zepben.protobuf.mc import mc_responses_pb2 as zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2 class MeasurementConsumerStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.getAccumulatorValues = channel.unary_stream( '/zepben.protobuf.mc.MeasurementConsumer/getAccumulatorValues', request_serializer=zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetAccumulatorValueRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetAccumulatorValueResponse.FromString, ) self.getAnalogValues = channel.unary_stream( '/zepben.protobuf.mc.MeasurementConsumer/getAnalogValues', request_serializer=zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetAnalogValueRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetAnalogValueResponse.FromString, ) self.getDiscreteValues = channel.unary_stream( '/zepben.protobuf.mc.MeasurementConsumer/getDiscreteValues', request_serializer=zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetDiscreteValueRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetDiscreteValueResponse.FromString, ) class MeasurementConsumerServicer(object): """Missing associated documentation comment in .proto file.""" def getAccumulatorValues(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getAnalogValues(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) 
context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getDiscreteValues(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_MeasurementConsumerServicer_to_server(servicer, server): rpc_method_handlers = { 'getAccumulatorValues': grpc.unary_stream_rpc_method_handler( servicer.getAccumulatorValues, request_deserializer=zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetAccumulatorValueRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetAccumulatorValueResponse.SerializeToString, ), 'getAnalogValues': grpc.unary_stream_rpc_method_handler( servicer.getAnalogValues, request_deserializer=zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetAnalogValueRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetAnalogValueResponse.SerializeToString, ), 'getDiscreteValues': grpc.unary_stream_rpc_method_handler( servicer.getDiscreteValues, request_deserializer=zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetDiscreteValueRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetDiscreteValueResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'zepben.protobuf.mc.MeasurementConsumer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. 
class MeasurementConsumer(object): """Missing associated documentation comment in .proto file.""" @staticmethod def getAccumulatorValues(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_stream(request, target, '/zepben.protobuf.mc.MeasurementConsumer/getAccumulatorValues', zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetAccumulatorValueRequest.SerializeToString, zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetAccumulatorValueResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getAnalogValues(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_stream(request, target, '/zepben.protobuf.mc.MeasurementConsumer/getAnalogValues', zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetAnalogValueRequest.SerializeToString, zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetAnalogValueResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getDiscreteValues(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_stream(request, target, '/zepben.protobuf.mc.MeasurementConsumer/getDiscreteValues', zepben_dot_protobuf_dot_mc_dot_mc__requests__pb2.GetDiscreteValueRequest.SerializeToString, zepben_dot_protobuf_dot_mc_dot_mc__responses__pb2.GetDiscreteValueResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
zepben.protobuf
/zepben.protobuf-0.25.0b2-py3-none-any.whl/zepben/protobuf/mc/mc_pb2_grpc.py
mc_pb2_grpc.py
"""Client and server classes corresponding to protobuf-defined services.""" import grpc from zepben.protobuf.dc import dc_requests_pb2 as zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2 from zepben.protobuf.dc import dc_responses_pb2 as zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2 class DiagramConsumerStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.getIdentifiedObjects = channel.stream_stream( '/zepben.protobuf.dc.DiagramConsumer/getIdentifiedObjects', request_serializer=zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2.GetIdentifiedObjectsRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2.GetIdentifiedObjectsResponse.FromString, ) self.getDiagramObjects = channel.stream_stream( '/zepben.protobuf.dc.DiagramConsumer/getDiagramObjects', request_serializer=zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2.GetDiagramObjectsRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2.GetDiagramObjectsResponse.FromString, ) class DiagramConsumerServicer(object): """Missing associated documentation comment in .proto file.""" def getIdentifiedObjects(self, request_iterator, context): """Get identified objects """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def getDiagramObjects(self, request_iterator, context): """Get diagram objects """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_DiagramConsumerServicer_to_server(servicer, server): rpc_method_handlers = { 'getIdentifiedObjects': grpc.stream_stream_rpc_method_handler( servicer.getIdentifiedObjects, request_deserializer=zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2.GetIdentifiedObjectsRequest.FromString, 
response_serializer=zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2.GetIdentifiedObjectsResponse.SerializeToString, ), 'getDiagramObjects': grpc.stream_stream_rpc_method_handler( servicer.getDiagramObjects, request_deserializer=zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2.GetDiagramObjectsRequest.FromString, response_serializer=zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2.GetDiagramObjectsResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'zepben.protobuf.dc.DiagramConsumer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. class DiagramConsumer(object): """Missing associated documentation comment in .proto file.""" @staticmethod def getIdentifiedObjects(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.dc.DiagramConsumer/getIdentifiedObjects', zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2.GetIdentifiedObjectsRequest.SerializeToString, zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2.GetIdentifiedObjectsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def getDiagramObjects(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.dc.DiagramConsumer/getDiagramObjects', zepben_dot_protobuf_dot_dc_dot_dc__requests__pb2.GetDiagramObjectsRequest.SerializeToString, zepben_dot_protobuf_dot_dc_dot_dc__responses__pb2.GetDiagramObjectsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
zepben.protobuf
/zepben.protobuf-0.25.0b2-py3-none-any.whl/zepben/protobuf/dc/dc_pb2_grpc.py
dc_pb2_grpc.py
"""Client and server classes corresponding to protobuf-defined services.""" import grpc from zepben.protobuf.mp import mp_requests_pb2 as zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2 from zepben.protobuf.mp import mp_responses_pb2 as zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2 class MeasurementProducerStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.CreateAccumulatorValue = channel.unary_unary( '/zepben.protobuf.mp.MeasurementProducer/CreateAccumulatorValue', request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAccumulatorValueRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAccumulatorValueResponse.FromString, ) self.CreateAnalogValue = channel.unary_unary( '/zepben.protobuf.mp.MeasurementProducer/CreateAnalogValue', request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAnalogValueRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAnalogValueResponse.FromString, ) self.CreateDiscreteValue = channel.unary_unary( '/zepben.protobuf.mp.MeasurementProducer/CreateDiscreteValue', request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateDiscreteValueRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateDiscreteValueResponse.FromString, ) self.CreateAccumulatorValues = channel.unary_unary( '/zepben.protobuf.mp.MeasurementProducer/CreateAccumulatorValues', request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAccumulatorValuesRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAccumulatorValuesResponse.FromString, ) self.CreateAnalogValues = channel.unary_unary( '/zepben.protobuf.mp.MeasurementProducer/CreateAnalogValues', 
request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAnalogValuesRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAnalogValuesResponse.FromString, ) self.CreateDiscreteValues = channel.unary_unary( '/zepben.protobuf.mp.MeasurementProducer/CreateDiscreteValues', request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateDiscreteValuesRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateDiscreteValuesResponse.FromString, ) self.CreateMeasurementValues = channel.stream_stream( '/zepben.protobuf.mp.MeasurementProducer/CreateMeasurementValues', request_serializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateMeasurementValuesRequest.SerializeToString, response_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateMeasurementValuesResponse.FromString, ) class MeasurementProducerServicer(object): """Missing associated documentation comment in .proto file.""" def CreateAccumulatorValue(self, request, context): """Package: IEC-61970 Base/Meas """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateAnalogValue(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateDiscreteValue(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateAccumulatorValues(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') 
raise NotImplementedError('Method not implemented!') def CreateAnalogValues(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateDiscreteValues(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateMeasurementValues(self, request_iterator, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_MeasurementProducerServicer_to_server(servicer, server): rpc_method_handlers = { 'CreateAccumulatorValue': grpc.unary_unary_rpc_method_handler( servicer.CreateAccumulatorValue, request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAccumulatorValueRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAccumulatorValueResponse.SerializeToString, ), 'CreateAnalogValue': grpc.unary_unary_rpc_method_handler( servicer.CreateAnalogValue, request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAnalogValueRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAnalogValueResponse.SerializeToString, ), 'CreateDiscreteValue': grpc.unary_unary_rpc_method_handler( servicer.CreateDiscreteValue, request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateDiscreteValueRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateDiscreteValueResponse.SerializeToString, ), 'CreateAccumulatorValues': grpc.unary_unary_rpc_method_handler( servicer.CreateAccumulatorValues, 
request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAccumulatorValuesRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAccumulatorValuesResponse.SerializeToString, ), 'CreateAnalogValues': grpc.unary_unary_rpc_method_handler( servicer.CreateAnalogValues, request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAnalogValuesRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAnalogValuesResponse.SerializeToString, ), 'CreateDiscreteValues': grpc.unary_unary_rpc_method_handler( servicer.CreateDiscreteValues, request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateDiscreteValuesRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateDiscreteValuesResponse.SerializeToString, ), 'CreateMeasurementValues': grpc.stream_stream_rpc_method_handler( servicer.CreateMeasurementValues, request_deserializer=zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateMeasurementValuesRequest.FromString, response_serializer=zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateMeasurementValuesResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'zepben.protobuf.mp.MeasurementProducer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. 
# NOTE: grpc-generated convenience stubs (grpc.experimental API). Do not edit # by hand — regenerate from the .proto definitions instead. Each static method # issues a single RPC against a target without requiring a pre-built channel. class MeasurementProducer(object): """Missing associated documentation comment in .proto file.""" # Unary RPC: send one AccumulatorValue, receive one response. @staticmethod def CreateAccumulatorValue(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.mp.MeasurementProducer/CreateAccumulatorValue', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAccumulatorValueRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAccumulatorValueResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) # Unary RPC: send one AnalogValue, receive one response. @staticmethod def CreateAnalogValue(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.mp.MeasurementProducer/CreateAnalogValue', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAnalogValueRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAnalogValueResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) # Unary RPC: send one DiscreteValue, receive one response. @staticmethod def CreateDiscreteValue(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.mp.MeasurementProducer/CreateDiscreteValue', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateDiscreteValueRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateDiscreteValueResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) # Unary RPC: send a batch of AccumulatorValues in one request message. @staticmethod def CreateAccumulatorValues(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.mp.MeasurementProducer/CreateAccumulatorValues', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAccumulatorValuesRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAccumulatorValuesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) # Unary RPC: send a batch of AnalogValues in one request message. @staticmethod def CreateAnalogValues(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.mp.MeasurementProducer/CreateAnalogValues', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateAnalogValuesRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateAnalogValuesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) # Unary RPC: send a batch of DiscreteValues in one request message. @staticmethod def CreateDiscreteValues(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/zepben.protobuf.mp.MeasurementProducer/CreateDiscreteValues', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateDiscreteValuesRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateDiscreteValuesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) # Bidirectional streaming RPC: note this one takes a request *iterator* # and uses stream_stream, unlike the unary methods above. @staticmethod def CreateMeasurementValues(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/zepben.protobuf.mp.MeasurementProducer/CreateMeasurementValues', zepben_dot_protobuf_dot_mp_dot_mp__requests__pb2.CreateMeasurementValuesRequest.SerializeToString, zepben_dot_protobuf_dot_mp_dot_mp__responses__pb2.CreateMeasurementValuesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 
zepben.protobuf
/zepben.protobuf-0.25.0b2-py3-none-any.whl/zepben/protobuf/mp/mp_pb2_grpc.py
mp_pb2_grpc.py
import json import pathlib import tomli from uuid import UUID from datetime import datetime from datetime import timedelta from jose import jwt from passlib.context import CryptContext from fastapi import HTTPException from fastapi import status from fastapi import Header def verify_token(header=Header(..., alias="WWW-Authorization")): """ Function to verify authorization header with json web token :param header: request header named "WWW-Authorization" :type header: fastapi.Header """ try: token = header.split("Bearer")[1].strip() return token except IndexError: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail="Невалидный заголовок авторизации", ) class UUIDEncoder(json.JSONEncoder): """Encoder for uuid to json responses""" def default(self, obj): if isinstance(obj, UUID): return str(obj) return json.JSONEncoder.default(self, obj) class AuthService: def __init__(self, expire: timedelta): self.pwd_context: CryptContext = CryptContext(schemes=["bcrypt"], deprecated="auto") self.expire = expire def verify_password(self, plain_password, hashed_password) -> bool: return self.pwd_context.verify(plain_password, hashed_password) def get_password_hash(self, password) -> str: return self.pwd_context.hash(password) def create_access_token(self, data: dict) -> str: to_encode = data.copy() expire = datetime.utcnow() + self.expire to_encode.update({"exp": expire}) encoded_jwt = jwt.encode( to_encode, self.jwt_secret, algorithm="HS256" ) return encoded_jwt def get_project_meta(project_path: str = pathlib.Path(__name__).parent.parent.parent): pyproject_path = pathlib.Path( project_path, "pyproject.toml" ) with open(pyproject_path, mode="rb") as pyproject: return tomli.load(pyproject)["tool"]["poetry"] def get_project_data() -> tuple: project_meta = get_project_meta() return project_meta["name"], project_meta["version"]
zeph1rr-fastapi-utils
/zeph1rr_fastapi_utils-1.1.0-py3-none-any.whl/zeph1rr_fastapi_utils/utils.py
utils.py
import os from gostcrypto.gostcipher import GOST34122015Kuznechik class Kuznechik: """ Class for Kuznechik (GOST R 34.12-2015) encoding. NOTE(review): encrypt() processes each 16-byte block independently (no chaining, no IV) — effectively ECB mode; identical plaintext blocks yield identical ciphertext blocks. Confirm this is acceptable for the use case. :param key: 32byte key for encoding :type key: bytearray """ def __init__(self, key: bytearray) -> None: self.key = key self.cipher_obj = GOST34122015Kuznechik(key) # Cipher block size (16 bytes for Kuznechik). self.buffer_size = self.cipher_obj.block_size @staticmethod def _dump_key(key: bytearray, key_path: str = 'keys'): # Write the raw key bytes to <key_path>/key, creating the directory # on first use (retry after makedirs on FileNotFoundError). try: with open(os.path.join(key_path, 'key'), 'wb') as file: file.write(key) except FileNotFoundError: os.makedirs(key_path) Kuznechik._dump_key(key, key_path) @staticmethod def generate_key(key_path: str = 'keys') -> bytearray: """ Generating key and key_file. :param key_path: path to folder for key_file :type key_path: str """ # os.urandom is a CSPRNG; 32 bytes = 256-bit key. key = os.urandom(32) Kuznechik._dump_key(key, key_path) return key def encrypt(self, data: str) -> str: """ Encrypt data. Returns the ciphertext as a hex string. NOTE(review): the input is padded to a 16-byte multiple with ASCII '0' characters, and decrypt() strips ALL trailing '0' characters. Plaintexts that legitimately end in '0' are therefore corrupted on round-trip. Fixing this (e.g. PKCS#7 padding) would change the ciphertext format and break already-encrypted data — needs a migration decision. :param data: data to encrypt :type data: str """ data = data.encode() # Pad with ASCII '0' (0x30) until len(data) is a multiple of 16; # no padding is added when the length is already a multiple of 16, # which makes the padding ambiguous (see NOTE above). while 16 - (len(data) % 16) != 0 and 16 - (len(data) % 16) != 16: data += '0'.encode() encrypted_data = bytearray() # Encrypt each 16-byte block independently (ECB-like). for i in range(0, len(data), 16): encrypted_data += self.cipher_obj.encrypt(data[i:i+16]) return encrypted_data.hex() def decrypt(self, data: str) -> str: """ Decrypt data. Accepts the hex string produced by encrypt(). NOTE(review): strips every trailing '0' character, so plaintext that really ends in '0' loses characters; an all-'0' plaintext would also empty the list and raise IndexError at decrypted_data[-1]. :param data: data to encrypt :type data: str """ encrypted_data = bytes.fromhex(data) decrypted_data = bytearray() for i in range(0, len(encrypted_data), 16): decrypted_data += self.cipher_obj.decrypt(encrypted_data[i:i+16]) decrypted_data = list(decrypted_data.decode()) i = len(decrypted_data) - 1 # Strip the '0' padding from the end (lossy — see NOTE above). while '0' in decrypted_data[i]: del decrypted_data[i] i -= 1 return ''.join(decrypted_data) 
zeph1rr-kuznechik
/zeph1rr_kuznechik-0.1.0.tar.gz/zeph1rr_kuznechik-0.1.0/zeph1rr_kuznechik/Kuznechik.py
Kuznechik.py
![logo](https://user-images.githubusercontent.com/39420322/179288567-257d5aa4-c19f-42b3-be58-cd77bd18d561.png) ![release](https://img.shields.io/github/v/release/venkatachalamlab/zephir) [![PyPI](https://img.shields.io/pypi/v/zephir)](https://pypi.org/project/zephir/) [![Downloads](https://pepy.tech/badge/zephir)](https://pepy.tech/project/zephir) [![GitHub](https://img.shields.io/github/license/venkatachalamlab/ZephIR)](https://github.com/venkatachalamlab/ZephIR/blob/master/LICENSE) [![GitHub stars](https://img.shields.io/github/stars/venkatachalamlab/ZephIR.svg?style=social&label=Star)](https://github.com/venkatachalamlab/ZephIR) [![Youtube](https://img.shields.io/badge/YouTube-Demo-red)](https://youtu.be/4O9aIftvoqM) ZephIR is a multiple object tracking algorithm based on image registration and built on PyTorch. Check out our [preprint](https://www.biorxiv.org/content/10.1101/2022.07.18.500485v1) and [tutorial video](https://youtu.be/4O9aIftvoqM)! ### Latest Updates in v1.0.3 * better support for single-keypoint tracking * major updates to keypoint detection algorithm (ZephOD), including a new CLI command: `auto_annotate` for automatically annotating a frame with detected keypoints without linking (i.e. identity-blind) * see [release notes](https://github.com/venkatachalamlab/ZephIR/releases) for full list of changes ## About ZephIR tracks keypoints in a 2D or 3D movie by registering image descriptors sampled around each keypoint. 
Image registration loss is combined with three additional regularization terms: - spring connections between neighboring objects allow a flexible spatial model of loosely correlated motion - feature detection optimizes results towards centers of detected features - temporal smoothing of pixel intensity a small patch of frames limit fluctuations in activity Overview of tracking loss: ![loss](https://user-images.githubusercontent.com/39420322/179583408-79b86ebc-7d44-4fd0-ab80-a53eee300c16.png) ZephIR is fast, efficient, and designed to run on laptops instead of powerful desktop workstations. It requires no prior training of any model weights, and it is capable of generalizing to a wide diversity of datasets with small tweaks to parameters. This makes ZephIR ideal for analyzing datasets that lack a large corpus of training data, and for tracking fluorescent sources in moving and deforming tissue, both of which create a particularly challenging environment for modern deep learning techniques. ZephIR can also serve as a data augmentation tool in some cases. We provide some support for exporting ZephIR results to [DeepLabCut](https://github.com/DeepLabCut/DeepLabCut). ## Installation ### Quick start ```bash pip install docopt pandas==1.4.2 zephir ``` ### Dependencies Make sure that **Python (>=3.8.1)** and the following packages are installed (prefer conda over pip): - dataclasses (>=0.6) - docopt (>=0.6.2) - Flask (>=2.1.2) - gevent (>=21.12.0) - h5py (>=3.6.0) - matplotlib (>=3.5.2) - numpy (>=1.22.4) - opencv-python (>=4.5.5.64) - pandas (>=1.4.2) - pathlib (>=1.0.1) - scikit-learn (>=1.0.2) - scikit-image (>=0.19.2) - scipy (>=1.7.3) - setuptools (>=61.2.0) - torch (>=1.10.0) (see [PyTorch.org](https://pytorch.org/get-started/locally/) for instructions on installing with CUDA) - tqdm (>=4.64.0) ### Build from source 1. Clone git repository: ```bash git clone https://github.com/venkatachalamlab/ZephIR.git ``` 2. Navigate to the cloned directory on your local machine. 
3. Checkout the current release: ```bash git checkout v1.0.3 ``` Use the following command to see what's new in the most recent release: ```bash git show v1.0.3 ``` 4. Install: ```bash python setup.py install ``` or install in development mode: ```bash python setup.py develop ``` ## Getting Started Run from command line: ```bash zephir --dataset=. [options] ``` We provide a detailed guide for running ZephIR as well as some example workflows for using ZephIR [here](https://github.com/venkatachalamlab/ZephIR/blob/main/docs/Guide-ZephIR.md). ## Parameters For a list of all CLI options and user-tunable parameters, see [here](https://github.com/venkatachalamlab/ZephIR/blob/main/docs/Guide-parameters.md). To help figure out what options may be right for you, check out the list of examples with explanations for the chosen parameters [here](https://github.com/venkatachalamlab/ZephIR/blob/main/docs/examples.md). ## Interfacing with Annotator ZephIR includes an annotator GUI with custom Python macros for interacting with the data from the GUI. Run from command line: ```bash annotator --dataset=. [--port=5000] ``` Learn more about the annotator and its features [here](https://github.com/venkatachalamlab/ZephIR/blob/main/docs/annotatorGUI.md). We also provide a more detailed user guide for using the GUI as a part of a ZephIR workflow [here](https://github.com/venkatachalamlab/ZephIR/blob/main/docs/Guide-annotatorGUI.md).
zephir
/zephir-1.0.3.tar.gz/zephir-1.0.3/README.md
README.md
import time import base64 import codecs import csv import json import logging import os import math import random import re import string import hashlib import pytz import datetime from datetime import datetime, timedelta from email.utils import format_datetime from dateutil import parser import requests from flask import ( Flask, Response, request, current_app as app, render_template ) # from twilio.base.exceptions import TwilioRestException # from twilio.rest import Client as TwilioClient from voluptuous import MultipleInvalid from unicodedata import normalize from werkzeug.utils import secure_filename logger = logging.getLogger(__name__) def is_allowed_file(filename, allowed_extensions={'csv'}): """ This function allows checks if the extension of the filename received and returns a boolean value based on whether it is present in the `allowed_extensions` set or not. :param str filename: The name of the file :param set allowed_extensions: Allowed extensions :return bool: """ file_extension = filename.rsplit('.', 1)[1].lower() return '.' in filename and file_extension in allowed_extensions def get_rows_from_csv(f_path, header=False, delimiter=',',\ int_fields=[], empty_check_col=None): """ f_path - Represents the relative path of the CSV file header - Set to True if the first row is to be skipped. delimiter - CSV delimiter can be `,`, `;`, etc. int_fields - List of columns that has to be converted to integer - Empty values are returned as None. 
""" with codecs.open(f_path, encoding='utf-8', errors='ignore') as f: f.seek(0) reader = csv.reader(f, delimiter=delimiter) # Skip the header if specified if header: next(reader) rows = [] for row in reader: # Skip row if the required check is empty if empty_check_col is not None: if row[empty_check_col] == '': continue for i, col in enumerate(row): row[i] = col.strip() rows.append(row) return rows def get_rows_from_workbook_sheet(sheet, header=False, int_fields=[],\ empty_check_col=None): """ f_path - Represents the relative path of the CSV file header - Set to True if the first row is to be skipped. delimiter - CSV delimiter can be `,`, `;`, etc. int_fields - List of columns that has to be converted to integer - Empty values are returned as None. """ reader = sheet.iter_rows() # Skip the header if specified if header: next(reader) rows = [] for row in reader: # Skip row if the required check is empty if empty_check_col is not None: if row[empty_check_col] == '': continue new_row = [] for i, col in enumerate(row): print(col.value, type(col.value)) try: new_row.append( col.value.strip()) except: new_row.append(str(col.value).strip()) rows.append(new_row) return rows def responsify(data, message=None, http_status=200, pagination=None,\ summary=None): """ Argument data refers to data or errors, the latter in case of the http_status being 4xx or 5xx. 
status: str - 'success'/'error' http_status: int - 200 to 599 data (success) array/dictionary/None - contains the data errors: array - :param dict/list data: Data dict or list of errors :param str/None message: The optional message to be sent by the API :param int http_status: The status code of the response :param tuple pagination: current_page, standard_page_size, total_pages :param summary: :return dict: The dictionary response that has to be jsonified """ if http_status < 400: res = { 'status': 'success', 'http_status': http_status, 'data': data, 'message': message, } else: res = { 'status': 'error', 'http_status': http_status, 'errors': data, 'message': message, } if pagination: res['pagination'] = { 'current_page': pagination[0], 'standard_page_size': pagination[1], 'total_pages': pagination[2], } if summary: res['summary'] = { 'active_count': summary[0], } return res def send_email(to, subject, mailgun_config, template_string,\ template=None, template_data=None, attachments=None,\ from_=None, reply_to=None, recipient_vars=None,\ delivery_time=None, env='development'): """ Takes care of sending an email based on the email service configured with the application. This function is used to send both individual and bulk emails to keep the code DRY (Needs confirmation). mailgun_config = { 'sender': None, 'url': None, 'api_key': None, } `recipient_vars` is a must when sending bulk emails if you want to make sure it is sent individually to the recipients, otherwise they have the rest of the recipient addresses too. :param str template: The HTML email template file path :param dict template_data: The data to be rendered with the template :param list(str) to: List of recipients - Mailgun recipient format :param str subject: The email subject :param dict mailgun_config: Contains keys: `URL`, `API_KEY`, `SENDER` :param list(str) attachments: List of file paths to be attached :param ??? recipient_vars: ??? 
:param datetime delivery_time: The time the email has to be delivered :return requests.models.Response: """ # logger.info(template_string) if template: html = render_template(template, data=template_data) else: html = template_string # logger.info(html) data = { 'from': mailgun_config['SENDER'], 'to': to, 'subject': subject, 'html': html, } if from_: data['from'] = from_ if reply_to: data['h:Reply-To'] = reply_to if delivery_time: data['o:deliverytime'] = format_datetime( datetime.utcnow() + timedelta(days=int(deliverytime)) ) if recipient_vars: data['recipient-variables'] = recipient_vars #TODO: Attachments are not being delivered currently files = {} if attachments: for a in attachments: file_ = open(a[1:], 'rb') files['test'] = file_ #TODO Detect environment without the app.env variable # Use dotenv? if env == 'development': logger.warning('Development environment detected, not sending email.') return None # Requesting to Mailgun's REST API # Note that the mailgun config URL is different if Mailgun is # configured to send emails from the EU server rather than the US server res = requests.post( mailgun_config['URL'] + '/messages', auth=('api', mailgun_config['API_KEY']), data=data, files=files, ) return res def send_sms(to, body, twilio_config, env='development'): """ This method is a helper to send sms via the Twilio API. twilio_config = { 'ACCOUNT_SID': str, 'AUTH_TOKEN': str, 'FROM_NUMBER': str, } :param string to: To phone number. 
:param string body: The sms body :param dict twilio_config: Contains keys: `ACCOUNT_SID`, `AUTH_TOKEN`, `FROM_NUMBER` """ if env == 'development': logger.warning('Development environment detected, not sending SMS.') return None # Twilio client is configured with account sid + auth token twilio_client = TwilioClient( twilio_config['ACCOUNT_SID'], twilio_config['AUTH_TOKEN'] ) message = None try: message = twilio_client.messages.create( from_=twilio_config['FROM_NUMBER'], body=body, to=to, ) logger.info( 'Twilio message response with id: {}, status: {} for phone: {}' .format( message.sid, message.status, to, ) ) except TwilioRestException as te: logger.error( 'Twilio request error: {} while sending SMS to {}'.format( te, to, ) ) except Exception as e: logger.error('Cannot send SMS. Unknown exception {}'.format(e)) return message def upload_base64_encoded_file(base64_value, filename): """ This function handles uploading of a base64 encoded file and return the file object. :param ??? base64_value: The file bytes??? :param filename str: ??? :return file: """ base64_file = bytes(base64_value, 'utf-8') with open(filename, 'wb') as f: f.write(base64.decodestring(base64_file)) f.close() return f def upload_file(file_, upload_type='image', config=None): """ This function handles uploading of a file, getting the file object - typically returned by the Flask request handler. 
:param file file_: The file object that has to be saved :return str: The path of the saved file """ upload_folder = config['FILE_UPLOAD_FOLDER'] # Add timestamp to filename to avoid image replacement due to name # duplication timestamp = str(int(round(time.time() * 1000000))) filename = secure_filename(file_.filename) ext = filename.split('.')[-1] filename = '{}.{}'.format(timestamp, ext) f_path = os.path.join(upload_folder, filename) file_.save(f_path) return { 'original_name': file_.filename, 'name': filename, 'type_': ext, 'path': f_path } def validate_schema_with_errors(schema, payload): """ The first argument is the actual schema to be validated with and the second argument is the dictionary containing the data to be validated. It returns an empty list if there are no errors and a list of error dictionaries in case of an error(s). :param dict schema: Returns either False or a list of errors :param dict payload: Data object that has to be validated :return list(dict): Empty list if no errors """ errors = [] if not payload: return [{ 'field': 'data', 'description': 'Request data cannot be null', }] try: schema(payload) except MultipleInvalid as e: for x in e.errors: field = str(x.path[0]) if len(x.path) > 0: for node in x.path[1:]: field += '.' + str(node) errors.append({ 'field': field, 'description': str(x.error_message).capitalize(), }) return errors _punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+') def tokenify(text, delim='-', append_random=False, non_ascii=False): """ This function generates a slug, with a default delimiter as an hyphen NFKD: Normalization Form - Compatibility Decomposition NFKD is used normalizing a literal in unicode. 
This uses the normalize function from the unicodedata module """ result = [] for word in _punct_re.split(text.lower()): if non_ascii: word = normalize('NFKD', word) else: word = normalize('NFKD', word).encode('ascii', 'ignore')\ .decode('utf-8') if word: result.append(word) result = delim.join(result) if append_random: result += delim + str(int(round(time.time()*10**6))) return result def random_string_generator(size=5, chars=None): """ Returns a random string of digits. """ if not chars: chars = string.digits+string.ascii_letters return ''.join(random.choice(chars) for _ in range(size)) def get_datetime(s): """ This function converts the date/datetime string to python's datetime object. Accepted formats: yyyy-mm-dd, yyyy-mm-dd hh:mm, yyyy-mm-ddThh-mm :param str s: :return datetime: """ # Remove unwanted characters from the datetime # Bank transactions and SDD status has these characters in date s = s.strip(' ') # Nesting exception looks ugly but no better alternative found yet: # https://softwareengineering.stackexchange.com/questions/118788/ try: return datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S') except ValueError: try: return datetime.datetime.strptime(s, '%Y-%m-%d %H:%M') except ValueError: try: return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M') except ValueError: try: return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S') except ValueError: try: return datetime.datetime.strptime(s, '%d/%m/%Y') except ValueError: return None def serialize_datetime(datetime_object, without_day=False): """ This function serialized the datetime object representation into a universally acceptable string format. If without_day is set to True, this function converts the date into a format like yyyy-mm. 
:param datetime datetime_object: :param bool without_day: :return str: """ if not datetime_object: return None if without_day: return datetime.datetime.strftime(date, '%Y-%m') return str(datetime_object) def convert_date_deprecated(date): """ This function converts the date into a format like dd/mm/yy :param date: :return: """ converted_date = datetime.datetime.strftime(date, "%d/%m/%Y") return converted_date def convert_datetime_to_utc(datetime_obj, timezone): """ This function converts the given time of the given timezone to the UTC time. """ local = pytz.timezone(timezone) local_dt = local.localize(datetime_obj, is_dst=None) utc_dt = local_dt.astimezone(pytz.utc) return utc_dt def normalize_date(date): """ This function converts any date string into a standard date object. :param date object: :return: """ if not date: return None date = parser.parse(date.strip()) return date def generate_checksum_of_file(fpath): """ This function generates md5 checksum of the given file and returns it. :param str fname: Path of the file :return str: Hash string of the file """ hash_md5 = hashlib.md5() with open(fpath, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hash_md5.update(chunk) return hash_md5.hexdigest() def add_pagination(query_params, default_page=1, default_page_size=100): # Pagination stuff page = query_params.get('page', default_page) try: page = int(page) if page <= 0: raise ValueError except (TypeError, ValueError): page = default_page page_size = query_params.get('page_size', default_page_size) try: page_size = int(page_size) if page_size <= 0: raise ValueError except (TypeError, ValueError): page_size = default_page_size query_params['page'] = page query_params['page_size'] = page_size return query_params # Flask related def add_urls(blueprint, resource_classes): """ This function adds the URL rules of all the resources that is being passed as an argument list using Flask's add_url_rule method. 
This allows us to group requests and HTTP method handlers ins classes with each method handler as a function. :param Blueprint blueprint: The blueprint to which the routes are to be attached :param list(object) resource_classes: The user defined resource classes """ for cls in resource_classes: cls_name = cls.__name__ if hasattr(cls, 'get_all'): blueprint.add_url_rule( cls.collection_route, cls_name + '_get_all', view_func=cls.get_all, methods=['GET'] ) if hasattr(cls, 'post'): blueprint.add_url_rule( cls.collection_route, cls_name + '_post', view_func=cls.post, methods=['POST'] ) if hasattr(cls, 'get'): blueprint.add_url_rule( cls.resource_route, cls_name + '_get', view_func=cls.get, methods=['GET'] ) if hasattr(cls, 'patch'): blueprint.add_url_rule( cls.resource_route, cls_name + '_patch', view_func=cls.patch, methods=['PATCH'] ) if hasattr(cls, 'delete'): blueprint.add_url_rule( cls.resource_route, cls_name + '_delete', view_func=cls.delete, methods=['DELETE'] ) class ApiFlask(Flask): """ ApiFlask is inherited from the Flask class to override the make_response function to automatically convert a returned dictionary to a JSON response. """ def make_response(self, rv): if isinstance(rv, dict): return Response( json.dumps(rv), status=rv['http_status'], mimetype='application/json', ) return Flask.make_response(self, rv)
zephony-helpers
/zephony_helpers-0.3.tar.gz/zephony_helpers-0.3/zephony_helpers/__init__.py
__init__.py
# Zephserver ## Python service manager that can be used as a WebSocket server Zephserver is a python service manager that can be used in a cluster. It has been written to support a websocket server but it can be used for any other stuff needing full time services or cluster management. More documentation on how the server works [here](http://zephserver.readthedocs.org/en/latest/) ## Quickstart To use zephserver you will need python 2.7. It should not work on python 3 and a Unix incompatible system. ### Quickstart as service manager (use zephserver as a websocket server bellow) #### 1.Install zephserver from pip `pip install zephserver` #### 2.Create your folder `mkdir myserver` #### 3.Add the configuration file take zephsettings.py file from the example folder. the variable heart_beat_period, PORT_ZEPH and TASKS_PATH are not used empty the service list(these services are made for the websocket server) #### 4.Add the starter file Copy zephstarter_no_django.py file in your folder #### 5.Write your services read the zephserver/service/service_interface.py file to know the minimum interface you have to implement. #### 6.Register your service Add your service to the SERVICE_LIST variable. Respect the syntax `my_server.my_package.my_service/MyService` syntaxe. example for the service_interface it would be `'zephserver.service.service_interface/ServiceInterface'` nota : there is no need for the service object to have the same name as its file and there can be multiple services in a file. #### 7.Start your server To start your server in the current shell, simply call : `python zephstarter_no_django.py`. tips: to not lock your shell user the ` &` modifier at the end of the command. #### 8.Stop your server to stop your server call `zephserver-stop /path/to/the/folder/interface.sock` If the server died without the zephserver command (crash) you will have to remove the server.lock file. 
### Quickstart as websocket server #### 1.Install zephserver from pip `pip install zephserver` If you wants to use django services (db_service and session backend with django) install django(only version 1.7 is supported) #### 2.Create your folder `mkdir myserver` if you use django myserver will simply refer to the django site folder #### 3.Add the configuration file take zephsettings.py file from the example folder. #### 4.Add the starter file If you use django Copy the zephstarter.py file and adpt it to your application otherwise copy zephstarter_no_django.py file in your folder #### 7.Start your server To start your server in the current shell, simply call : `python zephstarter.py`. tips: to not lock your shell user the ` &` modifier at the end of the command. #### 8.Stop your server to stop your server call `zephserver-stop /path/to/the/folder/interface.sock` If the server died without the zephserver command (crash) you will have to remove the server.lock file.
zephserver
/zephserver-0.1.27.tar.gz/zephserver-0.1.27/README.md
README.md
from __future__ import absolute_import from behave.model import ScenarioOutline from behave.formatter.base import Formatter import behave2cucumberZephyr from zipfile import ZipFile from behave.model_core import Status import base64 import six try: import json except ImportError: import simplejson as json import os from os import listdir, makedirs from os.path import dirname import requests import datetime class ZephyrFormatter(Formatter): name = "Zepher Formatter" description = "Zepher compatible JSON file dump of test run" dumps_kwargs = {} split_text_into_lines = True # EXPERIMENT for better readability. results_dir = "results/zephyr" results_file = "zephyr-json.zip" json_number_types = six.integer_types + (float,) json_scalar_types = json_number_types + (six.text_type, bool, type(None)) file = None step_count = 0 api_key = None test_cycle_key = None start_date_time = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000+0000") url_base = None def __init__(self, stream_opener, config): super(ZephyrFormatter, self).__init__(stream_opener, config) self.create_results_dirs() self.file_cleanup(True) self.feature_count = 0 self.current_feature = None self.current_feature_data = None self.current_scenario = None self._step_index = 0 self.step_count = 0 self.str_result_json = "" api_key = self.config.userdata.get("ZEPHYR_API_KEY", None) if not api_key: # I know... 
this is unfortuantely a hack for a short comming api_key = self.config.userdata.get("zephyr_api_key", "") self.api_key = api_key self.url_base = self.config.userdata.get("ZEPHYR_API_URL", "") def reset(self): self.current_feature = None self.current_feature_data = None self.current_scenario = None self._step_index = 0 self.step_count = 0 # -- FORMATTER API: def uri(self, uri): pass def feature(self, feature): self.file_cleanup(False, "tmp.json") self.open_new_file() self.reset() self.current_feature = feature self.current_feature_data = { "keyword": feature.keyword, "name": feature.name, "tags": list(feature.tags), "location": six.text_type(feature.location), "status": None, # Not known before feature run. } element = self.current_feature_data if feature.description: element["description"] = feature.description def background(self, background): element = self.add_feature_element( { "type": "background", "keyword": background.keyword, "name": background.name, "location": six.text_type(background.location), "steps": [], } ) if background.name: element["name"] = background.name self._step_index = 0 # -- ADD BACKGROUND STEPS: Support *.feature file regeneration. 
for step_ in background.steps: self.step(step_) def scenario(self, scenario): self.finish_current_scenario() self.current_scenario = scenario element = self.add_feature_element( { "type": "scenario", "keyword": scenario.keyword, "name": scenario.name, "tags": scenario.tags, "location": six.text_type(scenario.location), "steps": [], "status": None, } ) if scenario.description: element["description"] = scenario.description self._step_index = 0 @classmethod def make_table(cls, table): table_data = { "headings": table.headings, "rows": [list(row) for row in table.rows], } return table_data def step(self, step): s = { "keyword": step.keyword, "step_type": step.step_type, "name": step.name, "location": six.text_type(step.location), } if step.text: text = step.text if self.split_text_into_lines and "\n" in text: text = text.splitlines() s["text"] = text if step.table: s["table"] = self.make_table(step.table) element = self.current_feature_element element["steps"].append(s) self.step_count = +1 def match(self, match): args = [] for argument in match.arguments: argument_value = argument.value if not isinstance(argument_value, self.json_scalar_types): # -- OOPS: Avoid invalid JSON format w/ custom types. # Use raw string (original) instead. argument_value = argument.original assert isinstance(argument_value, self.json_scalar_types) arg = { "value": argument_value, } if argument.name: arg["name"] = argument.name if argument.original != argument_value: # -- REDUNDANT DATA COMPRESSION: Suppress for strings. arg["original"] = argument.original args.append(arg) match_data = { "location": six.text_type(match.location) or "", "arguments": args, } if match.location: # -- NOTE: match.location=None occurs for undefined steps. 
steps = self.current_feature_element["steps"] steps[self._step_index]["match"] = match_data def result(self, step): steps = self.current_feature_element["steps"] steps[self._step_index]["result"] = { "status": step.status.name, "duration": step.duration, } if step.error_message and step.status == Status.failed: result_element = steps[self._step_index]["result"] result_element["error_message"] = step.error_message self._step_index += 1 def embedding(self, mime_type, data): step = self.current_feature_element["steps"][-1] step["embeddings"].append( { "mime_type": mime_type, "data": base64.b64encode(data).replace("\n", ""), } ) def eof(self): """ End of feature """ if not self.current_feature_data: return # -- NORMAL CASE: Write collected data of current feature. self.finish_current_scenario() self.update_status_data() if self.feature_count == 0: # -- FIRST FEATURE: self.write_json_header() else: # -- NEXT FEATURE: self.write_json_feature_separator() self.write_json_feature(self.current_feature_data) self.feature_count += 1 self.write_feature_results() self.reset() def open_new_file(self): self.file = open(f"{self.results_dir}/tmp.json", "w") def close(self): if self.feature_count == 0: # -- FIRST FEATURE: Corner case when no features are provided. self.write_json_header() self.zip_files() if self.api_key: self.upload_results() self.update_testCycle() else: print("No ZEPHYR_API_KEY provided. 
No updates send to Zepker") self.file_cleanup() # -- JSON-DATA COLLECTION: def add_feature_element(self, element): assert self.current_feature_data is not None if "elements" not in self.current_feature_data: self.current_feature_data["elements"] = [] self.current_feature_data["elements"].append(element) return element @property def current_feature_element(self): assert self.current_feature_data is not None return self.current_feature_data["elements"][-1] def update_status_data(self): assert self.current_feature assert self.current_feature_data self.current_feature_data["status"] = self.current_feature.status.name def finish_current_scenario(self): if self.current_scenario: status_name = self.current_scenario.status.name self.current_feature_element["status"] = status_name # -- JSON-WRITER: def write_json_header(self): self.file.write("[") self.file.flush() def write_json_footer(self): self.file.write("]") self.file.flush() def write_json_feature(self, feature_data): self.file.write(json.dumps(feature_data)) self.file.flush() def write_json_feature_separator(self): self.file.write(",\n\n") self.file.flush() # -- File functions: def create_results_dirs(self): # path = dirname(dirname(__file__)) dirs = self.results_dir.split("/") path = "" for dir_name in dirs: try: # Create target Directory if path == "": path = dir_name makedirs(dir_name) else: path = f"{path}/{dir_name}" makedirs(path) print("Directory ", path, " Created ") except FileExistsError: if self.config.verbose: print("Directory ", path, " already exists") pass def file_cleanup(self, all_files=False, name=""): for file_name in listdir(self.results_dir): try: if all_files: os.remove(f"{self.results_dir}/{file_name}") elif file_name == name: os.remove(f"{self.results_dir}/{file_name}") break elif file_name.endswith(".json") and file_name == name: os.remove(f"{self.results_dir}/{file_name}") except FileNotFoundError: pass except PermissionError: if self.config.verbose: print("Can not delete folder") pass def 
zip_files(self): if len(os.listdir(self.results_dir)) > 1: # There will be a tmp.json file with ZipFile(f"{self.results_dir}/{self.results_file}", "w") as zf: for file_name in listdir(self.results_dir): if file_name.endswith(".json") and file_name != "tmp.json": zf.write(f"{self.results_dir}/{file_name}") zf.close() def write_feature_results(self): if self.step_count == 0: self.feature_count = 0 self.close_stream() return self.write_json_footer() self.feature_count = 0 for tag in self.current_feature_data["elements"][0]["tags"]: if "TestCaseKey" in tag: tag = tag.split("TestCaseKey=")[1] break self.close_stream() with open(f"{self.results_dir}/tmp.json", "r") as f: content = json.load(f) zephyr_json = behave2cucumberZephyr.convert(content) f.close() file_name = f"{self.results_dir}/{tag}.json" with open(file_name, "w") as f: # json.dump(zephyr_json, f) f.write(json.dumps(zephyr_json)) def upload_results(self): upload_results = self.config.userdata.get( "ZEPHYR_UPLOAD_RESULTS", "false" ).lower() if upload_results == "false": return zephyr_project_key = self.config.userdata.get("ZEPHYR_PROJECT_KEY", "") create_test_cases = self.config.userdata.get( "ZEPHYR_AUTO_CREATE_TEST_CASES", "false" ).lower() url = f"{self.url_base}/v2/automations/executions/cucumber?projectKey={zephyr_project_key}&autoCreateTestCases={create_test_cases}" file = f"{self.results_dir}/{self.results_file}" payload = {} files = [("file", (self.results_file, open(file, "rb"), "application/zip"))] headers = {"Authorization": self.api_key} response = requests.request( "POST", url, headers=headers, data=payload, files=files ) response.raise_for_status() print(f"Test cycle key: {response.json()['testCycle']['key']}") self.test_cycle_key = response.json()["testCycle"]["key"] def update_testCycle(self): # Get test cycle data from Zepyr url = f"{self.url_base}/v2/testcycles/{self.test_cycle_key}" headers = {"Authorization": self.api_key, "Content-Type": "application/json"} response = 
requests.request("GET", url, headers=headers) response.raise_for_status() # Add detail to response before posting it back to Zephyr data = response.json() test_name = self.config.userdata.get( "ZEPHYR_TEST_CYCLE_NAME", f"Automation Build {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", ) data["name"] = test_name data["plannedStartDate"] = self.start_date_time data["plannedEndDate"] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000+0000") response = requests.request("PUT", url, headers=headers, data=json.dumps(data)) response.raise_for_status()
zephyr-behave
/zephyr_behave-1.2.1.tar.gz/zephyr_behave-1.2.1/zephyr_behave/formatter.py
formatter.py
# Zephyr Zephyr is a command-line utility that provides project and component scaffolding to build modular pipelines. ## What is Zephyr Zephyr allows you to quickly bootstrap boilerplate code at the project **and** module level by leveraging [cookiecutter](https://github.com/cookiecutter/cookiecutter). Zephyr also provides a way to bootstrap modular pipelines by converting a list of modules into a [Metaflow](https://github.com/Netflix/metaflow) pipeline. ## Using Zephyr Getting up and running with Zephyr is easy ### Installing Zephyr Install zephyr from [pypi](https://pypi.org/project/zephyr-bootstrap/) ``` pip install zephyr-bootstrap ``` ### Bootstrap project To start simply use... ``` zephyr init ``` Follow the prompts to create a sample project ``` 2021-05-25 21:38:16,467 | INFO : initializing project... app_name [my_project]: desc [My project Description]: Project my_project created ``` The project comes ready with a loaded Makefile, example notebook, and everything to build a library package. ``` ls my_project/ data docker-compose.yml docs Makefile my_project notebooks pipelines README.md requirements.txt setup.py tests VERSION ``` #### Custom template You can also use custom cookiecutters to bootstrap a project ``` zephyr init --custom https://github.com/drivendata/cookiecutter-data-science ``` ### Create Module Once a project is created, you can create a new module using... ``` zephyr module create ``` Follow the prompts to bootstap a module ``` 2021-05-25 21:40:44,452 | INFO : Creating module... module_name [my_module]: desc [My module description]: project_name [my_project]: Module my_module created ``` The module comes ready with a buildable docker image, example script, and sample proccesing code. 
``` ls my_project/modules/my_module/ build_docker.sh controller.py Dockerfile example.py __init__.py linux_packages.txt README.md requirements.txt sample_inputs sample_outputs tests ``` #### Custom template You can also use custom cookiecutters to bootstrap a module ``` zephyr module create --custom https://github.com/audreyfeldroy/cookiecutter-pypackage ``` #### Delete module Can remove a module using... ``` zephyr module delete ``` Follow promopts to remove module ``` Current modules: ['my_module'] Type module to delete: my_module Do you want to delete my_module [y/N]: y 2021-05-25 21:43:27,450 | INFO : Deleting module...my_module Module my_module deleted ``` ### Pipeline Can create a modular pipeline using... ``` zephyr pipeline create ``` Select what modules you want, by following the prompts ``` 2021-05-25 21:53:59,024 | INFO : Creating pipeline... Current modules: ['my_module', 'test_mod', 'mod_1', 'mod_2'] Enter comma sepearted modules for pipeline: mod_1,mod_2,my_module Do you want to continue? with these modules: ['mod_1', 'mod_2', 'my_module'] [Y/n]: pipeline_name [my_pipeline]: Pipeline created Test with... python pipelines/my_pipeline/my_pipeline_pipeline.py run ``` The bootstrapped pipeline is ready to run. ``` Metaflow 2.2.8 executing My_pipelineFlow for user:banjtheman Validating your flow... The graph looks good! Running pylint... Pylint is happy! 2021-05-25 21:54:27.950 Workflow starting (run-id 1621994067931797): 2021-05-25 21:54:27.955 [1621994067931797/start/1 (pid 29700)] Task is starting. 2021-05-25 21:54:28.779 [1621994067931797/start/1 (pid 29700)] 2021-05-25 21:54:28,779 |INFO: Start step 2021-05-25 21:54:28.864 [1621994067931797/start/1 (pid 29700)] Task finished successfully. 2021-05-25 21:54:28.868 [1621994067931797/mod_1_step/2 (pid 29708)] Task is starting. 
2021-05-25 21:54:29.519 [1621994067931797/mod_1_step/2 (pid 29708)] 2021-05-25 21:54:29,519 |INFO: Starting module mod_1 2021-05-25 21:54:29.604 [1621994067931797/mod_1_step/2 (pid 29708)] Task finished successfully. 2021-05-25 21:54:29.609 [1621994067931797/mod_2_step/3 (pid 29716)] Task is starting. 2021-05-25 21:54:30.335 [1621994067931797/mod_2_step/3 (pid 29716)] 2021-05-25 21:54:30,334 |INFO: Starting module mod_2 2021-05-25 21:54:30.427 [1621994067931797/mod_2_step/3 (pid 29716)] Task finished successfully. 2021-05-25 21:54:30.432 [1621994067931797/my_module_step/4 (pid 29724)] Task is starting. 2021-05-25 21:54:31.081 [1621994067931797/my_module_step/4 (pid 29724)] 2021-05-25 21:54:31,081 |INFO: Starting module my_module 2021-05-25 21:54:31.178 [1621994067931797/my_module_step/4 (pid 29724)] Task finished successfully. 2021-05-25 21:54:31.183 [1621994067931797/end/5 (pid 29732)] Task is starting. 2021-05-25 21:54:31.863 [1621994067931797/end/5 (pid 29732)] 2021-05-25 21:54:31,862 |INFO: Job's done 2021-05-25 21:54:31.953 [1621994067931797/end/5 (pid 29732)] Task finished successfully. 2021-05-25 21:54:31.953 Done! ```
zephyr-bootstrap
/zephyr_bootstrap-0.0.2.tar.gz/zephyr_bootstrap-0.0.2/README.md
README.md
from typing import List # 3rd party imports import click from cookiecutter.main import cookiecutter # Project Imports from zephyr.zephyr_utils import zephyr_utils def create_module_string(module_name: str, next_step: str) -> str: """ Purpose: Create the source for one generated Metaflow @step method Args: module_name - name of the module the step runs next_step - name of the next step method to chain to Returns: module_string - python source for the generated step method """ # NOTE(review): the generated code sits inside a FlowSpec class body, # so the decorator/def are indented 4 spaces and the body 8. return "\n".join( [ "", " @step", f" def {module_name}_step(self):", ' """', f" Runs module {module_name}", ' """', " # TODO insert your module processes here", f' logging.info("Starting module {module_name}")', f" self.next(self.{next_step})", "", ] ) def replace_pipeline_file( pipeline_file: str, modules: List[str], project_name: str, pipeline_name: str ) -> None: """ Purpose: Replace placeholders in pipeline file Args: pipeline_file - file to replace modules - modules to add (in execution order) project_name - name of the project pipeline_name - name of the generated pipeline Returns: N/A """ # Get file data file_data = zephyr_utils.read_from_file(pipeline_file) # replace project imports file_data = file_data.replace("REPLACE_PROJECT_IMPORT", f"import {project_name}") # replace flow name file_data = file_data.replace( "REPLACE_PIPELINE_NAME", f"{pipeline_name.capitalize()}" ) # replace start next: the start step chains to the first module file_data = file_data.replace( "REPLACE_START_STEP", f"self.next(self.{modules[0]}_step)" ) modules_length = len(modules) module_replace_string = "" # Generate one step method per module; the last module chains to "end". for index, module in enumerate(modules): if index + 1 == modules_length: next_step = "end" else: next_step = f"{modules[index + 1]}_step" module_replace_string += create_module_string(module, next_step) # replace module holder file_data = file_data.replace("REPLACE_MODULES", module_replace_string) # Replace file with new code zephyr_utils.write_to_file(pipeline_file, file_data) def create_pipeline(project_name: str) -> None: """ Purpose: Create a zephyr pipeline from a user-selected list of modules Args: project_name - name of the project Returns: N/A """ # Load zephyr config zephyr_config = zephyr_utils.load_json(".zephyr/config.json") zephyr_modules = zephyr_config["modules"] click.echo(f"Current modules: {zephyr_modules}") # NOTE: prompt text (including the "sepearted" typo) matches the # documented CLI output -- do not change it in isolation. module_list = click.prompt("Enter comma sepearted modules for pipeline", type=str) modules = module_list.split(",") # check if valid modules for module in modules: if module not in zephyr_modules: click.echo(f"Invalid module : {module}") return click.confirm( f"Do you want to continue? with these modules: {modules}", abort=True, default=True, ) full_dir_path = cookiecutter( "https://github.com/banjtheman/cookiecutter-zephyr-pipeline", output_dir="pipelines/", ) pipeline_name = full_dir_path.split("/")[-1] pipeline_full_name = f"{full_dir_path}/{pipeline_name}_pipeline.py" replace_pipeline_file(pipeline_full_name, modules, project_name, pipeline_name) # update the config json with the pipeline zephyr_config["pipelines"].append(pipeline_name) zephyr_utils.save_json(".zephyr/config.json", zephyr_config) click.echo("Pipeline created") click.echo("Test with...") click.echo(f"python {pipeline_full_name} run") 
zephyr-bootstrap
/zephyr_bootstrap-0.0.2.tar.gz/zephyr_bootstrap-0.0.2/zephyr/zephyr_utils/pipeline_utils.py
pipeline_utils.py
import click import shutil # 3rd party imports from cookiecutter.main import cookiecutter # Project Imports from zephyr.zephyr_utils import zephyr_utils def create_module(project_name: str) -> None: """ Purpose: Create a zephyr module Args: project - name of the project Returns: N/A """ full_dir_path = cookiecutter( "https://github.com/banjtheman/cookiecutter-zephyr-module", output_dir=f"{project_name}/modules/", extra_context={"project_name": project_name}, ) # Add module to __init__.py init_path = f"{project_name}/modules/__init__.py" module_name = full_dir_path.split("/")[-1] import_text = f"from . import {module_name}\n" zephyr_utils.append_to_file(init_path, import_text) # update the config json with the modules zephyr_config = zephyr_utils.load_json(".zephyr/config.json") zephyr_config["modules"].append(module_name) zephyr_utils.save_json(".zephyr/config.json", zephyr_config) # append to docker-compose.yml docker_compose_text = f"""\ {module_name}: build: {project_name}/modules/{module_name}/.\n""" zephyr_utils.append_to_file("./docker-compose.yml", docker_compose_text) click.echo(f"Module {module_name} created") def create_custom_module(url: str) -> None: """ Purpose: Create a zephyr module Args: project - name of the project Returns: N/A """ full_dir_path = cookiecutter( url, output_dir=f"modules/", ) module_name = full_dir_path.split("/")[-1] # update the config json with the modules zephyr_config = zephyr_utils.load_json(".zephyr/config.json") zephyr_config["modules"].append(module_name) zephyr_utils.save_json(".zephyr/config.json", zephyr_config) click.echo(f"Custom module {module_name} created") def delete_module(project_name: str, module_name: str) -> None: """ Purpose: delete a zephyr module Args: project - name of the project module_name - name of module Returns: N/A """ moudle_path = f"{project_name}/modules/{module_name}" shutil.rmtree(moudle_path, ignore_errors=True) zephyr_config = zephyr_utils.load_json(".zephyr/config.json") # If custom module 
then exit if "custom_project" in zephyr_config: return # Remove module to __init__.py init_path = f"{project_name}/modules/__init__.py" import_text = f"from . import {module_name}\n" # Get file data file_data = zephyr_utils.read_from_file(init_path) # replace project imports file_data = file_data.replace(import_text, f"") zephyr_utils.write_to_file(init_path, file_data) # update the config json with the modules zephyr_config["modules"].remove(module_name) zephyr_utils.save_json(".zephyr/config.json", zephyr_config) # remove from docker-compose.yml docker_compose_text = f"""\ {module_name}: build: {project_name}/modules/{module_name}/.\n""" file_data = zephyr_utils.read_from_file("./docker-compose.yml") # replace docker build file_data = file_data.replace(docker_compose_text, f"") zephyr_utils.write_to_file("./docker-compose.yml", file_data) click.echo(f"Module {module_name} deleted")
zephyr-bootstrap
/zephyr_bootstrap-0.0.2.tar.gz/zephyr_bootstrap-0.0.2/zephyr/zephyr_utils/module_utils.py
module_utils.py
import logging import os import json from subprocess import Popen from typing import Type, Union, Dict, Any # Local Python Library Imports from zephyr.zephyr_config.config import Config def check_if_in_project() -> bool: """ Purpose: check if in zephyr project Args: N/A Returns: status (Boolean): False if not in zephyr project, True if in zephyr project """ # check if folder exisit if os.path.exists(".zephyr/config.json"): # we should only check this file, in a git project if you delete .git # you are no longer in a git project we will follow that example status = True else: logging.info("Not inside Zephyr Project") logging.info("run `zephyr init and retry") status = False return status def get_logger(name: str, log_level: int) -> logging.Logger: """ Purpose: Load logger object Args: name (String): name of log log_level(Int): Level for log Returns: logger (Logger obj): Logger object """ logger = logging.getLogger(name) if not logger.handlers: # Prevent logging from propagating to the root logger logger.propagate = False console = logging.StreamHandler() logger.addHandler(console) formatter = logging.Formatter( "%(asctime)s - %(levelname)s - %(name)s - %(message)s" ) console.setFormatter(formatter) logger.setLevel(log_level) return logger def load_configs() -> Type[Config]: """ Purpose: Load configuration object Args: environment (String): Environment to get configs for Returns: config (Config obj): Configuration object """ return Config def load_json(path_to_json: str) -> Dict[str, Any]: """ Purpose: Load json files Args: path_to_json (String): Path to json file Returns: Conf: JSON file if loaded, else None """ try: with open(path_to_json, "r") as config_file: conf = json.load(config_file) return conf except Exception as error: logging.error(error) raise TypeError("Invalid JSON file") def save_json(json_path: str, json_data: Any) -> None: """ Purpose: Save json files Args: path_to_json (String): Path to json file json_data: Data to save Returns: N/A """ try: with 
open(json_path, "w") as outfile: json.dump(json_data, outfile) except Exception as error: raise OSError(error) def append_to_file(file_path: str, file_text: str) -> bool: """ Purpose: Append text to a file Args/Requests: file_path: file path file_text: Text of file Return: Status: True if appended, False if failed """ try: with open(file_path, "a") as myfile: myfile.write(file_text) return True except Exception as error: logging.error(error) return False def read_from_file(file_path: str) -> str: """ Purpose: Read data from a file Args/Requests: file_path: file path Return: read_data: Text from file """ try: with open(file_path) as f: read_data = f.read() except Exception as error: logging.error(error) return None return read_data def write_to_file(file_path: str, file_text: str) -> bool: """ Purpose: Write text from a file Args/Requests: file_path: file path file_text: Text of file Return: Status: True if appened, False if failed """ try: with open(file_path, "w") as myfile: myfile.write(file_text) return True except Exception as error: logging.error(error) return False
zephyr-bootstrap
/zephyr_bootstrap-0.0.2.tar.gz/zephyr_bootstrap-0.0.2/zephyr/zephyr_utils/zephyr_utils.py
zephyr_utils.py
import os # 3rd party imports import nbformat as nbf import click from cookiecutter.main import cookiecutter # Project Imports from zephyr.zephyr_utils import zephyr_utils def create_project(): """ Purpose: Create a zephyr project from the default cookiecutter template Args: N/A Returns: N/A """ project_path = cookiecutter("https://github.com/banjtheman/cookiecutter-zephyr") project_name = project_path.split("/")[-1] create_starter_notebook(project_name) click.echo(f"Project {project_name} created") def create_custom_project(url): """ Purpose: Create a custom zephyr project from a user-supplied template Args: url - cookiecutter template URL Returns: N/A """ project_path = cookiecutter(url) project_name = project_path.split("/")[-1] custom_json = { "project_name": project_name, "custom_project": "True", "project_desc": "custom project", "pipelines": [], "modules": [], } # SECURITY/PORTABILITY FIX: was os.system(f"mkdir -p {project_name}/...") # which is shell-injectable via the project name and Unix-only. os.makedirs(f"{project_name}/.zephyr/", exist_ok=True) os.makedirs(f"{project_name}/pipelines/", exist_ok=True) os.makedirs(f"{project_name}/modules/", exist_ok=True) # if file doesnt exist save config_path = f"{project_name}/.zephyr/config.json" if not os.path.exists(config_path): zephyr_utils.save_json(config_path, custom_json) click.echo(f"Custom Project {project_name} created") def create_starter_notebook(project_name: str): """ Purpose: Create a jupyter notebook to start experimenting with Args: project_name - name of the project Returns: N/A """ nb = nbf.v4.new_notebook() starter_text = f"""\ # {project_name} Sample Notebook This is a starter notebook to facilitate experimentation """ imports = f"""\ %load_ext autoreload %autoreload 2 # common notebook imports import json import pandas as pd import numpy as np from IPython.display import display # Project import import {project_name} """ blank_cell = """\ # TODO: begin my experimentation """ nb["cells"] = [ nbf.v4.new_markdown_cell(starter_text), nbf.v4.new_code_cell(imports), nbf.v4.new_code_cell(blank_cell), ] nbf.write(nb, f"{project_name}/notebooks/example_notebook.ipynb") 
zephyr-bootstrap
/zephyr_bootstrap-0.0.2.tar.gz/zephyr_bootstrap-0.0.2/zephyr/zephyr_utils/init_utils.py
init_utils.py
import os import sys import logging import click # Local Python Library Imports from zephyr.zephyr_state.zephyr_state import ZephyrState from zephyr.zephyr_utils import zephyr_utils, init_utils, module_utils, pipeline_utils # Setup Zephyr Logging LOGLEVEL = logging.INFO logging.basicConfig( format="%(asctime)s | %(levelname)s : %(message)s", level=LOGLEVEL, stream=sys.stdout, ) LOGGER = logging.getLogger("zephyr-log") @click.group(invoke_without_command=False) @click.version_option("0.0.1") @click.pass_context def zephyr_cli(cli_context: click.Context) -> None: """ A Modular Pipeline Scaffolding Tool """ cli_context.obj = ZephyrState() return None @click.command("init") @click.option("--custom", help="Your custom cookie cutter url", required=False) def init_command(custom: str) -> None: """Create and initialize zephyr folder""" """ Purpose: Create and initialize zephyr project Args: N/A Returns: N/A """ if custom: click.echo("Building custom cookie cutter") init_utils.create_custom_project(custom) return LOGGER.info(f"initializing project...") init_utils.create_project() # Module Command Group @zephyr_cli.group("module") def module_commands(): """Module related commands""" pass @module_commands.command(name="create", help="creates new module") @click.option("--custom", help="Your custom cookie cutter url", required=False) def module_create(custom: str) -> None: """Create and initialize zephyr module""" """ Purpose: Create and initialize zephyr module Args: N/A Returns: N/A """ # Check if in project if zephyr_utils.check_if_in_project(): if custom: click.echo("Building custom module") module_utils.create_custom_module(custom) return # get module json project_json = zephyr_utils.load_json(".zephyr/config.json") project_name = project_json["project_name"] LOGGER.info(f"Creating module...") module_utils.create_module(project_name) @module_commands.command(name="delete", help="deletes a module") def module_delete() -> None: """Deletes a zephyr module""" """ Purpose: 
Delete a zephyr module Args: N/A Returns: N/A """ # Check if in project if zephyr_utils.check_if_in_project(): # get module json zephyr_config = zephyr_utils.load_json(".zephyr/config.json") project_name = zephyr_config["project_name"] zephyr_moudles = zephyr_config["modules"] click.echo(f"Current modules: {zephyr_moudles}") module = click.prompt("Type module to delete", type=str) # check if valid module if module not in zephyr_moudles: click.echo(f"Invalid module : {module}") return click.confirm( f"Do you want to delete {module}", abort=True, default=False, ) LOGGER.info(f"Deleting module...{module}") module_utils.delete_module(project_name, module) # Pipeline Command Group @zephyr_cli.group("pipeline") def pipeline_commands(): """Pipeline related commands""" pass @pipeline_commands.command(name="create", help="creates new pipeline") def pipeline_create() -> None: """Create and initialize zephyr pipeline""" """ Purpose: Create and initialize zephyr pipeline Args: N/A Returns: N/A """ # Check if in project if zephyr_utils.check_if_in_project(): # get module json project_json = zephyr_utils.load_json(".zephyr/config.json") project_name = project_json["project_name"] LOGGER.info(f"Creating pipeline...") pipeline_utils.create_pipeline(project_name) def setup_zephyr_cli() -> None: """ Purpose: Build Command Groups for Zephyr CLI. Args: N/A Returns: N/A """ # zephyr commands zephyr_cli.add_command(init_command) module_commands.add_command(module_create) module_commands.add_command(module_delete) pipeline_commands.add_command(pipeline_create) if __name__ == "__main__": try: setup_zephyr_cli() except Exception as error: print(f"{os.path.basename(__file__)} failed due to error: {error}") raise error
zephyr-bootstrap
/zephyr_bootstrap-0.0.2.tar.gz/zephyr_bootstrap-0.0.2/zephyr/zephyr_cli/cli.py
cli.py
from enum import Enum


class Environment(Enum):
    """ Environment """
    UAT = 'uat'
    SIT = 'sit'
    PROD = 'prod'
    QA = 'qa'
    DEV = 'dev'


class Zone(Enum):
    """The country zones"""
    AR = 'ar'
    BE = 'be'
    BO = 'bo'
    BR = 'br'
    CA = 'ca'
    CL = 'cl'
    CO = 'co'
    DE = 'de'
    DO = 'do'
    EC = 'ec'
    IC = 'ic'
    GB = 'gb'
    HN = 'hn'
    MX = 'mx'
    NL = 'nl'
    PA = 'pa'
    PE = 'pe'
    PH = 'ph'
    PY = 'py'
    SV = 'sv'  # fixed: value was upper-case 'SV', inconsistent with every other zone
    TZ = 'tz'
    US = 'us'
    UY = 'uy'
    ZA = 'za'

    @classmethod
    def _missing_(cls, value):
        # Backward compatibility: ``Zone('SV')`` (and any other casing of a
        # zone code) keeps resolving even though all stored values are now
        # lower-case.  Returning None falls through to the normal ValueError.
        if isinstance(value, str):
            folded = value.casefold()
            for member in cls:
                if member.value == folded:
                    return member
        return None


_ZONE_NAME_MAP = {
    Zone.AR: 'Argentina',
    Zone.BE: 'Belgium',
    Zone.BO: 'Bolivia',
    Zone.BR: 'Brazil',
    Zone.CA: 'Canada',
    Zone.CL: 'Chile',
    Zone.CO: 'Colombia',
    Zone.DE: 'Germany',
    Zone.DO: 'Dominican Republic',
    Zone.EC: 'Ecuador',
    Zone.IC: 'Canary Island',
    Zone.GB: 'United Kingdom',
    Zone.HN: 'Honduras',
    Zone.MX: 'Mexico',
    Zone.NL: 'Netherlands',
    Zone.PA: 'Panama',
    Zone.PE: 'Peru',
    Zone.PH: 'Philippines',  # fixed: PH was missing from the map
    Zone.PY: 'Paraguay',
    Zone.SV: 'El Salvador',
    Zone.TZ: 'Tanzania',
    Zone.US: 'United States',
    Zone.UY: 'Uruguay',
    Zone.ZA: 'South Africa'}

# Update and Up-to-date Message
UPDATE_MSG = 'This scenario was updated! - Scenario Code: "{scenario_key}"'
UP_TO_DATE_MSG = 'This scenario is up to date! - Scenario Code: "{scenario_key}"'

# Zephyr request URLs (More info: https://support.smartbear.com/zephyr-scale-cloud/api-docs/)
FOLDERS_URL = 'https://api.zephyrscale.smartbear.com/v2/' \
              'folders?projectKey={project}&maxResults={max_results}&folderType=TEST_CYCLE'
TEST_EXECUTION_URL = 'https://api.zephyrscale.smartbear.com/v2/testexecutions'
TEST_EXECUTION_ISSUE_URL = 'https://api.zephyrscale.smartbear.com/v2/testexecutions/{key}/links/issues'
TEST_CASE_URL = 'https://api.zephyrscale.smartbear.com/v2/testcases/{key}'
TEST_CYCLE_URL = 'https://api.zephyrscale.smartbear.com/v2/testcycles'
TEST_CASE_SCRIPT_URL = 'https://api.zephyrscale.smartbear.com/v2/testcases/{key}/testscript'
TEST_CASE_STATUS_URL = 'https://api.zephyrscale.smartbear.com/v2/statuses/{status}'

# Zephyr "Automation Status" IDs
AUTOMATED_STATUS_ID = '584918'
NEED_UPDATE_STATUS_ID = '583101'
STATUS_ID_NAME_MAP = {'584918': 'AUTOMATED', '583101': 'NEED UPDATE'}

# BEESQM STATUS
QM_STATUS_ZEPHYR_UPDATE = 'APPROVED'

# Specific for QM
QM_PROJECT = 'BEESQM'
QM_ZONE_FIELD_PATTERN = '[Automated] Zones for testing '
QM_ENV_FIELD_PATTERN = r'\((.*?)\)'

# Test execution status
_STATUS_PASS = 'PASS'
_STATUS_FAIL = 'FAIL'

# Others
DEFAULT_HEADER = {'Content-Type': 'application/json', 'Authorization': None}
TEST_CASE_KEY_PATTERN = r'{project}-\w+'
BUG_KEY_PATTERN = r'^bug.*-(\w+-\w+)$'
HTML_HYPERLINK_PATTERN = '<a href="{url}" rel="noopener noreferrer" target="_blank">{display_text}</a>'
HTML_SUCCESS_MESSAGE_PATTERN = '<span style="color:rgb(65, 168, 95)">{message}</span>'
HTML_FAIL_MESSAGE_PATTERN = '<span style="color:rgb(184, 49, 47)">{message}</span>'
HTML_LINE_BREAK = '<br />'
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
TEST_CYCLE_NAME_TIME_FORMAT = '%Y-%m-%dT%H:%M'
MAX_FOLDER_RESULTS = 1000
TEST_EXECUTION_PASS_COMMENT = 'The scenario has completed with no failures.'
TEST_EXECUTION_FAILED_COMMENT = 'The scenario has failed.'
TEST_SCRIPT_EXAMPLES_TABULATION_SIZE = 6

# Environment variable names set by Azure Pipelines CI runs.
AZURE_PROJECT = 'CI_PROJECT'
AZURE_BUILD_ID = 'CI_BUILD_ID'
AZURE_URL = 'CI_URL'

_QM_SUB_FOLDER_MAP = {
    'regression': 'Regression',
    'expansion': 'Expansion',
    'deploy': 'Deploy',
    'healthcheck': 'Healthcheck',
    'healthcheck-prod': 'Healthcheck PROD',
    'segment': 'Segment',
    'smoke': 'Smoke'
}

# The maps below accept both Environment members and their raw string values
# so callers may pass either form.
_QM_AUTOMATED_TEST_CYCLE_FOLDER_MAP = {
    Environment.SIT: 'SIT-Automated Test Cycles',
    Environment.UAT: 'UAT-Automated Test Cycles',
    Environment.PROD: 'PROD-Automated Test Cycles',
    'sit': 'SIT-Automated Test Cycles',
    'uat': 'UAT-Automated Test Cycles',
    'prod': 'PROD-Automated Test Cycles'
}

_QM_ENVIRONMENT_NAME_MAP = {
    Environment.SIT: '[Master Branch] SIT/QA',
    Environment.UAT: '[Release Branch] UAT',
    Environment.PROD: '[Production branch] Prod',
    'sit': '[Master Branch] SIT/QA',
    'uat': '[Release Branch] UAT',
    'prod': '[Production branch] Prod'
}

_JIRA_VERSION_URL = 'https://ab-inbev.atlassian.net/rest/api/3/project/{project}/version?query={version}'
_JIRA_ACCOUNT_URL = 'https://ab-inbev.atlassian.net/rest/api/latest/user/search?query={email}'
_JIRA_ISSUE_URL = 'https://ab-inbev.atlassian.net/rest/api/2/issue/{key}'
_QM_TEST_CYCLE_TITLE_VERSION_PATTERN = '{major}.{minor}.{micro}'
_TEST_COMPONENT_CYCLE_NAME_PATTERN = 'Core Test - {tag} {version} ' \
                                     '{component} {platform} {zone} {env} {date}'
_TEST_CYCLE_NAME_PATTERN = 'Core test - {tag} - {execution_data} - {date}'
_TEST_CYCLE_URL_PATTERN = 'https://ab-inbev.atlassian.net/projects/' \
                          '{project}?selectedItem=com.atlassian.plugins.atlassian-connect-plugin:' \
                          'com.kanoah.test-manager__main-project-page#!/testCycle/{key}'
_TEST_CYCLE_URL = 'https://api.zephyrscale.smartbear.com/v2/testcycles'
_QM_JIRA_VERSION_PATTERN = 'release-{major}-{minor}-{micro}'

# NOTE(review): the private constants below duplicate public ones defined
# above (e.g. _HTML_LINE_BREAK / HTML_LINE_BREAK, _TEST_CYCLE_URL /
# TEST_CYCLE_URL).  Both variants are kept for backward compatibility since
# external callers may import either name.
_HTML_LINE_BREAK = '<br />'
_HTML_HYPERLINK_PATTERN = '<a href="{url}" rel="noopener noreferrer" target="_blank">{display_text}</a>'
_HTML_SUCCESS_MESSAGE_PATTERN = '<span style="color:rgb(65, 168, 95)">{message}</span>'
_HTML_FAIL_MESSAGE_PATTERN = '<span style="color:rgb(184, 49, 47)">{message}</span>'
_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
_BUG_KEY_PATTERN = r'^bug.*-(\w+-\w+)$'

# JIRA custom field IDs used by the BEESQM project.
_PLATFORM_FIELD = 'customfield_13456'
_ENVIRONMENT_FIELD = 'customfield_13464'
_ZONES_FIELD = 'customfield_13365'
_TEST_EXECUTION_PASS_COMMENT = 'The scenario has completed with no failures.'
_TEST_EXECUTION_FAILED_COMMENT = 'The scenario has failed.'
zephyr-library
/zephyr_library-1.0.7-py3-none-any.whl/zephyr_core/constants.py
constants.py
from .core_logger import logger
import time
import requests
from requests.adapters import HTTPAdapter, Retry
from .actions import get_headers
import json
import os
from .constants import (AZURE_URL, AZURE_PROJECT, AZURE_BUILD_ID, _HTML_SUCCESS_MESSAGE_PATTERN,
                        _HTML_FAIL_MESSAGE_PATTERN, TEST_CASE_URL)


def request(method, url, zephyr_token, headers=None, **kwargs):
    """
    Common request with retries on transient gateway errors.

    Args:
        method: function
            request.get, request.put, ... (a ``requests`` module function;
            only its name is used to pick the HTTP verb)
        url: str
            Request url
        zephyr_token: str
            zephyr credential
        headers: dict
            Request headers; when omitted they are built from ``zephyr_token``
        **kwargs: *
            Request args

    Returns:
        response Response, or None when the request ultimately failed
    """
    t0 = time.time()
    retry_total = 5
    retries = Retry(total=retry_total, backoff_factor=0.3, status_forcelist=[502, 503, 504])
    adapter = HTTPAdapter(max_retries=retries)
    session = requests.Session()
    # Mount the retry adapter for BOTH schemes.  The previous implementation
    # mounted it only for 'http://' and then called ``method`` directly,
    # bypassing the session, so the retry policy was never applied; the
    # Zephyr API is served over https.
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    logger.info('Starting the request to get some information in Zephyr')
    try:
        if not headers:
            headers = get_headers(authorization=zephyr_token)
        # ``method.__name__`` is the HTTP verb ('get', 'put', ...), which lets
        # us route the call through the retry-enabled session while keeping
        # the original call signature for callers.
        response = session.request(method.__name__, url=url, headers=headers, **kwargs)
        response.raise_for_status()
        return response
    except Exception as e:
        t1 = time.time()
        logger.error(f'Request Error: {e}')
        logger.error(f'url: {url}\nheaders: {headers}\n **kwargs: {kwargs}\n'
                     f' Took {t1 - t0} seconds\n Retries: {retry_total}')
        return None


def load_json_file(json_file):
    """Read ``json_file`` (UTF-8) and return the parsed JSON data."""
    logger.info('Loading the Json file data for a variable')
    with open(json_file, 'r', encoding='utf-8') as file:
        json_data = json.load(file)
    return json_data


def get_azure_url():
    """
    Retrieves the Azure URL if it is an Azure Pipeline execution.

    Returns
    -------
    str
        Azure execution URL, or None when any of the CI environment
        variables is missing (i.e. not running in Azure Pipelines).
    """
    url = os.getenv(AZURE_URL)
    project = os.getenv(AZURE_PROJECT)
    build_id = os.getenv(AZURE_BUILD_ID)
    if all((url, project, build_id)):
        return f'{url}/{project}/_build/results?buildId={build_id}&view=results'
    return None


def get_test_case_by_id(test_case_id, zephyr_token):
    """
    Request to get the test case by its Zephyr key.

    Parameters:
        test_case_id: str
            Zephyr test case key
        zephyr_token: str
            zephyr credential

    Returns:
        Response from the Zephyr test case endpoint (or None on failure)
    """
    url = TEST_CASE_URL.format(key=test_case_id)
    return request(requests.get, url, zephyr_token)


def convert_the_msg_in_html_code_format(success_msg, message):
    """
    Wrap ``message`` in the success (green) or failure (red) HTML span.

    Args:
        success_msg: bool
            True selects the success pattern, False the failure pattern
        message: str
            The message that will be displayed on Zephyr

    Returns:
        str
            The message that will be displayed on Zephyr in html format
    """
    if success_msg:
        html_message_pattern = _HTML_SUCCESS_MESSAGE_PATTERN
    else:
        html_message_pattern = _HTML_FAIL_MESSAGE_PATTERN
    return html_message_pattern.format(message=message)
zephyr-library
/zephyr_library-1.0.7-py3-none-any.whl/zephyr_core/utils.py
utils.py
import requests
import re
from .constants import TEST_CASE_URL, TEST_CASE_KEY_PATTERN
import json
import os
from atlassian import Jira
from .core_logger import logger
from . import utils

_MOBILE_PLATFORMS = ['android', 'ios']
_JIRA_BASE_URL = 'https://ab-inbev.atlassian.net'

# QM specific data
_QM_PROJECT = 'BEESQM'


class JsonTreatment:
    """Enrich raw test-result JSON entries with Zephyr/JIRA metadata.

    For each executed scenario, resolves the Zephyr test case key and the
    JIRA component, then writes the enriched list to ``zephyr_result.json``.
    """

    def __init__(self, token, jira_token, confluence_user, confluence_password):
        """
        Args:
            token: Zephyr API token.
            jira_token: JIRA API token.
            confluence_user: Atlassian user for component lookups (optional).
            confluence_password: Password/token for ``confluence_user``.
        """
        self._token = token
        self._jira_token = jira_token
        self._confluence_user = confluence_user
        self._confluence_password = confluence_password

        # Variables
        self._project = None
        self._json_data = None
        self.new_json_data = None
        self._testcase = None
        self._test_tag = None
        self._test_tag_zephyr_key = None
        self._platform = None
        self._component_test_case = None
        self._test_tag_zephyr_complete = None
        self._component_test_case_key = None
        self._test_result_list = []

    def get_test_case(self, project, scenario_tags, scenario_name, platform):
        """
        Fill in all attributes from this class with the executed scenario info
        """
        logger.info('Trying to get the test case tag on zephyr')
        for tag in scenario_tags:
            if re.match(TEST_CASE_KEY_PATTERN.format(project=project), tag):
                url = TEST_CASE_URL.format(key=tag)
                test_case = utils.request(method=requests.get, url=url,
                                          zephyr_token=self._token).json()
                test_case_platform = test_case['customFields']['Technology'].casefold()
                # NOTE(review): both branches of this platform check return
                # the test case, so the FIRST matching tag wins regardless of
                # platform -- behaviour preserved from the original code.
                if test_case_platform == platform:
                    return test_case
                else:
                    return test_case
        else:
            # for/else: no tag matched the Zephyr test case key pattern.
            logger.error(f'No valid Zephyr test case key found for the scenario '
                         f'"{scenario_name}" and the platform "{platform}".')
            return None

    def _get_the_test_case_component(self, component_id):
        """
        Get the test case component

        Args:
            component_id: str
                Component id from JIRA/Atlassian

        Returns:
            The component name, or None when no Atlassian credentials
            were provided.
        """
        if self._confluence_user and self._confluence_password:
            logger.info("Getting the Zephyr component of the test case")
            base = Jira(url=_JIRA_BASE_URL,
                        username=self._confluence_user,
                        password=self._confluence_password)
            component_test_case = base.component(component_id=component_id)['name']
            return component_test_case
        else:
            return None

    def load_json_file(self, json_file):
        """Load ``json_file`` into memory and run the enrichment pipeline."""
        self._json_data = utils.load_json_file(json_file)
        self.populate_new_json()

    def create_new_json(self):
        """Write the enriched result list to ``zephyr_result.json`` in the cwd."""
        if not self._test_result_list:  # simplified from 'not ... != []'
            logger.error('The list of result used to create the json file is empty, check it')
        else:
            logger.info('Creating the Json file with the test execution result')
            with open(f'{os.getcwd()}/zephyr_result.json', 'w', encoding='utf-8') as file:
                # Then we convert the dict for json format and save it in the file
                json.dump(self._test_result_list, file, indent=4)

    def populate_new_json(self):
        """Build the enriched entries from ``_json_data`` and write them out."""
        if not self._json_data:
            logger.error('Json file has not data or was not received')
        else:
            logger.info('Populate the list with the test results')
            # Iterate the raw entries directly instead of the original
            # index-based while loop.
            for entry in self._json_data:
                self._project = entry['projectKey']
                self._testcase = entry['testCase']
                self._platform = entry['customFields']['platform'].casefold()
                self._test_tag = entry['testTags']

                # Verify if _test_tag is a list and try to search the test tag
                # with a unique json request
                if isinstance(self._test_tag, list):
                    self._test_tag_zephyr_complete = \
                        self.get_test_case(self._project, self._test_tag,
                                           self._testcase, self._platform)
                    if not self._test_tag_zephyr_complete['customFields']['Technology'].casefold() == self._platform:
                        # Platform mismatch: fall back to the second tag.
                        self._test_tag_zephyr_key = entry['testTags'][1]
                    else:
                        self._test_tag_zephyr_key = self._test_tag_zephyr_complete['key']
                else:
                    # A single tag is used directly to get the component id.
                    self._test_tag_zephyr_key = self._test_tag
                    self._test_tag_zephyr_complete = \
                        self.get_test_case(self._project, self._test_tag,
                                           self._testcase, self._platform)

                self._component_test_case = \
                    self._get_the_test_case_component(self._test_tag_zephyr_complete['component']['id'])
                if not self._component_test_case:
                    logger.error("the Zephyr component of the test case was not found")
                self._component_test_case_key = self._test_tag_zephyr_complete['component']['id']

                self.new_json_data = {
                    "projectKey": self._project,
                    "testCase": self._testcase,
                    "testTag": self._test_tag_zephyr_key,
                    "testComponent": self._component_test_case,
                    "testComponentKey": self._component_test_case_key,
                    "statusName": entry['statusName'],
                    "environmentName": entry['environmentName'],
                    "actualEndDate": entry['actualEndDate'],
                    "executionTime": entry['executionTime'],
                    "customFields": {
                        "zone": entry['customFields']['zone'],
                        "platform": entry['customFields']['platform']
                    }
                }
                self._test_result_list.append(self.new_json_data)

            self.create_new_json()
zephyr-library
/zephyr_library-1.0.7-py3-none-any.whl/zephyr_core/json_treatment.py
json_treatment.py
.. highlight:: shell ============ Contributing ============ Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. You can contribute in many ways: Types of Contributions ---------------------- Report Bugs ~~~~~~~~~~~ Report bugs at the `GitHub Issues page`_. If you are reporting a bug, please include: * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. * Detailed steps to reproduce the bug. Fix Bugs ~~~~~~~~ Look through the GitHub issues for bugs. Anything tagged with "bug" and "help wanted" is open to whoever wants to implement it. Implement Features ~~~~~~~~~~~~~~~~~~ Look through the GitHub issues for features. Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. Write Documentation ~~~~~~~~~~~~~~~~~~~ Zephyr could always use more documentation, whether as part of the official Zephyr docs, in docstrings, or even on the web in blog posts, articles, and such. Submit Feedback ~~~~~~~~~~~~~~~ The best way to send feedback is to file an issue at the `GitHub Issues page`_. If you are proposing a feature: * Explain in detail how it would work. * Keep the scope as narrow as possible, to make it easier to implement. * Remember that this is a volunteer-driven project, and that contributions are welcome :) Get Started! ------------ Ready to contribute? Here's how to set up `Zephyr` for local development. 1. Fork the `Zephyr` repo on GitHub. 2. Clone your fork locally:: $ git clone [email protected]:your_name_here/zephyr.git 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: $ mkvirtualenv zephyr $ cd zephyr/ $ make install-develop 4. 
Create a branch for local development:: $ git checkout -b name-of-your-bugfix-or-feature Try to use the naming scheme of prefixing your branch with ``gh-X`` where X is the associated issue, such as ``gh-3-fix-foo-bug``. And if you are not developing on your own fork, further prefix the branch with your GitHub username, like ``githubusername/gh-3-fix-foo-bug``. Now you can make your changes locally. 5. While hacking your changes, make sure to cover all your developments with the required unit tests, and that none of the old tests fail as a consequence of your changes. For this, make sure to run the tests suite and check the code coverage:: $ make lint # Check code styling $ make test # Run the tests $ make coverage # Get the coverage report 6. When you're done making changes, check that your changes pass all the styling checks and tests, including other Python supported versions, using:: $ make test-all 7. Make also sure to include the necessary documentation in the code as docstrings following the `Google docstrings style`_. If you want to view how your documentation will look like when it is published, you can generate and view the docs with this command:: $ make view-docs 8. Commit your changes and push your branch to GitHub:: $ git add . $ git commit -m "Your detailed description of your changes." $ git push origin name-of-your-bugfix-or-feature 9. Submit a pull request through the GitHub website. Pull Request Guidelines ----------------------- Before you submit a pull request, check that it meets these guidelines: 1. It resolves an open GitHub Issue and contains its reference in the title or the comment. If there is no associated issue, feel free to create one. 2. Whenever possible, it resolves only **one** issue. If your PR resolves more than one issue, try to split it in more than one pull request. 3. The pull request should include unit tests that cover all the changed code 4. If the pull request adds functionality, the docs should be updated. 
Put your new functionality into a function with a docstring, and add the feature to the documentation in an appropriate place. 5. The pull request should work for all the supported Python versions. Check the `Travis Build Status page`_ and make sure that all the checks pass. Unit Testing Guidelines ----------------------- All the Unit Tests should comply with the following requirements: 1. Unit Tests should be based only in unittest and pytest modules. 2. The tests that cover a module called ``zephyr/path/to/a_module.py`` should be implemented in a separated module called ``tests/zephyr/path/to/test_a_module.py``. Note that the module name has the ``test_`` prefix and is located in a path similar to the one of the tested module, just inside the ``tests`` folder. 3. Each method of the tested module should have at least one associated test method, and each test method should cover only **one** use case or scenario. 4. Test case methods should start with the ``test_`` prefix and have descriptive names that indicate which scenario they cover. Names such as ``test_some_methed_input_none``, ``test_some_method_value_error`` or ``test_some_method_timeout`` are right, but names like ``test_some_method_1``, ``some_method`` or ``test_error`` are not. 5. Each test should validate only what the code of the method being tested does, and not cover the behavior of any third party package or tool being used, which is assumed to work properly as far as it is being passed the right values. 6. Any third party tool that may have any kind of random behavior, such as some Machine Learning models, databases or Web APIs, will be mocked using the ``mock`` library, and the only thing that will be tested is that our code passes the right values to them. 7. Unit tests should not use anything from outside the test and the code being tested. This includes not reading or writing to any file system or database, which will be properly mocked. 
Tips
----

To run a subset of tests::

    $ python -m pytest tests.test_zephyr
    $ python -m pytest -k 'foo'

Release Workflow
----------------

The process of releasing a new version involves several steps combining both
``git`` and ``bumpversion`` which, briefly:

1. Merge what is in ``main`` branch into ``stable`` branch.
2. Update the version in ``setup.cfg``, ``zephyr_ml/__init__.py``
   and ``HISTORY.md`` files.
3. Create a new git tag pointing at the corresponding commit in ``stable`` branch.
4. Merge the new commit from ``stable`` into ``main``.
5. Update the version in ``setup.cfg`` and ``zephyr_ml/__init__.py``
   to open the next development iteration.

.. note:: Before starting the process, make sure that ``HISTORY.md`` has been
          updated with a new entry that explains the changes that will be
          included in the new version. Normally this is just a list of the
          Pull Requests that have been merged to main since the last release.

Once this is done, run one of the following commands:

1. If you are releasing a patch version::

    make release

2. If you are releasing a minor version::

    make release-minor

3. If you are releasing a major version::

    make release-major

Release Candidates
~~~~~~~~~~~~~~~~~~

Sometimes it is necessary or convenient to upload a release candidate to PyPi
as a pre-release, in order to make some of the new features available for
testing on other projects before they are included in an actual full-blown
release.

In order to perform such an action, you can execute::

    make release-candidate

This will perform the following actions:

1. Build and upload the current version to PyPi as a pre-release,
   with the format ``X.Y.Z.devN``

2. Bump the current version to the next release candidate, ``X.Y.Z.dev(N+1)``

After this is done, the new pre-release can be installed by including the
``dev`` section in the dependency specification, either in ``setup.py``::

    install_requires = [
        ...
        'zephyr_ml>=X.Y.Z.dev',
        ...
    ]

or in command line::

    pip install 'zephyr_ml>=X.Y.Z.dev'

.. 
_GitHub issues page: https://github.com/D3-AI/zephyr/issues .. _Travis Build Status page: https://travis-ci.org/D3-AI/zephyr/pull_requests .. _Google docstrings style: https://google.github.io/styleguide/pyguide.html?showone=Comments#Comments
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/CONTRIBUTING.rst
CONTRIBUTING.rst
<p align="left"> <img width=15% src="https://dai.lids.mit.edu/wp-content/uploads/2018/06/Logo_DAI_highres.png" alt="DAI-Lab" /> <i>A project from Data to AI Lab at MIT.</i> </p> <!-- Uncomment these lines after releasing the package to PyPI for version and downloads badges --> <!--[![PyPI Shield](https://img.shields.io/pypi/v/zephyr_ml.svg)](https://pypi.python.org/pypi/zephyr_ml)--> <!--[![Downloads](https://pepy.tech/badge/zephyr_ml)](https://pepy.tech/project/zephyr_ml)--> <!--[![Travis CI Shield](https://travis-ci.org/signals-dev/zephyr.svg?branch=main)](https://travis-ci.org/signals-dev/zephyr)--> <!--[![Coverage Status](https://codecov.io/gh/signals-dev/zephyr/branch/main/graph/badge.svg)](https://codecov.io/gh/signals-dev/zephyr)--> # Zephyr A machine learning library for assisting in the generation of machine learning problems for wind farms operations data by analyzing past occurrences of events. | Important Links | | | ----------------------------------- | -------------------------------------------------------------------- | | :computer: **[Website]** | Check out the Sintel Website for more information about the project. | | :book: **[Documentation]** | Quickstarts, User and Development Guides, and API Reference. | | :star: **[Tutorials]** | Checkout our notebooks | | :octocat: **[Repository]** | The link to the Github Repository of this library. | | :scroll: **[License]** | The repository is published under the MIT License. | | :keyboard: **[Development Status]** | This software is in its Pre-Alpha stage. | | ![][Slack Logo] **[Community]** | Join our Slack Workspace for announcements and discussions. 
| [Website]: https://sintel.dev/ [Documentation]: https://dtail.gitbook.io/zephyr/ [Repository]: https://github.com/sintel-dev/Zephyr [Tutorials]: https://github.com/sintel-dev/Zephyr/blob/master/notebooks [License]: https://github.com/sintel-dev/Zephyr/blob/master/LICENSE [Development Status]: https://pypi.org/search/?c=Development+Status+%3A%3A+2+-+Pre-Alpha [Community]: https://join.slack.com/t/sintel-space/shared_invite/zt-q147oimb-4HcphcxPfDAM0O9_4PaUtw [Slack Logo]: https://github.com/sintel-dev/Orion/blob/master/docs/images/slack.png - Homepage: https://github.com/signals-dev/zephyr # Overview The **Zephyr** library is a framework designed to assist in the generation of machine learning problems for wind farms operations data by analyzing past occurrences of events. The main features of **Zephyr** are: * **EntitySet creation**: tools designed to represent wind farm data and the relationship between different tables. We have functions to create EntitySets for datasets with PI data and datasets using SCADA data. * **Labeling Functions**: a collection of functions, as well as tools to create custom versions of them, ready to be used to analyze past operations data in the search for occurrences of specific types of events in the past. * **Prediction Engineering**: a flexible framework designed to apply labeling functions on wind turbine operations data in a number of different ways to create labels for custom Machine Learning problems. * **Feature Engineering**: a guide to using Featuretools to apply automated feature engineerinig to wind farm data. # Install ## Requirements **Zephyr** has been developed and runs on Python 3.6 and 3.7. Also, although it is not strictly required, the usage of a [virtualenv]( https://virtualenv.pypa.io/en/latest/) is highly recommended in order to avoid interfering with other software installed in the system where you are trying to run **Zephyr**. 
## Download and Install

**Zephyr** can be installed locally using [pip](https://pip.pypa.io/en/stable/) with
the following command:

```bash
pip install zephyr-ml
```

If you want to install from source or contribute to the project please read the
[Contributing Guide](CONTRIBUTING.rst).

# Quickstart

In this short tutorial we will guide you through a series of steps that will
help you get started with **Zephyr**.

## 1. Loading the data

The first step will be to use preprocessed data to create an EntitySet. Depending on
the type of data, we will use either the `zephyr_ml.create_pidata_entityset` or
`zephyr_ml.create_scada_entityset` functions.

**NOTE**: if you cloned the **Zephyr** repository, you will find some demo data inside
the `notebooks/data` folder which has been preprocessed to fit the `create_entityset`
data requirements.

```python3
import os
import pandas as pd
from zephyr_ml import create_scada_entityset

data_path = 'notebooks/data'

data = {
    'turbines': pd.read_csv(os.path.join(data_path, 'turbines.csv')),
    'alarms': pd.read_csv(os.path.join(data_path, 'alarms.csv')),
    'work_orders': pd.read_csv(os.path.join(data_path, 'work_orders.csv')),
    'stoppages': pd.read_csv(os.path.join(data_path, 'stoppages.csv')),
    'notifications': pd.read_csv(os.path.join(data_path, 'notifications.csv')),
    'scada': pd.read_csv(os.path.join(data_path, 'scada.csv'))
}

scada_es = create_scada_entityset(data)
```

This will load the turbine, alarms, stoppages, work order, notifications, and SCADA
data, and return it as an EntitySet. 
``` Entityset: SCADA data DataFrames: turbines [Rows: 1, Columns: 10] alarms [Rows: 2, Columns: 9] work_orders [Rows: 2, Columns: 20] stoppages [Rows: 2, Columns: 16] notifications [Rows: 2, Columns: 15] scada [Rows: 2, Columns: 5] Relationships: alarms.COD_ELEMENT -> turbines.COD_ELEMENT stoppages.COD_ELEMENT -> turbines.COD_ELEMENT work_orders.COD_ELEMENT -> turbines.COD_ELEMENT scada.COD_ELEMENT -> turbines.COD_ELEMENT notifications.COD_ORDER -> work_orders.COD_ORDER ``` ## 2. Selecting a Labeling Function The second step will be to choose an adequate **Labeling Function**. We can see the list of available labeling functions using the `zephyr_ml.labeling.get_labeling_functions` function. ```python3 from zephyr_ml import labeling labeling.get_labeling_functions() ``` This will return us a dictionary with the name and a short description of each available function. ``` {'brake_pad_presence': 'Calculates the total power loss over the data slice.', 'converter_replacement_presence': 'Calculates the converter replacement presence.', 'total_power_loss': 'Calculates the total power loss over the data slice.'} ``` In this case, we will choose the `total_power_loss` function, which calculates the total amount of power lost over a slice of time. ## 3. Generate Target Times Once we have loaded the data and the Labeling Function, we are ready to start using the `zephyr_ml.generate_labels` function to generate a Target Times table. ```python3 from zephyr_ml import DataLabeler data_labeler = DataLabeler(labeling.labeling_functions.total_power_loss) target_times, metadata = data_labeler.generate_label_times(scada_es) ``` This will return us a `compose.LabelTimes` containing the three columns required to start working on a Machine Learning problem: the turbine ID (COD_ELEMENT), the cutoff time (time) and the label. ``` COD_ELEMENT time label 0 0 2022-01-01 45801.0 ``` ## 4. 
Feature Engineering Using EntitySets and LabelTimes allows us to easily use Featuretools for automatic feature generation. ```python3 import featuretools as ft feature_matrix, features = ft.dfs( entityset=scada_es, target_dataframe_name='turbines', cutoff_time_in_index=True, cutoff_time=target_times, max_features=20 ) ``` Then we get a list of features and the computed `feature_matrix`. ``` TURBINE_PI_ID TURBINE_LOCAL_ID TURBINE_SAP_COD DES_CORE_ELEMENT SITE DES_CORE_PLANT ... MODE(alarms.COD_STATUS) MODE(alarms.DES_NAME) MODE(alarms.DES_TITLE) NUM_UNIQUE(alarms.COD_ALARM) NUM_UNIQUE(alarms.COD_ALARM_INT) label COD_ELEMENT time ... 0 2022-01-01 TA00 A0 LOC000 T00 LOCATION LOC ... Alarm1 Alarm1 Description of alarm 1 1 1 45801.0 [1 rows x 21 columns] ``` ## 5. Modeling Once we have the feature matrix, we can train a model using the Zephyr interface where you can train, infer, and evaluate a pipeline. First, we need to prepare our dataset for training by creating ``X`` and ``y`` variables and one-hot encoding features. ```python3 y = list(feature_matrix.pop('label')) X = pd.get_dummies(feature_matrix).values ``` In this example, we will use an 'xgb' regression pipeline to predict total power loss. ```python3 from zephyr_ml import Zephyr pipeline_name = 'xgb_regressor' zephyr = Zephyr(pipeline_name) ``` To train the pipeline, we simply use the `fit` function. ```python3 zephyr.fit(X, y) ``` After it finished training, we can make prediciton using `predict` ```python3 y_pred = zephyr.predict(X) ``` We can also use ``zephyr.evaluate`` to obtain the performance of the pipeline. # What's Next? If you want to continue learning about **Zephyr** and all its features please have a look at the tutorials found inside the [notebooks folder]( https://github.com/signals-dev/zephyr/tree/main/notebooks).
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/README.md
README.md
# History ## 0.0.2 - 2023-05-09 SigPro integration for processing signals * Integrating SigPro - [Issue #7](https://github.com/signals-dev/Zephyr/issues/7) by @frances-h @sarahmish * Add options to xgb pipeline - [Issue #5](https://github.com/signals-dev/Zephyr/issues/5) by @sarahmish ## 0.0.1 - 2023-03-02 New modeling module using Zephyr class * Expand GH action tests - [Issue #4](https://github.com/signals-dev/Zephyr/issues/4) by @sarahmish * Add XGB Pipeline - [Issue #1](https://github.com/signals-dev/Zephyr/issues/1) by @sarahmish ## 0.0.0 - 2022-11-17 First full release * Prediction Engineering Framework by @frances-h * EntitySet creation by @frances-h * DataLabeler and initial labeling functions by @frances-h
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/HISTORY.md
HISTORY.md
import json import logging import os import pickle from functools import partial from typing import List, Union import numpy as np import pandas as pd from mlblocks import MLPipeline from sklearn import metrics LOGGER = logging.getLogger(__name__) _REGRESSION_METRICS = { 'mae': metrics.mean_absolute_error, 'mse': metrics.mean_squared_error, 'r2': metrics.r2_score, } _CLASSIFICATION_METRICS = { 'accuracy': metrics.accuracy_score, 'f1': metrics.f1_score, 'recall': metrics.recall_score, 'precision': metrics.precision_score, } METRICS = _CLASSIFICATION_METRICS class Zephyr: """Zephyr Class. The Zephyr Class provides the main machine learning pipeline functionalities of Zephyr and is responsible for the interaction with the underlying MLBlocks pipelines. Args: pipeline (str, dict or MLPipeline): Pipeline to use. It can be passed as: * An ``str`` with a path to a JSON file. * An ``str`` with the name of a registered pipeline. * An ``MLPipeline`` instance. * A ``dict`` with an ``MLPipeline`` specification. hyperparameters (dict): Additional hyperparameters to set to the Pipeline. 
""" DEFAULT_PIPELINE = 'xgb_classifier' def _get_mlpipeline(self): pipeline = self._pipeline if isinstance(pipeline, str) and os.path.isfile(pipeline): with open(pipeline) as json_file: pipeline = json.load(json_file) mlpipeline = MLPipeline(pipeline) if self._hyperparameters: mlpipeline.set_hyperparameters(self._hyperparameters) return mlpipeline def __init__(self, pipeline: Union[str, dict, MLPipeline] = None, hyperparameters: dict = None): self._pipeline = pipeline or self.DEFAULT_PIPELINE self._hyperparameters = hyperparameters self._mlpipeline = self._get_mlpipeline() self._fitted = False def __eq__(self, other): return ( isinstance(other, self.__class__) and self._pipeline == other._pipeline and self._hyperparameters == other._hyperparameters and self._fitted == other._fitted ) def _get_outputs_spec(self, default=True): outputs_spec = ["default"] if default else [] try: visual_names = self._mlpipeline.get_output_names('visual') outputs_spec.append('visual') except ValueError: visual_names = [] return outputs_spec, visual_names def fit(self, X: pd.DataFrame, y: Union[pd.Series, np.ndarray], visual: bool = False, **kwargs): """Fit the pipeline to the given data. Args: X (DataFrame): Input data, passed as a ``pandas.DataFrame`` containing the feature matrix. y (Series or ndarray): Target data, passed as a ``pandas.Series`` or ``numpy.ndarray`` containing the target values. visual (bool): If ``True``, capture the ``visual`` named output from the ``MLPipeline`` and return it as an output. """ if not self._fitted: self._mlpipeline = self._get_mlpipeline() if visual: outputs_spec, visual_names = self._get_outputs_spec(False) else: outputs_spec = None outputs = self._mlpipeline.fit(X, y, output_=outputs_spec, **kwargs) self._fitted = True if visual and outputs is not None: return dict(zip(visual_names, outputs)) def predict(self, X: pd.DataFrame, visual: bool = False, **kwargs) -> pd.Series: """Predict the pipeline to the given data. 
Args: X (DataFrame): Input data, passed as a ``pandas.DataFrame`` containing the feature matrix. visual (bool): If ``True``, capture the ``visual`` named output from the ``MLPipeline`` and return it as an output. Returns: Series or ndarray: Predictions to the input data. """ if visual: outputs_spec, visual_names = self._get_outputs_spec() else: outputs_spec = 'default' outputs = self._mlpipeline.predict(X, output_=outputs_spec, **kwargs) if visual and visual_names: prediction = outputs[0] return prediction, dict(zip(visual_names, outputs[-len(visual_names):])) return outputs def fit_predict(self, X: pd.DataFrame, y: Union[pd.Series, np.ndarray], **kwargs) -> pd.Series: """Fit the pipeline to the data and then predict targets. This method is functionally equivalent to calling ``fit(X, y)`` and later on ``predict(X)`` but with the difference that here the ``MLPipeline`` is called only once, using its ``fit`` method, and the output is directly captured without having to execute the whole pipeline again during the ``predict`` phase. Args: X (DataFrame): Input data, passed as a ``pandas.DataFrame`` containing the feature matrix. y (Series or ndarray): Target data, passed as a ``pandas.Series`` or ``numpy.ndarray`` containing the target values. Returns: Series or ndarray: Predictions to the input data. """ if not self._fitted: self._mlpipeline = self._get_mlpipeline() result = self._mlpipeline.fit(X, y, output_='default', **kwargs) self._fitted = True return result def evaluate(self, X: pd.DataFrame, y: Union[pd.Series, np.ndarray], fit: bool = False, train_X: pd.DataFrame = None, train_y: Union[pd.Series, np.ndarray] = None, metrics: List[str] = METRICS) -> pd.Series: """Evaluate the performance of the pipeline. Args: X (DataFrame): Input data, passed as a ``pandas.DataFrame`` containing the feature matrix. y (Series or ndarray): Target data, passed as a ``pandas.Series`` or ``numpy.ndarray`` containing the target values. 
fit (bool): Whether to fit the pipeline before evaluating it. Defaults to ``False``. train_X (DataFrame): Training data, passed as a ``pandas.DataFrame`` containing the feature matrix. If not given, the pipeline is fitted on ``X``. train_y (Series or ndarray): Target data used for training, passed as a ``pandas.Series`` or ``numpy.ndarray`` containing the target values. metrics (list): List of metrics to used passed as a list of strings. If not given, it defaults to all the metrics. Returns: Series: ``pandas.Series`` containing one element for each metric applied, with the metric name as index. """ if not fit: method = self._mlpipeline.predict else: if not self._fitted: mlpipeline = self._get_mlpipeline() else: mlpipeline = self._mlpipeline if train_X is not None and train_y is not None: # fit first and then predict mlpipeline.fit(train_X, train_y) method = mlpipeline.predict else: # fit and predict at once method = partial(mlpipeline.fit, y=y, output_='default') result = method(X) scores = { metric: METRICS[metric](y, result) for metric in metrics } return pd.Series(scores) def save(self, path: str): """Save this object using pickle. Args: path (str): Path to the file where the serialization of this object will be stored. """ os.makedirs(os.path.dirname(path), exist_ok=True) with open(path, 'wb') as pickle_file: pickle.dump(self, pickle_file) @classmethod def load(cls, path: str): """Load an Zephyr instance from a pickle file. Args: path (str): Path to the file where the instance has been previously serialized. Returns: Orion Raises: ValueError: If the serialized object is not a Zephyr instance. """ with open(path, 'rb') as pickle_file: zephyr = pickle.load(pickle_file) if not isinstance(zephyr, cls): raise ValueError('Serialized object is not a Zephyr instance') return zephyr
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/core.py
core.py
import featuretools as ft from zephyr_ml.metadata import get_mapped_kwargs def _create_entityset(entities, es_type, es_kwargs): """Build a featuretools EntitySet from the given entity dataframes. Adds each dataframe with its keyword arguments and wires the fixed turbine-centric relationships. ``es_kwargs`` is mutated in place (its 'logical_types' entries are filtered to existing columns). """ # filter out stated logical types for missing columns for entity, df in entities.items(): es_kwargs[entity]['logical_types'] = { col: t for col, t in es_kwargs[entity]['logical_types'].items() if col in df.columns } turbines_index = es_kwargs['turbines']['index'] work_orders_index = es_kwargs['work_orders']['index'] # (parent dataframe, parent column, child dataframe, child column) relationships = [ ('turbines', turbines_index, 'alarms', turbines_index), ('turbines', turbines_index, 'stoppages', turbines_index), ('turbines', turbines_index, 'work_orders', turbines_index), ('turbines', turbines_index, es_type, turbines_index), ('work_orders', work_orders_index, 'notifications', work_orders_index) ] es = ft.EntitySet() for name, df in entities.items(): es.add_dataframe( dataframe_name=name, dataframe=df, **es_kwargs[name] ) for relationship in relationships: parent_df, parent_column, child_df, child_column = relationship es.add_relationship(parent_df, parent_column, child_df, child_column) return es def create_pidata_entityset(dfs, new_kwargs_mapping=None): '''Generate an entityset for PI data datasets Args: dfs (dict): Dictionary mapping entity names ('alarms', 'notifications', 'stoppages', 'work_orders', 'pidata', 'turbines') to the pandas dataframe for that entity. new_kwargs_mapping (dict): Updated keyword arguments to be used during entityset creation. Returns: featuretools.EntitySet: EntitySet of the PI data. ''' entity_kwargs = get_mapped_kwargs('pidata', new_kwargs_mapping) _validate_data(dfs, 'pidata', entity_kwargs) es = _create_entityset(dfs, 'pidata', entity_kwargs) es.id = 'PI data' return es def create_scada_entityset(dfs, new_kwargs_mapping=None): '''Generate an entityset for SCADA data datasets Args: dfs (dict): Dictionary mapping entity names ('alarms', 'notifications', 'stoppages', 'work_orders', 'scada', 'turbines') to the pandas dataframe for that entity. new_kwargs_mapping (dict): Updated keyword arguments to be used during entityset creation. Returns: featuretools.EntitySet: EntitySet of the SCADA data. ''' entity_kwargs = get_mapped_kwargs('scada', new_kwargs_mapping) _validate_data(dfs, 'scada', entity_kwargs) es = _create_entityset(dfs, 'scada', entity_kwargs) es.id = 'SCADA data' return es def _validate_data(dfs, es_type, es_kwargs): '''Validate data by checking for required columns in each entity Raises ``ValueError`` when the set of entities is wrong, when an expected index / time index column is missing, or when an index column that must be unique is not. ''' # the exact set of entities required for this entityset type entities = set(['alarms', 'stoppages', 'work_orders', 'notifications', 'turbines', es_type]) if set(dfs.keys()) != entities: missing = entities.difference(set(dfs.keys())) extra = set(dfs.keys()).difference(entities) msg = [] if missing: msg.append('Missing dataframes for entities {}.'.format(', '.join(missing))) if extra: msg.append('Unrecognized entities {} included in dfs.'.format(', '.join(extra))) raise ValueError(' '.join(msg)) turbines_index = es_kwargs['turbines']['index'] work_orders_index = es_kwargs['work_orders']['index'] # work_orders acts as a parent entity: its index must exist, be unique, # and also appear in notifications (the child) as a foreign key if work_orders_index not in dfs['work_orders'].columns: raise ValueError( 'Expected index column "{}" missing from work_orders entity'.format(work_orders_index)) if work_orders_index not in dfs['notifications'].columns: raise ValueError( 'Expected column "{}" missing from notifications entity'.format(work_orders_index)) if not dfs['work_orders'][work_orders_index].is_unique: raise ValueError('Expected index column "{}" of work_orders entity is not ' 'unique'.format(work_orders_index)) # turbines is the root parent entity: unique index, referenced everywhere if turbines_index not in dfs['turbines'].columns: raise ValueError( 'Expected index column "{}" missing from turbines entity'.format(turbines_index)) if not dfs['turbines'][turbines_index].is_unique: raise ValueError( 'Expected index column "{}" of turbines entity is not unique.'.format(turbines_index)) for entity, df in dfs.items(): if turbines_index not in df.columns: raise ValueError( 'Turbines index column "{}" missing from data for {} entity'.format( turbines_index, entity)) time_index = es_kwargs[entity].get('time_index', False) if time_index and time_index not in df.columns: raise ValueError( 'Missing time index column "{}" from {} entity'.format( time_index, entity)) secondary_time_indices = es_kwargs[entity].get('secondary_time_index', {}) for time_index, cols in secondary_time_indices.items(): if time_index not in df.columns: raise ValueError( 'Secondary time index "{}" missing from {} entity'.format( time_index, entity)) for col in cols: if col not in df.columns: raise ValueError(('Column "{}" associated with secondary time index "{}" ' 'missing from {} entity').format(col, time_index, entity)) 
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/entityset.py
entityset.py
DEFAULT_ES_KWARGS = { 'alarms': { 'index': '_index', 'make_index': True, 'time_index': 'DAT_START', 'secondary_time_index': {'DAT_END': ['IND_DURATION']}, 'logical_types': { 'COD_ELEMENT': 'categorical', # turbine id 'DAT_START': 'datetime', # start 'DAT_END': 'datetime', # end 'IND_DURATION': 'double', # duration 'COD_ALARM': 'categorical', # alarm code 'COD_ALARM_INT': 'categorical', # international alarm code 'DES_NAME': 'categorical', # alarm name 'DES_TITLE': 'categorical', # alarm description 'COD_STATUS': 'categorical' # status code } }, 'stoppages': { 'index': '_index', 'make_index': True, 'time_index': 'DAT_START', 'secondary_time_index': {'DAT_END': ['IND_DURATION', 'IND_LOST_GEN']}, 'logical_types': { 'COD_ELEMENT': 'categorical', # turbine id 'DAT_START': 'datetime', # start 'DAT_END': 'datetime', # end 'DES_WO_NAME': 'natural_language', # work order name 'DES_COMMENTS': 'natural_language', # work order comments 'COD_WO': 'integer_nullable', # stoppage code 'IND_DURATION': 'double', # duration 'IND_LOST_GEN': 'double', # generation loss 'COD_ALARM': 'categorical', # alarm code 'COD_CAUSE': 'categorical', # stoppage cause 'COD_INCIDENCE': 'categorical', # incidence code 'COD_ORIGIN': 'categorical', # origin code 'DESC_CLASS': 'categorical', # ???? 
'COD_STATUS': 'categorical', # status code 'COD_CODE': 'categorical', # stoppage code 'DES_DESCRIPTION': 'natural_language', # stoppage description 'DES_TECH_NAME': 'categorical' # turbine technology } }, 'notifications': { 'index': '_index', 'make_index': True, 'time_index': 'DAT_POSTING', 'secondary_time_index': {'DAT_MALF_END': ['IND_BREAKDOWN_DUR']}, 'logical_types': { 'COD_ELEMENT': 'categorical', # turbine id 'COD_ORDER': 'categorical', 'IND_QUANTITY': 'double', 'COD_MATERIAL_SAP': 'categorical', 'DAT_POSTING': 'datetime', 'COD_MAT_DOC': 'categorical', 'DES_MEDIUM': 'categorical', 'COD_NOTIF': 'categorical', 'DAT_MALF_START': 'datetime', 'DAT_MALF_END': 'datetime', 'IND_BREAKDOWN_DUR': 'double', 'FUNCT_LOC_DES': 'categorical', 'COD_ALARM': 'categorical', 'DES_ALARM': 'categorical' } }, 'work_orders': { 'index': 'COD_ORDER', 'time_index': 'DAT_BASIC_START', 'secondary_time_index': {'DAT_VALID_END': []}, 'logical_types': { 'COD_ELEMENT': 'categorical', 'COD_ORDER': 'categorical', 'DAT_BASIC_START': 'datetime', 'DAT_BASIC_END': 'datetime', 'COD_EQUIPMENT': 'categorical', 'COD_MAINT_PLANT': 'categorical', 'COD_MAINT_ACT_TYPE': 'categorical', 'COD_CREATED_BY': 'categorical', 'COD_ORDER_TYPE': 'categorical', 'DAT_REFERENCE': 'datetime', 'DAT_CREATED_ON': 'datetime', 'DAT_VALID_END': 'datetime', 'DAT_VALID_START': 'datetime', 'COD_SYSTEM_STAT': 'categorical', 'DES_LONG': 'natural_language', 'COD_FUNCT_LOC': 'categorical', 'COD_NOTIF_OBJ': 'categorical', 'COD_MAINT_ITEM': 'categorical', 'DES_MEDIUM': 'natural_language', 'DES_FUNCT_LOC': 'categorical' } }, 'turbines': { 'index': 'COD_ELEMENT', 'logical_types': { 'COD_ELEMENT': 'categorical', 'TURBINE_PI_ID': 'categorical', 'TURBINE_LOCAL_ID': 'categorical', 'TURBINE_SAP_COD': 'categorical', 'DES_CORE_ELEMENT': 'categorical', 'SITE': 'categorical', 'DES_CORE_PLANT': 'categorical', 'COD_PLANT_SAP': 'categorical', 'PI_COLLECTOR_SITE_NAME': 'categorical', 'PI_LOCAL_SITE_NAME': 'categorical' } } } DEFAULT_ES_TYPE_KWARGS = 
{ 'pidata': { 'index': '_index', 'make_index': True, 'time_index': 'time', 'logical_types': { 'time': 'datetime', 'COD_ELEMENT': 'categorical' } }, 'scada': { 'index': '_index', 'make_index': True, 'time_index': 'TIMESTAMP', 'logical_types': { 'TIMESTAMP': 'datetime', 'COD_ELEMENT': 'categorical' } } } def get_mapped_kwargs(es_type, new_kwargs=None): if es_type not in DEFAULT_ES_TYPE_KWARGS.keys(): raise ValueError('Unrecognized es_type argument: {}'.format(es_type)) mapped_kwargs = DEFAULT_ES_KWARGS.copy() mapped_kwargs.update({es_type: DEFAULT_ES_TYPE_KWARGS[es_type]}) if new_kwargs is not None: if not isinstance(new_kwargs, dict): raise ValueError('new_kwargs must be dictionary mapping entity name to dictionary ' 'with updated keyword arguments for EntitySet creation.') for entity in new_kwargs: if entity not in mapped_kwargs: raise ValueError('Unrecognized entity "{}" found in new keyword argument ' 'mapping.'.format(entity)) mapped_kwargs[entity].update(new_kwargs[entity]) return mapped_kwargs
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/metadata.py
metadata.py
from sigpro import SigPro def process_signals(es, signal_dataframe_name, signal_column, transformations, aggregations, window_size, replace_dataframe=False, **kwargs): ''' Process signals using SigPro. Apply SigPro transformations and aggregations on the specified entity from the given entityset. If ``replace_dataframe=True``, then the old entity will be updated. Args: es (featuretools.EntitySet): Entityset to extract signals from. signal_dataframe_name (str): Name of the dataframe in the entityset containing signal data to process. signal_column (str): Name of column or containing signal values to apply signal processing pipeline to. transformations (list[dict]): List of dictionaries containing the transformation primitives. aggregations (list[dict]): List of dictionaries containing the aggregation primitives. window_size (str): Size of the window to bin the signals over. e.g. ('1h). replace_dataframe (bool): If ``True``, will replace the entire signal dataframe in the EntitySet with the processed signals. Defaults to ``False``, creating a new child dataframe containing processed signals with the suffix ``_processed``. 
''' signal_df = es[signal_dataframe_name] time_index = signal_df.ww.time_index for relationship in es.relationships: child_name = relationship.child_dataframe.ww.name parent_name = relationship.parent_dataframe.ww.name if child_name == signal_df.ww.name and parent_name == 'turbines': old_relationship = relationship groupby_index = relationship.child_column.name pipeline = SigPro(transformations, aggregations, values_column_name=signal_column, **kwargs) processed_df, f_cols = pipeline.process_signal( signal_df, window=window_size, time_index=time_index, groupby_index=groupby_index, **kwargs ) if replace_dataframe: es.add_dataframe( processed_df, signal_dataframe_name, time_index=time_index, index='_index') else: df_name = '{}_processed'.format(signal_df.ww.name) es.add_dataframe(processed_df, df_name, time_index=time_index, make_index=True, index='_index') es.add_relationship('turbines', old_relationship.parent_column.name, df_name, old_relationship.child_column.name)
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/feature_engineering.py
feature_engineering.py
import logging import numpy as np import sklearn LOGGER = logging.getLogger(__name__) METRICS = { "accuracy": sklearn.metrics.accuracy_score, "precision": sklearn.metrics.precision_score, "recall": sklearn.metrics.recall_score, "f1": sklearn.metrics.f1_score, } class FindThreshold: """Find Optimal Threshold. This class find the optimal threshold value that produces the highest metric score. In the fit phase, it detects the best threshold based on the given metric. In the produce phase, it applies the found threshold on the predicted values. This is intended for classification problems. Args: metric (str): String representing which metric to use. """ def __init__(self, metric='f1'): self._metric = 'f1' self._threshold = None def fit(self, y_true, y_pred): """Find the threshold that obtains the best metric value. Args: y_true (Series or ndarray): ``pandas.Series`` or ``numpy.ndarray`` ground truth target values. y_pred (Series or ndarray): ``pandas.Series`` or ``numpy.ndarray`` predicted target valeus. """ if y_pred.ndim > 1: y_pred = y_pred[:, 1] RANGE = np.arange(0, 1, 0.01) scores = list() scorer = METRICS[self._metric] for thresh in RANGE: y = [1 if x else 0 for x in y_pred > thresh] scores.append(scorer(y_true, y)) threshold = RANGE[np.argmax(scores)] LOGGER.info(f'best threshold found at {threshold}') self._threshold = threshold self._scores = scores def apply_threshold(self, y_pred): """Apply threshold on predicted values. Args: y_pred (Series): ``pandas.Series`` predicted target valeus. Return: tuple: * list of predicted target valeus in binary codes. * detected float value for threshold. * list of scores obtained at each threshold. """ if y_pred.ndim > 1: y_pred = y_pred[:, 1] binary = [1 if x else 0 for x in y_pred > self._threshold] return binary, self._threshold, self._scores
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/primitives/postprocessing.py
postprocessing.py
from inspect import getfullargspec import composeml as cp class DataLabeler: """Class that defines the prediction problem. This class supports the generation of `label_times` which is fundamental to the feature generation phase as well as specifying the target labels. Args: function (LabelingFunction): function that defines the labeling function, it should return a tuple of labeling function, the dataframe, and the name of the target entity. """ def __init__(self, function): self.function = function def generate_label_times(self, es, num_samples=-1, subset=None, column_map={}, verbose=False, **kwargs): """Searches the data to calculate label times. Args: es (featuretools.EntitySet): Entityset to extract `label_times` from. num_samples (int): Number of samples for each to return. Defaults to -1 which returns all possible samples. subset (float or int): Portion of the data to select for searching. verbose: An indicator to the verbosity of searching. column_map: Dictionary mapping column references in labeling function to actual column names. See labeling function for columns referenced. Returns: composeml.LabelTimes: Calculated labels with cutoff times. 
""" labeling_function, df, meta = self.function(es, column_map) data = df if isinstance(subset, float) or isinstance(subset, int): data = data.sample(subset) target_entity_index = meta.get('target_entity_index') time_index = meta.get('time_index') thresh = kwargs.get('thresh') or meta.get('thresh') window_size = kwargs.get('window_size') or meta.get('window_size') label_maker = cp.LabelMaker(labeling_function=labeling_function, target_dataframe_name=target_entity_index, time_index=time_index, window_size=window_size) kwargs = {**meta, **kwargs} kwargs = { k: kwargs.get(k) for k in set( getfullargspec( label_maker.search)[0]) if kwargs.get(k) is not None} label_times = label_maker.search(data.sort_values(time_index), num_samples, verbose=verbose, **kwargs) if thresh is not None: label_times = label_times.threshold(thresh) return label_times, meta
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/labeling/data_labeler.py
data_labeler.py
import numpy as np import pandas as pd def _search_relationship(es, left, right): for r in es.relationships: if r.parent_name in left: if right == r.child_name: left_on = r.parent_column.name right_on = r.child_column.name elif r.child_name in left: if right == r.parent_name: left_on = r.child_column.name right_on = r.parent_column.name return left_on, right_on def denormalize(es, entities): """Merge a set of entities into a single dataframe. Convert a set of entities from the entityset into a single dataframe by repetitively merging the selected entities. The merge process is applied sequentially. Args: entities (list): list of strings denoting which entities to merge. Returns: pandas.DataFrame: A single dataframe containing all the information from the selected entities. """ k = len(entities) # initial entity to start from (should be the target entity) first = entities[0] previous = [first] df = es[first] # merge the dataframes to create a single input for i in range(1, k): right = entities[i] left_on, right_on = _search_relationship(es, previous, right) df = pd.merge(df, es[right], left_on=left_on, right_on=right_on, how='left', suffixes=('', '_y')).filter(regex='^(?!.*_y)') previous.append(right) return df def required_columns(columns): """Decorator function for recording required columns for a function.""" def wrapper(wrapped): def func(*args, **kwargs): return wrapped(*args, **kwargs) func.__required_columns__ = columns func.__doc__ = wrapped.__doc__ func.__name__ = wrapped.__name__ return func return wrapper def merge_binary_labeling_functions(labeling_functions, and_connected=True): """Generates a labeling function from merging multiple binary labeling functions. Args: labeling_functions (list): A list of labeling functions (with df as an input) to merge. and_connected (bool): If and_connected is True, each individual labeling function criteria must be True for the output function to give a positive label. 
If and_connected is False, at least one labeling function criteria has to be met for the output function to give a positive label. Default is True. Returns: function: A function that takes in a dataframe, which is derived from the input labeling functions. """ def merged_function(df): out = and_connected for function in labeling_functions: if and_connected: out &= function(df) else: out |= function(df) return int(out) return merged_function def aggregate_by_column(numerical_column, aggregation): """Generates a function for aggregates numerical column values over a data slice. Args: numerical_column (str): Numerical column to aggregate over. aggregation (function): Aggregation function to apply. Returns: function: The function returns the total numerical column value over the data slice as a continuous label. """ def aggregate_function(df): """Aggregate function with: numerical_column={} aggregation={} """ return aggregation(df[numerical_column]) aggregate_function.__doc__ = aggregate_function.__doc__.format(numerical_column, aggregation.__name__) return aggregate_function def categorical_presence(categorical_column, value): """Generates a function that determines if the categorical column has the desired value. Args: categorical_column (str): Categorical column to use values from. value (str or int or float): Value to compare categorical columns values to. Returns: function: The function returns 1 if categorical column has the desired value, 0 otherwise. """ def categorical_function(df): """Categorical presence function with: categorical_column={} value={} """ return int(df[categorical_column].isin([value]).sum() > 0) categorical_function.__doc__ = categorical_function.__doc__.format(categorical_column, value) return categorical_function def keyword_in_text(keyword, columns=None): """Determines presence of keyword in text field data columns. Args: keyword (str): Keyword to search the text columns for. 
columns (list or None): List of columns to search through to find keyword. If None, all columns are tested. Default is None. Returns: function: The function returns 1 if the keyword is present in any column, 0 otherwise. """ def keyword_function(df): """Keyword function with: keyword={} columns={} """ mask = np.full(len(df), False) for col in columns: try: mask |= df[col].str.contains(keyword, case=False, na=False) except KeyError: print("Unable to find column for keyword search") return int(mask.sum() != 0) keyword_function.__doc__ = keyword_function.__doc__.format(keyword, columns) return keyword_function def greater_than(numerical_column, threshold): """Generates a function to see if there are numerical values greater than a threshold. Args: numerical_column (str): Numerical column to use values from. threshold (float): Threshold for the numerical values used to define the binary labels. Returns: function: The function returns 1 if data contains a value is greater than threshold, 0 otherwise. """ def numerical_function(df): """Numerical presence function with: numerical_column={} threshold={} """ series = df[numerical_column] return int(len(series[series > threshold]) > 0) numerical_function.__doc__ = numerical_function.__doc__.format(numerical_column, threshold) return numerical_function def total_duration(start_time, end_time): """Generates function for calculating the total duration given start/end time indexes. Args: start_time (str): Name of the start time column. end_time (str): Name of the end time column. Returns: function: The function returns the total duration in seconds based on the two given time endpoints for the data slice. """ def duration_function(df): """Duration function with: start_time={} end_time={} """ return ((df[end_time] - df[start_time]).dt.total_seconds()).sum() duration_function.__doc__ = duration_function.__doc__.format(start_time, end_time) return duration_function
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/labeling/utils.py
utils.py
from zephyr_ml.labeling.utils import denormalize def converter_replacement_presence(es, column_map={}): """Calculates the converter replacement presence. Args: es (ft.EntitySet): EntitySet of data to check converter replacements. column_map (dict): Optional dictionary to update default column names to the actual corresponding column names in the data slice. Can contain the following keys: "sap_code": Column that contains the material SAP code. Defaults to "COD_MATERIAL_SAP". "turbine_id": Column containing the ID of the turbine associated with a stoppage. Must match the index column of the 'turbines' entity. Defaults to "COD_ELEMENT". "description": Column containing the description for a given notification. Defaults to "DES_MEDIUM". "time_index": Column to use as the time index for the data slice. Defaults to "DAT_MALF_START". Returns: label: Labeling function to find converter replacement presence over a data slice. df: Denormalized dataframe of data to get labels from. meta: Dictionary containing metadata about labeling function. """ sap_code = column_map.get('sap_code', 'COD_MATERIAL_SAP') column_map.get('description', 'DES_MEDIUM') turbine_id = column_map.get('turbine_id_column', 'COD_ELEMENT') time_index = column_map.get('time_index', 'DAT_MALF_START') def label(ds, **kwargs): logic1 = (ds[sap_code] == 36052411).any() # logic2 = ds[DESCRIPTION].str.lower().apply(lambda x: 'inu' in x).any() f = logic1 # or logic2 return f meta = { "target_entity_index": turbine_id, "time_index": time_index, "window_size": "10d" } # denormalize(es, entities=['notifications', 'work_orders']) df = denormalize(es, entities=['notifications']) df = df.dropna(subset=[time_index]) return label, df, meta
zephyr-ml
/zephyr-ml-0.0.2.tar.gz/zephyr-ml-0.0.2/zephyr_ml/labeling/labeling_functions/converter_replacement_presence.py
converter_replacement_presence.py
# Zephyr-python-api ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/zephyr-python-api) ![PyPI](https://img.shields.io/pypi/v/zephyr-python-api) ![PyPI - License](https://img.shields.io/pypi/l/zephyr-python-api) ### Project description This is a set of wrappers for Zephyr Scale (TM4J) REST API. This means you can interact with Zephyr Scale without GUI, access it with python code and create automation scripts for your every day interactions. To be done: * More usage examples * Tests, tests and tests for gods of testing * Convenient docs * Implementing higher level wrappers representing Test Case, Test Cycle, etc. ### Installation ``` pip install zephyr-python-api ``` ### Example usage Zephyr Cloud auth: ```python from zephyr import ZephyrScale zscale = ZephyrScale(token=<your_token>) ``` Zephyr Server (TM4J) auth: ```python from zephyr import ZephyrScale # Auth can be made with Jira token auth = {"token": "<your_jira_token>"} # or with login and password (suggest using get_pass) auth = {"username": "<your_login>", "password": "<your_password>"} # or even session cookie dict auth = {"cookies": "<session_cookie_dict>"} zscale = ZephyrScale.server_api(base_url=<your_base_url>, **auth) ``` Then it is possible to interact with api wrappers: ```python zapi = zscale.api # Get all test cases all_test_cases = zapi.test_cases.get_test_cases() # Get a single test case by its id test_case = zapi.test_cases.get_test_case("<test_case_id>") # Create a test case creation_result = zapi.test_cases.create_test_case("<project_key>", "test_case_name") ``` ### Troubleshooting For troubleshooting see [TROUBLESHOOTING.md](TROUBLESHOOTING.md) ### License This library is licensed under the Apache 2.0 License. ### Links [Zephyr Scale Cloud API docs](https://support.smartbear.com/zephyr-scale-cloud/api-docs/) [Zephyr Scale Server API docs](https://support.smartbear.com/zephyr-scale-server/api-docs/v1/)
zephyr-python-api
/zephyr-python-api-0.0.3.tar.gz/zephyr-python-api-0.0.3/README.md
README.md
import logging
from urllib.parse import urlparse, parse_qs

from requests import HTTPError, Session

INIT_SESSION_MSG = "Initialize session by {}"


class InvalidAuthData(Exception):
    """Invalid authentication data provided"""


class ZephyrSession:
    """
    Zephyr Scale basic session object.

    Exactly one authentication mechanism must be supplied; they are tried in
    this order: bearer token, then username/password basic auth, then a raw
    cookie dict.

    :param base_url: url to make requests to
    :param token: auth token
    :param username: username
    :param password: password
    :param cookies: cookie dict

    :keyword session_attrs: a dict with session attrs to be set as keys and their values
    :raises InvalidAuthData: if no usable auth data was provided
    """
    def __init__(self, base_url, token=None, username=None, password=None, cookies=None, **kwargs):
        self.base_url = base_url
        self._session = Session()
        self.logger = logging.getLogger(__name__)
        if token:
            self.logger.debug(INIT_SESSION_MSG.format("token"))
            self._session.headers.update({"Authorization": f"Bearer {token}"})
        elif username and password:
            self.logger.debug(INIT_SESSION_MSG.format("username and password"))
            self._session.auth = (username, password)
        elif cookies:
            self.logger.debug(INIT_SESSION_MSG.format("cookies"))
            self._session.cookies.update(cookies)
        else:
            raise InvalidAuthData("Insufficient auth data")

        if kwargs.get("session_attrs"):
            self._modify_session(**kwargs.get("session_attrs"))

    def _create_url(self, *args):
        """Helper for URL creation.

        NOTE(review): args are concatenated onto base_url with no separator,
        so base_url is assumed to end with a slash — confirm against the
        ZephyrScale factory that builds this session.
        """
        return self.base_url + "/".join(args)

    def _modify_session(self, **kwargs):
        """Modify requests session with extra arguments (e.g. verify, proxies)"""
        self.logger.debug(f"Modify requests session object with {kwargs}")
        for session_attr, value in kwargs.items():
            setattr(self._session, session_attr, value)

    def _request(self, method: str, endpoint: str, return_raw: bool = False, **kwargs):
        """General request wrapper with logging and handling response.

        Returns the raw response object when return_raw is set, the parsed
        JSON for a non-empty successful body, or an empty string for an empty
        successful body.

        :raises HTTPError: for any status code >= 400; the response object is
            attached so callers can inspect status_code and content
        """
        self.logger.debug(f"{method.capitalize()} data: endpoint={endpoint} and {kwargs}")
        url = self._create_url(endpoint)
        response = self._session.request(method=method, url=url, **kwargs)
        if response.status_code < 400:
            if return_raw:
                return response
            if response.text:
                return response.json()
            return ""
        # Attach the response so callers can examine it programmatically
        # instead of parsing the message string (requests convention).
        raise HTTPError(f"Error {response.status_code}. Response: {response.content}",
                        response=response)

    def get(self, endpoint: str, params: dict = None, **kwargs):
        """Get request wrapper"""
        return self._request("get", endpoint, params=params, **kwargs)

    def post(self, endpoint: str, json: dict = None, **kwargs):
        """Post request wrapper"""
        return self._request("post", endpoint, json=json, **kwargs)

    def put(self, endpoint: str, json: dict = None, **kwargs):
        """Put request wrapper"""
        return self._request("put", endpoint, json=json, **kwargs)

    def delete(self, endpoint: str, **kwargs):
        """Delete request wrapper"""
        return self._request("delete", endpoint, **kwargs)

    def get_paginated(self, endpoint, params=None):
        """Get paginated data.

        Generator yielding each item of every page's "values" list, following
        the server-provided "next" link until "isLast" is true.
        """
        self.logger.debug(f"Get paginated data from endpoint={endpoint} and params={params}")
        if params is None:
            params = {}

        while True:
            response = self.get(endpoint, params=params)
            if "values" not in response:
                return
            for value in response.get("values", []):
                yield value
            if response.get("isLast") is True:
                break
            # Re-use the query string of the "next" link for the next page.
            params_str = urlparse(response.get("next")).query
            params.update(parse_qs(params_str))
        return

    def post_file(self, endpoint: str, file_path: str, to_files=None, **kwargs):
        """
        Post wrapper to send a file. Handles single file opening,
        sending its content and closing.

        :param endpoint: resource path to post to
        :param file_path: path of the file to upload as the "file" part
        :param to_files: optional dict of extra multipart parts to merge in
        """
        with open(file_path, "rb") as file:
            files = {"file": file}
            if to_files:
                files.update(to_files)
            return self._request("post", endpoint, files=files, **kwargs)
zephyr-python-api
/zephyr-python-api-0.0.3.tar.gz/zephyr-python-api-0.0.3/zephyr/scale/zephyr_session.py
zephyr_session.py
from ...zephyr_session import ZephyrSession
from .paths import ServerPaths as Paths


class EndpointTemplate:
    """Class with basic constructor for endpoint classes"""
    def __init__(self, session: ZephyrSession):
        self.session = session


class TestCaseEndpoints(EndpointTemplate):
    """Api wrapper for "Test Case" endpoints"""

    def create_test_case(self, project_key, name, **kwargs):
        """Creates a new Test Case"""
        json = {"projectKey": project_key,
                "name": name}
        json.update(kwargs)
        return self.session.post(Paths.CASE, json=json)

    def get_test_case(self, test_case_key, **params):
        """Retrieve the Test Case matching the given key"""
        return self.session.get(Paths.CASE_KEY.format(test_case_key), params=params)

    def update_test_case(self, test_case_key, **json):
        """Updates a Test Case"""
        return self.session.put(Paths.CASE_KEY.format(test_case_key), json=json)

    def delete_test_case(self, test_case_key):
        """Delete the Test Case matching the given key"""
        return self.session.delete(Paths.CASE_KEY.format(test_case_key))

    def get_attachments(self, test_case_key):
        """Retrieve the Test Case Attachments matching the given key"""
        return self.session.get(Paths.CASE_ATTACH.format(test_case_key))

    def create_attachment(self, test_case_key, file_path):
        """Create a new attachment on the specified Test Case"""
        return self.session.post_file(Paths.CASE_ATTACH.format(test_case_key), file_path)

    def get_latest_result(self, test_case_key):
        """Retrieve the last test result for a given key"""
        return self.session.get(Paths.CASE_LATEST_RES.format(test_case_key))

    def get_step_attachments(self, test_case_key, step_index):
        """Retrieve the attachments for a test case step"""
        return self.session.get(Paths.CASE_STP_ATTACH.format(test_case_key, step_index))

    def create_step_attachment(self, test_case_key, step_index, file_path):
        """Create a new attachment on the specified Step of a Test Case"""
        # BUGFIX: previously called session.post(path, file_path), which sent
        # the file path string as the JSON body. Attachments are multipart
        # uploads, as in create_attachment above.
        return self.session.post_file(Paths.CASE_STP_ATTACH.format(test_case_key, step_index),
                                      file_path)

    def search_cases(self, query, **params):
        """Retrieve the Test Cases that matches the query passed as parameter"""
        params.update({"query": query})
        return self.session.get(Paths.CASE_SEARCH, params=params)

    def get_all_versions(self, test_case_key, **params):
        """Get all test case versions ids by its key name. Undocumented in API"""
        return self.session.get(Paths.CASE_VERS.format(test_case_key), params=params)


class TestPlanEndpoints(EndpointTemplate):
    """Api wrapper for "Test Plan" endpoints"""

    def create_test_plan(self, project_key, name, **kwargs):
        """Creates a new Test Plan"""
        json = {"projectKey": project_key,
                "name": name}
        json.update(kwargs)
        return self.session.post(Paths.PLAN, json=json)

    def get_test_plan(self, test_plan_key, **params):
        """Retrieve the Test Plan matching the given key"""
        return self.session.get(Paths.PLAN_KEY.format(test_plan_key), params=params)

    def update_test_plan(self, test_plan_key, **json):
        """Updates a Test Plan"""
        return self.session.put(Paths.PLAN_KEY.format(test_plan_key), json=json)

    def delete_test_plan(self, test_plan_key):
        """Delete the Test Plan matching the given key"""
        return self.session.delete(Paths.PLAN_KEY.format(test_plan_key))

    def get_attachments(self, test_plan_key):
        """Retrieve the Test Plan Attachments matching the given key"""
        return self.session.get(Paths.PLAN_ATTACH.format(test_plan_key))

    def create_attachments(self, test_plan_key, file_path):
        """Create a new attachment on the specified Test Plan"""
        return self.session.post_file(Paths.PLAN_ATTACH.format(test_plan_key), file_path)

    def search_plans(self, query, **params):
        """Retrieve the Test Plans that matches the query passed as parameter"""
        params.update({"query": query})
        return self.session.get(Paths.PLAN_SEARCH, params=params)


class TestRunEndpoints(EndpointTemplate):
    """Api wrapper for "Test Run" endpoints"""

    def create_test_run(self, project_key, name, **kwargs):
        """Creates a new Test Run"""
        json = {"projectKey": project_key,
                "name": name}
        json.update(kwargs)
        return self.session.post(Paths.RUN, json=json)

    def get_test_run(self, test_run_key, **params):
        """Retrieve the Test Run matching the given key"""
        return self.session.get(Paths.RUN_KEY.format(test_run_key), params=params)

    def delete_test_run(self, test_run_key):
        """Delete the Test Run matching the given key"""
        return self.session.delete(Paths.RUN_KEY.format(test_run_key))

    def get_attachments(self, test_run_key):
        """Retrieve the Test Run Attachments matching the given key"""
        return self.session.get(Paths.RUN_ATTACH.format(test_run_key))

    def create_attachments(self, test_run_key, file_path):
        """Create a new attachment on the specified Test Run"""
        return self.session.post_file(Paths.RUN_ATTACH.format(test_run_key), file_path)

    def create_test_result(self, test_run_key, test_case_key, **json):
        """
        Creates a new Test Result on the specified Test Run, looking for an item
        that matches the testCaseKey and the query string filter parameters.
        """
        return self.session.post(Paths.RUN_TEST_RESULT.format(test_run_key, test_case_key),
                                 json=json)

    def update_test_result(self, test_run_key, test_case_key, **json):
        """
        Updates the last Test Result on the specified Test Run, looking for an item
        that matches the testCaseKey and the query string filter parameters.
        Only defined fields will be updated.
        """
        # NOTE(review): this issues a POST, identical to create_test_result.
        # The Server API documents the update of the last result as a PUT —
        # confirm against the API reference before changing.
        return self.session.post(Paths.RUN_TEST_RESULT.format(test_run_key, test_case_key),
                                 json=json)

    def get_test_results(self, test_run_key):
        """Retrieve All Test Results linked to a Test Run"""
        return self.session.get(Paths.RUN_TEST_RESULTS.format(test_run_key))

    def create_test_results(self, test_run_key, results):
        """
        Create new Test Results on the specified Test Run, looking for items
        that match the testCaseKey for each body item.
        """
        return self.session.post(Paths.RUN_TEST_RESULTS.format(test_run_key),
                                 json=results)

    def search_runs(self, query, **params):
        """Retrieve the Test Runs that matches the query passed as parameter"""
        params.update({"query": query})
        return self.session.get(Paths.RUN_SEARCH, params=params)


class TestResultEndpoints(EndpointTemplate):
    """Api wrapper for "Test Result" endpoints"""

    def create_test_result(self, project_key, test_case_key, **json):
        """Creates a new Test Result for a Test Case"""
        data = {"projectKey": project_key,
                "testCaseKey": test_case_key}
        data.update(json)
        return self.session.post(Paths.RES, json=data)

    def get_attachments(self, test_result_id):
        """Retrieve the Test Result Attachments matching the given id"""
        return self.session.get(Paths.RES_ATTACH.format(test_result_id))

    def create_attachment(self, test_result_id, file_path):
        """Create a new attachment on the specified Test Result"""
        return self.session.post_file(Paths.RES_ATTACH.format(test_result_id), file_path)

    def get_step_attachments(self, test_result_id, step_id):
        """
        Retrieve the Test Result Step Attachments matching the given
        testResultId and stepIndex
        """
        return self.session.get(Paths.RES_STP_ATTACH.format(test_result_id, step_id))

    def create_step_attachment(self, test_result_id, step_id, file_path):
        """Create a new attachment on the specified step of the Test Result"""
        return self.session.post_file(Paths.RES_STP_ATTACH.format(test_result_id, step_id),
                                      file_path)


class IssueLinkEndpoints(EndpointTemplate):
    """Api wrapper for "Issue Link" endpoints"""

    def get_issue_links(self, issue_key, **params):
        """Retrieve all Test Cases linked to an Issue"""
        return self.session.get(Paths.ISSUE_CASES.format(issue_key), params=params)


class FolderEndpoints(EndpointTemplate):
    """Api wrapper for "Folder" endpoints"""

    def create_folder(self, project_key, name, folder_type):
        """
        Creates a new folder for test cases, test plans or test runs.

        In order to create a new folder you must POST a json with 3 fields:
        projectKey, name and type. The field type can be filled with TEST_CASE,
        TEST_PLAN or TEST_RUN.
        """
        json = {"projectKey": project_key,
                "name": name,
                "type": folder_type}
        return self.session.post(Paths.FOLDER, json=json)

    def update_folder(self, folder_id, **json):
        """
        Updates a folder for test cases, test plans or test runs.

        You can only update the name or the custom field value of a folder, in
        order to do that you must PUT a json with 2 fields: name and
        customFields. The field name is a String and forward and backslashes
        are not allowed. The field customFields is an object with the key being
        the custom field name.
        """
        return self.session.put(Paths.FOLDER_ID.format(folder_id), json=json)


class AttachmentEndpoints(EndpointTemplate):
    """Api wrapper for "Attachment" endpoints"""

    def delete_attachment(self, attachment_id):
        """Delete an Attachment given an id"""
        return self.session.delete(Paths.ATTACH.format(attachment_id))


class EnvironmentEndpoints(EndpointTemplate):
    """Api wrapper for "Environment" endpoints"""

    def get_environments(self, project_key):
        """
        Retrieve the Environments matching the given projectKey. The project
        must exist. The project must have Zephyr Scale enabled.
        """
        params = {"projectKey": project_key}
        return self.session.get(Paths.ENV, params=params)

    def create_environment(self, project_key, name, description=None):
        """
        Creates a new Environment.
        The project must exist
        The project must have Zephyr Scale enabled
        The name must be unique
        """
        json = {"projectKey": project_key,
                "name": name,
                "description": description}
        return self.session.post(Paths.ENV, json=json)


class AutomationEndpoints(EndpointTemplate):
    """Api wrapper for "Automation" endpoints"""

    def create_cycle(self, project_key, file_path, cycle_data=None):
        """
        Creates a new Test Cycle based on provided automated test results.

        This endpoint receives a zip file containing one or more Zephyr Scale
        Test Results File Format to create the Test Cycle. See Zephyr Scale
        JUnit Integration
        (https://bitbucket.org/smartbeartm4j/tm4j-junit-integration) to learn
        how to generate this file. Optionally, you can send a testCycle part in
        your form data to customize the created Test Cycle.
        """
        return self.session.post_file(Paths.ATM_PRJ_KEY.format(project_key),
                                      file_path=file_path,
                                      data=cycle_data)

    def create_cycle_cucumber(self, project_key, file_path, cycle_data=None):
        """
        Creates a new Test Cycle based on provided automated test results.

        This endpoint receives a zip file containing one or more Cucumber Json
        Output file
        (https://relishapp.com/cucumber/cucumber/docs/formatters/json-output-formatter).
        Optionally, you can send a testCycle part in your form data to
        customize the created Test Cycle.
        """
        # return self.session.post_file(Paths.ATM_CUCUMBER.format(project_key),
        #                               file_path=file_path,
        #                               data=cycle_data)
        raise NotImplementedError

    def get_testcases_cucumber(self, query):
        """
        Retrieve a zip file containing Cucumber Feature Files that matches the
        tql passed as parameter.
        """
        # return self.session.get(Paths.ATM_CASES, params={"tql": query})
        raise NotImplementedError


class ProjectEndpoints(EndpointTemplate):
    """Api wrapper for "Project" endpoints"""

    def create_zephyr_project(self, project_key, enabled):
        """
        Create a Zephyr Scale project for an existing Jira project.
        If the Zephyr Scale project exists, enable/disable it.
        """
        json = {"projectKey": project_key,
                "enabled": enabled}
        return self.session.post(Paths.PRJ, json=json)


class CustomFieldEndpoints(EndpointTemplate):
    """Api wrapper for "Custom Field" endpoints"""

    def create_custom_field(self, project_key, name, field_type, category, **kwargs):
        """
        Creates a new custom field for test cases, test plans, test runs,
        test result or folder.

        The custom field name must be unique by project and category.
        Custom fields must have one of these categories: TEST_PLAN, TEST_RUN,
        TEST_STEP, TEST_EXECUTION, TEST_CASE or FOLDER.
        Custom fields must have of these types: SINGLE_LINE_TEXT,
        MULTI_LINE_TEXT, NUMBER, DATE, SINGLE_CHOICE_SELECT_LIST, CHECKBOX,
        DECIMAL, MULTI_CHOICE_SELECT_LIST or USER_LIST.
        """
        json = {"projectKey": project_key,
                "name": name,
                "type": field_type,
                "category": category}
        json.update(kwargs)
        return self.session.post(Paths.CFIELD, json=json)

    def create_custom_field_opt(self, custom_field_id, option_name):
        """
        Creates a new custom field option for SINGLE_CHOICE_SELECT_LIST or
        MULTI_CHOICE_SELECT_LIST custom field.
        """
        return self.session.post(Paths.CFIELD_OPT.format(custom_field_id),
                                 json={"name": option_name})


class DeleteExecutionEndpoints(EndpointTemplate):
    """Api wrapper for "Delete Execution" endpoints"""

    def delete_execution(self, date):
        """
        Starts the deletion process of Test Executions (also known as Test
        Results). This process only removes executions older than 3 months and
        it will keep the last test executions. Only Jira Admin users can
        execute this process.
        """
        json = {"deleteExecutionsCreatedBefore": date}
        return self.session.post(Paths.DEL_EXEC, json=json)

    def get_status(self):
        """Gets the status of the test execution deletion process.
        The statuses can be: IN_PROGRESS, FINISHED or FAILED."""
        return self.session.get(Paths.DEL_EXEC_STATUS)
zephyr-python-api
/zephyr-python-api-0.0.3.tar.gz/zephyr-python-api-0.0.3/zephyr/scale/server/endpoints/endpoints.py
endpoints.py
import logging

from zephyr.scale.zephyr_session import ZephyrSession
from zephyr.scale.cloud.endpoints import (AutomationEndpoints,
                                          EnvironmentEndpoints,
                                          FolderEndpoints,
                                          HealthcheckEndpoints,
                                          LinkEndpoints,
                                          PriorityEndpoints,
                                          ProjectEndpoints,
                                          StatusEndpoints,
                                          TestCaseEndpoints,
                                          TestCycleEndpoints,
                                          TestExecutionEndpoints,
                                          TestPlanEndpoints)


class CloudApiWrapper:
    """Zephyr Scale Cloud Api wrapper. Contains wrappers by sections."""

    def __init__(self, session: ZephyrSession):
        self.session = session
        self.logger = logging.getLogger(__name__)

    # Each property hands the shared session to a fresh endpoint-group wrapper.

    @property
    def test_cases(self):
        """Wrappers for the "Test Case" endpoint group."""
        return TestCaseEndpoints(self.session)

    @property
    def test_cycles(self):
        """Wrappers for the "Test Cycle" endpoint group."""
        return TestCycleEndpoints(self.session)

    @property
    def test_plans(self):
        """Wrappers for the "Test Plan" endpoint group."""
        return TestPlanEndpoints(self.session)

    @property
    def test_executions(self):
        """Wrappers for the "Test Execution" endpoint group."""
        return TestExecutionEndpoints(self.session)

    @property
    def folders(self):
        """Wrappers for the "Folder" endpoint group."""
        return FolderEndpoints(self.session)

    @property
    def statuses(self):
        """Wrappers for the "Status" endpoint group."""
        return StatusEndpoints(self.session)

    @property
    def priorities(self):
        """Wrappers for the "Priority" endpoint group."""
        return PriorityEndpoints(self.session)

    @property
    def environments(self):
        """Wrappers for the "Environment" endpoint group."""
        return EnvironmentEndpoints(self.session)

    @property
    def projects(self):
        """Wrappers for the "Project" endpoint group."""
        return ProjectEndpoints(self.session)

    @property
    def links(self):
        """Wrappers for the "Link" endpoint group."""
        return LinkEndpoints(self.session)

    @property
    def automations(self):
        """Wrappers for the "Automation" endpoint group."""
        return AutomationEndpoints(self.session)

    @property
    def healthcheck(self):
        """Wrappers for the "Healthcheck" endpoint group."""
        return HealthcheckEndpoints(self.session)
zephyr-python-api
/zephyr-python-api-0.0.3.tar.gz/zephyr-python-api-0.0.3/zephyr/scale/cloud/cloud_api.py
cloud_api.py
from json import dumps

from ...zephyr_session import ZephyrSession


class AutomationEndpoints:
    """Api wrapper for "Automation" endpoints"""

    def __init__(self, session: ZephyrSession):
        self.session = session

    def _post_reports(self, path, project_key, file_path, auto_create=False,
                      test_cycle=None, **kwargs):
        """
        Post various reports logic.

        :param path: str with resource path
        :param project_key: str with project key
        :param file_path: str with path to .zip archive with report files
        :param auto_create: indicate if test cases should be created if non existent
        :param test_cycle: dict with test cycle description data
        """
        query_params = {'projectKey': project_key}
        if auto_create:
            query_params.update({'autoCreateTestCases': True})

        # The optional test cycle travels as an extra JSON part of the
        # multipart upload.
        extra_parts = None
        if test_cycle:
            extra_parts = {'testCycle': (None, dumps(test_cycle), 'application/json')}

        return self.session.post_file(path,
                                      file_path,
                                      to_files=extra_parts,
                                      params=query_params,
                                      **kwargs)

    def post_custom_format(self, project_key, file_path, auto_create=False,
                           test_cycle=None, **kwargs):
        """
        Create results using Zephyr Scale's custom results format.

        :param project_key: str with project key
        :param file_path: str with path to .zip archive with report files
        :param auto_create: indicate if test cases should be created if non existent
        :param test_cycle: dict with test cycle description data
        """
        return self._post_reports('automations/executions/custom',
                                  project_key=project_key,
                                  file_path=file_path,
                                  auto_create=auto_create,
                                  test_cycle=test_cycle,
                                  **kwargs)

    def post_cucumber_format(self, project_key, file_path, auto_create=False,
                             test_cycle=None, **kwargs):
        """
        Create results using the Cucumber results format.

        :param project_key: str with project key
        :param file_path: str with path to .zip archive with report files
        :param auto_create: indicate if test cases should be created if non existent
        :param test_cycle: dict with test cycle description data
        """
        return self._post_reports('automations/executions/cucumber',
                                  project_key=project_key,
                                  file_path=file_path,
                                  auto_create=auto_create,
                                  test_cycle=test_cycle,
                                  **kwargs)

    def post_junit_xml_format(self, project_key, file_path, auto_create=False,
                              test_cycle=None, **kwargs):
        """
        Create results using the JUnit XML results format.

        :param project_key: str with project key
        :param file_path: str with path to .zip archive with report files
        :param auto_create: indicate if test cases should be created if non existent
        :param test_cycle: dict with test cycle description data
        """
        return self._post_reports('automations/executions/junit',
                                  project_key=project_key,
                                  file_path=file_path,
                                  auto_create=auto_create,
                                  test_cycle=test_cycle,
                                  **kwargs)

    def get_testcases_zip(self, project_key):
        """
        Retrieve a zip file containing Cucumber Feature Files that matches the
        query passed as parameter.
        """
        return self.session.get("automations/testcases",
                                return_raw=True,
                                params={"projectKey": project_key},
                                headers={"Accept": "application/zip"})
zephyr-python-api
/zephyr-python-api-0.0.3.tar.gz/zephyr-python-api-0.0.3/zephyr/scale/cloud/endpoints/automations.py
automations.py
import json

import requests

from zephyr_results_publisher.behave_to_cucumber_converter import convert_report, validate_json
from zephyr_results_publisher.file_util import zip_file, get_path_dir
from zephyr_results_publisher.helper import check_response_status, find_folder_id_by_name

BASE_URL = "https://api.zephyrscale.smartbear.com/v2"


def publish(zephyr_token, project_key, source_report_file, report_format,
            auto_create_test_cases="true"):
    """Publish a results file to Zephyr Scale.

    Converts behave output to cucumber format if needed, validates cucumber
    reports, zips the report and posts it to the automations endpoint.

    :param zephyr_token: Zephyr Scale API access token
    :param project_key: Jira project key
    :param source_report_file: path to the report file to publish
    :param report_format: "behave", "cucumber" or "junit"
    :param auto_create_test_cases: "true"/"false" flag passed through to the API
    :return: parsed JSON response dict, or None if no token was provided
    """
    if does_zephyr_token_exist(zephyr_token) is False:
        return
    if report_format == "behave":
        report_format = "cucumber"
        print(f"Start converting: {source_report_file}")
        report = convert_behave_report(source_report_file)
        if report is not None:
            with open(source_report_file, "w") as f:
                f.write(str(report))
    if report_format == "cucumber":
        validate_report_schema(source_report_file)

    url = BASE_URL + f"/automations/executions/{report_format}"
    source_path_dir = get_path_dir(source_report_file)
    output_zip = f"{source_path_dir}/testResults.zip"
    zip_file(source_report_file, output_zip)

    params = {
        "projectKey": project_key,
        "autoCreateTestCases": auto_create_test_cases
    }
    headers = {
        "Authorization": "Bearer " + zephyr_token
    }
    print("Sending results to Zephyr Scale...")
    # Context manager closes the archive handle (previously leaked).
    with open(output_zip, 'rb') as zip_fp:
        files = {
            "file": zip_fp
        }
        response = requests.post(url, files=files, params=params, headers=headers)
    check_response_status(response, 200)
    parsed_response = json.loads(response.text)
    print(f"Parsed response: {parsed_response}")
    return parsed_response


def publish_customized_test_cycle(zephyr_token, project_key, source_report_file,
                                  report_format, auto_create_test_cases="true",
                                  test_cycle_name="Automated Build",
                                  test_cycle_folder_name="All test cycles",
                                  test_cycle_description="",
                                  test_cycle_jira_project_version=1,
                                  test_cycle_custom_fields=None):
    """Publish a results file to Zephyr Scale into a customized test cycle.

    Same flow as publish(), but additionally builds a testCycle multipart part
    (name, folder, description, version, custom fields) for the upload.

    :return: parsed JSON response dict, or None if no token was provided
    """
    if does_zephyr_token_exist(zephyr_token) is False:
        return
    # Normalize "no custom fields" inputs (None / "" / "{}") to an empty dict.
    if test_cycle_custom_fields is None \
            or test_cycle_custom_fields == "" \
            or test_cycle_custom_fields == "{}":
        test_cycle_custom_fields = {}
    if report_format == "behave":
        report_format = "cucumber"
        report = convert_behave_report(source_report_file)
        if report is not None:
            with open(source_report_file, "w") as f:
                f.write(str(report))
    if report_format == "cucumber":
        validate_report_schema(source_report_file)

    url = BASE_URL + f"/automations/executions/{report_format}"
    source_path_dir = get_path_dir(source_report_file)
    output_zip = f"{source_path_dir}/testResults.zip"
    zip_file(source_report_file, output_zip)

    test_cycle = customize_test_cycle(zephyr_token, project_key, test_cycle_name,
                                      test_cycle_folder_name, test_cycle_description,
                                      test_cycle_jira_project_version,
                                      test_cycle_custom_fields)

    params = {
        "projectKey": project_key,
        "autoCreateTestCases": auto_create_test_cases
    }
    headers = {
        "Authorization": "Bearer " + zephyr_token
    }
    print("Sending results to Zephyr Scale...")
    # Context manager closes the archive handle (previously leaked).
    with open(output_zip, 'rb') as zip_fp:
        files = {
            "file": zip_fp,
            "testCycle": ("test_cycle.json", test_cycle, "application/json")
        }
        response = requests.post(url, files=files, params=params, headers=headers)
    check_response_status(response, 200)
    parsed_response = json.loads(response.text)
    print(f"Parsed response: {parsed_response}")
    return parsed_response


def does_zephyr_token_exist(zephyr_token):
    """Return False (and warn) when no Zephyr Scale token is provided."""
    if zephyr_token is None:
        print("Zephyr Scale API Access Token is not provided. "
              "Results will not be published to Zephyr Scale.")
        return False
    return True


def convert_behave_report(report_path):
    """Convert a behave JSON report to cucumber format, returned as a JSON string."""
    converted = convert_report(report_path)
    return json.dumps(converted, sort_keys=True, indent=2)


def validate_report_schema(report_path):
    """Validate a cucumber report file against the bundled JSON schema."""
    print(f"Start file validation: {report_path}")
    with open(report_path, 'r') as json_file:
        report = json.load(json_file)
    validate_json(report)


def customize_test_cycle(zephyr_token, project_key, test_cycle_name="Automation cycle",
                         folder_name="All test cycles", description="",
                         jira_project_version=1, custom_fields=None):
    """Build the testCycle JSON payload, resolving the folder name to its id."""
    if custom_fields is None:
        custom_fields = {}
    folder_id = get_folder_id_by_name(zephyr_token, folder_name, project_key, 20)
    test_cycle_json = {
        "name": test_cycle_name,
        "description": description,
        "jiraProjectVersion": jira_project_version,
        "folderId": folder_id,
        "customFields": custom_fields
    }
    print(f"Custom test cycle is generated: {test_cycle_json}")
    return json.dumps(test_cycle_json)


def get_folder_id_by_name(zephyr_token, name, project_key, max_results):
    """Look up a TEST_CYCLE folder id by name via the /folders endpoint."""
    url = BASE_URL + "/folders"
    params = {
        "projectKey": project_key,
        "folderType": "TEST_CYCLE",
        "startAt": 0,
        "maxResults": max_results
    }
    headers = {
        "Authorization": "Bearer " + zephyr_token
    }
    response = requests.get(url, params=params, headers=headers)
    check_response_status(response, 200)
    parsed_response = json.loads(response.text)
    return find_folder_id_by_name(name, parsed_response)
zephyr-results-publisher
/zephyr_results_publisher-0.2.0-py3-none-any.whl/zephyr_results_publisher/publisher.py
publisher.py
import json
import sys
import os

from jsonschema import Draft4Validator


def convert_report(report_file):
    """Load a behave JSON report and rewrite it, in place, into cucumber shape.

    :param report_file: path to the behave JSON report file
    :return: the mutated report structure (list of feature dicts)
    """
    with open(report_file, 'r') as json_file:
        report = json.load(json_file)

    # Shared behave->cucumber field rewrite for features and scenarios.
    # delete_tags=True empties the tag list (used for features); otherwise
    # tags are wrapped as cucumber tag objects with a leading '@'.
    def common_processing(item, delete_tags):
        # behave's "location" is "uri:line"; cucumber wants separate fields.
        item['uri'], item['line'] = item.pop('location').split(':')
        item['line'] = int(item['line'])
        if delete_tags:
            item['tags'] = []
        else:
            item['tags'] = [{'name': '@' + tag} for tag in item.get('tags', [])]
        if 'id' not in item:
            # Derive a cucumber-style id from the name: spaces -> dashes, lowercased.
            item['id'] = item['name'].replace(' ', '-').lower()
        if 'description' in item:
            # behave stores the description as a list of lines; keep the first only.
            item['description'] = item['description'][0]
        else:
            item['description'] = ''

    for feature in report:
        common_processing(feature, True)
        for scenario in feature['elements']:
            common_processing(scenario, False)
            for step in scenario['steps']:
                step['uri'], step['line'] = step.pop('location').split(':')
                step['line'] = int(step['line'])
                if 'result' in step:
                    # behave reports duration in seconds; cucumber expects nanoseconds.
                    step['result']['duration'] = int(1000000000 * step['result']['duration'])
                else:
                    # Steps never executed get an explicit skipped result.
                    step['result'] = {'status': 'skipped', 'duration': 0}
                if 'table' in step:
                    # behave table {headings, rows} -> cucumber rows of cell lists.
                    step['rows'] = [{'cells': step['table']['headings']}] + \
                                   [{'cells': cells} for cells in step['table']['rows']]
                    del step['table']
                if 'match' in step:
                    if 'arguments' in step['match']:
                        # Cucumber match arguments are {val, offset}; offset is unknown here.
                        step['match']['arguments'] = \
                            [{'val': '{}'.format(arg['value']), 'offset': 0}
                             for arg in step['match']['arguments']]
                else:
                    step['match'] = {'arguments': [], 'location': 'UNKNOWN - SKIPPED'}
    return report


def validate_json(report_file):
    """Validate a parsed cucumber report against the bundled Draft-4 schema.

    Prints each validation error to stderr and exits the process with code 1
    if any were found.

    :param report_file: the already-parsed report data (dict/list), despite the name
    """
    file_dir = os.path.dirname(__file__)
    file_path = f"{file_dir}/model/cucumber_report_schema.json"
    with open(file_path, 'r') as json_file:
        schema = json.load(json_file)
    errors = list(Draft4Validator(schema).iter_errors(report_file))
    for error in errors:
        # "#/a/b" JSON-pointer-ish prefix locating the failing element.
        print('#/' + '/'.join([str(path) for path in error.path]), error.message,
              file=sys.stderr)
    if errors:
        # NOTE: exits the whole process on invalid input — callers cannot recover.
        sys.exit(1)
zephyr-results-publisher
/zephyr_results_publisher-0.2.0-py3-none-any.whl/zephyr_results_publisher/behave_to_cucumber_converter.py
behave_to_cucumber_converter.py
from zephyr_sdk.components import cycles from zephyr_sdk.components import defects from zephyr_sdk.components import projects from zephyr_sdk.components import releases from zephyr_sdk.components import requirements from zephyr_sdk.components import requirementtrees from zephyr_sdk.components import testcases from zephyr_sdk.components import testcasetrees from zephyr_sdk.components import users from zephyr_sdk.exceptions.ZExceptions import ResourceNotFoundError from zephyr_sdk.exceptions.ZExceptions import InsufficientContextError from zephyr_sdk.exceptions.ZExceptions import MethodNotImplementedError class ZClient: # Constructor for the zephyr client. def __init__(self, token, url): self.token = token self.base_url = url # Set user id for the API calls user_info = self.get_current_logged_in_users() self.user_id = user_info['id'] self.user_name = user_info['username'] # Properties that are undeclared for now self.project_id = None self.project_name = None self.release_id = None self.release_name = None self.req_tree_id = None self.testcase_tree_id = None # Methods to add context to the client itself. def set_project(self, project_name): # Find the project with the name passed in response = self.get_all_projects_lite() for project in response: # Set the project id and exit function if found if project['name'] == project_name: self.project_id = project['id'] self.project_name = project_name return # If the method got to this point, than raise an exception. 
raise ResourceNotFoundError("Project", project_name) def set_release(self, release_name): # First check to see if the project was set if self.project_id is None: raise InsufficientContextError("Project") # Now find the release with the name passed in response = self.get_releases_for_a_project(self.project_id) for release in response: # Set the release id and exit the function if found if release['name'] == release_name: self.release_id = release['id'] self.release_name = release['name'] return # If the method got to this point, raise an exception raise ResourceNotFoundError("Release", release_name) def set_requirement_tree(self, tree_name): # First check to see if the project was set if self.project_id is None: raise InsufficientContextError("Project") # Now find the requirement tree with the specified name. response = self.get_requirement_tree_with_ids(self.project_id, self.release_id) for tree in response: # Set the requirement tree id to be the proper tree if found. if tree['name'] == tree_name: self.req_tree_id = tree['id'] return # Raise an exception that tree was not found. raise ResourceNotFoundError("Requirement tree", tree_name) def set_testcase_tree(self, tree_name): # First check to see if release id has been set. 
if self.release_id is None: raise InsufficientContextError("Release") # Now make the API call to get the testcase tree ids response = self.get_testcase_tree_by_release_id() for tree in response: if tree['name'] == tree_name: self.testcase_tree_id = tree['id'] return raise ResourceNotFoundError("Testcase Tree", tree_name) # Cycle methods def create_cycle(self, cycle_spec): return cycles.create_cycle(self, cycle_spec) def delete_cycle(self, cycle_id): return cycles.delete_cycle(self, cycle_id) def get_cycle_by_id(self, cycle_id): return cycles.get_cycle_by_id(self, cycle_id) def get_cycles_for_release(self, release_id): return cycles.get_cycles_for_release(self, release_id) def update_cycle(self, cycle_id, cycle_spec): return cycles.update_cycle(self, cycle_id, cycle_spec) # Defects methods def create_defect(self, defect_spec): if 'projectId' not in defect_spec and self.project_id is not None: defect_spec['projectId'] = self.project_id if 'product' not in defect_spec and self.project_name is not None: defect_spec['product'] = str(self.project_id) if 'target_milestone' not in defect_spec and self.release_name is not None: defect_spec['target_milestone'] = self.release_name if 'version' not in defect_spec and self.release_id is not None: defect_spec['version'] = str(self.release_id) if 'assigned_to' not in defect_spec: defect_spec['assigned_to'] = self.user_name return defects.create_defect(self, defect_spec) def delete_defect(self, defect_id): return defects.delete_defect(self, defect_id) def get_component(self, component_name, project_id=None): if project_id is None: return defects.get_component(self, component_name, self.project_id) else: return defects.get_component(self, component_name, project_id) def get_defect(self, defect_id): return defects.get_defect(self, defect_id) # Projects methods def get_all_normal_projects_details(self): return projects.get_all_normal_projects_details(self) def get_all_normal_project(self): return projects.get_all_normal_project(self) 
def get_lead_for_all_projects(self): return projects.get_lead_for_all_projects(self) def get_project_by_id(self, project_id=None): if project_id is None: return projects.get_project_by_id(self, self.project_id) else: return projects.get_project_by_id(self, project_id) def get_project_team_count_for_all_projects(self): return projects.get_project_team_count_for_all_projects(self) def get_project_team_count_for_all_users(self): return projects.get_project_team_count_for_all_users(self) def get_project_team_for_allocated_projects(self): return projects.get_project_team_for_allocated_projects(self) def get_project_team_for_project(self, project_id=None): raise MethodNotImplementedError("This method produces too much output.") if project_id is None: return projects.get_project_team_for_project(self, self.project_id) else: return projects.get_project_team_for_project(self, project_id) def get_all_projects(self, include_inactive): raise MethodNotImplementedError("Response always returns empty.") return projects.get_all_projects(self, include_inactive) def get_all_projects_lite(self): return projects.get_all_projects_lite(self) # Releases methods def create_release(self, release_spec): if 'projectId' not in release_spec and self.project_id is not None: release_spec['projectId'] = self.project_id return releases.create_release(self, release_spec) def delete_release(self, release_id): return releases.delete_release(self, release_id) def get_release_by_release_id(self, release_id=None): if release_id is None: return releases.get_release_by_release_id(self, self.release_id) else: return releases.get_release_by_release_id(self, release_id) def get_releases_for_a_project(self, project_id=None): if project_id is None: return releases.get_releases_for_a_project(self, self.project_id) else: return releases.get_releases_for_a_project(self, project_id) def update_release(self, release_spec, release_id=None): if release_id is None: return releases.update_release(self, release_spec, 
self.release_id) else: return releases.update_release(self, release_spec, release_id) # Requirements methods def create_requirement(self, requirement_spec): if 'requirementTreeId' not in requirement_spec and self.req_tree_id is not None: requirement_spec['requirementTreeId'] = self.req_tree_id return requirements.create_requirement(self, requirement_spec) def delete_requirement(self, req_id): return requirements.delete_requirement(self, req_id) def get_requirements(self, req_tree_id=None): if req_tree_id is None: return requirements.get_requirements(self, self.req_tree_id) else: return requirements.get_requirements(self, req_tree_id) def update_requirement(self, req_id, requirement_spec): return requirements.update_requirement(self, req_id, requirement_spec) # RequirementTrees methods def create_requirement_tree(self, req_tree_spec): if 'projectId' not in req_tree_spec and self.project_id is not None: req_tree_spec['projectId'] = self.project_id if 'releaseIds' not in req_tree_spec and self.release_id is not None: req_tree_spec['releaseIds'] = [self.release_id] if 'parentId' not in req_tree_spec and self.req_tree_id is not None: req_tree_spec['parentId'] = self.req_tree_id return requirementtrees.create_requirement_tree(self, req_tree_spec) def get_all_requirement_trees(self): return requirementtrees.get_all_requirement_trees(self) def get_requirement_tree_with_ids(self, project_id=None, release_id=None): if project_id is None: project_id = self.project_id if release_id is None: release_id = self.release_id return requirementtrees.get_requirement_tree_with_ids(self, project_id, release_id) # Testcases methods def create_testcase(self, testcase_spec): if 'testcase' not in testcase_spec: testcase_spec['testcase'] = {} if 'releaseId' not in testcase_spec['testcase'] and self.release_id is not None: testcase_spec['testcase']['releaseId'] = self.release_id if 'tcrCatalogTreeId' not in testcase_spec and self.testcase_tree_id is not None: testcase_spec['tcrCatalogTreeId'] 
= self.testcase_tree_id return testcases.create_testcase(self, testcase_spec) def delete_testcase(self, testcase_id): return testcases.delete_testcase(self, testcase_id) def get_testcase_by_id(self, testcase_id): return testcases.get_testcase_by_id(self, testcase_id) def map_testcase_to_requirements(self, map_spec): return testcases.map_testcase_to_requirements(self, map_spec) def update_testcase(self, testcase_id, testcase_spec): return testcases.update_testcase(self, testcase_id, testcase_spec) # Testcase tree methods def get_testcase_tree_by_release_id(self, release_id=None): if release_id is None: release_id = self.release_id return testcasetrees.get_testcase_tree_by_release_id(self, release_id) # Users methods def get_current_logged_in_users(self): return users.get_current_logged_in_users(self)
zephyr-sdk
/zephyr_sdk-0.2.1-py3-none-any.whl/zephyr_sdk/zephyr.py
zephyr.py
import requests

from zephyr_sdk.exceptions.ZExceptions import ZAPIError
from zephyr_sdk.exceptions.ZExceptions import MissingParametersError


# Create a release with the information passed in
def create_release(client, release_spec):
    """POST /release/ to create a new release.

    Raises MissingParametersError listing every absent required field, and
    ZAPIError on a non-200 response.
    """
    url = client.base_url + '/release/'
    headers = {
        'Authorization': 'Bearer ' + client.token,
        'Content-Type': 'application/json'
    }

    # Every one of these fields must be present in the spec.
    missing = [field for field in
               ('name', 'releaseStartDate', 'releaseEndDate', 'projectId')
               if field not in release_spec]
    if missing:
        raise MissingParametersError(missing)

    r = requests.post(url, json=release_spec, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Delete a release by release id.
def delete_release(client, release_id):
    """DELETE a release by id; returns the JSON response body."""
    url = client.base_url + '/release/' + str(release_id)
    headers = {
        'Authorization': 'Bearer ' + client.token
    }
    r = requests.delete(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Get details of a release by the release id
def get_release_by_release_id(client, release_id):
    """GET the details of a single release."""
    url = client.base_url + '/release/' + str(release_id)
    headers = {
        'Authorization': 'Bearer ' + client.token
    }
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Get all releases for a project
def get_releases_for_a_project(client, project_id):
    """GET every release belonging to a project."""
    url = client.base_url + '/release/project/' + str(project_id)
    headers = {
        'Authorization': 'Bearer ' + client.token
    }
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Update a release with the information passed in
def update_release(client, release_id, release_spec):
    """PUT an updated release.

    Fields missing from release_spec are back-filled from the release's
    current server-side state so the PUT carries a complete object.
    BUGFIX: the original mutated the caller's release_spec dict in place
    (and carried a copy-pasted "Create a release" comment); a merged payload
    is built instead.
    """
    url = client.base_url + '/release/' + str(release_id)
    headers = {
        'Authorization': 'Bearer ' + client.token,
        'Content-Type': 'application/json'
    }

    # Current state first, then the caller's overrides on top.
    payload = dict(client.get_release_by_release_id(release_id))
    payload.update(release_spec)

    r = requests.put(url, json=payload, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()
zephyr-sdk
/zephyr_sdk-0.2.1-py3-none-any.whl/zephyr_sdk/components/releases.py
releases.py
import requests

from zephyr_sdk.exceptions.ZExceptions import ZAPIError


def _get(client, path):
    """GET base_url + path with the client's bearer token; return JSON.

    Raises ZAPIError on any non-200 response.
    """
    response = requests.get(
        client.base_url + path,
        headers={'Authorization': 'Bearer ' + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Get the details of all projects.
def get_all_normal_projects_details(client):
    """Details of every normal project."""
    return _get(client, '/project/details')


# Gets the id of all projects, and returns them in an array.
def get_all_normal_project(client):
    """Ids of every normal project."""
    return _get(client, '/project/normal')


# Gets the id of every project lead.
def get_lead_for_all_projects(client):
    """Lead of every project."""
    return _get(client, '/project/all/leads')


# Gets project details by the id passed in.
def get_project_by_id(client, project_id):
    """Details of a single project."""
    return _get(client, '/project/' + str(project_id))


# Gets the team count for all projects
def get_project_team_count_for_all_projects(client):
    """Team member count of every project."""
    return _get(client, '/project/count/allprojects')


# Gets the number of projects each user is a member of.
def get_project_team_count_for_all_users(client):
    """Number of projects each user is a member of."""
    response = requests.get(
        client.base_url + '/project/count/allusers',
        headers={'Authorization': 'Bearer ' + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Get the user ids for the teams for all projects
def get_project_team_for_allocated_projects(client):
    """User ids of the team of each allocated project."""
    response = requests.get(
        client.base_url + '/project/allocated/projects',
        headers={'Authorization': 'Bearer ' + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Get the user ids of the members on a specific project
def get_project_team_for_project(client, project_id):
    """User ids of the members of one project."""
    response = requests.get(
        client.base_url + '/project/projectteam/' + str(project_id),
        headers={'Authorization': 'Bearer ' + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Get all projects
def get_all_projects(client, include_inactive):
    """Every project, optionally including inactive ones."""
    response = requests.get(
        client.base_url + '/project?includeinactive=' + str(include_inactive),
        headers={'Authorization': 'Bearer ' + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Light call to get all projects. Only ids and names
def get_all_projects_lite(client):
    """Light project listing: ids and names only."""
    response = requests.get(
        client.base_url + '/project/lite',
        headers={'Authorization': 'Bearer ' + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()
zephyr-sdk
/zephyr_sdk-0.2.1-py3-none-any.whl/zephyr_sdk/components/projects.py
projects.py
import requests

from zephyr_sdk.exceptions.ZExceptions import ZAPIError
from zephyr_sdk.exceptions.ZExceptions import MissingParametersError
from zephyr_sdk.exceptions.ZExceptions import ResourceNotFoundError


def create_defect(client, defect_spec):
    """POST /defect to create a new defect.

    Raises MissingParametersError listing every absent required field, and
    ZAPIError on a non-200 response.
    """
    url = client.base_url + '/defect'
    headers = {
        "Authorization": "Bearer " + client.token,
        "Content-Type": "application/json"
    }

    # Validate the request body: all of these fields are mandatory.
    required_parameters = [
        'projectId', 'product', 'target_milestone', 'hardware', 'longDesc',
        'status', 'version', 'severity', 'assigned_to', 'component',
        'shortDesc', 'op_sys', 'priority'
    ]
    error_list = [p for p in required_parameters if p not in defect_spec]
    if error_list:
        raise MissingParametersError(error_list)

    r = requests.post(url, headers=headers, json=defect_spec)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Update defect method.
def update_defect(client, defect_spec, defect_id):
    """PUT an updated defect.

    BUGFIX: the original body only built the URL and fell through, returning
    None. Implemented here following the same PUT pattern as
    releases.update_release / cycles.update_cycle — NOTE(review): confirm
    against the Zephyr API documentation.
    """
    url = client.base_url + '/defect/' + str(defect_id)
    headers = {
        "Authorization": "Bearer " + client.token,
        "Content-Type": "application/json"
    }
    r = requests.put(url, headers=headers, json=defect_spec)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Request to delete defect.
def delete_defect(client, defect_id):
    """DELETE a defect by id."""
    url = client.base_url + '/defect/' + str(defect_id)
    headers = {
        "Authorization": 'Bearer ' + client.token
    }
    r = requests.delete(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Method for grabbing a component ID
def get_component(client, component_name, project_id):
    """Look up a component (by display name) in a project's defect metadata.

    Returns the matching component dict; raises ResourceNotFoundError if no
    component with that name exists.
    """
    url = client.base_url + '/defect/jtrac/project/metadata?projectids=' + str(project_id)
    headers = {
        'Authorization': 'Bearer ' + client.token
    }
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)

    metadata = r.json()
    project_metadata = metadata[str(project_id)]

    # Find the metadata entry that holds the components.
    # NOTE(review): assumes each metadata value is a non-empty list of dicts
    # with a 'name' key — confirm against a live metadata response.
    component_array_index = ""
    for data_object_index, data_object in project_metadata.items():
        if data_object[0]['name'] == 'component':
            component_array_index = data_object_index
            break

    # Scan the component list for the requested display name.
    component_array = project_metadata[component_array_index]
    for component in component_array:
        if component['option'] == component_name:
            return component

    # No component matched the requested name.
    raise ResourceNotFoundError("component", component_name)


# Get a single defect.
def get_defect(client, defect_id):
    """GET a defect by id."""
    url = client.base_url + '/defect/' + str(defect_id)
    headers = {
        'Authorization': 'Bearer ' + client.token
    }
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()
zephyr-sdk
/zephyr_sdk-0.2.1-py3-none-any.whl/zephyr_sdk/components/defects.py
defects.py
import requests

from zephyr_sdk.exceptions.ZExceptions import ZAPIError
from zephyr_sdk.exceptions.ZExceptions import MissingParametersError


# Create an execution cycle
def create_cycle(client, cycle_spec):
    """POST /cycle to create an execution cycle.

    Raises MissingParametersError listing every absent required field, and
    ZAPIError on a non-200 response.
    """
    # Every one of these fields must be present in the spec.
    missing = [field for field in
               ('name', 'releaseId', 'cycleStartDate', 'cycleEndDate')
               if field not in cycle_spec]
    if missing:
        raise MissingParametersError(missing)

    response = requests.post(
        client.base_url + '/cycle',
        headers={
            "Authorization": "Bearer " + client.token,
            "Content-Type": "application/json",
        },
        json=cycle_spec,
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Delete an execution cycle
def delete_cycle(client, cycle_id):
    """DELETE an execution cycle by id."""
    response = requests.delete(
        client.base_url + '/cycle/' + str(cycle_id),
        headers={"Authorization": "Bearer " + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Get execution cycle by cycle id.
def get_cycle_by_id(client, cycle_id):
    """GET an execution cycle by id."""
    response = requests.get(
        client.base_url + '/cycle/' + str(cycle_id),
        headers={"Authorization": "Bearer " + client.token},
    )
    if response.status_code != 200:
        raise ZAPIError(response)
    return response.json()


# Get execution cycles in a release.
def get_cycles_for_release(client, release_id):
    """GET every execution cycle belonging to a release."""
    url = client.base_url + '/cycle/release/' + str(release_id)
    headers = {
        "Authorization": "Bearer " + client.token
    }
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()


# Update an execution cycle
def update_cycle(client, cycle_id, cycle_spec):
    """PUT an updated execution cycle.

    Fields missing from cycle_spec are back-filled from the cycle's current
    state so the PUT carries a complete object. BUGFIX: the original mutated
    the caller's cycle_spec dict in place (and carried a copy-pasted
    "Create an execution cycle" comment); a merged payload is built instead.
    """
    url = client.base_url + '/cycle/' + str(cycle_id)
    headers = {
        "Authorization": "Bearer " + client.token,
        "Content-Type": "application/json"
    }

    # Current state first, then the caller's overrides on top.
    payload = dict(client.get_cycle_by_id(cycle_id))
    payload.update(cycle_spec)

    r = requests.put(url, headers=headers, json=payload)
    if r.status_code != 200:
        raise ZAPIError(r)
    return r.json()
zephyr-sdk
/zephyr_sdk-0.2.1-py3-none-any.whl/zephyr_sdk/components/cycles.py
cycles.py
# Zephyr [![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/uwoseis/zephyr/blob/master/LICENSE) [![Travis Status](https://travis-ci.org/uwoseis/zephyr.svg?branch=master)](https://travis-ci.org/uwoseis/zephyr) [![Coverage Status](https://coveralls.io/repos/uwoseis/zephyr/badge.svg?branch=master&service=github)](https://coveralls.io/github/uwoseis/zephyr?branch=master) [![Code Issues](https://www.quantifiedcode.com/api/v1/project/02a2dbe184e04d1482daa174e9973501/badge.svg)](https://www.quantifiedcode.com/app/project/02a2dbe184e04d1482daa174e9973501) Open-source seismic waveform modelling and inversion code written in Python - Project site: [https://zephyr.space](https://zephyr.space) - Auto docs: [https://zephyr.space/api/zephyr.html](https://zephyr.space/api/zephyr.html) ## Authors - Brendan Smithyman <[[email protected]](mailto:[email protected])> - Shaun Hadden <[[email protected]](mailto:[email protected])>
zephyr-seis
/zephyr-seis-0.1.7.tar.gz/zephyr-seis-0.1.7/README.md
README.md
from __future__ import print_function, unicode_literals, division, absolute_import from builtins import open from future import standard_library standard_library.install_aliases() from builtins import object import pickle from zephyr import backend from zephyr import middleware from zephyr import frontend class Job(object): ''' The base class for jobs. ''' Problem = None Survey = None SystemWrapper = None Disc = None Solver = None projnm = None def __init__(self, projnm, supplementalConfig=None): try: from pymatsolver import MumpsSolver except ImportError: print('NB: Can\'t import MumpsSolver; falling back to SuperLU') else: self.Solver = MumpsSolver self.projnm = projnm print('Setting up composite job "%s":'%(self.__class__.__name__,)) for item in self.__class__.__mro__[:-1][::-1]: print('\t%s'%(item.__name__,)) print() systemConfig = self.getSystemConfig(projnm) update = {} if self.SystemWrapper is not None: update['SystemWrapper'] = self.SystemWrapper if self.Disc is not None: update['Disc'] = self.Disc if self.Solver is not None: update['Solver'] = self.Solver systemConfig.update(update) if supplementalConfig is not None: systemConfig.update(supplementalConfig) if not 'projnm' in systemConfig: systemConfig['projnm'] = projnm # Set up problem and survey objects self.systemConfig = systemConfig self.problem = self.Problem(systemConfig) self.survey = self.Survey(systemConfig) self.problem.pair(self.survey) def getSystemConfig(self, projnm): ''' Get the project ''' raise NotImplementedError def run(self): ''' Run the job ''' raise NotImplementedError def saveData(self, data): ''' Output the data ''' raise NotImplementedError class ForwardModelingJob(Job): ''' A task job that selects forward modelling. 
''' def run(self): messageInfo = { 'class': self.__class__.__name__, 'projnm': self.projnm, } print('Running %(class)s(%(projnm)s)...'%messageInfo) print('\t- solving system') data = self.survey.dpred() data.shape = (self.survey.nrec, self.survey.nsrc, self.survey.nfreq) print('\t- saving data') self.saveData(data) print('Done!') class Visco2DJob(Job): ''' A physics job profile that selects 2D viscoacoustic Helmholtz ''' Problem = middleware.Helm2DViscoProblem Survey = middleware.Helm2DSurvey class IsotropicVisco2DJob(Visco2DJob): ''' A physics job profile that selects 2D viscoacoustic Helmholtz with isotropy (i.e., MiniZephyr). ''' Disc = backend.MiniZephyrHD class AnisotropicVisco2DJob(Visco2DJob): ''' A physics job profile that selects 2D viscoacoustic Helmholtz with TTI anisotropy (i.e., Eurus). ''' Disc = backend.EurusHD class IniInputJob(Job): ''' An input job profile that reads configuration from a projnm.ini file and SEG-Y model / data files ''' def getSystemConfig(self, projnm): self.ds = middleware.FullwvDatastore(projnm) return self.ds.systemConfig class PythonInputJob(Job): ''' An input job profile that gets configuration from a projnm.py file ''' def getSystemConfig(self, projnm): self.ds = middleware.FlatDatastore(projnm) return self.ds.systemConfig class PickleInputJob(Job): ''' An input job profile that gets configuration from a projnm.pickle file ''' def getSystemConfig(self, projnm): self.ds = middleware.PickleDatastore(projnm) return self.ds.systemConfig class UtoutOutputJob(Job): ''' An output job profile that saves results to a projnm.utout file ''' def saveData(self, data): utow = middleware.UtoutWriter(self.systemConfig) utow(data) class PickleOutputJob(Job): ''' An output job profile that saves results to a projnm.pickle file ''' def saveData(self, data): with open(self.projnm, 'wb') as fp: pickler = pickle.Pickler(fp) pickler.dump(data) class OmegaIOJob(IniInputJob, UtoutOutputJob): ''' An input/output job profile that emulates Omega ''' 
class OmegaJob(IsotropicVisco2DJob, ForwardModelingJob, OmegaIOJob): ''' A 2D viscoacoustic parallel job on the local machine. Roughly equivalent to the default behaviour of OMEGA. ''' class PythonUtoutJob(IsotropicVisco2DJob, ForwardModelingJob, PythonInputJob, UtoutOutputJob): ''' A 2D viscoacoustic parallel job on the local machine. Constructs systemConfig from a Python file, but outputs to projnm.utout. ''' class AnisoOmegaJob(AnisotropicVisco2DJob, ForwardModelingJob, OmegaIOJob): ''' A 2D viscoacoustic parallel job on the local machine. Roughly equivalent to the default behaviour of OMEGA. Replaces isotropic solver with TTI anisotropic solver. ''' class AnisoPythonUtoutJob(AnisotropicVisco2DJob, ForwardModelingJob, PythonInputJob, UtoutOutputJob): ''' A 2D viscoacoustic parallel job on the local machine. Constructs systemConfig from a Python file, but outputs to projnm.utout. '''
zephyr-seis
/zephyr-seis-0.1.7.tar.gz/zephyr-seis-0.1.7/zephyr/frontend/jobs.py
jobs.py
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()

import click


@click.group()
@click.version_option()
def zephyr():
    '''A command-line interface for Zephyr'''


@click.command()
@click.argument('projnm')
@click.confirmation_option(prompt='Are you sure you want to clean project outputs?')
def clean(projnm):
    '''Clean up project results / outputs'''

    print('Cleaning up project!')
    print('projnm: \t%s'%projnm)
zephyr.add_command(clean)


@click.command()
@click.argument('projnm')
@click.option('--storage', type=click.Choice(['dir', 'hdf5']), default='dir')
@click.option('--fromini', type=click.File())
def init(projnm, storage, fromini):
    '''Set up a new modelling or inversion project'''

    print('Initializing project!')
    print('projnm: \t%s'%projnm)
    print('storage:\t%s'%storage)
    if fromini is not None:
        print('fromini:\t%s'%fromini.read())
zephyr.add_command(init)


@click.command()
@click.argument('projnm')
def invert(projnm):
    '''Run an inversion project'''

    print('Running project!')
    print('projnm: \t%s'%projnm)
zephyr.add_command(invert)


@click.command()
@click.argument('projnm')
def inspect(projnm):
    '''Print information about an existing project'''

    print('Information about an existing project!')
    print('projnm: \t%s'%projnm)
zephyr.add_command(inspect)


@click.command()
@click.argument('projnm')
def migrate(projnm):
    '''Run a migration'''

    print('Running project!')
    print('projnm: \t%s'%projnm)
zephyr.add_command(migrate)


@click.command()
@click.argument('projnm')
@click.option('--job', default='OmegaJob', help='The job to run')
def model(projnm, job):
    '''Run a forward model'''

    from . import jobs

    # Resolve the requested job class by name and validate it explicitly.
    # (The original used `assert issubclass(...)`, which is stripped under
    # `python -O` and raised a bare AttributeError for unknown names.)
    jClass = getattr(jobs, job, None)
    if not (isinstance(jClass, type) and issubclass(jClass, jobs.Job)):
        raise click.BadParameter('%r is not a Job class' % job, param_hint='--job')

    j = jClass(projnm)
    j.run()
zephyr.add_command(model)


@click.command()
@click.argument('projnm')
def pack(projnm):
    '''Collect configuration into an HDF5 datafile'''

    print('Collecting project!')
    print('projnm: \t%s'%projnm)
zephyr.add_command(pack)


@click.command()
@click.argument('projnm')
def unpack(projnm):
    '''Extract configuration from an HDF5 datafile'''

    print('Extracting project!')
    print('projnm: \t%s'%projnm)
zephyr.add_command(unpack)


if __name__ == "__main__":
    zephyr()
zephyr-seis
/zephyr-seis-0.1.7.tar.gz/zephyr-seis-0.1.7/zephyr/frontend/cli.py
cli.py
from __future__ import division, unicode_literals, print_function, absolute_import from future import standard_library standard_library.install_aliases() from builtins import object import warnings import numpy as np from scipy.special import hankel1 class AnalyticalHelmholtz(object): ''' An implementation of the analytical Helmholtz system, with additional support for the tilted elliptical case by way of coordinate stretching. ''' def __init__(self, systemConfig): 'Initialize using a systemConfig' self.omega = 2 * np.pi * systemConfig['freq'] self.c = systemConfig['c'] self.rho = systemConfig.get('rho', 1.) self.k = self.omega / self.c self.stretch = 1. / (1 + (2.*systemConfig.get('eps', 0.))) self.theta = systemConfig.get('theta', 0.) self.scaleterm = systemConfig.get('scaleterm', 0.5) xorig = systemConfig.get('xorig', 0.) zorig = systemConfig.get('zorig', 0.) dx = systemConfig.get('dx', 1.) dz = systemConfig.get('dz', 1.) nx = systemConfig['nx'] nz = systemConfig['nz'] self._z, self._x = np.mgrid[ zorig:zorig+dz*nz:dz, xorig:xorig+dz*nx:dx ] if systemConfig.get('3D', False): self.Green = self.Green3D else: self.Green = self.Green2D def Green2D(self, r): 'Model the 2D Green\'s function' # Correct: -0.5j * hankel2(0, self.k*r) return self.scaleterm * self.rho * (-0.5j * hankel1(0, self.k*r)) def Green3D(self, r): 'Model the 3D Green\'s function' # Correct: (1./(4*np.pi*r)) * np.exp(-1j*self.k*r) return self.scaleterm * self.rho * (1./(4*np.pi*r)) * np.exp(1j*self.k*r) def __call__(self, q): 'Model the appropriate Green\'s function, given a source location' x = q[0,0] z = q[0,-1] dx = self._x - x dz = self._z - z dist = np.sqrt(dx**2 + dz**2) with warnings.catch_warnings(): warnings.simplefilter('ignore') strangle = np.arctan(dz / dx) + self.theta stretch = np.sqrt(self.stretch * np.cos(strangle)**2 + np.sin(strangle)**2) return np.nan_to_num(self.Green(dist * stretch)).ravel() def __mul__(self, q): 'Pretend to be a matrix' return self(q)
zephyr-seis
/zephyr-seis-0.1.7.tar.gz/zephyr-seis-0.1.7/zephyr/backend/analytical.py
analytical.py