import copy
from dataclasses import is_dataclass, dataclass, Field
from itertools import zip_longest
from typing import TypeVar, Type, Optional, Mapping, Any, Union, List
from typing_extensions import TypeAlias
from .config import Config
from .data import Data, DictData
from .dataclasses import (
get_default_value_for_field,
create_instance,
DefaultValueNotFoundError,
get_fields,
)
from .exceptions import (
ForwardReferenceError,
WrongTypeError,
DaciteError,
UnionMatchError,
MissingValueError,
DaciteFieldError,
UnexpectedDataError,
StrictUnionMatchError,
AggregatedError,
)
from .types import (
is_instance,
is_generic_collection,
is_union,
extract_generic,
is_optional,
transform_value,
extract_origin_collection,
is_init_var,
extract_init_var,
is_set,
get_data_class_hints,
)
T = TypeVar("T")
SKIP_FLAG = object()  # sentinel: tells the caller to let the dataclass supply the value
def cast_field(data: Data, field: Field, config: Config) -> Any:
if field.name in data:
try:
field_data = data[field.name]
transformed_value = transform_value(
type_hooks=config.type_hooks,
cast=config.cast,
target_type=field.type,
value=field_data,
)
value = _build_value(
type_=field.type, data=transformed_value, config=config
)
except AggregatedError as e:
for err in e.errors:
err.update_path(field.name)
raise
except DaciteFieldError as error:
error.update_path(field.name)
raise
if config.check_types and not is_instance(value, field.type):
raise WrongTypeError(
field_path=field.name, field_type=field.type, value=value
)
return value
elif not field.init:
# If the non-init field isn't in the dict, let the dataclass handle default, to ensure
# we won't get errors in the case of frozen dataclasses, as issue #195 highlights.
return SKIP_FLAG
else:
try:
return get_default_value_for_field(
field,
allow_missing_fields_as_none=config.allow_missing_fields_as_none,
)
except DefaultValueNotFoundError:
raise MissingValueError(field.name)
def from_dict(data_class: Type[T], data: Data, config: Optional[Config] = None) -> T:
"""Create a data class instance from a dictionary.
:param data_class: a data class type
    :param data: a dictionary of input data
:param config: a configuration of the creation process
:return: an instance of a data class
"""
init_values: DictData = {}
post_init_values: DictData = {}
config = config or Config()
errors = []
try:
data_class_hints = get_data_class_hints(
data_class, globalns=config.forward_references
)
except NameError as error:
raise ForwardReferenceError(str(error))
data_class_fields = get_fields(data_class)
if config.strict:
extra_fields = set(data.keys()) - {f.name for f in data_class_fields}
if extra_fields:
raise UnexpectedDataError(keys=extra_fields)
for field in data_class_fields:
field = copy.copy(field)
field.type = data_class_hints[field.name]
        # Wrap in try/except so we can collect failures from multiple fields at a time
try:
value = cast_field(data, field, config)
            if value is SKIP_FLAG:
                continue
if field.init:
init_values[field.name] = value
else:
post_init_values[field.name] = value
except AggregatedError as e:
errors.extend(e.errors)
except DaciteFieldError as e:
errors.append(e)
if len(errors) > 0:
raise AggregatedError(errors)
return create_instance(
data_class=data_class,
init_values=init_values,
post_init_values=post_init_values,
)
def _build_value(type_: Type, data: Any, config: Config) -> Any:
if is_init_var(type_):
type_ = extract_init_var(type_)
if is_union(type_):
return _build_value_for_union(union=type_, data=data, config=config)
elif is_generic_collection(type_):
origin = extract_origin_collection(type_)
if is_instance(data, origin):
return _build_value_for_collection(
collection=type_, data=data, config=config
)
if is_set(origin):
return origin(
_build_value(
type_=extract_generic(type_)[0], data=single_val, config=config
)
for single_val in data
)
elif is_dataclass(type_) and is_instance(data, Data):
if hasattr(type_, "from_dict"):
return type_.from_dict(data=data, config=config)
return from_dict(data_class=type_, data=data, config=config)
return data
def _build_value_for_union(union: Type, data: Any, config: Config) -> Any:
types = extract_generic(union)
if is_optional(union) and len(types) == 2:
return _build_value(type_=types[0], data=data, config=config)
union_matches = {}
for inner_type in types:
try:
# noinspection PyBroadException
try:
data = transform_value(
type_hooks=config.type_hooks,
cast=config.cast,
target_type=inner_type,
value=data,
)
            except Exception:  # pylint: disable=broad-except
continue
value = _build_value(type_=inner_type, data=data, config=config)
if is_instance(value, inner_type):
if config.strict_unions_match:
union_matches[inner_type] = value
else:
return value
        except DaciteError:
            continue
    if config.strict_unions_match:
        if len(union_matches) > 1:
            raise StrictUnionMatchError(union_matches)
        if union_matches:
            return union_matches.popitem()[1]
if not config.check_types:
return data
raise UnionMatchError(field_type=union, value=data)
def _build_value_for_collection(collection: Type, data: Any, config: Config) -> Any:
data_type = data.__class__
if is_instance(data, Mapping):
item_type = extract_generic(collection, defaults=(Any, Any))[1]
return data_type(
(key, _build_value(type_=item_type, data=value, config=config))
for key, value in data.items()
)
elif is_instance(data, tuple):
if not data:
return data_type()
types = extract_generic(collection)
if len(types) == 2 and types[1] == Ellipsis:
return data_type(
_build_value(type_=types[0], data=item, config=config) for item in data
)
return data_type(
_build_value(type_=type_, data=item, config=config)
for item, type_ in zip_longest(data, types)
)
item_type = extract_generic(collection, defaults=(Any,))[0]
return data_type(
_build_value(type_=item_type, data=item, config=config) for item in data
)
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/validation/dacite_internal/core.py
from typing import Any, Type, Optional, Set, Dict, List
from .types import is_union
def _name(type_: Type) -> str:
return (
type_.__name__
if hasattr(type_, "__name__") and not is_union(type_)
else str(type_)
)
class DaciteError(Exception):
pass
class DaciteFieldError(DaciteError):
def __init__(self, field_path: Optional[str] = None):
super().__init__()
self.field_path = field_path
def update_path(self, parent_field_path: str) -> None:
if self.field_path:
self.field_path = f"{parent_field_path}.{self.field_path}"
else:
self.field_path = parent_field_path
class WrongTypeError(DaciteFieldError):
def __init__(
self, field_type: Type, value: Any, field_path: Optional[str] = None
) -> None:
super().__init__(field_path=field_path)
self.field_type = field_type
self.value = value
def __str__(self) -> str:
return (
f'wrong value type for field "{self.field_path}" - should be "{_name(self.field_type)}" '
f'instead of value "{self.value}" of type "{_name(type(self.value))}"'
)
class MissingValueError(DaciteFieldError):
def __init__(self, field_path: Optional[str] = None):
super().__init__(field_path=field_path)
def __str__(self) -> str:
return f'missing value for field "{self.field_path}"'
class UnionMatchError(WrongTypeError):
def __str__(self) -> str:
return (
f'can not match type "{_name(type(self.value))}" to any type '
f'of "{self.field_path}" union: {_name(self.field_type)}'
)
class StrictUnionMatchError(DaciteFieldError):
def __init__(
self, union_matches: Dict[Type, Any], field_path: Optional[str] = None
) -> None:
super().__init__(field_path=field_path)
self.union_matches = union_matches
def __str__(self) -> str:
conflicting_types = ", ".join(_name(type_) for type_ in self.union_matches)
return f'can not choose between possible Union matches for field "{self.field_path}": {conflicting_types}'
class ForwardReferenceError(DaciteError):
def __init__(self, message: str) -> None:
super().__init__()
self.message = message
def __str__(self) -> str:
return f"can not resolve forward reference: {self.message}"
class UnexpectedDataError(DaciteError):
def __init__(self, keys: Set[str]) -> None:
super().__init__()
self.keys = keys
def __str__(self) -> str:
formatted_keys = ", ".join(f'"{key}"' for key in self.keys)
return f"can not match {formatted_keys} to any data class field"
class AggregatedError(DaciteError):
def __init__(self, errors: List[DaciteFieldError]):
super().__init__()
self.errors = errors
def add_errors(self, errors: List[DaciteFieldError]):
self.errors.extend(errors)
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/validation/dacite_internal/exceptions.py
import pprint
from dataclasses import asdict, dataclass
from enum import Enum
from typing import Callable, Generic, List, Literal, Optional, TypeVar, Union
import numpy as np
import scale_sensor_fusion_io.validation.dacite_internal as _dacite
from scale_json_binary import read_file
from scale_sensor_fusion_io.models import (
CameraDistortion,
CameraIntrinsics,
CameraSensor,
LidarSensor,
LidarSensorFrame,
LidarSensorPoints,
PosePath,
RadarSensor,
RadarSensorFrame,
RadarSensorPoints,
Scene,
Sensor,
)
from scale_sensor_fusion_io.spec import SFS
from scale_sensor_fusion_io.validation.error import (
ErrorDetails,
ParseError,
ParseResult,
ParseSuccess,
PathField,
ValidationResult,
)
from scale_sensor_fusion_io.validation.helpers import (
convert_error,
handle_dacite,
is_strictly_increasing,
)
from typing_extensions import TypeAlias
CFG = _dacite.Config(cast=[Enum, tuple])
_T = TypeVar("_T")
"""Parse and validate a sfs file"""
def _handle_result(
res: ParseResult[_T], error_details: List[ErrorDetails], path: List[PathField] = []
) -> Optional[_T]:
if res.success:
return res.data
else:
error_details.extend(
res.details if not path else res.prepend_path(path).details
)
return None
def parse_radar(sensor: dict) -> ParseResult[SFS.RadarSensor]:
error_details: List[ErrorDetails] = []
parsed_sensor = handle_dacite(
lambda: _dacite.from_dict(
data_class=SFS.RadarSensor,
data=sensor,
config=CFG,
),
error_details,
)
if parsed_sensor:
if len(parsed_sensor.frames) <= 0:
error_details.append(
ErrorDetails(path=["frames"], errors=["Must have at least one frame"])
)
for idx, frame in enumerate(parsed_sensor.frames):
"""
When reading via dacite, the numpy arrays aren't correctly shaped (since that's not included in the typedef)
Thus, we reshape all fields here
"""
frame.points.positions = frame.points.positions.reshape((-1, 3))
frame.points.directions = (
frame.points.directions.reshape((-1, 3))
if frame.points.directions is not None
else None
)
if (
frame.points.timestamps is not None
and frame.points.timestamps.dtype == np.uint64
):
error_details.append(
ErrorDetails(
path=["frames", idx, "points", "timestamps"],
errors=["Uint64 timestamps are not supported yet"],
)
)
if len(error_details) > 0:
return ParseError(details=error_details)
assert parsed_sensor is not None
return ParseSuccess(data=parsed_sensor)
def parse_lidar(sensor: dict) -> ParseResult[SFS.LidarSensor]:
error_details: List[ErrorDetails] = []
parsed_sensor = handle_dacite(
lambda: _dacite.from_dict(
data_class=SFS.LidarSensor,
data=sensor,
config=CFG,
),
error_details,
)
if parsed_sensor:
if len(parsed_sensor.frames) <= 0:
error_details.append(
ErrorDetails(path=["frames"], errors=["Must have at least one frame"])
)
for idx, frame in enumerate(parsed_sensor.frames):
"""
When reading via dacite, the numpy arrays aren't correctly shaped (since that's not included in the typedef)
Thus, we reshape all fields here
"""
frame.points.positions = frame.points.positions.reshape((-1, 3))
frame.points.colors = (
frame.points.colors.reshape((-1, 3))
if frame.points.colors is not None
else None
)
if (
frame.points.timestamps is not None
and frame.points.timestamps.dtype == np.uint64
):
error_details.append(
ErrorDetails(
path=["frames", idx, "points", "timestamps"],
errors=["Uint64 timestamps are not supported yet"],
)
)
if len(error_details) > 0:
return ParseError(details=error_details)
assert parsed_sensor is not None
return ParseSuccess(data=parsed_sensor)
def parse_sensor(sensor: dict) -> ParseResult[SFS.Sensor]:
error_details: List[ErrorDetails] = []
# type
sensor_type = sensor.get("type")
if not sensor_type:
error_details.append(ErrorDetails.missing_field("type"))
parsed_sensor: Optional[SFS.Sensor] = None
if sensor_type == "camera":
parsed_sensor = handle_dacite(
lambda: _dacite.from_dict(
data_class=SFS.CameraSensor,
data=sensor,
config=CFG,
),
error_details,
)
elif sensor_type == "lidar":
parsed_sensor = _handle_result(parse_lidar(sensor), error_details)
elif sensor_type == "radar":
parsed_sensor = _handle_result(parse_radar(sensor), error_details)
else:
error_details.append(
ErrorDetails(
path=["type"], errors=[f"Invalid sensor type provided: {sensor_type}"]
)
)
if len(error_details) > 0:
return ParseError(details=error_details)
assert parsed_sensor is not None
return ParseSuccess(data=parsed_sensor)
def parse_scene_as_sfs(raw_data: dict) -> ParseResult[SFS.Scene]:
"""
Parse raw dict as SFS.Scene
    A few notes:
* We use a modified version of dacite to allow for aggregating errors instead of failing fast
    * We also don't run _dacite.from_dict on the scene object directly, since it currently can't handle union types very elegantly
"""
error_details: List[ErrorDetails] = []
# version
version = raw_data.get("version")
if version is None:
return ParseError.missing_field("version")
if not raw_data["version"].startswith("1.0") and not raw_data["version"].startswith(
"5.1"
):
return ParseError.from_msg(
f"Invalid version provided: {raw_data['version']}", path=["version"]
)
# sensors
sensors = []
_sensors = raw_data.get("sensors")
if _sensors:
        if not isinstance(_sensors, list):
return ParseError.from_msg("Sensors must be a list", path=["sensors"])
for idx, sensor in enumerate(_sensors):
sensor = _handle_result(
parse_sensor(sensor), error_details, path=["sensors", idx]
)
if sensor:
sensors.append(sensor)
# time_offset
fields = SFS.Scene.__dataclass_fields__
time_offset = handle_dacite(
lambda: _dacite.cast_field(raw_data, field=fields["time_offset"], config=CFG), # type: ignore
error_details,
)
# time_unit
time_unit = handle_dacite(
lambda: _dacite.cast_field(raw_data, field=fields["time_unit"], config=CFG), # type: ignore
error_details,
)
# Additional scene level validations
if len(error_details) == 0:
scene = SFS.Scene(sensors=sensors, time_offset=time_offset, time_unit=time_unit)
return ParseSuccess(data=scene)
return ParseError(details=error_details)
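# Illustrative usage, assuming a local "scene.sfs" file readable by
# scale_json_binary's read_file (imported above):
#     result = parse_scene_as_sfs(read_file("scene.sfs"))
#     scene = result.data if result.success else None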
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/validation/parser/sfs.py
import numpy as np
import pandas as pd
from scipy.spatial.transform import Rotation
# Utils
def points_to_df(points):
positions = points['positions'].reshape(-1, 3)
return pd.DataFrame({
'x': positions[:, 0],
'y': positions[:, 1],
'z': positions[:, 2],
'intensity': points['intensities'],
'timestamp': points['timestamps'],
})
def df_to_points(df):
return {
'positions': np.float32(df[['x', 'y', 'z']]),
'intensities': np.uint8(df['intensity']),
'timestamps': np.uint32(df['timestamp'])
}
# Downsample functions
def voxel_downsample(df, voxel_size):
df = df.sample(frac=1)
df[['vx', 'vy', 'vz']] = (df[['x', 'y', 'z']] / voxel_size).astype(int)
return df.groupby(['vx', 'vy', 'vz']).mean()
def voxel_rand_downsample(df, voxel_size):
df = df.sample(frac=1)
df[['vx', 'vy', 'vz']] = (df[['x', 'y', 'z']] / voxel_size).astype(int)
return df.groupby(['vx', 'vy', 'vz']).first()
def hypervoxel_rand_downsample(df, voxel_size, time_window):
df = df.sample(frac=1)
df[['vx', 'vy', 'vz']] = (df[['x', 'y', 'z']] / voxel_size).astype(int)
df['vt'] = (df['timestamp'] / time_window).astype(int)
return df.groupby(['vx', 'vy', 'vz', 'vt']).first()
def random_downsample(df, ratio: float):
return df.sample(int(len(df) * ratio))
# Scene downsample
def take_timestamp(elem):
return elem['timestamp']
def downsample_scene(scene: dict, downsampler):
total_points = 0
downsampled_points = 0
for sensor in scene['sensors']:
if sensor['type'] == 'lidar':
sensor['frames'] = sorted(sensor['frames'][:200], key=take_timestamp)
for frame in sensor['frames']:
points_df = points_to_df(frame['points'])
downsampled_df = downsampler(points_df)
frame['points'] = df_to_points(downsampled_df)
total_points += len(points_df)
downsampled_points += len(downsampled_df)
print(f"Downsampled {total_points} to {downsampled_points} ({downsampled_points/total_points:.04f})")
return scene
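# Illustrative usage: downsample every lidar frame onto a 0.2m voxel grid.
# The scene dict layout is assumed to match the lidar format handled above.
#     scene = downsample_scene(scene, lambda df: voxel_downsample(df, voxel_size=0.2))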
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/utils/downsample.py
import os
import numpy.typing as npt
from typing import Any, List, Iterable, Optional, Union
from dataclasses import dataclass
from subprocess import Popen, PIPE
from tqdm import tqdm
import ffmpeg
from turbojpeg import TurboJPEG
turbo_jpeg = TurboJPEG()
@dataclass
class VideoWriter:
target_file: str
fps: Optional[int] = 10
threads: Optional[int] = 0
crf: Optional[int] = 24
show_progress: Optional[bool] = True
count: int = 0
def build_cmd(self) -> List[str]:
params = [
"ffmpeg",
"-y",
"-hide_banner",
"-loglevel error",
"-f image2pipe",
f"-r {self.fps}",
"-i -",
"-vcodec libx264",
"-x264-params keyint=2:scenecut=0",
"-pix_fmt yuv420p",
f"-crf {self.crf}",
f"-r {self.fps}",
f"-threads {self.threads}",
"-preset fast",
self.target_file,
]
return [item for param in params for item in param.split(" ", 1)]
def encode(self, images: Iterable[Union[str, bytes]]) -> None:
cmd = self.build_cmd()
# start ffmpeg process
process = Popen(cmd, stdin=PIPE)
if process.stdin is None:
raise ValueError("Failed to open ffmpeg process")
if self.show_progress:
images = tqdm(images)
# stream images to ffmpeg
for image in images:
if isinstance(image, str):
with open(image, "rb") as fp:
image = fp.read()
process.stdin.write(image)
process.stdin.close()
# wait for process to finish
process.wait()
def get_video(self) -> bytes:
with open(self.target_file, "rb") as fp:
return fp.read()
def writeFrame(self, im: npt.NDArray) -> None:
self.jpeg_bytes.append(turbo_jpeg.encode(im, quality=95))
self.count += 1
def __enter__(self) -> "VideoWriter":
self.jpeg_bytes: List[bytes] = []
return self
def __exit__(self, exc_type: Any, exc_value: Any, exc_traceback: Any) -> None:
if self.count > 0:
self.encode(self.jpeg_bytes)
# Util function to generate a video from a list of images
def generate_video(
image_files: List[str],
target_file: str,
fps: Optional[int] = 10,
threads: Optional[int] = 0,
) -> None:
encoder = VideoWriter(target_file, fps, threads)
encoder.encode(image_files)
def write_audio_and_video(audio_file: str, video_file: str, output_file: str) -> None:
if not os.path.isfile(audio_file) or not os.path.isfile(video_file):
raise ValueError("Audio or video file does not exist")
input_video = ffmpeg.input(video_file)
input_audio = ffmpeg.input(audio_file)
ffmpeg.concat(input_video, input_audio, v=1, a=1).output(
output_file, loglevel="error"
).overwrite_output().run()
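# Illustrative usage: stream numpy frames into an H.264 video via the context
# manager (frames is assumed to be an iterable of HxWx3 uint8 arrays):
#     with VideoWriter("out.mp4", fps=15) as writer:
#         for frame in frames:
#             writer.writeFrame(frame)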
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/utils/generate_video.py
from enum import Enum
from typing import cast
import dacite
from scale_json_binary import JSONBinaryEncoder
import scale_sensor_fusion_io.models
from scale_sensor_fusion_io.model_converters import from_scene_spec_sfs
from ..spec import SFS
encoder = JSONBinaryEncoder()
def _fix_data_shape(scene: SFS.Scene) -> SFS.Scene:
"""
When reading via dacite, the numpy arrays aren't correctly shaped (since that's not included in the typedef)
This function fixes all the fields that need to be reshaped
"""
if scene.sensors:
for sensor in scene.sensors:
if sensor.type == "lidar":
for l_frame in sensor.frames:
l_frame.points.positions = l_frame.points.positions.reshape(-1, 3)
if l_frame.points.colors is not None:
l_frame.points.colors = l_frame.points.colors.reshape(-1, 3)
elif sensor.type == "radar":
for r_frame in sensor.frames:
r_frame.points.positions = r_frame.points.positions.reshape(-1, 3)
if r_frame.points.directions is not None:
r_frame.points.directions = r_frame.points.directions.reshape(
-1, 3
)
if r_frame.points.lengths is not None:
r_frame.points.lengths = r_frame.points.lengths.reshape(-1, 3)
elif sensor.type == "points":
sensor.points.positions = sensor.points.positions.reshape(-1, 3)
return scene
class SFSLoader:
def __init__(
self,
scene_url: str,
):
self.scene_url = scene_url
def load(self) -> scale_sensor_fusion_io.models.Scene:
scene_sfs = self.load_as_sfs()
return from_scene_spec_sfs(scene_sfs)
def load_as_sfs(self) -> SFS.Scene:
raw_data: bytes
with open(self.scene_url, "rb") as fd:
raw_data = cast(bytes, fd.read())
obj = encoder.loads(raw_data)
if "version" not in obj or not obj["version"].startswith("1.0"):
raise Exception(f"Cannot load scene with version {obj['version']}")
scene_bs5 = dacite.from_dict(
data_class=SFS.Scene,
data=obj,
config=dacite.Config(
cast=[Enum, tuple],
),
)
scene = _fix_data_shape(scene_bs5)
return scene
def load_unsafe(self) -> dict:
"""
Loads the scene as a typed dict without doing any validation or parsing. It just hackily casts to the TypedDict representation of the scene spec.
        This is primarily useful for quick scripting where you may want to fix a previously created, malformed scene.
"""
with open(self.scene_url, "rb") as fd:
raw_data = cast(bytes, fd.read())
obj = encoder.loads(raw_data)
if "version" not in obj or not obj["version"].startswith("1.0"):
raise Exception(f"Cannot load scene with version {obj['version']}")
return obj
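# Illustrative usage (the path is a placeholder):
#     scene = SFSLoader("/path/to/scene.sfs").load()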
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/loaders/sfs_loader.py
from enum import Enum
from typing import Optional, cast
import dacite
from scale_json_binary import JSONBinaryEncoder
from ..spec import BS5
encoder = JSONBinaryEncoder()
def _fix_data_shape(scene: BS5.Scene) -> BS5.Scene:
"""
When reading via dacite, the numpy arrays aren't correctly shaped (since that's not included in the typedef)
This function fixes all the fields that need to be reshaped
"""
if scene.sensors:
for sensor in scene.sensors:
if sensor.type == "lidar":
for l_frame in sensor.frames:
l_frame.points.positions = l_frame.points.positions.reshape(-1, 3)
if l_frame.points.colors is not None:
l_frame.points.colors = l_frame.points.colors.reshape(-1, 3)
elif sensor.type == "radar":
for r_frame in sensor.frames:
r_frame.points.positions = r_frame.points.positions.reshape(-1, 3)
if r_frame.points.directions is not None:
r_frame.points.directions = r_frame.points.directions.reshape(
-1, 3
)
if r_frame.points.lengths is not None:
r_frame.points.lengths = r_frame.points.lengths.reshape(-1, 3)
elif sensor.type == "points":
sensor.points.positions = sensor.points.positions.reshape(-1, 3)
return scene
class BS5Loader:
def __init__(
self,
scene_url: str,
):
self.scene_url = scene_url
def load(self) -> BS5.Scene:
raw_data: bytes
with open(self.scene_url, "rb") as fd:
raw_data = cast(bytes, fd.read())
obj = encoder.loads(raw_data)
if "version" not in obj or not obj["version"].startswith("5.1"):
raise Exception(f"Cannot load scene with version {obj['version']}")
scene_bs5 = dacite.from_dict(
data_class=BS5.Scene,
data=obj,
config=dacite.Config(
cast=[Enum, tuple],
),
)
scene = _fix_data_shape(scene_bs5)
return scene
def load_as_bs5(self) -> BS5.Scene:
raw_data: bytes
with open(self.scene_url, "rb") as fd:
raw_data = cast(bytes, fd.read())
obj = encoder.loads(raw_data)
if "version" not in obj or not obj["version"].startswith("5."):
raise Exception(f"Cannot load scene with version {obj['version']}")
scene_bs5 = dacite.from_dict(
data_class=BS5.Scene,
data=obj,
config=dacite.Config(cast=[Enum, tuple]),
)
return scene_bs5
/scale_sensor_fusion_io-0.3.2-py3-none-any.whl/scale_sensor_fusion_io/loaders/bs5_loader.py
import logging
import json
from typing import Optional
LOG_MESSAGES = {
"PathNotFound": "The file path does not exist.",
"UnreadableFile": "The file was not readable.",
"MalformedJson": "The trigger(s) definition has malformed JSON.",
"InvalidType": "The trigger definition is an invalid input type.",
"InvalidAutotagDims": "The autotag could not be evaluated because of a dimension mismatch.",
"InvalidSCQLSyntax": "The trigger definition is valid JSON but has invalid SCQL.",
"IncompleteTriggerState": "A required field to evaluate the trigger is missing.",
}
_LogRecordDefaultAttributes = {
"name",
"msg",
"args",
"levelname",
"levelno",
"pathname",
"filename",
"module",
"exc_info",
"exc_text",
"stack_info",
"lineno",
"funcName",
"created",
"msecs",
"relativeCreated",
"thread",
"threadName",
"processName",
"process",
"message",
"asctime",
}
BASIC_FORMAT = {
"timestamp": "asctime",
}
class PercentStyle(object):
asctime_format = "%(asctime)s"
asctime_search = "%(asctime)"
def __init__(self):
self._fmt = ""
def usesTime(self):
return self._fmt.find(self.asctime_search) >= 0
def format(self, record):
return self._fmt % record.__dict__
class JsonFormatter(logging.Formatter):
"""
Converts a LogRecord to a JSON string.
"""
def __init__(
self,
fmt: dict = BASIC_FORMAT,
style: str = "%",
datefmt: str = "%Y-%m-%dT%H:%M:%S%Z",
):
logging.Formatter.__init__(self, fmt="", datefmt=datefmt, style=style)
self.json_fmt = fmt
self._style = PercentStyle()
self._style._fmt = ""
def setRecordMessage(self, record: logging.LogRecord) -> None:
if isinstance(record.msg, (int, float, bool, type(None))):
# keep these types without quote when output
record.message = record.msg
else:
record.message = str(record.msg)
if record.args:
record.message = record.getMessage()
if record.exc_info and not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
def _add_newline_if_missing(message):
message = str(message)
if message[-1:] != "\n":
message += "\n"
return message
if record.exc_text:
record.message = _add_newline_if_missing(record.message)
record.message += record.exc_text
if getattr(record, "stack_info", None):
record.message = _add_newline_if_missing(record.message)
record.message += self.formatStack(record.stack_info)
def getRecordExtraAttrs(self, record: logging.LogRecord) -> dict:
extras = {
k: record.__dict__[k]
for k in record.__dict__
if k not in _LogRecordDefaultAttributes
}
return extras
def formatMessage(self, record: logging.LogRecord) -> str:
return self._style.format(record)
def format(self, record: logging.LogRecord) -> str:
def _set_extra_to_result():
for k, v in extra.items():
if k not in self.json_fmt:
result[k] = v
def _set_fmt_to_result():
if v in record.__dict__:
result[k] = getattr(record, v, None)
else:
self._style._fmt = v
result[k] = self.formatMessage(record)
result = {}
self.setRecordMessage(record)
record.asctime = self.formatTime(record, self.datefmt)
extra = record.__dict__.pop("__extra", None) or record.__dict__.pop(
"_JsonFormatter__extra", None
)
if extra is None:
extra = self.getRecordExtraAttrs(record)
for k, v in self.json_fmt.items():
if k in extra:
result[k] = extra[k]
else:
_set_fmt_to_result()
_set_extra_to_result()
record.__extra = extra
return json.dumps(result)
class Logger:
def __init__(self, name: str, filename: str) -> None:
self.logger = logging.getLogger(name)
self.logger.setLevel(logging.INFO)
formatter = JsonFormatter()
sh = logging.FileHandler(filename)
sh.setFormatter(formatter)
self.logger.addHandler(sh)
    def get_extras(self, type: str, message: str, extra: Optional[dict] = None) -> dict:
        # Copy so we never mutate the caller's dict (or a shared mutable default)
        extra = dict(extra) if extra else {}
        log_info = {"message": message}
        if extra.get("log_info"):
            log_info.update(extra.pop("log_info"))
        extras = {"type": type, "log_info": log_info}
        extras.update(extra)
        return extras
    def info(self, message: str, extra: Optional[dict] = None) -> None:
extras = self.get_extras("INFO", message, extra)
self.logger.info(
message,
extra=extras,
)
    def warn(self, message: str, extra: Optional[dict] = None) -> None:
extras = self.get_extras("WARN", message, extra)
self.logger.warning(
message,
extra=extras,
)
    def error(self, message: str, extra: Optional[dict] = None) -> None:
extras = self.get_extras("ERROR", message, extra)
self.logger.error(
message,
extra=extras,
)
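# Illustrative usage:
#     logger = Logger("smartcapture_client", "sc_log.log")
#     logger.info("Triggers loaded", extra={"log_info": {"name": "TriggersLoaded"}})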
/scale-smartcapture-0.5.tar.gz/scale-smartcapture-0.5/smartcapture/logger.py
import base64
import json
import requests
from typing import List, Union, Tuple
from .trigger import Trigger
from .logger import Logger, LOG_MESSAGES
from .constants import SMARTCAPTURE_ENDPOINT
class SmartCaptureClient:
def __init__(
self,
device_name: str,
api_key: str = "",
externally_hosted: bool = False,
log_file: str = "sc_log.log",
) -> None:
"""
Initializes the SmartCaptureClient.
Args:
device_name: Name of the device registered with SmartCapture.
api_key: API key to be used to authenticate with the SmartCapture server.
externally_hosted: If true, the client will not attempt to fetch device or trigger
information from the SmartCapture server.
log_file: Path of the log file to write to.
        """
if not api_key and not externally_hosted:
raise Exception(
"API key must be provided when using Scale SmartCapture Server"
)
self.logger = Logger("smartcapture_client", log_file)
self.api_key = api_key
self.encoded_key = base64.b64encode((self.api_key + ":").encode()).decode()
self.device_name = device_name
self.triggers = []
if not externally_hosted:
self.device_id = self._get_device_id()
def _get_device_id(self) -> str:
"""
Gets the device id from the SmartCapture server.
Returns:
            device_id: Device id registered with SmartCapture.
"""
resp = requests.get(
f"{SMARTCAPTURE_ENDPOINT}/all_devices",
headers={"Authorization": "Basic " + self.encoded_key},
)
for device in resp.json():
if device["name"] == self.device_name:
return device["id"]
raise Exception("Device not registered on smart capture")
def _deserialize_triggers(self, trigger_config: dict) -> List[Trigger]:
"""
Deserializes the trigger configuration into a list of Trigger objects.
Args:
trigger_config: Trigger configuration JSON object to be deserialized.
Returns:
trigger_objects: List of Trigger objects.
"""
last_activations = {
trigger.trigger_id: trigger.last_activation for trigger in self.triggers
}
triggers = trigger_config.get("triggers", [])
autotags = trigger_config.get("autotags", [])
autotag_data = {}
for autotag in autotags:
id = autotag["id"]
autotag_data[id] = autotag[id]
trigger_objects = []
for trigger in triggers:
metadata = {
"sample_rate": trigger["sample_rate"],
"dataset_id": trigger["dataset_id"],
"scql_version": trigger["scql_version"],
"autotags": autotag_data,
}
trigger_object = Trigger(
trigger["id"],
json.dumps(trigger["predicate"]),
metadata,
self.logger,
)
if trigger["id"] in last_activations:
trigger_object.last_activation = last_activations[trigger["id"]]
trigger_objects.append(trigger_object)
return trigger_objects
def load(self, trigger_config: Union[str, dict]) -> int:
"""
Loads triggers from json object or json file on device and prepares them for evaluation.
Args:
            trigger_config: String containing the path to the JSON file with serialized triggers, or
json object with serialized triggers.
Returns:
status code: 0 if successful or error code if not.
"""
        if isinstance(trigger_config, str):
            try:
                with open(trigger_config) as fp:
                    data = json.load(fp)
except FileNotFoundError:
self.logger.error(
LOG_MESSAGES["PathNotFound"],
extra={"log_info": {"name": "PathNotFound"}},
)
return 101
except PermissionError:
self.logger.error(
LOG_MESSAGES["UnreadableFile"],
extra={"log_info": {"name": "UnreadableFile"}},
)
return 102
except json.decoder.JSONDecodeError:
self.logger.error(
LOG_MESSAGES["MalformedJSON"],
extra={"log_info": {"name": "MalformedJSON"}},
)
return 201
self.triggers = self._deserialize_triggers(data)
        elif isinstance(trigger_config, dict):
self.triggers = self._deserialize_triggers(trigger_config)
else:
self.logger.error(
LOG_MESSAGES["InvalidType"],
extra={"log_info": {"name": "InvalidType"}},
)
return 204
self.logger.info(
f"Loaded {len(self.triggers)} triggers.",
extra={"log_info": {"name": "TriggersLoaded"}},
)
return 0
def evaluate(self, state: dict) -> List[Tuple[str, bool, str, int]]:
"""
Evaluates the triggers against the input state dictionary.
Args:
state: State to be evaluated against the triggers.
Returns:
List of tuples corresponding to triggers containing the trigger id, the result of the
evaluation, the dataset id, and the status code.
"""
trigger_results = []
for trigger in self.triggers:
result, status_code = trigger.evaluate(state)
trigger_results.append(
(trigger.trigger_id, result, trigger.dataset_id, status_code)
)
return trigger_results
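# Illustrative usage (device name, trigger file, and state fields are hypothetical):
#     client = SmartCaptureClient("my-device", api_key="...")
#     if client.load("triggers.json") == 0:
#         results = client.evaluate({"speed": 42.0})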
/scale-smartcapture-0.5.tar.gz/scale-smartcapture-0.5/smartcapture/client.py
import json
import time
from typing import Tuple
from .scql import SCQLPredicate
from .logger import Logger, LOG_MESSAGES
class Trigger:
def __init__(
self, trigger_id: str, predicate: str, metadata: dict, logger: Logger
) -> None:
"""
Args:
trigger_id: Unique identifier to identify triggers to be used
predicate: Smart Capture Query predicate
metadata: Additional data required to evaluate if a trigger should be activated.
For example:
{
"autotags": {
"tag_1": [0.5, 0.3, ...]
},
"sample_rate": 10,
}
"""
self.trigger_id = trigger_id
self.autotags = metadata.get("autotags", {})
self.sample_rate = metadata.get("sample_rate", 0)
self.dataset_id = metadata.get("dataset_id", "")
self.scql_version = metadata.get("scql_version", "1.0")
self.predicate = SCQLPredicate(json.loads(predicate), self.autotags, self)
self.last_activation = 0
self.logger = logger
def evaluate(self, state: dict) -> Tuple[bool, int]:
"""
Evaluates if a trigger has been activated given the device state using the predicate
Args:
state: Dictionary containing state data of the device.
Returns:
Tuple containing:
result: boolean indicating if the trigger has been activated.
status_code: 0 if successful or error code if not.
"""
if time.time() * 1000.0 < self.last_activation + self.sample_rate:
return False, 1001
try:
result, status_code = self.predicate.evaluate(state)
except KeyError as e:
self.logger.error(
LOG_MESSAGES["IncompleteTriggerState"],
extra={
"trigger_id": self.trigger_id,
"offending_field": e.args[0],
"log_info": {"name": "IncompleteTriggerState"},
},
)
return False, 302
if result:
self.last_activation = time.time() * 1000.0
return result, status_code
/scale-smartcapture-0.5.tar.gz/scale-smartcapture-0.5/smartcapture/trigger.py
import numpy as np
from typing import Any, Tuple, TYPE_CHECKING
from smartcapture.utils import getFromDict
from .logger import LOG_MESSAGES
if TYPE_CHECKING:
from smartcapture.trigger import Trigger
class SCQLPredicate:
def __init__(self, predicate: Any, autotags: dict, trigger: "Trigger"):
self.predicate = predicate
self.autotags = autotags
self.trigger = trigger
def evaluate(self, state: dict) -> Tuple[Any, int]:
        if not isinstance(self.predicate, dict):
return self.predicate, 0
if len(self.predicate) == 1:
key = list(self.predicate)[0]
if key == "$and":
return ANDExpression(
self.predicate["$and"], self.autotags, self.trigger
).evaluate(state)
elif key == "$or":
return ORExpression(
self.predicate["$or"], self.autotags, self.trigger
).evaluate(state)
elif key == "$not":
return not SCQLPredicate(
self.predicate["$not"], self.autotags, self.trigger
).evaluate(state)
else:
return ConditionOnField(
key, self.predicate[key], self.autotags, self.trigger
).evaluate(state)
else: # default to AND predicate mimicking MongoQL behavior
and_predicate = [{k: v} for k, v in self.predicate.items()]
return ANDExpression(and_predicate, self.autotags, self.trigger).evaluate(
state
)
class ANDExpression(SCQLPredicate):
    def evaluate(self, state: dict) -> Tuple[bool, int]:
for condition in self.predicate:
value, status_code = SCQLPredicate(
condition, self.autotags, self.trigger
).evaluate(state)
if status_code != 0:
return False, status_code
if value is False:
return False, 0
return True, 0
class ORExpression(SCQLPredicate):
    def evaluate(self, state: dict) -> Tuple[bool, int]:
for condition in self.predicate:
value, status_code = SCQLPredicate(
condition, self.autotags, self.trigger
).evaluate(state)
if status_code != 0:
return False, status_code
if value is True:
return True, 0
return False, 0
class ConditionOnField:
def __init__(
self, field: str, predicate: dict, autotags: dict, trigger: "Trigger"
    ) -> None:
self.field = field.split(".")
self.predicate = predicate
self.autotags = autotags
self.trigger = trigger
def evaluate(self, state):
field_value = getFromDict(state, self.field)
key = list(self.predicate)[0]
if key == "$eq":
return field_value == self.predicate[key], 0
elif key == "$neq":
return field_value != self.predicate[key], 0
elif key == "$gt":
return field_value > self.predicate[key], 0
elif key == "$gte":
return field_value >= self.predicate[key], 0
elif key == "$lt":
return field_value < self.predicate[key], 0
elif key == "$lte":
return field_value <= self.predicate[key], 0
elif key == "$in":
return field_value in self.predicate[key], 0
elif key == "$autotag":
autotag_id, threshold = self.predicate[key][0], self.predicate[key][1]
autotag_coefficients = self.autotags[autotag_id]
try:
autotag_value = np.dot(field_value, autotag_coefficients)
return bool(autotag_value >= threshold), 0
except ValueError:
self.trigger.logger.error(
LOG_MESSAGES["InvalidAutotagDims"],
extra={
"trigger_id": self.trigger.trigger_id,
"offending_field": ".".join(self.field),
"offending_value": str(self.predicate[key]),
"log_info": {"name": "InvalidAutotagDims"},
},
)
return False, 307
else:
self.trigger.logger.error(
LOG_MESSAGES["InvalidSCQLSyntax"],
extra={
"trigger_id": self.trigger.trigger_id,
"log_info": {"name": "InvalidSCQLSyntax"},
},
)
return False, 202
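# Illustrative predicate (field names are hypothetical): activates when speed
# exceeds 20 and the region is one of the listed values.
#     {"$and": [{"speed": {"$gt": 20}}, {"region": {"$in": ["sf", "nyc"]}}]}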
/scale-smartcapture-0.5.tar.gz/scale-smartcapture-0.5/smartcapture/scql.py
# Python SCALE Codec
[](https://github.com/polkascan/py-scale-codec/actions/workflows/unittests.yml?query=workflow%3A%22Run+unit+tests%22)
[](https://pypi.org/project/scalecodec/)
[](https://pypi.org/project/scalecodec/)
[](https://github.com/polkascan/py-scale-codec/blob/master/LICENSE)
## Description
[Substrate](https://github.com/paritytech/substrate) uses a lightweight and efficient [encoding and decoding program](https://docs.substrate.io/reference/scale-codec/) to optimize how data is sent and received over the network. The program used to serialize and deserialize data is called the SCALE codec, with SCALE being an acronym for **S**imple **C**oncatenated **A**ggregate **L**ittle-**E**ndian.
## Documentation
https://polkascan.github.io/py-scale-codec/
## Installation
```bash
pip install scalecodec
```
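As a quick sanity check of the values in the table below, here is a minimal encode/decode round trip. This is a sketch assuming the `RuntimeConfiguration.create_scale_object` API of recent `scalecodec` releases:
```python
from scalecodec.base import RuntimeConfiguration, ScaleBytes

# Encode a compact integer (matches the Compact row in the table below)
obj = RuntimeConfiguration().create_scale_object('Compact<u32>')
print(obj.encode(69))    # -> 0x1501

# Decode the same bytes back to a Python int
obj = RuntimeConfiguration().create_scale_object('Compact<u32>', ScaleBytes('0x1501'))
print(obj.decode())      # -> 69
```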
## Examples of different types
| Type | Description | Example SCALE decoding value | SCALE encoded value |
|------|-------------|------------------------------|---------------------|
| `bool` | Boolean values are encoded using the least significant bit of a single byte. | `True` | `0x01` |
| `u16` | Basic integers are encoded using a fixed-width little-endian (LE) format. | `42` | `0x2a00` |
| `Compact` | A "compact" or general integer encoding is sufficient for encoding large integers (up to 2**536) and is more efficient at encoding most values than the fixed-width version. (Though for single-byte values, the fixed-width integer is never worse.) | `0` | `0x00` |
| | | `1` | `0x04` |
| | | `42` | `0xa8` |
| | | `69` | `0x1501` |
| | | `100000000000000` | `0x0b00407a10f35a` |
| `Vec` | A collection of same-typed values is encoded, prefixed with a compact encoding of the number of items, followed by each item's encoding concatenated in turn. | `[4, 8, 15, 16, 23, 42]` | `0x18040008000f00100017002a00` |
| `BitVec` | A sequence of bools, represented in a more space efficient bit format. | `0b00000010_01111101` | `0x287d02` |
| `str`, `Bytes`, `String` | Strings are Vectors of bytes (`Vec<u8>`) containing a valid UTF8 sequence. | `"Test"` | `0x1054657374` |
| | | `b"Test"` | `0x1054657374` |
| | | `[84, 101, 115, 116]` | `0x1054657374` |
| `[u8; 4]` | Fixed sized array of, in this case, `u8`. | `b"babe"` | `0x62616265` |
| | | `"0x62616265"` | `0x62616265` |
| | | `[98, 97, 98, 101]` | `0x62616265` |
| `AccountId` | An [SS58 formatted](https://docs.substrate.io/reference/address-formats/) representation of an account. See also the [SS58 util functions](https://polkascan.github.io/py-scale-codec/utils/ss58.html). | `"5GDyPHLVHcQYPTWfygtPYeogQjyZy7J9fsi4brPhgEFq4pcv"` | `0xb80269ec500e458a630846b99105c397ee574125823d6f4388e9c7572e115c05` |
| `Enum` Example: `enum IntOrBool { Int(u8), Bool(bool) }` | A fixed number of variants, each mutually exclusive and potentially implying a further value or series of values. Encoded as the first byte identifying the index of the variant that the value is. Any further bytes are used to encode any data that the variant implies. Thus, no more than 256 variants are supported. | `{'Int': 42}` | `0x002a` |
| | | `{'Bool': True}` | `0x0101` |
| `Struct` Example: `struct Motion { pub votes: Vec<AccountId>, pub id: u32 }` | For structures, the values are named, but that is irrelevant for the encoding (names are ignored - only order matters). All containers store elements consecutively. The order of the elements is not fixed, depends on the container, and cannot be relied on at decoding. This implicitly means that decoding some byte-array into a specified structure that enforces an order and then re-encoding it could result in a different byte array than the original that was decoded. | `{"votes": ["5GDyPHLVHcQYPTWfygtPYeogQjyZy7J9fsi4brPhgEFq4pcv"], "id": 4}` | `0x04b80269ec500e458a630846b99105c397ee574125823d6f4388e9c7572e115c0504000000` |
## License
https://github.com/polkascan/py-scale-codec/blob/master/LICENSE
/scalecodec-1.2.6.tar.gz/scalecodec-1.2.6/README.md
from typing import Union
import numpy as np
import scipy.linalg
from scipy.linalg import cholesky
from scipy.sparse import csc_matrix, csr_matrix
from scipy.sparse.linalg import LinearOperator, aslinearoperator
from sklearn.decomposition import TruncatedSVD
from sklearn.utils.extmath import randomized_range_finder
__all__ = ["approximate", "Factor"]
SparseMatrix = Union[csc_matrix, csr_matrix]
class Factor(LinearOperator):
""" A LinearOperator object including how to apply its inverse action.
This subclasses LinearOperator to provide an interface which along with the
usual actions:
x -> L x,
        x -> L.T x,
    also provides:
x -> L^{-1} x,
x -> L^{-T} x.
The inverse action is computed using the `solve` argument (or the default).
Attributes:
        matrix: The wrapped matrix (dense array, sparse matrix, or LinearOperator).
solve: A method for solving Lx = b.
"""
def __init__(
self,
matrix: Union[np.ndarray, SparseMatrix, LinearOperator],
solve=None,
):
super().__init__(matrix.dtype, matrix.shape)
self.linear_operator = aslinearoperator(matrix)
self.matrix = matrix
self.shape = self.matrix.shape
self.is_sparse = scipy.sparse.issparse(matrix)
if solve is not None:
self._solve = solve
else:
self._solve = scipy.sparse.linalg.spsolve
def inv(self) -> LinearOperator:
"""
Returns a LinearOperator object given by the inverse action of `self`.
Allows composition of factors, e.g., the following is valid:
l = LinearOperator(...)
f = Factor(...)
a = f @ l @ f.inv()
"""
return LinearOperator(
shape=self.shape,
dtype=self.dtype,
matvec=self.solve,
rmatvec=self.rsolve,
)
def solve(self, x):
return self._solve(self.matrix, x)
def rsolve(self, x):
return self._solve(self.matrix.T, x)
def _matvec(self, x):
return self.linear_operator.matvec(x)
def _rmatvec(self, x):
return self.linear_operator.rmatvec(x)
def approximate(
linear_operator: LinearOperator,
algorithm: str,
rank_approx: int,
n_oversamples: int = 1,
n_power_iter: int = 0,
random_state=None,
):
"""
Computes a low rank approximation of a LinearOperator.
    For a LinearOperator L, the result is a tuple, (F, F.T), such that
L(I) ~ F @ F.T, where I is the identity matrix of appropriate dimension.
Args:
linear_operator: LinearOperator.
algorithm: Can be either 'truncated_svd', 'randomized' or 'nystrom'.
rank_approx: rank of the approximation (must be less than rank(X)).
n_oversamples: Oversampling parameter.
n_power_iter: Number of power iterations used in range finding.
random_state: Seed.
Returns:
The factors F, F.T of a low rank approximation of `X` as np.arrays of
size n x r and r x n, respectively.
Raises:
NotImplementedError: If `algorithm` is not recognised.
"""
if algorithm == "truncated_svd":
d, _ = linear_operator.shape
_matrix = linear_operator.dot(np.identity(d))
svd = TruncatedSVD(
n_components=rank_approx,
algorithm="arpack",
n_iter=n_power_iter,
n_oversamples=n_oversamples,
random_state=random_state,
)
Us = svd.fit_transform(_matrix)
return Us, svd.components_
elif algorithm == "randomized":
Q = randomized_range_finder(
linear_operator,
size=rank_approx + n_oversamples,
n_iter=n_power_iter,
random_state=random_state,
power_iteration_normalizer="QR",
)
qmq = Q.T @ (linear_operator @ Q)
s_rev, Uhat_rev = scipy.linalg.eigh(qmq)
Uhat = np.flip(Uhat_rev, axis=1)
s = np.flip(s_rev)
U = Q @ Uhat
return U[:, :rank_approx] * s[:rank_approx], U[:, :rank_approx].T
elif algorithm == "nystrom":
Q = randomized_range_finder(
linear_operator,
size=rank_approx + n_oversamples,
n_iter=n_power_iter,
random_state=random_state,
power_iteration_normalizer="QR",
)
B_1 = linear_operator @ Q
B_2 = Q.T @ B_1
C = cholesky(B_2, lower=True)
FT = scipy.linalg.solve(C, B_1.T)
return FT.T, FT
else:
raise NotImplementedError
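# Illustrative usage (shapes and seed are arbitrary): factor a small PSD operator.
#     import numpy as np
#     from scipy.sparse.linalg import aslinearoperator
#     A = np.random.default_rng(0).standard_normal((100, 10))
#     F, Ft = approximate(aslinearoperator(A @ A.T), "nystrom", rank_approx=10)
#     # A @ A.T is then approximately F @ Ft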
/scaled_preconditioners-0.1.1-py3-none-any.whl/scaled_preconditioners/approximation.py
import numpy as np
import scipy.linalg
import scipy.sparse
from scipy.sparse import identity as sparse_identity
from scipy.sparse.linalg import LinearOperator
from scaled_preconditioners.approximation import Factor, approximate
__all__ = ["compute_preconditioner", "Factor"]
def compute_preconditioner(
factor: Factor,
psd_term: LinearOperator,
algorithm: str,
rank_approx: int,
n_oversamples: int = 1,
n_power_iter: int = 0,
random_state: int = 0,
) -> LinearOperator:
"""
For a Hermitian matrix S = A + B, this method computes the preconditioner:
P = Q(I + X)Q^*,
    where X is a low rank approximation of G = Q^{-1} B Q^{-*}. The preconditioner
is provided as a `LinearOperator`. The type of approximation is given by the
`algorithm` parameter.
Args:
factor: a Factor object.
psd_term: a Symmetric positive semidefinite matrix as a LinearOperator.
algorithm: Can be either 'truncated_svd', 'randomized' or 'nystrom'.
rank_approx: rank of the approximation (must be less than rank(X)).
n_oversamples: Oversampling parameter. Not currently supported for
algorithm="nystrom".
n_power_iter: Number of power iterations used in range finding.
random_state: Seed.
Returns:
        The preconditioner P as a LinearOperator.
"""
scaled_psd_term = factor.inv() @ psd_term @ factor.inv().T
f, ft = approximate(
scaled_psd_term,
algorithm,
rank_approx=rank_approx,
n_oversamples=n_oversamples,
n_power_iter=n_power_iter,
random_state=random_state,
)
inner = sparse_identity(rank_approx) + ft @ f
def apply_inner(vector):
# v -> f.T v
w = ft.dot(vector)
        # w -> (I_r + f.T f)^{-1} w
u = scipy.linalg.solve(inner, w)
        # action of I_r - f (I_r + f.T f)^{-1} f.T
return vector - np.dot(f, u)
def action(vector):
vector = factor.solve(vector)
vector = apply_inner(vector)
vector = factor.rsolve(vector)
return vector
return LinearOperator(factor.shape, matvec=action)
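# Illustrative usage: precondition conjugate gradients on S = A + B, where A has
# a known factor Q (A, B, Q, and rhs below are hypothetical placeholders):
#     from scipy.sparse.linalg import aslinearoperator, cg
#     P = compute_preconditioner(Factor(Q), aslinearoperator(B),
#                                algorithm="nystrom", rank_approx=20)
#     x, info = cg(aslinearoperator(A + B), rhs, M=P)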
/scaled_preconditioners-0.1.1-py3-none-any.whl/scaled_preconditioners/preconditioner.py
# Scaled
This project aims to provide a simple, efficient, and reliable distributed computing
framework, with a centralized scheduler and a stable protocol between clients, workers, and the scheduler
# Introduction
The goal is for this project to be as simple as possible:
- It is built on top of zmq
- It has ready-to-use Python versions of the Client, Scheduler, and Worker
- I will provide a golang or Rust version of the Scheduler; the goal is for the Scheduler to be completely computer-language
agnostic, which means all implementations follow the same protocol
- The Scheduler might support function-based computing trees in the future
# Installation
`pip install scaled`
If you want to use uvloop, run `pip install uvloop`; by default the Python built-in asyncio event loop is used
# How to use it
## Start local scheduler and cluster at the same time in the code
```python
import random
from scaled.client import Client
from scaled.cluster.combo import SchedulerClusterCombo
def calculate(sec: int):
return sec * 1
def main():
address = "tcp://127.0.0.1:2345"
cluster = SchedulerClusterCombo(address=address, n_workers=10, event_loop="uvloop")
client = Client(address=address)
tasks = [random.randint(0, 100) for _ in range(100000)]
futures = [client.submit(calculate, i) for i in tasks]
results = [future.result() for future in futures]
assert results == tasks
client.disconnect()
cluster.shutdown()
if __name__ == "__main__":
main()
```
## Start scheduler and cluster independently
use `scaled_scheduler` to start scheduler, for example:
```bash
$ scaled_scheduler tcp://0.0.0.0:8516
[INFO]2023-03-19 12:16:10-0400: logging to ('/dev/stdout',)
[INFO]2023-03-19 12:16:10-0400: use event loop: 2
[INFO]2023-03-19 12:16:10-0400: Scheduler: monitor address is ipc:///tmp/0.0.0.0_8516_monitor
[INFO]2023-03-19 12:16:10-0400: AsyncBinder: started
[INFO]2023-03-19 12:16:10-0400: VanillaTaskManager: started
[INFO]2023-03-19 12:16:10-0400: VanillaFunctionManager: started
[INFO]2023-03-19 12:16:10-0400: VanillaWorkerManager: started
[INFO]2023-03-19 12:16:10-0400: StatusReporter: started
```
use `scaled_worker` to start 10 workers:
```bash
$ scaled_worker -n 10 tcp://127.0.0.1:8516
[INFO]2023-03-19 12:19:19-0400: logging to ('/dev/stdout',)
[INFO]2023-03-19 12:19:19-0400: ClusterProcess: starting 23 workers, heartbeat_interval_seconds=2, function_retention_seconds=3600
[INFO]2023-03-19 12:19:19-0400: Worker[0] started
[INFO]2023-03-19 12:19:19-0400: Worker[1] started
[INFO]2023-03-19 12:19:19-0400: Worker[2] started
[INFO]2023-03-19 12:19:19-0400: Worker[3] started
[INFO]2023-03-19 12:19:19-0400: Worker[4] started
[INFO]2023-03-19 12:19:19-0400: Worker[5] started
[INFO]2023-03-19 12:19:19-0400: Worker[6] started
[INFO]2023-03-19 12:19:19-0400: Worker[7] started
[INFO]2023-03-19 12:19:19-0400: Worker[8] started
[INFO]2023-03-19 12:19:19-0400: Worker[9] started
```
For detailed options of the above 2 programs, please use the `-h` argument to check out all available options
Then you can simply write client code as:
```python
from scaled.client import Client
def foobar(foo: int):
return foo
client = Client(address="tcp://127.0.0.1:2345")
future = client.submit(foobar, 1)
print(future.result())
```
Scaled also supports submit graph task, for example:
```python
from scaled.client import Client
def inc(i):
return i + 1
def add(a, b):
return a + b
def minus(a, b):
return a - b
graph = {
"a": 2,
"b": 2,
"c": (inc, "a"), # c = a + 1 = 2 + 1 = 3
"d": (add, "a", "b"), # d = a + b = 2 + 2 = 4
"e": (minus, "d", "c") # e = d - c = 4 - 3 = 1
}
client = Client(address="tcp://127.0.0.1:2345")
futures = client.submit_graph(graph, keys=["e"])
print(futures[0].result())
```
# Scaled Top
You can use `scaled_top` to connect to the scheduler's monitor address to get some insight into the scaled system
```bash
$ scaled_top ipc:///tmp/0.0.0.0_8516_monitor
```
This will show something similar to the `top` command, but for getting the status of the scaled system:
```bash
scheduler | task_manager | scheduler_sent | scheduler_received
cpu 0.0% | unassigned 0 | FunctionResponse 24 | Heartbeat 183,109
rss 37.1 MiB | running 0 | TaskEcho 200,000 | FunctionRequest 24
| success 200,000 | Task 200,000 | Task 200,000
| failed 0 | TaskResult 200,000 | TaskResult 200,000
| canceled 0 | BalanceRequest 4 | BalanceResponse 4
--------------------------------------------------------------------------------------------------
Shortcuts: worker[n] cpu[c] rss[m] free[f] working[w] queued[q]
Total 10 worker(s)
worker agt_cpu agt_rss [cpu] rss free sent queued | function_id_to_tasks
W|Linux|15940|3c9409c0+ 0.0% 32.7m 0.0% 28.4m 1000 0 0 |
W|Linux|15946|d6450641+ 0.0% 30.7m 0.0% 28.2m 1000 0 0 |
W|Linux|15942|3ed56e89+ 0.0% 34.8m 0.0% 30.4m 1000 0 0 |
W|Linux|15944|6e7d5b99+ 0.0% 30.8m 0.0% 28.2m 1000 0 0 |
W|Linux|15945|33106447+ 0.0% 31.1m 0.0% 28.1m 1000 0 0 |
W|Linux|15937|b031ce9a+ 0.0% 31.0m 0.0% 30.3m 1000 0 0 |
W|Linux|15941|c4dcc2f3+ 0.0% 30.5m 0.0% 28.2m 1000 0 0 |
W|Linux|15939|e1ab4340+ 0.0% 31.0m 0.0% 28.1m 1000 0 0 |
W|Linux|15938|ed582770+ 0.0% 31.1m 0.0% 28.1m 1000 0 0 |
W|Linux|15943|a7fe8b5e+ 0.0% 30.7m 0.0% 28.3m 1000 0 0 |
```
- scheduler section is showing how much resources scheduler used
- task_manager section shows count for each task status
- scheduler_sent section shows count for each type of messages scheduler sent
- scheduler_received section shows count for each type of messages scheduler received
- function_id_to_tasks section shows task count for each function used
- worker section shows worker details; you can use shortcuts to sort by columns, and the `*` on a column header shows which
column is currently sorted
- agt_cpu/agt_rss means cpu/memory usage of worker agent
- cpu/rss means cpu/memory usage of worker
- free means number of free task slots for this worker
- sent means how many tasks scheduler sent to the worker
- queued means how many tasks worker received and queued
/scaled-0.56.tar.gz/scaled-0.56/README.md
from datetime import datetime
import time
import singer
import json
import re
import collections
import inflection
from decimal import Decimal
logger = singer.get_logger()
def validate_config(config):
"""Validates config"""
errors = []
required_config_keys = [
's3_bucket'
]
# Check if mandatory keys exist
for k in required_config_keys:
if not config.get(k, None):
errors.append("Required key is missing from config: [{}]".format(k))
return errors
def float_to_decimal(value):
"""Walk the given data structure and turn all instances of float into
double."""
if isinstance(value, float):
return Decimal(str(value))
if isinstance(value, list):
return [float_to_decimal(child) for child in value]
if isinstance(value, dict):
return {k: float_to_decimal(v) for k, v in value.items()}
return value
def add_metadata_columns_to_schema(schema_message):
"""Metadata _sdc columns according to the stitch documentation at
https://www.stitchdata.com/docs/data-structure/integration-schemas#sdc-columns
Metadata columns gives information about data injections
"""
extended_schema_message = schema_message
extended_schema_message['schema']['properties']['_sdc_batched_at'] = { 'type': ['null', 'string'], 'format': 'date-time' }
extended_schema_message['schema']['properties']['_sdc_deleted_at'] = { 'type': ['null', 'string'] }
extended_schema_message['schema']['properties']['_sdc_extracted_at'] = { 'type': ['null', 'string'], 'format': 'date-time' }
extended_schema_message['schema']['properties']['_sdc_primary_key'] = {'type': ['null', 'string'] }
extended_schema_message['schema']['properties']['_sdc_received_at'] = { 'type': ['null', 'string'], 'format': 'date-time' }
extended_schema_message['schema']['properties']['_sdc_sequence'] = {'type': ['integer'] }
extended_schema_message['schema']['properties']['_sdc_table_version'] = {'type': ['null', 'string'] }
return extended_schema_message
def add_metadata_values_to_record(record_message, schema_message):
"""Populate metadata _sdc columns from incoming record message
The location of the required attributes are fixed in the stream
"""
extended_record = record_message['record']
extended_record['_sdc_batched_at'] = datetime.now().isoformat()
extended_record['_sdc_deleted_at'] = record_message.get('record', {}).get('_sdc_deleted_at')
extended_record['_sdc_extracted_at'] = record_message.get('time_extracted')
extended_record['_sdc_primary_key'] = schema_message.get('key_properties')
extended_record['_sdc_received_at'] = datetime.now().isoformat()
extended_record['_sdc_sequence'] = int(round(time.time() * 1000))
extended_record['_sdc_table_version'] = record_message.get('version')
return extended_record
def remove_metadata_values_from_record(record_message):
"""Removes every metadata _sdc column from a given record message
"""
cleaned_record = record_message['record']
cleaned_record.pop('_sdc_batched_at', None)
cleaned_record.pop('_sdc_deleted_at', None)
cleaned_record.pop('_sdc_extracted_at', None)
cleaned_record.pop('_sdc_primary_key', None)
cleaned_record.pop('_sdc_received_at', None)
cleaned_record.pop('_sdc_sequence', None)
cleaned_record.pop('_sdc_table_version', None)
return cleaned_record
def flatten_key(k, parent_key, sep):
    """Builds the flattened column name for key k under parent_key, shortening
    path segments until the joined name fits within 255 characters.
    """
    full_key = parent_key + [k]
    inflected_key = list(full_key)
reducer_index = 0
while len(sep.join(inflected_key)) >= 255 and reducer_index < len(inflected_key):
reduced_key = re.sub(r'[a-z]', '', inflection.camelize(inflected_key[reducer_index]))
inflected_key[reducer_index] = \
(reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][0:3]).lower()
reducer_index += 1
return sep.join(inflected_key)
def flatten_record(d, parent_key=[], sep='__'):
"""
"""
items = []
for k in sorted(d.keys()):
v = d[k]
new_key = flatten_key(k, parent_key, sep)
        if isinstance(v, collections.abc.MutableMapping):
items.extend(flatten_record(v, parent_key + [k], sep=sep).items())
else:
items.append((new_key, json.dumps(v) if type(v) is list else v))
return dict(items)
def get_target_key(message, prefix=None, timestamp=None, naming_convention=None):
"""Creates and returns an S3 key for the message"""
if not naming_convention:
naming_convention = '{stream}-{timestamp}.json' # o['stream'] + '-' + now + '.json'
if not timestamp:
timestamp = datetime.now().strftime('%Y%m%dT%H%M%S')
key = naming_convention
# replace simple tokens
for k, v in {
'{stream}': message['stream'],
'{timestamp}': timestamp,
'{date}': datetime.now().strftime('%Y-%m-%d')
}.items():
if k in key:
key = key.replace(k, v)
# replace dynamic tokens
# todo: replace dynamic tokens such as {date(<format>)} with the date formatted as requested in <format>
if prefix:
filename = key.split('/')[-1]
key = key.replace(filename, f'{prefix}{filename}')
return key
|
/scalefree-target-s3-json-0.2.tar.gz/scalefree-target-s3-json-0.2/target_s3_json/utils.py
| 0.638835 | 0.218523 |
utils.py
|
pypi
|
[](https://travis-ci.org/steveniemitz/scales)
# scales
A protocol-agnostic RPC client stack for Python.
## Features
* Built-in support for HTTP, Thrift, ThriftMux, Kafka, and Redis (experimental).
* Extensible stack for easy support of other protocols.
* Fully asynchronous API
* Robust load balancing and error detection / recovery.
* Service discovery via ZooKeeper
## Installing
```bash
pip install scales-rpc
```
## Getting started
Getting started with scales is very simple. For example, let's use it to do an HTTP GET of www.google.com:
```python
from scales.http import Http
client = Http.NewClient('tcp://www.google.com:80')
response = client.Get('/')
print(response.text)
```
The HTTP client is the simplest type: you give it a URI (see service discovery below), and it returns a client with `Get(uri)` and `Post(uri, data)` methods. The response is a `requests` response object.
## Service Discovery
Out of the box, scales uses the `ScalesUriParser` to parse the URIs passed to NewClient. The `ScalesUriParser` supports two protocols, `tcp://` to create a static serverset of host:port pairs (for example `tcp://localhost:8080,localhost:8081`), and `zk://` to create a dynamic serverset based on a ZooKeeper node. ZooKeeper URIs should be in the form of `zk://zk_server1:port,zk_server2:port/full/znode/path`.
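For example, both URI forms can be passed straight to a client builder (the hosts and znode path below are placeholders):
```python
from scales.http import Http

# Static serverset: a fixed list of host:port pairs.
static_client = Http.NewClient('tcp://host1:8080,host2:8080')

# Dynamic serverset: membership is tracked from a ZooKeeper znode.
zk_client = Http.NewClient('zk://zk1:2181,zk2:2181/services/http/prod')
```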
## Monitoring / Metrics
Scales provides an internal metrics tracking system called Varz. A component in scales.varz called the VarzReceiver handles tracking and aggregating metrics. This component can be used as-is, or replaced at runtime via monkey patching to integrate with a custom metrics system.
In addition, a helper class, VarzAggregator, can be used to generate varz aggregations. By default metrics are aggregated to the service level, however this can be customized by passing in a custom key selector to Aggregate.
For example:
```python
aggregated_varz = VarzAggregator.Aggregate(
VarzReceiver.VARZ_DATA,
VarzReceiver.VARZ_METRICS)
```
# Class Hierarchy
## Core
The scales core is composed of 4 modules
* Messages
* Sinks
* Load Balancers
* Pools
### Messages
A message is an envelope to carry some data. In scales, there are two main messages, `MethodCallMessage` and `MethodReturnMessage`, representing a request and response.
### Sinks
Sinks are the core message processing unit of scales. In scales, every layer of the RPC stack is a sink.
Some examples of sinks are:
* **Serializer** sinks handle serializing a `Message` object into a stream.
* **Transport** sinks handle sending and receiving data over a transport (socket, HTTP, etc)
* **Dispatch** sinks handle initiating a method call, and are called by the transparent client proxy.
### Load Balancers
Load balancers are sinks as well, however, they (as well as pools) are important enough to have their own category. Scales provides two load balancers out of the box, the `HeapBalancerSink`, and the `ApertureBalancerSink`.
The `HeapBalancerSink` maintains a min-heap of all nodes in the serverset, and dispatches requests to the least-loaded node at the time. Nodes detected as down are not dispatched to unless all nodes have failed.
The `ApertureBalancerSink` is a specialization of the `HeapBalancerSink` which attempts to maintain the smallest possible subset of the serverset to maintain load within a certain load band. Nodes are added to the aperture when the load average of the serverset reaches a certain amount (2 by default), and are removed when the load average goes below a certain amount (.5 by default). This method is useful for situations where the load the client is generating is small in relation to the size of the serverset. The aperture balancer is the default for all scales clients.
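The load band behavior can be pictured with a small sketch. This is illustrative pseudologic only, not the actual `ApertureBalancerSink` implementation; the thresholds follow the defaults above:
```python
HIGH_WATER, LOW_WATER = 2.0, 0.5  # default load band

def adjust_aperture(aperture, serverset, load_avg):
    """Grow or shrink the active subset to keep load_avg inside the band."""
    if load_avg >= HIGH_WATER and len(aperture) < len(serverset):
        idle = [n for n in serverset if n not in aperture]
        aperture.append(idle[0])  # add a node to absorb load
    elif load_avg <= LOW_WATER and len(aperture) > 1:
        aperture.pop()            # shed a node when load is light
    return aperture
```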
### Pools
Pools maintain one or more transport sinks to an underlying endpoint and handle request concurrency to that sink. Scales comes with two pool types, the `SingletonPool` and the `WatermarkPool`.
The `SingletonPool` maintains at most one transport sink, and allows unlimited concurrency to it. This pool is used for transports that allow multiplexing requests over a single connection, such as ThriftMux.
The `WatermarkPool` maintains a sink pool sized by a low watermark and a high watermark. The pool grows until it hits the low watermark, and maintains up to that many sinks forever. Once the number of concurrently open sinks reaches the low watermark, new sinks are created for each request until the number of concurrently open sinks reaches the high watermark. At that point, incoming requests are queued until the number of concurrently open sinks drops below the high watermark.
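A minimal sketch of the watermark policy (illustrative only; the real `WatermarkPool` is an asynchronous sink, and the class and method names below are assumptions):
```python
class WatermarkPolicy:
    def __init__(self, low, high):
        self.low, self.high = low, high
        self.cached = []     # sinks kept alive, at most `low` of them
        self.open_count = 0  # concurrently open sinks

    def checkout(self, make_sink):
        self.open_count += 1
        if self.cached:
            return self.cached.pop()
        if self.open_count <= self.high:
            return make_sink()
        self.open_count -= 1
        return None  # caller queues until we drop below the high watermark

    def checkin(self, sink):
        self.open_count -= 1
        if len(self.cached) < self.low:
            self.cached.append(sink)  # retained, per the low watermark
        else:
            sink.close()              # transient sink above the low watermark
```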
## Protocol Support
Out of the box, scales supports five protocols: `thrift`, `thriftmux`, `http`, `kafka` (producer only), and `redis` (experimental).
### Thrift (and ThriftMux)
Scales supports calling thrift services via autogenerated python thrift code generated by the [Apache Thrift compiler](https://thrift.apache.org/). Serialization and deserialization are handled by the thrift library using the `TBinaryProtocol` (specifically `TBinaryProtocolAccelerated`).
Scales proxies are created for Thrift and ThriftMux using their respective builders (`scales.thrift.Thrift`, `scales.thriftmux.ThriftMux`), and passing the generated thrift interface.
For example:
```python
from my_project.gen_py.example_rpc_service import (ExampleService, ttypes)
from scales.thriftmux import ThriftMux
client = ThriftMux.NewClient(ExampleService.Iface, 'tcp://localhost:8080')
ret = client.passMessage(ttypes.Message('hi!'))
```
### Kafka
Scales provides a robust, high performance Kafka producer client. It supports discovering the kafka brokers either directly or via ZooKeeper. Sending messages is very simple:
```python
from scales.kafka import Kafka
client = Kafka.NewClient("tcp://broker1:9092,broker2:9092")
client.Put("my-topic", ["payload 1", "payload 2"])
```
Limitations:
* Currently messages are distributed across partitions using a least-loaded strategy (via a HeapBalancer). Partition selection via a hash function is unsupported.
* Only the producer API is implemented.
### Redis (**EXPERIMENTAL**)
The redis client is a highly experimental wrapper of the python redis client. It is not recommended for production use.
# Scales architecture
A primary goal of scales was to build a fully asynchronous system. All methods should be non-blocking, instead opting to either return an AsyncResult representing the state of the operation, or operate in a continuation-passing style.
## Transparent Proxy / Dispatcher
The entry point to scales is a transparent proxy, which is generated by the Scales builder, or through helper methods such as `ThriftMux.NewClient`. A proxy's job is to intercept all methods defined by a type (an interface) and route them to a dispatcher. Scales provides one dispatcher, the `MessageDispatcher`. The `MessageDispatcher` is a special type of `ClientMessageSink` that initiates the sink chain. It takes a method call, packages it into a `MethodCallMessage`, then forwards it to the next sink. On the response side, it terminates the chain by taking a response message and applying it to a gevent `AsyncResult`. The proxy uses this async result to either wait on (in the synchronous case) or return to the caller to use (in the asynchronous case).
## Message processing
Messages (request or response) flow through scales in a cooperative chain. Each sink takes a message from the previous, does something to it, and passes it to the next. If a sink wants to also handle the response message, it installs itself in the response `sink_stack` via `sink_stack.Push(self, ...)`. Response messages traverse the stack in a similar cooperative way, with each sink calling the next sink on the stack via `sink_stack.AsyncProcessResponse(stream, msg)`.
## Serializer sinks
Serializer sinks translate a `MethodCallMessage` into a serialized stream. Serializer sinks perform the switch from `msg` to `stream` (and the reverse) during sink stack processing.
## Transport sinks
Transport sinks are the final sink in the request sink chain, and initiate the response sink chain. As the name implies, transport sinks are `ClientMessageSinks` that take a message and stream and send them across some form of transport (for example, Thrift and ThriftMux both use a TCP transport). They also handle reading from the transport and dispatching the response data back up the sink chain for the message that the response belongs to. This ranges from simple (in the Thrift case, a transport sink can only handle one request concurrently) to more complicated: the ThriftMux transport sink, for example, maintains a hash table of tags (read from the response stream) to response sink stacks.
Transport sinks are also responsible for handling timeouts. They can either read the deadline from the message passed in and calculate a timeout from it to be used by the transport, or use the timeout event on the message to asynchronously trigger a timeout. They are not, however, required to notify the client of the timeout; the timeout sink will accomplish that.
In addition, transport sinks must support having multiple concurrent `Open()` calls pending. Some upstream sinks may call `Open()` more than once on a transport, and rely on this being a safe operation.
Finally, transport sinks must detect when their underlying connection is no longer usable, and report this up the stack by setting their on_faulted observable. Upstream sinks use this event to trigger reconnection logic, load balancer adjustment, etc.
# Extending scales
There are a few common extension points that one would want to use when implementing new features in scales.
The simplest is implementing a sink that simply inspects (and possibly modifies) messages as they pass through the system.
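A hedged sketch of such a sink follows. The `AsyncProcessRequest`/`AsyncProcessResponse` signatures mirror the sinks shown elsewhere in this package, but the `ClientMessageSink` base class details and the `next_sink` attribute are assumptions, not the library's exact API:
```python
class LoggingSink(ClientMessageSink):
    """Pass-through sink that inspects messages in both directions."""
    def AsyncProcessRequest(self, sink_stack, msg, stream, headers):
        print('request: %r' % msg)
        sink_stack.Push(self)  # install ourselves on the response stack
        self.next_sink.AsyncProcessRequest(sink_stack, msg, stream, headers)

    def AsyncProcessResponse(self, sink_stack, context, stream, msg):
        print('response: %r' % msg)
        sink_stack.AsyncProcessResponse(stream, msg)  # continue the chain
```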
|
/scales-rpc-2.0.1.tar.gz/scales-rpc-2.0.1/README.md
| 0.482429 | 0.917709 |
README.md
|
pypi
|
import functools
import sys
import gevent
from gevent import Greenlet
from gevent.event import AsyncResult as g_AsyncResult
class NamedGreenlet(Greenlet):
def __init__(self, run=None, *args, **kwargs):
self.name = None
Greenlet.__init__(self, run, *args, **kwargs)
@classmethod
def spawn(cls, name, *args, **kwargs):
g = cls(*args, **kwargs)
g.name = name
g.start()
return g
def __repr__(self):
if self.name:
return self.name
else:
      return Greenlet.__repr__(self)
class AsyncResult(g_AsyncResult):
@staticmethod
def WhenAll(ars):
"""Returns an AsyncResult representing the state of all AsyncResults passed.
Args:
ars - An enumerable of AsyncResults.
Returns:
An AsyncResult representing the completion of all ars passed in. When all
complete, the AsyncResult will be set to an array of the results of each
AsyncResult, in the order they were enumerated in.
If any AsyncResult fails, the return result will fail.
"""
ret = AsyncResult()
num_ars = len(ars)
total = [num_ars]
results = [None] * num_ars
def complete(_n, _ar):
if _ar.exception:
ret.set_exception(_ar.exception)
elif not ret.ready():
total[0] -= 1
results[_n] = _ar.value
if total[0] == 0:
ret.set(results)
for n, ar in enumerate(ars):
ar.rawlink(functools.partial(complete, n))
return ret
@staticmethod
def WhenAny(ars):
"""Returns an AsyncResult representing the state of any AsyncResult passed in.
The return value represents the state of the first AsyncResult to complete, or,
if all fail, the last to fail.
Args:
ars - An enumerable of AsyncResults.
Returns:
An AsyncResult representing the state of the first AsyncResult to complete.
The AsyncResult's value will be set to the value of the first result to
complete, or, if all fail, the exception thrown by the last to fail.
"""
ready_ars = [ar for ar in ars if ar.ready()]
if ready_ars:
return ready_ars[0]
ret = AsyncResult()
total = [len(ars)]
def complete(_ar):
total[0] -= 1
if total[0] == 0 and _ar.exception:
ret.set_exception(_ar.exception)
elif not ret.ready() and _ar.successful():
ret.set(_ar.value)
for ar in ars:
ar.rawlink(complete)
return ret
@staticmethod
def FromValue(val):
if val is None:
return AsyncResult.Complete()
else:
ar = AsyncResult()
ar.set(val)
return ar
@staticmethod
def Complete():
"""Return an AsyncResult that has completed."""
return _COMPLETE
@staticmethod
def CompleteIn(n):
"""Returns an AsyncResult that completes in <n> seconds
Args:
n - The number of seconds to wait before completing.
"""
ar = AsyncResult()
def helper():
ar.set()
g = Greenlet(helper)
g.start_later(float(n))
return ar
def _SafeLinkHelper(self, fn):
try:
self.set(fn())
except:
self.set_exception(sys.exc_info()[1])
def SafeLink(self, fn):
"""Propagate the result of calling fn() on a new greenlet to ar
Args:
ar - An AsyncResult.
fn - The function to execute.
"""
gevent.spawn(self._SafeLinkHelper, fn)
def ContinueWith(self, fn, on_hub=True):
cw_ar = AsyncResult()
def continue_with_callback(_ar):
def run():
try:
val = fn(_ar)
cw_ar.set(val)
except:
cw_ar.set_exception(sys.exc_info()[1])
if on_hub:
run()
else:
gevent.spawn(run)
self.rawlink(continue_with_callback)
return cw_ar
def Map(self, fn):
def mapper(_):
if self.exception:
return self
else:
return fn(self.value)
return self.ContinueWith(mapper).Unwrap()
def _UnwrapHelper(self, target):
if self.ready():
# We're ready, propagate the result
if self.exception:
target.set_exception(self.exception)
else:
if isinstance(self.value, AsyncResult):
self.value._UnwrapHelper(target)
else:
target.set(self.value)
else:
self.rawlink(
functools.partial(AsyncResult._UnwrapHelper, target=target))
def Unwrap(self):
unwrapped_ar = AsyncResult()
self._UnwrapHelper(unwrapped_ar)
return unwrapped_ar
@staticmethod
def TryGet(val):
if isinstance(val, AsyncResult):
return val.get()
else:
return val
@staticmethod
def Run(fn):
ar = AsyncResult()
ar.SafeLink(fn)
return ar
@staticmethod
def RunInline(fn):
ar = AsyncResult()
ar._SafeLinkHelper(fn)
return ar
_COMPLETE = AsyncResult()
_COMPLETE.set()
class NoopTimeout(object):
def start(self): pass
def cancel(self): pass
|
/scales-rpc-2.0.1.tar.gz/scales-rpc-2.0.1/scales/asynchronous.py
| 0.640973 | 0.207877 |
asynchronous.py
|
pypi
|
import functools
import random
try:
from gevent.lock import RLock # pylint: disable=E0611
except ImportError:
from gevent.coros import RLock
from .base import (
LoadBalancerSink,
NoMembersError
)
from ..asynchronous import AsyncResult
from ..constants import (
ChannelState,
Int,
MessageProperties,
SinkProperties,
SinkRole
)
from ..sink import (
FailingMessageSink,
SinkProvider,
)
from ..varz import (
Counter,
Gauge,
Source,
VarzBase
)
class Heap(object):
"""A utility class to perform heap functions"""
@staticmethod
def Swap(heap, i, j):
"""Swap two elements in the heap.
Args:
heap - The heap array.
      i, j - The indexes in the array to swap.
"""
if i == 0 or j == 0:
raise Exception("heap swapping of element 0 should never happen.")
heap[i], heap[j] = heap[j], heap[i]
heap[i].index = i
heap[j].index = j
@staticmethod
def FixUp(heap, i):
"""Traverse up the heap, ensuring the invariant is maintained.
Args:
heap - The heap array.
i - The index to start at.
"""
while True:
if i != 1 and heap[i] < heap[i//2]:
Heap.Swap(heap, i, i//2)
i //= 2 # FixUp(heap, i/2)
else:
break
@staticmethod
def FixDown(heap, i, j):
"""Traverse down the heap, ensuring the invariant is maintained.
Args:
heap - The heap array.
i, j - The node index to traverse from -> to.
"""
while True:
if j < i * 2: break
m = 2 * i if (j == i * 2 or heap[2*i] < heap[2*i+1]) else 2*i+1
if heap[m] < heap[i]:
Heap.Swap(heap, i, m)
i = m # FixDown(heap, m, j)
else:
break
def synchronized(fn):
"""Runs the wrapped method under a lock.
The self parameter to the wrapped function is expected to have a __heap_lock
attribute.
"""
@functools.wraps(fn)
def wrapper(self, *args, **kwargs):
with self._heap_lock:
return fn(self, *args, **kwargs)
return wrapper
class HeapBalancerSink(LoadBalancerSink):
"""A sink that implements a heap load balancer."""
Penalty = Int.MaxValue
Idle = Int.MinValue + 1
class HeapVarz(VarzBase):
"""
size - The number of nodes in the pool
    no_members - The number of times the balancer served a failing request
because there were no members in the pool.
"""
_VARZ_BASE_NAME = 'scales.loadbalancer.Heap'
_VARZ = {
'size': Gauge,
'no_members': Counter
}
class Node(object):
__slots__ = ('load', 'index', 'downq', 'avg_load', 'channel', 'endpoint')
def __init__(self, channel, load, index, endpoint):
self.channel = channel
self.avg_load = 0
self.load = load
self.index = index
self.downq = None
self.endpoint = endpoint
def __lt__(self, other):
"""Compare to other, return true if (load, index) < other.(load, index)"""
if self.load > other.load:
return False
elif self.load < other.load:
return True
else:
return self.index < other.index
def __init__(self, next_provider, sink_properties, global_properties):
self._heap = [self.Node(
FailingMessageSink(functools.partial(Exception, "this sink should never be used")),
self.Idle, 0, None)]
self._no_members = FailingMessageSink(NoMembersError)
self._downq = None
self._size = 0
self._open = False
self._heap_lock = RLock()
service_name = global_properties[SinkProperties.Label]
self.__varz = self.HeapVarz(Source(service=service_name))
super(HeapBalancerSink, self).__init__(next_provider, sink_properties, global_properties)
def _AsyncProcessRequestImpl(self, sink_stack, msg, stream, headers):
if self._size == 0:
channel = self._no_members
self.__varz.no_members()
else:
with self._heap_lock:
n = self.__Get()
n.load += 1
Heap.FixDown(self._heap, n.index, self._size)
self._OnGet(n)
put_called = [False]
def PutWrapper():
if not put_called[0]:
with self._heap_lock:
put_called[0] = True
self.__Put(n)
sink_stack.Push(self, PutWrapper)
msg.properties[MessageProperties.Endpoint] = n.endpoint
channel = n.channel
channel.AsyncProcessRequest(sink_stack, msg, stream, headers)
def AsyncProcessResponse(self, sink_stack, context, stream, msg):
context()
sink_stack.AsyncProcessResponse(stream, msg)
# Events override by subclasses
def _OnNodeDown(self, node):
return AsyncResult.Complete()
def _OnNodeUp(self, node):
pass
def _OnGet(self, node):
pass
def _OnPut(self, node):
pass
def __Get(self):
"""Get the least-loaded node from the heap.
Returns:
A node.
"""
while True:
n = self._downq
m = None
while n is not None:
if n.index < 0:
# The node has been discarded.
n = n.downq
if m is None:
self._downq = n
else:
m.downq = n
elif n.channel.state == ChannelState.Open:
# The node was resurrected, mark it back up
n.load -= self.Penalty
ep = n.endpoint
Heap.FixUp(self._heap, n.index)
o = n.downq
n.downq = None
n = o
if m is None:
self._downq = n
else:
m.downq = n
self._log.info('Marking node %s up' % str(ep))
else:
# No change, move to the next node in the linked list
m, n = n, n.downq
n = self._heap[1]
if n.channel.state == ChannelState.Open or n.load >= 0:
return n
else:
# Node is now down
n.downq = self._downq
self._downq = n
n.load += self.Penalty
Heap.FixDown(self._heap, 1, self._size)
self._OnNodeDown(n)
self._log.warning('Marking node %s down' % str(n.endpoint))
# Loop
def __Put(self, n):
"""'Return' a member to the heap. Load on the node is decremented and its
position in the heap is adjusted.
Args:
n - The node to return
"""
n.load -= 1
if n.load < self.Idle:
self._log.warning('Decrementing load below Zero')
n.load = self.Idle
if n.index < 0 and n.load > self.Idle:
pass
elif n.index < 0 and n.load == self.Idle:
n.channel.Close()
elif n.load == self.Idle and self._size > 1:
i = n.index
Heap.Swap(self._heap, i, self._size)
Heap.FixDown(self._heap, i, self._size - 1)
j = random.randint(1, self._size)
Heap.Swap(self._heap, j, self._size)
Heap.FixUp(self._heap, j)
Heap.FixUp(self._heap, self._size)
else:
Heap.FixUp(self._heap, n.index)
self._OnPut(n)
@synchronized
def _AddSink(self, endpoint, sink_factory):
"""Add a sink to the heap.
    The sink is created via sink_factory, initialized to idle load, and opened
    immediately if the balancer is open.
    Args:
      endpoint - The endpoint the new sink connects to.
      sink_factory - A callable producing the sink to add.
"""
self._size += 1
self.__varz.size(self._size)
new_node = self.Node(sink_factory(), self.Idle, self._size, endpoint)
self._heap.append(new_node)
Heap.FixUp(self._heap, self._size)
# Adding an Open() in here allows us to optimistically assume it'll be opened
# before the next message attempts to get it. However, the Open() will likely
# yield, so other code paths need to be aware there is a potentially un-open
# sink on the heap.
if self._open:
return self._OpenNode(new_node)
else:
return AsyncResult.Complete()
def _FindNodeByEndpoint(self, endpoint):
i = next((idx for idx, node in enumerate(self._heap)
if node.endpoint == endpoint), 0)
if i == 0:
return None
return self._heap[i]
@synchronized
def _RemoveSink(self, endpoint):
"""Remove a sink from the heap.
The sink is closed immediately if it has no outstanding load, otherwise the
close is deferred until the sink goes idle.
Args:
      endpoint - The endpoint whose sink should be removed.
"""
node = self._FindNodeByEndpoint(endpoint)
if not node or node.index < 0:
return False
i = node.index
Heap.Swap(self._heap, i, self._size)
Heap.FixDown(self._heap, i, self._size - 1)
self._heap.pop()
self._size -= 1
self.__varz.size(self._size)
if self._size < 0:
self._log.warning("Decrementing size below 0")
self._size = 0
node.index = -1
if node.load == self.Idle or node.load >= 0:
node.channel.Close()
return True
def _OnServersChanged(self, endpoint, channel_factory, added):
"""Invoked by the LoadBalancer when an endpoint joins or leaves the
server set.
Args:
      endpoint - The endpoint joining or leaving the set.
      channel_factory - A factory producing a channel sink for the endpoint.
added - True if the endpoint is being added, False if being removed.
"""
if added:
self._AddSink(endpoint, channel_factory)
else:
self._RemoveSink(endpoint)
def _OnOpenNodeComplete(self, ar, node):
if ar.exception:
self._log.error('Exception caught opening channel: %s' % str(ar.exception))
return self._OnNodeDown(node)
else:
return ar
def _OpenNode(self, n):
return n.channel \
.Open() \
.ContinueWith(lambda ar: self._OnOpenNodeComplete(ar, n)) \
.Unwrap()
def _OpenInitialChannels(self):
"""Open the sink and all underlying nodes."""
self._open = True
if self._size > 0:
# Ignore the first sink, it's the FailingChannelSink.
AsyncResult.WhenAny([self._OpenNode(n) for n in self._heap[1:]])\
.ContinueWith(lambda _ar: self._OnOpenComplete())
else:
self._OnOpenComplete()
def Close(self):
"""Close the sink and all underlying nodes immediately."""
super(HeapBalancerSink, self).Close()
self._open = False
[n.channel.Close() for n in self._heap]
@property
def state(self):
return max([n.channel.state for n in self._heap])
HeapBalancerSink.Builder = SinkProvider(
HeapBalancerSink,
SinkRole.LoadBalancer,
server_set_provider = None)
|
/scales-rpc-2.0.1.tar.gz/scales-rpc-2.0.1/scales/loadbalancer/heap.py
| 0.695752 | 0.167661 |
heap.py
|
pypi
|
from abc import (ABCMeta, abstractmethod)
from six import string_types
class ServerSetProvider(ABCMeta('ABCMeta', (object,), {})):
"""Base class for providing a set of servers, as well as optionally
notifying the pool of servers joining and leaving the set."""
@abstractmethod
def Initialize(self, on_join, on_leave):
"""Initialize the provider.
This method is called before any calls to GetServers().
Args:
on_join - A function to be called when a server joins the set.
on_leave - A function to be called when a server leaves the set.
"""
raise NotImplementedError()
@abstractmethod
def Close(self):
"""Close the provider and any resources associated.
"""
raise NotImplementedError()
@abstractmethod
def GetServers(self):
"""Get all the current servers in the set.
Returns:
An iterable of servers.
"""
raise NotImplementedError()
@property
def endpoint_name(self):
return None
class StaticServerSetProvider(ServerSetProvider):
"""A ServerSetProvider that returns a static set of servers."""
def __init__(self, servers):
"""Initializes the set with a static list of servers.
Args:
servers - An iterable of servers.
"""
self._servers = servers
def Initialize(self, on_join, on_leave):
pass
def Close(self):
pass
def GetServers(self):
return self._servers
class ZooKeeperServerSetProvider(ServerSetProvider):
"""A ServerSetProvider that tracks servers in zookeeper."""
from ..loadbalancer.zookeeper import ServerSet
from kazoo.client import KazooClient
from kazoo.handlers.gevent import SequentialGeventHandler
def __init__(self,
zk_servers_or_client,
zk_path,
zk_timeout=30.0,
member_prefix='member_',
member_factory=None,
endpoint_name=None):
"""
Args:
zk_servers_or_client - Either a comma separated list of host:port ZK
servers, or an instance of a KazooClient.
zk_path - The ZK path to discover services from.
zk_timeout - The timeout to set on the ZK client. Ignored if
zk_servers_or_client is a KazooClient.
member_prefix - The prefix to match for children in the watched znode.
member_factory - A callable to produce a Member object from a znode.
endpoint_name - The name of the endpoint on the Member. If None, the
endpoint is the serviceEndpoint in the znode data,
otherwise it is taken from the additionalEndpoints data
in the znode.
"""
self._zk_client = None
if isinstance(zk_servers_or_client, string_types):
self._zk_client = self._GetZooKeeperClient(zk_servers_or_client, zk_timeout)
self._owns_zk_client = True
else:
self._zk_client = zk_servers_or_client
self._owns_zk_client = False
self._zk_path = zk_path
self._server_set = None
self._member_prefix = member_prefix
self._member_factory = member_factory
self._endpoint_name = endpoint_name
def _GetZooKeeperClient(self, zk_servers, zk_timeout):
if self._zk_client:
return self._zk_client
else:
return self.KazooClient(
hosts=zk_servers,
timeout=zk_timeout,
handler=self.SequentialGeventHandler(),
randomize_hosts=True)
def _MemberFilter(self, node):
return node.startswith(self._member_prefix)
def Initialize(self, on_join, on_leave):
self._zk_client.start()
self._server_set = self.ServerSet(
self._zk_client, self._zk_path, on_join, on_leave,
self._MemberFilter, self._member_factory)
def Close(self):
if self._server_set:
self._server_set.stop()
if self._owns_zk_client and self._zk_client:
self._zk_client.stop()
def GetServers(self):
if not self._server_set:
raise Exception('Initialize() must be called first.')
return self._server_set.get_members()
@property
def endpoint_name(self):
return self._endpoint_name
|
/scales-rpc-2.0.1.tar.gz/scales-rpc-2.0.1/scales/loadbalancer/serverset.py
| 0.849878 | 0.200127 |
serverset.py
|
pypi
|
from __future__ import absolute_import
from thrift.protocol.TJSONProtocol import TJSONProtocol, JTYPES, CTYPES
from thrift.Thrift import TType
from ..compat import BytesIO
try:
import simplejson as json
except ImportError:
import json
class TFastJSONProtocol(TJSONProtocol):
class InitContext(object):
"""A context for initializing the reader"""
def __init__(self, msg):
self.msg = msg
def read(self):
return self.msg
def write(self, obj):
self.msg = obj
def get_buffer(self):
return self.msg
class ArrayContext(object):
"""A context for reading from an array."""
def __init__(self, array):
assert isinstance(array, list)
self.arr = array
self.idx = 0
def read(self):
idx = self.idx
self.idx += 1
return self.arr[idx]
def write(self, val):
self.arr.append(val)
def get_buffer(self):
return self.arr
class ObjectContext(object):
"""A context for reading from an object."""
def __init__(self, obj):
assert isinstance(obj, dict)
self.obj = list(obj.items())
self.field = None
self.idx = 0
def readFieldBegin(self):
id = 0
if self.idx >= len(self.obj):
ttype = TType.STOP
else:
obj = self.obj[self.idx]
id = int(obj[0])
assert len(obj[1]) == 1
        self.field = list(obj[1].items())[0]
ttype = JTYPES[self.field[0]]
return (None, ttype, id)
def readFieldEnd(self):
self.idx += 1
def writeFieldBegin(self, name, ttype, id):
self.field = [
str(id),
[CTYPES[ttype], None]
]
def writeFieldEnd(self):
self.field[1] = dict([self.field[1]])
self.obj.append(self.field)
self.field = None
def read(self):
return self.field[1]
def write(self, obj):
self.field[1][1] = obj
def get_buffer(self):
return dict(self.obj)
class MapContext(object):
"""A context for reading from a map."""
def __init__(self, obj):
self.map = list(obj.items())
self.map_idx = 0
self.idx = 0
self._read_next()
def _read_next(self):
if self.map_idx < len(self.map):
self.obj = self.map[self.map_idx]
self.idx = 0
self.map_idx += 1
def read(self):
idx, self.idx = self.idx, self.idx + 1
o = self.obj[idx]
if self.idx == 2:
self._read_next()
return o
def write(self, value):
if self.idx == 0:
self.map.append([None, None])
self.map[-1][self.idx] = value
self.idx += 1
if self.idx == 2:
self.idx = 0
def get_buffer(self):
return dict(self.map)
def __init__(self, trans):
TJSONProtocol.__init__(self, trans)
self._stack = []
self._ctx = None
def _StartReadContext(self, ctx_type):
next_ctx = ctx_type(self._ctx.read())
self._stack.append(self._ctx)
self._ctx = next_ctx
def _StartWriteContext(self, ctx_type, init):
next_ctx = ctx_type(init)
self._stack.append(self._ctx)
self._ctx = next_ctx
def readJSONArrayStart(self):
self._StartReadContext(self.ArrayContext)
def readJSONObjectStart(self):
self._StartReadContext(self.ObjectContext)
def _EndReadContext(self):
self._ctx = self._stack.pop()
readMessageEnd = _EndReadContext
readJSONObjectEnd = _EndReadContext
readJSONArrayEnd = _EndReadContext
def _EndWriteContext(self):
curr = self._ctx.get_buffer()
self._ctx = self._stack.pop()
self._ctx.write(curr)
def _readTransport(self):
js = BytesIO()
while True:
data = self.trans.read(4096)
if not data:
break
js.write(data)
return js.getvalue()
def readMessageBegin(self):
if hasattr(self.trans, 'getvalue'):
js = self.trans.getvalue()
else:
js = self._readTransport()
message = json.loads(js)
self._ctx = self.InitContext(message)
self._stack = []
return TJSONProtocol.readMessageBegin(self)
def readJSONString(self, skipContext):
return self._ctx.read()
def readJSONInteger(self):
return int(self._ctx.read())
def readJSONDouble(self):
return float(self._ctx.read())
def readFieldBegin(self):
return self._ctx.readFieldBegin()
def readFieldEnd(self):
return self._ctx.readFieldEnd()
def readMapBegin(self):
self.readJSONArrayStart()
keyType = JTYPES[self.readJSONString(False)]
valueType = JTYPES[self.readJSONString(False)]
size = self.readJSONInteger()
self._StartReadContext(self.MapContext)
return (keyType, valueType, size)
def writeMessageBegin(self, name, request_type, seqid):
self._StartWriteContext(self.InitContext, None)
TJSONProtocol.writeMessageBegin(self, name, request_type, seqid)
def writeMessageEnd(self):
TJSONProtocol.writeMessageEnd(self)
# The thrift JSON parser is very sensitive, it can't handle spaces after
# commas or colons <table flip emoji>
json.dump(self._ctx.get_buffer(), self.trans, separators=(',',':'))
def writeJSONArrayStart(self):
self._StartWriteContext(self.ArrayContext, [])
def writeJSONArrayEnd(self):
self._EndWriteContext()
def writeJSONObjectStart(self):
self._StartWriteContext(self.ObjectContext, {})
def writeJSONObjectEnd(self):
self._EndWriteContext()
def writeFieldBegin(self, name, ttype, id):
self._ctx.writeFieldBegin(name, ttype, id)
def writeFieldEnd(self):
self._ctx.writeFieldEnd()
def writeMapBegin(self, ktype, vtype, size):
self.writeJSONArrayStart()
self.writeJSONString(CTYPES[ktype])
self.writeJSONString(CTYPES[vtype])
self.writeJSONNumber(size)
self._StartWriteContext(self.MapContext, {})
def writeJSONString(self, number):
self._ctx.write(number)
writeJSONNumber = writeJSONString
class TFastJSONProtocolFactory(object):
def getProtocol(self, trans):
return TFastJSONProtocol(trans)
|
/scales-rpc-2.0.1.tar.gz/scales-rpc-2.0.1/scales/thrift/protocol.py
| 0.707607 | 0.192027 |
protocol.py
|
pypi
|
import matplotlib.pyplot as plt
import numpy as np
def _get_fret_number(string, note):
""" Returns the fret number of a given note on a given string """
pitches = [('A', 0), ('Bbb', 0), ('G##', 0),
('A#', 1), ('Bb', 1), ('Cbb', 1),
('B', 2), ('Cb', 2), ('A##', 2),
('B#', 3), ('C', 3), ('Dbb', 3),
('C#', 4), ('Db', 4), ('B##', 4),
('D', 5), ('C##', 5), ('Ebb', 5),
('D#', 6), ('Eb', 6), ('Fbb', 6),
('E', 7), ('Fb', 7), ('D##', 7),
('E#', 8), ('F', 8), ('Gbb', 8),
('F#', 9), ('Gb', 9), ('E##', 9),
('G', 10), ('F##', 10), ('Abb', 10),
('G#', 11), ('Ab', 11)]
for (pitch, index) in pitches:
if string.lower() == pitch.lower():
start = index
if note.lower() == pitch.lower():
stop = index
fret = 12 - (start - stop)
return fret % 12
class Scales:
""" A tool to generate stringed instrument visual aids """
def __init__(self, scale: list, strings=None, title=None):
"""
Constructs a Scales object
:param scale: the notes to draw, as a list of strings
:param strings: the tuning of the instrument, defaults to standard 6 string guitar
:param title: title of the image
"""
if strings is None: # Standard tuning
self.strings = ['E', 'A', 'D', 'G', 'B', 'E']
else:
self.strings = strings
self.scale = scale
self.title = title
def _draw_fretboard(self, x: float, y: float, start: int):
"""
Draws a scale on a full fretboard in preparation for the draw function
:param x: width of figure
:param y: height of figure
"""
fig, ax = plt.subplots(figsize=(x, y))
plt.subplots_adjust(left=0.04, right=.98, top=.85, bottom=.1)
ax.set_axis_off()
ax.set_title(self.title)
marker_style = dict(color='tab:blue', linestyle=':', marker='o', markersize=15, markerfacecoloralt='tab:red')
# Draw fretboard
markers = [3, 5, 7, 9, 12, 15, 17]
total_frets = 24
for n, string in enumerate(self.strings):
# Draw string labels and string lines
ax.text(-0.4 + start, n, string.capitalize(), horizontalalignment='center', verticalalignment='center')
ax.plot(n * np.ones(total_frets), linestyle='solid', color='black')
plt.axvline(-0.4 + start, color='white', linewidth=11)
# Fill fretboard with empty markers and label fret numbers
for fret in range(1, total_frets):
ax.plot(fret, n, fillstyle='none', **marker_style)
number_color = 'black'
if fret in markers:
plt.axvline(fret, color='black')
number_color = 'red'
ax.text(fret, -0.75, fret, color=number_color, horizontalalignment='center', verticalalignment='center')
# Draw notes
for i in range(len(self.strings)):
for j in range(len(self.scale)):
# Determine fill style, unique on root
fillstyle = 'full'
if self.scale[j] == self.scale[0]:
fillstyle = 'top'
# Plot notes
fret = _get_fret_number(self.strings[i], self.scale[j])
ax.plot(fret, i, fillstyle=fillstyle, **marker_style)
ax.plot(fret + 12, i, fillstyle=fillstyle, **marker_style)
def draw(self, start=0, stop=15):
"""
Draws the fretboard and notes of the scale
Roots are marked with red
:param start: fret to start drawing at, defaults to 0
:param stop: fret to stop drawing at, defaults to 15
"""
# Get range of frets to zoom into
distance = abs(start - stop)
# Size of figure
x = distance * .5 + 1
y = len(self.strings) / 2.7
self._draw_fretboard(x, y, start)
# Zoom
pad = 0.5
left = start - pad
right = stop + pad
bottom = 0 - pad
top = len(self.strings) - pad
plt.axis([left, right, bottom, top])
plt.show()
plt.close()
# Test code
if __name__ == '__main__':
a_mixo = ['A', 'B', 'C#', 'D', 'E', 'F#', 'G']
six_string = Scales(title='A Mixolydian', scale=a_mixo)
six_string.draw(start=11, stop=15)
six_string.draw(start=4, stop=8)
e_mixo = ['E', 'F#', 'G#', 'A', 'B', 'C#', 'D']
Scales(title='E Mixolydian on 4 string bass', strings=['E', 'A', 'D', 'G'], scale=e_mixo).draw()
    gb_dorian = ['Gb', 'Ab', 'Bbb', 'Cb', 'Db', 'Eb', 'Fb']
flats = Scales(title='Gb Dorian', scale=gb_dorian)
# flats.draw(stop=5)
c_major = ['C', 'D', 'E', 'F', 'G', 'A', 'B']
ukulele = Scales(title='C Major on Ukulele', strings=['G', 'C', 'E', 'A'], scale=c_major)
ukulele.draw()
g_chord = ['G', 'B', 'D']
# Scales(title='Open G', scale=g_chord).draw(stop=3)
e_minor = ['E', 'F#', 'G', 'A', 'B', 'C', 'D']
bass = Scales(title='E Minor on 4-string bass', strings=['E', 'A', 'D', 'G'], scale=e_minor)
# bass.draw()
# A blank sheet to print and write in your own scales
# Scales([]).draw()
# Chord sheet
# Scales([]).draw(stop=3)
|
/scales.py-0.1.4.tar.gz/scales.py-0.1.4/scales.py
| 0.666931 | 0.645567 |
scales.py
|
pypi
|
# Scaleup
Scale-up Suite Automation. This library provides a set of high level methods for scripting automation of Dynochem(R) and Reaction Lab(TM) models through the ```RunScript Automation``` interface. These methods wrap the COM calls to Scale-up Suite's ```ModelAutomation.exe``` allowing end users to write shorter and simpler scripts and focus on what they want to achieve rather than the mechanics of doing it.
Contact your [Scale-up Systems](https://www.scale-up.com) representative to obtain access to the ```RunScript Automation``` product to which this library connects.
## Dependencies
* [pywin32](https://github.com/mhammond/pywin32)
* [pandas](https://pypi.org/project/pandas/)
* [openpyxl](https://pypi.org/project/openpyxl/)
## Installation
```bash
pip install scaleup
```
# scaleup package
The `scaleup` package contains the routines for invoking automation and helper routines for setting up runs, along with classes representing the result of a script invocation and the results of its individual steps.
## scaleup.scripts
### scaleup.scripts.ScriptResult
This class contains the result for a script run.
* **Fields**
  * **error_message** (*str*) -- If an error occurred when running the script then this field will contain the error message. Otherwise None or empty.
* **steps** (*list*) -- This contains a list of results for the steps executed. One step result per step.
### scaleup.scripts.SimulationStepResult
This contains the result of a Simulation step. It contains a *[pandas.DataFrame](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.html)* which holds the results for each scenario, along with (if requested) a list of `SimulationProfile` instances which hold the profiles for each scenario.
* **Fields**
* **type** (*str*) -- 'SIMULATION'
* **endpoints** (*DataFrame*) -- DataFrame containing the results for the scenarios.
* **profiles** (*list*) -- list of `SimulationProfile` instances, one per scenario (if requested).
### scaleup.scripts.SimulationProfile
Details of the scenario profile
* **Fields**
* **scenario_name** (*str*) -- the name of the scenario
* **solver_steps** (*float*) -- number of steps
* **total_time** (*float*) -- total time (seconds)
* **matrix_size** (*float*) -- matrix size
* **plotted_points** (*float*) -- plotted points
* **integration_method** (*str*) -- integration method
  * **accuracy** (*float*) -- accuracy
* **run_time** (*float*) -- run time (seconds)
* **results** (*DataFrame*) -- profile values for the scenario
### scaleup.scripts.FittingStepResult
Contains the result of a Fitting Simulation step.
* **Fields**
* **type** (*str*) -- 'FITTING'
* **report** (*list*) -- The Fitting report
### scaleup.scripts.OptimizationStepResult
Contains the result of a Optimization Simulation step.
* **Fields**
* **type** (*str*) -- 'OPTIMIZATION'
* **report** (*list*) -- The Optimization report
### scaleup.scripts.VerificationStepResult
Contains the result of a Verification Simulation step.
* **Fields**
* **type** (*str*) -- 'VERIFICATION'
* **report** (*list*) -- The Verification report
---
## scaleup.scaleup module
Short-cut routines for quick running of steps
### scaleup.scaleup.run_simulation(file_name: str, scenarios=[], return_profiles: bool = False, show_progress=True, alt_reader=True)
Runs a Simulation step
* **Parameters**
* **file_name** (*str*) -- name of the model file
* **scenarios** (*list**, **optional*) -- String representations of the scenarios to be run. To run scenarios from the model, pass in the scenario name. If empty, runs all scenarios in the model, defaults to []
* **return_profiles** (*bool**, **optional*) -- Return the profiles as well as the endpoints, defaults to False
* **show_progress** (*bool**, **optional*) -- Show the progress indicator when running, defaults to True
* **alt_reader** (*bool**, **optional*) -- Use the alternate reader for Excel Models (i.e. not Excel Interop), defaults to True
* **Returns**
ScriptResult object
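A minimal usage sketch (the model file and scenario name are placeholders):
```python
from scaleup.scaleup import run_simulation

result = run_simulation('my_model.xlsx', scenarios=['Scenario 1'],
                        return_profiles=True)
if result.error_message:
    print(result.error_message)
else:
    step = result.steps[0]           # SimulationStepResult
    print(step.endpoints)            # DataFrame of scenario endpoints
    for profile in step.profiles:    # one SimulationProfile per scenario
        print(profile.scenario_name, profile.run_time)
```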
### scaleup.scaleup.run_fitting(file_name: str, scenarios, parameters, fit_to_each: bool = False, show_progress: bool = True, alt_reader: bool = True, update_to_source: bool = False)
Runs a Fitting step
* **Parameters**
* **file_name** (*str*) -- name of the model file
* **scenarios** (*list*) -- the names of the scenarios to be fitted
* **parameters** (*list*) -- string representations of the parameters to be fitted
* **fit_to_each** (*bool**, **optional*) -- Whether to fit to each scenario, defaults to False
  * **show_progress** (*bool**, **optional*) -- Show the UI when running, defaults to True
* **alt_reader** (*bool**, **optional*) -- Use the alternate reader for Excel Models (i.e. not Excel Interop), defaults to True
* **update_to_source** (*bool**, **optional*) -- Write back the fitted values to the original model, defaults to False
* **Returns**
ScriptResult object
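For example, combined with `create_fitting_parameter` from `scaleup.script_utils` (the model, scenario, and parameter names are placeholders):
```python
from scaleup.scaleup import run_fitting
from scaleup.script_utils import create_fitting_parameter

k1 = create_fitting_parameter('k1', '1/s', initial_value=0.1,
                              max_value=10.0, min_value=0.001, fit_to_log=True)
result = run_fitting('my_model.xlsx', scenarios=['Run 1', 'Run 2'],
                     parameters=[k1], fit_to_each=False)
print(result.steps[0].report)  # the fitting report
```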
### scaleup.scaleup.run_optimization(file_name: str, scenario: str, factors, responses, show_progress: bool = True, alt_reader: bool = True, update_to_source: bool = False)
Runs an Optimization step
* **Parameters**
* **file_name** (*str*) -- name of the model file
* **scenario** (*str*) -- The name of the scenario to be optimized
  * **factors** (*list*) -- string representations of the factors to be varied
* **responses** (*list*) -- string representations of the responses to optimize
  * **show_progress** (*bool**, **optional*) -- Show the UI when running, defaults to True
* **alt_reader** (*bool**, **optional*) -- Use the alternate reader for Excel Models (i.e. not Excel Interop), defaults to True
* **update_to_source** (*bool**, **optional*) -- Write back the fitted values to the original model, defaults to False
* **Returns**
ScriptResult object
### scaleup.scaleup.run_verification(file_name: str, scenarios=[], verification_type: int = 1, show_progress=True, alt_reader=True)
Runs a Verification step
* **Parameters**
* **file_name** (*str*) -- name of the model file
* **scenarios** (*list**, **optional*) -- Names of the scenarios to be run. If empty, runs all scenarios in the model, defaults to []
* **verification_type** (*int**, **optional*) -- Verification type. 1 for full report, 2 for residuals, 3 for SSQs, defaults to 1
  * **show_progress** (*bool**, **optional*) -- Show the UI when running, defaults to True
* **alt_reader** (*bool**, **optional*) -- Use the alternate reader for Excel Models (i.e. not Excel Interop), defaults to True
* **Returns**
ScriptResult object
### scaleup.scaleup.run_multiple_steps(file_name: str, steps, show_progress: bool = True, alt_reader: bool = True, update_to_source: bool = False)
Run multiple steps in sequence
* **Parameters**
* **file_name** (*str*) -- name of the model file
* **steps** (*list*) -- the steps to run
  * **show_progress** (*bool**, **optional*) -- Show the UI when running, defaults to True
* **alt_reader** (*bool**, **optional*) -- Use the alternate reader for Excel Models (i.e. not Excel Interop), defaults to True
* **update_to_source** (*bool**, **optional*) -- Write back the fitted values to the original model, defaults to False
* **Returns**
ScriptResult object
---
## scaleup.dynochem_automation module
Helper routines for COM automation of Dynochem Runtime scripting
### scaleup.dynochem_automation.create_fitting_step(scenarios, parameters, fitEachScenario: bool, updateToSource: bool = False)
Creates a Fitting step
* **Parameters**
* **scenarios** (*list*) -- The scenarios to be fitted
* **parameters** (*list*) -- The parameters to be fitted
* **fitEachScenario** (*bool*) -- Fit to each scenario or not
* **updateToSource** (*bool**, **optional*) -- Write the fitted values back to the model, defaults to False
* **Returns**
Script step
* **Return type**
COM Object
### scaleup.dynochem_automation.create_optimization_step(scenarioName: str, factors, responses, updateToSource: bool = False)
Creates an Optimization step
* **Parameters**
* **scenarioName** (*str*) -- The name of the scenario to be used
* **factors** (*list*) -- The factors to vary
* **responses** (*list*) -- The Responses to be optimized
* **updateToSource** (*bool**, **optional*) -- Whether the optimized scenario and data sheet are written back to the model, defaults to False
* **Returns**
Script step
* **Return type**
COM Object
### scaleup.dynochem_automation.create_script_parameters(name: str, file_name: str, options: str = '', dataSheets=None, injected_headers=None)
Creates a script parameters class
* **Parameters**
* **name** (*str*) -- Name of the automation job
* **file_name** (*str*) -- Name of the model file
* **options** (*str**, **optional*) -- Options e.g. Show/Hide the UI, defaults to ''
* **dataSheets** (*list**, **optional*) -- Custom Datasheets, defaults to None
  * **injected_headers** (*list**, **optional*) -- Additional headers for scenarios (e.g. process sheet parameters), defaults to None
* **Raises**
**ValueError** -- If the model is not of a supported type (must be .xls? or .rxm)
* **Returns**
COM object of script parameters
* **Return type**
COM object
### scaleup.dynochem_automation.create_simulation_step(scenarios=[], writeProfiles: bool = False, returnProfiles: bool = False, calc_wssq = False)
Creates a simulation step
* **Parameters**
* **scenarios** (*list**, **optional*) -- The scenarios to be run. To run existing scenarios, pass in their names. If empty, all are run, defaults to []
  * **writeProfiles** (*bool**, **optional*) -- Write the profiles to a file, defaults to False
* **returnProfiles** (*bool**, **optional*) -- return the profiles as well as the endpoints, defaults to False
* **calc_wssq** (*bool**, **optional*) -- include the calculated WSSQ as a return column, defaults to False
* **Returns**
Script step
* **Return type**
COM Object
### scaleup.dynochem_automation.create_verification_step(scenarios=[], verificationType: int = 1)
Creates a Verification step
* **Parameters**
* **scenarios** (*list**, **optional*) -- The names of the scenarios to be run. If empty, all scenarios are run, defaults to []
* **verificationType** (*int**, **optional*) -- The Verification Type. 1 for full report, 2 for residuals, 3 for SSQs, defaults to 1
* **Returns**
Script step
* **Return type**
COM Object
### scaleup.dynochem_automation.run_script(file_name: str, script_name: str, steps, options: str = '', dataSheets=None, injected_headers=None)
Runs a script
* **Parameters**
* **file_name** (*str*) -- The file name for the model
* **script_name** (*str*) -- The name of the job - displayed in the UI if visible
* **steps** (*list*) -- The steps to be executed
* **options** (*str**, **optional*) -- Option string for run (Show/Hide UI, Use Excel interop or not for Excel Models), defaults to ''
* **dataSheets** (*list**, **optional*) -- Replacement Datasheets to inject data into the model, defaults to None
  * **injected_headers** (*list**, **optional*) -- Additional headers for scenarios (e.g. process sheet parameters), defaults to None
* **Returns**
Script Result
* **Return type**
COM Object
### scaleup.dynochem_automation.set_datasheets(script, datasheets)
## scaleup.dynochem_automation_defs module
COM Object creation Helpers
### scaleup.dynochem_automation_defs.create_fitting_parameters_w32()
Create a fitting parameters object
* **Returns**
COM Object with fitting specific parameters
* **Return type**
COM Object
### scaleup.dynochem_automation_defs.create_model_automate_w32()
Creates ModelAutomate object
* **Returns**
ModelAutomate COM object for running scripts
* **Return type**
COM Object
### scaleup.dynochem_automation_defs.create_optimization_parameters_w32()
Create an optimization parameters object
* **Returns**
COM Object with optimization specific parameters
* **Return type**
COM Object
### scaleup.dynochem_automation_defs.create_script_parameters_w32()
Creates Script parameters COM Object
* **Returns**
Script parameters COM Object
* **Return type**
COM Object
### scaleup.dynochem_automation_defs.create_script_step_w32(stepType: str)
Create a script step object
* **Parameters**
**stepType** (*str*) -- Creates a script step
* **Returns**
[description]
* **Return type**
[type]
### scaleup.dynochem_automation_defs.create_simulation_parameters_w32()
Create a simulation parameters object
* **Returns**
COM Object with simulation specific parameters
* **Return type**
COM Object
### scaleup.dynochem_automation_defs.create_verification_parameters_w32()
Create a verification parameters object
* **Returns**
COM Object with verification specific parameters
* **Return type**
COM Object
## scaleup.script_utils module
Various utilities for using with scripts
### _class_ scaleup.script_utils.OptTarget(value)
Bases: `enum.Enum`
Enumeration for Optimization Target
#### Max
#### Min
#### Target
### scaleup.script_utils.create_fitting_parameter(parameter_name: str, unit: str, initial_value: float, max_value: float, min_value: float, fit_to_log: bool = False)
Create a fitting parameter
* **Parameters**
* **parameter_name** (*str*) -- name of the parameter
* **unit** (*str*) -- unit of measure for the parameter
  * **initial_value** (*float*) -- initial value
* **max_value** (*float*) -- max value
* **min_value** (*float*) -- min value
* **fit_to_log** (*bool**, **optional*) -- Fit to the log of the value, defaults to False
* **Returns**
Formatted parameter details for use with fitting step
* **Return type**
str
### scaleup.script_utils.create_max_optimization_response(name: str, weighting: float = 1.0)
Creates a 'Maximize' target for a response
* **Parameters**
* **name** (*str*) -- name of the response to maximize
* **weighting** (*float**, **optional*) -- Weighting for this parameter, defaults to 1.0
* **Returns**
Formatted response for use in the Optimization step
* **Return type**
str
### scaleup.script_utils.create_min_optimization_response(name: str, weighting: float = 1.0)
Creates a 'Minimize' target for a response
* **Parameters**
* **name** (*str*) -- name of the response to minimize
* **weighting** (*float**, **optional*) -- Weighting for this parameter, defaults to 1.0
* **Returns**
Formatted response for use in the Optimization step
* **Return type**
str
### scaleup.script_utils.create_optimization_factor(name: str, unit: str, initial_value: float, min=None, max=None)
Create optimization Factor
* **Parameters**
* **name** (*str*) -- Factor name
* **unit** (*str*) -- factor unit
* **initial_value** (*float*) -- initial value
* **min** (*float**, **optional*) -- min value for factor. If not set, uses half of the initial value
* **max** (*float**, **optional*) -- max value for factor. If not set, uses twice the initial value
* **Returns**
Formatted factor for use in optimization step
* **Return type**
str
### scaleup.script_utils.create_optimization_response(name: str, target: scaleup.script_utils.OptTarget, value: float = 0.0, weighting: float = 1.0)
Create a response for the optimization step
* **Parameters**
* **name** (*str*) -- name of the response
* **target** (*OptTarget*) -- Type of response. Min|Max|Target
* **value** (*float**, **optional*) -- Target Value, defaults to 0.0
* **weighting** (*float**, **optional*) -- Weighting for the parameter, defaults to 1.0
* **Returns**
Formatted response for use in the Optimization step
* **Return type**
str
### scaleup.script_utils.create_script_option_string(showInUi: bool = True, altReader: bool = False)
Generates the script option string
* **Parameters**
* **showInUi** (*bool**, **optional*) -- Show the Automation UI when running, defaults to True
* **altReader** (*bool**, **optional*) -- Use the alternate file reader for Excel models (i.e. not Excel Interop), defaults to False
* **Returns**
Formatted string of options
* **Return type**
str
### scaleup.script_utils.create_simulation_scenario(scen_name: str, ds_name: str, values=[])
Creates a simulation scenario for use with Simulation and Verification steps.
* **Parameters**
* **scen_name** (*str*) -- scenario name
* **ds_name** (*str*) -- datasheet name
* **values** (*list**, **optional*) -- parameter values for the scenario, defaults to []
* **Returns**
String representation of the scenario for use with Simulation and Verification steps.
* **Return type**
str
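For example (the scenario name, datasheet name, and values are illustrative):
```python
from scaleup.script_utils import create_simulation_scenario
from scaleup.scaleup import run_simulation

scen = create_simulation_scenario('High temp', 'DataSheet1', values=[80.0, 1.5])
result = run_simulation('my_model.xlsx', scenarios=[scen])
```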
### scaleup.script_utils.create_target_optimization_response(name: str, value: float, weighting: float = 1.0)
Creates a 'Target' target for a response
* **Parameters**
* **name** (*str*) -- name of the response
* **value** (*float*) -- Target value for the response
* **weighting** (*float**, **optional*) -- Weighting for this parameter, defaults to 1.0
* **Returns**
Formatted response for use in the Optimization step
* **Return type**
str
### scaleup.script_utils.escape_name(scenName: str)
Escapes a name containing tilde (~) characters, replacing each ~ with &TLD;
This is used when generating tilde delimited strings to safely send names which contains tildes.
* **Parameters**
**scenName** (*str*) -- name to be escaped
* **Returns**
Escaped version of the input string
* **Return type**
str
### scaleup.script_utils.get_file_path(file_name: str, folder_name: Optional[str] = None)
Returns the path of a file relative to the current folder
* **Parameters**
* **file_name** (*str*) -- the name of the file
* **folder_name** (*str**, **optional*) -- sub-folder in which the file can be found, defaults to None
* **Returns**
Full path to the file
* **Return type**
str
### scaleup.script_utils.get_run_type(file_name: str)
Ascertains the run type of a file based on the extension.
- Excel models "*.xls(?)" are 1
- Reaction Lab Models "*.rxm" are 2
- All others return 0
* **Parameters**
**file_name** (*str*) -- name of the file
* **Returns**
0, 1, or 2, depending on the file extension
* **Return type**
int
### scaleup.script_utils.write_result(result, writer=None, separator: str = ',')
Write the contents of a script result.
A script result can contain an error message or an array of step results.
If it contains an error message, then this is written.
If it contains step results, each result (each of which may contain multiple pages) is written as
lines of values separated by the passed-in separator (a comma by default).
If no 'writer' method is passed in, the output is printed to the console.
* **Parameters**
* **result** (*COM Object*) -- Script Result object from RunScript()
* **writer** (*method*) -- Method which takes in a string, if not set, 'print' is used.
### scaleup.script_utils.write_result_to_file(result, file_name: str, append: bool = True)
Write the contents of a script result to a file.
A script result can contain either an error message or an array of step results.
If it contains an error message, then the message is written.
If it contains step results, each result (each of which may contain multiple pages) is written as
lines of comma-separated values.
* **Parameters**
* **result** (*COM Object*) -- Script Result object from RunScript()
* **file_name** (*str*) -- name of the file to write to
* **append** (*bool*) -- Append the output to the file (or overwrite). Default is True (append)
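For example, to overwrite any existing file rather than append (hypothetical usage):

    script_utils.write_result_to_file(result, 'results.csv', append=False)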
## Module contents
|
/scaleup-1.0.27.tar.gz/scaleup-1.0.27/README.md
| 0.829561 | 0.911219 |
README.md
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ListCredentialsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
ID_ASC = "id_asc"
ID_DESC = "id_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class ListNamespacesRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
UPDATED_AT_ASC = "updated_at_asc"
UPDATED_AT_DESC = "updated_at_desc"
ID_ASC = "id_asc"
ID_DESC = "id_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
PROJECT_ID_ASC = "project_id_asc"
PROJECT_ID_DESC = "project_id_desc"
def __str__(self) -> str:
return str(self.value)
class NamespaceProtocol(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
NATS = "nats"
SQS_SNS = "sqs_sns"
def __str__(self) -> str:
return str(self.value)
@dataclass
class Credential:
"""
Credential.
"""
id: str
"""
ID of the credentials.
"""
name: str
"""
Name of the credentials.
"""
namespace_id: str
"""
Namespace containing the credentials.
"""
protocol: NamespaceProtocol
"""
Protocol associated with the credentials.
"""
nats_credentials: Optional[CredentialNATSCredsFile]
"""
Object containing the credentials, if the credentials are for a NATS namespace.
One-of ('credential_type'): at most one of 'nats_credentials', 'sqs_sns_credentials' could be set.
"""
sqs_sns_credentials: Optional[CredentialSQSSNSCreds]
"""
Object containing the credentials and their metadata, if the credentials are for an SQS/SNS namespace.
One-of ('credential_type'): at most one of 'nats_credentials', 'sqs_sns_credentials' could be set.
"""
@dataclass
class CredentialNATSCredsFile:
"""
Credential.nats creds file.
"""
content: str
"""
Raw content of the NATS credentials file.
"""
@dataclass
class CredentialSQSSNSCreds:
"""
Credential.sqssns creds.
"""
access_key: str
"""
Access key ID.
"""
secret_key: Optional[str]
"""
Secret key ID.
"""
permissions: Optional[Permissions]
"""
Permissions associated with these credentials.
"""
@dataclass
class CredentialSummary:
"""
Credential summary.
"""
id: str
"""
ID of the credentials.
"""
name: str
"""
Name of the credentials.
"""
namespace_id: str
"""
Namespace containing the credentials.
"""
protocol: NamespaceProtocol
"""
Protocol associated with the credentials.
"""
sqs_sns_credentials: Optional[CredentialSummarySQSSNSCreds]
"""
Object containing the credentials and their metadata, if the credentials are for an SQS/SNS namespace.
One-of ('credential_type'): at most one of 'sqs_sns_credentials' could be set.
"""
@dataclass
class CredentialSummarySQSSNSCreds:
"""
Credential summary.sqssns creds.
"""
access_key: str
"""
Access key ID.
"""
permissions: Optional[Permissions]
"""
Permissions associated with these credentials.
"""
@dataclass
class ListCredentialsResponse:
"""
List credentials response.
"""
total_count: int
"""
Total count of existing credentials (matching any filters specified).
"""
credentials: List[CredentialSummary]
"""
Credentials on this page.
"""
@dataclass
class ListNamespacesResponse:
"""
List namespaces response.
"""
total_count: int
"""
Total count of existing namespaces (matching any filters specified).
"""
namespaces: List[Namespace]
"""
Namespaces on this page.
"""
@dataclass
class Namespace:
"""
Namespace.
"""
id: str
"""
Namespace ID.
"""
name: str
"""
Namespace name.
"""
endpoint: str
"""
Endpoint of the service matching the namespace's protocol.
"""
protocol: NamespaceProtocol
"""
Namespace protocol.
"""
project_id: str
"""
Project ID of the Project containing the namespace.
"""
region: Region
"""
Region where the namespace is deployed.
"""
created_at: Optional[datetime]
"""
Namespace creation date.
"""
updated_at: Optional[datetime]
"""
Namespace last modification date.
"""
@dataclass
class Permissions:
"""
Permissions.
"""
can_publish: Optional[bool]
"""
Defines whether the credentials bearer can publish messages to the service (send messages to SQS queues or publish to SNS topics).
"""
can_receive: Optional[bool]
"""
Defines whether the credentials bearer can receive messages from the service.
"""
can_manage: Optional[bool]
"""
Defines whether the credentials bearer can manage the associated resources (SQS queues or SNS topics or subscriptions).
"""
@dataclass
class ListNamespacesRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
organization_id: Optional[str]
"""
Include only namespaces in this Organization.
"""
project_id: Optional[str]
"""
Include only namespaces in this Project.
"""
page: Optional[int]
"""
Page number to return.
"""
page_size: Optional[int]
"""
Maximum number of namespaces to return per page.
"""
order_by: Optional[ListNamespacesRequestOrderBy]
"""
Order in which to return results.
"""
@dataclass
class CreateNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
name: Optional[str]
"""
Namespace name.
"""
protocol: Optional[NamespaceProtocol]
"""
Namespace protocol. You must specify a valid protocol (and not `unknown`) to avoid an error.
"""
project_id: Optional[str]
"""
Project containing the Namespace.
"""
@dataclass
class UpdateNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
ID of the Namespace to update.
"""
name: Optional[str]
"""
Namespace name.
"""
@dataclass
class GetNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
ID of the Namespace to get.
"""
@dataclass
class DeleteNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
ID of the namespace to delete.
"""
@dataclass
class CreateCredentialRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
Namespace containing the credentials.
"""
name: Optional[str]
"""
Name of the credentials.
"""
permissions: Optional[Permissions]
"""
Permissions associated with these credentials.
"""
@dataclass
class DeleteCredentialRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
credential_id: str
"""
ID of the credentials to delete.
"""
@dataclass
class ListCredentialsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: Optional[str]
"""
Namespace containing the credentials.
"""
page: Optional[int]
"""
Page number to return.
"""
page_size: Optional[int]
"""
Maximum number of credentials to return per page.
"""
order_by: Optional[ListCredentialsRequestOrderBy]
"""
Order in which to return results.
"""
@dataclass
class UpdateCredentialRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
credential_id: str
"""
ID of the credentials to update.
"""
name: Optional[str]
"""
Name of the credentials.
"""
permissions: Optional[Permissions]
"""
Permissions associated with these credentials.
"""
@dataclass
class GetCredentialRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
credential_id: str
"""
ID of the credentials to get.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/mnq/v1alpha1/types.py
| 0.891457 | 0.233226 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
NamespaceProtocol,
Credential,
CredentialNATSCredsFile,
CredentialSQSSNSCreds,
CredentialSummary,
CredentialSummarySQSSNSCreds,
ListCredentialsResponse,
ListNamespacesResponse,
Namespace,
Permissions,
CreateNamespaceRequest,
UpdateNamespaceRequest,
CreateCredentialRequest,
UpdateCredentialRequest,
)
def unmarshal_Permissions(data: Any) -> Permissions:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Permissions' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("can_manage", None)
args["can_manage"] = field
field = data.get("can_publish", None)
args["can_publish"] = field
field = data.get("can_receive", None)
args["can_receive"] = field
return Permissions(**args)
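# Illustrative round trip (not part of the generated module):
#
#     perms = unmarshal_Permissions(
#         {"can_publish": True, "can_receive": True, "can_manage": False}
#     )
#     assert perms.can_publish is True and perms.can_manage is False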
def unmarshal_CredentialSummarySQSSNSCreds(data: Any) -> CredentialSummarySQSSNSCreds:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CredentialSummarySQSSNSCreds' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("access_key", None)
args["access_key"] = field
field = data.get("permissions", None)
args["permissions"] = unmarshal_Permissions(field) if field is not None else None
return CredentialSummarySQSSNSCreds(**args)
def unmarshal_CredentialNATSCredsFile(data: Any) -> CredentialNATSCredsFile:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CredentialNATSCredsFile' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("content", None)
args["content"] = field
return CredentialNATSCredsFile(**args)
def unmarshal_CredentialSQSSNSCreds(data: Any) -> CredentialSQSSNSCreds:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CredentialSQSSNSCreds' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("access_key", None)
args["access_key"] = field
field = data.get("permissions", None)
args["permissions"] = unmarshal_Permissions(field) if field is not None else None
field = data.get("secret_key", None)
args["secret_key"] = field
return CredentialSQSSNSCreds(**args)
def unmarshal_CredentialSummary(data: Any) -> CredentialSummary:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CredentialSummary' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("namespace_id", None)
args["namespace_id"] = field
field = data.get("protocol", None)
args["protocol"] = field
field = data.get("sqs_sns_credentials", None)
args["sqs_sns_credentials"] = (
unmarshal_CredentialSummarySQSSNSCreds(field) if field is not None else None
)
return CredentialSummary(**args)
def unmarshal_Namespace(data: Any) -> Namespace:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Namespace' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("endpoint", None)
args["endpoint"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("protocol", None)
args["protocol"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Namespace(**args)
def unmarshal_Credential(data: Any) -> Credential:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Credential' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("namespace_id", None)
args["namespace_id"] = field
field = data.get("nats_credentials", None)
args["nats_credentials"] = (
unmarshal_CredentialNATSCredsFile(field) if field is not None else None
)
field = data.get("protocol", None)
args["protocol"] = field
field = data.get("sqs_sns_credentials", None)
args["sqs_sns_credentials"] = (
unmarshal_CredentialSQSSNSCreds(field) if field is not None else None
)
return Credential(**args)
def unmarshal_ListCredentialsResponse(data: Any) -> ListCredentialsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListCredentialsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("credentials", None)
args["credentials"] = (
[unmarshal_CredentialSummary(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListCredentialsResponse(**args)
def unmarshal_ListNamespacesResponse(data: Any) -> ListNamespacesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListNamespacesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("namespaces", None)
args["namespaces"] = (
[unmarshal_Namespace(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListNamespacesResponse(**args)
def marshal_Permissions(
request: Permissions,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.can_manage is not None:
output["can_manage"] = request.can_manage
if request.can_publish is not None:
output["can_publish"] = request.can_publish
if request.can_receive is not None:
output["can_receive"] = request.can_receive
return output
def marshal_CreateCredentialRequest(
request: CreateCredentialRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.namespace_id is not None:
output["namespace_id"] = request.namespace_id
if request.permissions is not None:
output["permissions"] = marshal_Permissions(request.permissions, defaults)
return output
def marshal_CreateNamespaceRequest(
request: CreateNamespaceRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.protocol is not None:
output["protocol"] = NamespaceProtocol(request.protocol)
return output
def marshal_UpdateCredentialRequest(
request: UpdateCredentialRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.permissions is not None:
output["permissions"] = marshal_Permissions(request.permissions, defaults)
return output
def marshal_UpdateNamespaceRequest(
request: UpdateNamespaceRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.namespace_id is not None:
output["namespace_id"] = request.namespace_id
return output
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/mnq/v1alpha1/marshalling.py
| 0.736211 | 0.240451 |
marshalling.py
|
pypi
|
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
fetch_all_pages_async,
random_name,
validate_path_param,
)
from .types import (
ListCredentialsRequestOrderBy,
ListNamespacesRequestOrderBy,
NamespaceProtocol,
Credential,
CredentialSummary,
ListCredentialsResponse,
ListNamespacesResponse,
Namespace,
Permissions,
CreateNamespaceRequest,
UpdateNamespaceRequest,
CreateCredentialRequest,
UpdateCredentialRequest,
)
from .marshalling import (
marshal_CreateCredentialRequest,
marshal_CreateNamespaceRequest,
marshal_UpdateCredentialRequest,
marshal_UpdateNamespaceRequest,
unmarshal_Namespace,
unmarshal_Credential,
unmarshal_ListCredentialsResponse,
unmarshal_ListNamespacesResponse,
)
class MnqV1Alpha1API(API):
"""
Messaging and Queuing API.
This API allows you to manage Scaleway Messaging and Queuing brokers.
Messaging and Queuing API.
"""
async def list_namespaces(
self,
*,
region: Optional[Region] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListNamespacesRequestOrderBy = ListNamespacesRequestOrderBy.CREATED_AT_ASC,
) -> ListNamespacesResponse:
"""
List namespaces.
List all Messaging and Queuing namespaces in the specified region, for a Scaleway Organization or Project. By default, the namespaces returned in the list are ordered by creation date in ascending order, though this can be modified via the `order_by` field.
:param region: Region to target. If none is passed will use default region from the config.
:param organization_id: Include only namespaces in this Organization.
:param project_id: Include only namespaces in this Project.
:param page: Page number to return.
:param page_size: Maximum number of namespaces to return per page.
:param order_by: Order in which to return results.
:return: :class:`ListNamespacesResponse <ListNamespacesResponse>`
Usage:
::
result = await api.list_namespaces()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/mnq/v1alpha1/regions/{param_region}/namespaces",
params={
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_id": project_id or self.client.default_project_id,
},
)
self._throw_on_error(res)
return unmarshal_ListNamespacesResponse(res.json())
async def list_namespaces_all(
self,
*,
region: Optional[Region] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListNamespacesRequestOrderBy] = None,
) -> List[Namespace]:
"""
List namespaces.
List all Messaging and Queuing namespaces in the specified region, for a Scaleway Organization or Project. By default, the namespaces returned in the list are ordered by creation date in ascending order, though this can be modified via the `order_by` field.
:param region: Region to target. If none is passed will use default region from the config.
:param organization_id: Include only namespaces in this Organization.
:param project_id: Include only namespaces in this Project.
:param page: Page number to return.
:param page_size: Maximum number of namespaces to return per page.
:param order_by: Order in which to return results.
:return: :class:`List[ListNamespacesResponse] <List[ListNamespacesResponse]>`
Usage:
::
result = await api.list_namespaces_all()
"""
return await fetch_all_pages_async(
type=ListNamespacesResponse,
key="namespaces",
fetcher=self.list_namespaces,
args={
"region": region,
"organization_id": organization_id,
"project_id": project_id,
"page": page,
"page_size": page_size,
"order_by": order_by,
},
)
async def create_namespace(
self,
*,
region: Optional[Region] = None,
name: Optional[str] = None,
protocol: NamespaceProtocol = NamespaceProtocol.UNKNOWN,
project_id: Optional[str] = None,
) -> Namespace:
"""
Create a namespace.
Create a Messaging and Queuing namespace, set to the desired protocol.
:param region: Region to target. If none is passed will use default region from the config.
:param name: Namespace name.
:param protocol: Namespace protocol. You must specify a valid protocol (and not `unknown`) to avoid an error.
:param project_id: Project containing the Namespace.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.create_namespace()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"POST",
f"/mnq/v1alpha1/regions/{param_region}/namespaces",
body=marshal_CreateNamespaceRequest(
CreateNamespaceRequest(
region=region,
name=name or random_name(prefix="mnq"),
protocol=protocol,
project_id=project_id,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def update_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
name: Optional[str] = None,
) -> Namespace:
"""
Update the name of a namespace.
Update the name of a Messaging and Queuing namespace, specified by its namespace ID.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: ID of the Namespace to update.
:param name: Namespace name.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.update_namespace(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"PATCH",
f"/mnq/v1alpha1/regions/{param_region}/namespaces",
body=marshal_UpdateNamespaceRequest(
UpdateNamespaceRequest(
namespace_id=namespace_id,
region=region,
name=name,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def get_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
) -> Namespace:
"""
Get a namespace.
Retrieve information about an existing Messaging and Queuing namespace, identified by its namespace ID. Its full details, including name, endpoint and protocol, are returned in the response.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: ID of the Namespace to get.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.get_namespace(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_namespace_id = validate_path_param("namespace_id", namespace_id)
res = self._request(
"GET",
f"/mnq/v1alpha1/regions/{param_region}/namespaces/{param_namespace_id}",
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def delete_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
) -> Optional[None]:
"""
Delete a namespace.
Delete a Messaging and Queuing namespace, specified by its namespace ID. Note that deleting a namespace is irreversible, and any URLs, credentials and queued messages belonging to this namespace will also be deleted.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: ID of the namespace to delete.
Usage:
::
result = await api.delete_namespace(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_namespace_id = validate_path_param("namespace_id", namespace_id)
res = self._request(
"DELETE",
f"/mnq/v1alpha1/regions/{param_region}/namespaces/{param_namespace_id}",
)
self._throw_on_error(res)
return None
async def create_credential(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
name: Optional[str] = None,
permissions: Optional[Permissions] = None,
) -> Credential:
"""
Create credentials.
Create a set of credentials for a Messaging and Queuing namespace, specified by its namespace ID. If creating credentials for a NATS namespace, the `permissions` object must not be included in the request. If creating credentials for an SQS/SNS namespace, the `permissions` object is required, with all three of its child attributes.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: Namespace containing the credentials.
:param name: Name of the credentials.
:param permissions: Permissions associated with these credentials.
:return: :class:`Credential <Credential>`
Usage:
::
result = await api.create_credential(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"POST",
f"/mnq/v1alpha1/regions/{param_region}/credentials",
body=marshal_CreateCredentialRequest(
CreateCredentialRequest(
namespace_id=namespace_id,
region=region,
name=name or random_name(prefix="mnq"),
permissions=permissions,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Credential(res.json())
async def delete_credential(
self,
*,
credential_id: str,
region: Optional[Region] = None,
) -> Optional[None]:
"""
Delete credentials.
Delete a set of credentials, specified by their credential ID. Deleting credentials is irreversible and cannot be undone. The credentials can no longer be used to access the namespace.
:param region: Region to target. If none is passed will use default region from the config.
:param credential_id: ID of the credentials to delete.
Usage:
::
result = await api.delete_credential(credential_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_credential_id = validate_path_param("credential_id", credential_id)
res = self._request(
"DELETE",
f"/mnq/v1alpha1/regions/{param_region}/credentials/{param_credential_id}",
)
self._throw_on_error(res)
return None
async def list_credentials(
self,
*,
region: Optional[Region] = None,
namespace_id: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListCredentialsRequestOrderBy = ListCredentialsRequestOrderBy.ID_ASC,
) -> ListCredentialsResponse:
"""
List credentials.
List existing credentials in the specified region. The response contains only the metadata for the credentials, not the credentials themselves (for this, use **Get Credentials**).
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: Namespace containing the credentials.
:param page: Page number to return.
:param page_size: Maximum number of credentials to return per page.
:param order_by: Order in which to return results.
:return: :class:`ListCredentialsResponse <ListCredentialsResponse>`
Usage:
::
result = await api.list_credentials()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/mnq/v1alpha1/regions/{param_region}/credentials",
params={
"namespace_id": namespace_id,
"order_by": order_by,
"page": page,
"page_size": page_size or self.client.default_page_size,
},
)
self._throw_on_error(res)
return unmarshal_ListCredentialsResponse(res.json())
async def list_credentials_all(
self,
*,
region: Optional[Region] = None,
namespace_id: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListCredentialsRequestOrderBy] = None,
) -> List[CredentialSummary]:
"""
List credentials.
List existing credentials in the specified region. The response contains only the metadata for the credentials, not the credentials themselves (for this, use **Get Credentials**).
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: Namespace containing the credentials.
:param page: Page number to return.
:param page_size: Maximum number of credentials to return per page.
:param order_by: Order in which to return results.
:return: :class:`List[ListCredentialsResponse] <List[ListCredentialsResponse]>`
Usage:
::
result = await api.list_credentials_all()
"""
return await fetch_all_pages_async(
type=ListCredentialsResponse,
key="credentials",
fetcher=self.list_credentials,
args={
"region": region,
"namespace_id": namespace_id,
"page": page,
"page_size": page_size,
"order_by": order_by,
},
)
async def update_credential(
self,
*,
credential_id: str,
region: Optional[Region] = None,
name: Optional[str] = None,
permissions: Optional[Permissions] = None,
) -> Credential:
"""
Update credentials.
Update a set of credentials. You can update the credentials' name, or (in the case of SQS/SNS credentials only) their permissions. To update the name of NATS credentials, do not include the `permissions` object in your request.
:param region: Region to target. If none is passed will use default region from the config.
:param credential_id: ID of the credentials to update.
:param name: Name of the credentials.
:param permissions: Permissions associated with these credentials.
:return: :class:`Credential <Credential>`
Usage:
::
result = await api.update_credential(credential_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_credential_id = validate_path_param("credential_id", credential_id)
res = self._request(
"PATCH",
f"/mnq/v1alpha1/regions/{param_region}/credentials/{param_credential_id}",
body=marshal_UpdateCredentialRequest(
UpdateCredentialRequest(
credential_id=credential_id,
region=region,
name=name,
permissions=permissions,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Credential(res.json())
async def get_credential(
self,
*,
credential_id: str,
region: Optional[Region] = None,
) -> Credential:
"""
Get credentials.
Retrieve an existing set of credentials, identified by the `credential_id`. The credentials themselves, as well as their metadata (protocol, namespace ID etc), are returned in the response.
:param region: Region to target. If none is passed will use default region from the config.
:param credential_id: ID of the credentials to get.
:return: :class:`Credential <Credential>`
Usage:
::
result = await api.get_credential(credential_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_credential_id = validate_path_param("credential_id", credential_id)
res = self._request(
"GET",
f"/mnq/v1alpha1/regions/{param_region}/credentials/{param_credential_id}",
)
self._throw_on_error(res)
return unmarshal_Credential(res.json())
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/mnq/v1alpha1/api.py
| 0.923497 | 0.167117 |
api.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
TimeSeries,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class CockpitStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_STATUS = "unknown_status"
CREATING = "creating"
READY = "ready"
DELETING = "deleting"
UPDATING = "updating"
ERROR = "error"
def __str__(self) -> str:
return str(self.value)
class GrafanaUserRole(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_ROLE = "unknown_role"
EDITOR = "editor"
VIEWER = "viewer"
def __str__(self) -> str:
return str(self.value)
class ListGrafanaUsersRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
LOGIN_ASC = "login_asc"
LOGIN_DESC = "login_desc"
def __str__(self) -> str:
return str(self.value)
class ListPlansRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class ListTokensRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class PlanName(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_NAME = "unknown_name"
FREE = "free"
PREMIUM = "premium"
CUSTOM = "custom"
def __str__(self) -> str:
return str(self.value)
@dataclass
class Cockpit:
"""
Cockpit.
"""
project_id: str
"""
ID of the Project the Cockpit belongs to.
"""
created_at: Optional[datetime]
"""
Date and time of the Cockpit's creation.
"""
updated_at: Optional[datetime]
"""
Date and time of the Cockpit's last update.
"""
endpoints: Optional[CockpitEndpoints]
"""
Endpoints of the Cockpit.
"""
status: CockpitStatus
"""
Status of the Cockpit.
"""
managed_alerts_enabled: bool
"""
Specifies whether managed alerts are enabled or disabled.
"""
plan: Optional[Plan]
"""
Pricing plan information.
"""
@dataclass
class CockpitEndpoints:
"""
Cockpit. endpoints.
"""
metrics_url: str
"""
URL for metrics.
"""
logs_url: str
"""
URL for logs.
"""
alertmanager_url: str
"""
URL for the alert manager.
"""
grafana_url: str
"""
URL for the Grafana dashboard.
"""
@dataclass
class CockpitMetrics:
"""
Metrics for a given Cockpit.
Cockpit metrics.
"""
timeseries: List[TimeSeries]
"""
Time series array.
"""
@dataclass
class ContactPoint:
"""
Contact point.
"""
email: Optional[ContactPointEmail]
"""
Contact point configuration.
One-of ('configuration'): at most one of 'email' could be set.
"""
@dataclass
class ContactPointEmail:
to: str
@dataclass
class GrafanaUser:
"""
Grafana user.
"""
id: int
"""
ID of the Grafana user.
"""
login: str
"""
Username of the Grafana user.
"""
role: GrafanaUserRole
"""
Role assigned to the Grafana user.
"""
password: Optional[str]
"""
The Grafana user's password.
"""
@dataclass
class ListContactPointsResponse:
"""
Response returned when listing contact points.
List contact points response.
"""
total_count: int
"""
Count of all contact points created.
"""
contact_points: List[ContactPoint]
"""
Array of contact points.
"""
has_additional_receivers: bool
"""
Specifies whether the contact point has other receivers than the default receiver.
"""
has_additional_contact_points: bool
"""
Specifies whether there are unmanaged contact points.
"""
@dataclass
class ListGrafanaUsersResponse:
"""
Response returned when listing Grafana users.
List grafana users response.
"""
total_count: int
"""
Count of all Grafana users.
"""
grafana_users: List[GrafanaUser]
"""
Information on all Grafana users.
"""
@dataclass
class ListPlansResponse:
"""
Response returned when listing all pricing plans.
List plans response.
"""
total_count: int
"""
Count of all pricing plans.
"""
plans: List[Plan]
"""
Information on plans.
"""
@dataclass
class ListTokensResponse:
"""
List tokens response.
"""
total_count: int
"""
Count of all tokens created.
"""
tokens: List[Token]
"""
List of all tokens created.
"""
@dataclass
class Plan:
"""
Pricing plan.
Plan.
"""
id: str
"""
ID of a given pricing plan.
"""
name: PlanName
"""
Name of a given pricing plan.
"""
retention_metrics_interval: Optional[str]
"""
Retention for metrics.
"""
retention_logs_interval: Optional[str]
"""
Retention for logs.
"""
sample_ingestion_price: int
"""
Ingestion price for 1 million samples in cents.
"""
logs_ingestion_price: int
"""
Ingestion price for 1 GB of logs in cents.
"""
retention_price: int
"""
Retention price in euros per month.
"""
@dataclass
class SelectPlanResponse:
"""
Response returned when selecting a pricing plan.
Select plan response.
"""
@dataclass
class Token:
"""
Token.
"""
id: str
"""
ID of the token.
"""
project_id: str
"""
ID of the Project.
"""
name: str
"""
Name of the token.
"""
created_at: Optional[datetime]
"""
Date and time of the token's creation.
"""
updated_at: Optional[datetime]
"""
Date and time of the token's last update.
"""
scopes: Optional[TokenScopes]
"""
Token's permissions.
"""
secret_key: Optional[str]
"""
Token's secret key.
"""
@dataclass
class TokenScopes:
"""
Token scopes.
"""
query_metrics: bool
"""
Permission to fetch metrics.
"""
write_metrics: bool
"""
Permission to write metrics.
"""
setup_metrics_rules: bool
"""
Permission to setup metrics rules.
"""
query_logs: bool
"""
Permission to fetch logs.
"""
write_logs: bool
"""
Permission to write logs.
"""
setup_logs_rules: bool
"""
Permission to setup logs rules.
"""
setup_alerts: bool
"""
Permission to setup alerts.
"""
@dataclass
class ActivateCockpitRequest:
project_id: Optional[str]
"""
ID of the Project the Cockpit belongs to.
"""
@dataclass
class GetCockpitRequest:
project_id: Optional[str]
"""
ID of the Project the Cockpit belongs to.
"""
@dataclass
class GetCockpitMetricsRequest:
project_id: Optional[str]
"""
ID of the Project the Cockpit belongs to.
"""
start_date: Optional[datetime]
"""
Desired time range's start date for the metrics.
"""
end_date: Optional[datetime]
"""
Desired time range's end date for the metrics.
"""
metric_name: Optional[str]
"""
Name of the metric requested.
"""
@dataclass
class DeactivateCockpitRequest:
project_id: Optional[str]
"""
ID of the Project the Cockpit belongs to.
"""
@dataclass
class ResetCockpitGrafanaRequest:
project_id: Optional[str]
"""
ID of the Project the Cockpit belongs to.
"""
@dataclass
class CreateTokenRequest:
project_id: Optional[str]
"""
ID of the Project.
"""
name: Optional[str]
"""
Name of the token.
"""
scopes: Optional[TokenScopes]
"""
Token's permissions.
"""
@dataclass
class ListTokensRequest:
page: Optional[int]
"""
Page number.
"""
page_size: Optional[int]
"""
Page size.
"""
order_by: Optional[ListTokensRequestOrderBy]
project_id: Optional[str]
"""
ID of the Project.
"""
@dataclass
class GetTokenRequest:
token_id: str
"""
ID of the token.
"""
@dataclass
class DeleteTokenRequest:
token_id: str
"""
ID of the token.
"""
@dataclass
class CreateContactPointRequest:
project_id: Optional[str]
"""
ID of the Project in which to create the contact point.
"""
contact_point: Optional[ContactPoint]
"""
Contact point to create.
"""
@dataclass
class ListContactPointsRequest:
page: Optional[int]
"""
Page number.
"""
page_size: Optional[int]
"""
Page size.
"""
project_id: Optional[str]
"""
ID of the Project from which to list the contact points.
"""
@dataclass
class DeleteContactPointRequest:
project_id: Optional[str]
"""
ID of the Project.
"""
contact_point: Optional[ContactPoint]
"""
Contact point to delete.
"""
@dataclass
class EnableManagedAlertsRequest:
project_id: Optional[str]
"""
ID of the Project.
"""
@dataclass
class DisableManagedAlertsRequest:
project_id: Optional[str]
"""
ID of the Project.
"""
@dataclass
class TriggerTestAlertRequest:
project_id: Optional[str]
@dataclass
class CreateGrafanaUserRequest:
project_id: Optional[str]
"""
ID of the Project.
"""
login: str
"""
Username of the Grafana user.
"""
role: GrafanaUserRole
"""
Role assigned to the Grafana user.
"""
@dataclass
class ListGrafanaUsersRequest:
page: Optional[int]
"""
Page number.
"""
page_size: Optional[int]
"""
Page size.
"""
order_by: Optional[ListGrafanaUsersRequestOrderBy]
project_id: Optional[str]
"""
ID of the Project.
"""
@dataclass
class DeleteGrafanaUserRequest:
grafana_user_id: int
"""
ID of the Grafana user.
"""
project_id: Optional[str]
"""
ID of the Project.
"""
@dataclass
class ResetGrafanaUserPasswordRequest:
grafana_user_id: int
"""
ID of the Grafana user.
"""
project_id: Optional[str]
"""
ID of the Project.
"""
@dataclass
class ListPlansRequest:
page: Optional[int]
"""
Page number.
"""
page_size: Optional[int]
"""
Page size.
"""
order_by: Optional[ListPlansRequestOrderBy]
@dataclass
class SelectPlanRequest:
project_id: Optional[str]
"""
ID of the Project.
"""
plan_id: str
"""
ID of the pricing plan.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/cockpit/v1beta1/types.py
| 0.859457 | 0.200538 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.bridge import (
unmarshal_TimeSeries,
)
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
GrafanaUserRole,
Cockpit,
CockpitEndpoints,
CockpitMetrics,
ContactPoint,
ContactPointEmail,
GrafanaUser,
ListContactPointsResponse,
ListGrafanaUsersResponse,
ListPlansResponse,
ListTokensResponse,
Plan,
SelectPlanResponse,
Token,
TokenScopes,
ActivateCockpitRequest,
DeactivateCockpitRequest,
ResetCockpitGrafanaRequest,
CreateTokenRequest,
CreateContactPointRequest,
DeleteContactPointRequest,
EnableManagedAlertsRequest,
DisableManagedAlertsRequest,
TriggerTestAlertRequest,
CreateGrafanaUserRequest,
DeleteGrafanaUserRequest,
ResetGrafanaUserPasswordRequest,
SelectPlanRequest,
)
def unmarshal_ContactPointEmail(data: Any) -> ContactPointEmail:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ContactPointEmail' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("to", None)
args["to"] = field
return ContactPointEmail(**args)
def unmarshal_TokenScopes(data: Any) -> TokenScopes:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'TokenScopes' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("query_logs", None)
args["query_logs"] = field
field = data.get("query_metrics", None)
args["query_metrics"] = field
field = data.get("setup_alerts", None)
args["setup_alerts"] = field
field = data.get("setup_logs_rules", None)
args["setup_logs_rules"] = field
field = data.get("setup_metrics_rules", None)
args["setup_metrics_rules"] = field
field = data.get("write_logs", None)
args["write_logs"] = field
field = data.get("write_metrics", None)
args["write_metrics"] = field
return TokenScopes(**args)
def unmarshal_CockpitEndpoints(data: Any) -> CockpitEndpoints:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CockpitEndpoints' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("alertmanager_url", None)
args["alertmanager_url"] = field
field = data.get("grafana_url", None)
args["grafana_url"] = field
field = data.get("logs_url", None)
args["logs_url"] = field
field = data.get("metrics_url", None)
args["metrics_url"] = field
return CockpitEndpoints(**args)
def unmarshal_ContactPoint(data: Any) -> ContactPoint:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ContactPoint' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("email", None)
args["email"] = unmarshal_ContactPointEmail(field) if field is not None else None
return ContactPoint(**args)
def unmarshal_GrafanaUser(data: Any) -> GrafanaUser:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GrafanaUser' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("login", None)
args["login"] = field
field = data.get("password", None)
args["password"] = field
field = data.get("role", None)
args["role"] = field
return GrafanaUser(**args)
def unmarshal_Plan(data: Any) -> Plan:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Plan' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("logs_ingestion_price", None)
args["logs_ingestion_price"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("retention_logs_interval", None)
args["retention_logs_interval"] = field
field = data.get("retention_metrics_interval", None)
args["retention_metrics_interval"] = field
field = data.get("retention_price", None)
args["retention_price"] = field
field = data.get("sample_ingestion_price", None)
args["sample_ingestion_price"] = field
return Plan(**args)
def unmarshal_Token(data: Any) -> Token:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Token' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("scopes", None)
args["scopes"] = unmarshal_TokenScopes(field) if field is not None else None
field = data.get("secret_key", None)
args["secret_key"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Token(**args)
def unmarshal_Cockpit(data: Any) -> Cockpit:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Cockpit' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("endpoints", None)
args["endpoints"] = unmarshal_CockpitEndpoints(field) if field is not None else None
field = data.get("managed_alerts_enabled", None)
args["managed_alerts_enabled"] = field
field = data.get("plan", None)
args["plan"] = unmarshal_Plan(field) if field is not None else None
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Cockpit(**args)
def unmarshal_CockpitMetrics(data: Any) -> CockpitMetrics:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CockpitMetrics' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("timeseries", None)
args["timeseries"] = (
[unmarshal_TimeSeries(v) for v in field] if field is not None else None
)
return CockpitMetrics(**args)
def unmarshal_ListContactPointsResponse(data: Any) -> ListContactPointsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListContactPointsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("contact_points", None)
args["contact_points"] = (
[unmarshal_ContactPoint(v) for v in field] if field is not None else None
)
field = data.get("has_additional_contact_points", None)
args["has_additional_contact_points"] = field
field = data.get("has_additional_receivers", None)
args["has_additional_receivers"] = field
field = data.get("total_count", None)
args["total_count"] = field
return ListContactPointsResponse(**args)
def unmarshal_ListGrafanaUsersResponse(data: Any) -> ListGrafanaUsersResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListGrafanaUsersResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("grafana_users", None)
args["grafana_users"] = (
[unmarshal_GrafanaUser(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListGrafanaUsersResponse(**args)
def unmarshal_ListPlansResponse(data: Any) -> ListPlansResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListPlansResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("plans", None)
args["plans"] = [unmarshal_Plan(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListPlansResponse(**args)
def unmarshal_ListTokensResponse(data: Any) -> ListTokensResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListTokensResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("tokens", None)
args["tokens"] = [unmarshal_Token(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListTokensResponse(**args)
def unmarshal_SelectPlanResponse(data: Any) -> SelectPlanResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SelectPlanResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
return SelectPlanResponse(**args)
def marshal_ContactPointEmail(
request: ContactPointEmail,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.to is not None:
output["to"] = request.to
return output
def marshal_ContactPoint(
request: ContactPoint,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"email",
marshal_ContactPointEmail(request.email, defaults)
if request.email is not None
else None,
),
]
),
)
return output
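# Illustrative (not part of the generated module): marshalling a contact
# point with an email receiver yields {"email": {"to": "ops@example.com"}}:
#
#     body = marshal_ContactPoint(
#         ContactPoint(email=ContactPointEmail(to="ops@example.com")), defaults
#     )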
def marshal_TokenScopes(
request: TokenScopes,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.query_logs is not None:
output["query_logs"] = request.query_logs
if request.query_metrics is not None:
output["query_metrics"] = request.query_metrics
if request.setup_alerts is not None:
output["setup_alerts"] = request.setup_alerts
if request.setup_logs_rules is not None:
output["setup_logs_rules"] = request.setup_logs_rules
if request.setup_metrics_rules is not None:
output["setup_metrics_rules"] = request.setup_metrics_rules
if request.write_logs is not None:
output["write_logs"] = request.write_logs
if request.write_metrics is not None:
output["write_metrics"] = request.write_metrics
return output
def marshal_ActivateCockpitRequest(
request: ActivateCockpitRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_CreateContactPointRequest(
request: CreateContactPointRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.contact_point is not None:
output["contact_point"] = marshal_ContactPoint(request.contact_point, defaults)
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_CreateGrafanaUserRequest(
request: CreateGrafanaUserRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.login is not None:
output["login"] = request.login
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.role is not None:
output["role"] = GrafanaUserRole(request.role)
return output
def marshal_CreateTokenRequest(
request: CreateTokenRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.scopes is not None:
output["scopes"] = marshal_TokenScopes(request.scopes, defaults)
return output
def marshal_DeactivateCockpitRequest(
request: DeactivateCockpitRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_DeleteContactPointRequest(
request: DeleteContactPointRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.contact_point is not None:
output["contact_point"] = marshal_ContactPoint(request.contact_point, defaults)
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_DeleteGrafanaUserRequest(
request: DeleteGrafanaUserRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_DisableManagedAlertsRequest(
request: DisableManagedAlertsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_EnableManagedAlertsRequest(
request: EnableManagedAlertsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_ResetCockpitGrafanaRequest(
request: ResetCockpitGrafanaRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_ResetGrafanaUserPasswordRequest(
request: ResetGrafanaUserPasswordRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_SelectPlanRequest(
request: SelectPlanRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.plan_id is not None:
output["plan_id"] = request.plan_id
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_TriggerTestAlertRequest(
request: TriggerTestAlertRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/cockpit/v1beta1/marshalling.py
| 0.758958 | 0.180612 |
marshalling.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Money,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class DownloadInvoiceRequestFileType(str, Enum, metaclass=StrEnumMeta):
PDF = "pdf"
def __str__(self) -> str:
return str(self.value)
class InvoiceType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_TYPE = "unknown_type"
PERIODIC = "periodic"
PURCHASE = "purchase"
def __str__(self) -> str:
return str(self.value)
class ListInvoicesRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
INVOICE_NUMBER_DESC = "invoice_number_desc"
INVOICE_NUMBER_ASC = "invoice_number_asc"
START_DATE_DESC = "start_date_desc"
START_DATE_ASC = "start_date_asc"
ISSUED_DATE_DESC = "issued_date_desc"
ISSUED_DATE_ASC = "issued_date_asc"
DUE_DATE_DESC = "due_date_desc"
DUE_DATE_ASC = "due_date_asc"
TOTAL_UNTAXED_DESC = "total_untaxed_desc"
TOTAL_UNTAXED_ASC = "total_untaxed_asc"
TOTAL_TAXED_DESC = "total_taxed_desc"
TOTAL_TAXED_ASC = "total_taxed_asc"
INVOICE_TYPE_DESC = "invoice_type_desc"
INVOICE_TYPE_ASC = "invoice_type_asc"
def __str__(self) -> str:
return str(self.value)
@dataclass
class GetConsumptionResponse:
"""
Get consumption response.
"""
consumptions: List[GetConsumptionResponseConsumption]
"""
Detailed consumption list.
"""
updated_at: Optional[datetime]
"""
Last consumption update date.
"""
@dataclass
class GetConsumptionResponseConsumption:
"""
Get consumption response. consumption.
"""
value: Optional[Money]
"""
Monetary value of the consumption.
"""
description: str
"""
Description of the consumption.
"""
project_id: str
"""
Project ID of the consumption.
"""
category: str
"""
Category of the consumption.
"""
operation_path: str
"""
Unique identifier of the product.
"""
@dataclass
class Invoice:
"""
Invoice.
"""
id: str
"""
Invoice ID.
"""
start_date: Optional[datetime]
"""
Start date of the billing period.
"""
issued_date: Optional[datetime]
"""
Date when the invoice was sent to the customer.
"""
due_date: Optional[datetime]
"""
Payment time limit, set according to the Organization's payment conditions.
"""
total_untaxed: Optional[Money]
"""
Total amount, untaxed.
"""
total_taxed: Optional[Money]
"""
Total amount, taxed.
"""
invoice_type: InvoiceType
"""
Type of invoice.
"""
number: int
"""
Invoice number.
"""
@dataclass
class ListInvoicesResponse:
"""
List invoices response.
"""
total_count: int
"""
Total number of invoices.
"""
invoices: List[Invoice]
"""
Paginated returned invoices.
"""
@dataclass
class GetConsumptionRequest:
organization_id: Optional[str]
"""
Filter by organization ID.
"""
@dataclass
class ListInvoicesRequest:
organization_id: Optional[str]
"""
Organization ID to filter for, only invoices from this Organization will be returned.
"""
started_after: Optional[datetime]
"""
Invoice's `start_date` is greater or equal to `started_after`.
"""
started_before: Optional[datetime]
"""
Invoice's `start_date` precedes `started_before`.
"""
invoice_type: Optional[InvoiceType]
"""
Invoice type. It can either be `periodic` or `purchase`.
"""
page: Optional[int]
"""
Positive integer to choose the page to return.
"""
page_size: Optional[int]
"""
Positive integer lower or equal to 100 to select the number of items to return.
"""
order_by: Optional[ListInvoicesRequestOrderBy]
"""
How invoices are ordered in the response.
"""
@dataclass
class DownloadInvoiceRequest:
invoice_id: str
"""
Invoice ID.
"""
file_type: DownloadInvoiceRequestFileType
"""
Wanted file type.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/billing/v2alpha1/types.py
| 0.901759 | 0.258478 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.bridge import (
unmarshal_Money,
)
from dateutil import parser
from .types import (
GetConsumptionResponse,
GetConsumptionResponseConsumption,
Invoice,
ListInvoicesResponse,
)
def unmarshal_GetConsumptionResponseConsumption(
data: Any,
) -> GetConsumptionResponseConsumption:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GetConsumptionResponseConsumption' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("category", None)
args["category"] = field
field = data.get("description", None)
args["description"] = field
field = data.get("operation_path", None)
args["operation_path"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("value", None)
args["value"] = unmarshal_Money(field) if field is not None else None
return GetConsumptionResponseConsumption(**args)
def unmarshal_Invoice(data: Any) -> Invoice:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Invoice' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("due_date", None)
args["due_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("invoice_type", None)
args["invoice_type"] = field
field = data.get("issued_date", None)
args["issued_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("number", None)
args["number"] = field
field = data.get("start_date", None)
args["start_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("total_taxed", None)
args["total_taxed"] = unmarshal_Money(field) if field is not None else None
field = data.get("total_untaxed", None)
args["total_untaxed"] = unmarshal_Money(field) if field is not None else None
return Invoice(**args)
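# Illustrative sketch (not part of the generated SDK): unmarshal_Invoice takes
# a plain JSON-decoded dict; ISO-8601 strings become datetimes via
# dateutil.parser.isoparse and absent keys fall back to None. All field
# values below are hypothetical.
def _demo_unmarshal_invoice() -> None:
    invoice = unmarshal_Invoice(
        {
            "id": "11111111-1111-1111-1111-111111111111",
            "start_date": "2023-01-01T00:00:00Z",
            "invoice_type": "periodic",
            "number": 42,
        }
    )
    assert invoice.number == 42
    assert invoice.start_date is not None and invoice.start_date.year == 2023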
def unmarshal_GetConsumptionResponse(data: Any) -> GetConsumptionResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GetConsumptionResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("consumptions", None)
args["consumptions"] = (
[unmarshal_GetConsumptionResponseConsumption(v) for v in field]
if field is not None
else None
)
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return GetConsumptionResponse(**args)
def unmarshal_ListInvoicesResponse(data: Any) -> ListInvoicesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListInvoicesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("invoices", None)
args["invoices"] = (
[unmarshal_Invoice(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListInvoicesResponse(**args)
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/billing/v2alpha1/marshalling.py
from datetime import datetime
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.bridge import (
ScwFile,
unmarshal_ScwFile,
)
from scaleway_core.utils import (
fetch_all_pages_async,
validate_path_param,
)
from .types import (
DownloadInvoiceRequestFileType,
InvoiceType,
ListInvoicesRequestOrderBy,
GetConsumptionResponse,
Invoice,
ListInvoicesResponse,
)
from .marshalling import (
unmarshal_GetConsumptionResponse,
unmarshal_ListInvoicesResponse,
)
class BillingV2Alpha1API(API):
"""
Billing API.
This API allows you to query your consumption.
"""
async def get_consumption(
self,
*,
organization_id: Optional[str] = None,
) -> GetConsumptionResponse:
"""
Usage:
::
result = await api.get_consumption()
"""
res = self._request(
"GET",
f"/billing/v2alpha1/consumption",
params={
"organization_id": organization_id
or self.client.default_organization_id,
},
)
self._throw_on_error(res)
return unmarshal_GetConsumptionResponse(res.json())
async def list_invoices(
self,
*,
organization_id: Optional[str] = None,
started_after: Optional[datetime] = None,
started_before: Optional[datetime] = None,
invoice_type: InvoiceType = InvoiceType.UNKNOWN_TYPE,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListInvoicesRequestOrderBy = ListInvoicesRequestOrderBy.INVOICE_NUMBER_DESC,
) -> ListInvoicesResponse:
"""
Usage:
::
result = await api.list_invoices()
"""
res = self._request(
"GET",
f"/billing/v2alpha1/invoices",
params={
"invoice_type": invoice_type,
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"started_after": started_after,
"started_before": started_before,
},
)
self._throw_on_error(res)
return unmarshal_ListInvoicesResponse(res.json())
async def list_invoices_all(
self,
*,
organization_id: Optional[str] = None,
started_after: Optional[datetime] = None,
started_before: Optional[datetime] = None,
invoice_type: Optional[InvoiceType] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListInvoicesRequestOrderBy] = None,
) -> List[Invoice]:
"""
:return: :class:`List[Invoice] <List[Invoice]>`
Usage:
::
result = await api.list_invoices_all()
"""
return await fetch_all_pages_async(
type=ListInvoicesResponse,
key="invoices",
fetcher=self.list_invoices,
args={
"organization_id": organization_id,
"started_after": started_after,
"started_before": started_before,
"invoice_type": invoice_type,
"page": page,
"page_size": page_size,
"order_by": order_by,
},
)
async def download_invoice(
self,
*,
invoice_id: str,
file_type: DownloadInvoiceRequestFileType,
) -> Optional[ScwFile]:
"""
Usage:
::
result = await api.download_invoice(
invoice_id="example",
file_type=DownloadInvoiceRequestFileType.PDF,
)
"""
param_invoice_id = validate_path_param("invoice_id", invoice_id)
res = self._request(
"GET",
f"/billing/v2alpha1/invoices/{param_invoice_id}/download",
params={
"file_type": file_type,
},
)
self._throw_on_error(res)
json = res.json()
return unmarshal_ScwFile(json) if json is not None else None
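# Illustrative usage sketch (not part of the generated SDK). The Client
# construction below is an assumption (it presumes scaleway_async exposes
# scaleway_core's Client with credential keyword arguments); adapt it to
# however you configure clients in your setup.
if __name__ == "__main__":
    import asyncio

    from scaleway_async import Client  # assumed top-level re-export

    async def _main() -> None:
        client = Client(access_key="SCW...", secret_key="...")  # hypothetical credentials
        api = BillingV2Alpha1API(client)
        page = await api.list_invoices(
            page_size=5,
            order_by=ListInvoicesRequestOrderBy.ISSUED_DATE_DESC,
        )
        for invoice in page.invoices:
            print(invoice.number, invoice.invoice_type)

    asyncio.run(_main())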
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/billing/v2alpha1/api.py
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ListSecretsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
UPDATED_AT_ASC = "updated_at_asc"
UPDATED_AT_DESC = "updated_at_desc"
def __str__(self) -> str:
return str(self.value)
class Product(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
def __str__(self) -> str:
return str(self.value)
class SecretStatus(str, Enum, metaclass=StrEnumMeta):
READY = "ready"
LOCKED = "locked"
def __str__(self) -> str:
return str(self.value)
class SecretType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_SECRET_TYPE = "unknown_secret_type"
OPAQUE = "opaque"
CERTIFICATE = "certificate"
def __str__(self) -> str:
return str(self.value)
class SecretVersionStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
ENABLED = "enabled"
DISABLED = "disabled"
DESTROYED = "destroyed"
def __str__(self) -> str:
return str(self.value)
@dataclass
class AccessSecretVersionResponse:
"""
Access secret version response.
"""
secret_id: str
"""
ID of the secret.
"""
revision: int
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1.
"""
data: str
"""
The base64-encoded secret payload of the version.
"""
data_crc32: Optional[int]
"""
The CRC32 checksum of the data as a base-10 integer.
This field is only available if a CRC32 was supplied during the creation of the version.
"""
@dataclass
class ListSecretVersionsResponse:
"""
List secret versions response.
"""
versions: List[SecretVersion]
"""
Single page of versions.
"""
total_count: int
"""
Number of versions.
"""
@dataclass
class ListSecretsResponse:
"""
List secrets response.
"""
secrets: List[Secret]
"""
Single page of secrets matching the requested criteria.
"""
total_count: int
"""
Count of all secrets matching the requested criteria.
"""
@dataclass
class ListTagsResponse:
"""
List tags response.
"""
tags: List[str]
"""
List of tags.
"""
total_count: int
"""
Count of all tags matching the requested criteria.
"""
@dataclass
class PasswordGenerationParams:
"""
Password generation params.
"""
length: int
"""
Length of the password to generate (between 1 and 1024).
"""
no_lowercase_letters: bool
"""
Exclude lower-case letters from the default alphabet.
"""
no_uppercase_letters: bool
"""
Exclude upper-case letters from the default alphabet.
"""
no_digits: bool
"""
Exclude digits from the default alphabet.
"""
additional_chars: str
"""
Additional ASCII characters to include in the alphabet.
"""
@dataclass
class Secret:
"""
Secret.
"""
id: str
"""
ID of the secret.
"""
project_id: str
"""
ID of the Project containing the secret.
"""
name: str
"""
Name of the secret.
"""
status: SecretStatus
"""
Current status of the secret.
* `ready`: the secret can be read, modified and deleted.
* `locked`: no action can be performed on the secret. This status can only be applied and removed by Scaleway.
"""
created_at: Optional[datetime]
"""
Date and time of the secret's creation.
"""
updated_at: Optional[datetime]
"""
Last update of the secret.
"""
tags: List[str]
"""
List of the secret's tags.
"""
version_count: int
"""
Number of versions for this secret.
"""
description: Optional[str]
"""
Description of the secret.
"""
is_managed: bool
"""
Returns `true` for secrets that are managed by another product.
"""
is_protected: bool
"""
Returns `true` for protected secrets that cannot be deleted.
"""
type_: SecretType
"""
Type of the secret.
See `Secret.Type` enum for description of values.
"""
region: Region
"""
Region of the secret.
"""
@dataclass
class SecretVersion:
"""
Secret version.
"""
revision: int
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1.
"""
secret_id: str
"""
ID of the secret.
"""
status: SecretVersionStatus
"""
Current status of the version.
* `unknown`: the version is in an invalid state.
* `enabled`: the version is accessible.
* `disabled`: the version is not accessible but can be enabled.
* `destroyed`: the version is permanently deleted. It is not possible to recover it.
"""
created_at: Optional[datetime]
"""
Date and time of the version's creation.
"""
updated_at: Optional[datetime]
"""
Last update of the version.
"""
description: Optional[str]
"""
Description of the version.
"""
is_latest: bool
"""
Returns `true` if the version is the latest.
"""
@dataclass
class CreateSecretRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
project_id: Optional[str]
"""
ID of the Project containing the secret.
"""
name: str
"""
Name of the secret.
"""
tags: Optional[List[str]]
"""
List of the secret's tags.
"""
description: Optional[str]
"""
Description of the secret.
"""
type_: SecretType
"""
Type of the secret.
(Optional.) See `Secret.Type` enum for description of values. If not specified, the type is `Opaque`.
"""
@dataclass
class GetSecretRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
@dataclass
class GetSecretByNameRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_name: str
"""
Name of the secret.
"""
project_id: Optional[str]
"""
ID of the Project to target.
(Optional.) If not specified, Secret Manager will look for the secret in all Projects.
"""
@dataclass
class UpdateSecretRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
name: Optional[str]
"""
Secret's updated name (optional).
"""
tags: Optional[List[str]]
"""
Secret's updated list of tags (optional).
"""
description: Optional[str]
"""
Description of the secret.
"""
@dataclass
class ListSecretsRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
organization_id: Optional[str]
"""
Filter by Organization ID (optional).
"""
project_id: Optional[str]
"""
Filter by Project ID (optional).
"""
order_by: Optional[ListSecretsRequestOrderBy]
page: Optional[int]
page_size: Optional[int]
tags: Optional[List[str]]
"""
List of tags to filter on (optional).
"""
name: Optional[str]
"""
Filter by secret name (optional).
"""
is_managed: Optional[bool]
"""
Filter by managed / not managed (optional).
"""
@dataclass
class DeleteSecretRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
@dataclass
class ProtectSecretRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret to protect.
"""
@dataclass
class UnprotectSecretRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret to unprotect.
"""
@dataclass
class AddSecretOwnerRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
product_name: Optional[str]
"""
(Deprecated: use `product` field) Name of the product to add.
:deprecated
"""
product: Product
"""
ID of the product to add.
See `Product` enum for description of values.
"""
@dataclass
class CreateSecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
data: str
"""
The base64-encoded secret payload of the version.
"""
description: Optional[str]
"""
Description of the version.
"""
disable_previous: Optional[bool]
"""
Disable the previous secret version.
(Optional.) If there is no previous version or if the previous version was already disabled, does nothing.
"""
password_generation: Optional[PasswordGenerationParams]
"""
Options to generate a password.
(Optional.) If specified, a random password will be generated. The `data` and `data_crc32` fields must be empty. By default, the generator will use upper and lower case letters, and digits. This behavior can be tuned using the generation parameters.
One-of ('_password_generation'): at most one of 'password_generation' could be set.
:deprecated
"""
data_crc32: Optional[int]
"""
(Optional.) The CRC32 checksum of the data as a base-10 integer.
If specified, Secret Manager will verify the integrity of the data received against the given CRC32 checksum. An error is returned if the CRC32 does not match. If, however, the CRC32 matches, it will be stored and returned along with the SecretVersion on future access requests.
"""
@dataclass
class GeneratePasswordRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
description: Optional[str]
"""
Description of the version.
"""
disable_previous: Optional[bool]
"""
(Optional.) Disable the previous secret version.
This has no effect if there is no previous version or if the previous version was already disabled.
"""
length: int
"""
Length of the password to generate (between 1 and 1024 characters).
"""
no_lowercase_letters: Optional[bool]
"""
(Optional.) Exclude lower-case letters from the default password character set.
"""
no_uppercase_letters: Optional[bool]
"""
(Optional.) Exclude upper-case letters from the default password character set.
"""
no_digits: Optional[bool]
"""
(Optional.) Exclude digits from the default password character set.
"""
additional_chars: Optional[str]
"""
(Optional.) Additional ASCII characters to be included in the password character set.
"""
@dataclass
class GetSecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
@dataclass
class GetSecretVersionByNameRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_name: str
"""
Name of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
project_id: Optional[str]
"""
ID of the Project to target.
(Optional.) If not specified, Secret Manager will look for the secret version in all Projects.
"""
@dataclass
class UpdateSecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
description: Optional[str]
"""
Description of the version.
"""
@dataclass
class ListSecretVersionsRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
page: Optional[int]
page_size: Optional[int]
status: Optional[List[SecretVersionStatus]]
"""
Filter results by status.
"""
@dataclass
class ListSecretVersionsByNameRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_name: str
"""
Name of the secret.
"""
page: Optional[int]
page_size: Optional[int]
status: Optional[List[SecretVersionStatus]]
"""
Filter results by status.
"""
project_id: Optional[str]
"""
ID of the Project to target.
(Optional.) If not specified, Secret Manager will look for the secret in all Projects.
"""
@dataclass
class EnableSecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
@dataclass
class DisableSecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
@dataclass
class AccessSecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
@dataclass
class AccessSecretVersionByNameRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_name: str
"""
Name of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
project_id: Optional[str]
"""
ID of the Project to target.
(Optional.) If not specified, Secret Manager will look for the secret version in all Projects.
"""
@dataclass
class DestroySecretVersionRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
secret_id: str
"""
ID of the secret.
"""
revision: str
"""
Version number.
The first version of the secret is numbered 1, and each subsequent revision increments the number by 1. The value can be a version number or "latest".
"""
@dataclass
class ListTagsRequest:
region: Optional[Region]
"""
Region to target. If none is passed, the default region from the config is used.
"""
project_id: Optional[str]
"""
ID of the Project to target.
(Optional.) If not specified, Secret Manager will look for tags in all Projects.
"""
page: Optional[int]
page_size: Optional[int]
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/secret/v1alpha1/types.py
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
Product,
SecretType,
AccessSecretVersionResponse,
ListSecretVersionsResponse,
ListSecretsResponse,
ListTagsResponse,
PasswordGenerationParams,
Secret,
SecretVersion,
CreateSecretRequest,
UpdateSecretRequest,
AddSecretOwnerRequest,
CreateSecretVersionRequest,
GeneratePasswordRequest,
UpdateSecretVersionRequest,
)
def unmarshal_Secret(data: Any) -> Secret:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Secret' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("is_managed", None)
args["is_managed"] = field
field = data.get("is_protected", None)
args["is_protected"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("type", None)
args["type_"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("version_count", None)
args["version_count"] = field
return Secret(**args)
def unmarshal_SecretVersion(data: Any) -> SecretVersion:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SecretVersion' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("is_latest", None)
args["is_latest"] = field
field = data.get("revision", None)
args["revision"] = field
field = data.get("secret_id", None)
args["secret_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return SecretVersion(**args)
def unmarshal_AccessSecretVersionResponse(data: Any) -> AccessSecretVersionResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'AccessSecretVersionResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("data", None)
args["data"] = field
field = data.get("data_crc32", None)
args["data_crc32"] = field
field = data.get("revision", None)
args["revision"] = field
field = data.get("secret_id", None)
args["secret_id"] = field
return AccessSecretVersionResponse(**args)
def unmarshal_ListSecretVersionsResponse(data: Any) -> ListSecretVersionsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListSecretVersionsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("total_count", None)
args["total_count"] = field
field = data.get("versions", None)
args["versions"] = (
[unmarshal_SecretVersion(v) for v in field] if field is not None else None
)
return ListSecretVersionsResponse(**args)
def unmarshal_ListSecretsResponse(data: Any) -> ListSecretsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListSecretsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("secrets", None)
args["secrets"] = (
[unmarshal_Secret(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListSecretsResponse(**args)
def unmarshal_ListTagsResponse(data: Any) -> ListTagsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListTagsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("tags", None)
args["tags"] = field
field = data.get("total_count", None)
args["total_count"] = field
return ListTagsResponse(**args)
def marshal_PasswordGenerationParams(
request: PasswordGenerationParams,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.additional_chars is not None:
output["additional_chars"] = request.additional_chars
if request.length is not None:
output["length"] = request.length
if request.no_digits is not None:
output["no_digits"] = request.no_digits
if request.no_lowercase_letters is not None:
output["no_lowercase_letters"] = request.no_lowercase_letters
if request.no_uppercase_letters is not None:
output["no_uppercase_letters"] = request.no_uppercase_letters
return output
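# Illustrative sketch (not part of the generated SDK): the marshal_* helpers
# only emit fields that are not None, so a fully populated
# PasswordGenerationParams serializes every generation option. The defaults
# argument is unused by this particular helper, hence the None below.
def _demo_marshal_password_params() -> None:
    params = PasswordGenerationParams(
        length=32,
        no_lowercase_letters=False,
        no_uppercase_letters=False,
        no_digits=False,
        additional_chars="!_-",
    )
    body = marshal_PasswordGenerationParams(params, None)  # type: ignore[arg-type]
    assert body == {
        "additional_chars": "!_-",
        "length": 32,
        "no_digits": False,
        "no_lowercase_letters": False,
        "no_uppercase_letters": False,
    }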
def marshal_AddSecretOwnerRequest(
request: AddSecretOwnerRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.product is not None:
output["product"] = Product(request.product)
if request.product_name is not None:
output["product_name"] = request.product_name
return output
def marshal_CreateSecretRequest(
request: CreateSecretRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.tags is not None:
output["tags"] = request.tags
if request.type_ is not None:
output["type"] = SecretType(request.type_)
return output
def marshal_CreateSecretVersionRequest(
request: CreateSecretVersionRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"password_generation",
marshal_PasswordGenerationParams(
request.password_generation, defaults
)
if request.password_generation is not None
else None,
),
]
),
)
if request.data is not None:
output["data"] = request.data
if request.data_crc32 is not None:
output["data_crc32"] = request.data_crc32
if request.description is not None:
output["description"] = request.description
if request.disable_previous is not None:
output["disable_previous"] = request.disable_previous
return output
def marshal_GeneratePasswordRequest(
request: GeneratePasswordRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.additional_chars is not None:
output["additional_chars"] = request.additional_chars
if request.description is not None:
output["description"] = request.description
if request.disable_previous is not None:
output["disable_previous"] = request.disable_previous
if request.length is not None:
output["length"] = request.length
if request.no_digits is not None:
output["no_digits"] = request.no_digits
if request.no_lowercase_letters is not None:
output["no_lowercase_letters"] = request.no_lowercase_letters
if request.no_uppercase_letters is not None:
output["no_uppercase_letters"] = request.no_uppercase_letters
return output
def marshal_UpdateSecretRequest(
request: UpdateSecretRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_UpdateSecretVersionRequest(
request: UpdateSecretVersionRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
return output
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/secret/v1alpha1/marshalling.py
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Zone,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ListImagesRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
UPDATED_AT_ASC = "updated_at_asc"
UPDATED_AT_DESC = "updated_at_desc"
def __str__(self) -> str:
return str(self.value)
class ListLocalImagesRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
def __str__(self) -> str:
return str(self.value)
class ListVersionsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
def __str__(self) -> str:
return str(self.value)
class LocalImageType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_TYPE = "unknown_type"
INSTANCE_LOCAL = "instance_local"
INSTANCE_SBS = "instance_sbs"
def __str__(self) -> str:
return str(self.value)
@dataclass
class Category:
id: str
name: str
description: str
@dataclass
class Image:
"""
Image.
"""
id: str
"""
UUID of this image.
"""
name: str
"""
Name of the image.
"""
description: str
"""
Text description of this image.
"""
logo: str
"""
URL of this image's logo.
"""
categories: List[str]
"""
List of categories this image belongs to.
"""
created_at: Optional[datetime]
"""
Creation date of this image.
"""
updated_at: Optional[datetime]
"""
Date of the last modification of this image.
"""
valid_until: Optional[datetime]
"""
Expiration date of this image.
"""
label: str
"""
Label of this image.
Typically an identifier for a distribution (ex. "ubuntu_focal").
"""
@dataclass
class ListCategoriesResponse:
categories: List[Category]
total_count: int
@dataclass
class ListImagesResponse:
images: List[Image]
total_count: int
@dataclass
class ListLocalImagesResponse:
local_images: List[LocalImage]
total_count: int
@dataclass
class ListVersionsResponse:
versions: List[Version]
total_count: int
@dataclass
class LocalImage:
"""
Local image.
"""
id: str
"""
UUID of this local image.
This is the ID you will typically use to specify an image in an API call.
"""
compatible_commercial_types: List[str]
"""
List of all commercial types that are compatible with this local image.
"""
arch: str
"""
Supported architecture for this local image.
"""
zone: Zone
"""
Availability Zone where this local image is available.
"""
label: str
"""
Image label this image belongs to.
"""
type_: LocalImageType
"""
Type of this local image.
"""
@dataclass
class Version:
"""
Version.
"""
id: str
"""
UUID of this version.
"""
name: str
"""
Name of this version.
"""
created_at: Optional[datetime]
"""
Creation date of this image version.
"""
updated_at: Optional[datetime]
"""
Date of the last modification of this version.
"""
published_at: Optional[datetime]
"""
Date this version was officially published.
"""
@dataclass
class ListImagesRequest:
page_size: Optional[int]
"""
A positive integer lower than or equal to 100, to select the number of items to display.
"""
page: Optional[int]
"""
A positive integer to choose the page to display.
"""
order_by: Optional[ListImagesRequestOrderBy]
"""
Ordering to use.
"""
arch: Optional[str]
"""
Choose for which machine architecture to return images.
"""
category: Optional[str]
"""
Choose the category of images to get.
"""
include_eol: bool
"""
Choose to include end-of-life images.
"""
@dataclass
class GetImageRequest:
image_id: str
"""
UUID of the image to fetch.
"""
@dataclass
class ListVersionsRequest:
image_id: str
page_size: Optional[int]
page: Optional[int]
order_by: Optional[ListVersionsRequestOrderBy]
@dataclass
class GetVersionRequest:
version_id: str
@dataclass
class ListLocalImagesRequest:
image_id: Optional[str]
"""
One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
"""
version_id: Optional[str]
"""
One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
"""
page_size: Optional[int]
page: Optional[int]
order_by: Optional[ListLocalImagesRequestOrderBy]
image_label: Optional[str]
"""
One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
"""
zone: Optional[Zone]
type_: Optional[LocalImageType]
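# Illustrative sketch (not part of the generated SDK): the 'scope' one-of
# means at most one of image_id, version_id and image_label may be set on a
# ListLocalImagesRequest; the others stay None.
def _demo_scope_one_of() -> None:
    req = ListLocalImagesRequest(
        image_id=None,
        version_id=None,
        page_size=None,
        page=None,
        order_by=None,
        image_label="ubuntu_focal",  # the single scope field in use
        zone=None,
        type_=None,
    )
    scope = [v for v in (req.image_id, req.version_id, req.image_label) if v]
    assert scope == ["ubuntu_focal"]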
@dataclass
class GetLocalImageRequest:
local_image_id: str
@dataclass
class ListCategoriesRequest:
page_size: Optional[int]
page: Optional[int]
@dataclass
class GetCategoryRequest:
category_id: str
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/marketplace/v2/types.py
from typing import Any, Dict
from dateutil import parser
from .types import (
Category,
Image,
ListCategoriesResponse,
ListImagesResponse,
ListLocalImagesResponse,
ListVersionsResponse,
LocalImage,
Version,
)
def unmarshal_Category(data: Any) -> Category:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Category' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
return Category(**args)
def unmarshal_Image(data: Any) -> Image:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Image' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("categories", None)
args["categories"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("label", None)
args["label"] = field
field = data.get("logo", None)
args["logo"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("valid_until", None)
args["valid_until"] = parser.isoparse(field) if type(field) is str else field
return Image(**args)
def unmarshal_LocalImage(data: Any) -> LocalImage:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'LocalImage' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("arch", None)
args["arch"] = field
field = data.get("compatible_commercial_types", None)
args["compatible_commercial_types"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("label", None)
args["label"] = field
field = data.get("type", None)
args["type_"] = field
field = data.get("zone", None)
args["zone"] = field
return LocalImage(**args)
def unmarshal_Version(data: Any) -> Version:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Version' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("published_at", None)
args["published_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Version(**args)
def unmarshal_ListCategoriesResponse(data: Any) -> ListCategoriesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListCategoriesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("categories", None)
args["categories"] = (
[unmarshal_Category(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListCategoriesResponse(**args)
def unmarshal_ListImagesResponse(data: Any) -> ListImagesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListImagesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("images", None)
args["images"] = [unmarshal_Image(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListImagesResponse(**args)
def unmarshal_ListLocalImagesResponse(data: Any) -> ListLocalImagesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListLocalImagesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("local_images", None)
args["local_images"] = (
[unmarshal_LocalImage(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListLocalImagesResponse(**args)
def unmarshal_ListVersionsResponse(data: Any) -> ListVersionsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListVersionsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("total_count", None)
args["total_count"] = field
field = data.get("versions", None)
args["versions"] = (
[unmarshal_Version(v) for v in field] if field is not None else None
)
return ListVersionsResponse(**args)
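# Illustrative sketch (not part of the generated SDK): unmarshalling a
# paginated listing straight from a JSON-decoded payload. Absent keys fall
# back to None; the field values below are hypothetical.
def _demo_unmarshal_list_images() -> None:
    resp = unmarshal_ListImagesResponse(
        {
            "images": [{"id": "i-1", "name": "ubuntu", "label": "ubuntu_focal"}],
            "total_count": 1,
        }
    )
    assert resp.total_count == 1
    assert resp.images[0].name == "ubuntu"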
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/marketplace/v2/marshalling.py
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.bridge import (
Zone,
)
from scaleway_core.utils import (
OneOfPossibility,
fetch_all_pages_async,
resolve_one_of,
validate_path_param,
)
from .types import (
ListImagesRequestOrderBy,
ListLocalImagesRequestOrderBy,
ListVersionsRequestOrderBy,
LocalImageType,
Category,
Image,
ListCategoriesResponse,
ListImagesResponse,
ListLocalImagesResponse,
ListVersionsResponse,
LocalImage,
Version,
)
from .marshalling import (
unmarshal_Category,
unmarshal_Image,
unmarshal_LocalImage,
unmarshal_Version,
unmarshal_ListCategoriesResponse,
unmarshal_ListImagesResponse,
unmarshal_ListLocalImagesResponse,
unmarshal_ListVersionsResponse,
)
class MarketplaceV2API(API):
"""
Marketplace API.
"""
async def list_images(
self,
*,
include_eol: bool,
page_size: Optional[int] = None,
page: Optional[int] = None,
order_by: ListImagesRequestOrderBy = ListImagesRequestOrderBy.NAME_ASC,
arch: Optional[str] = None,
category: Optional[str] = None,
) -> ListImagesResponse:
"""
List marketplace images.
List all available images on the marketplace, their UUID, CPU architecture and description.
:param page_size: A positive integer lower than or equal to 100, to select the number of items to display.
:param page: A positive integer to choose the page to display.
:param order_by: Ordering to use.
:param arch: Choose for which machine architecture to return images.
:param category: Choose the category of images to get.
:param include_eol: Choose to include end-of-life images.
:return: :class:`ListImagesResponse <ListImagesResponse>`
Usage:
::
result = await api.list_images(include_eol=True)
"""
res = self._request(
"GET",
f"/marketplace/v2/images",
params={
"arch": arch,
"category": category,
"include_eol": include_eol,
"order_by": order_by,
"page": page,
"page_size": page_size or self.client.default_page_size,
},
)
self._throw_on_error(res)
return unmarshal_ListImagesResponse(res.json())
async def list_images_all(
self,
*,
include_eol: bool,
page_size: Optional[int] = None,
page: Optional[int] = None,
order_by: Optional[ListImagesRequestOrderBy] = None,
arch: Optional[str] = None,
category: Optional[str] = None,
) -> List[Image]:
"""
List marketplace images.
List all available images on the marketplace, their UUID, CPU architecture and description.
:param page_size: A positive integer lower than or equal to 100, to select the number of items to display.
:param page: A positive integer to choose the page to display.
:param order_by: Ordering to use.
:param arch: Choose for which machine architecture to return images.
:param category: Choose the category of images to get.
:param include_eol: Choose to include end-of-life images.
:return: :class:`List[Image] <List[Image]>`
Usage:
::
result = await api.list_images_all(include_eol=True)
"""
return await fetch_all_pages_async(
type=ListImagesResponse,
key="images",
fetcher=self.list_images,
args={
"include_eol": include_eol,
"page_size": page_size,
"page": page,
"order_by": order_by,
"arch": arch,
"category": category,
},
)
async def get_image(
self,
*,
image_id: str,
) -> Image:
"""
Get a specific marketplace image.
Get detailed information about a marketplace image, specified by its `image_id` (UUID format).
:param image_id: UUID of the image to fetch.
:return: :class:`Image <Image>`
Usage:
::
result = await api.get_image(image_id="example")
"""
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"GET",
f"/marketplace/v2/images/{param_image_id}",
)
self._throw_on_error(res)
return unmarshal_Image(res.json())
async def list_versions(
self,
*,
image_id: str,
page_size: Optional[int] = None,
page: Optional[int] = None,
order_by: ListVersionsRequestOrderBy = ListVersionsRequestOrderBy.CREATED_AT_ASC,
) -> ListVersionsResponse:
"""
List versions of an Image.
Get a list of all available versions of an image, specified by its `image_id` (UUID format).
:param image_id:
:param page_size:
:param page:
:param order_by:
:return: :class:`ListVersionsResponse <ListVersionsResponse>`
Usage:
::
result = await api.list_versions(image_id="example")
"""
res = self._request(
"GET",
f"/marketplace/v2/versions",
params={
"image_id": image_id,
"order_by": order_by,
"page": page,
"page_size": page_size or self.client.default_page_size,
},
)
self._throw_on_error(res)
return unmarshal_ListVersionsResponse(res.json())
async def list_versions_all(
self,
*,
image_id: str,
page_size: Optional[int] = None,
page: Optional[int] = None,
order_by: Optional[ListVersionsRequestOrderBy] = None,
) -> List[Version]:
"""
List versions of an Image.
Get a list of all available versions of an image, specified by its `image_id` (UUID format).
:param image_id:
:param page_size:
:param page:
:param order_by:
:return: :class:`List[Version] <List[Version]>`
Usage:
::
result = await api.list_versions_all(image_id="example")
"""
return await fetch_all_pages_async(
type=ListVersionsResponse,
key="versions",
fetcher=self.list_versions,
args={
"image_id": image_id,
"page_size": page_size,
"page": page,
"order_by": order_by,
},
)
async def get_version(
self,
*,
version_id: str,
) -> Version:
"""
Get a specific image version.
Get information such as the name, creation date, last update and published date for an image version specified by its `version_id` (UUID format).
:param version_id:
:return: :class:`Version <Version>`
Usage:
::
result = await api.get_version(version_id="example")
"""
param_version_id = validate_path_param("version_id", version_id)
res = self._request(
"GET",
f"/marketplace/v2/versions/{param_version_id}",
)
self._throw_on_error(res)
return unmarshal_Version(res.json())
async def list_local_images(
self,
*,
image_id: Optional[str] = None,
version_id: Optional[str] = None,
page_size: Optional[int] = None,
page: Optional[int] = None,
order_by: ListLocalImagesRequestOrderBy = ListLocalImagesRequestOrderBy.CREATED_AT_ASC,
image_label: Optional[str] = None,
zone: Optional[Zone] = None,
type_: LocalImageType = LocalImageType.UNKNOWN_TYPE,
) -> ListLocalImagesResponse:
"""
List local images from a specific image or version.
List information about local images in a specific Availability Zone, specified by its `image_id` (UUID format), `version_id` (UUID format) or `image_label`. Only one of these three parameters may be set.
:param image_id: One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
:param version_id: One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
:param page_size:
:param page:
:param order_by:
:param image_label: One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
:param zone:
:param type_:
:return: :class:`ListLocalImagesResponse <ListLocalImagesResponse>`
Usage:
::
result = await api.list_local_images()
"""
res = self._request(
"GET",
f"/marketplace/v2/local-images",
params={
"order_by": order_by,
"page": page,
"page_size": page_size or self.client.default_page_size,
"type": type_,
"zone": zone or self.client.default_zone,
**resolve_one_of(
[
OneOfPossibility("image_id", image_id),
OneOfPossibility("version_id", version_id),
OneOfPossibility("image_label", image_label),
]
),
},
)
self._throw_on_error(res)
return unmarshal_ListLocalImagesResponse(res.json())
async def list_local_images_all(
self,
*,
image_id: Optional[str] = None,
version_id: Optional[str] = None,
page_size: Optional[int] = None,
page: Optional[int] = None,
order_by: Optional[ListLocalImagesRequestOrderBy] = None,
image_label: Optional[str] = None,
zone: Optional[Zone] = None,
type_: Optional[LocalImageType] = None,
) -> List[LocalImage]:
"""
List local images from a specific image or version.
List information about local images in a specific Availability Zone, specified by its `image_id` (UUID format), `version_id` (UUID format) or `image_label`. Only one of these three parameters may be set.
:param image_id: One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
:param version_id: One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
:param page_size:
:param page:
:param order_by:
:param image_label: One-of ('scope'): at most one of 'image_id', 'version_id', 'image_label' could be set.
:param zone:
:param type_:
:return: :class:`List[LocalImage] <List[LocalImage]>`
Usage:
::
result = await api.list_local_images_all()
"""
return await fetch_all_pages_async(
type=ListLocalImagesResponse,
key="local_images",
fetcher=self.list_local_images,
args={
"image_id": image_id,
"version_id": version_id,
"page_size": page_size,
"page": page,
"order_by": order_by,
"image_label": image_label,
"zone": zone,
"type_": type_,
},
)
async def get_local_image(
self,
*,
local_image_id: str,
) -> LocalImage:
"""
Get a specific local image by ID.
Get detailed information about a local image, including compatible commercial types, supported architecture, labels and the Availability Zone of the image, specified by its `local_image_id` (UUID format).
:param local_image_id:
:return: :class:`LocalImage <LocalImage>`
Usage:
::
result = await api.get_local_image(local_image_id="example")
"""
param_local_image_id = validate_path_param("local_image_id", local_image_id)
res = self._request(
"GET",
f"/marketplace/v2/local-images/{param_local_image_id}",
)
self._throw_on_error(res)
return unmarshal_LocalImage(res.json())
async def list_categories(
self,
*,
page_size: Optional[int] = None,
page: Optional[int] = None,
) -> ListCategoriesResponse:
"""
List existing image categories.
Get a list of all existing categories. The output can be paginated.
:param page_size:
:param page:
:return: :class:`ListCategoriesResponse <ListCategoriesResponse>`
Usage:
::
result = await api.list_categories()
"""
res = self._request(
"GET",
f"/marketplace/v2/categories",
params={
"page": page,
"page_size": page_size or self.client.default_page_size,
},
)
self._throw_on_error(res)
return unmarshal_ListCategoriesResponse(res.json())
async def list_categories_all(
self,
*,
page_size: Optional[int] = None,
page: Optional[int] = None,
) -> List[Category]:
"""
List existing image categories.
Get a list of all existing categories. The output can be paginated.
:param page_size:
:param page:
:return: :class:`List[Category] <List[Category]>`
Usage:
::
result = await api.list_categories_all()
"""
return await fetch_all_pages_async(
type=ListCategoriesResponse,
key="categories",
fetcher=self.list_categories,
args={
"page_size": page_size,
"page": page,
},
)
async def get_category(
self,
*,
category_id: str,
) -> Category:
"""
Get a specific category.
Get information about a specific category of the marketplace catalog, specified by its `category_id` (UUID format).
:param category_id:
:return: :class:`Category <Category>`
Usage:
::
result = await api.get_category(category_id="example")
"""
param_category_id = validate_path_param("category_id", category_id)
res = self._request(
"GET",
f"/marketplace/v2/categories/{param_category_id}",
)
self._throw_on_error(res)
return unmarshal_Category(res.json())
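# Illustrative usage sketch (not part of the generated SDK). As with the
# billing sketch earlier, the Client construction is an assumption; adapt it
# to your setup. The *_all helpers page through the API transparently via
# fetch_all_pages_async and return the flattened list of items.
if __name__ == "__main__":
    import asyncio

    from scaleway_async import Client  # assumed top-level re-export

    async def _main() -> None:
        api = MarketplaceV2API(Client(access_key="SCW...", secret_key="..."))  # hypothetical credentials
        images = await api.list_images_all(include_eol=False)
        for image in images:
            print(image.label, image.name)

    asyncio.run(_main())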
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/marketplace/v2/api.py
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from typing import List, Optional
from scaleway_core.bridge import (
Zone,
)
@dataclass
class GetImageResponse:
image: Optional[Image]
@dataclass
class GetVersionResponse:
version: Optional[Version]
@dataclass
class Image:
"""
Image.
"""
id: str
"""
UUID of this image.
"""
name: str
"""
Name of the image.
"""
description: str
"""
Text description of this image.
"""
logo: str
"""
URL of this image's logo.
"""
categories: List[str]
"""
List of categories this image belongs to.
"""
creation_date: Optional[datetime]
"""
Creation date of this image.
"""
modification_date: Optional[datetime]
"""
Date of the last modification of this image.
"""
valid_until: Optional[datetime]
"""
Expiration date of this image.
"""
label: str
"""
Label of this image.
Typically an identifier for a distribution (ex. "ubuntu_focal").
"""
versions: List[Version]
"""
List of versions of this image.
"""
organization: Optional[Organization]
"""
Organization this image belongs to.
"""
current_public_version: str
@dataclass
class ListImagesResponse:
images: List[Image]
total_count: int
@dataclass
class ListVersionsResponse:
versions: List[Version]
total_count: int
@dataclass
class LocalImage:
"""
Local image.
"""
id: str
"""
UUID of this local image.
This is the ID you will typically use to specify an image in an API call.
"""
compatible_commercial_types: List[str]
"""
List of all commercial types that are compatible with this local image.
"""
arch: str
"""
Supported architecture for this local image.
"""
zone: Zone
"""
Availability Zone where this local image is available.
"""
@dataclass
class Organization:
id: str
name: str
@dataclass
class Version:
"""
Version.
"""
id: str
"""
UUID of this version.
"""
name: str
"""
Name of this version.
"""
creation_date: Optional[datetime]
"""
Creation date of this image version.
"""
modification_date: Optional[datetime]
"""
Date of the last modification of this version.
"""
local_images: List[LocalImage]
"""
List of local images available in this version.
"""
@dataclass
class ListImagesRequest:
per_page: Optional[int]
"""
A positive integer lower than or equal to 100, to select the number of items to display.
"""
page: Optional[int]
"""
A positive integer to choose the page to display.
"""
@dataclass
class GetImageRequest:
image_id: str
"""
UUID of the image to fetch.
"""
@dataclass
class ListVersionsRequest:
image_id: str
@dataclass
class GetVersionRequest:
image_id: str
version_id: str
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/marketplace/v1/types.py
from typing import Any, Dict
from dateutil import parser
from .types import (
GetImageResponse,
GetVersionResponse,
Image,
ListImagesResponse,
ListVersionsResponse,
LocalImage,
Organization,
Version,
)
def unmarshal_LocalImage(data: Any) -> LocalImage:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'LocalImage' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("arch", None)
args["arch"] = field
field = data.get("compatible_commercial_types", None)
args["compatible_commercial_types"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("zone", None)
args["zone"] = field
return LocalImage(**args)
def unmarshal_Organization(data: Any) -> Organization:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Organization' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
return Organization(**args)
def unmarshal_Version(data: Any) -> Version:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Version' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("creation_date", None)
args["creation_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("local_images", None)
args["local_images"] = (
[unmarshal_LocalImage(v) for v in field] if field is not None else None
)
field = data.get("modification_date", None)
args["modification_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("name", None)
args["name"] = field
return Version(**args)
def unmarshal_Image(data: Any) -> Image:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Image' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("categories", None)
args["categories"] = field
field = data.get("creation_date", None)
args["creation_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("current_public_version", None)
args["current_public_version"] = field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("label", None)
args["label"] = field
field = data.get("logo", None)
args["logo"] = field
field = data.get("modification_date", None)
args["modification_date"] = parser.isoparse(field) if type(field) is str else field
field = data.get("name", None)
args["name"] = field
field = data.get("organization", None)
args["organization"] = unmarshal_Organization(field) if field is not None else None
field = data.get("valid_until", None)
args["valid_until"] = parser.isoparse(field) if type(field) is str else field
field = data.get("versions", None)
args["versions"] = (
[unmarshal_Version(v) for v in field] if field is not None else None
)
return Image(**args)
def unmarshal_GetImageResponse(data: Any) -> GetImageResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GetImageResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("image", None)
args["image"] = unmarshal_Image(field) if field is not None else None
return GetImageResponse(**args)
def unmarshal_GetVersionResponse(data: Any) -> GetVersionResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GetVersionResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("version", None)
args["version"] = unmarshal_Version(field) if field is not None else None
return GetVersionResponse(**args)
def unmarshal_ListImagesResponse(data: Any) -> ListImagesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListImagesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("images", None)
args["images"] = [unmarshal_Image(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListImagesResponse(**args)
def unmarshal_ListVersionsResponse(data: Any) -> ListVersionsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListVersionsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("total_count", None)
args["total_count"] = field
field = data.get("versions", None)
args["versions"] = (
[unmarshal_Version(v) for v in field] if field is not None else None
)
return ListVersionsResponse(**args)
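# Illustrative sketch (not part of the generated SDK): unlike v2, the v1
# responses nest the whole object graph, so unmarshal_GetImageResponse
# recursively builds versions and their local images. Values hypothetical.
def _demo_unmarshal_nested_image() -> None:
    resp = unmarshal_GetImageResponse(
        {"image": {"id": "i-1", "versions": [{"id": "v-1", "local_images": [{"id": "li-1"}]}]}}
    )
    assert resp.image is not None
    assert resp.image.versions[0].local_images[0].id == "li-1"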
# File: /scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/marketplace/v1/marshalling.py
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.utils import (
fetch_all_pages_async,
validate_path_param,
)
from .types import (
GetImageResponse,
GetVersionResponse,
Image,
ListImagesResponse,
ListVersionsResponse,
)
from .marshalling import (
unmarshal_GetImageResponse,
unmarshal_GetVersionResponse,
unmarshal_ListImagesResponse,
unmarshal_ListVersionsResponse,
)
class MarketplaceV1API(API):
"""
Marketplace API.
"""
async def list_images(
self,
*,
per_page: Optional[int] = None,
page: Optional[int] = None,
) -> ListImagesResponse:
"""
List marketplace images.
:param per_page: A positive integer lower than or equal to 100, to select the number of items to display.
:param page: A positive integer to choose the page to display.
:return: :class:`ListImagesResponse <ListImagesResponse>`
Usage:
::
result = await api.list_images()
"""
res = self._request(
"GET",
f"/marketplace/v1/images",
params={
"page": page,
"per_page": per_page or self.client.default_page_size,
},
)
self._throw_on_error(res)
return unmarshal_ListImagesResponse(res.json())
async def list_images_all(
self,
*,
per_page: Optional[int] = None,
page: Optional[int] = None,
) -> List[Image]:
"""
List marketplace images.
        :param per_page: A positive integer less than or equal to 100, setting the number of items to display per page.
        :param page: A positive integer to choose the page to display.
        :return: :class:`List[Image] <List[Image]>`
Usage:
::
result = await api.list_images_all()
"""
return await fetch_all_pages_async(
type=ListImagesResponse,
key="images",
fetcher=self.list_images,
args={
"per_page": per_page,
"page": page,
},
)
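    # Note on pagination: fetch_all_pages_async (used above) is understood to
    # call `fetcher` repeatedly with an incrementing `page`, concatenating the
    # attribute named by `key` ("images") from each ListImagesResponse until
    # an empty page is returned. This reading is inferred from its usage here,
    # not quoted from scaleway_core documentation.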
async def get_image(
self,
*,
image_id: str,
) -> GetImageResponse:
"""
Get a specific marketplace image.
        :param image_id: ID of the image.
:return: :class:`GetImageResponse <GetImageResponse>`
Usage:
::
result = await api.get_image(image_id="example")
"""
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"GET",
f"/marketplace/v1/images/{param_image_id}",
)
self._throw_on_error(res)
return unmarshal_GetImageResponse(res.json())
async def list_versions(
self,
*,
image_id: str,
) -> ListVersionsResponse:
"""
Usage:
::
result = await api.list_versions(image_id="example")
"""
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"GET",
f"/marketplace/v1/images/{param_image_id}/versions",
)
self._throw_on_error(res)
return unmarshal_ListVersionsResponse(res.json())
async def get_version(
self,
*,
image_id: str,
version_id: str,
) -> GetVersionResponse:
"""
Usage:
::
result = await api.get_version(
image_id="example",
version_id="example",
)
"""
param_image_id = validate_path_param("image_id", image_id)
param_version_id = validate_path_param("version_id", version_id)
res = self._request(
"GET",
f"/marketplace/v1/images/{param_image_id}/versions/{param_version_id}",
)
self._throw_on_error(res)
return unmarshal_GetVersionResponse(res.json())
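# --- Hedged usage sketch (illustration only, not part of the SDK) ----------
# A minimal driver for the API above. The Client bootstrap is an assumption
# about scaleway_core's entry point; adapt it to your environment.
if __name__ == "__main__":
    import asyncio

    from scaleway_core.client import Client  # assumed import path

    async def _demo() -> None:
        client = Client.from_config_file_and_env()  # reads SCW_* credentials
        api = MarketplaceV1API(client)
        images = await api.list_images_all(per_page=50)
        for image in images:
            print(image.id, image.label)

    asyncio.run(_demo())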
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/marketplace/v1/api.py
| 0.935043 | 0.206334 |
api.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
PATRuleProtocol,
DHCP,
DHCPEntry,
Gateway,
GatewayNetwork,
GatewayType,
IP,
ListDHCPEntriesResponse,
ListDHCPsResponse,
ListGatewayNetworksResponse,
ListGatewayTypesResponse,
ListGatewaysResponse,
ListIPsResponse,
ListPATRulesResponse,
PATRule,
SetDHCPEntriesRequestEntry,
SetDHCPEntriesResponse,
SetPATRulesRequestRule,
SetPATRulesResponse,
CreateGatewayRequest,
UpdateGatewayRequest,
CreateGatewayNetworkRequest,
UpdateGatewayNetworkRequest,
CreateDHCPRequest,
UpdateDHCPRequest,
CreateDHCPEntryRequest,
UpdateDHCPEntryRequest,
SetDHCPEntriesRequest,
CreatePATRuleRequest,
UpdatePATRuleRequest,
SetPATRulesRequest,
CreateIPRequest,
UpdateIPRequest,
)
def unmarshal_DHCP(data: Any) -> DHCP:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DHCP' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("address", None)
args["address"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("dns_local_name", None)
args["dns_local_name"] = field
field = data.get("dns_search", None)
args["dns_search"] = field
field = data.get("dns_servers_override", None)
args["dns_servers_override"] = field
field = data.get("enable_dynamic", None)
args["enable_dynamic"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("pool_high", None)
args["pool_high"] = field
field = data.get("pool_low", None)
args["pool_low"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("push_default_route", None)
args["push_default_route"] = field
field = data.get("push_dns_server", None)
args["push_dns_server"] = field
field = data.get("rebind_timer", None)
args["rebind_timer"] = field
field = data.get("renew_timer", None)
args["renew_timer"] = field
field = data.get("subnet", None)
args["subnet"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("valid_lifetime", None)
args["valid_lifetime"] = field
field = data.get("zone", None)
args["zone"] = field
return DHCP(**args)
def unmarshal_GatewayNetwork(data: Any) -> GatewayNetwork:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GatewayNetwork' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("address", None)
args["address"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("dhcp", None)
args["dhcp"] = unmarshal_DHCP(field) if field is not None else None
field = data.get("enable_dhcp", None)
args["enable_dhcp"] = field
field = data.get("enable_masquerade", None)
args["enable_masquerade"] = field
field = data.get("gateway_id", None)
args["gateway_id"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("mac_address", None)
args["mac_address"] = field
field = data.get("private_network_id", None)
args["private_network_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return GatewayNetwork(**args)
def unmarshal_GatewayType(data: Any) -> GatewayType:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'GatewayType' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("bandwidth", None)
args["bandwidth"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("zone", None)
args["zone"] = field
return GatewayType(**args)
def unmarshal_IP(data: Any) -> IP:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'IP' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("address", None)
args["address"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("gateway_id", None)
args["gateway_id"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("reverse", None)
args["reverse"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return IP(**args)
def unmarshal_DHCPEntry(data: Any) -> DHCPEntry:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DHCPEntry' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("gateway_network_id", None)
args["gateway_network_id"] = field
field = data.get("hostname", None)
args["hostname"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("ip_address", None)
args["ip_address"] = field
field = data.get("mac_address", None)
args["mac_address"] = field
field = data.get("type", None)
args["type_"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return DHCPEntry(**args)
def unmarshal_Gateway(data: Any) -> Gateway:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Gateway' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("bastion_enabled", None)
args["bastion_enabled"] = field
field = data.get("bastion_port", None)
args["bastion_port"] = field
field = data.get("can_upgrade_to", None)
args["can_upgrade_to"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("gateway_networks", None)
args["gateway_networks"] = (
[unmarshal_GatewayNetwork(v) for v in field] if field is not None else None
)
field = data.get("id", None)
args["id"] = field
field = data.get("ip", None)
args["ip"] = unmarshal_IP(field) if field is not None else None
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("smtp_enabled", None)
args["smtp_enabled"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("type", None)
args["type_"] = unmarshal_GatewayType(field) if field is not None else None
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("upstream_dns_servers", None)
args["upstream_dns_servers"] = field
field = data.get("version", None)
args["version"] = field
field = data.get("zone", None)
args["zone"] = field
return Gateway(**args)
def unmarshal_PATRule(data: Any) -> PATRule:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'PATRule' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("gateway_id", None)
args["gateway_id"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("private_ip", None)
args["private_ip"] = field
field = data.get("private_port", None)
args["private_port"] = field
field = data.get("protocol", None)
args["protocol"] = field
field = data.get("public_port", None)
args["public_port"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return PATRule(**args)
def unmarshal_ListDHCPEntriesResponse(data: Any) -> ListDHCPEntriesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListDHCPEntriesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("dhcp_entries", None)
args["dhcp_entries"] = (
[unmarshal_DHCPEntry(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListDHCPEntriesResponse(**args)
def unmarshal_ListDHCPsResponse(data: Any) -> ListDHCPsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListDHCPsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("dhcps", None)
args["dhcps"] = [unmarshal_DHCP(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListDHCPsResponse(**args)
def unmarshal_ListGatewayNetworksResponse(data: Any) -> ListGatewayNetworksResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListGatewayNetworksResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("gateway_networks", None)
args["gateway_networks"] = (
[unmarshal_GatewayNetwork(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListGatewayNetworksResponse(**args)
def unmarshal_ListGatewayTypesResponse(data: Any) -> ListGatewayTypesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListGatewayTypesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("types", None)
args["types"] = (
[unmarshal_GatewayType(v) for v in field] if field is not None else None
)
return ListGatewayTypesResponse(**args)
def unmarshal_ListGatewaysResponse(data: Any) -> ListGatewaysResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListGatewaysResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("gateways", None)
args["gateways"] = (
[unmarshal_Gateway(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListGatewaysResponse(**args)
def unmarshal_ListIPsResponse(data: Any) -> ListIPsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListIPsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("ips", None)
args["ips"] = [unmarshal_IP(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListIPsResponse(**args)
def unmarshal_ListPATRulesResponse(data: Any) -> ListPATRulesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListPATRulesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("pat_rules", None)
args["pat_rules"] = (
[unmarshal_PATRule(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListPATRulesResponse(**args)
def unmarshal_SetDHCPEntriesResponse(data: Any) -> SetDHCPEntriesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SetDHCPEntriesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("dhcp_entries", None)
args["dhcp_entries"] = (
[unmarshal_DHCPEntry(v) for v in field] if field is not None else None
)
return SetDHCPEntriesResponse(**args)
def unmarshal_SetPATRulesResponse(data: Any) -> SetPATRulesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SetPATRulesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("pat_rules", None)
args["pat_rules"] = (
[unmarshal_PATRule(v) for v in field] if field is not None else None
)
return SetPATRulesResponse(**args)
def marshal_CreateDHCPRequest(
request: CreateDHCPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.address is not None:
output["address"] = request.address
if request.dns_local_name is not None:
output["dns_local_name"] = request.dns_local_name
if request.dns_search is not None:
output["dns_search"] = request.dns_search
if request.dns_servers_override is not None:
output["dns_servers_override"] = request.dns_servers_override
if request.enable_dynamic is not None:
output["enable_dynamic"] = request.enable_dynamic
if request.pool_high is not None:
output["pool_high"] = request.pool_high
if request.pool_low is not None:
output["pool_low"] = request.pool_low
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.push_default_route is not None:
output["push_default_route"] = request.push_default_route
if request.push_dns_server is not None:
output["push_dns_server"] = request.push_dns_server
if request.rebind_timer is not None:
output["rebind_timer"] = request.rebind_timer
if request.renew_timer is not None:
output["renew_timer"] = request.renew_timer
if request.subnet is not None:
output["subnet"] = request.subnet
if request.valid_lifetime is not None:
output["valid_lifetime"] = request.valid_lifetime
return output
def marshal_SetDHCPEntriesRequestEntry(
request: SetDHCPEntriesRequestEntry,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.ip_address is not None:
output["ip_address"] = request.ip_address
if request.mac_address is not None:
output["mac_address"] = request.mac_address
return output
def marshal_SetPATRulesRequestRule(
request: SetPATRulesRequestRule,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.private_ip is not None:
output["private_ip"] = request.private_ip
if request.private_port is not None:
output["private_port"] = request.private_port
if request.protocol is not None:
output["protocol"] = PATRuleProtocol(request.protocol)
if request.public_port is not None:
output["public_port"] = request.public_port
return output
def marshal_CreateDHCPEntryRequest(
request: CreateDHCPEntryRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.gateway_network_id is not None:
output["gateway_network_id"] = request.gateway_network_id
if request.ip_address is not None:
output["ip_address"] = request.ip_address
if request.mac_address is not None:
output["mac_address"] = request.mac_address
return output
def marshal_CreateGatewayNetworkRequest(
request: CreateGatewayNetworkRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"dhcp_id", request.dhcp_id if request.dhcp_id is not None else None
),
OneOfPossibility(
"dhcp",
marshal_CreateDHCPRequest(request.dhcp, defaults)
if request.dhcp is not None
else None,
),
OneOfPossibility(
"address", request.address if request.address is not None else None
),
]
),
)
if request.enable_dhcp is not None:
output["enable_dhcp"] = request.enable_dhcp
if request.enable_masquerade is not None:
output["enable_masquerade"] = request.enable_masquerade
if request.gateway_id is not None:
output["gateway_id"] = request.gateway_id
if request.private_network_id is not None:
output["private_network_id"] = request.private_network_id
return output
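# Note: resolve_one_of (used above) is understood to keep at most one of the
# OneOfPossibility entries whose value is not None and merge it into the
# output as a single key, enforcing the API's one-of constraint between
# 'dhcp_id', 'dhcp' and 'address'. Inferred from usage, not from
# scaleway_core documentation.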
def marshal_CreateGatewayRequest(
request: CreateGatewayRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.bastion_port is not None:
output["bastion_port"] = request.bastion_port
if request.enable_bastion is not None:
output["enable_bastion"] = request.enable_bastion
if request.enable_smtp is not None:
output["enable_smtp"] = request.enable_smtp
if request.ip_id is not None:
output["ip_id"] = request.ip_id
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.tags is not None:
output["tags"] = request.tags
if request.type_ is not None:
output["type"] = request.type_
if request.upstream_dns_servers is not None:
output["upstream_dns_servers"] = request.upstream_dns_servers
return output
def marshal_CreateIPRequest(
request: CreateIPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_CreatePATRuleRequest(
request: CreatePATRuleRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.gateway_id is not None:
output["gateway_id"] = request.gateway_id
if request.private_ip is not None:
output["private_ip"] = request.private_ip
if request.private_port is not None:
output["private_port"] = request.private_port
if request.protocol is not None:
output["protocol"] = PATRuleProtocol(request.protocol)
if request.public_port is not None:
output["public_port"] = request.public_port
return output
def marshal_SetDHCPEntriesRequest(
request: SetDHCPEntriesRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.dhcp_entries is not None:
output["dhcp_entries"] = [
marshal_SetDHCPEntriesRequestEntry(v, defaults)
for v in request.dhcp_entries
]
if request.gateway_network_id is not None:
output["gateway_network_id"] = request.gateway_network_id
return output
def marshal_SetPATRulesRequest(
request: SetPATRulesRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.gateway_id is not None:
output["gateway_id"] = request.gateway_id
if request.pat_rules is not None:
output["pat_rules"] = [
marshal_SetPATRulesRequestRule(v, defaults) for v in request.pat_rules
]
return output
def marshal_UpdateDHCPEntryRequest(
request: UpdateDHCPEntryRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.ip_address is not None:
output["ip_address"] = request.ip_address
return output
def marshal_UpdateDHCPRequest(
request: UpdateDHCPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.address is not None:
output["address"] = request.address
if request.dns_local_name is not None:
output["dns_local_name"] = request.dns_local_name
if request.dns_search is not None:
output["dns_search"] = request.dns_search
if request.dns_servers_override is not None:
output["dns_servers_override"] = request.dns_servers_override
if request.enable_dynamic is not None:
output["enable_dynamic"] = request.enable_dynamic
if request.pool_high is not None:
output["pool_high"] = request.pool_high
if request.pool_low is not None:
output["pool_low"] = request.pool_low
if request.push_default_route is not None:
output["push_default_route"] = request.push_default_route
if request.push_dns_server is not None:
output["push_dns_server"] = request.push_dns_server
if request.rebind_timer is not None:
output["rebind_timer"] = request.rebind_timer
if request.renew_timer is not None:
output["renew_timer"] = request.renew_timer
if request.subnet is not None:
output["subnet"] = request.subnet
if request.valid_lifetime is not None:
output["valid_lifetime"] = request.valid_lifetime
return output
def marshal_UpdateGatewayNetworkRequest(
request: UpdateGatewayNetworkRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"dhcp_id", request.dhcp_id if request.dhcp_id is not None else None
),
OneOfPossibility(
"address", request.address if request.address is not None else None
),
]
),
)
if request.enable_dhcp is not None:
output["enable_dhcp"] = request.enable_dhcp
if request.enable_masquerade is not None:
output["enable_masquerade"] = request.enable_masquerade
return output
def marshal_UpdateGatewayRequest(
request: UpdateGatewayRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.bastion_port is not None:
output["bastion_port"] = request.bastion_port
if request.enable_bastion is not None:
output["enable_bastion"] = request.enable_bastion
if request.enable_smtp is not None:
output["enable_smtp"] = request.enable_smtp
if request.name is not None:
output["name"] = request.name
if request.tags is not None:
output["tags"] = request.tags
if request.upstream_dns_servers is not None:
output["upstream_dns_servers"] = request.upstream_dns_servers
return output
def marshal_UpdateIPRequest(
request: UpdateIPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.gateway_id is not None:
output["gateway_id"] = request.gateway_id
if request.reverse is not None:
output["reverse"] = request.reverse
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_UpdatePATRuleRequest(
request: UpdatePATRuleRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.private_ip is not None:
output["private_ip"] = request.private_ip
if request.private_port is not None:
output["private_port"] = request.private_port
if request.protocol is not None:
output["protocol"] = PATRuleProtocol(request.protocol)
if request.public_port is not None:
output["public_port"] = request.public_port
return output
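# --- Hedged usage sketch (illustration only, not part of the SDK) ----------
# The payload keys mirror exactly what unmarshal_PATRule reads above; the
# values are invented for illustration.
if __name__ == "__main__":
    rule = unmarshal_PATRule(
        {
            "id": "pat-1",
            "private_ip": "192.168.1.2",
            "private_port": 22,
            "public_port": 2222,
            "protocol": "tcp",
            "created_at": "2023-01-01T00:00:00Z",
        }
    )
    # ISO strings are parsed into datetimes; absent keys stay None.
    print(rule.private_ip, rule.public_port, rule.created_at, rule.zone)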
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/vpcgw/v1/marshalling.py
| 0.735167 | 0.174797 |
marshalling.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
APIKey,
Application,
Group,
JWT,
ListAPIKeysResponse,
ListApplicationsResponse,
ListGroupsResponse,
ListJWTsResponse,
ListPermissionSetsResponse,
ListPoliciesResponse,
ListQuotaResponse,
ListRulesResponse,
ListSSHKeysResponse,
ListUsersResponse,
PermissionSet,
Policy,
Quotum,
Rule,
RuleSpecs,
SSHKey,
SetRulesResponse,
User,
CreateSSHKeyRequest,
UpdateSSHKeyRequest,
CreateUserRequest,
CreateApplicationRequest,
UpdateApplicationRequest,
CreateGroupRequest,
UpdateGroupRequest,
SetGroupMembersRequest,
AddGroupMemberRequest,
AddGroupMembersRequest,
RemoveGroupMemberRequest,
CreatePolicyRequest,
UpdatePolicyRequest,
SetRulesRequest,
CreateAPIKeyRequest,
UpdateAPIKeyRequest,
)
def unmarshal_APIKey(data: Any) -> APIKey:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'APIKey' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("access_key", None)
args["access_key"] = field
field = data.get("application_id", None)
args["application_id"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("creation_ip", None)
args["creation_ip"] = field
field = data.get("default_project_id", None)
args["default_project_id"] = field
field = data.get("description", None)
args["description"] = field
field = data.get("editable", None)
args["editable"] = field
field = data.get("expires_at", None)
args["expires_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("secret_key", None)
args["secret_key"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("user_id", None)
args["user_id"] = field
return APIKey(**args)
def unmarshal_Application(data: Any) -> Application:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Application' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("editable", None)
args["editable"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("nb_api_keys", None)
args["nb_api_keys"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Application(**args)
def unmarshal_Group(data: Any) -> Group:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Group' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("application_ids", None)
args["application_ids"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("user_ids", None)
args["user_ids"] = field
return Group(**args)
def unmarshal_JWT(data: Any) -> JWT:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'JWT' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("audience_id", None)
args["audience_id"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("expires_at", None)
args["expires_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("ip", None)
args["ip"] = field
field = data.get("issuer_id", None)
args["issuer_id"] = field
field = data.get("jti", None)
args["jti"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("user_agent", None)
args["user_agent"] = field
return JWT(**args)
def unmarshal_PermissionSet(data: Any) -> PermissionSet:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'PermissionSet' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("categories", None)
args["categories"] = field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("scope_type", None)
args["scope_type"] = field
return PermissionSet(**args)
def unmarshal_Policy(data: Any) -> Policy:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Policy' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("application_id", None)
args["application_id"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("editable", None)
args["editable"] = field
field = data.get("group_id", None)
args["group_id"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("nb_permission_sets", None)
args["nb_permission_sets"] = field
field = data.get("nb_rules", None)
args["nb_rules"] = field
field = data.get("nb_scopes", None)
args["nb_scopes"] = field
field = data.get("no_principal", None)
args["no_principal"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("user_id", None)
args["user_id"] = field
return Policy(**args)
def unmarshal_Quotum(data: Any) -> Quotum:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Quotum' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("limit", None)
args["limit"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("unlimited", None)
args["unlimited"] = field
return Quotum(**args)
def unmarshal_Rule(data: Any) -> Rule:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Rule' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("account_root_user_id", None)
args["account_root_user_id"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("permission_set_names", None)
args["permission_set_names"] = field
field = data.get("permission_sets_scope_type", None)
args["permission_sets_scope_type"] = field
field = data.get("project_ids", None)
args["project_ids"] = field
return Rule(**args)
def unmarshal_SSHKey(data: Any) -> SSHKey:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SSHKey' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("disabled", None)
args["disabled"] = field
field = data.get("fingerprint", None)
args["fingerprint"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("public_key", None)
args["public_key"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return SSHKey(**args)
def unmarshal_User(data: Any) -> User:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'User' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("account_root_user_id", None)
args["account_root_user_id"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("deletable", None)
args["deletable"] = field
field = data.get("email", None)
args["email"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("last_login_at", None)
args["last_login_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("mfa", None)
args["mfa"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("two_factor_enabled", None)
args["two_factor_enabled"] = field
field = data.get("type", None)
args["type_"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return User(**args)
def unmarshal_ListAPIKeysResponse(data: Any) -> ListAPIKeysResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListAPIKeysResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("api_keys", None)
args["api_keys"] = (
[unmarshal_APIKey(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListAPIKeysResponse(**args)
def unmarshal_ListApplicationsResponse(data: Any) -> ListApplicationsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListApplicationsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("applications", None)
args["applications"] = (
[unmarshal_Application(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListApplicationsResponse(**args)
def unmarshal_ListGroupsResponse(data: Any) -> ListGroupsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListGroupsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("groups", None)
args["groups"] = [unmarshal_Group(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListGroupsResponse(**args)
def unmarshal_ListJWTsResponse(data: Any) -> ListJWTsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListJWTsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("jwts", None)
args["jwts"] = [unmarshal_JWT(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListJWTsResponse(**args)
def unmarshal_ListPermissionSetsResponse(data: Any) -> ListPermissionSetsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListPermissionSetsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("permission_sets", None)
args["permission_sets"] = (
[unmarshal_PermissionSet(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListPermissionSetsResponse(**args)
def unmarshal_ListPoliciesResponse(data: Any) -> ListPoliciesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListPoliciesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("policies", None)
args["policies"] = (
[unmarshal_Policy(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListPoliciesResponse(**args)
def unmarshal_ListQuotaResponse(data: Any) -> ListQuotaResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListQuotaResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("quota", None)
args["quota"] = [unmarshal_Quotum(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListQuotaResponse(**args)
def unmarshal_ListRulesResponse(data: Any) -> ListRulesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListRulesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("rules", None)
args["rules"] = [unmarshal_Rule(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListRulesResponse(**args)
def unmarshal_ListSSHKeysResponse(data: Any) -> ListSSHKeysResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListSSHKeysResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("ssh_keys", None)
args["ssh_keys"] = (
[unmarshal_SSHKey(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListSSHKeysResponse(**args)
def unmarshal_ListUsersResponse(data: Any) -> ListUsersResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListUsersResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("total_count", None)
args["total_count"] = field
field = data.get("users", None)
args["users"] = [unmarshal_User(v) for v in field] if field is not None else None
return ListUsersResponse(**args)
def unmarshal_SetRulesResponse(data: Any) -> SetRulesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SetRulesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("rules", None)
args["rules"] = [unmarshal_Rule(v) for v in field] if field is not None else None
return SetRulesResponse(**args)
def marshal_RuleSpecs(
request: RuleSpecs,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"project_ids",
request.project_ids if request.project_ids is not None else None,
),
OneOfPossibility(
"organization_id",
request.organization_id
if request.organization_id is not None
else None,
defaults.default_organization_id,
),
]
),
)
if request.permission_set_names is not None:
output["permission_set_names"] = request.permission_set_names
return output
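# Note: unlike most one-of call sites in this module, the 'organization_id'
# possibility above receives defaults.default_organization_id as an extra
# argument, which reads as a fallback applied when the request leaves the
# field unset. Inferred from usage, not from scaleway_core documentation.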
def marshal_AddGroupMemberRequest(
request: AddGroupMemberRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"user_id", request.user_id if request.user_id is not None else None
),
OneOfPossibility(
"application_id",
request.application_id
if request.application_id is not None
else None,
),
]
),
)
return output
def marshal_AddGroupMembersRequest(
request: AddGroupMembersRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.application_ids is not None:
output["application_ids"] = request.application_ids
if request.user_ids is not None:
output["user_ids"] = request.user_ids
return output
def marshal_CreateAPIKeyRequest(
request: CreateAPIKeyRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"application_id",
request.application_id
if request.application_id is not None
else None,
),
OneOfPossibility(
"user_id", request.user_id if request.user_id is not None else None
),
]
),
)
if request.default_project_id is not None:
output["default_project_id"] = request.default_project_id
if request.description is not None:
output["description"] = request.description
if request.expires_at is not None:
output["expires_at"] = request.expires_at
return output
def marshal_CreateApplicationRequest(
request: CreateApplicationRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.organization_id is not None:
output["organization_id"] = (
request.organization_id or defaults.default_organization_id
)
return output
def marshal_CreateGroupRequest(
request: CreateGroupRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.organization_id is not None:
output["organization_id"] = (
request.organization_id or defaults.default_organization_id
)
return output
def marshal_CreatePolicyRequest(
request: CreatePolicyRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"user_id", request.user_id if request.user_id is not None else None
),
OneOfPossibility(
"group_id",
request.group_id if request.group_id is not None else None,
),
OneOfPossibility(
"application_id",
request.application_id
if request.application_id is not None
else None,
),
OneOfPossibility(
"no_principal",
request.no_principal if request.no_principal is not None else None,
),
]
),
)
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.organization_id is not None:
output["organization_id"] = (
request.organization_id or defaults.default_organization_id
)
if request.rules is not None:
output["rules"] = [marshal_RuleSpecs(v, defaults) for v in request.rules]
return output
def marshal_CreateSSHKeyRequest(
request: CreateSSHKeyRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.public_key is not None:
output["public_key"] = request.public_key
return output
def marshal_CreateUserRequest(
request: CreateUserRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.email is not None:
output["email"] = request.email
if request.organization_id is not None:
output["organization_id"] = (
request.organization_id or defaults.default_organization_id
)
return output
def marshal_RemoveGroupMemberRequest(
request: RemoveGroupMemberRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"user_id", request.user_id if request.user_id is not None else None
),
OneOfPossibility(
"application_id",
request.application_id
if request.application_id is not None
else None,
),
]
),
)
return output
def marshal_SetGroupMembersRequest(
request: SetGroupMembersRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.application_ids is not None:
output["application_ids"] = request.application_ids
if request.user_ids is not None:
output["user_ids"] = request.user_ids
return output
def marshal_SetRulesRequest(
request: SetRulesRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.policy_id is not None:
output["policy_id"] = request.policy_id
if request.rules is not None:
output["rules"] = [marshal_RuleSpecs(v, defaults) for v in request.rules]
return output
def marshal_UpdateAPIKeyRequest(
request: UpdateAPIKeyRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.default_project_id is not None:
output["default_project_id"] = request.default_project_id
if request.description is not None:
output["description"] = request.description
return output
def marshal_UpdateApplicationRequest(
request: UpdateApplicationRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
return output
def marshal_UpdateGroupRequest(
request: UpdateGroupRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
return output
def marshal_UpdatePolicyRequest(
request: UpdatePolicyRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"user_id", request.user_id if request.user_id is not None else None
),
OneOfPossibility(
"group_id",
request.group_id if request.group_id is not None else None,
),
OneOfPossibility(
"application_id",
request.application_id
if request.application_id is not None
else None,
),
OneOfPossibility(
"no_principal",
request.no_principal if request.no_principal is not None else None,
),
]
),
)
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
return output
def marshal_UpdateSSHKeyRequest(
request: UpdateSSHKeyRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.disabled is not None:
output["disabled"] = request.disabled
if request.name is not None:
output["name"] = request.name
return output
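# --- Hedged usage sketch (illustration only, not part of the SDK) ----------
# The payload keys mirror what unmarshal_SSHKey reads above; the values are
# invented for illustration.
if __name__ == "__main__":
    key = unmarshal_SSHKey(
        {
            "id": "key-1",
            "name": "laptop",
            "fingerprint": "SHA256:example",
            "created_at": "2023-03-03T03:03:03Z",
        }
    )
    print(key.name, key.fingerprint, key.created_at)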
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/iam/v1alpha1/marshalling.py
| 0.693784 | 0.168788 |
marshalling.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ListPrivateNetworksRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class ListVPCsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
@dataclass
class AddSubnetsResponse:
subnets: List[str]
@dataclass
class DeleteSubnetsResponse:
subnets: List[str]
@dataclass
class ListPrivateNetworksResponse:
private_networks: List[PrivateNetwork]
total_count: int
@dataclass
class ListVPCsResponse:
vpcs: List[VPC]
total_count: int
@dataclass
class PrivateNetwork:
"""
Private network.
"""
id: str
"""
Private Network ID.
"""
name: str
"""
Private Network name.
"""
organization_id: str
"""
Scaleway Organization the Private Network belongs to.
"""
project_id: str
"""
Scaleway Project the Private Network belongs to.
"""
region: Region
"""
Region in which the Private Network is available.
"""
tags: List[str]
"""
Tags of the Private Network.
"""
created_at: Optional[datetime]
"""
Date the Private Network was created.
"""
updated_at: Optional[datetime]
"""
Date the Private Network was last modified.
"""
subnets: List[Subnet]
"""
Private Network subnets.
"""
vpc_id: str
"""
VPC the Private Network belongs to.
"""
dhcp_enabled: bool
"""
Defines whether managed DHCP is enabled for this Private Network.
"""
@dataclass
class SetSubnetsResponse:
subnets: List[str]
@dataclass
class Subnet:
"""
Subnet.
"""
id: str
"""
ID of the subnet.
"""
created_at: Optional[datetime]
"""
Subnet creation date.
"""
updated_at: Optional[datetime]
"""
Subnet last modification date.
"""
subnet: str
"""
Subnet CIDR.
"""
@dataclass
class VPC:
"""
    VPC.
"""
id: str
"""
VPC ID.
"""
name: str
"""
VPC name.
"""
organization_id: str
"""
Scaleway Organization the VPC belongs to.
"""
project_id: str
"""
Scaleway Project the VPC belongs to.
"""
region: Region
"""
Region of the VPC.
"""
tags: List[str]
"""
Tags for the VPC.
"""
is_default: bool
"""
Defines whether the VPC is the default one for its Project.
"""
created_at: Optional[datetime]
"""
Date the VPC was created.
"""
updated_at: Optional[datetime]
"""
Date the VPC was last modified.
"""
private_network_count: int
"""
Number of Private Networks within this VPC.
"""
@dataclass
class ListVPCsRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
order_by: Optional[ListVPCsRequestOrderBy]
"""
Sort order of the returned VPCs.
"""
page: Optional[int]
"""
Page number to return, from the paginated results.
"""
page_size: Optional[int]
"""
Maximum number of VPCs to return per page.
"""
name: Optional[str]
"""
Name to filter for. Only VPCs with names containing this string will be returned.
"""
tags: Optional[List[str]]
"""
    Tags to filter for. Only VPCs with one or more matching tags will be returned.
"""
organization_id: Optional[str]
"""
Organization ID to filter for. Only VPCs belonging to this Organization will be returned.
"""
project_id: Optional[str]
"""
Project ID to filter for. Only VPCs belonging to this Project will be returned.
"""
is_default: Optional[bool]
"""
Defines whether to filter only for VPCs which are the default one for their Project.
"""
@dataclass
class CreateVPCRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
name: Optional[str]
"""
Name for the VPC.
"""
project_id: Optional[str]
"""
Scaleway Project in which to create the VPC.
"""
tags: Optional[List[str]]
"""
Tags for the VPC.
"""
@dataclass
class GetVPCRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
vpc_id: str
"""
VPC ID.
"""
@dataclass
class UpdateVPCRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
vpc_id: str
"""
VPC ID.
"""
name: Optional[str]
"""
Name for the VPC.
"""
tags: Optional[List[str]]
"""
Tags for the VPC.
"""
@dataclass
class DeleteVPCRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
vpc_id: str
"""
VPC ID.
"""
@dataclass
class ListPrivateNetworksRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
order_by: Optional[ListPrivateNetworksRequestOrderBy]
"""
Sort order of the returned Private Networks.
"""
page: Optional[int]
"""
Page number to return, from the paginated results.
"""
page_size: Optional[int]
"""
Maximum number of Private Networks to return per page.
"""
name: Optional[str]
"""
Name to filter for. Only Private Networks with names containing this string will be returned.
"""
tags: Optional[List[str]]
"""
Tags to filter for. Only Private Networks with one or more matching tags will be returned.
"""
organization_id: Optional[str]
"""
Organization ID to filter for. Only Private Networks belonging to this Organization will be returned.
"""
project_id: Optional[str]
"""
Project ID to filter for. Only Private Networks belonging to this Project will be returned.
"""
private_network_ids: Optional[List[str]]
"""
Private Network IDs to filter for. Only Private Networks with one of these IDs will be returned.
"""
vpc_id: Optional[str]
"""
VPC ID to filter for. Only Private Networks belonging to this VPC will be returned.
"""
dhcp_enabled: Optional[bool]
"""
DHCP status to filter for. When true, only Private Networks with managed DHCP enabled will be returned.
"""
@dataclass
class CreatePrivateNetworkRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
name: Optional[str]
"""
Name for the Private Network.
"""
project_id: Optional[str]
"""
Scaleway Project in which to create the Private Network.
"""
tags: Optional[List[str]]
"""
Tags for the Private Network.
"""
subnets: Optional[List[str]]
"""
Private Network subnets CIDR.
"""
vpc_id: Optional[str]
"""
VPC in which to create the Private Network.
"""
@dataclass
class GetPrivateNetworkRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
@dataclass
class UpdatePrivateNetworkRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
name: Optional[str]
"""
Name for the Private Network.
"""
tags: Optional[List[str]]
"""
Tags for the Private Network.
"""
@dataclass
class DeletePrivateNetworkRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
@dataclass
class MigrateZonalPrivateNetworksRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
organization_id: Optional[str]
"""
    Organization ID to target. The specified zonal Private Networks within this Organization will be migrated to regional ones.
One-of ('scope'): at most one of 'organization_id', 'project_id' could be set.
"""
project_id: Optional[str]
"""
    Project to target. The specified zonal Private Networks within this Project will be migrated to regional ones.
One-of ('scope'): at most one of 'organization_id', 'project_id' could be set.
"""
private_network_ids: Optional[List[str]]
"""
IDs of the Private Networks to migrate.
"""
@dataclass
class EnableDHCPRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
@dataclass
class SetSubnetsRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
subnets: Optional[List[str]]
"""
Private Network subnets CIDR.
"""
@dataclass
class AddSubnetsRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
subnets: Optional[List[str]]
"""
Private Network subnets CIDR.
"""
@dataclass
class DeleteSubnetsRequest:
region: Optional[Region]
"""
    Region to target. If none is passed, the default region from the config will be used.
"""
private_network_id: str
"""
Private Network ID.
"""
subnets: Optional[List[str]]
"""
Private Network subnets CIDR.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/vpc/v2/types.py
| 0.926462 | 0.311761 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
AddSubnetsResponse,
DeleteSubnetsResponse,
ListPrivateNetworksResponse,
ListVPCsResponse,
PrivateNetwork,
SetSubnetsResponse,
Subnet,
VPC,
CreateVPCRequest,
UpdateVPCRequest,
CreatePrivateNetworkRequest,
UpdatePrivateNetworkRequest,
MigrateZonalPrivateNetworksRequest,
SetSubnetsRequest,
AddSubnetsRequest,
DeleteSubnetsRequest,
)
def unmarshal_Subnet(data: Any) -> Subnet:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Subnet' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("subnet", None)
args["subnet"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Subnet(**args)
def unmarshal_PrivateNetwork(data: Any) -> PrivateNetwork:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'PrivateNetwork' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("dhcp_enabled", None)
args["dhcp_enabled"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("subnets", None)
args["subnets"] = (
[unmarshal_Subnet(v) for v in field] if field is not None else None
)
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("vpc_id", None)
args["vpc_id"] = field
return PrivateNetwork(**args)
def unmarshal_VPC(data: Any) -> VPC:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'VPC' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("is_default", None)
args["is_default"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("private_network_count", None)
args["private_network_count"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return VPC(**args)
def unmarshal_AddSubnetsResponse(data: Any) -> AddSubnetsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'AddSubnetsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("subnets", None)
args["subnets"] = field
return AddSubnetsResponse(**args)
def unmarshal_DeleteSubnetsResponse(data: Any) -> DeleteSubnetsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DeleteSubnetsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("subnets", None)
args["subnets"] = field
return DeleteSubnetsResponse(**args)
def unmarshal_ListPrivateNetworksResponse(data: Any) -> ListPrivateNetworksResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListPrivateNetworksResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("private_networks", None)
args["private_networks"] = (
[unmarshal_PrivateNetwork(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListPrivateNetworksResponse(**args)
def unmarshal_ListVPCsResponse(data: Any) -> ListVPCsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListVPCsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("total_count", None)
args["total_count"] = field
field = data.get("vpcs", None)
args["vpcs"] = [unmarshal_VPC(v) for v in field] if field is not None else None
return ListVPCsResponse(**args)
def unmarshal_SetSubnetsResponse(data: Any) -> SetSubnetsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SetSubnetsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("subnets", None)
args["subnets"] = field
return SetSubnetsResponse(**args)
def marshal_AddSubnetsRequest(
request: AddSubnetsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.subnets is not None:
output["subnets"] = request.subnets
return output
def marshal_CreatePrivateNetworkRequest(
request: CreatePrivateNetworkRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.subnets is not None:
output["subnets"] = request.subnets
if request.tags is not None:
output["tags"] = request.tags
if request.vpc_id is not None:
output["vpc_id"] = request.vpc_id
return output
def marshal_CreateVPCRequest(
request: CreateVPCRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_DeleteSubnetsRequest(
request: DeleteSubnetsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.subnets is not None:
output["subnets"] = request.subnets
return output
def marshal_MigrateZonalPrivateNetworksRequest(
request: MigrateZonalPrivateNetworksRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"project_id",
request.project_id or defaults.default_project_id
if request.project_id is not None
else None,
defaults.default_project_id,
),
OneOfPossibility(
"organization_id",
request.organization_id or defaults.default_organization_id
if request.organization_id is not None
else None,
defaults.default_organization_id,
),
]
),
)
if request.private_network_ids is not None:
output["private_network_ids"] = request.private_network_ids
return output
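# Minimal sketch of the one-of resolution the marshaller above relies on.
# This is an illustrative re-implementation, not the real
# scaleway_core.utils.resolve_one_of: it keeps the first possibility whose
# value is set, so at most one key of the 'scope' group reaches the payload.
def _resolve_one_of_sketch(possibilities):
    for name, value in possibilities:
        if value is not None:
            return {name: value}
    return {}

assert _resolve_one_of_sketch(
    [("project_id", "11111111-1111-1111-1111-111111111111"), ("organization_id", None)]
) == {"project_id": "11111111-1111-1111-1111-111111111111"}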
def marshal_SetSubnetsRequest(
request: SetSubnetsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.subnets is not None:
output["subnets"] = request.subnets
return output
def marshal_UpdatePrivateNetworkRequest(
request: UpdatePrivateNetworkRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_UpdateVPCRequest(
request: UpdateVPCRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.tags is not None:
output["tags"] = request.tags
return output
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/vpc/v2/marshalling.py
| 0.757077 | 0.197851 |
marshalling.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Zone,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ListPrivateNetworksRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
@dataclass
class ListPrivateNetworksResponse:
private_networks: List[PrivateNetwork]
total_count: int
@dataclass
class PrivateNetwork:
"""
Private network.
"""
id: str
"""
Private Network ID.
"""
name: str
"""
Private Network name.
"""
organization_id: str
"""
Scaleway Organization the Private Network belongs to.
"""
project_id: str
"""
Scaleway Project the Private Network belongs to.
"""
zone: Zone
"""
Availability Zone in which the Private Network is available.
"""
tags: List[str]
"""
Tags of the Private Network.
"""
created_at: Optional[datetime]
"""
Date the Private Network was created.
"""
updated_at: Optional[datetime]
"""
Date the Private Network was last modified.
"""
subnets: List[str]
"""
Private Network subnets CIDR.
"""
@dataclass
class ListPrivateNetworksRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
order_by: Optional[ListPrivateNetworksRequestOrderBy]
"""
Sort order of the returned Private Networks.
"""
page: Optional[int]
"""
Page number to return, from the paginated results.
"""
page_size: Optional[int]
"""
Maximum number of Private Networks to return per page.
"""
name: Optional[str]
"""
Name to filter for. Only Private Networks with names containing this string will be returned.
"""
tags: Optional[List[str]]
"""
Tags to filter for. Only Private Networks with one or more matching tags will be returned.
"""
organization_id: Optional[str]
"""
Organization ID to filter for. Only Private Networks belonging to this Organization will be returned.
"""
project_id: Optional[str]
"""
Project ID to filter for. Only Private Networks belonging to this Project will be returned.
"""
private_network_ids: Optional[List[str]]
"""
Private Network IDs to filter for. Only Private Networks with one of these IDs will be returned.
"""
include_regional: Optional[bool]
"""
Defines whether to include regional Private Networks in the response.
"""
@dataclass
class CreatePrivateNetworkRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
name: Optional[str]
"""
Name for the Private Network.
"""
project_id: Optional[str]
"""
Scaleway Project in which to create the Private Network.
"""
tags: Optional[List[str]]
"""
Tags for the Private Network.
"""
subnets: Optional[List[str]]
"""
Private Network subnets CIDR.
"""
@dataclass
class GetPrivateNetworkRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
private_network_id: str
"""
Private Network ID.
"""
@dataclass
class UpdatePrivateNetworkRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
private_network_id: str
"""
Private Network ID.
"""
name: Optional[str]
"""
Name of the private network.
"""
tags: Optional[List[str]]
"""
Tags for the Private Network.
"""
subnets: Optional[List[str]]
"""
Private Network subnets CIDR (deprecated).
:deprecated
"""
@dataclass
class DeletePrivateNetworkRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
private_network_id: str
"""
Private Network ID.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/vpc/v1/types.py
| 0.916213 | 0.367242 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
ListPrivateNetworksResponse,
PrivateNetwork,
CreatePrivateNetworkRequest,
UpdatePrivateNetworkRequest,
)
def unmarshal_PrivateNetwork(data: Any) -> PrivateNetwork:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'PrivateNetwork' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("subnets", None)
args["subnets"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return PrivateNetwork(**args)
def unmarshal_ListPrivateNetworksResponse(data: Any) -> ListPrivateNetworksResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListPrivateNetworksResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("private_networks", None)
args["private_networks"] = (
[unmarshal_PrivateNetwork(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListPrivateNetworksResponse(**args)
def marshal_CreatePrivateNetworkRequest(
request: CreatePrivateNetworkRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.subnets is not None:
output["subnets"] = request.subnets
if request.tags is not None:
output["tags"] = request.tags
return output
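# Hedged usage sketch: ProfileDefaults' constructor is not shown in this
# file, so a minimal stand-in exposing the one attribute read above is used
# instead (an assumption for illustration, not the real class).
class _FakeDefaults:
    default_project_id = "11111111-1111-1111-1111-111111111111"

_payload = marshal_CreatePrivateNetworkRequest(
    CreatePrivateNetworkRequest(
        zone=None,
        name="my-network",
        project_id=None,  # None, so the key is omitted from the payload
        tags=["demo"],
        subnets=["192.168.0.0/24"],
    ),
    _FakeDefaults(),  # type: ignore[arg-type]
)
assert _payload == {"name": "my-network", "subnets": ["192.168.0.0/24"], "tags": ["demo"]}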
def marshal_UpdatePrivateNetworkRequest(
request: UpdatePrivateNetworkRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.subnets is not None:
output["subnets"] = request.subnets
if request.tags is not None:
output["tags"] = request.tags
return output
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/vpc/v1/marshalling.py
| 0.724675 | 0.176317 |
marshalling.py
|
pypi
|
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.bridge import (
Zone,
)
from scaleway_core.utils import (
fetch_all_pages_async,
random_name,
validate_path_param,
)
from .types import (
ListPrivateNetworksRequestOrderBy,
ListPrivateNetworksResponse,
PrivateNetwork,
CreatePrivateNetworkRequest,
UpdatePrivateNetworkRequest,
)
from .marshalling import (
marshal_CreatePrivateNetworkRequest,
marshal_UpdatePrivateNetworkRequest,
unmarshal_PrivateNetwork,
unmarshal_ListPrivateNetworksResponse,
)
class VpcV1API(API):
"""
VPC API.
"""
async def list_private_networks(
self,
*,
zone: Optional[Zone] = None,
order_by: ListPrivateNetworksRequestOrderBy = ListPrivateNetworksRequestOrderBy.CREATED_AT_ASC,
page: Optional[int] = None,
page_size: Optional[int] = None,
name: Optional[str] = None,
tags: Optional[List[str]] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
private_network_ids: Optional[List[str]] = None,
include_regional: Optional[bool] = None,
) -> ListPrivateNetworksResponse:
"""
List Private Networks.
List existing Private Networks in a specified Availability Zone. By default, the Private Networks returned in the list are ordered by creation date in ascending order, though this can be modified via the order_by field.
:param zone: Zone to target. If none is passed will use default zone from the config.
:param order_by: Sort order of the returned Private Networks.
:param page: Page number to return, from the paginated results.
:param page_size: Maximum number of Private Networks to return per page.
:param name: Name to filter for. Only Private Networks with names containing this string will be returned.
:param tags: Tags to filter for. Only Private Networks with one or more matching tags will be returned.
:param organization_id: Organization ID to filter for. Only Private Networks belonging to this Organization will be returned.
:param project_id: Project ID to filter for. Only Private Networks belonging to this Project will be returned.
:param private_network_ids: Private Network IDs to filter for. Only Private Networks with one of these IDs will be returned.
:param include_regional: Defines whether to include regional Private Networks in the response.
:return: :class:`ListPrivateNetworksResponse <ListPrivateNetworksResponse>`
Usage:
::
result = await api.list_private_networks()
"""
param_zone = validate_path_param("zone", zone or self.client.default_zone)
res = self._request(
"GET",
f"/vpc/v1/zones/{param_zone}/private-networks",
params={
"include_regional": include_regional,
"name": name,
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"private_network_ids": private_network_ids,
"project_id": project_id or self.client.default_project_id,
"tags": tags,
},
)
self._throw_on_error(res)
return unmarshal_ListPrivateNetworksResponse(res.json())
async def list_private_networks_all(
self,
*,
zone: Optional[Zone] = None,
order_by: Optional[ListPrivateNetworksRequestOrderBy] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
name: Optional[str] = None,
tags: Optional[List[str]] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
private_network_ids: Optional[List[str]] = None,
include_regional: Optional[bool] = None,
) -> List[PrivateNetwork]:
"""
List Private Networks.
List existing Private Networks in a specified Availability Zone. By default, the Private Networks returned in the list are ordered by creation date in ascending order, though this can be modified via the order_by field.
:param zone: Zone to target. If none is passed will use default zone from the config.
:param order_by: Sort order of the returned Private Networks.
:param page: Page number to return, from the paginated results.
:param page_size: Maximum number of Private Networks to return per page.
:param name: Name to filter for. Only Private Networks with names containing this string will be returned.
:param tags: Tags to filter for. Only Private Networks with one or more matching tags will be returned.
:param organization_id: Organization ID to filter for. Only Private Networks belonging to this Organization will be returned.
:param project_id: Project ID to filter for. Only Private Networks belonging to this Project will be returned.
:param private_network_ids: Private Network IDs to filter for. Only Private Networks with one of these IDs will be returned.
:param include_regional: Defines whether to include regional Private Networks in the response.
:return: :class:`List[PrivateNetwork] <List[PrivateNetwork]>`
Usage:
::
result = await api.list_private_networks_all()
"""
return await fetch_all_pages_async(
type=ListPrivateNetworksResponse,
key="private_networks",
fetcher=self.list_private_networks,
args={
"zone": zone,
"order_by": order_by,
"page": page,
"page_size": page_size,
"name": name,
"tags": tags,
"organization_id": organization_id,
"project_id": project_id,
"private_network_ids": private_network_ids,
"include_regional": include_regional,
},
)
async def create_private_network(
self,
*,
zone: Optional[Zone] = None,
name: Optional[str] = None,
project_id: Optional[str] = None,
tags: Optional[List[str]] = None,
subnets: Optional[List[str]] = None,
) -> PrivateNetwork:
"""
Create a Private Network.
Create a new Private Network. Once created, you can attach Scaleway resources in the same Availability Zone.
:param zone: Zone to target. If none is passed will use default zone from the config.
:param name: Name for the Private Network.
:param project_id: Scaleway Project in which to create the Private Network.
:param tags: Tags for the Private Network.
:param subnets: Private Network subnets CIDR.
:return: :class:`PrivateNetwork <PrivateNetwork>`
Usage:
::
result = await api.create_private_network()
"""
param_zone = validate_path_param("zone", zone or self.client.default_zone)
res = self._request(
"POST",
f"/vpc/v1/zones/{param_zone}/private-networks",
body=marshal_CreatePrivateNetworkRequest(
CreatePrivateNetworkRequest(
zone=zone,
name=name or random_name(prefix="pn"),
project_id=project_id,
tags=tags,
subnets=subnets,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_PrivateNetwork(res.json())
async def get_private_network(
self,
*,
private_network_id: str,
zone: Optional[Zone] = None,
) -> PrivateNetwork:
"""
Get a Private Network.
Retrieve information about an existing Private Network, specified by its Private Network ID. Its full details are returned in the response object.
:param zone: Zone to target. If none is passed will use default zone from the config.
:param private_network_id: Private Network ID.
:return: :class:`PrivateNetwork <PrivateNetwork>`
Usage:
::
result = await api.get_private_network(private_network_id="example")
"""
param_zone = validate_path_param("zone", zone or self.client.default_zone)
param_private_network_id = validate_path_param(
"private_network_id", private_network_id
)
res = self._request(
"GET",
f"/vpc/v1/zones/{param_zone}/private-networks/{param_private_network_id}",
)
self._throw_on_error(res)
return unmarshal_PrivateNetwork(res.json())
async def update_private_network(
self,
*,
private_network_id: str,
zone: Optional[Zone] = None,
name: Optional[str] = None,
tags: Optional[List[str]] = None,
subnets: Optional[List[str]] = None,
) -> PrivateNetwork:
"""
Update Private Network.
Update parameters (such as name or tags) of an existing Private Network, specified by its Private Network ID.
:param zone: Zone to target. If none is passed will use default zone from the config.
:param private_network_id: Private Network ID.
:param name: Name of the private network.
:param tags: Tags for the Private Network.
:param subnets: Private Network subnets CIDR (deprecated).
:return: :class:`PrivateNetwork <PrivateNetwork>`
Usage:
::
result = await api.update_private_network(private_network_id="example")
"""
param_zone = validate_path_param("zone", zone or self.client.default_zone)
param_private_network_id = validate_path_param(
"private_network_id", private_network_id
)
res = self._request(
"PATCH",
f"/vpc/v1/zones/{param_zone}/private-networks/{param_private_network_id}",
body=marshal_UpdatePrivateNetworkRequest(
UpdatePrivateNetworkRequest(
private_network_id=private_network_id,
zone=zone,
name=name,
tags=tags,
subnets=subnets,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_PrivateNetwork(res.json())
async def delete_private_network(
self,
*,
private_network_id: str,
zone: Optional[Zone] = None,
) -> Optional[None]:
"""
Delete a Private Network.
Delete an existing Private Network. Note that you must first detach all resources from the network, in order to delete it.
:param zone: Zone to target. If none is passed will use default zone from the config.
:param private_network_id: Private Network ID.
Usage:
::
result = await api.delete_private_network(private_network_id="example")
"""
param_zone = validate_path_param("zone", zone or self.client.default_zone)
param_private_network_id = validate_path_param(
"private_network_id", private_network_id
)
res = self._request(
"DELETE",
f"/vpc/v1/zones/{param_zone}/private-networks/{param_private_network_id}",
)
self._throw_on_error(res)
return None
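# End-to-end usage sketch (hedged): Client.from_config_file_and_env() is the
# loader provided by the SDK core and assumes credentials are configured
# locally; the "prod" tag filter is a placeholder.
if __name__ == "__main__":
    import asyncio

    from scaleway_async import Client  # assumed to be re-exported at the package root

    async def _main() -> None:
        api = VpcV1API(Client.from_config_file_and_env())
        for pn in await api.list_private_networks_all(tags=["prod"]):
            print(pn.id, pn.name)

    asyncio.run(_main())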
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/vpc/v1/api.py
| 0.932982 | 0.237764 |
api.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
TimeSeries,
Zone,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class AvailableClusterSettingPropertyType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "UNKNOWN"
BOOLEAN = "BOOLEAN"
INT = "INT"
STRING = "STRING"
def __str__(self) -> str:
return str(self.value)
class ClusterStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
READY = "ready"
PROVISIONING = "provisioning"
CONFIGURING = "configuring"
DELETING = "deleting"
ERROR = "error"
AUTOHEALING = "autohealing"
LOCKED = "locked"
SUSPENDED = "suspended"
INITIALIZING = "initializing"
def __str__(self) -> str:
return str(self.value)
class ListClustersRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class NodeTypeStock(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
LOW_STOCK = "low_stock"
OUT_OF_STOCK = "out_of_stock"
AVAILABLE = "available"
def __str__(self) -> str:
return str(self.value)
class PrivateNetworkProvisioningMode(str, Enum, metaclass=StrEnumMeta):
STATIC = "static"
IPAM = "ipam"
def __str__(self) -> str:
return str(self.value)
@dataclass
class ACLRule:
"""
ACL rule.
"""
id: str
"""
ID of the rule.
"""
ip_cidr: Optional[str]
"""
IPv4 network address of the rule.
"""
description: Optional[str]
"""
Description of the rule.
"""
@dataclass
class ACLRuleSpec:
"""
ACL rule spec.
"""
ip_cidr: str
"""
IPv4 network address of the rule.
"""
description: str
"""
Description of the rule.
"""
@dataclass
class AddAclRulesResponse:
"""
Add ACL rules response.
"""
acl_rules: List[ACLRule]
"""
ACL Rules enabled for the Database Instance.
"""
total_count: int
"""
Total count of ACL rules of the Database Instance.
"""
@dataclass
class AddEndpointsResponse:
"""
Add endpoints response.
"""
endpoints: List[Endpoint]
"""
Endpoints defined on the Database Instance.
"""
total_count: int
"""
Total count of endpoints of the Database Instance.
"""
@dataclass
class AvailableClusterSetting:
"""
Available cluster setting.
"""
name: str
"""
Name of the setting.
"""
default_value: Optional[str]
"""
Default value of the setting.
"""
type_: AvailableClusterSettingPropertyType
"""
Type of setting.
"""
description: str
"""
Description of the setting.
"""
max_value: Optional[int]
"""
Optional maximum value of the setting.
"""
min_value: Optional[int]
"""
Optional minimum value of the setting.
"""
regex: Optional[str]
"""
Optional validation rule of the setting.
"""
deprecated: bool
"""
Defines whether or not the setting is deprecated.
"""
@dataclass
class Cluster:
"""
Cluster.
"""
id: str
"""
UUID of the Database Instance.
"""
name: str
"""
Name of the Database Instance.
"""
project_id: str
"""
Project ID the Database Instance belongs to.
"""
status: ClusterStatus
"""
Status of the Database Instance.
"""
version: str
"""
Redis™ engine version of the Database Instance.
"""
endpoints: List[Endpoint]
"""
List of Database Instance endpoints.
"""
tags: List[str]
"""
List of tags applied to the Database Instance.
"""
node_type: str
"""
Node type of the Database Instance.
"""
created_at: Optional[datetime]
"""
Creation date (Format ISO 8601).
"""
updated_at: Optional[datetime]
"""
Update date (Format ISO 8601).
"""
tls_enabled: bool
"""
Defines whether or not TLS is enabled.
"""
cluster_settings: List[ClusterSetting]
"""
List of Database Instance settings.
"""
acl_rules: List[ACLRule]
"""
List of ACL rules.
"""
cluster_size: int
"""
Number of nodes of the Database Instance cluster.
"""
zone: Zone
"""
Zone of the Database Instance.
"""
user_name: str
"""
Name of the user associated with the cluster.
"""
upgradable_versions: List[str]
"""
List of engine versions the Database Instance can upgrade to.
"""
@dataclass
class ClusterMetricsResponse:
"""
Cluster metrics response.
"""
timeseries: List[TimeSeries]
"""
Time series of metrics of a given cluster.
"""
@dataclass
class ClusterSetting:
"""
Cluster setting.
"""
value: str
"""
Value of the setting.
"""
name: str
"""
Name of the setting.
"""
@dataclass
class ClusterSettingsResponse:
"""
Cluster settings response.
"""
settings: List[ClusterSetting]
"""
Settings configured for a given Database Instance.
"""
@dataclass
class ClusterVersion:
"""
Cluster version.
"""
version: str
"""
Redis™ engine version.
"""
end_of_life_at: Optional[datetime]
"""
Date of End of Life.
"""
available_settings: List[AvailableClusterSetting]
"""
Cluster settings available to be updated.
"""
logo_url: str
"""
Redis™ logo URL.
"""
zone: Zone
"""
Zone of the Redis™ Database Instance.
"""
@dataclass
class Endpoint:
"""
Endpoint.
"""
port: int
"""
TCP port of the endpoint.
"""
private_network: Optional[PrivateNetwork]
"""
Private Network details.
One-of ('details'): at most one of 'private_network', 'public_network' could be set.
"""
public_network: Optional[PublicNetwork]
"""
Public network details.
One-of ('details'): at most one of 'private_network', 'public_network' could be set.
"""
ips: List[str]
"""
List of IPv4 addresses of the endpoint.
"""
id: str
"""
UUID of the endpoint.
"""
@dataclass
class EndpointSpec:
"""
Endpoint spec.
"""
private_network: Optional[EndpointSpecPrivateNetworkSpec]
"""
Private Network specification details.
One-of ('endpoint_type'): at most one of 'private_network', 'public_network' could be set.
"""
public_network: Optional[EndpointSpecPublicNetworkSpec]
"""
Public network specification details.
One-of ('endpoint_type'): at most one of 'private_network', 'public_network' could be set.
"""
@dataclass
class EndpointSpecPrivateNetworkSpec:
"""
Endpoint spec: private network spec.
"""
id: str
"""
UUID of the Private Network to connect to the Database Instance.
"""
service_ips: List[str]
"""
Endpoint IPv4 address with a CIDR notation. You must provide at least one IPv4 per node.
"""
ipam_config: Optional[EndpointSpecPrivateNetworkSpecIpamConfig]
"""
Automated configuration of your Private Network endpoint with Scaleway IPAM service.
"""
@dataclass
class EndpointSpecPrivateNetworkSpecIpamConfig:
pass
@dataclass
class EndpointSpecPublicNetworkSpec:
"""
Endpoint spec: public network spec.
"""
@dataclass
class ListClusterVersionsResponse:
"""
List cluster versions response.
"""
versions: List[ClusterVersion]
"""
List of available Redis™ engine versions.
"""
total_count: int
"""
Total count of available Redis™ engine versions.
"""
@dataclass
class ListClustersResponse:
"""
List clusters response.
"""
clusters: List[Cluster]
"""
List all Database Instances.
"""
total_count: int
"""
Total count of Database Instances.
"""
@dataclass
class ListNodeTypesResponse:
"""
List node types response.
"""
node_types: List[NodeType]
"""
Types of node.
"""
total_count: int
"""
Total count of node types available.
"""
@dataclass
class NodeType:
"""
Node type.
"""
name: str
"""
Node type name.
"""
stock_status: NodeTypeStock
"""
Current stock status of the node type.
"""
description: str
"""
Current specifications of the offer.
"""
vcpus: int
"""
Number of virtual CPUs.
"""
memory: int
"""
Quantity of RAM.
"""
disabled: bool
"""
Defines whether node type is currently disabled or not.
"""
beta: bool
"""
Defines whether node type is currently in beta.
"""
zone: Zone
"""
Zone of the node type.
"""
@dataclass
class PrivateNetwork:
"""
Private network.
"""
id: str
"""
UUID of the Private Network.
"""
service_ips: List[str]
"""
List of IPv4 CIDR notation addresses of the endpoint.
"""
zone: Zone
"""
Zone of the Private Network.
"""
provisioning_mode: PrivateNetworkProvisioningMode
"""
How your endpoint IPs are provisioned.
"""
@dataclass
class PublicNetwork:
pass
@dataclass
class SetAclRulesResponse:
"""
Set ACL rules response.
"""
acl_rules: List[ACLRule]
"""
ACL Rules enabled for the Database Instance.
"""
@dataclass
class SetEndpointsResponse:
"""
Set endpoints response.
"""
endpoints: List[Endpoint]
"""
Endpoints defined on the Database Instance.
"""
@dataclass
class CreateClusterRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
project_id: Optional[str]
"""
Project ID in which to create the Database Instance.
"""
name: Optional[str]
"""
Name of the Database Instance.
"""
version: str
"""
Redis™ engine version of the Database Instance.
"""
tags: Optional[List[str]]
"""
Tags to apply to the Database Instance.
"""
node_type: str
"""
Type of node to use for the Database Instance.
"""
user_name: str
"""
Name of the user created upon Database Instance creation.
"""
password: str
"""
Password of the user.
"""
cluster_size: Optional[int]
"""
Number of nodes in the Redis™ cluster.
"""
acl_rules: Optional[List[ACLRuleSpec]]
"""
List of ACLRuleSpec used to secure your publicly exposed cluster.
"""
endpoints: Optional[List[EndpointSpec]]
"""
Zero or multiple EndpointSpec used to expose your cluster publicly and inside Private Networks. If no EndpointSpec is given, the cluster will be publicly exposed by default.
"""
tls_enabled: bool
"""
Defines whether or not TLS is enabled.
"""
cluster_settings: Optional[List[ClusterSetting]]
"""
List of advanced settings to be set upon Database Instance initialization.
"""
@dataclass
class UpdateClusterRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance to update.
"""
name: Optional[str]
"""
Name of the Database Instance.
"""
tags: Optional[List[str]]
"""
Database Instance tags.
"""
user_name: Optional[str]
"""
Name of the Database Instance user.
"""
password: Optional[str]
"""
Password of the Database Instance user.
"""
@dataclass
class GetClusterRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the cluster.
"""
@dataclass
class ListClustersRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
tags: Optional[List[str]]
"""
Filter by Database Instance tags.
"""
name: Optional[str]
"""
Filter by Database Instance names.
"""
order_by: Optional[ListClustersRequestOrderBy]
"""
Criteria to use when ordering the list.
"""
project_id: Optional[str]
"""
Filter by Project ID.
"""
organization_id: Optional[str]
"""
Filter by Organization ID.
"""
version: Optional[str]
"""
Filter by Redis™ engine version.
"""
page: Optional[int]
page_size: Optional[int]
@dataclass
class MigrateClusterRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance to update.
"""
version: Optional[str]
"""
Redis™ engine version of the Database Instance.
One-of ('action'): at most one of 'version', 'node_type', 'cluster_size' could be set.
"""
node_type: Optional[str]
"""
Type of node to use for the Database Instance.
One-of ('action'): at most one of 'version', 'node_type', 'cluster_size' could be set.
"""
cluster_size: Optional[int]
"""
Number of nodes for the Database Instance.
One-of ('action'): at most one of 'version', 'node_type', 'cluster_size' could be set.
"""
@dataclass
class DeleteClusterRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance to delete.
"""
@dataclass
class GetClusterMetricsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the cluster.
"""
start_at: Optional[datetime]
"""
Start date.
"""
end_at: Optional[datetime]
"""
End date.
"""
metric_name: Optional[str]
"""
Name of the metric to gather.
"""
@dataclass
class ListNodeTypesRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
include_disabled_types: bool
"""
Defines whether or not to include disabled types.
"""
page: Optional[int]
page_size: Optional[int]
@dataclass
class ListClusterVersionsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
include_disabled: bool
"""
Defines whether or not to include disabled Redis™ engine versions.
"""
include_beta: bool
"""
Defines whether or not to include beta Redis™ engine versions.
"""
include_deprecated: bool
"""
Defines whether or not to include deprecated Redis™ engine versions.
"""
version: Optional[str]
"""
List Redis™ engine versions that match a given name pattern.
"""
page: Optional[int]
page_size: Optional[int]
@dataclass
class GetClusterCertificateRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the cluster.
"""
@dataclass
class RenewClusterCertificateRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the cluster.
"""
@dataclass
class AddClusterSettingsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance you want to add settings to.
"""
settings: List[ClusterSetting]
"""
Settings to add to the cluster.
"""
@dataclass
class DeleteClusterSettingRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance where the settings must be set.
"""
setting_name: str
"""
Setting name to delete.
"""
@dataclass
class SetClusterSettingsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance where the settings must be set.
"""
settings: List[ClusterSetting]
"""
Settings to define for the Database Instance.
"""
@dataclass
class SetAclRulesRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance where the ACL rules have to be set.
"""
acl_rules: List[ACLRuleSpec]
"""
ACL rules to define for the cluster.
"""
@dataclass
class AddAclRulesRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance you want to add ACL rules to.
"""
acl_rules: List[ACLRuleSpec]
"""
ACL rules to add to the cluster.
"""
@dataclass
class DeleteAclRuleRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
acl_id: str
"""
UUID of the ACL rule you want to delete.
"""
@dataclass
class GetAclRuleRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
acl_id: str
"""
UUID of the ACL rule you want to get.
"""
@dataclass
class SetEndpointsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance where the endpoints have to be set.
"""
endpoints: List[EndpointSpec]
"""
Endpoints to define for the Database Instance.
"""
@dataclass
class AddEndpointsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
cluster_id: str
"""
UUID of the Database Instance you want to add endpoints to.
"""
endpoints: List[EndpointSpec]
"""
Endpoints to add to the Database Instance.
"""
@dataclass
class DeleteEndpointRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
endpoint_id: str
"""
UUID of the endpoint you want to delete.
"""
@dataclass
class GetEndpointRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
endpoint_id: str
"""
UUID of the endpoint you want to get.
"""
@dataclass
class UpdateEndpointRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
endpoint_id: str
private_network: Optional[EndpointSpecPrivateNetworkSpec]
"""
One-of ('endpoint_type'): at most one of 'private_network', 'public_network' could be set.
"""
public_network: Optional[EndpointSpecPublicNetworkSpec]
"""
One-of ('endpoint_type'): at most one of 'private_network', 'public_network' could be set.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/redis/v1/types.py
| 0.891705 | 0.296794 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.bridge import (
unmarshal_TimeSeries,
)
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
ACLRule,
ACLRuleSpec,
AddAclRulesResponse,
AddEndpointsResponse,
AvailableClusterSetting,
Cluster,
ClusterMetricsResponse,
ClusterSetting,
ClusterSettingsResponse,
ClusterVersion,
Endpoint,
EndpointSpec,
EndpointSpecPrivateNetworkSpec,
EndpointSpecPrivateNetworkSpecIpamConfig,
EndpointSpecPublicNetworkSpec,
ListClusterVersionsResponse,
ListClustersResponse,
ListNodeTypesResponse,
NodeType,
PrivateNetwork,
PublicNetwork,
SetAclRulesResponse,
SetEndpointsResponse,
CreateClusterRequest,
UpdateClusterRequest,
MigrateClusterRequest,
AddClusterSettingsRequest,
SetClusterSettingsRequest,
SetAclRulesRequest,
AddAclRulesRequest,
SetEndpointsRequest,
AddEndpointsRequest,
UpdateEndpointRequest,
)
def unmarshal_PrivateNetwork(data: Any) -> PrivateNetwork:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'PrivateNetwork' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("provisioning_mode", None)
args["provisioning_mode"] = field
field = data.get("service_ips", None)
args["service_ips"] = field
field = data.get("zone", None)
args["zone"] = field
return PrivateNetwork(**args)
def unmarshal_PublicNetwork(data: Any) -> PublicNetwork:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'PublicNetwork' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
return PublicNetwork(**args)
def unmarshal_ACLRule(data: Any) -> ACLRule:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ACLRule' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("ip_cidr", None)
args["ip_cidr"] = field
return ACLRule(**args)
def unmarshal_AvailableClusterSetting(data: Any) -> AvailableClusterSetting:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'AvailableClusterSetting' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("default_value", None)
args["default_value"] = field
field = data.get("deprecated", None)
args["deprecated"] = field
field = data.get("description", None)
args["description"] = field
field = data.get("max_value", None)
args["max_value"] = field
field = data.get("min_value", None)
args["min_value"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("regex", None)
args["regex"] = field
field = data.get("type", None)
args["type_"] = field
return AvailableClusterSetting(**args)
def unmarshal_ClusterSetting(data: Any) -> ClusterSetting:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ClusterSetting' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("name", None)
args["name"] = field
field = data.get("value", None)
args["value"] = field
return ClusterSetting(**args)
def unmarshal_Endpoint(data: Any) -> Endpoint:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Endpoint' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("ips", None)
args["ips"] = field
field = data.get("port", None)
args["port"] = field
field = data.get("private_network", None)
args["private_network"] = (
unmarshal_PrivateNetwork(field) if field is not None else None
)
field = data.get("public_network", None)
args["public_network"] = (
unmarshal_PublicNetwork(field) if field is not None else None
)
return Endpoint(**args)
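# Hedged example of the nested unmarshalling above, with a made-up payload:
# the "private_network" sub-dict is delegated to unmarshal_PrivateNetwork,
# and the absent public network resolves to None.
_endpoint = unmarshal_Endpoint(
    {
        "id": "11111111-1111-1111-1111-111111111111",
        "ips": ["10.0.0.2"],
        "port": 6379,
        "private_network": {
            "id": "22222222-2222-2222-2222-222222222222",
            "provisioning_mode": "static",
            "service_ips": ["10.0.0.2/24"],
            "zone": "fr-par-1",
        },
        "public_network": None,
    }
)
assert _endpoint.port == 6379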
def unmarshal_Cluster(data: Any) -> Cluster:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Cluster' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("acl_rules", None)
args["acl_rules"] = (
[unmarshal_ACLRule(v) for v in field] if field is not None else None
)
field = data.get("cluster_settings", None)
args["cluster_settings"] = (
[unmarshal_ClusterSetting(v) for v in field] if field is not None else None
)
field = data.get("cluster_size", None)
args["cluster_size"] = field
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("endpoints", None)
args["endpoints"] = (
[unmarshal_Endpoint(v) for v in field] if field is not None else None
)
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("node_type", None)
args["node_type"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("tls_enabled", None)
args["tls_enabled"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("upgradable_versions", None)
args["upgradable_versions"] = field
field = data.get("user_name", None)
args["user_name"] = field
field = data.get("version", None)
args["version"] = field
field = data.get("zone", None)
args["zone"] = field
return Cluster(**args)
def unmarshal_ClusterVersion(data: Any) -> ClusterVersion:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ClusterVersion' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("available_settings", None)
args["available_settings"] = (
[unmarshal_AvailableClusterSetting(v) for v in field]
if field is not None
else None
)
field = data.get("end_of_life_at", None)
args["end_of_life_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("logo_url", None)
args["logo_url"] = field
field = data.get("version", None)
args["version"] = field
field = data.get("zone", None)
args["zone"] = field
return ClusterVersion(**args)
def unmarshal_NodeType(data: Any) -> NodeType:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'NodeType' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("beta", None)
args["beta"] = field
field = data.get("description", None)
args["description"] = field
field = data.get("disabled", None)
args["disabled"] = field
field = data.get("memory", None)
args["memory"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("stock_status", None)
args["stock_status"] = field
field = data.get("vcpus", None)
args["vcpus"] = field
field = data.get("zone", None)
args["zone"] = field
return NodeType(**args)
def unmarshal_AddAclRulesResponse(data: Any) -> AddAclRulesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'AddAclRulesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("acl_rules", None)
args["acl_rules"] = (
[unmarshal_ACLRule(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return AddAclRulesResponse(**args)
def unmarshal_AddEndpointsResponse(data: Any) -> AddEndpointsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'AddEndpointsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("endpoints", None)
args["endpoints"] = (
[unmarshal_Endpoint(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return AddEndpointsResponse(**args)
def unmarshal_ClusterMetricsResponse(data: Any) -> ClusterMetricsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ClusterMetricsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("timeseries", None)
args["timeseries"] = (
[unmarshal_TimeSeries(v) for v in field] if field is not None else None
)
return ClusterMetricsResponse(**args)
def unmarshal_ClusterSettingsResponse(data: Any) -> ClusterSettingsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ClusterSettingsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("settings", None)
args["settings"] = (
[unmarshal_ClusterSetting(v) for v in field] if field is not None else None
)
return ClusterSettingsResponse(**args)
def unmarshal_ListClusterVersionsResponse(data: Any) -> ListClusterVersionsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListClusterVersionsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("total_count", None)
args["total_count"] = field
field = data.get("versions", None)
args["versions"] = (
[unmarshal_ClusterVersion(v) for v in field] if field is not None else None
)
return ListClusterVersionsResponse(**args)
def unmarshal_ListClustersResponse(data: Any) -> ListClustersResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListClustersResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("clusters", None)
args["clusters"] = (
[unmarshal_Cluster(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListClustersResponse(**args)
def unmarshal_ListNodeTypesResponse(data: Any) -> ListNodeTypesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListNodeTypesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("node_types", None)
args["node_types"] = (
[unmarshal_NodeType(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListNodeTypesResponse(**args)
def unmarshal_SetAclRulesResponse(data: Any) -> SetAclRulesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SetAclRulesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("acl_rules", None)
args["acl_rules"] = (
[unmarshal_ACLRule(v) for v in field] if field is not None else None
)
return SetAclRulesResponse(**args)
def unmarshal_SetEndpointsResponse(data: Any) -> SetEndpointsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'SetEndpointsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("endpoints", None)
args["endpoints"] = (
[unmarshal_Endpoint(v) for v in field] if field is not None else None
)
return SetEndpointsResponse(**args)
def marshal_EndpointSpecPrivateNetworkSpecIpamConfig(
request: EndpointSpecPrivateNetworkSpecIpamConfig,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
return output
def marshal_EndpointSpecPrivateNetworkSpec(
request: EndpointSpecPrivateNetworkSpec,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.id is not None:
output["id"] = request.id
if request.ipam_config is not None:
output["ipam_config"] = marshal_EndpointSpecPrivateNetworkSpecIpamConfig(
request.ipam_config, defaults
)
if request.service_ips is not None:
output["service_ips"] = request.service_ips
return output
def marshal_EndpointSpecPublicNetworkSpec(
request: EndpointSpecPublicNetworkSpec,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
return output
def marshal_ACLRuleSpec(
request: ACLRuleSpec,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.ip_cidr is not None:
output["ip_cidr"] = request.ip_cidr
return output
def marshal_ClusterSetting(
request: ClusterSetting,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.value is not None:
output["value"] = request.value
return output
def marshal_EndpointSpec(
request: EndpointSpec,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"private_network",
marshal_EndpointSpecPrivateNetworkSpec(
request.private_network, defaults
)
if request.private_network is not None
else None,
),
OneOfPossibility(
"public_network",
marshal_EndpointSpecPublicNetworkSpec(
request.public_network, defaults
)
if request.public_network is not None
else None,
),
]
),
)
return output
def marshal_AddAclRulesRequest(
request: AddAclRulesRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.acl_rules is not None:
output["acl_rules"] = [
marshal_ACLRuleSpec(v, defaults) for v in request.acl_rules
]
return output
def marshal_AddClusterSettingsRequest(
request: AddClusterSettingsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.settings is not None:
output["settings"] = [
marshal_ClusterSetting(v, defaults) for v in request.settings
]
return output
def marshal_AddEndpointsRequest(
request: AddEndpointsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.endpoints is not None:
output["endpoints"] = [
marshal_EndpointSpec(v, defaults) for v in request.endpoints
]
return output
def marshal_CreateClusterRequest(
request: CreateClusterRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.acl_rules is not None:
output["acl_rules"] = [
marshal_ACLRuleSpec(v, defaults) for v in request.acl_rules
]
if request.cluster_settings is not None:
output["cluster_settings"] = [
marshal_ClusterSetting(v, defaults) for v in request.cluster_settings
]
if request.cluster_size is not None:
output["cluster_size"] = request.cluster_size
if request.endpoints is not None:
output["endpoints"] = [
marshal_EndpointSpec(v, defaults) for v in request.endpoints
]
if request.name is not None:
output["name"] = request.name
if request.node_type is not None:
output["node_type"] = request.node_type
if request.password is not None:
output["password"] = request.password
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.tags is not None:
output["tags"] = request.tags
if request.tls_enabled is not None:
output["tls_enabled"] = request.tls_enabled
if request.user_name is not None:
output["user_name"] = request.user_name
if request.version is not None:
output["version"] = request.version
return output
def marshal_MigrateClusterRequest(
request: MigrateClusterRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"version", request.version if request.version is not None else None
),
OneOfPossibility(
"node_type",
request.node_type if request.node_type is not None else None,
),
OneOfPossibility(
"cluster_size",
request.cluster_size if request.cluster_size is not None else None,
),
]
),
)
return output
def marshal_SetAclRulesRequest(
request: SetAclRulesRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.acl_rules is not None:
output["acl_rules"] = [
marshal_ACLRuleSpec(v, defaults) for v in request.acl_rules
]
return output
def marshal_SetClusterSettingsRequest(
request: SetClusterSettingsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.settings is not None:
output["settings"] = [
marshal_ClusterSetting(v, defaults) for v in request.settings
]
return output
def marshal_SetEndpointsRequest(
request: SetEndpointsRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.endpoints is not None:
output["endpoints"] = [
marshal_EndpointSpec(v, defaults) for v in request.endpoints
]
return output
def marshal_UpdateClusterRequest(
request: UpdateClusterRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.password is not None:
output["password"] = request.password
if request.tags is not None:
output["tags"] = request.tags
if request.user_name is not None:
output["user_name"] = request.user_name
return output
def marshal_UpdateEndpointRequest(
request: UpdateEndpointRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"private_network",
marshal_EndpointSpecPrivateNetworkSpec(
request.private_network, defaults
)
if request.private_network is not None
else None,
),
OneOfPossibility(
"public_network",
marshal_EndpointSpecPublicNetworkSpec(
request.public_network, defaults
)
if request.public_network is not None
else None,
),
]
),
)
return output
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/redis/v1/marshalling.py
| 0.76934 | 0.15925 |
marshalling.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class DomainLastStatusRecordStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_RECORD_STATUS = "unknown_record_status"
VALID = "valid"
INVALID = "invalid"
NOT_FOUND = "not_found"
def __str__(self) -> str:
return str(self.value)
class DomainStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
CHECKED = "checked"
UNCHECKED = "unchecked"
INVALID = "invalid"
LOCKED = "locked"
REVOKED = "revoked"
PENDING = "pending"
def __str__(self) -> str:
return str(self.value)
class EmailFlag(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_FLAG = "unknown_flag"
SOFT_BOUNCE = "soft_bounce"
HARD_BOUNCE = "hard_bounce"
SPAM = "spam"
MAILBOX_FULL = "mailbox_full"
def __str__(self) -> str:
return str(self.value)
class EmailRcptType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_RCPT_TYPE = "unknown_rcpt_type"
TO = "to"
CC = "cc"
BCC = "bcc"
def __str__(self) -> str:
return str(self.value)
class EmailStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
NEW = "new"
SENDING = "sending"
SENT = "sent"
FAILED = "failed"
CANCELED = "canceled"
def __str__(self) -> str:
return str(self.value)
class ListEmailsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_DESC = "created_at_desc"
CREATED_AT_ASC = "created_at_asc"
UPDATED_AT_DESC = "updated_at_desc"
UPDATED_AT_ASC = "updated_at_asc"
STATUS_DESC = "status_desc"
STATUS_ASC = "status_asc"
MAIL_FROM_DESC = "mail_from_desc"
MAIL_FROM_ASC = "mail_from_asc"
MAIL_RCPT_DESC = "mail_rcpt_desc"
MAIL_RCPT_ASC = "mail_rcpt_asc"
SUBJECT_DESC = "subject_desc"
SUBJECT_ASC = "subject_asc"
def __str__(self) -> str:
return str(self.value)
@dataclass
class CreateEmailRequestAddress:
"""
Create email request: address.
"""
email: str
"""
Email address.
"""
name: Optional[str]
"""
(Optional) Name displayed.
"""
@dataclass
class CreateEmailRequestAttachment:
"""
Create email request. attachment.
"""
name: str
"""
Filename of the attachment.
"""
type_: str
"""
MIME type of the attachment.
"""
content: str
"""
Content of the attachment encoded in base64.
"""
@dataclass
class CreateEmailResponse:
"""
Create email response.
"""
emails: List[Email]
"""
    Emails created by the request.
"""
@dataclass
class Domain:
"""
Domain.
"""
id: str
"""
ID of the domain.
"""
organization_id: str
"""
ID of the domain's Organization.
"""
project_id: str
"""
ID of the domain's Project.
"""
name: str
"""
Domain name (example.com).
"""
status: DomainStatus
"""
Status of the domain.
"""
created_at: Optional[datetime]
"""
Date and time of domain creation.
"""
next_check_at: Optional[datetime]
"""
Date and time of the next scheduled check.
"""
last_valid_at: Optional[datetime]
"""
Date and time the domain was last valid.
"""
revoked_at: Optional[datetime]
"""
Date and time of the domain's deletion.
"""
last_error: Optional[str]
"""
Error message returned if the last check failed.
:deprecated
"""
spf_config: str
"""
Snippet of the SPF record to register in the DNS zone.
"""
dkim_config: str
"""
DKIM public key to record in the DNS zone.
"""
statistics: Optional[DomainStatistics]
"""
Domain's statistics.
"""
region: Region
@dataclass
class DomainLastStatus:
"""
Domain last status.
"""
domain_id: str
"""
    The ID of the domain.
"""
domain_name: str
"""
The domain name (example.com).
"""
spf_record: Optional[DomainLastStatusSpfRecord]
"""
The SPF record verification data.
"""
dkim_record: Optional[DomainLastStatusDkimRecord]
"""
The DKIM record verification data.
"""
@dataclass
class DomainLastStatusDkimRecord:
"""
Domain last status. dkim record.
"""
status: DomainLastStatusRecordStatus
"""
    Status of the DKIM record's configuration.
"""
last_valid_at: Optional[datetime]
"""
Time and date the DKIM record was last valid.
"""
error: Optional[str]
"""
    Error message displayed if the record is not valid.
"""
@dataclass
class DomainLastStatusSpfRecord:
"""
Domain last status. spf record.
"""
status: DomainLastStatusRecordStatus
"""
    Status of the SPF record's configuration.
"""
last_valid_at: Optional[datetime]
"""
Time and date the SPF record was last valid.
"""
error: Optional[str]
"""
    Error message displayed if the record is not valid.
"""
@dataclass
class DomainStatistics:
total_count: int
sent_count: int
failed_count: int
canceled_count: int
@dataclass
class Email:
"""
Email.
"""
id: str
"""
Technical ID of the email.
"""
message_id: str
"""
Message ID of the email.
"""
project_id: str
"""
ID of the Project to which the email belongs.
"""
mail_from: str
"""
Email address of the sender.
"""
rcpt_to: Optional[str]
"""
Email address of the recipient.
:deprecated
"""
mail_rcpt: str
"""
Email address of the recipient.
"""
rcpt_type: EmailRcptType
"""
Type of recipient.
"""
subject: str
"""
Subject of the email.
"""
created_at: Optional[datetime]
"""
Creation date of the email object.
"""
updated_at: Optional[datetime]
"""
Last update of the email object.
"""
status: EmailStatus
"""
Status of the email.
"""
status_details: Optional[str]
"""
Additional status information.
"""
try_count: int
"""
Number of attempts to send the email.
"""
last_tries: List[EmailTry]
"""
Information about the last three attempts to send the email.
"""
flags: List[EmailFlag]
"""
Flags categorize emails. They allow you to obtain more information about recurring errors, for example.
"""
@dataclass
class EmailTry:
"""
Email. try.
"""
rank: int
"""
Rank number of this attempt to send the email.
"""
tried_at: Optional[datetime]
"""
Date of the attempt to send the email.
"""
code: int
"""
The SMTP status code received after the attempt. 0 if the attempt did not reach an SMTP server.
"""
message: str
"""
The SMTP message received. If the attempt did not reach an SMTP server, the message returned explains what happened.
"""
@dataclass
class ListDomainsResponse:
"""
List domains response.
"""
total_count: int
"""
Number of domains that match the request (without pagination).
"""
domains: List[Domain]
@dataclass
class ListEmailsResponse:
"""
List emails response.
"""
total_count: int
"""
Number of emails matching the requested criteria.
"""
emails: List[Email]
"""
Single page of emails matching the requested criteria.
"""
@dataclass
class Statistics:
"""
Statistics.
"""
total_count: int
"""
Total number of emails matching the requested criteria.
"""
new_count: int
"""
Number of emails still in the `new` transient state. This means emails received from the API but not yet processed.
"""
sending_count: int
"""
Number of emails still in the `sending` transient state. This means emails received from the API but not yet in their final status.
"""
sent_count: int
"""
Number of emails in the final `sent` state. This means emails that have been delivered to the target mail system.
"""
failed_count: int
"""
Number of emails in the final `failed` state. This means emails that have been refused by the target mail system with a final error status.
"""
canceled_count: int
"""
Number of emails in the final `canceled` state. This means emails that have been canceled upon request.
"""
@dataclass
class CreateEmailRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
from_: Optional[CreateEmailRequestAddress]
"""
Sender information. Must be from a checked domain declared in the Project.
"""
to: Optional[List[CreateEmailRequestAddress]]
"""
An array of the primary recipient's information.
"""
cc: Optional[List[CreateEmailRequestAddress]]
"""
An array of the carbon copy recipient's information.
"""
bcc: Optional[List[CreateEmailRequestAddress]]
"""
An array of the blind carbon copy recipient's information.
"""
subject: str
"""
Subject of the email.
"""
text: str
"""
Text content.
"""
html: str
"""
HTML content.
"""
project_id: Optional[str]
"""
ID of the Project in which to create the email.
"""
attachments: Optional[List[CreateEmailRequestAttachment]]
"""
Array of attachments.
"""
send_before: Optional[datetime]
"""
Maximum date to deliver the email.
"""
@dataclass
class GetEmailRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
email_id: str
"""
ID of the email to retrieve.
"""
@dataclass
class ListEmailsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
page: Optional[int]
page_size: Optional[int]
project_id: Optional[str]
"""
(Optional) ID of the Project in which to list the emails.
"""
domain_id: Optional[str]
"""
(Optional) ID of the domain for which to list the emails.
"""
message_id: Optional[str]
"""
(Optional) ID of the message for which to list the emails.
"""
since: Optional[datetime]
"""
(Optional) List emails created after this date.
"""
until: Optional[datetime]
"""
(Optional) List emails created before this date.
"""
mail_from: Optional[str]
"""
(Optional) List emails sent with this sender's email address.
"""
mail_to: Optional[str]
"""
List emails sent to this recipient's email address.
:deprecated
"""
mail_rcpt: Optional[str]
"""
(Optional) List emails sent to this recipient's email address.
"""
statuses: Optional[List[EmailStatus]]
"""
(Optional) List emails with any of these statuses.
"""
subject: Optional[str]
"""
(Optional) List emails with this subject.
"""
search: Optional[str]
"""
    (Optional) List emails by searching across all fields.
"""
order_by: Optional[ListEmailsRequestOrderBy]
"""
    (Optional) Criteria to use when ordering the emails.
"""
flags: Optional[List[EmailFlag]]
"""
(Optional) List emails containing only specific flags.
"""
@dataclass
class GetStatisticsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
project_id: Optional[str]
"""
(Optional) Number of emails for this Project.
"""
domain_id: Optional[str]
"""
(Optional) Number of emails sent from this domain (must be coherent with the `project_id` and the `organization_id`).
"""
since: Optional[datetime]
"""
(Optional) Number of emails created after this date.
"""
until: Optional[datetime]
"""
(Optional) Number of emails created before this date.
"""
mail_from: Optional[str]
"""
(Optional) Number of emails sent with this sender's email address.
"""
@dataclass
class CancelEmailRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
email_id: str
"""
ID of the email to cancel.
"""
@dataclass
class CreateDomainRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
project_id: Optional[str]
"""
ID of the project to which the domain belongs.
"""
domain_name: str
"""
    Fully qualified domain name.
"""
accept_tos: bool
"""
Accept Scaleway's Terms of Service.
"""
@dataclass
class GetDomainRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
domain_id: str
"""
ID of the domain.
"""
@dataclass
class ListDomainsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
page: Optional[int]
"""
    Requested page number. Value must be greater than or equal to 1.
"""
page_size: Optional[int]
"""
Page size.
"""
project_id: Optional[str]
status: Optional[List[DomainStatus]]
organization_id: Optional[str]
name: Optional[str]
@dataclass
class RevokeDomainRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
domain_id: str
"""
ID of the domain to delete.
"""
@dataclass
class CheckDomainRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
domain_id: str
"""
ID of the domain to check.
"""
@dataclass
class GetDomainLastStatusRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
domain_id: str
"""
    ID of the domain to get the last status for.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/tem/v1alpha1/types.py
| 0.877503 | 0.187058 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
CreateEmailRequestAddress,
CreateEmailRequestAttachment,
CreateEmailResponse,
Domain,
DomainLastStatus,
DomainLastStatusDkimRecord,
DomainLastStatusSpfRecord,
DomainStatistics,
Email,
EmailTry,
ListDomainsResponse,
ListEmailsResponse,
Statistics,
CreateEmailRequest,
CreateDomainRequest,
)
def unmarshal_DomainStatistics(data: Any) -> DomainStatistics:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DomainStatistics' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("canceled_count", None)
args["canceled_count"] = field
field = data.get("failed_count", None)
args["failed_count"] = field
field = data.get("sent_count", None)
args["sent_count"] = field
field = data.get("total_count", None)
args["total_count"] = field
return DomainStatistics(**args)
def unmarshal_EmailTry(data: Any) -> EmailTry:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'EmailTry' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("code", None)
args["code"] = field
field = data.get("message", None)
args["message"] = field
field = data.get("rank", None)
args["rank"] = field
field = data.get("tried_at", None)
args["tried_at"] = parser.isoparse(field) if type(field) is str else field
return EmailTry(**args)
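# --- Illustrative sketch (not part of the generated module) ---
# Each unmarshal_* helper follows the same defensive pattern: reject non-dict
# input, read every field with data.get(...) so missing keys become None, and
# parse ISO 8601 strings into datetime objects. A sample round on EmailTry,
# using a hypothetical payload:
if __name__ == "__main__":
    sample = {
        "rank": 1,
        "code": 250,
        "message": "delivered",
        "tried_at": "2023-01-01T00:00:00Z",
    }
    email_try = unmarshal_EmailTry(sample)
    # tried_at is parsed into a timezone-aware datetime by dateutil's isoparse.
    print(email_try.rank, email_try.tried_at.isoformat())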
def unmarshal_Domain(data: Any) -> Domain:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Domain' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("dkim_config", None)
args["dkim_config"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("last_error", None)
args["last_error"] = field
field = data.get("last_valid_at", None)
args["last_valid_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("name", None)
args["name"] = field
field = data.get("next_check_at", None)
args["next_check_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("revoked_at", None)
args["revoked_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("spf_config", None)
args["spf_config"] = field
field = data.get("statistics", None)
args["statistics"] = (
unmarshal_DomainStatistics(field) if field is not None else None
)
field = data.get("status", None)
args["status"] = field
return Domain(**args)
def unmarshal_DomainLastStatusDkimRecord(data: Any) -> DomainLastStatusDkimRecord:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DomainLastStatusDkimRecord' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("error", None)
args["error"] = field
field = data.get("last_valid_at", None)
args["last_valid_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("status", None)
args["status"] = field
return DomainLastStatusDkimRecord(**args)
def unmarshal_DomainLastStatusSpfRecord(data: Any) -> DomainLastStatusSpfRecord:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DomainLastStatusSpfRecord' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("error", None)
args["error"] = field
field = data.get("last_valid_at", None)
args["last_valid_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("status", None)
args["status"] = field
return DomainLastStatusSpfRecord(**args)
def unmarshal_Email(data: Any) -> Email:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Email' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("flags", None)
args["flags"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("last_tries", None)
args["last_tries"] = (
[unmarshal_EmailTry(v) for v in field] if field is not None else None
)
field = data.get("mail_from", None)
args["mail_from"] = field
field = data.get("mail_rcpt", None)
args["mail_rcpt"] = field
field = data.get("message_id", None)
args["message_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("rcpt_to", None)
args["rcpt_to"] = field
field = data.get("rcpt_type", None)
args["rcpt_type"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("status_details", None)
args["status_details"] = field
field = data.get("subject", None)
args["subject"] = field
field = data.get("try_count", None)
args["try_count"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Email(**args)
def unmarshal_CreateEmailResponse(data: Any) -> CreateEmailResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'CreateEmailResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("emails", None)
args["emails"] = [unmarshal_Email(v) for v in field] if field is not None else None
return CreateEmailResponse(**args)
def unmarshal_DomainLastStatus(data: Any) -> DomainLastStatus:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DomainLastStatus' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("dkim_record", None)
args["dkim_record"] = (
unmarshal_DomainLastStatusDkimRecord(field) if field is not None else None
)
field = data.get("domain_id", None)
args["domain_id"] = field
field = data.get("domain_name", None)
args["domain_name"] = field
field = data.get("spf_record", None)
args["spf_record"] = (
unmarshal_DomainLastStatusSpfRecord(field) if field is not None else None
)
return DomainLastStatus(**args)
def unmarshal_ListDomainsResponse(data: Any) -> ListDomainsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListDomainsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("domains", None)
args["domains"] = (
[unmarshal_Domain(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListDomainsResponse(**args)
def unmarshal_ListEmailsResponse(data: Any) -> ListEmailsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListEmailsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("emails", None)
args["emails"] = [unmarshal_Email(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListEmailsResponse(**args)
def unmarshal_Statistics(data: Any) -> Statistics:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Statistics' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("canceled_count", None)
args["canceled_count"] = field
field = data.get("failed_count", None)
args["failed_count"] = field
field = data.get("new_count", None)
args["new_count"] = field
field = data.get("sending_count", None)
args["sending_count"] = field
field = data.get("sent_count", None)
args["sent_count"] = field
field = data.get("total_count", None)
args["total_count"] = field
return Statistics(**args)
def marshal_CreateEmailRequestAddress(
request: CreateEmailRequestAddress,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.email is not None:
output["email"] = request.email
if request.name is not None:
output["name"] = request.name
return output
def marshal_CreateEmailRequestAttachment(
request: CreateEmailRequestAttachment,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.content is not None:
output["content"] = request.content
if request.name is not None:
output["name"] = request.name
if request.type_ is not None:
output["type"] = request.type_
return output
def marshal_CreateDomainRequest(
request: CreateDomainRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.accept_tos is not None:
output["accept_tos"] = request.accept_tos
if request.domain_name is not None:
output["domain_name"] = request.domain_name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
return output
def marshal_CreateEmailRequest(
request: CreateEmailRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.attachments is not None:
output["attachments"] = [
marshal_CreateEmailRequestAttachment(v, defaults)
for v in request.attachments
]
if request.bcc is not None:
output["bcc"] = [
marshal_CreateEmailRequestAddress(v, defaults) for v in request.bcc
]
if request.cc is not None:
output["cc"] = [
marshal_CreateEmailRequestAddress(v, defaults) for v in request.cc
]
if request.from_ is not None:
output["from"] = marshal_CreateEmailRequestAddress(request.from_, defaults)
if request.html is not None:
output["html"] = request.html
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.send_before is not None:
output["send_before"] = request.send_before
if request.subject is not None:
output["subject"] = request.subject
if request.text is not None:
output["text"] = request.text
if request.to is not None:
output["to"] = [
marshal_CreateEmailRequestAddress(v, defaults) for v in request.to
]
return output
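# --- Illustrative sketch (not part of the generated module) ---
# marshal_CreateEmailRequest serializes only the fields that are not None and
# renames from_ to its wire name "from". Note that the defaults fallback for
# project_id only triggers when the field is a falsy non-None value (such as
# ""), because None skips the field entirely. A minimal sketch; the
# ProfileDefaults() construction below is hypothetical and the real
# constructor may require arguments:
if __name__ == "__main__":
    request = CreateEmailRequest(
        region=None,
        from_=CreateEmailRequestAddress(email="sender@example.com", name=None),
        to=None,
        cc=None,
        bcc=None,
        subject="Hello there",
        text="Plain-text body",
        html="<p>HTML body</p>",
        project_id="",
        attachments=None,
        send_before=None,
    )
    body = marshal_CreateEmailRequest(request, ProfileDefaults())
    # Expected keys: ['from', 'html', 'project_id', 'subject', 'text']
    print(sorted(body.keys()))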
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/tem/v1alpha1/marshalling.py
| 0.710226 | 0.30527 |
marshalling.py
|
pypi
|
from datetime import datetime
from typing import Awaitable, List, Optional, Union
from scaleway_core.api import API
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
WaitForOptions,
fetch_all_pages_async,
validate_path_param,
wait_for_resource_async,
)
from .types import (
DomainStatus,
EmailFlag,
EmailStatus,
ListEmailsRequestOrderBy,
CreateEmailRequestAddress,
CreateEmailRequestAttachment,
CreateEmailResponse,
Domain,
DomainLastStatus,
Email,
ListDomainsResponse,
ListEmailsResponse,
Statistics,
CreateEmailRequest,
CreateDomainRequest,
)
from .content import (
DOMAIN_TRANSIENT_STATUSES,
EMAIL_TRANSIENT_STATUSES,
)
from .marshalling import (
marshal_CreateDomainRequest,
marshal_CreateEmailRequest,
unmarshal_Domain,
unmarshal_Email,
unmarshal_CreateEmailResponse,
unmarshal_DomainLastStatus,
unmarshal_ListDomainsResponse,
unmarshal_ListEmailsResponse,
unmarshal_Statistics,
)
class TemV1Alpha1API(API):
"""
    Transactional Email API.
"""
async def create_email(
self,
*,
subject: str,
text: str,
html: str,
region: Optional[Region] = None,
from_: Optional[CreateEmailRequestAddress] = None,
to: Optional[List[CreateEmailRequestAddress]] = None,
cc: Optional[List[CreateEmailRequestAddress]] = None,
bcc: Optional[List[CreateEmailRequestAddress]] = None,
project_id: Optional[str] = None,
attachments: Optional[List[CreateEmailRequestAttachment]] = None,
send_before: Optional[datetime] = None,
) -> CreateEmailResponse:
"""
Send an email.
You must specify the `region`, the sender and the recipient's information and the `project_id` to send an email from a checked domain. The subject of the email must contain at least 6 characters.
:param region: Region to target. If none is passed will use default region from the config.
:param from_: Sender information. Must be from a checked domain declared in the Project.
:param to: An array of the primary recipient's information.
:param cc: An array of the carbon copy recipient's information.
:param bcc: An array of the blind carbon copy recipient's information.
:param subject: Subject of the email.
:param text: Text content.
:param html: HTML content.
:param project_id: ID of the Project in which to create the email.
:param attachments: Array of attachments.
:param send_before: Maximum date to deliver the email.
:return: :class:`CreateEmailResponse <CreateEmailResponse>`
Usage:
::
result = await api.create_email(
subject="example",
text="example",
html="example",
)
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"POST",
f"/transactional-email/v1alpha1/regions/{param_region}/emails",
body=marshal_CreateEmailRequest(
CreateEmailRequest(
subject=subject,
text=text,
html=html,
region=region,
from_=from_,
to=to,
cc=cc,
bcc=bcc,
project_id=project_id,
attachments=attachments,
send_before=send_before,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_CreateEmailResponse(res.json())
async def get_email(
self,
*,
email_id: str,
region: Optional[Region] = None,
) -> Email:
"""
Get an email.
Retrieve information about a specific email using the `email_id` and `region` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param email_id: ID of the email to retrieve.
:return: :class:`Email <Email>`
Usage:
::
result = await api.get_email(email_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_email_id = validate_path_param("email_id", email_id)
res = self._request(
"GET",
f"/transactional-email/v1alpha1/regions/{param_region}/emails/{param_email_id}",
)
self._throw_on_error(res)
return unmarshal_Email(res.json())
async def wait_for_email(
self,
*,
email_id: str,
region: Optional[Region] = None,
options: Optional[WaitForOptions[Email, Union[bool, Awaitable[bool]]]] = None,
) -> Email:
"""
Waits for :class:`Email <Email>` to be in a final state.
:param region: Region to target. If none is passed will use default region from the config.
:param email_id: ID of the email to retrieve.
:param options: The options for the waiter
:return: :class:`Email <Email>`
Usage:
::
            result = await api.wait_for_email(email_id="example")
"""
if not options:
options = WaitForOptions()
if not options.stop:
options.stop = lambda res: res.status not in EMAIL_TRANSIENT_STATUSES
return await wait_for_resource_async(
fetcher=self.get_email,
options=options,
args={
"email_id": email_id,
"region": region,
},
)
async def list_emails(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
project_id: Optional[str] = None,
domain_id: Optional[str] = None,
message_id: Optional[str] = None,
since: Optional[datetime] = None,
until: Optional[datetime] = None,
mail_from: Optional[str] = None,
mail_to: Optional[str] = None,
mail_rcpt: Optional[str] = None,
statuses: Optional[List[EmailStatus]] = None,
subject: Optional[str] = None,
search: Optional[str] = None,
order_by: ListEmailsRequestOrderBy = ListEmailsRequestOrderBy.CREATED_AT_DESC,
flags: Optional[List[EmailFlag]] = None,
) -> ListEmailsResponse:
"""
List emails.
Retrieve the list of emails sent from a specific domain or for a specific Project or Organization. You must specify the `region`.
:param region: Region to target. If none is passed will use default region from the config.
:param page:
:param page_size:
:param project_id: (Optional) ID of the Project in which to list the emails.
:param domain_id: (Optional) ID of the domain for which to list the emails.
:param message_id: (Optional) ID of the message for which to list the emails.
:param since: (Optional) List emails created after this date.
:param until: (Optional) List emails created before this date.
:param mail_from: (Optional) List emails sent with this sender's email address.
:param mail_to: List emails sent to this recipient's email address.
:param mail_rcpt: (Optional) List emails sent to this recipient's email address.
:param statuses: (Optional) List emails with any of these statuses.
:param subject: (Optional) List emails with this subject.
        :param search: (Optional) List emails by searching across all fields.
        :param order_by: (Optional) Criteria to use when ordering the emails.
:param flags: (Optional) List emails containing only specific flags.
:return: :class:`ListEmailsResponse <ListEmailsResponse>`
Usage:
::
result = await api.list_emails()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/transactional-email/v1alpha1/regions/{param_region}/emails",
params={
"domain_id": domain_id,
"flags": flags,
"mail_from": mail_from,
"mail_rcpt": mail_rcpt,
"mail_to": mail_to,
"message_id": message_id,
"order_by": order_by,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_id": project_id or self.client.default_project_id,
"search": search,
"since": since,
"statuses": statuses,
"subject": subject,
"until": until,
},
)
self._throw_on_error(res)
return unmarshal_ListEmailsResponse(res.json())
async def list_emails_all(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
project_id: Optional[str] = None,
domain_id: Optional[str] = None,
message_id: Optional[str] = None,
since: Optional[datetime] = None,
until: Optional[datetime] = None,
mail_from: Optional[str] = None,
mail_to: Optional[str] = None,
mail_rcpt: Optional[str] = None,
statuses: Optional[List[EmailStatus]] = None,
subject: Optional[str] = None,
search: Optional[str] = None,
order_by: Optional[ListEmailsRequestOrderBy] = None,
flags: Optional[List[EmailFlag]] = None,
) -> List[Email]:
"""
List emails.
Retrieve the list of emails sent from a specific domain or for a specific Project or Organization. You must specify the `region`.
:param region: Region to target. If none is passed will use default region from the config.
:param page:
:param page_size:
:param project_id: (Optional) ID of the Project in which to list the emails.
:param domain_id: (Optional) ID of the domain for which to list the emails.
:param message_id: (Optional) ID of the message for which to list the emails.
:param since: (Optional) List emails created after this date.
:param until: (Optional) List emails created before this date.
:param mail_from: (Optional) List emails sent with this sender's email address.
:param mail_to: List emails sent to this recipient's email address.
:param mail_rcpt: (Optional) List emails sent to this recipient's email address.
:param statuses: (Optional) List emails with any of these statuses.
:param subject: (Optional) List emails with this subject.
        :param search: (Optional) List emails by searching across all fields.
        :param order_by: (Optional) Criteria to use when ordering the emails.
:param flags: (Optional) List emails containing only specific flags.
:return: :class:`List[ListEmailsResponse] <List[ListEmailsResponse]>`
Usage:
::
result = await api.list_emails_all()
"""
return await fetch_all_pages_async(
type=ListEmailsResponse,
key="emails",
fetcher=self.list_emails,
args={
"region": region,
"page": page,
"page_size": page_size,
"project_id": project_id,
"domain_id": domain_id,
"message_id": message_id,
"since": since,
"until": until,
"mail_from": mail_from,
"mail_to": mail_to,
"mail_rcpt": mail_rcpt,
"statuses": statuses,
"subject": subject,
"search": search,
"order_by": order_by,
"flags": flags,
},
)
async def get_statistics(
self,
*,
region: Optional[Region] = None,
project_id: Optional[str] = None,
domain_id: Optional[str] = None,
since: Optional[datetime] = None,
until: Optional[datetime] = None,
mail_from: Optional[str] = None,
) -> Statistics:
"""
Email statuses.
Get information on your emails' statuses.
:param region: Region to target. If none is passed will use default region from the config.
:param project_id: (Optional) Number of emails for this Project.
:param domain_id: (Optional) Number of emails sent from this domain (must be coherent with the `project_id` and the `organization_id`).
:param since: (Optional) Number of emails created after this date.
:param until: (Optional) Number of emails created before this date.
:param mail_from: (Optional) Number of emails sent with this sender's email address.
:return: :class:`Statistics <Statistics>`
Usage:
::
result = await api.get_statistics()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/transactional-email/v1alpha1/regions/{param_region}/statistics",
params={
"domain_id": domain_id,
"mail_from": mail_from,
"project_id": project_id or self.client.default_project_id,
"since": since,
"until": until,
},
)
self._throw_on_error(res)
return unmarshal_Statistics(res.json())
async def cancel_email(
self,
*,
email_id: str,
region: Optional[Region] = None,
) -> Email:
"""
Cancel an email.
You can cancel the sending of an email if it has not been sent yet. You must specify the `region` and the `email_id` of the email you want to cancel.
:param region: Region to target. If none is passed will use default region from the config.
:param email_id: ID of the email to cancel.
:return: :class:`Email <Email>`
Usage:
::
result = await api.cancel_email(email_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_email_id = validate_path_param("email_id", email_id)
res = self._request(
"POST",
f"/transactional-email/v1alpha1/regions/{param_region}/emails/{param_email_id}/cancel",
)
self._throw_on_error(res)
return unmarshal_Email(res.json())
async def create_domain(
self,
*,
domain_name: str,
accept_tos: bool,
region: Optional[Region] = None,
project_id: Optional[str] = None,
) -> Domain:
"""
Register a domain in a project.
You must specify the `region`, `project_id` and `domain_name` to register a domain in a specific Project.
:param region: Region to target. If none is passed will use default region from the config.
:param project_id: ID of the project to which the domain belongs.
        :param domain_name: Fully qualified domain name.
:param accept_tos: Accept Scaleway's Terms of Service.
:return: :class:`Domain <Domain>`
Usage:
::
result = await api.create_domain(
domain_name="example",
accept_tos=True,
)
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"POST",
f"/transactional-email/v1alpha1/regions/{param_region}/domains",
body=marshal_CreateDomainRequest(
CreateDomainRequest(
domain_name=domain_name,
accept_tos=accept_tos,
region=region,
project_id=project_id,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Domain(res.json())
async def get_domain(
self,
*,
domain_id: str,
region: Optional[Region] = None,
) -> Domain:
"""
Get information about a domain.
Retrieve information about a specific domain using the `region` and `domain_id` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param domain_id: ID of the domain.
:return: :class:`Domain <Domain>`
Usage:
::
result = await api.get_domain(domain_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_domain_id = validate_path_param("domain_id", domain_id)
res = self._request(
"GET",
f"/transactional-email/v1alpha1/regions/{param_region}/domains/{param_domain_id}",
)
self._throw_on_error(res)
return unmarshal_Domain(res.json())
async def wait_for_domain(
self,
*,
domain_id: str,
region: Optional[Region] = None,
options: Optional[WaitForOptions[Domain, Union[bool, Awaitable[bool]]]] = None,
) -> Domain:
"""
Waits for :class:`Domain <Domain>` to be in a final state.
:param region: Region to target. If none is passed will use default region from the config.
:param domain_id: ID of the domain.
:param options: The options for the waiter
:return: :class:`Domain <Domain>`
Usage:
::
            result = await api.wait_for_domain(domain_id="example")
"""
if not options:
options = WaitForOptions()
if not options.stop:
options.stop = lambda res: res.status not in DOMAIN_TRANSIENT_STATUSES
return await wait_for_resource_async(
fetcher=self.get_domain,
options=options,
args={
"domain_id": domain_id,
"region": region,
},
)
async def list_domains(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
project_id: Optional[str] = None,
status: Optional[List[DomainStatus]] = None,
organization_id: Optional[str] = None,
name: Optional[str] = None,
) -> ListDomainsResponse:
"""
List domains.
Retrieve domains in a specific project or in a specific Organization using the `region` parameter.
:param region: Region to target. If none is passed will use default region from the config.
        :param page: Requested page number. Value must be greater than or equal to 1.
:param page_size: Page size.
:param project_id:
:param status:
:param organization_id:
:param name:
:return: :class:`ListDomainsResponse <ListDomainsResponse>`
Usage:
::
result = await api.list_domains()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/transactional-email/v1alpha1/regions/{param_region}/domains",
params={
"name": name,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_id": project_id or self.client.default_project_id,
"status": status,
},
)
self._throw_on_error(res)
return unmarshal_ListDomainsResponse(res.json())
async def list_domains_all(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
project_id: Optional[str] = None,
status: Optional[List[DomainStatus]] = None,
organization_id: Optional[str] = None,
name: Optional[str] = None,
) -> List[Domain]:
"""
List domains.
Retrieve domains in a specific project or in a specific Organization using the `region` parameter.
:param region: Region to target. If none is passed will use default region from the config.
        :param page: Requested page number. Value must be greater than or equal to 1.
:param page_size: Page size.
:param project_id:
:param status:
:param organization_id:
:param name:
:return: :class:`List[ListDomainsResponse] <List[ListDomainsResponse]>`
Usage:
::
result = await api.list_domains_all()
"""
return await fetch_all_pages_async(
type=ListDomainsResponse,
key="domains",
fetcher=self.list_domains,
args={
"region": region,
"page": page,
"page_size": page_size,
"project_id": project_id,
"status": status,
"organization_id": organization_id,
"name": name,
},
)
async def revoke_domain(
self,
*,
domain_id: str,
region: Optional[Region] = None,
) -> Domain:
"""
Delete a domain.
You must specify the domain you want to delete by the `region` and `domain_id`. Deleting a domain is permanent and cannot be undone.
:param region: Region to target. If none is passed will use default region from the config.
:param domain_id: ID of the domain to delete.
:return: :class:`Domain <Domain>`
Usage:
::
result = await api.revoke_domain(domain_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_domain_id = validate_path_param("domain_id", domain_id)
res = self._request(
"POST",
f"/transactional-email/v1alpha1/regions/{param_region}/domains/{param_domain_id}/revoke",
)
self._throw_on_error(res)
return unmarshal_Domain(res.json())
async def check_domain(
self,
*,
domain_id: str,
region: Optional[Region] = None,
) -> Domain:
"""
Domain DNS check.
Perform an immediate DNS check of a domain using the `region` and `domain_id` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param domain_id: ID of the domain to check.
:return: :class:`Domain <Domain>`
Usage:
::
result = await api.check_domain(domain_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_domain_id = validate_path_param("domain_id", domain_id)
res = self._request(
"POST",
f"/transactional-email/v1alpha1/regions/{param_region}/domains/{param_domain_id}/check",
)
self._throw_on_error(res)
return unmarshal_Domain(res.json())
async def get_domain_last_status(
self,
*,
domain_id: str,
region: Optional[Region] = None,
) -> DomainLastStatus:
"""
Display SPF and DKIM records status and potential errors.
Display SPF and DKIM records status and potential errors, including the found records to make debugging easier.
:param region: Region to target. If none is passed will use default region from the config.
        :param domain_id: ID of the domain to get the last status for.
:return: :class:`DomainLastStatus <DomainLastStatus>`
Usage:
::
result = await api.get_domain_last_status(domain_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_domain_id = validate_path_param("domain_id", domain_id)
res = self._request(
"GET",
f"/transactional-email/v1alpha1/regions/{param_region}/domains/{param_domain_id}/verification",
)
self._throw_on_error(res)
return unmarshal_DomainLastStatus(res.json())
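# --- Illustrative sketch (not part of the generated module) ---
# Typical end-to-end usage of the async API: send an email, then wait until it
# leaves the transient statuses tracked by wait_for_email. The client
# construction below is hypothetical; the actual way to build a Client may
# differ in your version of scaleway_core:
if __name__ == "__main__":
    import asyncio

    from scaleway_core.client import Client  # hypothetical import path

    async def main() -> None:
        api = TemV1Alpha1API(Client.from_config_file_and_env())  # hypothetical
        created = await api.create_email(
            subject="Hello there",  # the subject must contain at least 6 characters
            text="Plain-text body",
            html="<p>HTML body</p>",
            from_=CreateEmailRequestAddress(email="sender@example.com", name=None),
            to=[CreateEmailRequestAddress(email="rcpt@example.com", name=None)],
        )
        # Poll until the email reaches a final state (sent, failed or canceled).
        final = await api.wait_for_email(email_id=created.emails[0].id)
        print(final.status)

    asyncio.run(main())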
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/tem/v1alpha1/api.py
| 0.863377 | 0.152064 |
api.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ImageStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
READY = "ready"
DELETING = "deleting"
ERROR = "error"
LOCKED = "locked"
def __str__(self) -> str:
return str(self.value)
class ImageVisibility(str, Enum, metaclass=StrEnumMeta):
VISIBILITY_UNKNOWN = "visibility_unknown"
INHERIT = "inherit"
PUBLIC = "public"
PRIVATE = "private"
def __str__(self) -> str:
return str(self.value)
class ListImagesRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class ListNamespacesRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
DESCRIPTION_ASC = "description_asc"
DESCRIPTION_DESC = "description_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class ListTagsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
class NamespaceStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
READY = "ready"
DELETING = "deleting"
ERROR = "error"
LOCKED = "locked"
def __str__(self) -> str:
return str(self.value)
class TagStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
READY = "ready"
DELETING = "deleting"
ERROR = "error"
LOCKED = "locked"
def __str__(self) -> str:
return str(self.value)
@dataclass
class Image:
"""
Image.
"""
id: str
"""
UUID of the image.
"""
name: str
"""
    Name of the image. It must be unique within the namespace.
"""
namespace_id: str
"""
UUID of the namespace the image belongs to.
"""
status: ImageStatus
"""
Status of the image.
"""
status_message: Optional[str]
"""
Details of the image status.
"""
visibility: ImageVisibility
"""
Set to `public` to allow the image to be pulled without authentication. Else, set to `private`. Set to `inherit` to keep the same visibility configuration as the namespace.
"""
size: int
"""
    Image size in bytes, calculated from the size of image layers. One layer used in two tags of the same image is counted once, but one layer used in two images is counted twice.
"""
created_at: Optional[datetime]
"""
Date and time of image creation.
"""
updated_at: Optional[datetime]
"""
Date and time of last update.
"""
tags: List[str]
"""
List of docker tags of the image.
"""
@dataclass
class ListImagesResponse:
"""
List images response.
"""
images: List[Image]
"""
Paginated list of images that match the selected filters.
"""
total_count: int
"""
Total number of images that match the selected filters.
"""
@dataclass
class ListNamespacesResponse:
"""
List namespaces response.
"""
namespaces: List[Namespace]
"""
Paginated list of namespaces that match the selected filters.
"""
total_count: int
"""
Total number of namespaces that match the selected filters.
"""
@dataclass
class ListTagsResponse:
"""
List tags response.
"""
tags: List[Tag]
"""
Paginated list of tags that match the selected filters.
"""
total_count: int
"""
Total number of tags that match the selected filters.
"""
@dataclass
class Namespace:
"""
Namespace.
"""
id: str
"""
UUID of the namespace.
"""
name: str
"""
    Name of the namespace, unique within a region across all organizations.
"""
description: str
"""
Description of the namespace.
"""
organization_id: str
"""
Owner of the namespace.
"""
project_id: str
"""
Project of the namespace.
"""
status: NamespaceStatus
"""
Namespace status.
"""
status_message: str
"""
Namespace status details.
"""
endpoint: str
"""
Endpoint reachable by docker.
"""
is_public: bool
"""
    Defines whether or not the namespace is public.
"""
size: int
"""
Total size of the namespace, calculated as the sum of the size of all images in the namespace.
"""
created_at: Optional[datetime]
"""
Date and time of creation.
"""
updated_at: Optional[datetime]
"""
Date and time of last update.
"""
image_count: int
"""
Number of images in the namespace.
"""
region: Region
"""
Region the namespace belongs to.
"""
@dataclass
class Tag:
"""
Tag.
"""
id: str
"""
UUID of the tag.
"""
name: str
"""
Tag name, unique to an image.
"""
image_id: str
"""
    UUID of the image the tag belongs to.
"""
status: TagStatus
"""
Tag status.
"""
digest: str
"""
    Hash of the tag content. Several tags of the same image may have the same digest.
"""
created_at: Optional[datetime]
"""
Date and time of creation.
"""
updated_at: Optional[datetime]
"""
Date and time of last update.
"""
@dataclass
class ListNamespacesRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
page: Optional[int]
"""
A positive integer to choose the page to display.
"""
page_size: Optional[int]
"""
    A positive integer less than or equal to 100 to select the number of items to display.
"""
order_by: Optional[ListNamespacesRequestOrderBy]
"""
Criteria to use when ordering namespace listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
"""
organization_id: Optional[str]
"""
Filter by Organization ID.
"""
project_id: Optional[str]
"""
Filter by Project ID.
"""
name: Optional[str]
"""
Filter by the namespace name (exact match).
"""
@dataclass
class GetNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
UUID of the namespace.
"""
@dataclass
class CreateNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
name: Optional[str]
"""
Name of the namespace.
"""
description: str
"""
Description of the namespace.
"""
organization_id: Optional[str]
"""
Namespace owner (deprecated).
One-of ('project_identifier'): at most one of 'organization_id', 'project_id' could be set.
:deprecated
"""
project_id: Optional[str]
"""
Project ID on which the namespace will be created.
One-of ('project_identifier'): at most one of 'organization_id', 'project_id' could be set.
"""
is_public: bool
"""
    Defines whether or not the namespace is public.
"""
@dataclass
class UpdateNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
ID of the namespace to update.
"""
description: Optional[str]
"""
Namespace description.
"""
is_public: Optional[bool]
"""
Defines whether or not the namespace is public.
"""
@dataclass
class DeleteNamespaceRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
namespace_id: str
"""
UUID of the namespace.
"""
@dataclass
class ListImagesRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
page: Optional[int]
"""
A positive integer to choose the page to display.
"""
page_size: Optional[int]
"""
    A positive integer less than or equal to 100 to select the number of items to display.
"""
order_by: Optional[ListImagesRequestOrderBy]
"""
Criteria to use when ordering image listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
"""
namespace_id: Optional[str]
"""
Filter by the namespace ID.
"""
name: Optional[str]
"""
Filter by the image name (exact match).
"""
organization_id: Optional[str]
"""
Filter by Organization ID.
"""
project_id: Optional[str]
"""
Filter by Project ID.
"""
@dataclass
class GetImageRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
image_id: str
"""
UUID of the image.
"""
@dataclass
class UpdateImageRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
image_id: str
"""
ID of the image to update.
"""
visibility: ImageVisibility
"""
Set to `public` to allow the image to be pulled without authentication. Else, set to `private`. Set to `inherit` to keep the same visibility configuration as the namespace.
"""
@dataclass
class DeleteImageRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
image_id: str
"""
UUID of the image.
"""
@dataclass
class ListTagsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
image_id: str
"""
UUID of the image.
"""
page: Optional[int]
"""
A positive integer to choose the page to display.
"""
page_size: Optional[int]
"""
    A positive integer less than or equal to 100 to select the number of items to display.
"""
order_by: Optional[ListTagsRequestOrderBy]
"""
Criteria to use when ordering tag listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
"""
name: Optional[str]
"""
Filter by the tag name (exact match).
"""
@dataclass
class GetTagRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
tag_id: str
"""
UUID of the tag.
"""
@dataclass
class DeleteTagRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
tag_id: str
"""
UUID of the tag.
"""
force: Optional[bool]
"""
If two tags share the same digest the deletion will fail unless this parameter is set to true (deprecated).
:deprecated
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/registry/v1/types.py
| 0.901271 | 0.247669 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.utils import (
OneOfPossibility,
resolve_one_of,
)
from dateutil import parser
from .types import (
ImageVisibility,
Image,
ListImagesResponse,
ListNamespacesResponse,
ListTagsResponse,
Namespace,
Tag,
CreateNamespaceRequest,
UpdateNamespaceRequest,
UpdateImageRequest,
)
def unmarshal_Image(data: Any) -> Image:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Image' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("namespace_id", None)
args["namespace_id"] = field
field = data.get("size", None)
args["size"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("status_message", None)
args["status_message"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("visibility", None)
args["visibility"] = field
return Image(**args)
def unmarshal_Namespace(data: Any) -> Namespace:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Namespace' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("endpoint", None)
args["endpoint"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("image_count", None)
args["image_count"] = field
field = data.get("is_public", None)
args["is_public"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("size", None)
args["size"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("status_message", None)
args["status_message"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Namespace(**args)
def unmarshal_Tag(data: Any) -> Tag:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Tag' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("digest", None)
args["digest"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("image_id", None)
args["image_id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Tag(**args)
def unmarshal_ListImagesResponse(data: Any) -> ListImagesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListImagesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("images", None)
args["images"] = [unmarshal_Image(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListImagesResponse(**args)
def unmarshal_ListNamespacesResponse(data: Any) -> ListNamespacesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListNamespacesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("namespaces", None)
args["namespaces"] = (
[unmarshal_Namespace(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListNamespacesResponse(**args)
def unmarshal_ListTagsResponse(data: Any) -> ListTagsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListTagsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("tags", None)
args["tags"] = [unmarshal_Tag(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListTagsResponse(**args)
def marshal_CreateNamespaceRequest(
request: CreateNamespaceRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
output.update(
resolve_one_of(
[
OneOfPossibility(
"project_id",
request.project_id or defaults.default_project_id
if request.project_id is not None
else None,
defaults.default_project_id,
),
OneOfPossibility(
"organization_id",
request.organization_id or defaults.default_organization_id
if request.organization_id is not None
else None,
defaults.default_organization_id,
),
]
),
)
if request.description is not None:
output["description"] = request.description
if request.is_public is not None:
output["is_public"] = request.is_public
if request.name is not None:
output["name"] = request.name
return output
def marshal_UpdateImageRequest(
request: UpdateImageRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.visibility is not None:
output["visibility"] = ImageVisibility(request.visibility)
return output
def marshal_UpdateNamespaceRequest(
request: UpdateNamespaceRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.is_public is not None:
output["is_public"] = request.is_public
return output
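# --- Illustrative sketch (not part of the generated module) ---
# marshal_CreateNamespaceRequest resolves the project_id/organization_id
# one-of before copying the remaining fields. Each OneOfPossibility here also
# carries a default (the third argument), so the profile default is expected
# to be used when neither field is set on the request. A minimal sketch; the
# ProfileDefaults() construction below is hypothetical and the real
# constructor may require arguments:
if __name__ == "__main__":
    request = CreateNamespaceRequest(
        region=None,
        name="my-namespace",
        description="Example namespace",
        organization_id=None,
        project_id=None,
        is_public=False,
    )
    print(marshal_CreateNamespaceRequest(request, ProfileDefaults()))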
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/registry/v1/marshalling.py
| 0.739422 | 0.249059 |
marshalling.py
|
pypi
|
from typing import Awaitable, List, Optional, Union
from scaleway_core.api import API
from scaleway_core.bridge import (
Region,
)
from scaleway_core.utils import (
WaitForOptions,
fetch_all_pages_async,
random_name,
validate_path_param,
wait_for_resource_async,
)
from .types import (
ImageVisibility,
ListImagesRequestOrderBy,
ListNamespacesRequestOrderBy,
ListTagsRequestOrderBy,
Image,
ListImagesResponse,
ListNamespacesResponse,
ListTagsResponse,
Namespace,
Tag,
CreateNamespaceRequest,
UpdateNamespaceRequest,
UpdateImageRequest,
)
from .content import (
IMAGE_TRANSIENT_STATUSES,
NAMESPACE_TRANSIENT_STATUSES,
TAG_TRANSIENT_STATUSES,
)
from .marshalling import (
marshal_CreateNamespaceRequest,
marshal_UpdateImageRequest,
marshal_UpdateNamespaceRequest,
unmarshal_Image,
unmarshal_Namespace,
unmarshal_Tag,
unmarshal_ListImagesResponse,
unmarshal_ListNamespacesResponse,
unmarshal_ListTagsResponse,
)
class RegistryV1API(API):
"""
    Container Registry API.
"""
async def list_namespaces(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListNamespacesRequestOrderBy = ListNamespacesRequestOrderBy.CREATED_AT_ASC,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
name: Optional[str] = None,
) -> ListNamespacesResponse:
"""
List namespaces.
        List all namespaces in a specified region. By default, the namespaces listed are ordered by creation date in ascending order. This can be modified via the order_by field. You can also define additional parameters for your query, such as the `organization_id` and `project_id` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param page: A positive integer to choose the page to display.
:param page_size: A positive integer lower or equal to 100 to select the number of items to display.
:param order_by: Criteria to use when ordering namespace listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
:param organization_id: Filter by Organization ID.
:param project_id: Filter by Project ID.
:param name: Filter by the namespace name (exact match).
:return: :class:`ListNamespacesResponse <ListNamespacesResponse>`
Usage:
::
result = await api.list_namespaces()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/registry/v1/regions/{param_region}/namespaces",
params={
"name": name,
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_id": project_id or self.client.default_project_id,
},
)
self._throw_on_error(res)
return unmarshal_ListNamespacesResponse(res.json())
async def list_namespaces_all(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListNamespacesRequestOrderBy] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
name: Optional[str] = None,
) -> List[Namespace]:
"""
List namespaces.
        List all namespaces in a specified region. By default, the namespaces listed are ordered by creation date in ascending order. This can be modified via the order_by field. You can also define additional parameters for your query, such as the `organization_id` and `project_id` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param page: A positive integer to choose the page to display.
:param page_size: A positive integer lower or equal to 100 to select the number of items to display.
:param order_by: Criteria to use when ordering namespace listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
:param organization_id: Filter by Organization ID.
:param project_id: Filter by Project ID.
:param name: Filter by the namespace name (exact match).
        :return: :class:`List[Namespace] <List[Namespace]>`
Usage:
::
result = await api.list_namespaces_all()
"""
return await fetch_all_pages_async(
type=ListNamespacesResponse,
key="namespaces",
fetcher=self.list_namespaces,
args={
"region": region,
"page": page,
"page_size": page_size,
"order_by": order_by,
"organization_id": organization_id,
"project_id": project_id,
"name": name,
},
)
async def get_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
) -> Namespace:
"""
Get a namespace.
Retrieve information about a given namespace, specified by its `namespace_id` and region. Full details about the namespace, such as `description`, `project_id`, `status`, `endpoint`, `is_public`, `size`, and `image_count` are returned in the response.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: UUID of the namespace.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.get_namespace(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_namespace_id = validate_path_param("namespace_id", namespace_id)
res = self._request(
"GET",
f"/registry/v1/regions/{param_region}/namespaces/{param_namespace_id}",
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def wait_for_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
options: Optional[
WaitForOptions[Namespace, Union[bool, Awaitable[bool]]]
] = None,
) -> Namespace:
"""
Waits for :class:`Namespace <Namespace>` to be in a final state.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: UUID of the namespace.
        :param options: The options for the waiter.
:return: :class:`Namespace <Namespace>`
Usage:
::
            result = await api.wait_for_namespace(namespace_id="example")
"""
if not options:
options = WaitForOptions()
if not options.stop:
options.stop = lambda res: res.status not in NAMESPACE_TRANSIENT_STATUSES
return await wait_for_resource_async(
fetcher=self.get_namespace,
options=options,
args={
"namespace_id": namespace_id,
"region": region,
},
)
async def create_namespace(
self,
*,
description: str,
is_public: bool,
region: Optional[Region] = None,
name: Optional[str] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
) -> Namespace:
"""
Create a namespace.
Create a new Container Registry namespace. You must specify the namespace name and region in which you want it to be created. Optionally, you can specify the `project_id` and `is_public` in the request payload.
:param region: Region to target. If none is passed will use default region from the config.
:param name: Name of the namespace.
:param description: Description of the namespace.
:param organization_id: Namespace owner (deprecated).
One-of ('project_identifier'): at most one of 'organization_id', 'project_id' could be set.
:param project_id: Project ID on which the namespace will be created.
One-of ('project_identifier'): at most one of 'organization_id', 'project_id' could be set.
        :param is_public: Defines whether or not the namespace is public.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.create_namespace(
description="example",
is_public=True,
)
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"POST",
f"/registry/v1/regions/{param_region}/namespaces",
body=marshal_CreateNamespaceRequest(
CreateNamespaceRequest(
description=description,
is_public=is_public,
region=region,
name=name or random_name(prefix="ns"),
organization_id=organization_id,
project_id=project_id,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def update_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
description: Optional[str] = None,
is_public: Optional[bool] = None,
) -> Namespace:
"""
Update a namespace.
Update the parameters of a given namespace, specified by its `namespace_id` and `region`. You can update the `description` and `is_public` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: ID of the namespace to update.
:param description: Namespace description.
:param is_public: Defines whether or not the namespace is public.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.update_namespace(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_namespace_id = validate_path_param("namespace_id", namespace_id)
res = self._request(
"PATCH",
f"/registry/v1/regions/{param_region}/namespaces/{param_namespace_id}",
body=marshal_UpdateNamespaceRequest(
UpdateNamespaceRequest(
namespace_id=namespace_id,
region=region,
description=description,
is_public=is_public,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def delete_namespace(
self,
*,
namespace_id: str,
region: Optional[Region] = None,
) -> Namespace:
"""
Delete a namespace.
Delete a given namespace. You must specify, in the endpoint, the `region` and `namespace_id` parameters of the namespace you want to delete.
:param region: Region to target. If none is passed will use default region from the config.
:param namespace_id: UUID of the namespace.
:return: :class:`Namespace <Namespace>`
Usage:
::
result = await api.delete_namespace(namespace_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_namespace_id = validate_path_param("namespace_id", namespace_id)
res = self._request(
"DELETE",
f"/registry/v1/regions/{param_region}/namespaces/{param_namespace_id}",
)
self._throw_on_error(res)
return unmarshal_Namespace(res.json())
async def list_images(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListImagesRequestOrderBy = ListImagesRequestOrderBy.CREATED_AT_ASC,
namespace_id: Optional[str] = None,
name: Optional[str] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
) -> ListImagesResponse:
"""
List images.
List all images in a specified region. By default, the images listed are ordered by creation date in ascending order. This can be modified via the order_by field. You can also define additional parameters for your query, such as the `namespace_id` and `project_id` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param page: A positive integer to choose the page to display.
:param page_size: A positive integer lower or equal to 100 to select the number of items to display.
:param order_by: Criteria to use when ordering image listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
:param namespace_id: Filter by the namespace ID.
:param name: Filter by the image name (exact match).
:param organization_id: Filter by Organization ID.
:param project_id: Filter by Project ID.
:return: :class:`ListImagesResponse <ListImagesResponse>`
Usage:
::
result = await api.list_images()
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
res = self._request(
"GET",
f"/registry/v1/regions/{param_region}/images",
params={
"name": name,
"namespace_id": namespace_id,
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_id": project_id or self.client.default_project_id,
},
)
self._throw_on_error(res)
return unmarshal_ListImagesResponse(res.json())
async def list_images_all(
self,
*,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListImagesRequestOrderBy] = None,
namespace_id: Optional[str] = None,
name: Optional[str] = None,
organization_id: Optional[str] = None,
project_id: Optional[str] = None,
) -> List[Image]:
"""
List images.
List all images in a specified region. By default, the images listed are ordered by creation date in ascending order. This can be modified via the order_by field. You can also define additional parameters for your query, such as the `namespace_id` and `project_id` parameters.
:param region: Region to target. If none is passed will use default region from the config.
:param page: A positive integer to choose the page to display.
:param page_size: A positive integer lower or equal to 100 to select the number of items to display.
:param order_by: Criteria to use when ordering image listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
:param namespace_id: Filter by the namespace ID.
:param name: Filter by the image name (exact match).
:param organization_id: Filter by Organization ID.
:param project_id: Filter by Project ID.
        :return: :class:`List[Image] <List[Image]>`
Usage:
::
result = await api.list_images_all()
"""
return await fetch_all_pages_async(
type=ListImagesResponse,
key="images",
fetcher=self.list_images,
args={
"region": region,
"page": page,
"page_size": page_size,
"order_by": order_by,
"namespace_id": namespace_id,
"name": name,
"organization_id": organization_id,
"project_id": project_id,
},
)
async def get_image(
self,
*,
image_id: str,
region: Optional[Region] = None,
) -> Image:
"""
Get an image.
Retrieve information about a given container image, specified by its `image_id` and region. Full details about the image, such as `name`, `namespace_id`, `status`, `visibility`, and `size` are returned in the response.
:param region: Region to target. If none is passed will use default region from the config.
:param image_id: UUID of the image.
:return: :class:`Image <Image>`
Usage:
::
result = await api.get_image(image_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"GET",
f"/registry/v1/regions/{param_region}/images/{param_image_id}",
)
self._throw_on_error(res)
return unmarshal_Image(res.json())
async def wait_for_image(
self,
*,
image_id: str,
region: Optional[Region] = None,
options: Optional[WaitForOptions[Image, Union[bool, Awaitable[bool]]]] = None,
) -> Image:
"""
Waits for :class:`Image <Image>` to be in a final state.
:param region: Region to target. If none is passed will use default region from the config.
:param image_id: UUID of the image.
        :param options: The options for the waiter.
:return: :class:`Image <Image>`
Usage:
::
            result = await api.wait_for_image(image_id="example")
"""
if not options:
options = WaitForOptions()
if not options.stop:
options.stop = lambda res: res.status not in IMAGE_TRANSIENT_STATUSES
return await wait_for_resource_async(
fetcher=self.get_image,
options=options,
args={
"image_id": image_id,
"region": region,
},
)
async def update_image(
self,
*,
image_id: str,
visibility: ImageVisibility,
region: Optional[Region] = None,
) -> Image:
"""
Update an image.
Update the parameters of a given image, specified by its `image_id` and `region`. You can update the `visibility` parameter.
:param region: Region to target. If none is passed will use default region from the config.
:param image_id: ID of the image to update.
:param visibility: Set to `public` to allow the image to be pulled without authentication. Else, set to `private`. Set to `inherit` to keep the same visibility configuration as the namespace.
:return: :class:`Image <Image>`
Usage:
::
result = await api.update_image(
image_id="example",
                visibility=ImageVisibility.VISIBILITY_UNKNOWN,
)
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"PATCH",
f"/registry/v1/regions/{param_region}/images/{param_image_id}",
body=marshal_UpdateImageRequest(
UpdateImageRequest(
image_id=image_id,
visibility=visibility,
region=region,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Image(res.json())
async def delete_image(
self,
*,
image_id: str,
region: Optional[Region] = None,
) -> Image:
"""
Delete an image.
Delete a given image. You must specify, in the endpoint, the `region` and `image_id` parameters of the image you want to delete.
:param region: Region to target. If none is passed will use default region from the config.
:param image_id: UUID of the image.
:return: :class:`Image <Image>`
Usage:
::
result = await api.delete_image(image_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"DELETE",
f"/registry/v1/regions/{param_region}/images/{param_image_id}",
)
self._throw_on_error(res)
return unmarshal_Image(res.json())
async def list_tags(
self,
*,
image_id: str,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListTagsRequestOrderBy = ListTagsRequestOrderBy.CREATED_AT_ASC,
name: Optional[str] = None,
) -> ListTagsResponse:
"""
List tags.
List all tags for a given image, specified by region. By default, the tags listed are ordered by creation date in ascending order. This can be modified via the order_by field. You can also define additional parameters for your query, such as the `name`.
:param region: Region to target. If none is passed will use default region from the config.
:param image_id: UUID of the image.
:param page: A positive integer to choose the page to display.
:param page_size: A positive integer lower or equal to 100 to select the number of items to display.
:param order_by: Criteria to use when ordering tag listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
:param name: Filter by the tag name (exact match).
:return: :class:`ListTagsResponse <ListTagsResponse>`
Usage:
::
result = await api.list_tags(image_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_image_id = validate_path_param("image_id", image_id)
res = self._request(
"GET",
f"/registry/v1/regions/{param_region}/images/{param_image_id}/tags",
params={
"name": name,
"order_by": order_by,
"page": page,
"page_size": page_size or self.client.default_page_size,
},
)
self._throw_on_error(res)
return unmarshal_ListTagsResponse(res.json())
async def list_tags_all(
self,
*,
image_id: str,
region: Optional[Region] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListTagsRequestOrderBy] = None,
name: Optional[str] = None,
) -> List[Tag]:
"""
List tags.
List all tags for a given image, specified by region. By default, the tags listed are ordered by creation date in ascending order. This can be modified via the order_by field. You can also define additional parameters for your query, such as the `name`.
:param region: Region to target. If none is passed will use default region from the config.
:param image_id: UUID of the image.
:param page: A positive integer to choose the page to display.
:param page_size: A positive integer lower or equal to 100 to select the number of items to display.
:param order_by: Criteria to use when ordering tag listings. Possible values are `created_at_asc`, `created_at_desc`, `name_asc`, `name_desc`, `region`, `status_asc` and `status_desc`. The default value is `created_at_asc`.
:param name: Filter by the tag name (exact match).
        :return: :class:`List[Tag] <List[Tag]>`
Usage:
::
result = await api.list_tags_all(image_id="example")
"""
return await fetch_all_pages_async(
type=ListTagsResponse,
key="tags",
fetcher=self.list_tags,
args={
"image_id": image_id,
"region": region,
"page": page,
"page_size": page_size,
"order_by": order_by,
"name": name,
},
)
async def get_tag(
self,
*,
tag_id: str,
region: Optional[Region] = None,
) -> Tag:
"""
Get a tag.
Retrieve information about a given image tag, specified by its `tag_id` and region. Full details about the tag, such as `name`, `image_id`, `status`, and `digest` are returned in the response.
:param region: Region to target. If none is passed will use default region from the config.
:param tag_id: UUID of the tag.
:return: :class:`Tag <Tag>`
Usage:
::
result = await api.get_tag(tag_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_tag_id = validate_path_param("tag_id", tag_id)
res = self._request(
"GET",
f"/registry/v1/regions/{param_region}/tags/{param_tag_id}",
)
self._throw_on_error(res)
return unmarshal_Tag(res.json())
async def wait_for_tag(
self,
*,
tag_id: str,
region: Optional[Region] = None,
options: Optional[WaitForOptions[Tag, Union[bool, Awaitable[bool]]]] = None,
) -> Tag:
"""
Waits for :class:`Tag <Tag>` to be in a final state.
:param region: Region to target. If none is passed will use default region from the config.
:param tag_id: UUID of the tag.
        :param options: The options for the waiter.
:return: :class:`Tag <Tag>`
Usage:
::
            result = await api.wait_for_tag(tag_id="example")
"""
if not options:
options = WaitForOptions()
if not options.stop:
options.stop = lambda res: res.status not in TAG_TRANSIENT_STATUSES
return await wait_for_resource_async(
fetcher=self.get_tag,
options=options,
args={
"tag_id": tag_id,
"region": region,
},
)
async def delete_tag(
self,
*,
tag_id: str,
region: Optional[Region] = None,
force: Optional[bool] = None,
) -> Tag:
"""
Delete a tag.
Delete a given image tag. You must specify, in the endpoint, the `region` and `tag_id` parameters of the tag you want to delete.
:param region: Region to target. If none is passed will use default region from the config.
:param tag_id: UUID of the tag.
        :param force: If two tags share the same digest, the deletion will fail unless this parameter is set to true (deprecated).
:return: :class:`Tag <Tag>`
Usage:
::
result = await api.delete_tag(tag_id="example")
"""
param_region = validate_path_param(
"region", region or self.client.default_region
)
param_tag_id = validate_path_param("tag_id", tag_id)
res = self._request(
"DELETE",
f"/registry/v1/regions/{param_region}/tags/{param_tag_id}",
params={
"force": force,
},
)
self._throw_on_error(res)
return unmarshal_Tag(res.json())
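# --- Usage sketch (illustrative, not part of the generated module) -----------
# A minimal end-to-end flow. Client construction is out of scope here, and the
# Namespace.id / Namespace.name attributes are assumed from the dataclass in
# .types.
async def _example_registry_flow(api: RegistryV1API) -> None:  # pragma: no cover
    namespace = await api.create_namespace(description="demo", is_public=False)
    # Block until the namespace leaves NAMESPACE_TRANSIENT_STATUSES.
    namespace = await api.wait_for_namespace(namespace_id=namespace.id)
    # Paginate through every image of the new namespace.
    images = await api.list_images_all(namespace_id=namespace.id)
    print(f"{namespace.name}: {len(images)} image(s)")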
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/registry/v1/api.py
| 0.907166 | 0.154217 |
api.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.utils import (
StrEnumMeta,
)
class ListProjectsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
@dataclass
class ListProjectsResponse:
"""
List projects response.
"""
total_count: int
"""
Total number of Projects.
"""
projects: List[Project]
"""
Paginated returned Projects.
"""
@dataclass
class Project:
"""
Project.
"""
id: str
"""
ID of the Project.
"""
name: str
"""
Name of the Project.
"""
organization_id: str
"""
Organization ID of the Project.
"""
created_at: Optional[datetime]
"""
Creation date of the Project.
"""
updated_at: Optional[datetime]
"""
Update date of the Project.
"""
description: str
"""
Description of the Project.
"""
@dataclass
class CreateProjectRequest:
name: Optional[str]
"""
Name of the Project.
"""
organization_id: Optional[str]
"""
Organization ID of the Project.
"""
description: Optional[str]
"""
Description of the Project.
"""
@dataclass
class ListProjectsRequest:
organization_id: Optional[str]
"""
Organization ID of the Project.
"""
name: Optional[str]
"""
Name of the Project.
"""
page: Optional[int]
"""
Page number for the returned Projects.
"""
page_size: Optional[int]
"""
Maximum number of Project per page.
"""
order_by: Optional[ListProjectsRequestOrderBy]
"""
Sort order of the returned Projects.
"""
project_ids: Optional[List[str]]
"""
Project IDs to filter for. The results will be limited to any Projects with an ID in this array.
"""
@dataclass
class GetProjectRequest:
project_id: Optional[str]
"""
Project ID of the Project.
"""
@dataclass
class DeleteProjectRequest:
project_id: Optional[str]
"""
Project ID of the Project.
"""
@dataclass
class UpdateProjectRequest:
project_id: Optional[str]
"""
Project ID of the Project.
"""
name: Optional[str]
"""
Name of the Project.
"""
description: Optional[str]
"""
Description of the Project.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/account/v2/types.py
| 0.887394 | 0.260013 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
ListProjectsResponse,
Project,
CreateProjectRequest,
UpdateProjectRequest,
)
def unmarshal_Project(data: Any) -> Project:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Project' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Project(**args)
def unmarshal_ListProjectsResponse(data: Any) -> ListProjectsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListProjectsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("projects", None)
args["projects"] = (
[unmarshal_Project(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListProjectsResponse(**args)
def marshal_CreateProjectRequest(
request: CreateProjectRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.organization_id is not None:
output["organization_id"] = (
request.organization_id or defaults.default_organization_id
)
return output
def marshal_UpdateProjectRequest(
request: UpdateProjectRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
return output
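# --- Usage sketch (illustrative, not part of the generated module) -----------
# Round-trip a hand-written payload through the unmarshaller above; the payload
# shape is inferred from the keys it reads, not from an API contract.
def _example_unmarshal_projects() -> None:  # pragma: no cover
    payload = {"projects": [], "total_count": 0}
    resp = unmarshal_ListProjectsResponse(payload)
    assert resp.total_count == 0
    assert resp.projects == []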
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/account/v2/marshalling.py
| 0.737347 | 0.178777 |
marshalling.py
|
pypi
|
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.utils import (
fetch_all_pages_async,
random_name,
validate_path_param,
)
from .types import (
ListProjectsRequestOrderBy,
ListProjectsResponse,
Project,
CreateProjectRequest,
UpdateProjectRequest,
)
from .marshalling import (
marshal_CreateProjectRequest,
marshal_UpdateProjectRequest,
unmarshal_Project,
unmarshal_ListProjectsResponse,
)
class AccountV2API(API):
"""
Account API.
    User-related data.
This API allows you to manage projects.
"""
async def create_project(
self,
*,
name: Optional[str] = None,
organization_id: Optional[str] = None,
description: Optional[str] = None,
) -> Project:
"""
Create a new Project for an Organization.
Deprecated in favor of Account API v3.
Generate a new Project for an Organization, specifying its configuration including name and description.
:param name: Name of the Project.
:param organization_id: Organization ID of the Project.
:param description: Description of the Project.
:return: :class:`Project <Project>`
:deprecated
Usage:
::
result = await api.create_project()
"""
res = self._request(
"POST",
f"/account/v2/projects",
body=marshal_CreateProjectRequest(
CreateProjectRequest(
name=name or random_name(prefix="proj"),
organization_id=organization_id,
description=description,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Project(res.json())
async def list_projects(
self,
*,
organization_id: Optional[str] = None,
name: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListProjectsRequestOrderBy = ListProjectsRequestOrderBy.CREATED_AT_ASC,
project_ids: Optional[List[str]] = None,
) -> ListProjectsResponse:
"""
List all Projects of an Organization.
Deprecated in favor of Account API v3.
        List all Projects of an Organization. The response will include the total number of Projects as well as their associated Organizations, names, and IDs. Other information includes the creation and update date of the Project.
:param organization_id: Organization ID of the Project.
:param name: Name of the Project.
:param page: Page number for the returned Projects.
:param page_size: Maximum number of Project per page.
:param order_by: Sort order of the returned Projects.
:param project_ids: Project IDs to filter for. The results will be limited to any Projects with an ID in this array.
:return: :class:`ListProjectsResponse <ListProjectsResponse>`
:deprecated
Usage:
::
result = await api.list_projects()
"""
res = self._request(
"GET",
f"/account/v2/projects",
params={
"name": name,
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_ids": project_ids,
},
)
self._throw_on_error(res)
return unmarshal_ListProjectsResponse(res.json())
async def list_projects_all(
self,
*,
organization_id: Optional[str] = None,
name: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListProjectsRequestOrderBy] = None,
project_ids: Optional[List[str]] = None,
) -> List[Project]:
"""
List all Projects of an Organization.
Deprecated in favor of Account API v3.
        List all Projects of an Organization. The response will include the total number of Projects as well as their associated Organizations, names, and IDs. Other information includes the creation and update date of the Project.
:param organization_id: Organization ID of the Project.
:param name: Name of the Project.
:param page: Page number for the returned Projects.
:param page_size: Maximum number of Project per page.
:param order_by: Sort order of the returned Projects.
:param project_ids: Project IDs to filter for. The results will be limited to any Projects with an ID in this array.
        :return: :class:`List[Project] <List[Project]>`
:deprecated
Usage:
::
result = await api.list_projects_all()
"""
return await fetch_all_pages_async(
type=ListProjectsResponse,
key="projects",
fetcher=self.list_projects,
args={
"organization_id": organization_id,
"name": name,
"page": page,
"page_size": page_size,
"order_by": order_by,
"project_ids": project_ids,
},
)
async def get_project(
self,
*,
project_id: Optional[str] = None,
) -> Project:
"""
Get an existing Project.
Deprecated in favor of Account API v3.
Retrieve information about an existing Project, specified by its Project ID. Its full details, including ID, name and description, are returned in the response object.
:param project_id: Project ID of the Project.
:return: :class:`Project <Project>`
:deprecated
Usage:
::
result = await api.get_project()
"""
param_project_id = validate_path_param(
"project_id", project_id or self.client.default_project_id
)
res = self._request(
"GET",
f"/account/v2/projects/{param_project_id}",
)
self._throw_on_error(res)
return unmarshal_Project(res.json())
async def delete_project(
self,
*,
project_id: Optional[str] = None,
) -> Optional[None]:
"""
Delete an existing Project.
Deprecated in favor of Account API v3.
Delete an existing Project, specified by its Project ID. The Project needs to be empty (meaning there are no resources left in it) to be deleted effectively. Note that deleting a Project is permanent, and cannot be undone.
:param project_id: Project ID of the Project.
:deprecated
Usage:
::
result = await api.delete_project()
"""
param_project_id = validate_path_param(
"project_id", project_id or self.client.default_project_id
)
res = self._request(
"DELETE",
f"/account/v2/projects/{param_project_id}",
)
self._throw_on_error(res)
return None
async def update_project(
self,
*,
project_id: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
) -> Project:
"""
Update Project.
Deprecated in favor of Account API v3.
Update the parameters of an existing Project, specified by its Project ID. These parameters include the name and description.
:param project_id: Project ID of the Project.
:param name: Name of the Project.
:param description: Description of the Project.
:return: :class:`Project <Project>`
:deprecated
Usage:
::
result = await api.update_project()
"""
param_project_id = validate_path_param(
"project_id", project_id or self.client.default_project_id
)
res = self._request(
"PATCH",
f"/account/v2/projects/{param_project_id}",
body=marshal_UpdateProjectRequest(
UpdateProjectRequest(
project_id=project_id,
name=name,
description=description,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Project(res.json())
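# --- Usage sketch (illustrative, not part of the generated module) -----------
# Enumerate every Project visible with the profile's default organization_id.
# Client construction is out of scope here.
async def _example_list_projects(api: AccountV2API) -> None:  # pragma: no cover
    projects = await api.list_projects_all()
    for project in projects:
        print(project.id, project.name)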
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/account/v2/api.py
| 0.899909 | 0.184712 |
api.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.utils import (
StrEnumMeta,
)
class ListProjectsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
NAME_ASC = "name_asc"
NAME_DESC = "name_desc"
def __str__(self) -> str:
return str(self.value)
@dataclass
class ListProjectsResponse:
"""
List projects response.
"""
total_count: int
"""
Total number of Projects.
"""
projects: List[Project]
"""
Paginated returned Projects.
"""
@dataclass
class Project:
"""
Project.
"""
id: str
"""
ID of the Project.
"""
name: str
"""
Name of the Project.
"""
organization_id: str
"""
Organization ID of the Project.
"""
created_at: Optional[datetime]
"""
Creation date of the Project.
"""
updated_at: Optional[datetime]
"""
Update date of the Project.
"""
description: str
"""
Description of the Project.
"""
@dataclass
class ProjectApiCreateProjectRequest:
name: Optional[str]
"""
Name of the Project.
"""
organization_id: Optional[str]
"""
Organization ID of the Project.
"""
description: str
"""
Description of the Project.
"""
@dataclass
class ProjectApiListProjectsRequest:
organization_id: Optional[str]
"""
Organization ID of the Project.
"""
name: Optional[str]
"""
Name of the Project.
"""
page: Optional[int]
"""
Page number for the returned Projects.
"""
page_size: Optional[int]
"""
Maximum number of Project per page.
"""
order_by: Optional[ListProjectsRequestOrderBy]
"""
Sort order of the returned Projects.
"""
project_ids: Optional[List[str]]
"""
Project IDs to filter for. The results will be limited to any Projects with an ID in this array.
"""
@dataclass
class ProjectApiGetProjectRequest:
project_id: Optional[str]
"""
Project ID of the Project.
"""
@dataclass
class ProjectApiDeleteProjectRequest:
project_id: Optional[str]
"""
Project ID of the Project.
"""
@dataclass
class ProjectApiUpdateProjectRequest:
project_id: Optional[str]
"""
Project ID of the Project.
"""
name: Optional[str]
"""
Name of the Project.
"""
description: Optional[str]
"""
Description of the Project.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/account/v3/types.py
| 0.889918 | 0.246522 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
ListProjectsResponse,
Project,
ProjectApiCreateProjectRequest,
ProjectApiUpdateProjectRequest,
)
def unmarshal_Project(data: Any) -> Project:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Project' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
return Project(**args)
def unmarshal_ListProjectsResponse(data: Any) -> ListProjectsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListProjectsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("projects", None)
args["projects"] = (
[unmarshal_Project(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListProjectsResponse(**args)
def marshal_ProjectApiCreateProjectRequest(
request: ProjectApiCreateProjectRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
if request.organization_id is not None:
output["organization_id"] = (
request.organization_id or defaults.default_organization_id
)
return output
def marshal_ProjectApiUpdateProjectRequest(
request: ProjectApiUpdateProjectRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.name is not None:
output["name"] = request.name
return output
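# --- Usage sketch (illustrative, not part of the generated module) -----------
# marshal_ProjectApiCreateProjectRequest drops None fields entirely; the
# defaults fallback for organization_id only applies when the field is set but
# falsy (e.g. an empty string), mirroring the `or` in the function above.
def _example_marshal_create_project(defaults: ProfileDefaults) -> None:  # pragma: no cover
    body = marshal_ProjectApiCreateProjectRequest(
        ProjectApiCreateProjectRequest(
            name="demo",
            organization_id=None,  # None fields are omitted from the body
            description="sketch",
        ),
        defaults,
    )
    assert body == {"description": "sketch", "name": "demo"}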
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/account/v3/marshalling.py
| 0.725065 | 0.161618 |
marshalling.py
|
pypi
|
from typing import List, Optional
from scaleway_core.api import API
from scaleway_core.utils import (
fetch_all_pages_async,
random_name,
validate_path_param,
)
from .types import (
ListProjectsRequestOrderBy,
ListProjectsResponse,
Project,
ProjectApiCreateProjectRequest,
ProjectApiUpdateProjectRequest,
)
from .marshalling import (
marshal_ProjectApiCreateProjectRequest,
marshal_ProjectApiUpdateProjectRequest,
unmarshal_Project,
unmarshal_ListProjectsResponse,
)
class AccountProjectV3API(API):
"""
Account API.
This API allows you to manage projects.
"""
async def create_project(
self,
*,
description: str,
name: Optional[str] = None,
organization_id: Optional[str] = None,
) -> Project:
"""
Create a new Project for an Organization.
Generate a new Project for an Organization, specifying its configuration including name and description.
:param name: Name of the Project.
:param organization_id: Organization ID of the Project.
:param description: Description of the Project.
:return: :class:`Project <Project>`
Usage:
::
result = await api.create_project(description="example")
"""
res = self._request(
"POST",
f"/account/v3/projects",
body=marshal_ProjectApiCreateProjectRequest(
ProjectApiCreateProjectRequest(
description=description,
name=name or random_name(prefix="proj"),
organization_id=organization_id,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Project(res.json())
async def list_projects(
self,
*,
organization_id: Optional[str] = None,
name: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: ListProjectsRequestOrderBy = ListProjectsRequestOrderBy.CREATED_AT_ASC,
project_ids: Optional[List[str]] = None,
) -> ListProjectsResponse:
"""
List all Projects of an Organization.
List all Projects of an Organization. The response will include the total number of Projects as well as their associated Organizations, names, and IDs. Other information includes the creation and update date of the Project.
:param organization_id: Organization ID of the Project.
:param name: Name of the Project.
:param page: Page number for the returned Projects.
:param page_size: Maximum number of Project per page.
:param order_by: Sort order of the returned Projects.
:param project_ids: Project IDs to filter for. The results will be limited to any Projects with an ID in this array.
:return: :class:`ListProjectsResponse <ListProjectsResponse>`
Usage:
::
result = await api.list_projects()
"""
res = self._request(
"GET",
f"/account/v3/projects",
params={
"name": name,
"order_by": order_by,
"organization_id": organization_id
or self.client.default_organization_id,
"page": page,
"page_size": page_size or self.client.default_page_size,
"project_ids": project_ids,
},
)
self._throw_on_error(res)
return unmarshal_ListProjectsResponse(res.json())
async def list_projects_all(
self,
*,
organization_id: Optional[str] = None,
name: Optional[str] = None,
page: Optional[int] = None,
page_size: Optional[int] = None,
order_by: Optional[ListProjectsRequestOrderBy] = None,
project_ids: Optional[List[str]] = None,
) -> List[Project]:
"""
List all Projects of an Organization.
List all Projects of an Organization. The response will include the total number of Projects as well as their associated Organizations, names, and IDs. Other information includes the creation and update date of the Project.
:param organization_id: Organization ID of the Project.
:param name: Name of the Project.
:param page: Page number for the returned Projects.
:param page_size: Maximum number of Project per page.
:param order_by: Sort order of the returned Projects.
:param project_ids: Project IDs to filter for. The results will be limited to any Projects with an ID in this array.
        :return: :class:`List[Project] <List[Project]>`
Usage:
::
result = await api.list_projects_all()
"""
return await fetch_all_pages_async(
type=ListProjectsResponse,
key="projects",
fetcher=self.list_projects,
args={
"organization_id": organization_id,
"name": name,
"page": page,
"page_size": page_size,
"order_by": order_by,
"project_ids": project_ids,
},
)
async def get_project(
self,
*,
project_id: Optional[str] = None,
) -> Project:
"""
Get an existing Project.
Retrieve information about an existing Project, specified by its Project ID. Its full details, including ID, name and description, are returned in the response object.
:param project_id: Project ID of the Project.
:return: :class:`Project <Project>`
Usage:
::
result = await api.get_project()
"""
param_project_id = validate_path_param(
"project_id", project_id or self.client.default_project_id
)
res = self._request(
"GET",
f"/account/v3/projects/{param_project_id}",
)
self._throw_on_error(res)
return unmarshal_Project(res.json())
async def delete_project(
self,
*,
project_id: Optional[str] = None,
) -> Optional[None]:
"""
Delete an existing Project.
Delete an existing Project, specified by its Project ID. The Project needs to be empty (meaning there are no resources left in it) to be deleted effectively. Note that deleting a Project is permanent, and cannot be undone.
:param project_id: Project ID of the Project.
Usage:
::
result = await api.delete_project()
"""
param_project_id = validate_path_param(
"project_id", project_id or self.client.default_project_id
)
res = self._request(
"DELETE",
f"/account/v3/projects/{param_project_id}",
)
self._throw_on_error(res)
return None
async def update_project(
self,
*,
project_id: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
) -> Project:
"""
Update Project.
Update the parameters of an existing Project, specified by its Project ID. These parameters include the name and description.
:param project_id: Project ID of the Project.
:param name: Name of the Project.
:param description: Description of the Project.
:return: :class:`Project <Project>`
Usage:
::
result = await api.update_project()
"""
param_project_id = validate_path_param(
"project_id", project_id or self.client.default_project_id
)
res = self._request(
"PATCH",
f"/account/v3/projects/{param_project_id}",
body=marshal_ProjectApiUpdateProjectRequest(
ProjectApiUpdateProjectRequest(
project_id=project_id,
name=name,
description=description,
),
self.client,
),
)
self._throw_on_error(res)
return unmarshal_Project(res.json())
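# --- Usage sketch (illustrative, not part of the generated module) -----------
# Unlike the deprecated v2 API, v3 requires a description on create_project; a
# name is generated when none is given. Client construction is out of scope,
# and Project.id is assumed from the dataclass in .types.
async def _example_project_lifecycle(api: AccountProjectV3API) -> None:  # pragma: no cover
    project = await api.create_project(description="created via the v3 sketch")
    project = await api.update_project(project_id=project.id, name="renamed")
    await api.delete_project(project_id=project.id)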
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/account/v3/api.py
| 0.917238 | 0.156234 |
api.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Zone,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class FlexibleIPStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
READY = "ready"
UPDATING = "updating"
ATTACHED = "attached"
ERROR = "error"
DETACHING = "detaching"
LOCKED = "locked"
def __str__(self) -> str:
return str(self.value)
class ListFlexibleIPsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
def __str__(self) -> str:
return str(self.value)
class MACAddressStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
READY = "ready"
UPDATING = "updating"
USED = "used"
ERROR = "error"
DELETING = "deleting"
def __str__(self) -> str:
return str(self.value)
class MACAddressType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_TYPE = "unknown_type"
VMWARE = "vmware"
XEN = "xen"
KVM = "kvm"
def __str__(self) -> str:
return str(self.value)
@dataclass
class AttachFlexibleIPsResponse:
"""
    Attach flexible IPs response.
"""
total_count: int
"""
Total count of flexible IPs that are being updated.
"""
flexible_ips: List[FlexibleIP]
"""
List of flexible IPs in an updating state.
"""
@dataclass
class DetachFlexibleIPsResponse:
"""
    Detach flexible IPs response.
"""
total_count: int
"""
Total count of flexible IPs that are being detached.
"""
flexible_ips: List[FlexibleIP]
"""
List of flexible IPs in a detaching state.
"""
@dataclass
class FlexibleIP:
"""
    Flexible IP.
"""
id: str
"""
ID of the flexible IP.
"""
organization_id: str
"""
ID of the Organization the flexible IP is attached to.
"""
project_id: str
"""
ID of the Project the flexible IP is attached to.
"""
description: str
"""
Flexible IP description.
"""
tags: List[str]
"""
Flexible IP tags.
"""
updated_at: Optional[datetime]
"""
Date on which the flexible IP was last updated.
"""
created_at: Optional[datetime]
"""
Date on which the flexible IP was created.
"""
status: FlexibleIPStatus
"""
Flexible IP status.
    - ready: flexible IP is created and ready to be attached to a server or to be associated with a virtual MAC.
    - updating: flexible IP is being attached to a server or a virtual MAC operation is ongoing.
    - attached: flexible IP is attached to a server.
    - error: a flexible IP operation resulted in an error.
    - detaching: flexible IP is being detached from a server.
    - locked: the resource of the flexible IP is locked.
"""
ip_address: str
"""
IP of the flexible IP.
"""
mac_address: Optional[MACAddress]
"""
MAC address of the flexible IP.
"""
server_id: Optional[str]
"""
ID of the server linked to the flexible IP.
"""
reverse: str
"""
Reverse DNS value.
"""
zone: Zone
"""
Availability Zone of the flexible IP.
"""
@dataclass
class ListFlexibleIPsResponse:
"""
    List flexible IPs response.
"""
total_count: int
"""
Total count of matching flexible IPs.
"""
flexible_ips: List[FlexibleIP]
"""
List of all flexible IPs.
"""
@dataclass
class MACAddress:
"""
    MAC address.
"""
id: str
"""
ID of the flexible IP.
"""
mac_address: str
"""
MAC address of the Virtual MAC.
"""
mac_type: MACAddressType
"""
Type of virtual MAC.
"""
status: MACAddressStatus
"""
Status of virtual MAC.
"""
updated_at: Optional[datetime]
"""
Date on which the virtual MAC was last updated.
"""
created_at: Optional[datetime]
"""
Date on which the virtual MAC was created.
"""
zone: Zone
"""
MAC address IP Availability Zone.
"""
@dataclass
class CreateFlexibleIPRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
project_id: Optional[str]
"""
ID of the project to associate with the Flexible IP.
"""
description: str
"""
Flexible IP description (max. of 255 characters).
"""
tags: Optional[List[str]]
"""
Tags to associate to the flexible IP.
"""
server_id: Optional[str]
"""
ID of the server to which the newly created flexible IP will be attached.
"""
reverse: Optional[str]
"""
Value of the reverse DNS.
"""
is_ipv6: bool
"""
Defines whether the flexible IP has an IPv6 address.
"""
@dataclass
class GetFlexibleIPRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
"""
ID of the flexible IP.
"""
@dataclass
class ListFlexibleIPsRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
order_by: Optional[ListFlexibleIPsRequestOrderBy]
"""
Sort order of the returned flexible IPs.
"""
page: Optional[int]
"""
Page number.
"""
page_size: Optional[int]
"""
Maximum number of flexible IPs per page.
"""
tags: Optional[List[str]]
"""
Filter by tag, only flexible IPs with one or more matching tags will be returned.
"""
status: Optional[List[FlexibleIPStatus]]
"""
Filter by status, only flexible IPs with this status will be returned.
"""
server_ids: Optional[List[str]]
"""
Filter by server IDs, only flexible IPs with these server IDs will be returned.
"""
organization_id: Optional[str]
"""
Filter by Organization ID, only flexible IPs from this Organization will be returned.
"""
project_id: Optional[str]
"""
Filter by Project ID, only flexible IPs from this Project will be returned.
"""
@dataclass
class UpdateFlexibleIPRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
"""
ID of the flexible IP to update.
"""
description: Optional[str]
"""
Flexible IP description (max. 255 characters).
"""
tags: Optional[List[str]]
"""
Tags associated with the flexible IP.
"""
reverse: Optional[str]
"""
Value of the reverse DNS.
"""
@dataclass
class DeleteFlexibleIPRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
"""
ID of the flexible IP to delete.
"""
@dataclass
class AttachFlexibleIPRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fips_ids: List[str]
"""
List of flexible IP IDs to attach to a server.
Multiple IDs can be provided, but note that flexible IPs must belong to the same MAC group (see details about MAC groups).
"""
server_id: str
"""
ID of the server on which to attach the flexible IPs.
"""
@dataclass
class DetachFlexibleIPRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fips_ids: List[str]
"""
List of flexible IP IDs to detach from a server. Multiple IDs can be provided. Note that flexible IPs must belong to the same MAC group.
"""
@dataclass
class GenerateMACAddrRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
"""
ID of the flexible IP for which to generate a virtual MAC.
"""
mac_type: Optional[MACAddressType]
"""
TODO.
"""
@dataclass
class DuplicateMACAddrRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
"""
ID of the flexible IP on which to duplicate the virtual MAC.
Note that the flexible IPs need to be attached to the same server.
"""
duplicate_from_fip_id: str
"""
ID of the flexible IP to duplicate the Virtual MAC from.
Note that flexible IPs need to be attached to the same server.
"""
@dataclass
class MoveMACAddrRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
dst_fip_id: str
@dataclass
class DeleteMACAddrRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
fip_id: str
"""
ID of the flexible IP from which to delete the virtual MAC.
If the flexible IP belongs to a MAC group, the MAC will be removed from both the MAC group and flexible IP.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/flexibleip/v1alpha1/types.py
| 0.877805 | 0.224693 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
MACAddressType,
AttachFlexibleIPsResponse,
DetachFlexibleIPsResponse,
FlexibleIP,
ListFlexibleIPsResponse,
MACAddress,
CreateFlexibleIPRequest,
UpdateFlexibleIPRequest,
AttachFlexibleIPRequest,
DetachFlexibleIPRequest,
GenerateMACAddrRequest,
DuplicateMACAddrRequest,
MoveMACAddrRequest,
)
def unmarshal_MACAddress(data: Any) -> MACAddress:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'MACAddress' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("mac_address", None)
args["mac_address"] = field
field = data.get("mac_type", None)
args["mac_type"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return MACAddress(**args)
def unmarshal_FlexibleIP(data: Any) -> FlexibleIP:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'FlexibleIP' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("description", None)
args["description"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("ip_address", None)
args["ip_address"] = field
field = data.get("mac_address", None)
args["mac_address"] = unmarshal_MACAddress(field) if field is not None else None
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("reverse", None)
args["reverse"] = field
field = data.get("server_id", None)
args["server_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("zone", None)
args["zone"] = field
return FlexibleIP(**args)
def unmarshal_AttachFlexibleIPsResponse(data: Any) -> AttachFlexibleIPsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'AttachFlexibleIPsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("flexible_ips", None)
args["flexible_ips"] = (
[unmarshal_FlexibleIP(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return AttachFlexibleIPsResponse(**args)
def unmarshal_DetachFlexibleIPsResponse(data: Any) -> DetachFlexibleIPsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DetachFlexibleIPsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("flexible_ips", None)
args["flexible_ips"] = (
[unmarshal_FlexibleIP(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return DetachFlexibleIPsResponse(**args)
def unmarshal_ListFlexibleIPsResponse(data: Any) -> ListFlexibleIPsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListFlexibleIPsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("flexible_ips", None)
args["flexible_ips"] = (
[unmarshal_FlexibleIP(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListFlexibleIPsResponse(**args)
def marshal_AttachFlexibleIPRequest(
request: AttachFlexibleIPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.fips_ids is not None:
output["fips_ids"] = request.fips_ids
if request.server_id is not None:
output["server_id"] = request.server_id
return output
def marshal_CreateFlexibleIPRequest(
request: CreateFlexibleIPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.is_ipv6 is not None:
output["is_ipv6"] = request.is_ipv6
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.reverse is not None:
output["reverse"] = request.reverse
if request.server_id is not None:
output["server_id"] = request.server_id
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_DetachFlexibleIPRequest(
request: DetachFlexibleIPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.fips_ids is not None:
output["fips_ids"] = request.fips_ids
return output
def marshal_DuplicateMACAddrRequest(
request: DuplicateMACAddrRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.duplicate_from_fip_id is not None:
output["duplicate_from_fip_id"] = request.duplicate_from_fip_id
return output
def marshal_GenerateMACAddrRequest(
request: GenerateMACAddrRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.mac_type is not None:
output["mac_type"] = MACAddressType(request.mac_type)
return output
def marshal_MoveMACAddrRequest(
request: MoveMACAddrRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.dst_fip_id is not None:
output["dst_fip_id"] = request.dst_fip_id
return output
def marshal_UpdateFlexibleIPRequest(
request: UpdateFlexibleIPRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.description is not None:
output["description"] = request.description
if request.reverse is not None:
output["reverse"] = request.reverse
if request.tags is not None:
output["tags"] = request.tags
return output
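# A minimal usage sketch of the unmarshalling helpers above: decoding a
# hypothetical API payload into a response dataclass. The payload shape is
# illustrative only, not a recorded API response.
if __name__ == "__main__":
    sample = {"flexible_ips": [], "total_count": 0}
    response = unmarshal_AttachFlexibleIPsResponse(sample)
    assert response.total_count == 0
    assert response.flexible_ips == []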
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/flexibleip/v1alpha1/marshalling.py
| 0.761272 | 0.174797 |
marshalling.py
|
pypi
|
from typing import List
from .types import (
ImageState,
IpState,
PrivateNICState,
SecurityGroupState,
ServerState,
SnapshotState,
TaskStatus,
VolumeServerState,
VolumeState,
)
IMAGE_TRANSIENT_STATUSES: List[ImageState] = [
ImageState.CREATING,
]
"""
Lists transient statuses of the enum :class:`ImageState <ImageState>`.
"""
IP_TRANSIENT_STATUSES: List[IpState] = [
IpState.PENDING,
]
"""
Lists transient statuses of the enum :class:`IpState <IpState>`.
"""
PRIVATE_NIC_TRANSIENT_STATUSES: List[PrivateNICState] = [
PrivateNICState.SYNCING,
]
"""
Lists transient statuses of the enum :class:`PrivateNICState <PrivateNICState>`.
"""
SECURITY_GROUP_TRANSIENT_STATUSES: List[SecurityGroupState] = [
SecurityGroupState.SYNCING,
]
"""
Lists transient statuses of the enum :class:`SecurityGroupState <SecurityGroupState>`.
"""
SERVER_TRANSIENT_STATUSES: List[ServerState] = [
ServerState.STARTING,
ServerState.STOPPING,
]
"""
Lists transient statuses of the enum :class:`ServerState <ServerState>`.
"""
SNAPSHOT_TRANSIENT_STATUSES: List[SnapshotState] = [
SnapshotState.SNAPSHOTTING,
SnapshotState.IMPORTING,
SnapshotState.EXPORTING,
]
"""
Lists transient statuses of the enum :class:`SnapshotState <SnapshotState>`.
"""
TASK_TRANSIENT_STATUSES: List[TaskStatus] = [
TaskStatus.PENDING,
TaskStatus.STARTED,
TaskStatus.RETRY,
]
"""
Lists transient statuses of the enum :class:`TaskStatus <TaskStatus>`.
"""
VOLUME_SERVER_TRANSIENT_STATUSES: List[VolumeServerState] = [
VolumeServerState.SNAPSHOTTING,
VolumeServerState.FETCHING,
VolumeServerState.RESIZING,
VolumeServerState.SAVING,
VolumeServerState.HOTSYNCING,
]
"""
Lists transient statuses of the enum :class:`VolumeServerState <VolumeServerState>`.
"""
VOLUME_TRANSIENT_STATUSES: List[VolumeState] = [
VolumeState.SNAPSHOTTING,
VolumeState.FETCHING,
VolumeState.RESIZING,
VolumeState.SAVING,
VolumeState.HOTSYNCING,
]
"""
Lists transient statuses of the enum :class:`VolumeState <VolumeState>`.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/instance/v1/content.py
| 0.685002 | 0.278226 |
content.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from typing import Dict, List, Optional
from scaleway_core.bridge import (
Zone,
)
from .types import (
Arch,
BootType,
Bootscript,
Image,
ImageState,
PlacementGroup,
PrivateNIC,
SecurityGroup,
SecurityGroupPolicy,
SecurityGroupRule,
SecurityGroupRuleAction,
SecurityGroupRuleDirection,
SecurityGroupRuleProtocol,
SecurityGroupSummary,
SecurityGroupTemplate,
Server,
ServerAction,
ServerIp,
ServerIpv6,
ServerLocation,
ServerMaintenance,
ServerState,
ServerSummary,
Snapshot,
SnapshotBaseVolume,
SnapshotState,
Volume,
VolumeServerTemplate,
VolumeSummary,
VolumeVolumeType,
)
@dataclass
class _SetImageResponse:
"""
Set image response.
"""
image: Optional[Image]
@dataclass
class _SetSecurityGroupResponse:
"""
Set security group response.
"""
security_group: Optional[SecurityGroup]
@dataclass
class _SetSecurityGroupRuleResponse:
"""
Set security group rule response.
"""
rule: Optional[SecurityGroupRule]
@dataclass
class _SetServerResponse:
"""
Set server response.
"""
server: Optional[Server]
@dataclass
class _SetSnapshotResponse:
"""
Set snapshot response.
"""
snapshot: Optional[Snapshot]
@dataclass
class _CreateServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
name: Optional[str]
"""
Instance name.
"""
dynamic_ip_required: Optional[bool]
"""
Define if a dynamic IPv4 is required for the Instance.
"""
routed_ip_enabled: Optional[bool]
"""
If true, configure the Instance so it uses the new routed IP mode.
"""
commercial_type: str
"""
Define the Instance commercial type (e.g. GP1-S).
"""
image: str
"""
Instance image ID or label.
"""
volumes: Optional[Dict[str, VolumeServerTemplate]]
"""
Volumes attached to the server.
"""
enable_ipv6: bool
"""
True if IPv6 is enabled on the server.
"""
public_ip: Optional[str]
"""
ID of the reserved IP to attach to the Instance.
"""
public_ips: Optional[List[str]]
"""
A list of reserved IP IDs to attach to the Instance.
"""
boot_type: Optional[BootType]
"""
Boot type to use.
"""
bootscript: Optional[str]
"""
Bootscript ID to use when `boot_type` is set to `bootscript`.
:deprecated
"""
organization: Optional[str]
"""
Instance Organization ID.
One-of ('project_identifier'): at most one of 'organization', 'project' could be set.
:deprecated
"""
project: Optional[str]
"""
Instance Project ID.
One-of ('project_identifier'): at most one of 'organization', 'project' could be set.
"""
tags: Optional[List[str]]
"""
Instance tags.
"""
security_group: Optional[str]
"""
Security group ID.
"""
placement_group: Optional[str]
"""
Placement group ID if Instance must be part of a placement group.
"""
@dataclass
class _SetServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
id: str
"""
Instance unique ID.
"""
name: str
"""
Instance name.
"""
organization: Optional[str]
"""
Instance Organization ID.
"""
project: Optional[str]
"""
Instance Project ID.
"""
allowed_actions: Optional[List[ServerAction]]
"""
Provide a list of allowed actions on the server.
"""
tags: Optional[List[str]]
"""
Tags associated with the Instance.
"""
commercial_type: str
"""
Instance commercial type (e.g. GP1-M).
"""
creation_date: Optional[datetime]
"""
Instance creation date.
"""
dynamic_ip_required: bool
"""
True if a dynamic IPv4 is required.
"""
routed_ip_enabled: Optional[bool]
"""
True to configure the instance so it uses the new routed IP mode (once this is set to True you cannot set it back to False).
"""
enable_ipv6: bool
"""
True if IPv6 is enabled.
"""
hostname: str
"""
Instance host name.
"""
image: Optional[Image]
"""
Provide information on the Instance image.
"""
protected: bool
"""
Instance protection option is activated.
"""
private_ip: Optional[str]
"""
Instance private IP address.
"""
public_ip: Optional[ServerIp]
"""
Information about the public IP.
"""
public_ips: Optional[List[ServerIp]]
"""
Information about all the public IPs attached to the server.
"""
modification_date: Optional[datetime]
"""
Instance modification date.
"""
state: ServerState
"""
Instance state.
"""
location: Optional[ServerLocation]
"""
Instance location.
"""
ipv6: Optional[ServerIpv6]
"""
Instance IPv6 address.
"""
bootscript: Optional[Bootscript]
"""
Instance bootscript.
:deprecated
"""
boot_type: BootType
"""
Instance boot type.
"""
volumes: Optional[Dict[str, Volume]]
"""
Instance volumes.
"""
security_group: Optional[SecurityGroupSummary]
"""
Instance security group.
"""
maintenances: Optional[List[ServerMaintenance]]
"""
Instance planned maintenances.
"""
state_detail: str
"""
Instance state_detail.
"""
arch: Arch
"""
Instance architecture (refers to the CPU architecture used for the Instance, e.g. x86_64, arm64).
"""
placement_group: Optional[PlacementGroup]
"""
Instance placement group.
"""
private_nics: Optional[List[PrivateNIC]]
"""
Instance private NICs.
"""
@dataclass
class _UpdateServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_id: str
"""
UUID of the Instance.
"""
name: Optional[str]
"""
Name of the Instance.
"""
boot_type: Optional[BootType]
tags: Optional[List[str]]
"""
Tags of the Instance.
"""
volumes: Optional[Dict[str, VolumeServerTemplate]]
bootscript: Optional[str]
"""
:deprecated
"""
dynamic_ip_required: Optional[bool]
routed_ip_enabled: Optional[bool]
"""
True to configure the instance so it uses the new routed IP mode (once this is set to True you cannot set it back to False).
"""
public_ips: Optional[List[ServerIp]]
enable_ipv6: Optional[bool]
protected: Optional[bool]
security_group: Optional[SecurityGroupTemplate]
placement_group: Optional[str]
"""
Placement group ID if Instance must be part of a placement group.
"""
private_nics: Optional[List[PrivateNIC]]
"""
Instance private NICs.
"""
commercial_type: Optional[str]
"""
Set the commercial_type for this Instance.
Warning: This field has some restrictions:
- Cannot be changed if the Instance is not in `stopped` state.
- Cannot be changed if the Instance is in a placement group.
- Local storage requirements of the target commercial_types must be fulfilled (e.g. if an Instance has 80GB of local storage, it can be changed into a GP1-XS, which has a maximum of 150GB, but it cannot be changed into a DEV1-S, which has only 20GB).
"""
@dataclass
class _SetImageRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
id: str
name: str
arch: Arch
creation_date: Optional[datetime]
modification_date: Optional[datetime]
default_bootscript: Optional[Bootscript]
"""
:deprecated
"""
extra_volumes: Optional[Dict[str, Volume]]
from_server: str
organization: Optional[str]
public: bool
root_volume: Optional[VolumeSummary]
state: ImageState
project: Optional[str]
tags: Optional[List[str]]
@dataclass
class _SetSnapshotRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
snapshot_id: str
id: str
name: str
organization: Optional[str]
volume_type: VolumeVolumeType
size: int
state: SnapshotState
base_volume: Optional[SnapshotBaseVolume]
creation_date: Optional[datetime]
modification_date: Optional[datetime]
project: Optional[str]
tags: Optional[List[str]]
@dataclass
class _SetSecurityGroupRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
id: str
"""
ID of the security group (will be ignored).
"""
name: str
"""
Name of the security group.
"""
tags: Optional[List[str]]
"""
Tags of the security group.
"""
creation_date: Optional[datetime]
"""
Creation date of the security group (will be ignored).
"""
modification_date: Optional[datetime]
"""
Modification date of the security group (will be ignored).
"""
description: str
"""
Description of the security group.
"""
enable_default_security: bool
"""
True to block SMTP on IPv4 and IPv6. This feature is read-only; please open a support ticket if you need to make it configurable.
"""
inbound_default_policy: SecurityGroupPolicy
"""
Default inbound policy.
"""
outbound_default_policy: SecurityGroupPolicy
"""
Default outbound policy.
"""
organization: Optional[str]
"""
Security group Organization ID.
"""
project: Optional[str]
"""
Security group Project ID.
"""
organization_default: Optional[bool]
"""
Please use project_default instead.
:deprecated
"""
project_default: bool
"""
True to use this security group for future Instances created in this project.
"""
servers: Optional[List[ServerSummary]]
"""
Instances attached to this security group.
"""
stateful: bool
"""
True to set the security group as stateful.
"""
@dataclass
class _SetSecurityGroupRuleRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
security_group_id: str
security_group_rule_id: str
id: str
protocol: SecurityGroupRuleProtocol
direction: SecurityGroupRuleDirection
action: SecurityGroupRuleAction
ip_range: str
dest_port_from: Optional[int]
dest_port_to: Optional[int]
position: int
editable: bool
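# A minimal sketch: these private dataclasses declare no defaults, so every
# field must be passed explicitly (optional ones as None). All values below
# are illustrative placeholders.
if __name__ == "__main__":
    request = _CreateServerRequest(
        zone=None,
        name="my-instance",
        dynamic_ip_required=None,
        routed_ip_enabled=None,
        commercial_type="GP1-S",
        image="ubuntu_jammy",
        volumes=None,
        enable_ipv6=False,
        public_ip=None,
        public_ips=None,
        boot_type=None,
        bootscript=None,
        organization=None,
        project=None,
        tags=None,
        security_group=None,
        placement_group=None,
    )
    assert request.commercial_type == "GP1-S"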
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/instance/v1/types_private.py
| 0.911308 | 0.250111 |
types_private.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Money,
Region,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class DnsRecordStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_STATUS = "unknown_status"
VALID = "valid"
INVALID = "invalid"
def __str__(self) -> str:
return str(self.value)
class DnsRecordType(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_TYPE = "unknown_type"
A = "a"
CNAME = "cname"
MX = "mx"
TXT = "txt"
NS = "ns"
AAAA = "aaaa"
def __str__(self) -> str:
return str(self.value)
class DnsRecordsStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN = "unknown"
VALID = "valid"
INVALID = "invalid"
def __str__(self) -> str:
return str(self.value)
class HostingDnsStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_DNS_STATUS = "unknown_dns_status"
VALID = "valid"
INVALID = "invalid"
def __str__(self) -> str:
return str(self.value)
class HostingStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_STATUS = "unknown_status"
DELIVERING = "delivering"
READY = "ready"
DELETING = "deleting"
ERROR = "error"
LOCKED = "locked"
MIGRATING = "migrating"
def __str__(self) -> str:
return str(self.value)
class ListHostingsRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
def __str__(self) -> str:
return str(self.value)
class ListOffersRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
PRICE_ASC = "price_asc"
def __str__(self) -> str:
return str(self.value)
class NameserverStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_STATUS = "unknown_status"
VALID = "valid"
INVALID = "invalid"
def __str__(self) -> str:
return str(self.value)
class OfferQuotaWarning(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_QUOTA_WARNING = "unknown_quota_warning"
EMAIL_COUNT_EXCEEDED = "email_count_exceeded"
DATABASE_COUNT_EXCEEDED = "database_count_exceeded"
DISK_USAGE_EXCEEDED = "disk_usage_exceeded"
def __str__(self) -> str:
return str(self.value)
@dataclass
class DnsRecord:
"""
Dns record.
"""
name: str
"""
Record name.
"""
type_: DnsRecordType
"""
Record type.
"""
ttl: int
"""
Record time-to-live.
"""
value: str
"""
Record value.
"""
priority: Optional[int]
"""
Record priority level.
"""
status: DnsRecordStatus
"""
Record status.
"""
@dataclass
class DnsRecords:
"""
Dns records.
"""
records: List[DnsRecord]
"""
List of DNS records.
"""
name_servers: List[Nameserver]
"""
List of nameservers.
"""
status: DnsRecordsStatus
"""
Status of the records.
"""
@dataclass
class Hosting:
"""
Hosting.
"""
id: str
"""
ID of the Web Hosting plan.
"""
organization_id: str
"""
ID of the Scaleway Organization the Web Hosting plan belongs to.
"""
project_id: str
"""
ID of the Scaleway Project the Web Hosting plan belongs to.
"""
updated_at: Optional[datetime]
"""
Date on which the Web Hosting plan was last updated.
"""
created_at: Optional[datetime]
"""
Date on which the Web Hosting plan was created.
"""
status: HostingStatus
"""
Status of the Web Hosting plan.
"""
platform_hostname: str
"""
Hostname of the host platform.
"""
platform_number: Optional[int]
"""
Number of the host platform.
"""
offer_id: str
"""
ID of the active offer for the Web Hosting plan.
"""
offer_name: str
"""
Name of the active offer for the Web Hosting plan.
"""
domain: str
"""
Main domain associated with the Web Hosting plan.
"""
tags: List[str]
"""
List of tags associated with the Web Hosting plan.
"""
options: List[HostingOption]
"""
Array of any options activated for the Web Hosting plan.
"""
dns_status: HostingDnsStatus
"""
DNS status of the Web Hosting plan.
"""
cpanel_urls: Optional[HostingCpanelUrls]
"""
URL to connect to cPanel dashboard and to Webmail interface.
"""
username: str
"""
Main Web Hosting cPanel username.
"""
offer_end_of_life: bool
"""
Indicates if the hosting offer has reached its end of life.
"""
region: Region
"""
Region where the Web Hosting plan is hosted.
"""
@dataclass
class HostingCpanelUrls:
dashboard: str
webmail: str
@dataclass
class HostingOption:
"""
Hosting option.
"""
id: str
"""
Option ID.
"""
name: str
"""
Option name.
"""
@dataclass
class ListHostingsResponse:
"""
List hostings response.
"""
total_count: int
"""
Number of Web Hosting plans returned.
"""
hostings: List[Hosting]
"""
List of Web Hosting plans.
"""
@dataclass
class ListOffersResponse:
"""
List offers response.
"""
offers: List[Offer]
"""
List of offers.
"""
@dataclass
class Nameserver:
"""
Nameserver.
"""
hostname: str
"""
Hostname of the nameserver.
"""
status: NameserverStatus
"""
Status of the nameserver.
"""
is_default: bool
"""
Defines whether the nameserver is the default one.
"""
@dataclass
class Offer:
"""
Offer.
"""
id: str
"""
Offer ID.
"""
billing_operation_path: str
"""
Unique identifier used for billing.
"""
product: Optional[OfferProduct]
"""
Product constituting this offer.
"""
price: Optional[Money]
"""
Price of this offer.
"""
available: bool
"""
If a hosting_id was specified in the call, defines whether this offer is available for that Web Hosting plan to migrate (update) to.
"""
quota_warnings: List[OfferQuotaWarning]
"""
Quota warnings, if the offer is not available for the specified hosting_id.
"""
end_of_life: bool
"""
Indicates if the offer has reached its end of life.
"""
@dataclass
class OfferProduct:
"""
Offer product.
"""
name: str
"""
Product name.
"""
option: bool
"""
Product option.
"""
email_accounts_quota: int
email_storage_quota: int
databases_quota: int
hosting_storage_quota: int
support_included: bool
v_cpu: int
ram: int
@dataclass
class CreateHostingRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
offer_id: str
"""
ID of the selected offer for the Web Hosting plan.
"""
project_id: Optional[str]
"""
ID of the Scaleway Project in which to create the Web Hosting plan.
"""
email: Optional[str]
"""
Contact email for the Web Hosting client.
"""
tags: Optional[List[str]]
"""
List of tags for the Web Hosting plan.
"""
domain: str
"""
Domain name to link to the Web Hosting plan. You must already own this domain name, and have completed the DNS validation process beforehand.
"""
option_ids: Optional[List[str]]
"""
IDs of any selected additional options for the Web Hosting plan.
"""
@dataclass
class ListHostingsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
page: Optional[int]
"""
Page number to return, from the paginated results (must be a positive integer).
"""
page_size: Optional[int]
"""
Number of Web Hosting plans to return (must be a positive integer less than or equal to 100).
"""
order_by: Optional[ListHostingsRequestOrderBy]
"""
Sort order for Web Hosting plans in the response.
"""
tags: Optional[List[str]]
"""
Tags to filter for, only Web Hosting plans with matching tags will be returned.
"""
statuses: Optional[List[HostingStatus]]
"""
Statuses to filter for, only Web Hosting plans with matching statuses will be returned.
"""
domain: Optional[str]
"""
Domain to filter for, only Web Hosting plans associated with this domain will be returned.
"""
project_id: Optional[str]
"""
Project ID to filter for, only Web Hosting plans from this Project will be returned.
"""
organization_id: Optional[str]
"""
Organization ID to filter for, only Web Hosting plans from this Organization will be returned.
"""
@dataclass
class GetHostingRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
hosting_id: str
"""
Hosting ID.
"""
@dataclass
class UpdateHostingRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
hosting_id: str
"""
Hosting ID.
"""
email: Optional[str]
"""
New contact email for the Web Hosting plan.
"""
tags: Optional[List[str]]
"""
New tags for the Web Hosting plan.
"""
option_ids: Optional[List[str]]
"""
IDs of the new options for the Web Hosting plan.
"""
offer_id: Optional[str]
"""
ID of the new offer for the Web Hosting plan.
"""
@dataclass
class DeleteHostingRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
hosting_id: str
"""
Hosting ID.
"""
@dataclass
class RestoreHostingRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
hosting_id: str
"""
Hosting ID.
"""
@dataclass
class GetDomainDnsRecordsRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
domain: str
"""
Domain associated with the DNS records.
"""
@dataclass
class ListOffersRequest:
region: Optional[Region]
"""
Region to target. If none is passed will use default region from the config.
"""
order_by: ListOffersRequestOrderBy
"""
Sort order of offers in the response.
"""
without_options: bool
"""
Defines whether the response should consist of offers only, without options.
"""
only_options: bool
"""
Defines whether the response should consist of options only, without offers.
"""
hosting_id: Optional[str]
"""
ID of a Web Hosting plan, to check compatibility with returned offers (in case of wanting to update the plan).
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/webhosting/v1alpha1/types.py
| 0.852199 | 0.184584 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from scaleway_core.bridge import (
unmarshal_Money,
)
from dateutil import parser
from .types import (
DnsRecord,
DnsRecords,
Hosting,
HostingCpanelUrls,
HostingOption,
ListHostingsResponse,
ListOffersResponse,
Nameserver,
Offer,
OfferProduct,
CreateHostingRequest,
UpdateHostingRequest,
)
def unmarshal_HostingCpanelUrls(data: Any) -> HostingCpanelUrls:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'HostingCpanelUrls' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("dashboard", None)
args["dashboard"] = field
field = data.get("webmail", None)
args["webmail"] = field
return HostingCpanelUrls(**args)
def unmarshal_HostingOption(data: Any) -> HostingOption:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'HostingOption' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("id", None)
args["id"] = field
field = data.get("name", None)
args["name"] = field
return HostingOption(**args)
def unmarshal_OfferProduct(data: Any) -> OfferProduct:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'OfferProduct' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("databases_quota", None)
args["databases_quota"] = field
field = data.get("email_accounts_quota", None)
args["email_accounts_quota"] = field
field = data.get("email_storage_quota", None)
args["email_storage_quota"] = field
field = data.get("hosting_storage_quota", None)
args["hosting_storage_quota"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("option", None)
args["option"] = field
field = data.get("ram", None)
args["ram"] = field
field = data.get("support_included", None)
args["support_included"] = field
field = data.get("v_cpu", None)
args["v_cpu"] = field
return OfferProduct(**args)
def unmarshal_DnsRecord(data: Any) -> DnsRecord:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DnsRecord' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("name", None)
args["name"] = field
field = data.get("priority", None)
args["priority"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("ttl", None)
args["ttl"] = field
field = data.get("type", None)
args["type_"] = field
field = data.get("value", None)
args["value"] = field
return DnsRecord(**args)
def unmarshal_Hosting(data: Any) -> Hosting:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Hosting' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("cpanel_urls", None)
args["cpanel_urls"] = (
unmarshal_HostingCpanelUrls(field) if field is not None else None
)
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("dns_status", None)
args["dns_status"] = field
field = data.get("domain", None)
args["domain"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("offer_end_of_life", None)
args["offer_end_of_life"] = field
field = data.get("offer_id", None)
args["offer_id"] = field
field = data.get("offer_name", None)
args["offer_name"] = field
field = data.get("options", None)
args["options"] = (
[unmarshal_HostingOption(v) for v in field] if field is not None else None
)
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("platform_hostname", None)
args["platform_hostname"] = field
field = data.get("platform_number", None)
args["platform_number"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("region", None)
args["region"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("tags", None)
args["tags"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("username", None)
args["username"] = field
return Hosting(**args)
def unmarshal_Nameserver(data: Any) -> Nameserver:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Nameserver' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("hostname", None)
args["hostname"] = field
field = data.get("is_default", None)
args["is_default"] = field
field = data.get("status", None)
args["status"] = field
return Nameserver(**args)
def unmarshal_Offer(data: Any) -> Offer:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Offer' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("available", None)
args["available"] = field
field = data.get("billing_operation_path", None)
args["billing_operation_path"] = field
field = data.get("end_of_life", None)
args["end_of_life"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("price", None)
args["price"] = unmarshal_Money(field) if field is not None else None
field = data.get("product", None)
args["product"] = unmarshal_OfferProduct(field) if field is not None else None
field = data.get("quota_warnings", None)
args["quota_warnings"] = field
return Offer(**args)
def unmarshal_DnsRecords(data: Any) -> DnsRecords:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'DnsRecords' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("name_servers", None)
args["name_servers"] = (
[unmarshal_Nameserver(v) for v in field] if field is not None else None
)
field = data.get("records", None)
args["records"] = (
[unmarshal_DnsRecord(v) for v in field] if field is not None else None
)
field = data.get("status", None)
args["status"] = field
return DnsRecords(**args)
def unmarshal_ListHostingsResponse(data: Any) -> ListHostingsResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListHostingsResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("hostings", None)
args["hostings"] = (
[unmarshal_Hosting(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListHostingsResponse(**args)
def unmarshal_ListOffersResponse(data: Any) -> ListOffersResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListOffersResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("offers", None)
args["offers"] = [unmarshal_Offer(v) for v in field] if field is not None else None
return ListOffersResponse(**args)
def marshal_CreateHostingRequest(
request: CreateHostingRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.domain is not None:
output["domain"] = request.domain
if request.email is not None:
output["email"] = request.email
if request.offer_id is not None:
output["offer_id"] = request.offer_id
if request.option_ids is not None:
output["option_ids"] = request.option_ids
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.tags is not None:
output["tags"] = request.tags
return output
def marshal_UpdateHostingRequest(
request: UpdateHostingRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.email is not None:
output["email"] = request.email
if request.offer_id is not None:
output["offer_id"] = request.offer_id
if request.option_ids is not None:
output["option_ids"] = request.option_ids
if request.tags is not None:
output["tags"] = request.tags
return output
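# A minimal sketch of marshalling a request built from the dataclasses above
# into a JSON-ready body. ProfileDefaults() with no arguments is assumed to
# be valid (its fields default to None); only non-None request fields end up
# in the output.
if __name__ == "__main__":
    request = CreateHostingRequest(
        region=None,
        offer_id="11111111-1111-1111-1111-111111111111",
        project_id=None,
        email=None,
        tags=None,
        domain="example.com",
        option_ids=None,
    )
    body = marshal_CreateHostingRequest(request, ProfileDefaults())
    assert body == {
        "domain": "example.com",
        "offer_id": "11111111-1111-1111-1111-111111111111",
    }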
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/webhosting/v1alpha1/marshalling.py
| 0.729134 | 0.214013 |
marshalling.py
|
pypi
|
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import List, Optional
from scaleway_core.bridge import (
Zone,
)
from scaleway_core.utils import (
StrEnumMeta,
)
class ListServersRequestOrderBy(str, Enum, metaclass=StrEnumMeta):
CREATED_AT_ASC = "created_at_asc"
CREATED_AT_DESC = "created_at_desc"
def __str__(self) -> str:
return str(self.value)
class ServerStatus(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_STATUS = "unknown_status"
STARTING = "starting"
READY = "ready"
ERROR = "error"
REBOOTING = "rebooting"
UPDATING = "updating"
LOCKING = "locking"
LOCKED = "locked"
UNLOCKING = "unlocking"
REINSTALLING = "reinstalling"
def __str__(self) -> str:
return str(self.value)
class ServerTypeStock(str, Enum, metaclass=StrEnumMeta):
UNKNOWN_STOCK = "unknown_stock"
NO_STOCK = "no_stock"
LOW_STOCK = "low_stock"
HIGH_STOCK = "high_stock"
def __str__(self) -> str:
return str(self.value)
@dataclass
class ListOSResponse:
"""
List OS response.
"""
total_count: int
"""
Total number of OS.
"""
os: List[OS]
"""
List of OS.
"""
@dataclass
class ListServerTypesResponse:
"""
List server types response.
"""
server_types: List[ServerType]
"""
Available server types.
"""
@dataclass
class ListServersResponse:
"""
List servers response.
"""
total_count: int
"""
Total number of servers.
"""
servers: List[Server]
"""
Paginated returned servers.
"""
@dataclass
class OS:
"""
OS.
"""
id: str
"""
Unique ID of the OS.
"""
name: str
"""
OS name.
"""
label: str
"""
OS name as it should be displayed.
"""
image_url: str
"""
URL of the image.
"""
compatible_server_types: List[str]
"""
List of compatible server types.
"""
@dataclass
class Server:
"""
Server.
"""
id: str
"""
UUID of the server.
"""
type_: str
"""
Type of the server.
"""
name: str
"""
Name of the server.
"""
project_id: str
"""
Project this server is associated with.
"""
organization_id: str
"""
Organization this server is associated with.
"""
ip: str
"""
IPv4 address of the server.
"""
vnc_url: str
"""
URL of the VNC.
"""
status: ServerStatus
"""
Current status of the server.
"""
created_at: Optional[datetime]
"""
Date on which the server was created.
"""
updated_at: Optional[datetime]
"""
Date on which the server was last updated.
"""
deletable_at: Optional[datetime]
"""
Date from which the server can be deleted.
"""
zone: Zone
"""
Zone of the server.
"""
@dataclass
class ServerType:
"""
Server type.
"""
cpu: Optional[ServerTypeCPU]
"""
CPU description.
"""
disk: Optional[ServerTypeDisk]
"""
Size of the local disk of the server.
"""
name: str
"""
Name of the type.
"""
memory: Optional[ServerTypeMemory]
"""
Size of memory available.
"""
stock: ServerTypeStock
"""
Current stock.
"""
minimum_lease_duration: Optional[str]
"""
Minimum duration of the lease in seconds (e.g. 3.4s).
"""
@dataclass
class ServerTypeCPU:
name: str
core_count: int
@dataclass
class ServerTypeDisk:
capacity: int
type_: str
@dataclass
class ServerTypeMemory:
capacity: int
type_: str
@dataclass
class ListServerTypesRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
@dataclass
class GetServerTypeRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_type: str
"""
Server type identifier.
"""
@dataclass
class CreateServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
name: Optional[str]
"""
Create a server with this given name.
"""
project_id: Optional[str]
"""
Create a server in the given project ID.
"""
type_: str
"""
Create a server of the given type.
"""
@dataclass
class ListServersRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
order_by: Optional[ListServersRequestOrderBy]
"""
Sort order of the returned servers.
"""
project_id: Optional[str]
"""
Only list servers of this project ID.
"""
organization_id: Optional[str]
"""
Only list servers of this Organization ID.
"""
page: Optional[int]
"""
Positive integer to choose the page to return.
"""
page_size: Optional[int]
"""
Positive integer less than or equal to 100 to select the number of items to return.
"""
@dataclass
class ListOSRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
page: Optional[int]
"""
Positive integer to choose the page to return.
"""
page_size: Optional[int]
"""
Positive integer less than or equal to 100 to select the number of items to return.
"""
server_type: Optional[str]
"""
List of compatible server types.
"""
name: Optional[str]
"""
Filter OS by name (note that "11.1" will return "11.1.2" and "11.1" but not "12").
"""
@dataclass
class GetOSRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
os_id: str
"""
UUID of the OS you want to get.
"""
@dataclass
class GetServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_id: str
"""
UUID of the server you want to get.
"""
@dataclass
class UpdateServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_id: str
"""
UUID of the server you want to update.
"""
name: str
"""
Updated name for your server.
"""
@dataclass
class DeleteServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_id: str
"""
UUID of the server you want to delete.
"""
@dataclass
class RebootServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_id: str
"""
UUID of the server you want to reboot.
"""
@dataclass
class ReinstallServerRequest:
zone: Optional[Zone]
"""
Zone to target. If none is passed will use default zone from the config.
"""
server_id: str
"""
UUID of the server you want to reinstall.
"""
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/applesilicon/v1alpha1/types.py
| 0.89391 | 0.287737 |
types.py
|
pypi
|
from typing import Any, Dict
from scaleway_core.profile import ProfileDefaults
from dateutil import parser
from .types import (
ListOSResponse,
ListServerTypesResponse,
ListServersResponse,
OS,
Server,
ServerType,
ServerTypeCPU,
ServerTypeDisk,
ServerTypeMemory,
CreateServerRequest,
UpdateServerRequest,
)
def unmarshal_ServerTypeCPU(data: Any) -> ServerTypeCPU:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ServerTypeCPU' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("core_count", None)
args["core_count"] = field
field = data.get("name", None)
args["name"] = field
return ServerTypeCPU(**args)
def unmarshal_ServerTypeDisk(data: Any) -> ServerTypeDisk:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ServerTypeDisk' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("capacity", None)
args["capacity"] = field
field = data.get("type", None)
args["type_"] = field
return ServerTypeDisk(**args)
def unmarshal_ServerTypeMemory(data: Any) -> ServerTypeMemory:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ServerTypeMemory' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("capacity", None)
args["capacity"] = field
field = data.get("type", None)
args["type_"] = field
return ServerTypeMemory(**args)
def unmarshal_OS(data: Any) -> OS:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'OS' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("compatible_server_types", None)
args["compatible_server_types"] = field
field = data.get("id", None)
args["id"] = field
field = data.get("image_url", None)
args["image_url"] = field
field = data.get("label", None)
args["label"] = field
field = data.get("name", None)
args["name"] = field
return OS(**args)
def unmarshal_Server(data: Any) -> Server:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'Server' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("created_at", None)
args["created_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("deletable_at", None)
args["deletable_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("id", None)
args["id"] = field
field = data.get("ip", None)
args["ip"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("organization_id", None)
args["organization_id"] = field
field = data.get("project_id", None)
args["project_id"] = field
field = data.get("status", None)
args["status"] = field
field = data.get("type", None)
args["type_"] = field
field = data.get("updated_at", None)
args["updated_at"] = parser.isoparse(field) if type(field) is str else field
field = data.get("vnc_url", None)
args["vnc_url"] = field
field = data.get("zone", None)
args["zone"] = field
return Server(**args)
def unmarshal_ServerType(data: Any) -> ServerType:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ServerType' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("cpu", None)
args["cpu"] = unmarshal_ServerTypeCPU(field) if field is not None else None
field = data.get("disk", None)
args["disk"] = unmarshal_ServerTypeDisk(field) if field is not None else None
field = data.get("memory", None)
args["memory"] = unmarshal_ServerTypeMemory(field) if field is not None else None
field = data.get("minimum_lease_duration", None)
args["minimum_lease_duration"] = field
field = data.get("name", None)
args["name"] = field
field = data.get("stock", None)
args["stock"] = field
return ServerType(**args)
def unmarshal_ListOSResponse(data: Any) -> ListOSResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListOSResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("os", None)
args["os"] = [unmarshal_OS(v) for v in field] if field is not None else None
field = data.get("total_count", None)
args["total_count"] = field
return ListOSResponse(**args)
def unmarshal_ListServerTypesResponse(data: Any) -> ListServerTypesResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListServerTypesResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("server_types", None)
args["server_types"] = (
[unmarshal_ServerType(v) for v in field] if field is not None else None
)
return ListServerTypesResponse(**args)
def unmarshal_ListServersResponse(data: Any) -> ListServersResponse:
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'ListServersResponse' failed as data isn't a dictionary."
)
args: Dict[str, Any] = {}
field = data.get("servers", None)
args["servers"] = (
[unmarshal_Server(v) for v in field] if field is not None else None
)
field = data.get("total_count", None)
args["total_count"] = field
return ListServersResponse(**args)
def marshal_CreateServerRequest(
request: CreateServerRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
if request.project_id is not None:
output["project_id"] = request.project_id or defaults.default_project_id
if request.type_ is not None:
output["type"] = request.type_
return output
def marshal_UpdateServerRequest(
request: UpdateServerRequest,
defaults: ProfileDefaults,
) -> Dict[str, Any]:
output: Dict[str, Any] = {}
if request.name is not None:
output["name"] = request.name
return output
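# A minimal sketch of both directions: a request marshalled into a JSON-ready
# dict, and a hypothetical API payload unmarshalled back into a Server. The
# payload values are illustrative placeholders, and ProfileDefaults() is
# assumed constructible with no arguments.
if __name__ == "__main__":
    request = CreateServerRequest(zone=None, name="my-mac", project_id=None, type_="M1-M")
    body = marshal_CreateServerRequest(request, ProfileDefaults())
    assert body == {"name": "my-mac", "type": "M1-M"}

    payload = {
        "id": "11111111-1111-1111-1111-111111111111",
        "name": "my-mac",
        "type": "M1-M",
        "status": "ready",
        "created_at": "2023-01-01T00:00:00+00:00",
    }
    server = unmarshal_Server(payload)
    assert server.type_ == "M1-M"
    assert server.created_at is not None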
|
/scaleway_async-1.1.0-py3-none-any.whl/scaleway_async/applesilicon/v1alpha1/marshalling.py
| 0.744563 | 0.25281 |
marshalling.py
|
pypi
|
from __future__ import annotations
import dataclasses
import logging
import os
from dataclasses import dataclass
from typing import Optional, Type, TypeVar
import sys
import yaml
from scaleway_core import __version__
from scaleway_core.profile.file import CONFIG_PROPERTIES_TO_PROFILE
from .env import ENV_KEY_SCW_CONFIG_PATH, ENV_KEY_SCW_PROFILE, ENV_VARIABLES_TO_PROFILE
@dataclass
class ProfileDefaults:
default_organization_id: Optional[str] = None
"""
Your organization ID is the identifier of your account inside Scaleway infrastructure.
"""
default_project_id: Optional[str] = None
"""
Your project ID is the identifier of the project your resources are attached to.
"""
default_region: Optional[str] = None
"""
A region is represented as a geographical area such as France (Paris) or the Netherlands (Amsterdam).
It can contain multiple availability zones.
Examples: fr-par, nl-ams.
"""
default_zone: Optional[str] = None
"""
A region can be split into many availability zones (AZ).
Latency between multiple AZ of the same region are low as they have a common network layer.
Examples: fr-par-1, nl-ams-1
"""
default_page_size: Optional[int] = None
"""
The default number of results when requesting a paginated resource.
"""
@dataclass
class ProfileConfig:
access_key: Optional[str] = None
"""
You need an access key and a secret key to connect to Scaleway API.
Generate your access key at the following address: https://console.scaleway.com/project/credentials.
"""
secret_key: Optional[str] = None
"""
The secret key is the value that can be used to authenticate against the API (the value used in X-Auth-Token HTTP-header).
The secret key MUST remain secret and not given to anyone or published online.
Generate your secret key at the following address: https://console.scaleway.com/project/credentials.
"""
api_url: str = "https://api.scaleway.com"
"""
The Scaleway API URL.
Change that if you want to direct requests to a different endpoint.
"""
api_allow_insecure: bool = False
"""
Allow insecure connection to the API.
"""
user_agent: str = f"scaleway-sdk-python/{__version__}"
"""
The User-Agent sent with each request.
"""
ProfileSelf = TypeVar("ProfileSelf", bound="Profile")
@dataclass
class Profile(ProfileDefaults, ProfileConfig):
def merge(self, other: Profile) -> None:
"""
Merge the current profile with another one.
"""
for field in dataclasses.fields(Profile):
current_value = getattr(self, field.name)
if current_value is None:
setattr(self, field.name, getattr(other, field.name))
@classmethod
def from_env(cls: Type[ProfileSelf], force_none: bool = False) -> ProfileSelf:
"""
Loads profile from environment variables.
"""
profile = cls()
for env_variable, profile_property in ENV_VARIABLES_TO_PROFILE.items():
value = os.environ.get(env_variable)
if value is not None:
setattr(profile, profile_property, value)
elif force_none:
setattr(profile, profile_property, None)
return profile
@classmethod
def get_default_config_directory(cls) -> str:
xdg_config_path = os.environ.get("XDG_CONFIG_HOME")
if xdg_config_path is not None and xdg_config_path != "":
return os.path.join(xdg_config_path, "scw")
return os.path.join(os.path.expanduser("~"), ".config", "scw")
@classmethod
def get_default_config_file_path(cls, filepath: Optional[str] = None) -> str:
if filepath is not None:
return filepath
filepath = os.environ.get(ENV_KEY_SCW_CONFIG_PATH)
if filepath is not None and filepath != "":
return filepath
return os.path.join(Profile.get_default_config_directory(), "config.yaml")
@classmethod
def from_config_file(
cls: Type[ProfileSelf],
filepath: Optional[str] = None,
profile_name: Optional[str] = "default",
force_none: bool = False,
) -> ProfileSelf:
filepath = cls.get_default_config_file_path(filepath)
with open(filepath, "r") as f:
config = yaml.safe_load(f)
if type(config) is not dict:
raise ValueError("Invalid config file")
profile = cls()
for file_property, profile_property in CONFIG_PROPERTIES_TO_PROFILE.items():
value = config.get(file_property)
if value is not None:
setattr(profile, profile_property, value)
elif force_none:
setattr(profile, profile_property, None)
if profile_name is not None and profile_name != "default":
has_profile = (
"profiles" in config
and type(config["profiles"]) is dict
and profile_name in config["profiles"]
)
if not has_profile:
raise ValueError(f"Profile '{profile_name}' not found")
overrides = config["profiles"][profile_name]
if type(overrides) is not dict:
raise ValueError(f"Invalid profile '{profile_name}'")
for (
file_property,
profile_property,
) in CONFIG_PROPERTIES_TO_PROFILE.items():
value = overrides.get(file_property)
if value is not None:
setattr(profile, profile_property, value)
elif force_none:
setattr(profile, profile_property, None)
return profile
@classmethod
def from_config_file_and_env(
cls: Type[ProfileSelf],
filepath: Optional[str] = None,
profile_name: Optional[str] = os.environ.get(ENV_KEY_SCW_PROFILE, "default"),
) -> ProfileSelf:
"""
Loads profile from a config file and environment variables.
Environment variables override config file.
- If config file is not found, the profile is still loaded from environment variables.
- If you want it to throw an error in case of missing or invalid config file, use `Profile.from_config_file` and `Profile.from_env` instead.
"""
has_config_profile = False
try:
config_profile = cls.from_config_file(filepath, profile_name)
has_config_profile = True
except Exception as e:
logging.getLogger("scaleway").warning(
f"Could not load profile from config file: {e}"
)
env_profile = cls.from_env(force_none=has_config_profile)
if has_config_profile:
env_profile.merge(config_profile)
return env_profile
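# A minimal usage sketch: load a profile from the config file and the
# environment (environment values win), then read the resolved settings.
# The output depends on the local setup, so this is illustrative rather
# than deterministic.
if __name__ == "__main__":
    profile = Profile.from_config_file_and_env()
    print(profile.api_url)
    print(profile.default_region)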
|
/scaleway_core-1.1.0.tar.gz/scaleway_core-1.1.0/scaleway_core/profile/profile.py
| 0.732305 | 0.184418 |
profile.py
|
pypi
|
from typing import Any, Awaitable, Callable, Dict, List, Optional, Type, TypeVar
T = TypeVar("T")
def _build_fetcher_args(
args: Dict[str, Any],
page: Optional[int],
) -> Dict[str, Any]:
"""
Builds the arguments to pass to the fetcher function.
"""
return {
**args,
"page": page or args.get("page") or 1,
}
def fetch_all_pages(
type: Type[T],
key: str,
fetcher: Callable[..., T],
args: Dict[str, Any],
page: Optional[int] = None,
) -> List[Any]:
"""
:param key: The key to use to get the list of items from the response
:param fetcher: The function to call to fetch the response
:return: The list of items
"""
fetcher_args = _build_fetcher_args(args, page)
page = fetcher_args.get("page") or 1
page_size = fetcher_args.get("page_size")
data = fetcher(**fetcher_args)
if not data:
return []
items: List[Any] = getattr(data, key)
if page_size is not None and len(items) < page_size:
return items
if not items:
return items
return items + fetch_all_pages(
type=type,
key=key,
fetcher=fetcher,
args=args,
page=page + 1,
)
async def fetch_all_pages_async(
type: Type[T],
key: str,
fetcher: Callable[..., Awaitable[T]],
args: Dict[str, Any],
page: Optional[int] = None,
) -> List[Any]:
"""
:param key: The key to use to get the list of items from the response
:param fetcher: The function to call to fetch the response
:return: The list of items
"""
fetcher_args = _build_fetcher_args(args, page)
page = fetcher_args.get("page") or 1
page_size = fetcher_args.get("page_size")
data = await fetcher(**fetcher_args)
if not data:
return []
items: List[Any] = getattr(data, key)
if page_size is not None and len(items) < page_size:
return items
if not items:
return items
return items + await fetch_all_pages_async(
type=type,
key=key,
fetcher=fetcher,
args=args,
page=page + 1,
)
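# A minimal sketch of the pagination helper with an in-memory fetcher.
# `Page` and `fake_fetcher` are hypothetical stand-ins for an SDK response
# type and a list_* API method; recursion stops when a page comes back
# shorter than `page_size`.
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class Page:
        items: List[int]

    def fake_fetcher(page: int, page_size: int) -> Page:
        data = list(range(5))
        start = (page - 1) * page_size
        return Page(items=data[start : start + page_size])

    all_items = fetch_all_pages(
        type=Page,
        key="items",
        fetcher=fake_fetcher,
        args={"page_size": 2},
    )
    assert all_items == [0, 1, 2, 3, 4]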
|
/scaleway_core-1.1.0.tar.gz/scaleway_core-1.1.0/scaleway_core/utils/fetch_all_pages.py
| 0.898873 | 0.303138 |
fetch_all_pages.py
|
pypi
|
import asyncio
import inspect
import math
import random
import time
from dataclasses import dataclass
from typing import (
Any,
Awaitable,
Callable,
Dict,
Generator,
Generic,
Optional,
TypeVar,
Union,
)
system_random = random.SystemRandom()
T = TypeVar("T")
U = TypeVar("U")
WaitForStopCondition = Callable[[T], U]
@dataclass
class WaitForOptions(Generic[T, U]):
"""
The options to wait until a resource is ready.
"""
timeout: float = 300
"""
Timeout in seconds.
:default: 300 seconds (5 minutes).
"""
min_delay: float = 1
"""
The minimum delay before the next try in seconds.
:default: 1 second.
"""
max_delay: float = 30
"""
The maximum delay before the next try in seconds.
:default: 30 seconds.
"""
stop: Optional[WaitForStopCondition[T, U]] = None
"""
The condition to stop trying.
:default: Waits for non-transient value.
"""
def _exponential_backoff_strategy(
min_delay: float,
max_delay: float,
) -> Generator[float, None, None]:
"""
Generates a sequence of delays using an exponential backoff strategy.
:param min_delay: The minimum delay before the next try in seconds.
:param max_delay: The maximum delay before the next try in seconds.
"""
if min_delay < 1:
raise ValueError("min_delay must be greater than 1")
if max_delay < min_delay:
raise ValueError("max_delay must be greater than min_delay")
    max_attempts = math.log(max_delay / min_delay) / math.log(2) + 1
    attempt = 1
    while True:
        if attempt > max_attempts:
            yield max_delay
        else:
            yield system_random.uniform(min_delay, min_delay * 2 ** (attempt - 1))
        attempt += 1
def _delayed_loop(options: WaitForOptions[Any, Any]) -> Generator[float, None, None]:
strategy = _exponential_backoff_strategy(options.min_delay, options.max_delay)
timeout_time = time.time() + options.timeout
while time.time() <= timeout_time:
delay = next(strategy)
if timeout_time <= (time.time() + delay):
break
yield delay
raise TimeoutError("Timeout while waiting for resource")
async def wait_for_resource_async(
fetcher: Callable[..., Awaitable[T]],
options: WaitForOptions[T, Union[bool, Awaitable[bool]]],
args: Dict[str, Any],
) -> T:
"""
Fetches resource several times until an expected condition is reached, timeouts, or throws an exception.
"""
if options.stop is None:
raise ValueError("options.stop is required")
for delay in _delayed_loop(options):
await asyncio.sleep(delay)
resource = await fetcher(**args)
should_stop = options.stop(resource)
if inspect.isawaitable(should_stop):
should_stop = await should_stop
if should_stop:
return resource
raise TimeoutError("Timeout while waiting for resource")
def wait_for_resource(
fetcher: Callable[..., T],
options: WaitForOptions[T, bool],
args: Dict[str, Any],
) -> T:
"""
Fetches resource several times until an expected condition is reached, timeouts, or throws an exception.
"""
if options.stop is None:
raise ValueError("options.stop is required")
for delay in _delayed_loop(options):
time.sleep(delay)
resource = fetcher(**args)
if options.stop(resource):
return resource
raise TimeoutError("Timeout while waiting for resource")
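# A minimal sketch of the synchronous waiter: a fetcher that becomes "ready"
# after a few calls, and a stop condition on the returned value. The counter
# is a hypothetical stand-in for fetching a real resource; expect a couple of
# seconds of backoff sleeps when running this.
if __name__ == "__main__":
    calls = {"n": 0}

    def fetch_status() -> str:
        calls["n"] += 1
        return "ready" if calls["n"] >= 3 else "starting"

    status = wait_for_resource(
        fetcher=fetch_status,
        options=WaitForOptions(timeout=60, stop=lambda s: s == "ready"),
        args={},
    )
    assert status == "ready"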
|
/scaleway_core-1.1.0.tar.gz/scaleway_core-1.1.0/scaleway_core/utils/waiter.py
| 0.851876 | 0.364382 |
waiter.py
|
pypi
|
import random
ADJECTIVES = [
"admiring",
"adoring",
"affectionate",
"agitated",
"amazing",
"angry",
"awesome",
"beautiful",
"blissful",
"bold",
"boring",
"brave",
"busy",
"charming",
"clever",
"cool",
"compassionate",
"competent",
"condescending",
"confident",
"cranky",
"crazy",
"dazzling",
"determined",
"distracted",
"dreamy",
"eager",
"ecstatic",
"elastic",
"elated",
"elegant",
"eloquent",
"epic",
"exciting",
"fervent",
"festive",
"flamboyant",
"focused",
"friendly",
"frosty",
"funny",
"gallant",
"gifted",
"goofy",
"gracious",
"great",
"happy",
"hardcore",
"heuristic",
"hopeful",
"hungry",
"infallible",
"inspiring",
"interesting",
"intelligent",
"jolly",
"jovial",
"keen",
"kind",
"laughing",
"loving",
"lucid",
"magical",
"mystifying",
"modest",
"musing",
"naughty",
"nervous",
"nice",
"nifty",
"nostalgic",
"objective",
"optimistic",
"peaceful",
"pedantic",
"pensive",
"practical",
"priceless",
"quirky",
"quizzical",
"recursing",
"relaxed",
"reverent",
"romantic",
"sad",
"serene",
"sharp",
"silly",
"sleepy",
"stoic",
"strange",
"stupefied",
"suspicious",
"sweet",
"tender",
"thirsty",
"trusting",
"unruffled",
"upbeat",
"vibrant",
"vigilant",
"vigorous",
"wizardly",
"wonderful",
"xenodochial",
"youthful",
"zealous",
"zen",
]
NAMES = [
# Muhammad ibn Jābir al-Ḥarrānī al-Battānī was a founding father of astronomy. https://en.wikipedia.org/wiki/Mu%E1%B8%A5ammad_ibn_J%C4%81bir_al-%E1%B8%A4arr%C4%81n%C4%AB_al-Batt%C4%81n%C4%AB
"albattani",
# Frances E. Allen, became the first female IBM Fellow in 1989. In 2006, she became the first female recipient of the ACM's Turing Award. https://en.wikipedia.org/wiki/Frances_E._Allen
"allen",
# June Almeida - Scottish virologist who took the first pictures of the rubella virus - https://en.wikipedia.org/wiki/June_Almeida
"almeida",
# Kathleen Antonelli, American computer programmer and one of the six original programmers of the ENIAC - https://en.wikipedia.org/wiki/Kathleen_Antonelli
"antonelli",
# Maria Gaetana Agnesi - Italian mathematician, philosopher, theologian and humanitarian. She was the first woman to write a mathematics handbook and the first woman appointed as a Mathematics Professor at a University. https://en.wikipedia.org/wiki/Maria_Gaetana_Agnesi
"agnesi",
# Archimedes was a physicist, engineer and mathematician who invented too many things to list them here. https://en.wikipedia.org/wiki/Archimedes
"archimedes",
# Maria Ardinghelli - Italian translator, mathematician and physicist - https://en.wikipedia.org/wiki/Maria_Ardinghelli
"ardinghelli",
# Aryabhata - Ancient Indian mathematician-astronomer during 476-550 CE https://en.wikipedia.org/wiki/Aryabhata
"aryabhata",
# Wanda Austin - Wanda Austin is the President and CEO of The Aerospace Corporation, a leading architect for the US security space programs. https://en.wikipedia.org/wiki/Wanda_Austin
"austin",
# Charles Babbage invented the concept of a programmable computer. https://en.wikipedia.org/wiki/Charles_Babbage.
"babbage",
# Stefan Banach - Polish mathematician, was one of the founders of modern functional analysis. https://en.wikipedia.org/wiki/Stefan_Banach
"banach",
# Buckaroo Banzai and his mentor Dr. Hikita perfected the "oscillation overthruster", a device that allows one to pass through solid matter. - https://en.wikipedia.org/wiki/The_Adventures_of_Buckaroo_Banzai_Across_the_8th_Dimension
"banzai",
# John Bardeen co-invented the transistor - https://en.wikipedia.org/wiki/John_Bardeen
"bardeen",
# Jean Bartik, born Betty Jean Jennings, was one of the original programmers for the ENIAC computer. https://en.wikipedia.org/wiki/Jean_Bartik
"bartik",
# Laura Bassi, the world's first female professor https://en.wikipedia.org/wiki/Laura_Bassi
"bassi",
# Hugh Beaver, British engineer, founder of the Guinness Book of World Records https://en.wikipedia.org/wiki/Hugh_Beaver
"beaver",
# Alexander Graham Bell - an eminent Scottish-born scientist, inventor, engineer and innovator who is credited with inventing the first practical telephone - https://en.wikipedia.org/wiki/Alexander_Graham_Bell
"bell",
# Karl Friedrich Benz - a German automobile engineer. Inventor of the first practical motorcar. https://en.wikipedia.org/wiki/Karl_Benz
"benz",
# Homi J Bhabha - was an Indian nuclear physicist, founding director, and professor of physics at the Tata Institute of Fundamental Research. Colloquially known as "father of Indian nuclear programme"- https://en.wikipedia.org/wiki/Homi_J._Bhabha
"bhabha",
# Bhaskara II - Ancient Indian mathematician-astronomer whose work on calculus predates Newton and Leibniz by over half a millennium - https://en.wikipedia.org/wiki/Bh%C4%81skara_II#Calculus
"bhaskara",
# Sue Black - British computer scientist and campaigner. She has been instrumental in saving Bletchley Park, the site of World War II codebreaking - https://en.wikipedia.org/wiki/Sue_Black_(computer_scientist)
"black",
# Elizabeth Helen Blackburn - Australian-American Nobel laureate; best known for co-discovering telomerase. https://en.wikipedia.org/wiki/Elizabeth_Blackburn
"blackburn",
# Elizabeth Blackwell - American doctor and first American woman to receive a medical degree - https://en.wikipedia.org/wiki/Elizabeth_Blackwell
"blackwell",
# Niels Bohr is the father of quantum theory. https://en.wikipedia.org/wiki/Niels_Bohr.
"bohr",
# Kathleen Booth, she's credited with writing the first assembly language. https://en.wikipedia.org/wiki/Kathleen_Booth
"booth",
# Anita Borg - Anita Borg was the founding director of the Institute for Women and Technology (IWT). https://en.wikipedia.org/wiki/Anita_Borg
"borg",
# Satyendra Nath Bose - He provided the foundation for Bose–Einstein statistics and the theory of the Bose–Einstein condensate. - https://en.wikipedia.org/wiki/Satyendra_Nath_Bose
"bose",
    # Katherine Louise Bouman is an imaging scientist and Assistant Professor of Computer Science at the California Institute of Technology. She researches computational methods for imaging, and developed an algorithm that made possible the first visualization of a black hole using the Event Horizon Telescope. - https://en.wikipedia.org/wiki/Katie_Bouman
"bouman",
    # Evelyn Boyd Granville - She was one of the first African-American women to receive a Ph.D. in mathematics; she earned it in 1949 from Yale University. https://en.wikipedia.org/wiki/Evelyn_Boyd_Granville
"boyd",
# Brahmagupta - Ancient Indian mathematician during 598-670 CE who gave rules to compute with zero - https://en.wikipedia.org/wiki/Brahmagupta#Zero
"brahmagupta",
# Walter Houser Brattain co-invented the transistor - https://en.wikipedia.org/wiki/Walter_Houser_Brattain
"brattain",
# Emmett Brown invented time travel. https://en.wikipedia.org/wiki/Emmett_Brown (thanks Brian Goff)
"brown",
# Linda Brown Buck - American biologist and Nobel laureate best known for her genetic and molecular analyses of the mechanisms of smell. https://en.wikipedia.org/wiki/Linda_B._Buck
"buck",
# Dame Susan Jocelyn Bell Burnell - Northern Irish astrophysicist who discovered radio pulsars and was the first to analyse them. https://en.wikipedia.org/wiki/Jocelyn_Bell_Burnell
"burnell",
# Annie Jump Cannon - pioneering female astronomer who classified hundreds of thousands of stars and created the system we use to understand stars today. https://en.wikipedia.org/wiki/Annie_Jump_Cannon
"cannon",
# Rachel Carson - American marine biologist and conservationist, her book Silent Spring and other writings are credited with advancing the global environmental movement. https://en.wikipedia.org/wiki/Rachel_Carson
"carson",
# Dame Mary Lucy Cartwright - British mathematician who was one of the first to study what is now known as chaos theory. Also known for Cartwright's theorem which finds applications in signal processing. https://en.wikipedia.org/wiki/Mary_Cartwright
"cartwright",
# George Washington Carver - American agricultural scientist and inventor. He was the most prominent black scientist of the early 20th century. https://en.wikipedia.org/wiki/George_Washington_Carver
"carver",
# Vinton Gray Cerf - American Internet pioneer, recognised as one of "the fathers of the Internet". With Robert Elliot Kahn, he designed TCP and IP, the primary data communication protocols of the Internet and other computer networks. https://en.wikipedia.org/wiki/Vint_Cerf
"cerf",
    # Subrahmanyan Chandrasekhar - Astrophysicist known for his mathematical theory on different stages and evolution in structures of the stars. He won the Nobel Prize in Physics - https://en.wikipedia.org/wiki/Subrahmanyan_Chandrasekhar
"chandrasekhar",
# Sergey Alexeyevich Chaplygin (Russian: Серге́й Алексе́евич Чаплы́гин; April 5, 1869 – October 8, 1942) was a Russian and Soviet physicist, mathematician, and mechanical engineer. He is known for mathematical formulas such as Chaplygin's equation and for a hypothetical substance in cosmology called Chaplygin gas, named after him. https://en.wikipedia.org/wiki/Sergey_Chaplygin
"chaplygin",
# Émilie du Châtelet - French natural philosopher, mathematician, physicist, and author during the early 1730s, known for her translation of and commentary on Isaac Newton's book Principia containing basic laws of physics. https://en.wikipedia.org/wiki/%C3%89milie_du_Ch%C3%A2telet
"chatelet",
# Asima Chatterjee was an Indian organic chemist noted for her research on vinca alkaloids, development of drugs for treatment of epilepsy and malaria - https://en.wikipedia.org/wiki/Asima_Chatterjee
"chatterjee",
    # Pafnuty Chebyshev - Russian mathematician. He is known for his work on probability, statistics, mechanics, analytical geometry and number theory https://en.wikipedia.org/wiki/Pafnuty_Chebyshev
"chebyshev",
# Bram Cohen - American computer programmer and author of the BitTorrent peer-to-peer protocol. https://en.wikipedia.org/wiki/Bram_Cohen
"cohen",
# David Lee Chaum - American computer scientist and cryptographer. Known for his seminal contributions in the field of anonymous communication. https://en.wikipedia.org/wiki/David_Chaum
"chaum",
# Joan Clarke - Bletchley Park code breaker during the Second World War who pioneered techniques that remained top secret for decades. Also an accomplished numismatist https://en.wikipedia.org/wiki/Joan_Clarke
"clarke",
# Jane Colden - American botanist widely considered the first female American botanist - https://en.wikipedia.org/wiki/Jane_Colden
"colden",
# Gerty Theresa Cori - American biochemist who became the third woman—and first American woman—to win a Nobel Prize in science, and the first woman to be awarded the Nobel Prize in Physiology or Medicine. Cori was born in Prague. https://en.wikipedia.org/wiki/Gerty_Cori
"cori",
# Seymour Roger Cray was an American electrical engineer and supercomputer architect who designed a series of computers that were the fastest in the world for decades. https://en.wikipedia.org/wiki/Seymour_Cray
"cray",
# This entry reflects a husband and wife team who worked together:
# Joan Curran was a Welsh scientist who developed radar and invented chaff, a radar countermeasure. https://en.wikipedia.org/wiki/Joan_Curran
# Samuel Curran was an Irish physicist who worked alongside his wife during WWII and invented the proximity fuse. https://en.wikipedia.org/wiki/Samuel_Curran
"curran",
# Marie Curie discovered radioactivity. https://en.wikipedia.org/wiki/Marie_Curie.
"curie",
# Charles Darwin established the principles of natural evolution. https://en.wikipedia.org/wiki/Charles_Darwin.
"darwin",
# Leonardo Da Vinci invented too many things to list here. https://en.wikipedia.org/wiki/Leonardo_da_Vinci.
"davinci",
# A. K. (Alexander Keewatin) Dewdney, Canadian mathematician, computer scientist, author and filmmaker. Contributor to Scientific American's "Computer Recreations" from 1984 to 1991. Author of Core War (program), The Planiverse, The Armchair Universe, The Magic Machine, The New Turing Omnibus, and more. https://en.wikipedia.org/wiki/Alexander_Dewdney
"dewdney",
# Satish Dhawan - Indian mathematician and aerospace engineer, known for leading the successful and indigenous development of the Indian space programme. https://en.wikipedia.org/wiki/Satish_Dhawan
"dhawan",
# Bailey Whitfield Diffie - American cryptographer and one of the pioneers of public-key cryptography. https://en.wikipedia.org/wiki/Whitfield_Diffie
"diffie",
# Edsger Wybe Dijkstra was a Dutch computer scientist and mathematical scientist. https://en.wikipedia.org/wiki/Edsger_W._Dijkstra.
"dijkstra",
# Paul Adrien Maurice Dirac - English theoretical physicist who made fundamental contributions to the early development of both quantum mechanics and quantum electrodynamics. https://en.wikipedia.org/wiki/Paul_Dirac
"dirac",
# Agnes Meyer Driscoll - American cryptanalyst during World Wars I and II who successfully cryptanalysed a number of Japanese ciphers. She was also the co-developer of one of the cipher machines of the US Navy, the CM. https://en.wikipedia.org/wiki/Agnes_Meyer_Driscoll
"driscoll",
# Donna Dubinsky - played an integral role in the development of personal digital assistants (PDAs) serving as CEO of Palm, Inc. and co-founding Handspring. https://en.wikipedia.org/wiki/Donna_Dubinsky
"dubinsky",
# Annie Easley - She was a leading member of the team which developed software for the Centaur rocket stage and one of the first African-Americans in her field. https://en.wikipedia.org/wiki/Annie_Easley
"easley",
# Thomas Alva Edison, prolific inventor https://en.wikipedia.org/wiki/Thomas_Edison
"edison",
# Albert Einstein invented the general theory of relativity. https://en.wikipedia.org/wiki/Albert_Einstein
"einstein",
# Alexandra Asanovna Elbakyan (Russian: Алекса́ндра Аса́новна Элбакя́н) is a Kazakhstani graduate student, computer programmer, internet pirate in hiding, and the creator of the site Sci-Hub. Nature has listed her in 2016 in the top ten people that mattered in science, and Ars Technica has compared her to Aaron Swartz. - https://en.wikipedia.org/wiki/Alexandra_Elbakyan
"elbakyan",
# Taher A. ElGamal - Egyptian cryptographer best known for the ElGamal discrete log cryptosystem and the ElGamal digital signature scheme. https://en.wikipedia.org/wiki/Taher_Elgamal
"elgamal",
# Gertrude Elion - American biochemist, pharmacologist and the 1988 recipient of the Nobel Prize in Medicine - https://en.wikipedia.org/wiki/Gertrude_Elion
"elion",
# James Henry Ellis - British engineer and cryptographer employed by the GCHQ. Best known for conceiving for the first time, the idea of public-key cryptography. https://en.wikipedia.org/wiki/James_H._Ellis
"ellis",
# Douglas Engelbart gave the mother of all demos: https://en.wikipedia.org/wiki/Douglas_Engelbart
"engelbart",
# Euclid invented geometry. https://en.wikipedia.org/wiki/Euclid
"euclid",
# Leonhard Euler invented large parts of modern mathematics. https://de.wikipedia.org/wiki/Leonhard_Euler
"euler",
# Michael Faraday - British scientist who contributed to the study of electromagnetism and electrochemistry. https://en.wikipedia.org/wiki/Michael_Faraday
"faraday",
# Horst Feistel - German-born American cryptographer who was one of the earliest non-government researchers to study the design and theory of block ciphers. Co-developer of DES and Lucifer. Feistel networks, a symmetric structure used in the construction of block ciphers are named after him. https://en.wikipedia.org/wiki/Horst_Feistel
"feistel",
# Pierre de Fermat pioneered several aspects of modern mathematics. https://en.wikipedia.org/wiki/Pierre_de_Fermat
"fermat",
# Enrico Fermi invented the first nuclear reactor. https://en.wikipedia.org/wiki/Enrico_Fermi.
"fermi",
# Richard Feynman was a key contributor to quantum mechanics and particle physics. https://en.wikipedia.org/wiki/Richard_Feynman
"feynman",
    # Benjamin Franklin is famous for his experiments in electricity and the invention of the lightning rod. https://en.wikipedia.org/wiki/Benjamin_Franklin
"franklin",
# Yuri Alekseyevich Gagarin - Soviet pilot and cosmonaut, best known as the first human to journey into outer space. https://en.wikipedia.org/wiki/Yuri_Gagarin
"gagarin",
# Galileo was a founding father of modern astronomy, and faced politics and obscurantism to establish scientific truth. https://en.wikipedia.org/wiki/Galileo_Galilei
"galileo",
# Évariste Galois - French mathematician whose work laid the foundations of Galois theory and group theory, two major branches of abstract algebra, and the subfield of Galois connections, all while still in his late teens. https://en.wikipedia.org/wiki/%C3%89variste_Galois
"galois",
# Kadambini Ganguly - Indian physician, known for being the first South Asian female physician, trained in western medicine, to graduate in South Asia. https://en.wikipedia.org/wiki/Kadambini_Ganguly
"ganguly",
# William Henry "Bill" Gates III is an American business magnate, philanthropist, investor, computer programmer, and inventor. https://en.wikipedia.org/wiki/Bill_Gates
"gates",
# Johann Carl Friedrich Gauss - German mathematician who made significant contributions to many fields, including number theory, algebra, statistics, analysis, differential geometry, geodesy, geophysics, mechanics, electrostatics, magnetic fields, astronomy, matrix theory, and optics. https://en.wikipedia.org/wiki/Carl_Friedrich_Gauss
"gauss",
# Marie-Sophie Germain - French mathematician, physicist and philosopher. Known for her work on elasticity theory, number theory and philosophy. https://en.wikipedia.org/wiki/Sophie_Germain
"germain",
    # Adele Goldberg was one of the designers and developers of the Smalltalk language. https://en.wikipedia.org/wiki/Adele_Goldberg_(computer_scientist)
"goldberg",
# Adele Goldstine, born Adele Katz, wrote the complete technical description for the first electronic digital computer, ENIAC. https://en.wikipedia.org/wiki/Adele_Goldstine
"goldstine",
# Shafi Goldwasser is a computer scientist known for creating theoretical foundations of modern cryptography. Winner of 2012 ACM Turing Award. https://en.wikipedia.org/wiki/Shafi_Goldwasser
"goldwasser",
# James Golick, all around gangster.
"golick",
# Jane Goodall - British primatologist, ethologist, and anthropologist who is considered to be the world's foremost expert on chimpanzees - https://en.wikipedia.org/wiki/Jane_Goodall
"goodall",
    # Stephen Jay Gould was an American paleontologist, evolutionary biologist, and historian of science. He is most famous for the theory of punctuated equilibrium - https://en.wikipedia.org/wiki/Stephen_Jay_Gould
"gould",
# Carolyn Widney Greider - American molecular biologist and joint winner of the 2009 Nobel Prize for Physiology or Medicine for the discovery of telomerase. https://en.wikipedia.org/wiki/Carol_W._Greider
"greider",
# Alexander Grothendieck - German-born French mathematician who became a leading figure in the creation of modern algebraic geometry. https://en.wikipedia.org/wiki/Alexander_Grothendieck
"grothendieck",
# Lois Haibt - American computer scientist, part of the team at IBM that developed FORTRAN - https://en.wikipedia.org/wiki/Lois_Haibt
"haibt",
# Margaret Hamilton - Director of the Software Engineering Division of the MIT Instrumentation Laboratory, which developed on-board flight software for the Apollo space program. https://en.wikipedia.org/wiki/Margaret_Hamilton_(scientist)
"hamilton",
# Caroline Harriet Haslett - English electrical engineer, electricity industry administrator and champion of women's rights. Co-author of British Standard 1363 that specifies AC power plugs and sockets used across the United Kingdom (which is widely considered as one of the safest designs). https://en.wikipedia.org/wiki/Caroline_Haslett
"haslett",
# Stephen Hawking pioneered the field of cosmology by combining general relativity and quantum mechanics. https://en.wikipedia.org/wiki/Stephen_Hawking
"hawking",
# Martin Edward Hellman - American cryptologist, best known for his invention of public-key cryptography in co-operation with Whitfield Diffie and Ralph Merkle. https://en.wikipedia.org/wiki/Martin_Hellman
"hellman",
# Werner Heisenberg was a founding father of quantum mechanics. https://en.wikipedia.org/wiki/Werner_Heisenberg
"heisenberg",
# Grete Hermann was a German philosopher noted for her philosophical work on the foundations of quantum mechanics. https://en.wikipedia.org/wiki/Grete_Hermann
"hermann",
# Caroline Lucretia Herschel - German astronomer and discoverer of several comets. https://en.wikipedia.org/wiki/Caroline_Herschel
"herschel",
# Heinrich Rudolf Hertz - German physicist who first conclusively proved the existence of the electromagnetic waves. https://en.wikipedia.org/wiki/Heinrich_Hertz
"hertz",
# Jaroslav Heyrovský was the inventor of the polarographic method, father of the electroanalytical method, and recipient of the Nobel Prize in 1959. His main field of work was polarography. https://en.wikipedia.org/wiki/Jaroslav_Heyrovsk%C3%BD
"heyrovsky",
# Dorothy Hodgkin was a British biochemist, credited with the development of protein crystallography. She was awarded the Nobel Prize in Chemistry in 1964. https://en.wikipedia.org/wiki/Dorothy_Hodgkin
"hodgkin",
    # Douglas R. Hofstadter is an American professor of cognitive science and author of Goedel, Escher, Bach: An Eternal Golden Braid (1979), winner of the Pulitzer Prize and the American Book Award. A mind-bending work which coined Hofstadter's Law: "It always takes longer than you expect, even when you take into account Hofstadter's Law." https://en.wikipedia.org/wiki/Douglas_Hofstadter
"hofstadter",
# Erna Schneider Hoover revolutionized modern communication by inventing a computerized telephone switching method. https://en.wikipedia.org/wiki/Erna_Schneider_Hoover
"hoover",
# Grace Hopper developed the first compiler for a computer programming language and is credited with popularizing the term "debugging" for fixing computer glitches. https://en.wikipedia.org/wiki/Grace_Hopper
"hopper",
# Frances Hugle, she was an American scientist, engineer, and inventor who contributed to the understanding of semiconductors, integrated circuitry, and the unique electrical principles of microscopic materials. https://en.wikipedia.org/wiki/Frances_Hugle
"hugle",
# Hypatia - Greek Alexandrine Neoplatonist philosopher in Egypt who was one of the earliest mothers of mathematics - https://en.wikipedia.org/wiki/Hypatia
"hypatia",
# Teruko Ishizaka - Japanese scientist and immunologist who co-discovered the antibody class Immunoglobulin E. https://en.wikipedia.org/wiki/Teruko_Ishizaka
"ishizaka",
# Mary Jackson, American mathematician and aerospace engineer who earned the highest title within NASA's engineering department - https://en.wikipedia.org/wiki/Mary_Jackson_(engineer)
"jackson",
# Yeong-Sil Jang was a Korean scientist and astronomer during the Joseon Dynasty; he invented the first metal printing press and water gauge. https://en.wikipedia.org/wiki/Jang_Yeong-sil
"jang",
    # Mae Carol Jemison - American engineer, physician, and former NASA astronaut. She became the first black woman to travel in space when she served as a mission specialist aboard the Space Shuttle Endeavour - https://en.wikipedia.org/wiki/Mae_Jemison
"jemison",
# Betty Jennings - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Jean_Bartik
"jennings",
# Mary Lou Jepsen, was the founder and chief technology officer of One Laptop Per Child (OLPC), and the founder of Pixel Qi. https://en.wikipedia.org/wiki/Mary_Lou_Jepsen
"jepsen",
    # Katherine Coleman Goble Johnson - American physicist and mathematician whose calculations of orbital mechanics were critical to NASA's crewed spaceflights. https://en.wikipedia.org/wiki/Katherine_Johnson
"johnson",
# Irène Joliot-Curie - French scientist who was awarded the Nobel Prize for Chemistry in 1935. Daughter of Marie and Pierre Curie. https://en.wikipedia.org/wiki/Ir%C3%A8ne_Joliot-Curie
"joliot",
# Karen Spärck Jones came up with the concept of inverse document frequency, which is used in most search engines today. https://en.wikipedia.org/wiki/Karen_Sp%C3%A4rck_Jones
"jones",
    # A. P. J. Abdul Kalam - Indian scientist, known as the Missile Man of India for his work on the development of ballistic missile and launch vehicle technology - https://en.wikipedia.org/wiki/A._P._J._Abdul_Kalam
"kalam",
# Sergey Petrovich Kapitsa (Russian: Серге́й Петро́вич Капи́ца; 14 February 1928 – 14 August 2012) was a Russian physicist and demographer. He was best known as host of the popular and long-running Russian scientific TV show, Evident, but Incredible. His father was the Nobel laureate Soviet-era physicist Pyotr Kapitsa, and his brother was the geographer and Antarctic explorer Andrey Kapitsa. - https://en.wikipedia.org/wiki/Sergey_Kapitsa
"kapitsa",
# Susan Kare, created the icons and many of the interface elements for the original Apple Macintosh in the 1980s, and was an original employee of NeXT, working as the Creative Director. https://en.wikipedia.org/wiki/Susan_Kare
"kare",
# Mstislav Keldysh - a Soviet scientist in the field of mathematics and mechanics, academician of the USSR Academy of Sciences (1946), President of the USSR Academy of Sciences (1961–1975), three times Hero of Socialist Labor (1956, 1961, 1971), fellow of the Royal Society of Edinburgh (1968). https://en.wikipedia.org/wiki/Mstislav_Keldysh
"keldysh",
# Mary Kenneth Keller, Sister Mary Kenneth Keller became the first American woman to earn a PhD in Computer Science in 1965. https://en.wikipedia.org/wiki/Mary_Kenneth_Keller
"keller",
# Johannes Kepler, German astronomer known for his three laws of planetary motion - https://en.wikipedia.org/wiki/Johannes_Kepler
"kepler",
# Omar Khayyam - Persian mathematician, astronomer and poet. Known for his work on the classification and solution of cubic equations, for his contribution to the understanding of Euclid's fifth postulate and for computing the length of a year very accurately. https://en.wikipedia.org/wiki/Omar_Khayyam
"khayyam",
# Har Gobind Khorana - Indian-American biochemist who shared the 1968 Nobel Prize for Physiology - https://en.wikipedia.org/wiki/Har_Gobind_Khorana
"khorana",
# Jack Kilby invented silicon integrated circuits and gave Silicon Valley its name. - https://en.wikipedia.org/wiki/Jack_Kilby
"kilby",
# Maria Kirch - German astronomer and first woman to discover a comet - https://en.wikipedia.org/wiki/Maria_Margarethe_Kirch
"kirch",
# Donald Knuth - American computer scientist, author of "The Art of Computer Programming" and creator of the TeX typesetting system. https://en.wikipedia.org/wiki/Donald_Knuth
"knuth",
# Sophie Kowalevski - Russian mathematician responsible for important original contributions to analysis, differential equations and mechanics - https://en.wikipedia.org/wiki/Sofia_Kovalevskaya
"kowalevski",
# Marie-Jeanne de Lalande - French astronomer, mathematician and cataloguer of stars - https://en.wikipedia.org/wiki/Marie-Jeanne_de_Lalande
"lalande",
# Hedy Lamarr - Actress and inventor. The principles of her work are now incorporated into modern Wi-Fi, CDMA and Bluetooth technology. https://en.wikipedia.org/wiki/Hedy_Lamarr
"lamarr",
# Leslie B. Lamport - American computer scientist. Lamport is best known for his seminal work in distributed systems and was the winner of the 2013 Turing Award. https://en.wikipedia.org/wiki/Leslie_Lamport
"lamport",
# Mary Leakey - British paleoanthropologist who discovered the first fossilized Proconsul skull - https://en.wikipedia.org/wiki/Mary_Leakey
"leakey",
# Henrietta Swan Leavitt - she was an American astronomer who discovered the relation between the luminosity and the period of Cepheid variable stars. https://en.wikipedia.org/wiki/Henrietta_Swan_Leavitt
"leavitt",
# Esther Miriam Zimmer Lederberg - American microbiologist and a pioneer of bacterial genetics. https://en.wikipedia.org/wiki/Esther_Lederberg
"lederberg",
# Inge Lehmann - Danish seismologist and geophysicist. Known for discovering in 1936 that the Earth has a solid inner core inside a molten outer core. https://en.wikipedia.org/wiki/Inge_Lehmann
"lehmann",
    # Daniel Lewin - Mathematician, Akamai co-founder, soldier, and 9/11 victim. Developed optimization techniques for routing traffic on the internet. Died attempting to stop the 9/11 hijackers. https://en.wikipedia.org/wiki/Daniel_Lewin
"lewin",
# Ruth Lichterman - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Ruth_Teitelbaum
"lichterman",
    # Barbara Liskov - co-developed the Liskov substitution principle. Liskov was also the winner of the Turing Award in 2008. - https://en.wikipedia.org/wiki/Barbara_Liskov
"liskov",
# Ada Lovelace invented the first algorithm. https://en.wikipedia.org/wiki/Ada_Lovelace (thanks James Turnbull)
"lovelace",
# Auguste and Louis Lumière - the first filmmakers in history - https://en.wikipedia.org/wiki/Auguste_and_Louis_Lumi%C3%A8re
"lumiere",
# Mahavira - Ancient Indian mathematician during 9th century AD who discovered basic algebraic identities - https://en.wikipedia.org/wiki/Mah%C4%81v%C4%ABra_(mathematician)
"mahavira",
# Lynn Margulis (b. Lynn Petra Alexander) - an American evolutionary theorist and biologist, science author, educator, and popularizer, and was the primary modern proponent for the significance of symbiosis in evolution. - https://en.wikipedia.org/wiki/Lynn_Margulis
"margulis",
# Yukihiro Matsumoto - Japanese computer scientist and software programmer best known as the chief designer of the Ruby programming language. https://en.wikipedia.org/wiki/Yukihiro_Matsumoto
"matsumoto",
# James Clerk Maxwell - Scottish physicist, best known for his formulation of electromagnetic theory. https://en.wikipedia.org/wiki/James_Clerk_Maxwell
"maxwell",
# Maria Mayer - American theoretical physicist and Nobel laureate in Physics for proposing the nuclear shell model of the atomic nucleus - https://en.wikipedia.org/wiki/Maria_Mayer
"mayer",
# John McCarthy invented LISP: https://en.wikipedia.org/wiki/John_McCarthy_(computer_scientist)
"mccarthy",
# Barbara McClintock - a distinguished American cytogeneticist, 1983 Nobel Laureate in Physiology or Medicine for discovering transposons. https://en.wikipedia.org/wiki/Barbara_McClintock
"mcclintock",
# Anne Laura Dorinthea McLaren - British developmental biologist whose work helped lead to human in-vitro fertilisation. https://en.wikipedia.org/wiki/Anne_McLaren
"mclaren",
# Malcolm McLean invented the modern shipping container: https://en.wikipedia.org/wiki/Malcom_McLean
"mclean",
# Kay McNulty - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Kathleen_Antonelli
"mcnulty",
# Gregor Johann Mendel - Czech scientist and founder of genetics. https://en.wikipedia.org/wiki/Gregor_Mendel
"mendel",
# Dmitri Mendeleev - a chemist and inventor. He formulated the Periodic Law, created a farsighted version of the periodic table of elements, and used it to correct the properties of some already discovered elements and also to predict the properties of eight elements yet to be discovered. https://en.wikipedia.org/wiki/Dmitri_Mendeleev
"mendeleev",
# Lise Meitner - Austrian/Swedish physicist who was involved in the discovery of nuclear fission. The element meitnerium is named after her - https://en.wikipedia.org/wiki/Lise_Meitner
"meitner",
# Carla Meninsky, was the game designer and programmer for Atari 2600 games Dodge 'Em and Warlords. https://en.wikipedia.org/wiki/Carla_Meninsky
"meninsky",
# Ralph C. Merkle - American computer scientist, known for devising Merkle's puzzles - one of the very first schemes for public-key cryptography. Also, inventor of Merkle trees and co-inventor of the Merkle-Damgård construction for building collision-resistant cryptographic hash functions and the Merkle-Hellman knapsack cryptosystem. https://en.wikipedia.org/wiki/Ralph_Merkle
"merkle",
# Johanna Mestorf - German prehistoric archaeologist and first female museum director in Germany - https://en.wikipedia.org/wiki/Johanna_Mestorf
"mestorf",
# Maryam Mirzakhani - an Iranian mathematician and the first woman to win the Fields Medal. https://en.wikipedia.org/wiki/Maryam_Mirzakhani
"mirzakhani",
# Rita Levi-Montalcini - Won Nobel Prize in Physiology or Medicine jointly with colleague Stanley Cohen for the discovery of nerve growth factor (https://en.wikipedia.org/wiki/Rita_Levi-Montalcini)
"montalcini",
# Gordon Earle Moore - American engineer, Silicon Valley founding father, author of Moore's law. https://en.wikipedia.org/wiki/Gordon_Moore
"moore",
# Samuel Morse - contributed to the invention of a single-wire telegraph system based on European telegraphs and was a co-developer of the Morse code - https://en.wikipedia.org/wiki/Samuel_Morse
"morse",
# Ian Murdock - founder of the Debian project - https://en.wikipedia.org/wiki/Ian_Murdock
"murdock",
# May-Britt Moser - Nobel prize winner neuroscientist who contributed to the discovery of grid cells in the brain. https://en.wikipedia.org/wiki/May-Britt_Moser
"moser",
# John Napier of Merchiston - Scottish landowner known as an astronomer, mathematician and physicist. Best known for his discovery of logarithms. https://en.wikipedia.org/wiki/John_Napier
"napier",
# John Forbes Nash, Jr. - American mathematician who made fundamental contributions to game theory, differential geometry, and the study of partial differential equations. https://en.wikipedia.org/wiki/John_Forbes_Nash_Jr.
"nash",
    # John von Neumann - today's computer architectures are based on the von Neumann architecture. https://en.wikipedia.org/wiki/Von_Neumann_architecture
"neumann",
# Isaac Newton invented classic mechanics and modern optics. https://en.wikipedia.org/wiki/Isaac_Newton
"newton",
# Florence Nightingale, more prominently known as a nurse, was also the first female member of the Royal Statistical Society and a pioneer in statistical graphics https://en.wikipedia.org/wiki/Florence_Nightingale#Statistics_and_sanitary_reform
"nightingale",
# Alfred Nobel - a Swedish chemist, engineer, innovator, and armaments manufacturer (inventor of dynamite) - https://en.wikipedia.org/wiki/Alfred_Nobel
"nobel",
# Emmy Noether, German mathematician. Noether's Theorem is named after her. https://en.wikipedia.org/wiki/Emmy_Noether
"noether",
    # Poppy Northcutt was the first woman to work as part of NASA’s Mission Control. http://www.businessinsider.com/poppy-northcutt-helped-apollo-astronauts-2014-12?op=1
"northcutt",
# Robert Noyce invented silicon integrated circuits and gave Silicon Valley its name. - https://en.wikipedia.org/wiki/Robert_Noyce
"noyce",
# Panini - Ancient Indian linguist and grammarian from 4th century CE who worked on the world's first formal system - https://en.wikipedia.org/wiki/P%C4%81%E1%B9%87ini#Comparison_with_modern_formal_systems
"panini",
# Ambroise Pare invented modern surgery. https://en.wikipedia.org/wiki/Ambroise_Par%C3%A9
"pare",
# Blaise Pascal, French mathematician, physicist, and inventor - https://en.wikipedia.org/wiki/Blaise_Pascal
"pascal",
# Louis Pasteur discovered vaccination, fermentation and pasteurization. https://en.wikipedia.org/wiki/Louis_Pasteur.
"pasteur",
# Cecilia Payne-Gaposchkin was an astronomer and astrophysicist who, in 1925, proposed in her Ph.D. thesis an explanation for the composition of stars in terms of the relative abundances of hydrogen and helium. https://en.wikipedia.org/wiki/Cecilia_Payne-Gaposchkin
"payne",
# Radia Perlman is a software designer and network engineer and most famous for her invention of the spanning-tree protocol (STP). https://en.wikipedia.org/wiki/Radia_Perlman
"perlman",
# Rob Pike was a key contributor to Unix, Plan 9, the X graphic system, utf-8, and the Go programming language. https://en.wikipedia.org/wiki/Rob_Pike
"pike",
# Henri Poincaré made fundamental contributions in several fields of mathematics. https://en.wikipedia.org/wiki/Henri_Poincar%C3%A9
"poincare",
# Laura Poitras is a director and producer whose work, made possible by open source crypto tools, advances the causes of truth and freedom of information by reporting disclosures by whistleblowers such as Edward Snowden. https://en.wikipedia.org/wiki/Laura_Poitras
"poitras",
# Tat’yana Avenirovna Proskuriakova (Russian: Татья́на Авени́ровна Проскуряко́ва) (January 23 [O.S. January 10] 1909 – August 30, 1985) was a Russian-American Mayanist scholar and archaeologist who contributed significantly to the deciphering of Maya hieroglyphs, the writing system of the pre-Columbian Maya civilization of Mesoamerica. https://en.wikipedia.org/wiki/Tatiana_Proskouriakoff
"proskuriakova",
# Claudius Ptolemy - a Greco-Egyptian writer of Alexandria, known as a mathematician, astronomer, geographer, astrologer, and poet of a single epigram in the Greek Anthology - https://en.wikipedia.org/wiki/Ptolemy
"ptolemy",
# C. V. Raman - Indian physicist who won the Nobel Prize in 1930 for proposing the Raman effect. - https://en.wikipedia.org/wiki/C._V._Raman
"raman",
# Srinivasa Ramanujan - Indian mathematician and autodidact who made extraordinary contributions to mathematical analysis, number theory, infinite series, and continued fractions. - https://en.wikipedia.org/wiki/Srinivasa_Ramanujan
"ramanujan",
# Sally Kristen Ride was an American physicist and astronaut. She was the first American woman in space, and the youngest American astronaut. https://en.wikipedia.org/wiki/Sally_Ride
"ride",
# Dennis Ritchie - co-creator of UNIX and the C programming language. - https://en.wikipedia.org/wiki/Dennis_Ritchie
"ritchie",
# Ida Rhodes - American pioneer in computer programming, designed the first computer used for Social Security. https://en.wikipedia.org/wiki/Ida_Rhodes
"rhodes",
# Julia Hall Bowman Robinson - American mathematician renowned for her contributions to the fields of computability theory and computational complexity theory. https://en.wikipedia.org/wiki/Julia_Robinson
"robinson",
# Wilhelm Conrad Röntgen - German physicist who was awarded the first Nobel Prize in Physics in 1901 for the discovery of X-rays (Röntgen rays). https://en.wikipedia.org/wiki/Wilhelm_R%C3%B6ntgen
"roentgen",
# Rosalind Franklin - British biophysicist and X-ray crystallographer whose research was critical to the understanding of DNA - https://en.wikipedia.org/wiki/Rosalind_Franklin
"rosalind",
# Vera Rubin - American astronomer who pioneered work on galaxy rotation rates. https://en.wikipedia.org/wiki/Vera_Rubin
"rubin",
# Meghnad Saha - Indian astrophysicist best known for his development of the Saha equation, used to describe chemical and physical conditions in stars - https://en.wikipedia.org/wiki/Meghnad_Saha
"saha",
# Jean E. Sammet developed FORMAC, the first widely used computer language for symbolic manipulation of mathematical formulas. https://en.wikipedia.org/wiki/Jean_E._Sammet
"sammet",
# Mildred Sanderson - American mathematician best known for Sanderson's theorem concerning modular invariants. https://en.wikipedia.org/wiki/Mildred_Sanderson
"sanderson",
# Satoshi Nakamoto is the name used by the unknown person or group of people who developed bitcoin, authored the bitcoin white paper, and created and deployed bitcoin's original reference implementation. https://en.wikipedia.org/wiki/Satoshi_Nakamoto
"satoshi",
    # Adi Shamir - Israeli cryptographer whose numerous inventions and contributions to cryptography include the Feige-Fiat-Shamir identification scheme, the Rivest Shamir Adleman (RSA) public-key cryptosystem, Shamir's secret sharing scheme, the breaking of the Merkle-Hellman cryptosystem, the TWINKLE and TWIRL factoring devices and the discovery of differential cryptanalysis (with Eli Biham). https://en.wikipedia.org/wiki/Adi_Shamir
"shamir",
# Claude Shannon - The father of information theory and founder of digital circuit design theory. (https://en.wikipedia.org/wiki/Claude_Shannon)
"shannon",
# Carol Shaw - Originally an Atari employee, Carol Shaw is said to be the first female video game designer. https://en.wikipedia.org/wiki/Carol_Shaw_(video_game_designer)
"shaw",
# Dame Stephanie "Steve" Shirley - Founded a software company in 1962 employing women working from home. https://en.wikipedia.org/wiki/Steve_Shirley
"shirley",
# William Shockley co-invented the transistor - https://en.wikipedia.org/wiki/William_Shockley
"shockley",
# Lina Solomonovna Stern (or Shtern; Russian: Лина Соломоновна Штерн; 26 August 1878 – 7 March 1968) was a Soviet biochemist, physiologist and humanist whose medical discoveries saved thousands of lives at the fronts of World War II. She is best known for her pioneering work on blood–brain barrier, which she described as hemato-encephalic barrier in 1921. https://en.wikipedia.org/wiki/Lina_Stern
"shtern",
# Françoise Barré-Sinoussi - French virologist and Nobel Prize Laureate in Physiology or Medicine; her work was fundamental in identifying HIV as the cause of AIDS. https://en.wikipedia.org/wiki/Fran%C3%A7oise_Barr%C3%A9-Sinoussi
"sinoussi",
# Betty Snyder - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Betty_Holberton
"snyder",
# Cynthia Solomon - Pioneer in the fields of artificial intelligence, computer science and educational computing. Known for creation of Logo, an educational programming language. https://en.wikipedia.org/wiki/Cynthia_Solomon
"solomon",
# Frances Spence - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Frances_Spence
"spence",
# Michael Stonebraker is a database research pioneer and architect of Ingres, Postgres, VoltDB and SciDB. Winner of 2014 ACM Turing Award. https://en.wikipedia.org/wiki/Michael_Stonebraker
"stonebraker",
# Ivan Edward Sutherland - American computer scientist and Internet pioneer, widely regarded as the father of computer graphics. https://en.wikipedia.org/wiki/Ivan_Sutherland
"sutherland",
# Janese Swanson (with others) developed the first of the Carmen Sandiego games. She went on to found Girl Tech. https://en.wikipedia.org/wiki/Janese_Swanson
"swanson",
# Aaron Swartz was influential in creating RSS, Markdown, Creative Commons, Reddit, and much of the internet as we know it today. He was devoted to freedom of information on the web. https://en.wikiquote.org/wiki/Aaron_Swartz
"swartz",
# Bertha Swirles was a theoretical physicist who made a number of contributions to early quantum theory. https://en.wikipedia.org/wiki/Bertha_Swirles
"swirles",
# Helen Brooke Taussig - American cardiologist and founder of the field of paediatric cardiology. https://en.wikipedia.org/wiki/Helen_B._Taussig
"taussig",
# Valentina Tereshkova is a Russian engineer, cosmonaut and politician. She was the first woman to fly to space in 1963. In 2013, at the age of 76, she offered to go on a one-way mission to Mars. https://en.wikipedia.org/wiki/Valentina_Tereshkova
"tereshkova",
# Nikola Tesla invented the AC electric system and every gadget ever used by a James Bond villain. https://en.wikipedia.org/wiki/Nikola_Tesla
"tesla",
# Marie Tharp - American geologist and oceanic cartographer who co-created the first scientific map of the Atlantic Ocean floor. Her work led to the acceptance of the theories of plate tectonics and continental drift. https://en.wikipedia.org/wiki/Marie_Tharp
"tharp",
# Ken Thompson - co-creator of UNIX and the C programming language - https://en.wikipedia.org/wiki/Ken_Thompson
"thompson",
# Linus Torvalds invented Linux and Git. https://en.wikipedia.org/wiki/Linus_Torvalds
"torvalds",
# Youyou Tu - Chinese pharmaceutical chemist and educator known for discovering artemisinin and dihydroartemisinin, used to treat malaria, which has saved millions of lives. Joint winner of the 2015 Nobel Prize in Physiology or Medicine. https://en.wikipedia.org/wiki/Tu_Youyou
"tu",
# Alan Turing was a founding father of computer science. https://en.wikipedia.org/wiki/Alan_Turing.
"turing",
# Varahamihira - Ancient Indian mathematician who discovered trigonometric formulae during 505-587 CE - https://en.wikipedia.org/wiki/Var%C4%81hamihira#Contributions
"varahamihira",
# Dorothy Vaughan was a NASA mathematician and computer programmer on the SCOUT launch vehicle program that put America's first satellites into space - https://en.wikipedia.org/wiki/Dorothy_Vaughan
"vaughan",
    # Sir Mokshagundam Visvesvaraya - notable Indian engineer and recipient of the Indian Republic's highest honour, the Bharat Ratna, in 1955. His birthday, 15 September, is celebrated as Engineer's Day in India in his memory - https://en.wikipedia.org/wiki/Visvesvaraya
"visvesvaraya",
# Christiane Nüsslein-Volhard - German biologist, won Nobel Prize in Physiology or Medicine in 1995 for research on the genetic control of embryonic development. https://en.wikipedia.org/wiki/Christiane_N%C3%BCsslein-Volhard
"volhard",
    # Cédric Villani - French mathematician, won Fields Medal, Fermat Prize and Poincaré Prize for his work in differential geometry and statistical mechanics. https://en.wikipedia.org/wiki/C%C3%A9dric_Villani
"villani",
# Marlyn Wescoff - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Marlyn_Meltzer
"wescoff",
# Sylvia B. Wilbur - British computer scientist who helped develop the ARPANET, was one of the first to exchange email in the UK and a leading researcher in computer-supported collaborative work. https://en.wikipedia.org/wiki/Sylvia_Wilbur
"wilbur",
# Andrew Wiles - Notable British mathematician who proved the enigmatic Fermat's Last Theorem - https://en.wikipedia.org/wiki/Andrew_Wiles
"wiles",
# Roberta Williams, did pioneering work in graphical adventure games for personal computers, particularly the King's Quest series. https://en.wikipedia.org/wiki/Roberta_Williams
"williams",
# Malcolm John Williamson - British mathematician and cryptographer employed by the GCHQ. Developed in 1974 what is now known as Diffie-Hellman key exchange (Diffie and Hellman first published the scheme in 1976). https://en.wikipedia.org/wiki/Malcolm_J._Williamson
"williamson",
# Sophie Wilson designed the first Acorn Micro-Computer and the instruction set for ARM processors. https://en.wikipedia.org/wiki/Sophie_Wilson
"wilson",
# Jeannette Wing - co-developed the Liskov substitution principle. - https://en.wikipedia.org/wiki/Jeannette_Wing
"wing",
# Steve Wozniak invented the Apple I and Apple II. https://en.wikipedia.org/wiki/Steve_Wozniak
"wozniak",
# The Wright brothers, Orville and Wilbur - credited with inventing and building the world's first successful airplane and making the first controlled, powered and sustained heavier-than-air human flight - https://en.wikipedia.org/wiki/Wright_brothers
"wright",
# Chien-Shiung Wu - Chinese-American experimental physicist who made significant contributions to nuclear physics. https://en.wikipedia.org/wiki/Chien-Shiung_Wu
"wu",
# Rosalyn Sussman Yalow - Rosalyn Sussman Yalow was an American medical physicist, and a co-winner of the 1977 Nobel Prize in Physiology or Medicine for development of the radioimmunoassay technique. https://en.wikipedia.org/wiki/Rosalyn_Sussman_Yalow
"yalow",
# Ada Yonath - an Israeli crystallographer, the first woman from the Middle East to win a Nobel prize in the sciences. https://en.wikipedia.org/wiki/Ada_Yonath
"yonath",
# Nikolay Yegorovich Zhukovsky (Russian: Никола́й Его́рович Жуко́вский, January 17 1847 – March 17, 1921) was a Russian scientist, mathematician and engineer, and a founding father of modern aero- and hydrodynamics. Whereas contemporary scientists scoffed at the idea of human flight, Zhukovsky was the first to undertake the study of airflow. He is often called the Father of Russian Aviation. https://en.wikipedia.org/wiki/Nikolay_Yegorovich_Zhukovsky
"zhukovsky",
]
system_random = random.SystemRandom()
def random_name(prefix: str = "", separator: str = "-") -> str:
name = f"{system_random.choice(ADJECTIVES)}{separator}{system_random.choice(NAMES)}"
# Steve Wozniak is not boring. This is part of the docker names spec.
if name == f"boring{separator}wozniak":
return random_name(prefix, separator)
return f"{prefix}{separator}{name}" if prefix else name
|
/scaleway_core-1.1.0.tar.gz/scaleway_core-1.1.0/scaleway_core/utils/random_name.py
| 0.499512 | 0.567038 |
random_name.py
|
pypi
|
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Dict, List
from dateutil import parser
@dataclass
class TimeSeriesPoint:
"""
Represents a point in a TimeSeries.
"""
timestamp: datetime
"""
Date of the point.
"""
value: float
"""
Value of the point.
"""
def unmarshal_TimeSeriesPoint(data: Any) -> TimeSeriesPoint:
"""
Unmarshal an instance of TimeSeriesPoint from the given data.
"""
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'TimeSeriesPoint' failed as data isn't a dictionary."
)
return TimeSeriesPoint(
timestamp=parser.isoparse(data["timestamp"]),
value=data["value"],
)
def marshal_TimeSeriesPoint(data: TimeSeriesPoint) -> Dict[str, Any]:
"""
Marshal an instance of TimeSeriesPoint into a JSON compatible data structure.
"""
return {
"timestamp": data.timestamp.isoformat(),
"value": data.value,
}
@dataclass
class TimeSeries:
"""
Represents a time series that could be used for graph purposes.
"""
name: str
"""
Name of the metric.
"""
points: List[TimeSeriesPoint]
"""
Points contains all the points that composed the series.
"""
metadata: Dict[str, str]
"""
Metadata contains some string metadata related to a metric.
"""
def unmarshal_TimeSeries(data: Any) -> TimeSeries:
"""
Unmarshal an instance of TimeSeries from the given data.
"""
if type(data) is not dict:
raise TypeError(
f"Unmarshalling the type 'TimeSeries' failed as data isn't a dictionary."
)
return TimeSeries(
name=data["name"],
points=[unmarshal_TimeSeriesPoint(point) for point in data["points"]],
metadata=data["metadata"],
)
def marshal_TimeSeries(data: TimeSeries) -> Dict[str, Any]:
"""
Marshal an instance of TimeSeries into a JSON compatible data structure.
"""
return {
"name": data.name,
"points": [marshal_TimeSeriesPoint(point) for point in data.points],
"metadata": data.metadata,
}
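# Illustrative round-trip through the two point helpers above (values are
# hypothetical):
#
#   >>> point = unmarshal_TimeSeriesPoint({"timestamp": "2023-01-01T00:00:00Z", "value": 1.5})
#   >>> marshal_TimeSeriesPoint(point)
#   {'timestamp': '2023-01-01T00:00:00+00:00', 'value': 1.5}
#
# dateutil's parser.isoparse accepts RFC 3339 timestamps and
# datetime.isoformat emits an equivalent ISO 8601 string, so marshalling
# inverts unmarshalling.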
|
/scaleway_core-1.1.0.tar.gz/scaleway_core-1.1.0/scaleway_core/bridge/timeseries.py
| 0.929015 | 0.678024 |
timeseries.py
|
pypi
|
from scalib.metrics import SNR, Ttest
from scalib.modeling import LDAClassifier
from scalib.attacks import FactorGraph, BPState
from scalib.postprocessing import rank_accuracy
from utils import sbox, gen_traces
import numpy as np
def main():
nc = 256
npoi = 2
# Parameters
std = 2
ntraces_a = 40
ntraces_p = 20000
print(
"1. Generate simulated traces (Hamming weight + Gaussian noise) with parameters:"
)
print(f" ntraces for profiling: {ntraces_p}")
print(f" ntraces for attack: {ntraces_a}")
print(f" noise std: {std}")
traces_p, labels_p = gen_traces(
ntraces_p, std, random_key=True, random_plaintext=True
)
traces_a, labels_a = gen_traces(
ntraces_a, std, random_key=False, random_plaintext=True
)
_, ns = traces_p.shape
print("2. POI selection with SNR")
print(" 2.1 Compute SNR for 16 Sbox outputs xi")
    # x array with the xi values
x = np.zeros((ntraces_p, 16), dtype=np.uint16)
for i in range(16):
x[:, i] = labels_p[f"x{i}"]
# estimate SNR
snr = SNR(nc=nc, ns=ns, np=16)
snr.fit_u(traces_p, x)
snr_val = snr.get_snr()
print(" 2.2 Select POIs with highest SNR.")
pois = [np.argsort(snr_val[i])[-npoi:] for i in range(16)]
print("3. Profiling")
# We build a LDA model (pooled Gaussian templates) for each of the 16
# Sboxes (xi).
print(" 3.1 Build LDAClassifier for each xi")
models = []
for i in range(16):
lda = LDAClassifier(nc=nc, ns=npoi, p=1)
lda.fit_u(l=traces_p[:, pois[i]], x=labels_p[f"x{i}"].astype(np.uint16))
lda.solve()
models.append(lda)
print(" 3.2 Get xi distributions from attack traces")
probas = [models[i].predict_proba(traces_a[:, pois[i]]) for i in range(16)]
print("4. Attack")
print(" 4.1 Create the SASCA Graph")
graph_desc = f"""
NC {nc}
TABLE sbox # The Sbox
"""
for i in range(16):
graph_desc += f"""
VAR SINGLE k{i} # The key
PUB MULTI p{i} # The plaintext
VAR MULTI x{i} # Sbox output
VAR MULTI y{i} # Sbox input
PROPERTY y{i} = k{i} ^ p{i} # Key addition
PROPERTY x{i} = sbox[y{i}] # Sbox lookup
"""
# Initialize FactorGraph with the graph description and the required tables
factor_graph = FactorGraph(graph_desc, {"sbox": sbox})
print(" 4.2 Create belief propagation state.")
# We have to give the number of attack traces and the values for the public variables.
bp = BPState(
factor_graph,
ntraces_a,
{f"p{i}": labels_a[f"p{i}"].astype(np.uint32) for i in range(16)},
)
for i in range(16):
bp.set_evidence(f"x{i}", probas[i])
print(" 4.3 Run belief propagation")
for i in range(16):
bp.bp_acyclic(f"k{i}")
print("5. Attack evaluation")
print(" 5.1 Byte-wise attack")
# correct secret key
secret_key = []
# distribution for each of the key bytes
key_distribution = []
# the best key guess of the adversary
guess_key = []
# rank for all the key bytes
ranks = []
for i in range(16):
sk = labels_a[f"k{i}"][0] # secret key byte
distribution = bp.get_distribution(f"k{i}")
guess_key.append(np.argmax(distribution))
ranks.append(256 - np.where(np.argsort(distribution) == sk)[0])
secret_key.append(sk)
key_distribution.append(distribution)
print("")
print(" secret key (hex):", " ".join(["%3x" % (x) for x in secret_key]))
print(" best key (hex):", " ".join(["%3x" % (x) for x in guess_key]))
print(" key byte ranks :", " ".join(["%3d" % (x) for x in ranks]))
print("")
print(f" 5.2 Estimate full log2 key rank:")
key_distribution = np.array(key_distribution)
# Scores are negative log-likelihoods.
    # Put a lower-bound on the probabilities, otherwise we might get NaNs.
scores = -np.log2(np.maximum(key_distribution, 2**-128))
rmin, r, rmax = rank_accuracy(scores, secret_key, 1)
lrmin, lr, lrmax = (np.log2(rmin), np.log2(r), np.log2(rmax))
print("")
print(f" {lrmin} < {lr} < {lrmax}")
print("")
if __name__ == "__main__":
main()
|
/scalib-0.5.5.tar.gz/scalib-0.5.5/examples/aes_attack.py
| 0.764276 | 0.403156 |
aes_attack.py
|
pypi
|
from contextlib import contextmanager
from .termui import get_terminal_size
from .parser import split_opt
from ._compat import term_len
# Can force a width. This is used by the test system
FORCED_WIDTH = None
def measure_table(rows):
widths = {}
for row in rows:
for idx, col in enumerate(row):
widths[idx] = max(widths.get(idx, 0), term_len(col))
return tuple(y for x, y in sorted(widths.items()))
def iter_rows(rows, col_count):
for row in rows:
row = tuple(row)
yield row + ('',) * (col_count - len(row))
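# Illustrative behaviour of the two helpers above (hypothetical input):
#   measure_table([("a", "bb"), ("ccc",)]) returns (3, 2), the widest cell in
#   each column, and iter_rows(..., col_count=2) pads the short row to
#   ("ccc", "").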
def wrap_text(text, width=78, initial_indent='', subsequent_indent='',
preserve_paragraphs=False):
"""A helper function that intelligently wraps text. By default, it
assumes that it operates on a single paragraph of text but if the
`preserve_paragraphs` parameter is provided it will intelligently
handle paragraphs (defined by two empty lines).
If paragraphs are handled, a paragraph can be prefixed with an empty
line containing the ``\\b`` character (``\\x08``) to indicate that
no rewrapping should happen in that block.
:param text: the text that should be rewrapped.
:param width: the maximum width for the text.
:param initial_indent: the initial indent that should be placed on the
first line as a string.
:param subsequent_indent: the indent string that should be placed on
each consecutive line.
:param preserve_paragraphs: if this flag is set then the wrapping will
intelligently handle paragraphs.
"""
from ._textwrap import TextWrapper
text = text.expandtabs()
wrapper = TextWrapper(width, initial_indent=initial_indent,
subsequent_indent=subsequent_indent,
replace_whitespace=False)
if not preserve_paragraphs:
return wrapper.fill(text)
p = []
buf = []
indent = None
def _flush_par():
if not buf:
return
if buf[0].strip() == '\b':
p.append((indent or 0, True, '\n'.join(buf[1:])))
else:
p.append((indent or 0, False, ' '.join(buf)))
del buf[:]
for line in text.splitlines():
if not line:
_flush_par()
indent = None
else:
if indent is None:
orig_len = term_len(line)
line = line.lstrip()
indent = orig_len - term_len(line)
buf.append(line)
_flush_par()
rv = []
for indent, raw, text in p:
with wrapper.extra_indent(' ' * indent):
if raw:
rv.append(wrapper.indent_only(text))
else:
rv.append(wrapper.fill(text))
return '\n\n'.join(rv)
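# Illustrative behaviour (hypothetical call):
#   wrap_text("\b\nraw text\n\nnormal text", preserve_paragraphs=True)
# emits the paragraph prefixed by the \b line without rewrapping, while the
# "normal text" paragraph is rewrapped to the given width.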
class HelpFormatter(object):
"""This class helps with formatting text-based help pages. It's
usually just needed for very special internal cases, but it's also
exposed so that developers can write their own fancy outputs.
At present, it always writes into memory.
:param indent_increment: the additional increment for each level.
:param width: the width for the text. This defaults to the terminal
width clamped to a maximum of 78.
"""
def __init__(self, indent_increment=2, width=None, max_width=None):
self.indent_increment = indent_increment
if max_width is None:
max_width = 80
if width is None:
width = FORCED_WIDTH
if width is None:
width = max(min(get_terminal_size()[0], max_width) - 2, 50)
self.width = width
self.current_indent = 0
self.buffer = []
def write(self, string):
"""Writes a unicode string into the internal buffer."""
self.buffer.append(string)
def indent(self):
"""Increases the indentation."""
self.current_indent += self.indent_increment
def dedent(self):
"""Decreases the indentation."""
self.current_indent -= self.indent_increment
def write_usage(self, prog, args='', prefix='Usage: '):
"""Writes a usage line into the buffer.
:param prog: the program name.
:param args: whitespace separated list of arguments.
:param prefix: the prefix for the first line.
"""
usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog)
text_width = self.width - self.current_indent
if text_width >= (term_len(usage_prefix) + 20):
# The arguments will fit to the right of the prefix.
indent = ' ' * term_len(usage_prefix)
self.write(wrap_text(args, text_width,
initial_indent=usage_prefix,
subsequent_indent=indent))
else:
# The prefix is too long, put the arguments on the next line.
self.write(usage_prefix)
self.write('\n')
indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4)
self.write(wrap_text(args, text_width,
initial_indent=indent,
subsequent_indent=indent))
self.write('\n')
def write_heading(self, heading):
"""Writes a heading into the buffer."""
self.write('%*s%s:\n' % (self.current_indent, '', heading))
def write_paragraph(self):
"""Writes a paragraph into the buffer."""
if self.buffer:
self.write('\n')
def write_text(self, text):
"""Writes re-indented text into the buffer. This rewraps and
preserves paragraphs.
"""
text_width = max(self.width - self.current_indent, 11)
indent = ' ' * self.current_indent
self.write(wrap_text(text, text_width,
initial_indent=indent,
subsequent_indent=indent,
preserve_paragraphs=True))
self.write('\n')
def write_dl(self, rows, col_max=30, col_spacing=2):
"""Writes a definition list into the buffer. This is how options
and commands are usually formatted.
:param rows: a list of two item tuples for the terms and values.
:param col_max: the maximum width of the first column.
:param col_spacing: the number of spaces between the first and
second column.
"""
rows = list(rows)
widths = measure_table(rows)
if len(widths) != 2:
raise TypeError('Expected two columns for definition list')
first_col = min(widths[0], col_max) + col_spacing
for first, second in iter_rows(rows, len(widths)):
self.write('%*s%s' % (self.current_indent, '', first))
if not second:
self.write('\n')
continue
if term_len(first) <= first_col - col_spacing:
self.write(' ' * (first_col - term_len(first)))
else:
self.write('\n')
self.write(' ' * (first_col + self.current_indent))
text_width = max(self.width - first_col - 2, 10)
            # splitlines() may return an empty list, and an iterator is always
            # truthy, so test the list itself before consuming it.
            lines = wrap_text(second, text_width).splitlines()
            if lines:
                self.write(lines[0] + '\n')
                for line in lines[1:]:
                    self.write('%*s%s\n' % (
                        first_col + self.current_indent, '', line))
            else:
                self.write('\n')
@contextmanager
def section(self, name):
"""Helpful context manager that writes a paragraph, a heading,
and the indents.
:param name: the section name that is written as heading.
"""
self.write_paragraph()
self.write_heading(name)
self.indent()
try:
yield
finally:
self.dedent()
@contextmanager
def indentation(self):
"""A context manager that increases the indentation."""
self.indent()
try:
yield
finally:
self.dedent()
def getvalue(self):
"""Returns the buffer contents."""
return ''.join(self.buffer)
def join_options(options):
"""Given a list of option strings this joins them in the most appropriate
way and returns them in the form ``(formatted_string,
any_prefix_is_slash)`` where the second item in the tuple is a flag that
indicates if any of the option prefixes was a slash.
"""
rv = []
any_prefix_is_slash = False
for opt in options:
prefix = split_opt(opt)[0]
if prefix == '/':
any_prefix_is_slash = True
rv.append((len(prefix), opt))
rv.sort(key=lambda x: x[0])
rv = ', '.join(x[1] for x in rv)
return rv, any_prefix_is_slash
|
/scalr-ctl-7.16.2.tar.gz/scalr-ctl-7.16.2/scalrctl/click/formatting.py
| 0.800497 | 0.26781 |
formatting.py
|
pypi
|
import math
import random
import time
import uuid
from typing import List
from scalr.cloud import CloudAdapter, CloudInstance
from scalr.config import PolicyConfig, ScaleDownSelectionEnum, ScalingConfig
from scalr.log import log
from scalr.policy.factory import PolicyAdapterFactory
class Scalr:
def __init__(self, config: ScalingConfig) -> None:
self.config = config
self.desired: int = 0
log.debug("Init scalr")
def get_unique_name(self, prefix: str) -> str:
uid = str(uuid.uuid4()).split("-")[0]
return f"{prefix}-{uid}"
def calc_diff(self, factor: float, current_size: int) -> int:
log.info(f"Factor: {factor}")
log.info(f"Current: {current_size}")
calc_current_size: int = current_size
if current_size == 0 and factor > 0:
log.warning("Current size was 0 but set to 1 factor calculation")
calc_current_size = 1
desired: int = math.ceil(calc_current_size * factor)
log.info(f"Calculated desired by factor: {desired}")
if self.config.max < desired:
log.info(
f"Desired {desired} > max {self.config.max}, resetted to max",
)
desired = self.config.max
elif self.config.min > desired:
log.info(
f"Desired {desired} < min {self.config.min}, resetted to min",
)
desired = self.config.min
else:
log.info(
f"Desired withing boundaries: min {self.config.min} =< desired {desired} =< max {self.config.max}",
)
log.info(f"Final desired: {desired}")
self.desired = desired
diff = desired - current_size
log.info(f"Calculated diff: {diff}")
if diff < 0 and 0 <= self.config.max_step_down < diff * -1:
log.info(f"Hit max down step: {self.config.max_step_down}")
diff = self.config.max_step_down * -1
return diff
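    # Illustrative behaviour sketch (hypothetical ScalingConfig with min=1,
    # max=5, max_step_down=2):
    #     scalr.calc_diff(factor=2.0, current_size=2)  # ceil(2 * 2.0) = 4 -> diff +2
    #     scalr.calc_diff(factor=0.0, current_size=4)  # clamped to min=1 -> raw diff -3, capped at -2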
def get_factor(self, policy_configs: List[PolicyConfig]) -> float:
scaling_factor: float = 0
for policy_config in policy_configs:
policy = PolicyAdapterFactory.create(source=policy_config.source)
policy.configure(config=policy_config)
policy_factor: float = policy.get_scaling_factor()
log.debug(f"Policy scaling factor: {policy_factor}")
if policy_factor <= 0:
                log.debug(f"Ignoring non-positive factor, keeping current scaling factor: {scaling_factor}")
continue
if policy_factor > scaling_factor:
scaling_factor = policy_factor
log.debug(f"Set scaling factor: {scaling_factor}")
continue
log.debug(f"Keep current scaling factor: {scaling_factor}")
return scaling_factor
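    # Illustrative: non-positive policy factors are ignored and the largest
    # remaining factor wins, e.g. factors of 0.0, 1.2 and 0.8 across three
    # policies yield a scaling factor of 1.2.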
def scale(self, diff: int, cloud: CloudAdapter) -> None:
if self.config.min > self.config.max:
raise Exception(f"Error: min {self.config.min} > max {self.config.max}")
if diff > 0:
self.scale_up(diff, cloud)
elif diff < 0:
self.scale_down(diff * -1, cloud)
else:
log.info("No scaling action taken")
if not self.config.dry_run:
cloud.ensure_instances_running()
def cooldown(self) -> None:
if self.config.dry_run:
return
log.info(f"Cooling down for {self.config.cooldown_timeout}s")
for i in range(self.config.cooldown_timeout):
time.sleep(1)
log.info("Cooldown finished")
def scale_up(self, diff: int, cloud: CloudAdapter):
log.info(f"Scaling up {diff}")
while diff > 0:
instance_name = self.get_unique_name(prefix=self.config.name)
if not self.config.dry_run:
log.info(f"Creating instance {instance_name}")
cloud.deploy_instance(name=instance_name)
else:
log.info(f"Dry run creating instance {instance_name}")
diff -= 1
def scale_down(self, diff: int, cloud: CloudAdapter):
log.info(f"Scaling down {diff}")
instances = cloud.get_current_instances()
while diff > 0:
instance = self.select_instance(
strategy=self.config.scale_down_selection, current_servers=instances
)
if not self.config.dry_run:
log.info(f"Deleting instance {instance}")
cloud.destroy_instance(instance=instance)
else:
log.info(f"Dry run deleting instance {instance}")
diff -= 1
def select_instance(
self, strategy: str, current_servers: List[CloudInstance]
) -> CloudInstance:
if not current_servers:
raise Exception("Error: No current instances found")
if strategy == ScaleDownSelectionEnum.oldest:
index = -1
elif strategy == ScaleDownSelectionEnum.youngest:
index = 0
else:
index = random.randint(0, len(current_servers) - 1)
return current_servers.pop(index)
|
/scalr-ngine-0.16.0.tar.gz/scalr-ngine-0.16.0/scalr/scalr.py
| 0.686895 | 0.176778 |
scalr.py
|
pypi
|
import base64
import os
from typing import List, Optional
import requests
from scalr.cloud import CloudAdapter, GenericCloudInstance
from scalr.log import log
VULTR_API_KEY: str = str(os.getenv("VULTR_API_KEY"))
class Vultr:
VULTR_API_URL: str = "https://api.vultr.com/v2"
def __init__(self, api_key: str) -> None:
self.api_key = api_key
def query_api(
self,
method: str,
path: str,
params: Optional[dict] = None,
json: Optional[dict] = None,
) -> requests.Response:
r = requests.request(
method=method,
url=f"{self.VULTR_API_URL}/{path}",
headers={
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
},
params=params,
json=json,
timeout=10,
)
r.raise_for_status()
return r
def list_instances(self, tag=None, label=None) -> List[dict]:
params = {
"tag": tag,
"label": label,
}
r = self.query_api("get", "instances", params=params)
        return r.json().get("instances", [])
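    # Usage sketch (hypothetical API key and tag):
    #     vultr = Vultr(api_key="...")
    #     for instance in vultr.list_instances(tag="scalr=web"):
    #         print(instance["id"], instance["label"])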
def start_instance(self, instance_id: str) -> None:
self.query_api("post", f"instances/{instance_id}/start")
def delete_instance(self, instance_id: str) -> None:
self.query_api("delete", f"instances/{instance_id}")
def create_instance(
self,
region,
plan,
os_id: Optional[str] = None,
script_id: Optional[str] = None,
iso_id: Optional[str] = None,
snapshot_id: Optional[str] = None,
enable_ipv6: Optional[bool] = None,
attach_private_network: Optional[List[str]] = None,
label: Optional[str] = None,
sshkey_id: Optional[List[str]] = None,
backups: Optional[str] = None,
app_id: Optional[str] = None,
image_id: Optional[str] = None,
user_data: Optional[str] = None,
ddos_protection: Optional[bool] = None,
activation_email: Optional[bool] = None,
hostname: Optional[str] = None,
tag: Optional[str] = None,
firewall_group_id: Optional[str] = None,
enable_private_network: Optional[bool] = None,
) -> dict:
if user_data:
user_data = base64.b64encode(user_data.encode("utf-8")).decode("utf-8")
json = {
"region": region,
"plan": plan,
"os_id": os_id,
"script_id": script_id,
"iso_id": iso_id,
"snapshot_id": snapshot_id,
"enable_ipv6": enable_ipv6,
"attach_private_network": attach_private_network,
"label": label,
"sshkey_id": sshkey_id,
"backups": backups,
"app_id": app_id,
"image_id": image_id,
"user_data": user_data,
"ddos_protection": ddos_protection,
"activation_email": activation_email,
"hostname": hostname,
"tag": tag,
"firewall_group_id": firewall_group_id,
"enable_private_network": enable_private_network,
}
r = self.query_api("post", "instances", json=json)
return r.json().get("instance", dict())
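    # Usage sketch (hypothetical region/plan/os values): user_data is passed as
    # plain text and is base64-encoded above before submission.
    #     vultr.create_instance(region="ams", plan="vc2-1c-1gb", os_id="477",
    #                           label="web-1", user_data="#!/bin/sh\necho hello")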
class VultrCloudAdapter(CloudAdapter):
def __init__(self):
super().__init__()
self.vultr = Vultr(api_key=str(os.getenv("VULTR_API_KEY")))
def get_current_instances(self) -> List[GenericCloudInstance]:
filter_tag = f"scalr={self.filter}"
log.info(f"vultr: Querying with filter_tag: {filter_tag}")
servers = self.vultr.list_instances(tag=filter_tag)
return [
GenericCloudInstance(
id=server["id"],
name=server["label"],
status=server["power_status"],
)
for server in sorted(servers, key=lambda i: i["date_created"])
]
def ensure_instances_running(self) -> None:
log.info("vultr: ensure running")
for instance in self.get_current_instances():
log.info(f"vultr: instance {instance.name} status {instance.status}")
if instance.status == "running":
continue
if instance.status == "stopped":
try:
self.vultr.start_instance(instance_id=instance.id)
log.info(f"vultr: Instance {instance.name} started")
except Exception as ex:
log.error(ex)
def deploy_instance(self, name: str) -> None:
log.info(f"vultr: Deploying new instance named {name}")
launch_config = self.launch.copy()
launch_config.update(
{
"label": name,
"hostname": name,
"tag": f"scalr={self.filter}",
}
)
self.vultr.create_instance(**launch_config)
def destroy_instance(self, instance: GenericCloudInstance) -> None:
log.info(f"vultr: Destroying instance {instance}")
self.vultr.delete_instance(instance_id=instance.id)
|
/scalr-ngine-0.16.0.tar.gz/scalr-ngine-0.16.0/scalr/cloud/adapters/vultr.py
| 0.772187 | 0.163813 |
vultr.py
|
pypi
|
from dataclasses import dataclass
from typing import List
from scalr.cloud import CloudAdapter, CloudInstance
from scalr.log import log
import digitalocean
@dataclass
class DigitalOceanCloudInstance(CloudInstance):
droplet: digitalocean.Droplet
def __repr__(self) -> str:
return str(self.droplet.name)
class DigitaloceanCloudAdapter(CloudAdapter):
def __init__(self):
self.client = digitalocean.Manager()
def get_current_instances(self) -> List[DigitalOceanCloudInstance]:
        filter_tag = f"scalr:{self.filter}"
log.info(f"digitalocean: Querying with filter_tag: {filter_tag}")
droplets = self.client.get_all_droplets(tag_name=filter_tag)
return [
DigitalOceanCloudInstance(droplet)
for droplet in sorted(droplets, key=lambda i: i.created_at)
]
def ensure_instances_running(self) -> None:
log.info("digitalocean: ensure running")
for instance in self.get_current_instances():
log.info(
f"digitalocean: instance {instance.droplet.name} status {instance.droplet.status}"
)
if instance.droplet.status == "off":
instance.droplet.power_on()
def deploy_instance(self, name: str) -> None:
log.info(f"digitalocean: Deploying instance with name {name}")
launch_config = self.launch.copy()
        launch_config.update(
            {
                "label": name,
                "hostname": name,
                "tag": f"scalr:{self.filter}",
            }
        )
droplet = digitalocean.Droplet(
name=name,
region=launch_config["region"],
image=launch_config["image"],
size_slug=launch_config["size"],
ssh_keys=launch_config["ssh_keys"],
user_data=launch_config.get("user_data", ""),
ipv6=launch_config.get("ipv6", False),
)
droplet.create()
tag = digitalocean.Tag(name=f"scalr:{self.filter}")
tag.create()
tag.add_droplets([droplet.id])
log.info(f"Creating droplet {name}")
def destroy_instance(self, instance: DigitalOceanCloudInstance) -> None:
log.info(f"digitalocean: Destroying instance {instance}")
instance.droplet.destroy()
|
/scalr-ngine-0.16.0.tar.gz/scalr-ngine-0.16.0/scalr/cloud/adapters/digitalocean.py
| 0.840554 | 0.163679 |
digitalocean.py
|
pypi
|
class AgentStatus(object):
"""The main status container object, holding references to all other status elements.
"""
def __init__(self):
# The time (in seconds past epoch) when the agent process was launched.
self.launch_time = None
# The user name the agent process is running under.
self.user = None
# The version string for the agent.
self.version = None
# The name of the host the agent is running on.
self.server_host = None
# The URL of the scalyr server that the agent is connected to (such as https://www.scalyr.com/).
self.scalyr_server = None
# The path for the agent's log file.
self.log_path = None
# The ConfigStatus object recording the status for the configuration file.
self.config_status = None
# The CopyingManagerStatus object recording the status of the log copying manager (or none if CopyingManager
# has not been started). This contains information about the different log paths being watched and the
# progress of copying their bytes.
self.copying_manager_status = None
# The MonitorManagerStatus object recording the status of the monitor manager (or none if the MonitorManager
# has not been started). This contains information about the different ScalyrMonitors being run.
self.monitor_manager_status = None
class OverallStats(object):
"""Used to track stats that are calculated over the lifetime of the agent.
"""
def __init__(self):
# The time in seconds past epoch when the agent was started.
self.launch_time = None
# The version string for the agent.
self.version = None
# The current number of paths the log copier is watching.
self.num_watched_paths = 0
# The current number of file paths the log copier is copying.
self.num_copying_paths = 0
# The current number of running monitors.
self.num_running_monitors = 0
# The current number of monitors that should be running but are not.
self.num_dead_monitor = 0
# The total amount of user time CPU used by the agent (cpu secs).
self.user_cpu = 0
# The total amount of system time CPU used by the agent (cpu secs)
self.system_cpu = 0
# The current resident size in bytes of the agent process.
self.rss_size = 0
# The total number of log bytes copied to the Scalyr servers.
self.total_bytes_copied = 0
# The total number of log bytes that were skipped and were not considered to be sent to the Scalyr servers.
self.total_bytes_skipped = 0
# The total number of log bytes that were not sent to the Scalyr servers due to subsampling rules.
self.total_bytes_subsampled = 0
# The total number of log bytes that were not sent to Scalyr due to errors on either the client or server side.
self.total_bytes_failed = 0
# The total number of redactions that were applied to the log lines before being sent to the Scalyr servers.
self.total_redactions = 0
# The total number of errors seen when issuing a copy request.
self.total_copy_requests_errors = 0
# The total number of lines reported by monitors.
self.total_monitor_reported_lines = 0
# The total number of errors seen by executing monitors.
self.total_monitor_errors = 0
def __add__(self, other):
"""Adds all of the 'total_' fields of this instance and other together and returns a new OverallStats containing
the result.
"""
result = OverallStats()
result.total_bytes_copied = self.total_bytes_copied + other.total_bytes_copied
result.total_bytes_skipped = self.total_bytes_skipped + other.total_bytes_skipped
result.total_bytes_subsampled = self.total_bytes_subsampled + other.total_bytes_subsampled
result.total_bytes_failed = self.total_bytes_failed + other.total_bytes_failed
result.total_redactions = self.total_redactions + other.total_redactions
result.total_copy_requests_errors = self.total_copy_requests_errors + other.total_copy_requests_errors
result.total_monitor_reported_lines = self.total_monitor_reported_lines + other.total_monitor_reported_lines
result.total_monitor_errors = self.total_monitor_errors + other.total_monitor_errors
return result
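    # Illustrative: only the cumulative 'total_*' counters participate in
    # addition; point-in-time gauges such as num_watched_paths or rss_size
    # are left at their defaults on the result.
    #     a = OverallStats(); a.total_bytes_copied = 10
    #     b = OverallStats(); b.total_bytes_copied = 5
    #     (a + b).total_bytes_copied   # -> 15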
class ConfigStatus(object):
"""The status pertaining to parsing of the configuration file."""
def __init__(self):
# The path of the configuration file.
self.path = None
# The paths for additional configuration files read from the config directory.
self.additional_paths = []
# The last time the configuration file changed and was read by this agent.
self.last_read_time = None
# A status line describing if the configuration file was successfully parsed.
self.status = None
        # If the configuration file could not be parsed/used, a string describing the error.
self.last_error = None
# The last time the configuration file was successfully parsed.
self.last_good_read = None
# The last time the agent checked to see if the configuration file has changed.
self.last_check_time = None
class CopyingManagerStatus(object):
"""The status object containing information about the agent's copying components."""
def __init__(self):
# The total number of bytes successfully uploaded.
self.total_bytes_uploaded = 0
# The last time the agent successfully copied bytes from log files to the Scalyr servers.
self.last_success_time = None
# The last time the agent attempted to copy bytes from log files to the Scalyr servers.
self.last_attempt_time = None
# The size of the request for the last attempt.
self.last_attempt_size = None
# The last response from the Scalyr servers.
self.last_response = None
# The last status from the last response (should be 'success').
self.last_response_status = None
# The total number of failed copy requests.
self.total_errors = None
# LogMatcherStatus objects for each of the log paths being watched for copying.
self.log_matchers = []
class LogMatcherStatus(object):
"""The status object containing information about all of the copying being performed for a particular
log path including globbing."""
def __init__(self):
# The path.
self.log_path = None
# True if the log path contains globbing characters.
self.is_glob = None
# The last time the agent checked the path for new matches.
self.last_check_time = None
# For any matching file paths, a LogProcessorStatus object describing the copying.
self.log_processors_status = []
class LogProcessorStatus(object):
"""The status object containing information about the progress of the bytes being copied for a particular
file."""
def __init__(self):
# The path of the file (will not contain glob characters). This will be a path to an existing file.
self.log_path = None
# The last time the file was checked for new bytes.
self.last_scan_time = None
# The total bytes copied to the Scalyr servers.
self.total_bytes_copied = 0
# The number of bytes that still need to be sent to the Scalyr servers.
self.total_bytes_pending = 0
# The total bytes that were skipped (due to the log lines being too old, or the agent falling behind).
self.total_bytes_skipped = 0
# The total bytes that failed due to errors at either the server or client.
self.total_bytes_failed = 0
# The total bytes that were not sent to the server due to subsampling rules.
self.total_bytes_dropped_by_sampling = 0
# The total number of log lines copied to the Scalyr servers.
self.total_lines_copied = 0
# The total number of log lines that were not sent to the server due to subsampling rules.
self.total_lines_dropped_by_sampling = 0
# The total number of redactions applied to the log lines copied to the server.
self.total_redactions = 0
class MonitorManagerStatus(object):
"""The status object containing information about all of the running monitors."""
def __init__(self):
# The total number of monitors that are currently running.
self.total_alive_monitors = 0
# The MonitorStatus object for each monitor that is currently running or should be running.
self.monitors_status = []
class MonitorStatus(object):
"""The status object for a specific instance of a ScalyrMonitor."""
def __init__(self):
# The name of the monitor.
self.monitor_name = None
# The total number of metric lines reported by the monitor.
self.reported_lines = 0
# The total number of errors produced by the monitor.
self.errors = 0
# Whether or not the monitor is running.
self.is_alive = False
|
/scalyr-agent-2-2.0.0.beta.3.tar.gz/scalyr-agent-2-2.0.0.beta.3/scalyr_agent/agent_status.py
| 0.709422 | 0.407481 |
agent_status.py
|
pypi
|
from threading import Lock
import scalyr_agent.scalyr_logging as scalyr_logging
from scalyr_agent.util import StoppableThread
log = scalyr_logging.getLogger(__name__)
class ScalyrMonitor(StoppableThread):
"""The base class for all monitors used by the agent.
An instance of a monitor will be created for every reference to this module in the "monitors"
section in the agent.json configuration file. The monitor will be
executed in its own thread and will be expected to send all output to
    provided Loggers. Do not use stdout or stderr.
Public attributes: (must be updated in derived constructor)
log_config: A dict containing the log configuration object that
should be used to configure the agent to copy the log generated
by this module. It has the same format as the entries in the
"logs" section in agent.json. In particular, the module
can use this to specify the path of the log file where all emitted metric values
from this monitor will be sent (using self._logger), set attributes to
associate with all log lines generated by the module, specify a parser
for the log, as well as set sampling rules.
Note, if the "path" field in "log_config" is not absolute, it will be resolved relative to the
directory specified by the "agent_log_path" option in the configuration file.
disabled: A boolean indicating if this module instance should be
run.
"""
def __init__(self, monitor_config, logger, sample_interval_secs=30):
"""Constructs an instance of the monitor.
        It is optional for derived classes to override this method. They can instead
        override _initialize, which is invoked during initialization.
TODO: Determine which approach is preferred by developers and recommend that.
        If a derived class overrides __init__, it must invoke this method from the overriding method.
This method will set default values for
all public attributes (log_config, disabled, etc). These
may be overwritten by the derived class.
The derived classes must raise an Exception (or something derived from Exception)
in this method if the provided configuration is invalid or if there is any other
error known at this time preventing the module from running.
Arguments:
            monitor_config: A dict containing the configuration information
for this module instance from the configuration file. The
only valid values are strings, ints, longs, floats, and booleans.
logger: The logger to use for output.
sample_interval_secs: The interval in seconds to wait between gathering
samples.
"""
# The MonitorConfig object created from the config for this monitor instance.
self._config = MonitorConfig(monitor_config)
# The logger instance that this monitor should use to report all information and metric values.
self._logger = logger
self.monitor_name = monitor_config['module']
log_path = self.monitor_name.split('.')[-1] + '.log'
self.disabled = False
# TODO: For now, just leverage the logic in the loggers for naming this monitor. However,
# we should have it be more dynamic where the monitor can override it.
if logger.component.find('monitor:') == 0:
self.monitor_name = logger.component[8:]
else:
self.monitor_name = logger.component
self.log_config = {
"path": log_path,
}
        # This lock protects all variables that can be accessed by other threads: reported_lines,
# emitted_lines, and errors. It does not protect _run_state since that already has its own lock.
self.__lock = Lock()
self.__reported_lines = 0
self.__errors = 0
self._sample_interval_secs = sample_interval_secs
self.__metric_log_open = False
self._initialize()
StoppableThread.__init__(self, name='metric thread')
def _initialize(self):
"""Can be overridden by derived classes to perform initialization functions before the monitor is run.
This is meant to allow derived monitors to perform some initialization and configuration validation
without having to override the __init__ method (and be responsible for passing all of the arguments
to the super class).
The derived classes must raise an Exception (or something derived from Exception)
in this method if the provided configuration is invalid or if there is any other
error known at this time preventing the module from running.
"""
pass
@property
def module_name(self):
"""Returns the name of the module that will run this monitor.
"""
return self._config['module']
def reported_lines(self):
"""Returns the number of metric lines emitted to the metric log for this monitor.
        This is calculated by counting how many times values were emitted via this monitor's logger
        instance, plus all of the times any logger has logged anything with metric_log_for_monitor set
        to this monitor.
"""
self.__lock.acquire()
result = self.__reported_lines
self.__lock.release()
return result
def errors(self):
"""Returns the number of errors experienced by the monitor as it is running.
For monitors just implementing the gather_sample method, this will be the number of times
that invocation raised an exception. If a monitor overrides the run method, then it is up to
them to increment the errors as appropriate using increment_counter.
"""
self.__lock.acquire()
result = self.__errors
self.__lock.release()
return result
def increment_counter(self, reported_lines=0, errors=0):
"""Increment some of the counters pertaining to the performance of this monitor.
"""
self.__lock.acquire()
self.__reported_lines += reported_lines
self.__errors += errors
self.__lock.release()
def run(self):
"""Begins executing the monitor, writing metric output to logger.
Implements the business logic for this monitor. This method will
be invoked by the agent on its own thread. This method should
only return if the monitor instance should no longer be executed or
if the agent is shutting down.
        The default implementation of this method will invoke the
        "gather_sample" method once every sample interval, emitting the returned
        dict to logger. Derived classes may override this method if they
wish.
This method should use "self._logger" to report any output. It should use
"self._logger.emit_value" to report any metric values generated by the monitor
plugin. See the documentation for 'scalyr_logging.AgentLogger.emit_value' method for more details.
"""
try:
self._logger.info('Starting monitor')
while not self._is_stopped():
try:
self.gather_sample()
except Exception:
self._logger.exception('Failed to gather sample due to the following exception')
self.increment_counter(errors=1)
self._sleep_but_awaken_if_stopped(self._sample_interval_secs)
self._logger.info('Monitor has finished')
except Exception:
            self._logger.exception('Monitor died due to exception:', error_code='failedMonitor')
def gather_sample(self):
"""Derived classes should implement this method to gather a data sample for this monitor plugin
and report it.
If the default "run" method implementation is not overridden, then
        derived classes must implement this method to actually perform the
monitor-specific work to gather whatever information it should be
collecting.
It is expected that the derived class will report any gathered metric samples
by using the 'emit_value' method on self._logger. They may invoke that method
multiple times in a single 'gather_sample' call to report multiple metrics.
See the documentation for 'scalyr_logging.AgentLogger.emit_value' method for more details.
Any exceptions raised by this method will be reported as an error but will
not stop execution of the monitor.
"""
pass
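    # Minimal illustrative subclass (hypothetical monitor, relying on the
    # default run() loop, so only gather_sample is provided):
    #     class HeartbeatMonitor(ScalyrMonitor):
    #         def gather_sample(self):
    #             self._logger.emit_value('heartbeat', 1)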
def set_sample_interval(self, secs):
"""Sets the number of seconds between calls to gather_sample when running.
This must be invoked before the monitor is started.
Params:
secs: The number of seconds, which can be fractional.
"""
self._sample_interval_secs = secs
def open_metric_log(self):
try:
self._logger.openMetricLogForMonitor(self.log_config['path'], self)
self.__metric_log_open = True
return True
except Exception:
self._logger.exception('Failed to open metric log', error_code='failedMetricLog')
return False
def close_metric_log(self):
if self.__metric_log_open:
self._logger.closeMetricLog()
self.__metric_log_open = False
def _is_stopped(self):
"""Returns whether or not the "stop" method has been invoked."""
return not self._run_state.is_running()
def _sleep_but_awaken_if_stopped(self, time_to_sleep):
"""Sleeps for the specified amount of seconds or until the stop() method on this instance is invoked, whichever
comes first.
Arguments:
time_to_sleep: The number of seconds to sleep.
Returns:
True if the stop() has been invoked.
"""
return self._run_state.sleep_but_awaken_if_stopped(time_to_sleep)
class MonitorConfig(object):
"""Encapsulates configuration parameters for a single monitor instance and includes helper utilities to
validate configuration values.
This supports most of the operators and methods that dict supports, but has additional support to allow
Monitor developers to easily validate configuration values. See the get method for more details.
This abstraction does not support any mutator operations. The configuration is read-only.
"""
def __init__(self, content=None):
"""Initializes MonitorConfig.
Arguments:
content: A dict containing the key/values pairs to use.
"""
self.__map = {}
if content is not None:
for x in content:
self.__map[x] = content[x]
    def __len__(self):
        """Returns the number of keys in this configuration object."""
        return len(self.__map)
def get(self, field, required_field=False, max_value=None, min_value=None,
convert_to=None, default=None):
"""Returns the value for the requested field.
This method will optionally apply some validation rules as indicated by the optional arguments. If any
of these validation operations fail, then a BadMonitorConfiguration exception is raised. Monitor developers are
encouraged to catch this exception at their layer.
Arguments:
field: The name of the field.
required_field: If True, then will raise a BadMonitorConfiguration exception if the
field is not present.
convert_to: If not None, then will convert the value for the field to the specified
type. Only int, bool, float, long, str, and unicode are supported. If the type
conversion cannot be done, a BadMonitorConfiguration exception is raised.
The only true conversions allowed are those from str, unicode value to other types
such as int, bool, long, float. Trivial conversions are allowed from int, long to float,
but not the other way around. Additionally, any primitive type can be converted to str, unicode.
default: The value to return if the field is not present in the configuration. This is ignored
if 'required_field' is True.
max_value: If not None, the maximum allowed value for field. Raises a BadMonitorConfiguration
if the value is greater.
min_value: If not None, the minimum allowed value for field. Raises a BadMonitorConfiguration
                if the value is less than this.
Returns the value.
"""
if required_field and field not in self.__map:
raise BadMonitorConfiguration('Missing required field "%s"' % field, field)
result = self.__map.get(field, default)
if result is None:
return result
if convert_to is not None and type(result) != convert_to:
result = self.__perform_conversion(field, result, convert_to)
if max_value is not None and result > max_value:
raise BadMonitorConfiguration('Value of %s in field "%s" is invalid; maximum is %s' % (
str(result), field, str(max_value)), field)
if min_value is not None and result < min_value:
raise BadMonitorConfiguration('Value of %s in field "%s" is invalid; minimum is %s' % (
str(result), field, str(min_value)), field)
return result
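    # Usage sketch (hypothetical fields): string values are converted and
    # range-checked on the way out.
    #     config = MonitorConfig({'sample_interval': '30'})
    #     config.get('sample_interval', convert_to=int, min_value=1, max_value=300)   # -> 30
    #     config.get('missing', default=5)   # -> 5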
def __perform_conversion(self, field_name, value, convert_to):
value_type = type(value)
primitive_types = (int, long, float, str, unicode, bool)
if convert_to not in primitive_types:
raise Exception('Unsupported type for conversion passed as convert_to: "%s"' % str(convert_to))
if value_type not in primitive_types:
raise BadMonitorConfiguration('Unable to convert type %s for field "%s" to type %s' % (
str(value_type), field_name, str(convert_to)), field_name)
# Anything is allowed to go to str/unicode
if convert_to == str or convert_to == unicode:
return convert_to(value)
# Anything is allowed to go from string/unicode to the conversion type, as long as it can be parsed.
# Handle bool first.
if value_type in (str, unicode):
if convert_to == bool:
return str(value).lower() == 'true'
elif convert_to in (int, float, long):
try:
return convert_to(value)
except ValueError:
raise BadMonitorConfiguration('Could not parse value %s for field "%s" as numeric type %s' % (
value, field_name, str(convert_to)), field_name)
if convert_to == bool:
raise BadMonitorConfiguration('A numeric value %s was given for boolean field "%s"' % (
str(value), field_name), field_name)
# At this point, we are trying to convert a number to another number type. We only allow long to int,
# and long, int to float.
if convert_to == float and value_type in (long, int):
return float(value)
if convert_to == long and value_type == int:
return long(value)
        raise BadMonitorConfiguration('A numeric value of %s was given for field "%s" but a %s is required.' % (
            str(value), field_name, str(convert_to)), field_name)
def __iter__(self):
return self.__map.iterkeys()
def iteritems(self):
"""Returns an iterator over the items (key/value tuple) for this object."""
return self.__map.iteritems()
def itervalues(self):
"""Returns an iterator over the values for this object."""
return self.__map.itervalues()
def iterkeys(self):
"""Returns an iterator over the keys for this object."""
return self.__map.iterkeys()
def items(self):
"""Returns a list of items (key/value tuple) for this object."""
return self.__map.items()
def values(self):
"""Returns a list of values for this object."""
return self.__map.values()
def keys(self):
"""Returns a list keys for this object."""
return self.__map.keys()
def __getitem__(self, field):
        if field not in self:
            raise KeyError('Missing field "%s" in monitor config.' % field)
return self.__map[field]
def copy(self):
result = MonitorConfig()
result.__map = self.__map.copy()
return result
    def __contains__(self, key):
        """Returns True if this configuration contains a value for key."""
        return key in self.__map
def __eq__(self, other):
if other is None:
return False
if type(self) is not type(other):
return False
assert isinstance(other.__map, dict)
return self.__map == other.__map
def __ne__(self, other):
return not self.__eq__(other)
class BadMonitorConfiguration(Exception):
"""Exception indicating a bad monitor configuration, such as missing a required field."""
def __init__(self, message, field):
self.field = field
Exception.__init__(self, message)
|
/scalyr-agent-2-2.0.0.beta.3.tar.gz/scalyr-agent-2-2.0.0.beta.3/scalyr_agent/scalyr_monitor.py
| 0.688468 | 0.366987 |
scalyr_monitor.py
|
pypi
|
import re
from cStringIO import StringIO
from scalyr_agent.json_lib import JsonConversionException, JsonObject, JsonArray
# Used below to escape characters found in strings when
# writing strings as a JSON string.
ESCAPES = {
ord('"'): ('\\"', u'\\"'),
ord('\\'): ('\\\\', u'\\\\'),
ord('\b'): ('\\b', u'\\b'),
ord('\f'): ('\\f', u'\\f'),
ord('\n'): ('\\n', u'\\n'),
ord('\r'): ('\\r', u'\\r'),
ord('\t'): ('\\t', u'\\t')
}
def serialize(value, output=None, use_fast_encoding=False):
"""Serializes the specified value as JSON.
Arguments:
value: The value to write. Can be a bool, int, long, float, dict, and list. If this value
is a list or dict, then all of their elements must also be one of these types. A value of
None will be written as null.
output: If specified, this should be a StringIO object to collect the output.
use_fast_encoding: To be used only when JSON is going to be sent as part of a request to
the Scalyr servers. We support a non-spec variant that allows us to skip a UTF-8 decoding step.
Returns:
The string containing the JSON if the output argument is None. Otherwise, the results are
written to output and the output object is returned.
"""
if output is None:
output = StringIO()
# Remember that we have to return a string and not the output object.
return_as_string = True
else:
return_as_string = False
value_type = type(value)
if value is None:
output.write('null')
elif value_type is str or value_type is unicode:
output.write('"')
output.write(__to_escaped_string(value, use_fast_encoding=use_fast_encoding))
output.write('"')
elif value_type is dict or value_type is JsonObject:
output.write('{')
first = True
for key in sorted(value.iterkeys()):
if not first:
output.write(',')
output.write('"')
output.write(__to_escaped_string(key, use_fast_encoding=use_fast_encoding))
output.write('":')
serialize(value[key], output, use_fast_encoding=use_fast_encoding)
first = False
output.write('}')
elif value_type is list or value_type is JsonArray:
output.write('[')
first = True
for element in value:
if not first:
output.write(',')
serialize(element, output, use_fast_encoding=use_fast_encoding)
first = False
output.write(']')
elif value_type is int or value_type is long:
output.write(str(value))
elif value_type is bool:
if value:
output.write('true')
else:
output.write('false')
elif value_type is float:
# TODO: Handle Nan and Infinite
output.write(str(value))
else:
raise JsonConversionException('Unknown value type when attempting to serialize as json: %s' %
str(value_type))
if return_as_string:
return output.getvalue()
else:
return output
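# Illustrative behaviour (keys are emitted in sorted order, with no whitespace):
#     serialize({'b': 1, 'a': [True, None]})   # -> '{"a":[true,null],"b":1}'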
# Some regular expressions used for an optimized string escaping method
# based on code in the json lib in 2.7.
ESCAPE_OPT = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t\x7f]')
ESCAPE_DCT_OPT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
# Add in translations for \x00 to \x1f and then \x7f
for i in range(0x20):
ESCAPE_DCT_OPT.setdefault(chr(i), '\\u%0.4x' % i)
ESCAPE_DCT_OPT.setdefault(chr(127), '\\u007f')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
def __to_escaped_string(string_value, use_fast_encoding=False, use_optimization=True):
"""Returns a string that is properly escaped by JSON standards.
Arguments:
        string_value: The value to escape. Should be a str and not unicode,
            but either will work.
use_fast_encoding: If True, then uses a faster but non-spec escaping method
that only the Scalyr servers will work with.
use_optimization: If True, use the optimized path for escaping. This only
applies if string_value is 'str' and use_fast_encoding is True and
string_value does not have any high ascii. If any of these conditions are
not met, then this method falls back to the unoptimized approach.
Returns:
The escaped string.
"""
result = StringIO()
if type(string_value) is unicode:
type_index = 1
elif not use_fast_encoding:
string_value = string_value.decode('utf8')
type_index = 1
elif not use_optimization:
type_index = 0
elif HAS_UTF8.search(string_value) is None:
def replace(match):
return ESCAPE_DCT_OPT[match.group(0)]
return ESCAPE_OPT.sub(replace, string_value)
else:
type_index = 0
for x in string_value:
x_ord = ord(x)
if x_ord in ESCAPES:
result.write(ESCAPES[x_ord][type_index])
# Reference: http://www.unicode.org/versions/Unicode5.1.0/
# 127 = \u007f
# 159 = \u009f
# 8192 = \u2000
# 8447 = \u20ff
elif 0 <= x_ord <= 31 or 127 <= x_ord <= 159 or 8192 <= x_ord <= 8447:
if type_index == 0:
result.write('\\u%0.4x' % x_ord)
else:
result.write(u'\\u%0.4x' % x_ord)
else:
result.write(x)
return result.getvalue()
# TODO: Remove these methods that we used to benchmark the optimized approach.
def run_trial_optimized(string_input, count):
for i in range(count):
__to_escaped_string(string_input, use_fast_encoding=True, use_optimization=True)
return __to_escaped_string(string_input, use_fast_encoding=True, use_optimization=True)
def run_trial(string_input, count):
for i in range(count):
__to_escaped_string(string_input, use_fast_encoding=True, use_optimization=False)
return __to_escaped_string(string_input, use_fast_encoding=True, use_optimization=False)
|
/scalyr-agent-2-2.0.0.beta.3.tar.gz/scalyr-agent-2-2.0.0.beta.3/scalyr_agent/json_lib/serializer.py
| 0.613005 | 0.1982 |
serializer.py
|
pypi
|
import cStringIO
import errno
import socket
import SocketServer
import struct
import time
class ServerProcessor(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
"""Base class for simple servers that only need to accept incoming connections, perform some actions on
individual commands, and return no output.
    This abstraction can be used to implement trivial monitoring servers such as graphite or
OpenTSDB. It is meant to run in its own thread, and will spawn a new thread for each incoming
connection. Multiple commands can be received on each connection, but each command is executed
independently (there is no connection state shared between the commands on an individual connection).
To use this abstraction, you must provide implementations for the parse_request and execute_request
methods. Additionally, you should probably provide an implementation of report_connection_problem so
    that errors generated while handling an individual connection can be recorded.
"""
def __init__(self, server_port, localhost_socket=True, max_connection_idle_time=300, max_request_size=100*1024,
buffer_size=100*1024, run_state=None):
"""Creates a new instance that will accept new connections on the specified port.
To begin accepting connections, you must invoke the 'run' method.
Params:
server_port: The port on which to accept connections.
localhost_socket: If True, this instance will only bind a socket on the localhost address so that
external connections will not be accepted.
max_connection_idle_time: The maximum idle time to allow for a connection before it is closed down.
The idle time is calculated by the last time a complete request was received on the connection.
max_request_size: The maximum request size to allow in bytes.
            buffer_size: The buffer size for receiving incoming requests per connection. This must be at least
                as large as max_request_size.
run_state: The run_state instance that controls when this instance should stop accepting new connections.
"""
if localhost_socket:
server_address = ('localhost', server_port)
else:
server_address = (socket.gethostname(), server_port)
# Allow for quick server socket re-use. This helps prevent problems if we are quickly restarting the agent.
self.allow_reuse_address = True
self.max_connection_idle_time = max_connection_idle_time
self.max_request_size = max_request_size
self.buffer_size = buffer_size
self.run_state = run_state
# Make sure our shutdown method is invoked when run_state becomes stopped.
if run_state is not None:
self.run_state.register_on_stop_callback(self.shutdown)
SocketServer.TCPServer.__init__(self, server_address, ConnectionHandler)
def run(self):
"""Begins accepting new connections and processing the incoming requests.
This does not return until either the 'shutdown' method is invoked or the run_state instance passed into
the initializer is stopped.
"""
self.serve_forever()
def parse_request(self, request_input, num_available_bytes):
"""Reads the incoming data from an individual connection's request_input buffer (file object) and returns a
string containing the next complete request that should be processed by this instance.
Derived classes must override this method. You may use some helper parsers such as LineRequestParser
and Int32RequestParser to implement this method.
The request_input's read position is set to point at the next byte to read. Any bytes that are read by
this method are considered to be consumed and will not be passed to any future 'parse_request' invocations
even if this method returns None. Care must be taken such that only the bytes that are part of the current
request are consumed by this method.
If a complete request is not found in request_input then None should be returned. When more input is
received on a connection, this method will be invoked again at this same position.
Params:
request_input: A file object that holds the incoming data for an individual connection. It is implemented
                using a StringIO instance, so it is efficient to use 'seek' and 'tell' to reposition the read position
                if necessary. (This is useful if an incomplete server request was found and you need to reset the
                position to the start.) Note, the position of the buffer is not guaranteed to be zero (previous
                commands may still be in the buffer.)
num_available_bytes: The number of bytes between the current read position and the end of the buffer.
Returns:
A string containing the next complete request that should be executed for this connection, or None
if there is no complete request. Additionally, if request_input's read position has been moved,
then those bytes are considered to be consumed for the connection.
"""
pass
def execute_request(self, request):
"""Executes a request that was returned by a previous invocation to 'parse_request'.
Derived classes must implement this method to take some action on the request.
Params:
            request: The request to execute.
"""
def report_connection_problem(self, exception):
"""Report an exception raised while handling an individual connection.
If this is invoked, it also means that the connection that generated this exception will be closed.
Params:
exception: The exception that was thrown. This is always invoked in an exception handler, so it
is valid to report the exception using a logger's 'exception' method.
"""
pass
class LineRequestParser(object):
"""Simple abstraction that implements a 'parse_request' that can be used to parse incoming requests
that are terminated by either '\n' or '\r\n'.
"""
def __init__(self, max_request_size):
"""Creates a new instance.
Params:
max_request_size: The maximum number of bytes that can be contained in an individual request.
"""
self.__max_request_size = max_request_size
def parse_request(self, input_buffer, num_bytes):
"""Returns the next complete request from 'input_buffer'.
If there is a complete line at the start of 'input_buffer' (where complete line is determined by it
ending in a newline character), then consumes those bytes from 'input_buffer' and returns the string
including the newline. Otherwise None is returned and no bytes are consumed from 'input_buffer'
Params:
input_buffer: The bytes to read.
num_bytes: The number of bytes available in 'input_buffer'.
Returns:
A string containing the next complete request read from 'input_buffer' or None if there is none.
Raises:
RequestSizeExceeded if a line is found to exceed the maximum request size.
"""
original_position = None
try:
original_position = input_buffer.tell()
line = input_buffer.readline(self.__max_request_size + 1)
bytes_received = len(line)
if bytes_received > self.__max_request_size:
                # We just consume these bytes if the line exceeded the maximum. To some degree, this
# typically does not matter since once we see any error on a connection, we close it down.
original_position = None
raise RequestSizeExceeded(bytes_received, self.__max_request_size)
# Check to see if a complete line was actually returned since if readline() hit EOF, then
# it will return whatever line was left without a newline.
if bytes_received > 0 and line[-1] == '\n':
original_position = None
return line
finally:
if original_position is not None:
input_buffer.seek(original_position)
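    # Behaviour sketch: only complete newline-terminated requests are consumed;
    # a trailing partial request is left for a later call once more data arrives.
    #     parser = LineRequestParser(max_request_size=1024)
    #     buf = cStringIO.StringIO('PUT a 1\nPUT b')
    #     parser.parse_request(buf, 13)   # -> 'PUT a 1\n'
    #     parser.parse_request(buf, 5)    # -> None (read position restored)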
class Int32RequestParser(object):
"""Simple abstraction that implements a 'parse_request' that can be used to parse incoming requests
that are sent using an integer prefix format.
This supports binary protocols where each request is prefixed by a 4 byte integer in network order
that specifies the size of the request in bytes. Those bytes are then read from the input stream.
Some monitor protocols, such as Graphite's pickle protocol uses this format.
"""
def __init__(self, max_request_size):
"""Creates a new instance.
Params:
max_request_size: The maximum number of bytes that can be contained in an individual request.
"""
self.__format = "!I"
self.__prefix_length = struct.calcsize(self.__format)
self.__max_request_size = max_request_size
def parse_request(self, input_buffer, num_bytes):
"""Returns the next complete request from 'input_buffer'.
If there is a complete request at the start of 'input_buffer', it is returned. A complete request
is one whose initial 4 byte length prefixed has been received as well as the number of bytes specified
in that prefix. This method will consume all of those bytes and return only the complete request
payload (not the initial 4 byte length field). If no request is found, then None is returned and
no bytes are consumed from 'input_buffer'
Params:
input_buffer: The bytes to read.
num_bytes: The number of bytes available in 'input_buffer'.
Returns:
A string containing the next complete request read from 'input_buffer' or None if there is none.
Raises:
RequestSizeExceeded if a line is found to exceed the maximum request size.
"""
original_position = None
try:
original_position = input_buffer.tell()
# Make sure we have 4 bytes so that we can at least read the length prefix, and then try to read
# the complete data payload.
if num_bytes > self.__prefix_length:
length, = struct.unpack(self.__format, input_buffer.read(self.__prefix_length))
if length > self.__max_request_size:
raise RequestSizeExceeded(length, self.__max_request_size)
if length + self.__prefix_length <= num_bytes:
original_position = None
return input_buffer.read(length)
return None
finally:
if original_position is not None:
input_buffer.seek(original_position)
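    # Behaviour sketch: a request is returned only once its 4-byte network-order
    # length prefix and the full payload have both arrived.
    #     parser = Int32RequestParser(max_request_size=1024)
    #     buf = cStringIO.StringIO(struct.pack('!I', 5) + 'hello')
    #     parser.parse_request(buf, 9)    # -> 'hello'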
class ConnectionIdleTooLong(Exception):
"""Raised when the time since a connection last received a complete request has exceeded the maximum connection
idle time.
"""
def __init__(self, time_since_last_activity, max_inactivity):
Exception.__init__(self, 'Connection has been idle too long. No data has been received for '
'%d seconds and limit is %d' % (time_since_last_activity, max_inactivity))
class RequestSizeExceeded(Exception):
"""Raised when an incoming request has exceeded the maximum allowed request size.
"""
def __init__(self, request_size, max_request_size):
Exception.__init__(self, 'The current request size of %d exceeded maximum allowed of %d' % (
request_size, max_request_size))
class ConnectionProcessor(object):
"""An internal abstraction that reads requests from an incoming request_stream and executes them.
This manages an individual connection, including raising a 'ConnectionIdleTooLong' exception when a
complete request has not been received in the allowed time.
"""
# This abstraction exists really only for testing purposes. It could have been implemented as part of
# 'ConnectionHandler', but since that class derives from 'SocketServer.BaseRequestHandler', it is
# difficult to test. That is because 'SocketServer.BaseRequestHandler' does non-test-friendly things like
# invoking 'handle' as part of instance initialization.
def __init__(self, request_stream, request_executor, run_state, max_connection_idle_time):
"""Returns a new instance. You must invoke 'run' to begin processing requests.
Params:
            request_stream: An instance of 'RequestStream' containing the incoming bytes for a connection.
            request_executor: A method to invoke to execute each request; it takes a single argument, the request.
            run_state: The run_state that controls when this process should stop.
            max_connection_idle_time: The maximum number of seconds to wait between requests before raising a
ConnectionIdleTooLong exception and closing the connection.
"""
self.__request_stream = request_stream
self.__request_executor = request_executor
self.__run_state = run_state
self.__max_connection_idle_time = max_connection_idle_time
self.__last_request_time = None
# The amount of time in seconds to sleep when there are no bytes available on the socket until we check
# again.
self.__polling_interval = 0.5
def run(self):
"""Begins reading requests from the incoming request stream and executing them.
This will not return until the 'run_state' instance passed in at initialization has been stopped.
"""
while self.run_single_cycle(time.time()):
pass
def run_single_cycle(self, current_time=None):
"""Performs a single cycle of reading the next available request and executing it, or waiting for
the polling interval for the next request.
This is exposed only for testing purposes.
Arguments:
current_time: If provided, uses the specified time as the current time. Used for testing.
Returns: False if either the original stream has been closed or if the run state is now stopped.
"""
if current_time is None:
current_time = time.time()
if self.__last_request_time is None:
self.__last_request_time = current_time
request = self.__request_stream.read_request(timeout=self.__polling_interval, run_state=self.__run_state)
if request is not None:
self.__request_executor(request)
self.__last_request_time = current_time
elif current_time - self.__last_request_time > self.__max_connection_idle_time:
            raise ConnectionIdleTooLong(current_time - self.__last_request_time, self.__max_connection_idle_time)
return self.__run_state.is_running() and not self.__request_stream.at_end()
class ConnectionHandler(SocketServer.BaseRequestHandler):
    """The handler class that is used by ServerProcessor to handle incoming connections.
    """
    # The bulk of the work for this class is actually implemented in ConnectionProcessor to allow for
# easier testing.
def handle(self):
try:
# Create an instance of RequestStream for the incoming connection and then use a ConnectionProcessor
# to handle it.
request_stream = RequestStream(self.request, self.server.parse_request,
max_buffer_size=self.server.buffer_size,
max_request_size=self.server.max_request_size)
processor = ConnectionProcessor(request_stream, self.server.execute_request, self.server.run_state,
self.server.max_connection_idle_time)
processor.run()
except Exception, e:
self.server.report_connection_problem(e)
class RequestStream(object):
"""Provides a specialized buffered interface for reading requests from a socket.
This essentially puts a memory buffer in front of an incoming socket to efficiently read requests from
the incoming stream and reset the read position when required by incomplete requests.
"""
def __init__(self, incoming_socket, request_parser, max_buffer_size=100*1024, max_request_size=100*1024):
"""Creates a new instance.
Params:
incoming_socket: The incoming socket.
request_parser: A method that will attempt to parse the next complete request from the incoming
stream and return it if possible. Takes two arguments, an StringIO containing the incoming
bytes and an integer specifying the number of bytes available in the buffer.
max_buffer_size: The maximum buffer to use for buffering the incoming requests.
max_request_size: The maximum allowed size for each request.
"""
        # We use non-blocking sockets so that we can respond more quickly when the agent is
# shutting down. It does mean there will be some delay between bytes showing up on the
# connection and when we read them.
incoming_socket.setblocking(0)
self.__socket = incoming_socket
self.__request_parser = request_parser
self.__max_buffer_size = max_buffer_size
self.__max_request_size = max_request_size
if self.__max_buffer_size < self.__max_request_size:
raise Exception('You cannot have a max buffer size smaller than your max request size (%d > %d)' % (
self.__max_buffer_size, self.__max_request_size))
# The number of bytes in _buffer.
self.__current_buffer_size = 0
# Whether or not the socket has been closed and the stream should be considered at the end.
self.__at_end = False
# The actual buffer. We will maintain an invariant that the position of the buffer is always pointing at
# the next byte to read.
self.__buffer = cStringIO.StringIO()
def read_request(self, timeout=0.5, run_state=None):
"""Attempts to read the next complete request from the socket and return it.
If there is no request to be immediately read from the socket, will wait for 'timeout' seconds for
more data to be received on the socket and read the request from that.
Params:
timeout: The number of seconds to wait for more data on the socket if there is currently no complete
request.
run_state: If run_state's 'stop' method is invoked, then this method will attempt to return
as quickly as possible (not waiting the full 'timeout' period.)
Returns:
The request if one was found. Otherwise None, which can either indicate the socket was closed or
'timeout' expired and no request was still available. You may invoke 'at_end' to determine if the
socket has been closed.
"""
do_full_compaction = True
try:
# Try to read the request from the already existing buffered input if possible.
bytes_available_to_read = self.__get_buffer_write_position() - self.__get_buffer_read_position()
if bytes_available_to_read > 0:
parsed_request = self.__request_parser(self.__buffer, bytes_available_to_read)
if parsed_request is not None:
do_full_compaction = False
return parsed_request
# No data immediately available. Wait a few milliseconds for some more to come in.
if not self.__sleep_until_timeout_or_stopped(timeout, run_state):
return None
do_full_compaction = True
data = self.__socket.recv(self.__max_buffer_size - self.__get_buffer_write_position())
# If we get nothing back, then the connection has been closed. If it is not closed and there is
# no data, then we would get a socket.timeout or socket.error which are handled below.
if not data:
self.__at_end = True
return None
# Add the new bytes to the buffer.
bytes_available_to_read += len(data)
self.__add_to_buffer(data)
# Attempt to parse.
parsed_request = self.__request_parser(self.__buffer, bytes_available_to_read)
if parsed_request is not None:
# The parser should be checking the max request size as well, but we do a quick double
# check here as well.
if len(parsed_request) > self.__max_request_size:
raise RequestSizeExceeded(len(parsed_request), self.__max_request_size)
do_full_compaction = False
return parsed_request
            # If we didn't find a complete request but we are already at our maximum buffer size, then we have
            # no more room to read additional bytes to complete the request, so treat it as an error.
if bytes_available_to_read == self.__max_buffer_size:
raise RequestSizeExceeded(self.__max_buffer_size, self.__max_buffer_size)
return None
except socket.timeout:
return None
except socket.error, e:
if e.errno == errno.EAGAIN:
return None
else:
raise e
finally:
# We do a full compaction in general if we did not return anything and there is no room
# left to copy new bytes in.
if do_full_compaction and self.__get_buffer_write_position() == self.__max_buffer_size:
                # Do a compaction if our buffer is at the limit but has bytes at the front that have
                # already been consumed (i.e., our read position is not at the beginning of the buffer), so we
                # can reclaim the space before it to make room.
self.__full_compaction()
else:
# Always try to do a quick compaction to make room in the buffer.
self.__quick_compaction()
def at_end(self):
"""Returns True if the underlying socket has been closed.
"""
return self.__at_end
def get_buffer_size(self):
"""Returns the size of the underlying buffer, which may also include bytes for requests already returned
by 'read_request'.
This is used just for testing purposes.
"""
return self.__get_buffer_write_position()
def __add_to_buffer(self, new_data):
"""Adds 'new_data' to the underlying buffer.
Params:
new_data: A string containing the new bytes to add.
"""
# Get the original position because this is the current read position. We need to move our position
# to the end of the buffer to do the write, and then move it back to this read position.
original_position = None
try:
original_position = self.__buffer.tell()
self.__buffer.seek(0, 2)
self.__buffer.write(new_data)
self.__current_buffer_size = self.__buffer.tell()
finally:
if original_position is not None:
self.__buffer.seek(original_position)
def __get_buffer_write_position(self):
"""Returns the current write position of the buffer relative to the start of the buffer. This is where
new bytes will be added. This essentially says how many bytes the entire buffer is consuming."""
return self.__current_buffer_size
def __get_buffer_read_position(self):
"""Returns the current read position of the buffer. This is where we will next attempt to read bytes
        to parse server requests. It is relative to the start of the entire buffer, which may contain
bytes that have already been returned by previous 'read_request' invocations."""
# We have an invariant that the current position is always the read position.
return self.__buffer.tell()
def __sleep_until_timeout_or_stopped(self, timeout, run_state):
"""Sleeps for the specified number of seconds, unless 'run_state' becomes stopped in which case
it attempts to return as quickly as possible.
Params:
timeout: The number of seconds to sleep.
            run_state: If not None, then will attempt to return as quickly as possible if 'run_state' becomes stopped.
Returns:
True if 'run_state' is not stopped.
"""
if run_state is not None:
run_state.sleep_but_awaken_if_stopped(timeout)
return run_state.is_running()
else:
time.sleep(timeout)
return True
def __full_compaction(self):
"""Compacts the memory buffer by remove all bytes that have already been read.
"""
# Read the leftover data and write it into a new buffer.
remaining_data = self.__buffer.read()
self.__buffer.close()
self.__buffer = cStringIO.StringIO()
self.__buffer.write(remaining_data)
self.__current_buffer_size = self.__buffer.tell()
self.__buffer.seek(0)
def __quick_compaction(self):
"""Attempts a quick compaction by seeing if all bytes in the current buffer have been read. If so,
we can just throw out the old buffer and create a new one since we do not need to copy any leftover bytes.
"""
if self.__get_buffer_read_position() == self.__get_buffer_write_position():
self.__buffer.close()
self.__buffer = cStringIO.StringIO()
self.__current_buffer_size = 0
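# Illustrative sketch (not part of the original module) of the compaction idea used
# above, using io.BytesIO in place of cStringIO for portability. The helper name is
# hypothetical.
def _demo_compaction():
    import io
    buf = io.BytesIO()
    buf.write(b"request-1\nreq")       # one complete request plus a partial one
    buf.seek(0)
    buf.readline()                     # consume "request-1\n"; read position advances
    # full compaction: copy only the unread tail into a fresh buffer
    remaining = buf.read()
    buf = io.BytesIO()
    buf.write(remaining)
    buf.seek(0)
    assert buf.getvalue() == b"req"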
|
/scalyr-agent-2-2.0.0.beta.3.tar.gz/scalyr-agent-2-2.0.0.beta.3/scalyr_agent/monitor_utils/server_processors.py
| 0.717705 | 0.332229 |
server_processors.py
|
pypi
|
import os
import re
import threading
import time
import scalyr_agent.third_party.tcollector.tcollector as tcollector
from Queue import Empty
from scalyr_agent.scalyr_monitor import ScalyrMonitor
from scalyr_agent.third_party.tcollector.tcollector import ReaderThread
from scalyr_agent.json_lib.objects import JsonObject
from scalyr_agent.util import StoppableThread
class TcollectorOptions(object):
"""Bare minimum implementation of an object to represent the tcollector options."""
def __init__(self):
# The collector directory.
self.cdir = None
# An option we created to prevent the tcollector code from failing on fatal in certain locations.
# Instead, an exception will be thrown.
self.no_fatal_on_error = True
class WriterThread(StoppableThread):
"""A thread that pulls lines off of a reader thread and writes them to the log. This is needed
to replace tcollector's SenderThread which sent the lines to a tsdb server. Instead, we write them
to our log file.
"""
def __init__(self, monitor, queue, logger, error_logger):
"""Initializes the instance.
Arguments:
monitor: The monitor instance associated with this tcollector.
queue: The Queue of lines (strings) that are pending to be written to the log. These should
come from the ReaderThread as it reads and transforms the data from the running collectors.
logger: The Logger to use to report metric values.
error_logger: The Logger to use to report diagnostic information about the running of the monitor.
"""
StoppableThread.__init__(self, name='tcollector writer thread')
self.__monitor = monitor
self.__queue = queue
self.__max_uncaught_exceptions = 100
self.__logger = logger
self.__error_logger = error_logger
self.__timestamp_matcher = re.compile('(\\S+)\\s+\\d+\\s+(.*)')
self.__key_value_matcher = re.compile('(\\S+)=(\\S+)')
def __rewrite_tsdb_line(self, line):
"""Rewrites the TSDB line emitted by the collectors to the format used by the agent-metrics parser."""
# Strip out the timestamp that is the second token on the line.
match = self.__timestamp_matcher.match(line)
if match is not None:
line = '%s %s' % (match.group(1), match.group(2))
# Now rewrite any key/value pairs from foo=bar to foo="bar"
line = self.__key_value_matcher.sub('\\1="\\2"', line)
return line
def run(self):
errors = 0 # How many uncaught exceptions in a row we got.
while self._run_state.is_running():
try:
try:
line = self.__rewrite_tsdb_line(self.__queue.get(True, 5))
except Empty:
continue
# It is important that we check is_running before we act upon any element
# returned by the queue. See the 'stop' method for details.
if not self._run_state.is_running():
continue
self.__logger.info(line, metric_log_for_monitor=self.__monitor)
while True:
try:
line = self.__rewrite_tsdb_line(self.__queue.get(False))
except Empty:
break
if not self._run_state.is_running():
continue
self.__logger.info(line, metric_log_for_monitor=self.__monitor)
errors = 0 # We managed to do a successful iteration.
except (ArithmeticError, EOFError, EnvironmentError, LookupError,
ValueError):
errors += 1
if errors > self.__max_uncaught_exceptions:
raise
                self.__error_logger.exception('Uncaught exception in WriterThread, ignoring')
self._run_state.sleep_but_awaken_if_stopped(1)
continue
def stop(self, wait_on_join=True, join_timeout=5):
self._run_state.stop()
# This thread may be blocking on self.__queue.get, so we add a fake entry to the
# queue to get it to return. Since we set run_state to stop before we do this, and we always
# check run_state before acting on an element from queue, it should be ignored.
if self._run_state.is_running():
self.__queue.put('ignore this')
StoppableThread.stop(self, wait_on_join=wait_on_join, join_timeout=join_timeout)
class SystemMetricsMonitor(ScalyrMonitor):
"""A Scalyr agent monitor that records system metrics using tcollector.
"""
def __init__(self, monitor_config, logger):
"""Creates an instance of the monitor.
Arguments:
            monitor_config: The dict containing the configuration for this monitor as given in the configuration file.
            logger: The Logger instance to use to report diagnostic information about the running of this
monitor.
"""
ScalyrMonitor.__init__(self, monitor_config, logger)
# Set up tags for this file.
tags = JsonObject()
if 'tags' in monitor_config:
tags = monitor_config['tags']
if not type(tags) is dict:
raise Exception('The tags field in the configuration for the system_metrics module is not a dict '
'as expected')
# Make a copy just to be safe.
tags = JsonObject(content=tags)
tags['parser'] = 'agent-metrics'
self.log_config = {
'attributes': tags,
'parser': 'agent-metrics',
'path': 'linux_system_metrics.log',
}
collector_directory = SystemMetricsMonitor.__get_collectors_directory()
if 'collectors_directory' in monitor_config:
collector_directory = os.path.realpath(monitor_config['collectors_directory'])
if not os.path.isdir(collector_directory):
raise Exception('No such directory for collectors: %s' % collector_directory)
self.options = TcollectorOptions()
self.options.cdir = collector_directory
self.modules = tcollector.load_etc_dir(self.options, tags)
self.tags = tags
def run(self):
"""Begins executing the monitor, writing metric output to logger.
Arguments:
logger: The Logger instance to use to write all information
gathered by the monitor. All non-diagnostic output should
be emitted here.
"""
tcollector.override_logging(self._logger)
tcollector.reset_for_new_run()
# At this point we're ready to start processing, so start the ReaderThread
# so we can have it running and pulling in data from reading the stdins of all the collectors
# that will be soon running.
reader = ReaderThread(0, 300, self._run_state)
reader.start()
# Start the writer thread that grabs lines off of the reader thread's queue and emits
# them to the log.
writer = WriterThread(self, reader.readerq, self._logger, self._logger)
writer.start()
# Now run the main loop which will constantly watch the collector module files, reloading / spawning
# collectors as necessary. This should only terminate once is_stopped becomes true.
tcollector.main_loop(self.options, self.modules, None, self.tags, False, self._run_state)
self._logger.debug('Shutting down')
tcollector.shutdown(invoke_exit=False)
writer.stop(wait_on_join=False)
self._logger.debug('Shutting down -- joining the reader thread.')
reader.join(1)
self._logger.debug('Shutting down -- joining the writer thread.')
writer.stop(join_timeout=1)
@staticmethod
def __get_collectors_directory():
# We determine the collectors directory by looking at the parent directory of where this file resides
# and then going down third_party/tcollector/collectors.
return os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, 'third_party', 'tcollector',
'collectors')
if __name__ == "__main__":
# Run in stand-alone mode for testing, just emitting log to stdout.
ScalyrMonitor.run_standalone_monitor(SystemMetricsMonitor)
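# Illustrative sketch (not part of the original module) of the rewrite performed by
# WriterThread.__rewrite_tsdb_line: drop the timestamp token and quote key=value pairs.
# The helper name and sample line are hypothetical.
def _demo_tsdb_rewrite():
    line = 'proc.loadavg.1min 1404770000 0.42 host=web1'
    line = re.sub(r'(\S+)\s+\d+\s+(.*)', r'\1 \2', line)
    line = re.sub(r'(\S+)=(\S+)', r'\1="\2"', line)
    assert line == 'proc.loadavg.1min 0.42 host="web1"'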
|
/scalyr-agent-2-2.0.0.beta.3.tar.gz/scalyr-agent-2-2.0.0.beta.3/scalyr_agent/builtin_monitors/linux_system_metrics.py
| 0.731442 | 0.281399 |
linux_system_metrics.py
|
pypi
|
import re
import os
from scalyr_agent.scalyr_monitor import ScalyrMonitor
# Pattern that matches the first line of a string
first_line_pattern = re.compile('[^\r\n]+')
# ShellMonitor implementation
class ShellMonitor(ScalyrMonitor):
"""A Scalyr agent monitor which executes a specified shell command, and records the output.
"""
def _initialize(self):
# Fetch and validate our configuration options.
self.command = self._config.get("command", required_field=True)
self.max_characters = self._config.get("max_characters", default=200, convert_to=int, min_value=0,
max_value=10000)
self.log_all_lines = self._config.get("log_all_lines", default=False)
extract_expression = self._config.get("extract", default="")
if extract_expression:
self.extractor = re.compile(extract_expression)
# Verify that the extract expression contains a matching group, i.e. a parenthesized clause.
# We perform a quick-and-dirty test here, which will work for most regular expressions.
# If we miss a bad expression, it will result in a stack trace being logged when the monitor
# executes.
if extract_expression.find("(") < 0:
raise Exception("extract expression [%s] must contain a matching group" % (extract_expression))
else:
self.extractor = None
def gather_sample(self):
# Run the command
command = self.command
stdin, stdout, stderr = os.popen3(command)
stdout_text = stdout.read()
stderr_text = stderr.read()
stdin.close()
stdout.close()
stderr.close()
output = stderr_text
if len(stderr_text) > 0 and len(stdout_text) > 0:
output += "\n"
output += stdout_text
# Apply any extraction pattern
if self.extractor is not None:
match = self.extractor.search(output)
if match is not None:
output = match.group(1)
# Apply log_all_lines and max_characters, and record the result.
if self.log_all_lines:
s = output
else:
first_line = first_line_pattern.search(output)
s = ''
if first_line is not None:
s = first_line.group().strip()
if len(s) > self.max_characters:
s = s[:self.max_characters] + "..."
self._logger.emit_value('output', s, extra_fields={'command': self.command, 'length': len(output)})
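# Illustrative sketch (not part of the original module) of the first-line extraction and
# max_characters truncation applied in gather_sample above. The helper name is hypothetical.
def _demo_output_trimming():
    output = "load average: 0.42\nextra diagnostic line"
    s = first_line_pattern.search(output).group().strip()
    assert s == "load average: 0.42"
    max_characters = 10
    if len(s) > max_characters:
        s = s[:max_characters] + "..."
    assert s == "load avera..."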
|
/scalyr-agent-2-2.0.0.beta.3.tar.gz/scalyr-agent-2-2.0.0.beta.3/scalyr_agent/builtin_monitors/shell_monitor.py
| 0.437944 | 0.277754 |
shell_monitor.py
|
pypi
|
import datetime as dt
from scam.CommManager import CommManager
class CLI:
def __init__(self):
try:
self.cm = CommManager()
except BaseException as e:
print('Error: {0}'.format(str(e)))
# This function retrieves the reservation requested by the user, either by name or by ID.
# If the user chooses the search by ID, a single reservation is shown if it's found.
    # If the user chooses the search by name and there exist multiple reservations under that name, all of them
    # are displayed
def command_1(self):
try:
command = str(input('Commands: type 1 to get a reservation from a name, 2 to get a reservation from an ID \n'))
if command == '1':
name = str(input('Insert name:'))
result = self.cm.get_reservations(name=name)
for res in result:
print_result(res)
elif command == '2':
res_id = int(input('Insert ID:'))
result = self.cm.get_reservations(reservation_id=res_id)
print_result(result)
else:
print('Invalid command')
except BaseException as e:
print('Error: {0}'.format(str(e)))
# This function allows the user to submit a new reservation
def command_2(self):
try:
res = self.input_reservation()
check = self.cm.make_reservation(location=res['destination'],
persons=res['n_people'],
name=res['res_name'],
phone_number=res['phone'],
reservation=res['timestamp'])
if check:
                print('Success: your reservation id is {}'.format(check))
else:
raise BaseException('Failed')
except BaseException as e:
print('Error: {0}'.format(str(e)))
# This function allows the user to cancel a reservation by its ID
def command_3(self):
try:
res_id = int(input('Type the ID of the reservation you want to delete \n'))
res = self.cm.delete_reservation(reservation_id=res_id)
if res:
print('Reservation {} canceled'.format(res_id))
else:
print('Reservation {} not found'.format(res_id))
except BaseException as e:
print('Error: {0}'.format(str(e)))
# This function allows the user to look up a reservation by its ID and to modify its attributes
def command_4(self):
try:
res_id = int(input('Type the ID of the reservation you want to modify: \n'))
reservation = self.cm.get_reservations(reservation_id=res_id)
print_result(reservation)
print("Modified reservation:")
new_reservation = self.input_reservation()
check = self.cm.modify_reservation(
reservation_id=res_id,
location=new_reservation['destination'],
name=new_reservation['res_name'],
phone_number=new_reservation['phone'],
persons=new_reservation['n_people'],
reservation=new_reservation['timestamp']
)
if check:
print('Success')
else:
raise BaseException('Failed')
except BaseException as e:
print('Error: {0}'.format(str(e)))
# This function executes the CLI
def run(self):
try:
cmd = str(input('Commands: type 1 to look up, 2 to book, 3 to cancel, '
'4 to modify a reservation, 5 to exit \n'))
while cmd != '5':
if cmd == '1':
self.command_1()
elif cmd == '2':
self.command_2()
elif cmd == '3':
self.command_3()
elif cmd == '4':
self.command_4()
else:
print('Invalid command')
cmd = str(input('Commands: type 1 to look up, 2 to book, 3 to delete, '
'4 to modify a reservation, 5 to exit \n'))
print('Exit')
except BaseException as e:
print('Error: {0}'.format(str(e)))
# This function reads the attributes necessary to define a reservation
# and returns a dictionary containing these attributes (destination, timestamp, n_people, res_name, phone)
def input_reservation(self):
destinations = self.cm.get_destinations()
print('Type a destination number: ')
print_destinations(destinations)
destination = int(input())
if not check_destination(destination, destinations):
raise ValueError('Invalid destination. Type a valid destination number')
date_str = input('Type datetime as d-m-Y H:M \n')
date_dt = dt.datetime.strptime(date_str, '%d-%m-%Y %H:%M')
timestamp = int(dt.datetime.timestamp(date_dt))
if not check_date(date_dt):
raise ValueError('Datetime must come after current datetime')
n_people = int(input('Number of people \n'))
res_name = input('Under the name of \n')
phone = input('Phone number \n')
return {
'destination': destination,
'timestamp': timestamp,
'n_people': n_people,
'res_name': res_name,
'phone': phone
}
# This function checks whether the input date comes after the current date
# date: datetime
def check_date(date):
return isinstance(date, dt.datetime) and date > dt.datetime.now()
# This function checks whether the input destination code is an available destination
# destination: code (int) provided by the user
# destinations: dictionary containing destination codes and their names
def check_destination(destination, destinations):
keys = [d['id'] for d in destinations]
return destination in keys
# This function prints the destination names and their code, so that the user can input a valid code
def print_destinations(destinations):
for d in destinations:
print(str(d['id']) + '. ' + d['name'])
# This function prints the reservation attributes
# res is a dictionary containing these attributes
def print_result(res):
for key in res:
if key != 'reservation':
print("{0}: {1}".format(str(key), str(res[key])))
else:
print("{0}: {1}".format(str(key), str(dt.datetime.fromtimestamp(int(res[key])))))
|
/scam_me-0.2.1-py3-none-any.whl/scam/CLI.py
| 0.437583 | 0.189784 |
CLI.py
|
pypi
|
from keras import Model
import numpy as np
from scam.exceptions import InvalidState
from scam.utils import resize_activations, normalize_activations
class ScoreCAM:
def __init__(self, model_input, last_conv_output, softmax_output, input_shape, cam_batch_size=None):
"""
Prepares class activation mappings
:param model_input: input layer of CNN, normally takes batch of images as an input. Currently batch must be limited to a single image
:param last_conv_output: last convolutional layer. The last conv layer contains the most complete information about image.
:param softmax_output: flat softmax (or similar) layer describing the class certainty
        :param input_shape: Expecting a batch of a single input sample (1 x M x N x ...); it is assumed that a 2D image of M x N dimensions is served as the input, which can be multiplied with a 2D mask.
        :param cam_batch_size: Optional, defaults to None, which results in inference with batches of size 32.
"""
self.model_input = model_input
self.last_conv_output = last_conv_output
self.softmax_output = softmax_output
self.last_conv_model = Model(inputs=model_input, outputs=last_conv_output)
self.softmax_model = Model(inputs=model_input, outputs=softmax_output)
self.input_shape = input_shape
self.cam_batch_size = cam_batch_size
self.normalized_maps = None
self.classes_activation_scale = None
def prepare_cam(self, input):
output_conv = self.last_conv_model.predict(input)
# Only first image from convolutions will be used
resized = resize_activations(output_conv[0], self.input_shape)
# filter_size x input_shape[0] x input_shape[1] - resized to original input dimensions
normalized_maps = normalize_activations(resized)
# repeat input
repeat_input = np.tile(input, (normalized_maps.shape[0], 1, 1, 1))
expanded_activation_maps = np.expand_dims(normalized_maps, axis=3)
masked_images = np.multiply(repeat_input, expanded_activation_maps)
# input: filter_size x input_shape[0] x input_shape[1] -> Output filter_size x Classes_Count
self.classes_activation_scale = self.softmax_model.predict(masked_images,
batch_size=self.cam_batch_size)
self.normalized_maps = normalized_maps
def get_class_heatmap(self, class_id):
if self.normalized_maps is None or self.classes_activation_scale is None:
raise InvalidState('Call prepare_cam before accessing get_class_heatmap, '
'activations must be prepared via prepare_cam')
final_weights = self.classes_activation_scale[:, class_id]
final_maps = np.multiply(self.normalized_maps, final_weights.reshape((-1, 1, 1)))
# ReLU
final_maps_max = np.max(final_maps, axis=0)
final_class_activation_map = np.where(final_maps_max > 0, final_maps_max, 0)
return final_class_activation_map
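# Usage sketch (not part of the original module): `model` can be any Keras CNN and
# `image_batch` a preprocessed batch of shape (1, H, W, C). Picking layers[-3] as the
# last convolutional layer is an assumption that depends on the architecture.
def _demo_score_cam(model, image_batch, class_id=0):
    cam = ScoreCAM(model_input=model.input,
                   last_conv_output=model.layers[-3].output,  # assumed last conv layer
                   softmax_output=model.output,
                   input_shape=image_batch.shape[1:3])
    cam.prepare_cam(image_batch)
    return cam.get_class_heatmap(class_id)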
|
/scam_net_rewintous-0.0.1-py3-none-any.whl/scam/keras.py
| 0.909636 | 0.565179 |
keras.py
|
pypi
|
import scamp
from inspect import signature
from functools import partial
from typing import Callable
class KeyPlane:
"""
Abstraction used to transform keyboard input into a two-dimensional control space. Each key pressed corresponds
to a vertical and horizontal position, which is passed to the callback function, along with (optionally)
whether it was an up or down keystroke and what modifiers were present.
:param callback: a function of the form "callback(coordinates, up_or_down, modifiers)". Can also have the
signature "callback(coordinates, up_or_down)" or "callback(coordinates)", in which case it is simply not
passed that information.
    :param normalize_coordinates: if True, the key coordinates go from 0-1 horizontally and vertically. If False,
they go from 0-3 (inclusive) vertically and 0-9, 10 or 11 horizontally (depending on row)
:ivar modifiers_down: a list of modifier keys currently pressed
"""
_key_codes_by_row_and_column = [
[90, 88, 67, 86, 66, 78, 77, 188, 190, 191],
[65, 83, 68, 70, 71, 72, 74, 75, 76, 186, 222],
[81, 87, 69, 82, 84, 89, 85, 73, 79, 80, 219, 221],
[49, 50, 51, 52, 53, 54, 55, 56, 57, 48, 189, 187]
]
#: tuple of all names of modifier keys
all_modifiers = ("ctrl", "alt", "shift", "cmd", "caps_lock", "tab",
"enter", "backspace", "up", "left", "down", "up")
def __init__(self, callback: Callable, normalize_coordinates: bool = False):
self.callback = callback
self.normalize_coordinates = normalize_coordinates
self.modifiers_down = []
@property
def callback(self) -> Callable:
"""
Callback function as described in the constructor, which responds to keyboard presses.
"""
return self._callback
@callback.setter
def callback(self, value):
assert callable(value)
self._num_callback_arguments = len(signature(value).parameters)
assert self._num_callback_arguments > 0, "KeyPlane callback must take from one to three arguments."
self._callback = value
def start(self, suppress: bool = False, blocking: bool = False, session: bool = None) -> None:
"""
Starts up the KeyPlane listening to keyboard input.
:param suppress: if True, suppresses all other keyboard events so that nothing gets triggered unintentionally.
(Make sure you have a way of stopping the script with the mouse!)
:param blocking: if True causes this call to block (by calling :func:`wait_forever` on the underlying
:class:`scamp.session.Session`).
:param session: a :class:`scamp.session.Session` on which to run the keyboard listener. If None, looks to see
if there's a session running on the current thread.
"""
def key_handler(name, number, press_or_release):
if name is None:
                # catches something weird that happens with shift-alt and shift-tab
return
if name.replace("_r", "") in KeyPlane.all_modifiers:
modifier = name.replace("_r", "")
if press_or_release == "press":
if modifier not in self.modifiers_down:
self.modifiers_down.append(modifier)
else:
if modifier in self.modifiers_down:
self.modifiers_down.remove(modifier)
for y, codes_row in enumerate(KeyPlane._key_codes_by_row_and_column):
if number in codes_row:
x = codes_row.index(number)
if self.normalize_coordinates:
x /= len(codes_row) - 1
y /= 3
if self._num_callback_arguments > 2:
self.callback((x, y), press_or_release, self.modifiers_down)
elif self._num_callback_arguments > 1:
self.callback((x, y), press_or_release)
else:
self.callback((x, y))
return
if session is None:
if scamp.current_clock() is not None and isinstance(scamp.current_clock().master, scamp.Session):
session = scamp.current_clock().master
else:
session = scamp.Session()
session.register_keyboard_listener(
on_press=partial(key_handler, press_or_release="press"),
on_release=partial(key_handler, press_or_release="release"),
suppress=suppress
)
if blocking:
session.wait_forever()
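# Usage sketch (not part of the original module): echo the normalized coordinates of
# every keystroke. Requires a running keyboard listener, so this is illustrative only.
if __name__ == "__main__":
    def _echo(coordinates, up_or_down, modifiers):
        print(coordinates, up_or_down, modifiers)

    KeyPlane(_echo, normalize_coordinates=True).start(blocking=True)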
|
/scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/interaction/key_plane.py
| 0.862496 | 0.518851 |
key_plane.py
|
pypi
|
from __future__ import annotations
import itertools
from fractions import Fraction
from typing import Sequence
from expenvelope.envelope import Envelope, SavesToJSON
from scamp_extensions.utilities.sequences import multi_option_method
from .utilities import ratio_to_cents
import math
from numbers import Real
import logging
from copy import deepcopy
class PitchInterval(SavesToJSON):
"""
Represents an interval between two pitches. This combines a cents displacement and a frequency ratio, allowing
it to represent both just and equal-tempered intervals, or even a combination of both. PitchIntervals can be
added, negated, and subtracted.
:param cents: cents displacement
:param ratio: frequency ratio, either instead of or in addition to the cents displacement
"""
def __init__(self, cents: float, ratio: Fraction):
self.cents = cents
self.ratio = ratio
@classmethod
def parse(cls, representation):
"""
Parses several different possible types of data into a PitchInterval object.
:param representation: One of the following:
- a float (representing cents)
- an int or a Fraction object (representing a ratio)
- a tuple of (cents, ratio)
- a string, which will be evaluated as a (cents, ratio) tuple if it has a comma, and will be evaluated as
a Fraction if it has a slash. e.g. "3" is a ratio, "37." is cents, "4/3" is a ratio, and "200., 5/4"
is a cents displacement followed by a ratio.
:return: a PitchInterval
"""
if isinstance(representation, dict):
return cls._from_json(representation)
elif isinstance(representation, str):
if "," in representation:
cents_string, ratio_string = representation.split(",")
return cls(float(cents_string), Fraction(ratio_string))
elif "/" in representation:
return cls(0, Fraction(representation))
else:
return cls.parse(eval(representation))
elif hasattr(representation, "__len__"):
return cls(float(representation[0]), Fraction(representation[1]))
elif isinstance(representation, float):
return cls(representation, Fraction(1))
elif isinstance(representation, (int, Fraction)):
return cls(0., Fraction(representation))
else:
raise ValueError("Cannot parse given representation as a pitch interval.")
def to_cents(self) -> float:
"""
Resolves this interval to its size in cents.
"""
return self.cents + ratio_to_cents(self.ratio)
def to_half_steps(self) -> float:
"""
Resolves this interval to its size in half steps.
"""
return self.to_cents() / 100
def to_scala_string(self):
"""
Returns a string representation of this interval for use in exporting to scala files. Scala intervals can be
        either in cents or frequency ratio; however, unlike :class:`PitchInterval`, they cannot combine the two. Thus,
if this PitchInterval combines the two, it will be converted to a flat cents value.
"""
if self.cents == 0 and self.ratio == 1:
return "0."
elif self.cents == 0:
return str(self.ratio)
elif self.ratio == 1:
return str(self.cents)
else:
return str(self.to_cents())
# ------------------------------------- Loading / Saving ---------------------------------------
def _to_dict(self):
return {"cents": self.cents, "ratio": [self.ratio.numerator, self.ratio.denominator]}
@classmethod
def _from_dict(cls, json_dict):
json_dict["ratio"] = Fraction(*json_dict["ratio"])
return cls(**json_dict)
def __neg__(self):
return PitchInterval(-self.cents, 1/self.ratio)
def __add__(self, other):
if not isinstance(other, PitchInterval):
raise ValueError("PitchIntervals can only be added or subtracted from other PitchIntervals.")
return PitchInterval(self.cents + other.cents, self.ratio * other.ratio)
def __sub__(self, other):
return self + -other
def __repr__(self):
return "PitchInterval({}, {})".format(self.cents, self.ratio)
class ScaleType(SavesToJSON):
"""
A ScaleType represents the intervallic relationships in a scale without specifying a specific starting point.
This maps closely to what is represented in a Scala .scl file, which is why this object can load from and
save to that format. In fact, the one difference between the data stored here and that stored in a .scl file is
that this object allows a scale degree to be defined by both a cents offset and a subsequently applied ratio.
:param intervals: a sequence of intervals above the starting note. These can be either :class:`PitchInterval`
objects or anything that can be interpreted by :func:`PitchInterval.parse`.
"""
_standard_equal_tempered_patterns = {
"chromatic": [100.],
"diatonic": [200., 400., 500., 700., 900., 1100., 1200.],
"melodic minor": [200., 300., 500., 700., 900., 1100., 1200.],
"harmonic minor": [200., 300., 500., 700., 800., 1100., 1200.],
"whole tone": [200., 400., 600., 800., 1000., 1200.],
"octatonic": [200., 300., 500., 600., 800., 900., 1100., 1200.],
"pentatonic": [200., 400., 700., 900., 1200.],
"blues": [300., 500., 600., 700., 1000., 1200.]
}
def __init__(self, *intervals):
self.intervals = [x if isinstance(x, PitchInterval) else PitchInterval.parse(x) for x in intervals]
def to_half_steps(self) -> Sequence[float]:
"""
Returns a list of floats representing the number of half steps from the starting pitch for each scale degree.
"""
return [interval.to_half_steps() for interval in self.intervals]
def rotate(self, steps: int, in_place: bool = True) -> ScaleType:
"""
Rotates the step sizes of this scale type in the manner of a modal shift. E.g. going from ionian to lydian
would be a rotation of 3.
:param steps: the number of steps to shift the starting point of the scale up or down by. Can be negative.
:param in_place: whether to modify this ScaleType in place, or to return a modified copy.
:return: the modified ScaleType
"""
intervals = self.intervals if in_place else deepcopy(self.intervals)
steps = steps % len(intervals)
if steps == 0:
rotated_intervals = intervals
else:
shift_first_intervals_up = intervals[steps:] + [x + intervals[-1] for x in intervals[:steps]]
rotated_intervals = [x - intervals[steps - 1] for x in shift_first_intervals_up]
if in_place:
self.intervals = rotated_intervals
return self
else:
return ScaleType(*rotated_intervals)
# ------------------------------------- Class Methods ---------------------------------------
@classmethod
def chromatic(cls):
"""Returns a 12-tone equal tempered chromatic ScaleType."""
return cls(*ScaleType._standard_equal_tempered_patterns["chromatic"])
@classmethod
def diatonic(cls, modal_shift: int = 0) -> ScaleType:
"""
Returns a diatonic ScaleType with the specified modal shift.
:param modal_shift: how many steps up or down to shift the starting note of the scale. 0 returns ionian,
1 returns dorian, 2 returns phrygian, etc. (There are also convenience methods for creating these
modal scale types.)
"""
return cls(*ScaleType._standard_equal_tempered_patterns["diatonic"]).rotate(modal_shift)
@classmethod
def major(cls, modal_shift: int = 0) -> ScaleType:
"""Alias of :func:`ScaleType.diatonic`."""
return cls.diatonic(modal_shift)
@classmethod
def ionian(cls, modal_shift: int = 0) -> ScaleType:
"""Alias of :func:`ScaleType.diatonic`."""
return cls.diatonic(modal_shift)
@classmethod
def dorian(cls) -> ScaleType:
"""Convenience method for creating a dorian ScaleType."""
return cls.diatonic(1)
@classmethod
def phrygian(cls) -> ScaleType:
"""Convenience method for creating a phrygian ScaleType."""
return cls.diatonic(2)
@classmethod
def lydian(cls) -> ScaleType:
"""Convenience method for creating a lydian ScaleType."""
return cls.diatonic(3)
@classmethod
def mixolydian(cls) -> ScaleType:
"""Convenience method for creating a myxolydian ScaleType."""
return cls.diatonic(4)
@classmethod
def aeolian(cls) -> ScaleType:
"""Convenience method for creating an aeolian ScaleType."""
return cls.diatonic(5)
@classmethod
def natural_minor(cls) -> ScaleType:
"""Alias of :func:`ScaleType.aeolian`."""
return cls.aeolian()
@classmethod
def locrian(cls) -> ScaleType:
"""Convenience method for creating an locrian ScaleType."""
return cls.diatonic(6)
@classmethod
def harmonic_minor(cls, modal_shift: int = 0) -> ScaleType:
"""
Returns a harmonic minor ScaleType with the specified modal shift.
:param modal_shift: How many steps up or down to shift the starting note of the scale. The default value of
zero creates the standard harmonic minor scale.
"""
return cls(*ScaleType._standard_equal_tempered_patterns["harmonic minor"]).rotate(modal_shift)
@classmethod
def melodic_minor(cls, modal_shift: int = 0) -> ScaleType:
"""
Returns a melodic minor ScaleType with the specified modal shift.
:param modal_shift: How many steps up or down to shift the starting note of the scale. The default value of
zero creates the standard melodic minor scale.
"""
return cls(*ScaleType._standard_equal_tempered_patterns["melodic minor"]).rotate(modal_shift)
@classmethod
def whole_tone(cls) -> ScaleType:
"""Convenience method for creating a whole tone ScaleType."""
return cls(*ScaleType._standard_equal_tempered_patterns["whole tone"])
@classmethod
def octatonic(cls, whole_step_first: bool = True) -> ScaleType:
"""
Convenience method for creating an octatonic (alternating whole and half steps) ScaleType
:param whole_step_first: whether to start with a whole step or a half step.
"""
if whole_step_first:
return cls(*ScaleType._standard_equal_tempered_patterns["octatonic"])
else:
return cls(*ScaleType._standard_equal_tempered_patterns["octatonic"]).rotate(1)
@classmethod
def pentatonic(cls, modal_shift: int = 0) -> ScaleType:
"""
Returns a pentatonic ScaleType with the specified modal shift.
        :param modal_shift: how many steps up or down to shift the starting note of the scale. A shift of 4 creates
a minor pentatonic scale.
"""
return cls(*ScaleType._standard_equal_tempered_patterns["pentatonic"]).rotate(modal_shift)
@classmethod
def pentatonic_minor(cls) -> ScaleType:
"""Convenience method for creating a pentatonic minor ScaleType."""
return cls.pentatonic(4)
@classmethod
def blues(cls) -> ScaleType:
"""Convenience method for creating a blues ScaleType."""
return cls(*ScaleType._standard_equal_tempered_patterns["blues"])
# ------------------------------------- Loading / Saving ---------------------------------------
def save_to_scala(self, file_path: str, description: str = "Mystery scale saved using SCAMP") -> None:
"""
Converts and saves this ScaleType to a scala file at the given file path. Note that any intervals that combine
cents and ratio information will be flattened out to only cents information, since the combination is not
possible in scala files.
:param file_path: path of the file to save
:param description: description of the scale for the file header
"""
lines = ["! {}".format(file_path.split("/")[-1]),
"!",
"{}".format(description),
str(len(self.intervals)),
"!"]
lines.extend(interval.to_scala_string() for interval in self.intervals)
with open(file_path, "w") as scala_file:
scala_file.write("\n".join(lines))
@classmethod
def load_from_scala(cls, file_path: str) -> ScaleType:
"""
Loads a ScaleType from a scala file.
:param file_path: file path of a correctly formatted scala file
"""
pitch_entries = []
with open(file_path, "r") as scala_file:
lines = scala_file.read().split("\n")
description = num_steps = None
for line in lines:
line = line.strip()
if line.startswith("!") or len(line) == 0:
continue
elif description is None:
description = line
elif num_steps is None:
num_steps = int(line)
else:
first_non_numeric_char = None
for i, char in enumerate(line):
if not (char.isnumeric() or char in (".", "/")):
first_non_numeric_char = i
break
if first_non_numeric_char is None:
pitch_entries.append(line)
else:
                    pitch_entries.append(line[:first_non_numeric_char])
if len(pitch_entries) != num_steps:
logging.warning("Wrong number of pitches in Scala file. "
"That's fine, I guess, but though you should know...")
return cls(*pitch_entries)
def _to_dict(self):
return {
"intervals": self.intervals
}
@classmethod
def _from_dict(cls, json_dict):
return cls(*json_dict["intervals"])
def __repr__(self):
return "ScaleType({})".format(self.intervals)
class Scale(SavesToJSON):
"""
Class representing a scale starting on a specific pitch. A :class:`Scale` combines a :class:`ScaleType` with a
starting pitch, and also an option as to whether the pitch collection should cycle (as pretty much all the
standard scales do). To illustrate the difference between a :class:`ScaleType` and a :class:`Scale`, "D dorian"
would be represented by a :class:`Scale`, whereas "dorian" would be represented by a :class:`ScaleType`.
:param scale_type: a :class:`ScaleType` object
:param start_pitch: a pitch to treat as the starting note of the scale
:param cycle: whether or not this scale cycles. If so, the interval from the first pitch to the last pitch is
treated as the cycle size.
"""
def __init__(self, scale_type: ScaleType, start_pitch: Real, cycle: bool = True):
self.scale_type = scale_type
self._start_pitch = start_pitch
self._cycle = cycle
self._initialize_instance_vars()
@property
def start_pitch(self) -> Real:
"""The pitch that scale starts from."""
return self._start_pitch
@start_pitch.setter
def start_pitch(self, value):
self._start_pitch = value
self._initialize_instance_vars()
@property
def cycle(self) -> bool:
"""Whether or not this scale repeats after a full cycle."""
return self._cycle
@cycle.setter
def cycle(self, value):
self._cycle = value
self._initialize_instance_vars()
def _initialize_instance_vars(self):
# convert the scale type to a list of MIDI-valued seed pitches
self._seed_pitches = (self._start_pitch,) + tuple(self._start_pitch + x for x in self.scale_type.to_half_steps())
self._envelope = Envelope.from_points(*zip(range(len(self._seed_pitches)), self._seed_pitches))
self._inverse_envelope = Envelope.from_points(*zip(self._seed_pitches, range(len(self._seed_pitches))))
self.num_steps = len(self._seed_pitches) - 1
self.width = self._seed_pitches[-1] - self._seed_pitches[0] if self._cycle else None
@classmethod
def from_pitches(cls, seed_pitches: Sequence[Real], cycle: bool = True) -> Scale:
"""
Constructs a Scale from a list of seed pitches, given as floating-point MIDI pitch values. For instance, a
C major scale could be constructed by calling Scale.from_pitches([60, 62, 64, 65, 67, 69, 71, 72]). Note that
the upper C needs to be specified, since it is not assumed that scales will be octave repeating, and the repeat
interval is given by the distance between the first and last seed pitch. Also note that this particular C major
scale would place scale degree 0 at middle C, whereas Scale.from_pitches([48, 50, 52, 53, 55, 57, 59, 60]) would
place it an octave lower.
:param seed_pitches: a list of floating-point MIDI pitch values.
:param cycle: Whether or not to cycle the scale, creating multiple "octaves" (or perhaps not octaves if the
            scale repeats at a different interval).
"""
return cls(ScaleType(*(100. * (x - seed_pitches[0]) for x in seed_pitches[1:])), seed_pitches[0], cycle=cycle)
@classmethod
def from_scala_file(cls, file_path: str, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Constructs a Scale from a scala file located at the given file path, and a start pitch.
:param file_path: path of the scala file to load
:param start_pitch: the pitch to define as scale degree 0
:param cycle: whether or not this scale is treated as cyclic
"""
return cls(ScaleType.load_from_scala(file_path), start_pitch, cycle=cycle)
@classmethod
def from_start_pitch_and_cent_or_ratio_intervals(cls, start_pitch: Real, intervals, cycle: bool = True) -> Scale:
"""
Creates a scale from a start pitch and a sequence of intervals (either cents or frequency ratios).
:param start_pitch: The pitch to start on
:param intervals: a sequence of intervals above the start pitch. These can be either :class:`PitchInterval`
objects or anything that can be interpreted by :func:`PitchInterval.parse`.
:param cycle: whether or not this scale is treated as cyclic. See explanation in :func:`Scale.from_pitches`
about defining cyclic scales.
"""
return cls(ScaleType(*intervals), start_pitch, cycle=cycle)
@multi_option_method
def degree_to_pitch(self, degree: Real) -> float:
"""
Given a degree of the scale, returns the pitch that it corresponds to. Degree 0 corresponds to the start
pitch, and negative degrees correspond to notes below the start pitch (for cyclical scales). Fractional degrees
are possible and result in pitches interpolated between the scale degrees.
:param degree: a (potentially floating-point) scale degree
"""
if self._cycle:
cycle_displacement = math.floor(degree / self.num_steps)
mod_degree = degree % self.num_steps
return self._envelope.value_at(mod_degree) + cycle_displacement * self.width
else:
return self._envelope.value_at(degree)
@multi_option_method
def pitch_to_degree(self, pitch: Real) -> float:
"""
Given a pitch, returns the scale degree that it corresponds to. Pitches that lie in between the notes of the
scale will return fractional degrees via interpolation.
:param pitch: a pitch, potentially in between scale degrees
"""
if self._cycle:
cycle_displacement = math.floor((pitch - self._seed_pitches[0]) / self.width)
mod_pitch = (pitch - self._seed_pitches[0]) % self.width + self._seed_pitches[0]
return self._inverse_envelope.value_at(mod_pitch) + cycle_displacement * self.num_steps
else:
return self._inverse_envelope.value_at(pitch)
@multi_option_method
def round(self, pitch: Real) -> float:
"""Rounds the given pitch to the nearest note of the scale."""
return self.degree_to_pitch(round(self.pitch_to_degree(pitch)))
@multi_option_method
def floor(self, pitch: Real) -> float:
"""Returns the nearest note of the scale below or equal to the given pitch."""
return self.degree_to_pitch(math.floor(self.pitch_to_degree(pitch)))
@multi_option_method
def ceil(self, pitch: Real) -> float:
"""Returns the nearest note of the scale above or equal to the given pitch."""
return self.degree_to_pitch(math.ceil(self.pitch_to_degree(pitch)))
# ------------------------------------- Transformations ---------------------------------------
def transpose(self, half_steps: float) -> Scale:
"""
Transposes this scale (in place) by the given number of half steps.
:param half_steps: number of half steps to transpose up or down by
:return: self, for chaining purposes
"""
self._start_pitch = self._start_pitch + half_steps
self._initialize_instance_vars()
return self
def transposed(self, half_steps: float) -> Scale:
"""
Same as :func:`Scale.transpose`, except that it returns a transposed copy, leaving this scale unaltered.
"""
copy = self.duplicate()
copy.transpose(half_steps)
return copy
# ------------------------------------- Class Methods ---------------------------------------
@classmethod
def chromatic(cls, start_pitch: Real = 60, cycle: bool = True) -> Scale:
"""
Returns a 12-tone equal tempered chromatic scale starting on the specified pitch.
:param start_pitch: the pitch this scale starts from (doesn't affect the scale in this case, but affects
where we count scale degrees from).
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.chromatic(), start_pitch, cycle=cycle)
@classmethod
def diatonic(cls, start_pitch: Real, modal_shift: int = 0, cycle: bool = True) -> Scale:
"""
Returns a diatonic scale starting on the specified pitch, and with the specified modal shift.
:param start_pitch: the pitch this scale starts from
:param modal_shift: how many steps up or down to shift the scale's interval relationships. 0 is ionian, 1 is
dorian, 2 is phrygian, etc. (There are also convenience methods for creating these modal scales.)
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.diatonic(modal_shift), start_pitch, cycle=cycle)
@classmethod
def major(cls, start_pitch: Real, modal_shift: int = 0, cycle: bool = True) -> Scale:
"""Alias of :func:`Scale.diatonic`."""
return cls.diatonic(start_pitch, modal_shift, cycle)
@classmethod
def ionian(cls, start_pitch: Real, modal_shift: int = 0, cycle: bool = True) -> Scale:
"""Alias of :func:`Scale.diatonic`."""
return cls.diatonic(start_pitch, modal_shift, cycle)
@classmethod
def dorian(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Convenience method for creating a dorian scale with the given start pitch. (Same as :func:`Scale.diatonic` with
a modal shift of 1.)
"""
return cls(ScaleType.dorian(), start_pitch, cycle=cycle)
@classmethod
def phrygian(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Convenience method for creating a phrygian scale with the given start pitch. (Same as :func:`Scale.diatonic`
with a modal shift of 2.)
"""
return cls(ScaleType.phrygian(), start_pitch, cycle=cycle)
@classmethod
def lydian(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Convenience method for creating a lydian scale with the given start pitch. (Same as :func:`Scale.diatonic`
with a modal shift of 3.)
"""
return cls(ScaleType.lydian(), start_pitch, cycle=cycle)
@classmethod
def mixolydian(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Convenience method for creating a mixolydian scale with the given start pitch. (Same as :func:`Scale.diatonic`
with a modal shift of 4.)
"""
        return cls(ScaleType.mixolydian(), start_pitch, cycle=cycle)
@classmethod
def aeolian(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
        Convenience method for creating an aeolian scale with the given start pitch. (Same as :func:`Scale.diatonic`
with a modal shift of 5.)
"""
return cls(ScaleType.aeolian(), start_pitch, cycle=cycle)
@classmethod
def natural_minor(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""Alias of :func:`Scale.aeolian`."""
return cls.aeolian(start_pitch, cycle)
@classmethod
def locrian(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Convenience method for creating a locrian scale with the given start pitch. (Same as :func:`Scale.diatonic`
with a modal shift of 6.)
"""
return cls(ScaleType.locrian(), start_pitch, cycle=cycle)
@classmethod
def harmonic_minor(cls, start_pitch: Real, modal_shift: int = 0, cycle: bool = True) -> Scale:
"""
Returns a harmonic minor scale starting on the specified pitch, and with the specified modal shift.
:param start_pitch: the pitch this scale starts from
:param modal_shift: How many steps up or down to shift the scale's interval relationships. To get a regular
harmonic minor scale, simply use the default modal shift of 0.
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.harmonic_minor(modal_shift), start_pitch, cycle=cycle)
@classmethod
def melodic_minor(cls, start_pitch: Real, modal_shift: int = 0, cycle: bool = True) -> Scale:
"""
Returns a melodic minor scale starting on the specified pitch, and with the specified modal shift.
:param start_pitch: the pitch this scale starts from
:param modal_shift: How many steps up or down to shift the scale's interval relationships. To get a regular
melodic minor scale, simply use the default modal shift of 0. A so-called acoustic scale (major sharp-4,
flat-7) can be produced with a modal shift of 4.
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.melodic_minor(modal_shift), start_pitch, cycle=cycle)
@classmethod
def whole_tone(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Returns a whole tone scale with the given start pitch.
:param start_pitch: the pitch this scale starts from
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.whole_tone(), start_pitch, cycle=cycle)
@classmethod
def octatonic(cls, start_pitch: Real, cycle: bool = True, whole_step_first: bool = True) -> Scale:
"""
Returns an octatonic scale with the given start pitch.
:param start_pitch: the pitch this scale starts from
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
:param whole_step_first: whether this is a whole-half or half-whole octatonic scale.
"""
return cls(ScaleType.octatonic(whole_step_first=whole_step_first), start_pitch, cycle=cycle)
@classmethod
def pentatonic(cls, start_pitch: Real, modal_shift: int = 0, cycle: bool = True) -> Scale:
"""
Returns a pentatonic scale starting on the specified pitch, and with the specified modal shift.
:param start_pitch: the pitch this scale starts from
        :param modal_shift: How many steps up or down to shift the scale's interval relationships. To get a regular
            (major) pentatonic scale, simply use the default modal shift of 0.
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.pentatonic(modal_shift), start_pitch, cycle=cycle)
@classmethod
def pentatonic_minor(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Returns a pentatonic minor scale starting on the specified pitch.
:param start_pitch: the pitch this scale starts from
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.pentatonic_minor(), start_pitch, cycle=cycle)
@classmethod
def blues(cls, start_pitch: Real, cycle: bool = True) -> Scale:
"""
Returns a 6-note blues scale starting on the specified pitch.
:param start_pitch: the pitch this scale starts from
:param cycle: whether or not this scale repeats after an octave or is constrained to a single octave.
"""
return cls(ScaleType.blues(), start_pitch, cycle=cycle)
# ------------------------------------- Loading / Saving ---------------------------------------
def _to_dict(self):
return {
"scale_type": self.scale_type,
"start_pitch": self._start_pitch,
"cycle": self._cycle
}
@classmethod
def _from_dict(cls, json_dict):
return cls(**json_dict)
# ------------------------------------- Special Methods ---------------------------------------
def __getitem__(self, item):
if isinstance(item, Real):
return self.degree_to_pitch(item)
elif isinstance(item, slice):
start = 0 if item.start is None else item.start
step = 1 if item.step is None else item.step
if item.stop is None:
return (self.degree_to_pitch(x) for x in itertools.count(start, step))
else:
return [self.degree_to_pitch(x)
for x in itertools.islice(itertools.count(start, step), int((item.stop - start) / step))]
elif isinstance(item, (list, tuple)):
pieces = [[self.__getitem__(x)] if isinstance(x, Real) else self.__getitem__(x) for x in item]
if all(isinstance(x, list) for x in pieces):
return sum(pieces, start=[])
else:
return itertools.chain(*pieces)
def __iter__(self):
for step_num in range(self.num_steps + 1):
yield self.degree_to_pitch(step_num)
def __contains__(self, item):
if not self._cycle:
return item in self._seed_pitches
else:
return (item - self.start_pitch) % self.width + self.start_pitch in self._seed_pitches
def __repr__(self):
return "Scale({}, {}{})".format(
repr(self.scale_type),
self._start_pitch,
", cycle={}".format(self._cycle) if not self._cycle else ""
)
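# Illustrative sketch (not part of the original module) of degree/pitch lookups on a
# cycling scale. The helper name is hypothetical.
def _demo_scale_lookup():
    d_dorian = Scale.dorian(62)                   # D4 is scale degree 0
    assert d_dorian.degree_to_pitch(7) == 74      # one full cycle above the start
    assert d_dorian.pitch_to_degree(65) == 2      # F4 sits at degree 2
    assert d_dorian.round(66.4) == 67             # snap to the nearest scale note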
|
/scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/pitch/scale.py
| 0.964472 | 0.590897 |
scale.py
|
pypi
|
from typing import Sequence, Dict
from numbers import Real
from scamp_extensions.utilities.sequences import multi_option_function
import math
# ----------------------------------------------- Pitch Space Conversions ---------------------------------------------
@multi_option_function
def ratio_to_cents(ratio: Real) -> Real:
"""
Given a frequency ratio, convert it to a corresponding number of cents.
:param ratio: frequency ratio (e.g. 1.5 for a perfect fifth)
"""
return math.log2(ratio) * 1200
@multi_option_function
def cents_to_ratio(cents: Real) -> Real:
"""
Given a number of cents, convert it to a corresponding frequency ratio.
:param cents: number of cents (e.g. 700 for a perfect fifth)
"""
return math.pow(2, cents / 1200)
@multi_option_function
def midi_to_hertz(midi_value: Real, A: Real = 440) -> Real:
"""
Given a MIDI pitch, returns the corresponding frequency in hertz.
:param midi_value: a midi pitch (e.g. 60 for middle C)
:param A: the tuning of A4 in hertz
"""
return A * math.pow(2, (midi_value - 69) / 12)
@multi_option_function
def hertz_to_midi(hertz_value: Real, A: Real = 440) -> Real:
"""
Given a frequency in hertz, returns the corresponding (floating point) MIDI pitch.
:param hertz_value: a frequency in hertz
:param A: the tuning of A4 in hertz
"""
return 12 * math.log2(hertz_value / A) + 69
@multi_option_function
def freq_to_bark(f: Real) -> Real:
"""
Converts a frequency in hertz to a (floating point) Bark number according to the psychoacoustic Bark scale
(https://en.wikipedia.org/wiki/Bark_scale). This is a scale that compensates for the unevenness in human pitch
acuity across our range of hearing. Here we use the function approximation proposed by Terhardt, which was chosen
in part for its ease of inverse calculation.
:param f: the input frequency
"""
return 13.3 * math.atan(0.75*f/1000.0)
# the inverse formula
@multi_option_function
def bark_to_freq(b: Real) -> Real:
"""
Converts a Bark number to its corresponding frequency in hertz. See :func:`freq_to_bark`.
:param b: a (floating point) bark number
"""
return math.tan(b/13.3)*1000.0/0.75
_pitch_class_displacements = {
'c': 0,
'd': 2,
'e': 4,
'f': 5,
'g': 7,
'a': 9,
'b': 11
}
_accidental_displacements = {
'#': 1,
's': 1,
'f': -1,
'b': -1,
'x': 2,
'bb': -2
}
@multi_option_function
def note_name_to_number(note_name: str) -> int:
"""
Converts a note name (e.g. "Bb5" or "C#2") to its corresponding MIDI number.
:param note_name: The note name, e.g. "Bb5". The accidental can be any of "#", "s", "f", "b", "x", or "bb".
Uses the convention of "C4" = 60.
"""
note_name = note_name.lower().replace(' ', '')
pitch_class_name = note_name[0]
octave = note_name[-1]
accidental = note_name[1:-1]
return (int(octave) + 1) * 12 + \
_pitch_class_displacements[pitch_class_name] + \
(_accidental_displacements[accidental] if accidental in _accidental_displacements else 0)
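# Illustrative sketch (not part of the original module) tying the conversions above
# together. The helper name is hypothetical.
def _demo_pitch_conversions():
    assert note_name_to_number("A4") == 69
    assert midi_to_hertz(69) == 440
    assert abs(hertz_to_midi(880) - 81) < 1e-9
    assert abs(cents_to_ratio(1200) - 2.0) < 1e-9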
# ----------------------------------------------------- Other ---------------------------------------------------------
def map_keyboard_to_microtonal_pitches(microtonal_pitches: Sequence[float],
squared_penalty: bool = True) -> Dict[int, float]:
"""
Given a list of microtonal (floating-point) MIDI pitches, finds an efficient map from keyboard-friendly (integer)
pitches to the original microtonal pitches. This is really useful if you're trying to audition a microtonal
collection on the keyboard and don't want to deal with making a mapping manually. Note: the code here is taken
nearly verbatim from StackOverflow user `sacha` in response to this question:
https://stackoverflow.com/questions/61825905/match-list-of-floats-to-nearest-integers-without-repeating
:param microtonal_pitches: a collection of floating-point (microtonal) pitches
:param squared_penalty: whether or not the choice is based on simple or squared error. (I.e. are we using taxicab
or euclidean distance.)
:return: a dictionary mapping keyboard-friendly (integer) pitches to the microtonal collection given
"""
import math
import numpy as np
from scipy.optimize import linear_sum_assignment
from scipy.spatial.distance import cdist
microtonal_pitches = np.array(microtonal_pitches)
# hacky safety-net -> which candidates to look at
min_ = math.floor(microtonal_pitches.min())
max_ = math.ceil(microtonal_pitches.max())
gap = max_ - min_
cands = np.arange(min_ - gap, max_ + gap)
cost_matrix = cdist(microtonal_pitches[:, np.newaxis], cands[:, np.newaxis])
if squared_penalty:
cost_matrix = np.square(cost_matrix)
row_ind, col_ind = linear_sum_assignment(cost_matrix)
solution = cands[col_ind]
# cost would be computed like this:
# `cost = cost_matrix[row_ind, col_ind].sum()`
return {rounded_p: p for p, rounded_p in zip(microtonal_pitches, solution)}
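# Illustrative sketch (not part of the original module): three nearby microtonal pitches
# are assigned three distinct integer keys. The helper name is hypothetical.
def _demo_keyboard_mapping():
    mapping = map_keyboard_to_microtonal_pitches([60.4, 60.6, 61.1])
    assert sorted(mapping.keys()) == [60, 61, 62]   # no key is reused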
|
/scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/pitch/utilities.py
| 0.951684 | 0.808559 |
utilities.py
|
pypi
|
from typing import List
from mido import MidiFile
from collections import namedtuple
Note = namedtuple("Note", "track channel pitch volume start_time length")
def scrape_midi_file_to_note_list(midi_file_path) -> List[Note]:
"""
Scrapes a list of :class:`Note` objects from all of the tracks of the given MIDI file.
:param midi_file_path: path to midi file
"""
mid = MidiFile(midi_file_path, clip=True)
notes_started = {}
notes = []
for which_track, track in enumerate(mid.tracks):
t = 0
for message in track:
t += message.time / mid.ticks_per_beat
if message.type == "note_off" or (message.type == "note_on" and message.velocity == 0):
try:
volume, start_time = notes_started[(message.note, message.channel)]
notes.append(Note(which_track, message.channel, message.note, volume, start_time, t - start_time))
except KeyError:
print("KEY ERROR")
pass
elif message.type == "note_on":
notes_started[(message.note, message.channel)] = message.velocity / 127, t
notes.sort(key=lambda note: note.start_time)
return notes
def scrape_midi_file_to_dict(midi_file_path) -> dict:
"""
Scrapes a dictionary of note info from a MIDI file.
:param midi_file_path: the MIDI file path
:return: a dict with the following keys, each of which is presented in chronological order of the notes from which
they derive: "pitches", "start_times", "volumes", "lengths", "inter_onset_times" (how long since the last note
started), "tracks"
"""
notes = scrape_midi_file_to_note_list(midi_file_path)
tracks, channels, pitches, volumes, start_times, lengths = zip(*notes)
tracks = list(tracks)
pitches = list(pitches)
start_times = list(start_times)
volumes = list(volumes)
lengths = list(lengths)
inter_onset_times = [t2 - t1 for t1, t2 in zip(start_times[:-1], start_times[1:])]
return {
"pitches": pitches,
"start_times": start_times,
"volumes": volumes,
"lengths": lengths,
"inter_onset_times": inter_onset_times,
"tracks": tracks
}
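# Usage sketch (not part of the original module): summarize a MIDI file with the
# scrapers above. The file path is a hypothetical placeholder.
def _demo_midi_scrape(path="example.mid"):
    info = scrape_midi_file_to_dict(path)
    print(len(info["pitches"]), "notes; first onset at beat", info["start_times"][0])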
|
/scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/parsing/midi.py
| 0.833223 | 0.42185 |
midi.py
|
pypi
|
from numbers import Real
from typing import Tuple, Callable, Sequence
from scamp import EnvelopeSegment, Performance, PerformancePart
import drawsvg
from scamp import Envelope
# -------------------------------------------------- Color/gradients --------------------------------------------------
_default_cm_envelope_red = Envelope.from_levels((0, 0, 78, 151, 211, 250, 255, 255, 255))
_default_cm_envelope_green = Envelope.from_levels((0, 0, 0, 0, 0, 30, 170, 250, 255))
_default_cm_envelope_blue = Envelope.from_levels((0, 81, 122, 118, 64, 0, 0, 97, 255))
def default_color_map(intensity):
return _default_cm_envelope_red.value_at(intensity), \
_default_cm_envelope_green.value_at(intensity), \
_default_cm_envelope_blue.value_at(intensity)
def rgb_to_hex(rgb):
return '#%02x%02x%02x' % tuple(int(x) for x in rgb)
def make_intensity_gradient(envelope, start_x, end_x, color_map=default_color_map, value_range=None):
envelope = envelope.duplicate()
envelope.normalize_to_duration(1)
if value_range is not None:
envelope.shift_vertical(-value_range[0])
envelope.scale_vertical(1/(value_range[1] - value_range[0]))
# subdivide envelope segments until none of them are covering more than 0.1 in range
gaps_too_big = True
while gaps_too_big:
gaps_too_big = False
for segment in envelope.segments:
if abs(segment.end_level - segment.start_level) > 0.1:
envelope.insert_interpolated((segment.start_time + segment.end_time) / 2)
gaps_too_big = True
grad = drawsvg.LinearGradient(start_x, 0, end_x, 0)
for segment in envelope.segments:
grad.addStop(segment.start_time, rgb_to_hex(color_map(segment.start_level)))
grad.addStop(1, rgb_to_hex(color_map(envelope.end_level())))
return grad
def get_fill(parameter, start_x, end_x, color_map=default_color_map, value_range=None):
if isinstance(parameter, Envelope):
return make_intensity_gradient(parameter, start_x, end_x, color_map, value_range)
else:
return rgb_to_hex(color_map(
parameter if value_range is None else (parameter - value_range[0]) / (value_range[1] - value_range[0])
))
# -------------------------------------------------- Draw note --------------------------------------------------
def _get_unit_slope_vector(slope):
return 1 / (slope ** 2 + 1) ** 0.5, slope / (slope ** 2 + 1) ** 0.5
def _get_segment_raw(height_segment: EnvelopeSegment, width_segment: EnvelopeSegment, fill,
outline_width, outline_color):
"""Returns the shape of a single envelope segment as a list of three drawing elements (fill and both outlines)"""
p1 = height_segment.start_time, height_segment.start_level
p4 = height_segment.end_time, height_segment.end_level
dist = ((height_segment.end_time - height_segment.start_time) ** 2 +
(height_segment.end_level - height_segment.start_level) ** 2) ** 0.5 / 3
start_slope = height_segment.start_slope()
start_unit_slope_vector = _get_unit_slope_vector(start_slope)
start_unit_normal_vector = - start_unit_slope_vector[1], start_unit_slope_vector[0]
p2 = height_segment.start_time + dist * start_unit_slope_vector[0], \
height_segment.start_level + dist * start_unit_slope_vector[1]
end_slope = height_segment.end_slope()
end_unit_slope_vector = _get_unit_slope_vector(end_slope)
end_unit_normal_vector = - end_unit_slope_vector[1], end_unit_slope_vector[0]
p3 = height_segment.end_time - dist * end_unit_slope_vector[0], \
height_segment.end_level - dist * end_unit_slope_vector[1]
start_a = p1[0] + start_unit_normal_vector[0] * width_segment.start_level,\
p1[1] + start_unit_normal_vector[1] * width_segment.start_level
start_b = p1[0] - start_unit_normal_vector[0] * width_segment.start_level,\
p1[1] - start_unit_normal_vector[1] * width_segment.start_level
control_1a = p2[0] + start_unit_normal_vector[0] * width_segment.value_at(width_segment.start_time + width_segment.duration / 3),\
p2[1] + start_unit_normal_vector[1] * width_segment.value_at(width_segment.start_time + width_segment.duration / 3)
control_1b = p2[0] - start_unit_normal_vector[0] * width_segment.value_at(width_segment.start_time + width_segment.duration / 3),\
p2[1] - start_unit_normal_vector[1] * width_segment.value_at(width_segment.start_time + width_segment.duration / 3)
control_2a = p3[0] + end_unit_normal_vector[0] * width_segment.value_at(width_segment.start_time + width_segment.duration * 2 / 3),\
p3[1] + end_unit_normal_vector[1] * width_segment.value_at(width_segment.start_time + width_segment.duration * 2 / 3)
control_2b = p3[0] - end_unit_normal_vector[0] * width_segment.value_at(width_segment.start_time + width_segment.duration * 2 / 3),\
p3[1] - end_unit_normal_vector[1] * width_segment.value_at(width_segment.start_time + width_segment.duration * 2 / 3)
end_a = p4[0] + end_unit_normal_vector[0] * width_segment.end_level,\
p4[1] + end_unit_normal_vector[1] * width_segment.end_level
end_b = p4[0] - end_unit_normal_vector[0] * width_segment.end_level,\
p4[1] - end_unit_normal_vector[1] * width_segment.end_level
return [
drawsvg.Path(fill=fill, close=True, stroke='none').
M(*start_a).C(*control_1a, *control_2a, *end_a).L(*end_b).
C(*control_2b, *control_1b, *start_b).L(*start_a),
drawsvg.Path(stroke=outline_color, stroke_width=outline_width, fill='none').
M(*start_a).C(*control_1a, *control_2a, *end_a),
drawsvg.Path(stroke=outline_color, stroke_width=outline_width, fill='none').
M(*start_b).C(*control_1b, *control_2b, *end_b),
]
def _draw_note_raw(draw: drawsvg.Drawing, height_envelope: Envelope, width_envelope: Envelope, fill,
outline_width, outline_color):
"""
Draws a note shape, based on envelopes in drawing coordinates.
:param draw: the drawsvg.Drawing used
:param height_envelope: an envelope representing the curve itself, normalized to drawing coordinates
:param width_envelope: an envelope representing the curve width, normalized to drawing coordinates
:param fill: the color or gradient to use
:param outline_width: width of the stroke outline of the note
:param outline_color: color of the outline of the note
"""
key_points = set(height_envelope.times).union(set(width_envelope.times))
for t in key_points:
height_envelope.insert_interpolated(t)
width_envelope.insert_interpolated(t)
outlines = []
fill_chunks = []
fill_chunks.append(drawsvg.Circle(height_envelope.end_time(), height_envelope.end_level(),
width_envelope.end_level(), fill=fill, stroke="none"))
outlines.append(drawsvg.Circle(height_envelope.end_time(), height_envelope.end_level(),
width_envelope.end_level(), fill="none", stroke=outline_color,
stroke_width=outline_width))
for height_segment, width_segment in zip(height_envelope.segments, width_envelope.segments):
fill_chunks.append(drawsvg.Circle(height_segment.start_time, height_segment.start_level,
width_segment.start_level, fill=fill, stroke="none"))
outlines.append(drawsvg.Circle(height_segment.start_time, height_segment.start_level, width_segment.start_level,
fill='none', stroke=outline_color, stroke_width=outline_width))
fill_chunk, *segment_outlines = _get_segment_raw(height_segment, width_segment, fill,
outline_width, outline_color)
fill_chunks.append(fill_chunk)
outlines.extend(segment_outlines)
draw.extend(outlines)
draw.extend(fill_chunks)
def _draw_note_attack_only(draw: drawsvg.Drawing, height_envelope: Envelope, width_envelope: Envelope, fill,
outline_width, outline_color):
"""
Draws just the attack of a note shape, based on envelopes in drawing coordinates.
:param draw: the drawsvg.Drawing used
:param height_envelope: an envelope representing the curve itself, normalized to drawing coordinates
:param width_envelope: an envelope representing the curve width, normalized to drawing coordinates
:param fill: the color or gradient to use
:param outline_width: width of the stroke outline of the note
:param outline_color: color of the outline of the note
"""
draw.append(
drawsvg.Circle(height_envelope.start_time(), height_envelope.start_level(),
width_envelope.start_level(), fill=fill, stroke=outline_color, stroke_width=outline_width)
)
class PartNoteGraph:
"""
Class that takes a performance part and a bunch of drawing/visualization settings,
    and can render to a :class:`drawsvg.Drawing`.
:param performance_part: the PerformancePart on which to base this note graph
    :param height_parameter: the playback parameter that governs each note's height on the graph; defaults to pitch
:param height_parameter_range: range of values expected from the parameter that governs height. If the parameter is
pitch, defaults to the min and max pitch found in the part. Otherwise defaults to (0, 1).
    :param width_parameter: the playback parameter that governs each note's width on the graph; defaults to volume
:param width_parameter_range: range of values expected from the parameter that governs width. If the parameter is
pitch, defaults to the min and max pitch found in the part. Otherwise defaults to (0, 1).
:param width_range: range of note widths mapped to in the drawing
:param color_parameter: the parameter governing note color, if any (overrides fill_color); inactive by default
:param color_parameter_range: range of values expected from the parameter that governs color. If the parameter is
pitch, defaults to the min and max pitch found in the part. Otherwise defaults to (0, 1).
:param color_map: function from the interval [0, 1] to an RGB color tuple, where 0 represents the color
parameter at the bottom of its range and 1 at the top.
:param time_range: time range to map horizontal space to; defaults to 0 to length of part
:param fill_color: the fill color of note glyphs; overridden by color parameter, if active
:param outline_color: note outline color
:param outline_width: note outline width
:param guide_lines: the values of the height parameter at which to draw horizontal guide lines.
:param guide_line_width: width of the guide lines
:param guide_line_color: color of the guide lines
:param attack_only: if true, only draw the attack of each note as a circle.
"""
def __init__(self, performance_part: PerformancePart, height_parameter: str = "pitch",
height_parameter_range: Tuple[Real, Real] = None, width_parameter: str = "volume",
width_parameter_range: Tuple[Real, Real] = None, width_range: Tuple[Real, Real] = (1, 20),
color_parameter: str = None, color_parameter_range: Tuple[Real, Real] = None,
color_map: Callable[[Real], Tuple[Real, Real, Real]] = default_color_map,
time_range: Tuple[Real, Real] = None, fill_color: str = "black", outline_color: str = "black",
outline_width: Real = 1, guide_lines: Sequence[Real] = (), guide_line_width: Real = 2,
guide_line_color: str = 'black', attack_only: bool = False):
self.performance_part = performance_part
self.height_parameter = height_parameter
self.height_parameter_range = height_parameter_range if height_parameter_range is not None \
else self._get_part_pitch_range() if height_parameter == "pitch" else (0, 1)
self.width_parameter = width_parameter
self.width_parameter_range = width_parameter_range if width_parameter_range is not None \
else self._get_part_pitch_range() if width_parameter == "pitch" else (0, 1)
self.width_range = width_range
self.color_parameter = color_parameter
self.color_parameter_range = color_parameter_range if color_parameter_range is not None \
else self._get_part_pitch_range() if color_parameter == "pitch" else (0, 1)
self.color_map = color_map
self.time_range = (0, performance_part.end_beat) if time_range is None else time_range
self.fill_color = fill_color
self.outline_color = outline_color
self.outline_width = outline_width
self.attack_only = attack_only
self.guide_lines = guide_lines
self.guide_line_width = guide_line_width
self.guide_line_color = guide_line_color
def _get_part_pitch_range(self):
return min(note.pitch.min_level() if isinstance(note.pitch, Envelope) else note.pitch
for note in self.performance_part.get_note_iterator()),\
max(note.pitch.max_level() if isinstance(note.pitch, Envelope) else note.pitch
for note in self.performance_part.get_note_iterator())
def render(self, drawing: drawsvg.Drawing, bottom_left: Tuple[Real, Real], dimensions: Tuple[Real, Real]):
for note in self.performance_part.get_note_iterator():
height = note.pitch if self.height_parameter == "pitch" \
else note.volume if self.height_parameter == "volume" \
else note.properties["param_" + self.height_parameter] \
if ("param_" + self.height_parameter) in note.properties else 0
height_envelope = height.duplicate() if isinstance(height, Envelope) else Envelope((height, height), (note.length_sum(),))
height_envelope.remove_segments_after(note.length_sum())
height_envelope.shift_vertical(-self.height_parameter_range[0])
height_envelope.scale_vertical(dimensions[1] / (self.height_parameter_range[1] - self.height_parameter_range[0]))
height_envelope.shift_vertical(bottom_left[1])
height_envelope.scale_horizontal(dimensions[0] / (self.time_range[1] - self.time_range[0]))
height_envelope.shift_horizontal(bottom_left[0] + dimensions[0] * (note.start_beat - self.time_range[0]) /
(self.time_range[1] - self.time_range[0]))
width = note.pitch if self.width_parameter == "pitch" \
else note.volume if self.width_parameter == "volume" \
else note.properties["param_" + self.width_parameter] \
if ("param_" + self.width_parameter) in note.properties else 0
width_envelope = width.duplicate() if isinstance(width, Envelope) else Envelope((width, width), (note.length_sum(),))
width_envelope.remove_segments_after(note.length_sum())
width_envelope.shift_vertical(-self.width_parameter_range[0])
width_envelope.scale_vertical((self.width_range[1] - self.width_range[0]) /
(self.width_parameter_range[1] - self.width_parameter_range[0]))
width_envelope.shift_vertical(self.width_range[0])
width_envelope.scale_horizontal(dimensions[0] / (self.time_range[1] - self.time_range[0]))
width_envelope.shift_horizontal(bottom_left[0] + dimensions[0] * (note.start_beat - self.time_range[0]) /
(self.time_range[1] - self.time_range[0]))
if self.color_parameter is not None:
color = note.pitch if self.color_parameter == "pitch" \
else note.volume if self.color_parameter == "volume" \
else note.properties.extra_playback_parameters[self.color_parameter] \
if self.color_parameter in note.properties.extra_playback_parameters else 0
note_fill = get_fill(color, height_envelope.start_time(), height_envelope.end_time(),
self.color_map, self.color_parameter_range)
else:
note_fill = self.fill_color
if self.attack_only:
_draw_note_attack_only(drawing, height_envelope, width_envelope, note_fill, self.outline_width,
self.outline_color)
else:
_draw_note_raw(drawing, height_envelope, width_envelope, note_fill, self.outline_width,
self.outline_color)
self._render_guide_lines(drawing, bottom_left, dimensions)
def _render_guide_lines(self, drawing: drawsvg.Drawing, bottom_left: Tuple[Real, Real],
dimensions: Tuple[Real, Real]):
for value in self.guide_lines:
line_height = bottom_left[1] + (value - self.height_parameter_range[0]) / \
(self.height_parameter_range[1] - self.height_parameter_range[0]) * dimensions[1]
drawing.append(
drawsvg.Line(
bottom_left[0], line_height, bottom_left[0] + dimensions[0], line_height,
stroke_width=self.guide_line_width, stroke=self.guide_line_color
)
)
def render_to_file(self, file_path, dimensions, bg_color=None, h_padding=100, v_padding=100, pixel_scale=2):
unpadded_dimensions = dimensions[0] - 2 * h_padding, dimensions[1] - 2 * v_padding
d = drawsvg.Drawing(*dimensions, displayInline=False)
if bg_color is not None:
d.append(drawsvg.Rectangle(0, 0, *dimensions, fill=bg_color))
self.render(d, (h_padding, v_padding), unpadded_dimensions)
d.setPixelScale(pixel_scale)
d.saveSvg(file_path)
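
# Hedged usage sketch (the file paths and the Performance-loading call are
# illustrative; any PerformancePart recorded by scamp would work here):
#
#     from scamp import Performance
#     performance = Performance.load_from_json("my_performance.json")
#     graph = PartNoteGraph(performance.parts[0], guide_lines=(60, 72))
#     graph.render_to_file("note_graph.svg", (1600, 900), bg_color="white")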
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/engraving/note_graph.py | 0.859752 | 0.374791 | note_graph.py | pypi |
from typing import MutableMapping, Any, Tuple
from functools import lru_cache
import random
class LSystem:
"""
Simple implementation of an LSystem. Each generation is a string consisting of an alphabet of characters.
Optionally, these characters can be assigned meanings.
:param seed_string: the initial string
:param production_rules: dictionary describing how each letter evolves in a subsequent generation. Any letter
not found in the dictionary is assumed to be a constant. Also, stochastic rules are possible by providing
a list or tuple of outcomes for a given letter, or a list/tuple consisting of a list of outcomes and a
list of weightings.
:param meanings: (optional) dictionary specifying the meaning of each letter. Should contain an entry for every
letter potentially encountered.
:ivar seed: the initial string
:ivar rules: dictionary describing how each letter evolves in a subsequent generation. Any letter
not found in the dictionary is assumed to be a constant
:ivar meanings: (optional) dictionary specifying the meaning of each letter. Should contain an entry for every
letter potentially encountered.
"""
def __init__(self, seed_string: str, production_rules: MutableMapping[str, str],
meanings: MutableMapping[str, Any] = None):
self.seed = seed_string
self.rules = production_rules
self.meanings = meanings
def _process_letter(self, letter):
if letter in self.rules:
rule_outcome = self.rules[letter]
if isinstance(rule_outcome, (list, tuple)):
if len(rule_outcome) == 2 and isinstance(rule_outcome[0], (list, tuple)) \
and isinstance(rule_outcome[1], (list, tuple)):
outcomes, weights = rule_outcome
return random.choices(outcomes, weights=weights, k=1)[0]
else:
return random.choice(rule_outcome)
else:
return rule_outcome
else:
return letter
@lru_cache()
def get_generation(self, n: int) -> str:
"""
Get the state of the system at the nth generation of iteration, where n=0 is the initial state. The first time
a generation is requested, all previous generations must be processed; however, thereafter they are cached.
:param n: which generation
"""
if n < 0 or not isinstance(n, int):
raise ValueError("Invalid LSystem generation; must be integer >= 0.")
if n == 0:
return self.seed
return "".join(self._process_letter(letter) for letter in self.get_generation(n - 1))
def get_generation_meanings(self, n: int) -> Tuple:
"""
Get the meanings associated with the given generation, according to the meanings dictionary.
:param n: which generation
"""
if self.meanings is None:
raise ValueError("Cannot get generation meanings; meanings were not defined for this LSystem.")
return tuple(self.meanings[letter] for letter in self.get_generation(n))
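if __name__ == "__main__":
    # Usage sketch: Lindenmayer's classic "algae" system (A -> AB, B -> A),
    # plus a stochastic rule where B rewrites to "A" or "BB" with 3:1 odds.
    algae = LSystem("A", {"A": "AB", "B": "A"})
    for n in range(5):
        print(algae.get_generation(n))  # A, AB, ABA, ABAAB, ABAABABA
    stochastic = LSystem("A", {"A": "AB", "B": (("A", "BB"), (3, 1))})
    print(stochastic.get_generation(4))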
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/process/l_systems.py | 0.935516 | 0.600745 | l_systems.py | pypi |
import random
def random_walk(start_value, step=1, turn_around_chance=0.5, clamp_min=None, clamp_max=None):
"""
Returns a generator starting on `start_value` that randomly walks between `clamp_min` and `clamp_max`, using a
step size of `step`, and turning around with probability `turn_around_chance`
:param start_value: walk start value
:param step: walk step size
:param turn_around_chance: probability of turning around (affects wiggliness)
:param clamp_min: random walk bounces off of this lower limit. If None, walk can go arbitrarily low.
:param clamp_max: random walk bounces off of this upper limit. If None, walk can go arbitrarily high.
"""
x = start_value
current_step = random.choice([-step, step])
while True:
x += current_step
if clamp_min is not None and x < clamp_min:
current_step = step
x += 2 * step
elif clamp_max is not None and x > clamp_max:
current_step = -step
x -= 2 * step
else:
if random.random() < turn_around_chance:
current_step *= -1
yield x
def non_repeating_shuffle(input_list, stop_after=float("inf"), insertion_threshold=0.5):
"""
Returns a generator that randomly selects items from the input list, avoiding repeat selections.
:param input_list: the list to shuffle through
:param stop_after: stops after returning this many items
:param insertion_threshold: how close to insert an item back in the deck after it has been selected. When close to
1, the same item can be returned in close proximity to itself, when close to 0, we cycle through almost every
other item before getting the same item again. (At 0, becomes a deterministic repeated shuffle.)
"""
deck = list(input_list)
max_insert_point = int(len(input_list) * insertion_threshold)
random.shuffle(deck)
while stop_after > 0:
top_card = deck.pop()
yield top_card
deck.insert(random.randint(0, max_insert_point), top_card)
stop_after -= 1
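if __name__ == "__main__":
    # Usage sketch: ten steps of a random walk bounced between 55 and 65, and
    # eight draws of a non-repeating shuffle over a pentatonic pitch set.
    from itertools import islice
    print(list(islice(random_walk(60, step=2, clamp_min=55, clamp_max=65), 10)))
    print(list(non_repeating_shuffle([60, 62, 64, 67, 69], stop_after=8)))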
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/process/generators.py | 0.904277 | 0.409929 | generators.py | pypi |
from typing import Sequence, Union, Callable
from clockblocks import current_clock, Clock
from expenvelope import Envelope, EnvelopeSegment
from expenvelope.envelope import T
class TimeVaryingParameter(Envelope):
"""
A simple wrapper around :class:`~expenvelope.envelope.Envelope` that is aware of the current time or beat in the
current clock. Simply call it like a function to get its value at the current time or beat.
:param levels: see :class:`~expenvelope.envelope.Envelope`
:param durations: see :class:`~expenvelope.envelope.Envelope`
:param curve_shapes: see :class:`~expenvelope.envelope.Envelope`
:param offset: see :class:`~expenvelope.envelope.Envelope`
:param clock: the clock whose time/beat this TimeVaryingParameter uses for lookup. Defaults to the current active clock.
    :param units: either "beats" or "time"; whether to use the beat or the time of the clock to look up the
        parameter value
"""
def __init__(self, levels: Sequence = (0,), durations: Sequence[float] = (),
curve_shapes: Sequence[Union[float, str]] = None, offset: float = 0,
clock: Clock = None, units: str = "beats"):
super().__init__(levels=levels, durations=durations, curve_shapes=curve_shapes, offset=offset)
self._initialize(clock, units)
def _initialize(self, clock, units):
if units not in ("beats", "time"):
raise ValueError("`units` argument must be either \"beats\" or \"time\"")
self.clock = current_clock() if clock is None else clock
if self.clock is None:
raise ValueError("No clock was specified, and there was no clock available on the current thread. (Did"
"you create this TimeVaryingParameter before creating a Session or master clock?)")
self.get_moment = self.clock.time if units == "time" else self.clock.beat
self.instantiation_time = self.get_moment()
def finished(self):
return self.get_moment() - self.instantiation_time >= self.length()
@classmethod
def from_segments(cls, segments: Sequence[EnvelopeSegment], clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.from_segments`, but taking an optional clock and units parameter.
(See :class:`TimeVaryingParameter`)
"""
instance = super().from_segments(segments)
instance._initialize(clock, units)
return instance
@classmethod
def from_levels_and_durations(cls, levels: Sequence, durations: Sequence[float],
curve_shapes: Sequence[Union[float, str]] = None, offset: float = 0,
clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.from_levels_and_durations`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().from_levels_and_durations(levels, durations, curve_shapes, offset)
instance._initialize(clock, units)
return instance
@classmethod
def from_levels(cls, levels: Sequence, length: float = 1.0, offset: float = 0,
clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.from_levels`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().from_levels(levels, length, offset)
instance._initialize(clock, units)
return instance
@classmethod
def from_list(cls, constructor_list: Sequence, clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.from_list`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().from_list(constructor_list)
instance._initialize(clock, units)
return instance
@classmethod
def from_points(cls, *points: Sequence, clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.from_points`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().from_points(*points)
instance._initialize(clock, units)
return instance
@classmethod
def release(cls, duration: float, start_level=1, curve_shape: Union[float, str] = None,
clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.release`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().release(duration, start_level, curve_shape)
instance._initialize(clock, units)
return instance
@classmethod
def ar(cls, attack_length: float, release_length: float, peak_level=1,
attack_shape: Union[float, str] = None, release_shape: Union[float, str] = None,
clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.ar`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().ar(attack_length, release_length, peak_level, attack_shape, release_shape)
instance._initialize(clock, units)
return instance
@classmethod
def asr(cls, attack_length: float, sustain_level, sustain_length: float, release_length: float,
attack_shape: Union[float, str] = None, release_shape: Union[float, str] = None,
clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.asr`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().asr(attack_length, sustain_level, sustain_length,
release_length, attack_shape, release_shape)
instance._initialize(clock, units)
return instance
@classmethod
def adsr(cls, attack_length: float, attack_level, decay_length: float, sustain_level, sustain_length: float,
release_length: float, attack_shape: Union[float, str] = None, decay_shape: Union[float, str] = None,
release_shape: Union[float, str] = None, clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.adsr`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().adsr(attack_length, attack_level, decay_length, sustain_level, sustain_length,
release_length, attack_shape, decay_shape, release_shape)
instance._initialize(clock, units)
return instance
@classmethod
def from_function(cls, function: Callable[[float], float], domain_start: float = 0, domain_end: float = 1,
resolution_multiple: int = 2, key_point_precision: int = 2000,
key_point_iterations: int = 5, clock: Clock = None, units: str = "beats") -> T:
"""
Same as :func:`~expenvelope.envelope.Envelope.from_function`, but taking an optional clock and units
parameter. (See :class:`TimeVaryingParameter`)
"""
instance = super().from_function(function, domain_start, domain_end, resolution_multiple,
key_point_precision, key_point_iterations)
instance._initialize(clock, units)
return instance
def __call__(self, *args, **kwargs):
return self.value_at(self.get_moment() - self.instantiation_time)
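
# Hedged usage sketch (requires a Session or other master clock to exist first,
# since the envelope is looked up against the current clock's beat):
#
#     from scamp import Session
#     s = Session()
#     piano = s.new_part("piano")
#     volume = TimeVaryingParameter((0.2, 1.0, 0.2), (4, 4))  # swell over 8 beats
#     while not volume.finished():
#         piano.play_note(60, volume(), 0.25)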
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/utilities/time_varying_parameter.py | 0.976197 | 0.552117 | time_varying_parameter.py | pypi |
from scamp.utilities import is_x_pow_of_y, floor_x_to_pow_of_y, ceil_x_to_pow_of_y, round_x_to_pow_of_y, \
floor_to_multiple, ceil_to_multiple, round_to_multiple, is_multiple, prime_factor, is_prime
from math import gcd
import math
from expenvelope import EnvelopeSegment
from numbers import Real
from .sequences import multi_option_function
def lcm(a, b):
"""Return lowest common multiple."""
return a * b // gcd(a, b)
def remap(value_or_values, out_min, out_max, in_min=None, in_max=None,
input_warp="lin", output_warp="lin", clip=True):
"""
Rescales the given value or values so that they fall within the given output range. Not efficient or vectorized,
but unless you're using large datasets, that shouldn't be an issue.
:param value_or_values: the value or values to rescale
:param out_min: lower bound of output range
:param out_max: upper bound of output range
:param in_min: lower bound of input range (defaults to minimum input value)
:param in_max: upper bound of input range (defaults to maximum input value)
:param input_warp: either "lin", or "exp" depending on whether the input values are linearly (e.g. pitch) or
exponentially (e.g. frequency) spaced.
:param output_warp: either "lin", "exp", or a number that corresponds to the shape of the warping curve. When using
a number, 0 is linear, > 0 warps outputs towards the bottom of the range, and < 0 warps outputs towards the
top of the range (see `~expenvelope.envelope.Envelope` for a description of `curve_shape`).
:param clip: if True, clip output values so that they do not go outside of the designated output range
:return: a suitable warped output value or list of output values
"""
if not hasattr(value_or_values, '__len__'):
if in_min is None or in_max is None:
raise ValueError("When rescaling a single value, must supply in_min and in_max parameters.")
        return remap([value_or_values], out_min, out_max, in_min, in_max, input_warp, output_warp, clip)[0]
if in_min is None:
in_min = min(value_or_values)
if in_max is None:
in_max = max(value_or_values)
if input_warp == "exp":
log_in_min, log_in_max = math.log(in_min), math.log(in_max)
log_in_range = log_in_max - log_in_min
normalized_data = [(math.log(x) - log_in_min) / log_in_range
for x in value_or_values]
else:
in_range = in_max - in_min
normalized_data = [(x - in_min) / in_range for x in value_or_values]
if clip:
normalized_data = [min(max(x, 0), 1) for x in normalized_data]
if output_warp == "exp":
log_out_min, log_out_max = math.log(out_min), math.log(out_max)
log_out_range = log_out_max - log_out_min
return [math.exp(log_out_min + log_out_range * x) for x in normalized_data]
elif isinstance(output_warp, Real):
warp_envelope = EnvelopeSegment(0, 1, out_min, out_max, output_warp)
return [warp_envelope.value_at(x) for x in normalized_data]
else:
out_range = out_max - out_min
return [out_min + out_range * x for x in normalized_data]
def wrap_to_range(x, range_min, range_max, mirror=False):
"""
Wraps the input x into the given range, either jumping back to the other side of the range at the boundaries,
or if the mirror parameter is set, reflecting at the boundaries.
:param x: the input
:param range_min: minimum of wrapping range
:param range_max: maximum of wrapping range
:param mirror: whether to mirror at the boundaries
"""
width = (range_max - range_min)
if mirror:
mod_double_range = (x - range_min) % (2 * width)
if mod_double_range > width:
return 2 * width - mod_double_range + range_min
else:
return mod_double_range + range_min
else:
return (x - range_min) % width + range_min
floor_x_to_pow_of_y = multi_option_function(floor_x_to_pow_of_y)
ceil_x_to_pow_of_y = multi_option_function(ceil_x_to_pow_of_y)
round_x_to_pow_of_y = multi_option_function(round_x_to_pow_of_y)
floor_to_multiple = multi_option_function(floor_to_multiple)
ceil_to_multiple = multi_option_function(ceil_to_multiple)
round_to_multiple = multi_option_function(round_to_multiple)
class AtanWarp:
"""
A warping function that uses an appropriately scaled arctan to warp values from the input range to the output range.
    See :func:`atan_warp` for details. This callable object version allows you to define the warp on
    instantiation and then call it like a function, e.g.:
.. code-block:: python
warper = AtanWarp(-10, 10, 100, 200)
print(warper(0), warper(-5), warper(20))
:param in_lo: soft input minimum
:param in_hi: soft input maximum
    :param out_min: hard output minimum
    :param out_max: hard output maximum
"""
def __init__(self, in_lo: float, in_hi: float, out_min: float, out_max: float):
self.in_center = (in_hi + in_lo) / 2
self.out_center = (out_max + out_min) / 2
self.out_width = (out_max - out_min) / 2
self.slope = (out_max - out_min) / (in_hi - in_lo)
def __call__(self, val):
return math.atan((val - self.in_center) * (math.pi / 2) / self.out_width * self.slope) * self.out_width / (
math.pi / 2) + self.out_center
def atan_warp(value, in_lo, in_hi, out_min, out_max) -> float:
"""
Uses an appropriately scaled arctan function to warp values from the input range to the output range. The input
range is a soft boundary for the inputs expected, whereas the output range is a hard limit, to which extreme
high and low input values asymptote. At the exact center of the input range there is no warping, and the slope
of the transformation is the same as if it were linear; as input values approach or exceed the boundaries
of the input range, the output approaches, but never exceeds the output range.
:param value: the value to warp
:param in_lo: soft input minimum
:param in_hi: soft input maximum
    :param out_min: hard output minimum
    :param out_max: hard output maximum
"""
return AtanWarp(in_lo, in_hi, out_min, out_max)(value)
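if __name__ == "__main__":
    # Usage sketch of the range utilities above.
    print(remap([60, 64, 67, 72], 0, 1))           # in_min/in_max inferred -> [0.0, ..., 1.0]
    print(wrap_to_range(75, 60, 72))               # jumps back into range -> 63
    print(wrap_to_range(75, 60, 72, mirror=True))  # reflects at boundary -> 69
    print(atan_warp(0, -10, 10, 100, 200))         # center of input range -> 150.0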
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/utilities/math.py | 0.949389 | 0.477189 | math.py | pypi |
from __future__ import annotations
import logging
from collections import namedtuple
from clockblocks import Clock
from scamp import ScampInstrument, Session, SpellingPolicy, NoteProperties, NoteHandle, ChordHandle
from scamp.utilities import NoteProperty
from typing import Sequence, Optional, Union, Tuple
_PresetInfo = namedtuple("_PresetInfo", "name, instrument, bundled_properties, bundled_properties_on_switch, "
"bundled_properties_on_switch_away")
class MultiPresetInstrument:
"""
A convenient wrapper for bundling multiple `ScampInstrument` objects or soundfont presets into a single notated
part, and assigning particular notations to these presets.
:param session: the session in which this MultiPresetInstrument operates
:param name: name of this MultiPresetInstrument (e.g. in the score)
:param default_spelling_policy: see :class:`~scamp.instruments.ScampInstrument`
:param clef_preference: see :class:`~scamp.instruments.ScampInstrument`
"""
def __init__(self, session: Session, name: str, default_spelling_policy: Optional[SpellingPolicy] = None,
clef_preference: str = "from_name"):
self.host_session = session
self.notation_part = self.host_session.new_silent_part(name, default_spelling_policy, clef_preference)
self.presets = []
self.last_preset_played = None
@property
def name(self):
"""Name of this MultiPresetInstrument (e.g. in the score)"""
return self.notation_part.name
@name.setter
def name(self, value):
self.notation_part.name = value
def add_preset(self, name: str, instrument_or_soundfont_preset: Union[ScampInstrument, str, int, Tuple[int, int]],
bundled_properties: Union[str, dict, Sequence, NoteProperty] = None,
bundled_properties_on_switch: Union[str, dict, Sequence, NoteProperty] = None,
bundled_properties_on_switch_away: Union[str, dict, Sequence, NoteProperty] = None,
make_default=False):
"""
Add a new preset with a given instrument and name.
:param instrument_or_soundfont_preset: either a ScampInstrument, or a soundfont preset name or number.
:param name: name for this preset used when calling play_note
:param bundled_properties: Any properties that we wish to bundle with every note played by this preset. For
example, diamond noteheads for harmonics.
:param bundled_properties_on_switch: Any properties that we wish to bundle with this preset when we switch to
it (the last note was a different preset). For example, "pizz."
        :param bundled_properties_on_switch_away: Any properties that we wish to bundle with the first note after we
            switch away from this preset to a different one. For example, "arco."
:param make_default: if True, moves this preset to the front of the list so that it becomes the default
preset. If this is the first preset defined, it will become the default regardless of the setting of
this parameter.
:return: self, for chaining purposes
"""
if bundled_properties is not None:
bundled_properties = NoteProperties.interpret(bundled_properties)
if bundled_properties_on_switch is not None:
bundled_properties_on_switch = NoteProperties.interpret(bundled_properties_on_switch)
if bundled_properties_on_switch_away is not None:
bundled_properties_on_switch_away = NoteProperties.interpret(bundled_properties_on_switch_away)
inst = instrument_or_soundfont_preset if isinstance(instrument_or_soundfont_preset, ScampInstrument) else \
self.host_session.new_part("{}-{}".format(self.notation_part.name, name), instrument_or_soundfont_preset)
preset = _PresetInfo(name, inst, bundled_properties, bundled_properties_on_switch,
bundled_properties_on_switch_away)
if make_default:
self.presets.insert(0, preset)
else:
self.presets.append(preset)
return self
def _get_preset_index(self, preset_name: str) -> Union[int, None]:
names = [x[0] for x in self.presets]
if preset_name in names:
return [x[0] for x in self.presets].index(preset_name)
else:
return None
def _resolve_preset(self, preset_name: str) -> _PresetInfo:
        if len(self.presets) == 0:
            # return an empty _PresetInfo (rather than a bare tuple) so that callers
            # can still access .name and .instrument without an AttributeError
            return _PresetInfo(None, None, None, None, None)
elif preset_name is None: # use the default preset
return self.presets[0]
else:
index = self._get_preset_index(preset_name)
if index is None:
logging.warning("MultiPresetInstrument {} could not resolve preset {}. Falling back to default preset".
format(self.name, preset_name))
return self.presets[0]
return self.presets[index]
    def _check_if_switched(self, preset_name) -> bool:
        if len(self.presets) == 0:
            return False
        return (self.last_preset_played != preset_name) if self.last_preset_played is not None \
            else (preset_name != self.presets[0].name)
def _resolve_properties(self, preset_info: _PresetInfo, note_properties):
# make preset_switch_properties None unless it switched. _check_if_switched also keeps track of last preset
if self._check_if_switched(preset_info.name):
preset_switch_properties = preset_info.bundled_properties_on_switch
last_preset_switch_away_properties = \
self._resolve_preset(self.last_preset_played).bundled_properties_on_switch_away \
if self.last_preset_played is not None else None
else:
preset_switch_properties = last_preset_switch_away_properties = None
self.last_preset_played = preset_info.name
# make a blank of NoteProperties and incorporate all of the preset properties
return NoteProperties().incorporate(preset_info.bundled_properties).\
incorporate(last_preset_switch_away_properties).\
incorporate(preset_switch_properties).\
incorporate(NoteProperties.interpret(note_properties))
def play_note(self, pitch, volume, length, properties: Union[str, dict, Sequence, NoteProperty] = None,
preset: str = None, blocking: bool = True, clock: Clock = None) -> None:
"""
Play a note using this MultiPresetInstrument
:param pitch: see :func:`~scamp.instruments.ScampInstrument.play_note`
:param volume: see :func:`~scamp.instruments.ScampInstrument.play_note`
:param length: see :func:`~scamp.instruments.ScampInstrument.play_note`
:param preset: Name of the preset to use for this note.
:param properties: see :func:`~scamp.instruments.ScampInstrument.play_note`
:param blocking: see :func:`~scamp.instruments.ScampInstrument.play_note`
:param clock: see :func:`~scamp.instruments.ScampInstrument.play_note`
"""
preset_info = self._resolve_preset(preset)
properties = self._resolve_properties(preset_info, properties)
if preset_info.instrument is not None:
# this will happen so long as there's a preset to resolve to
preset_info.instrument.play_note(
pitch, volume, length, properties=properties, blocking=False, clock=clock, transcribe=False)
else:
logging.warning("MultiPresetInstrument {} does not have any presets. (Probably a mistake?)".
format(self.name))
self.notation_part.play_note(pitch, volume, length, properties=properties, blocking=blocking, clock=clock)
def play_chord(self, pitches: Sequence, volume, length, properties: Union[str, dict, Sequence, NoteProperty] = None,
preset: str = None, blocking: bool = True, clock: Clock = None) -> None:
"""
Play a chord using this MultiPresetInstrument.
:param pitches: see :func:`~scamp.instruments.ScampInstrument.play_chord`
:param volume: see :func:`~scamp.instruments.ScampInstrument.play_chord`
:param length: see :func:`~scamp.instruments.ScampInstrument.play_chord`
:param properties: see :func:`~scamp.instruments.ScampInstrument.play_chord`
:param preset: Name of the preset to use for this chord.
:param blocking: see :func:`~scamp.instruments.ScampInstrument.play_chord`
:param clock: see :func:`~scamp.instruments.ScampInstrument.play_chord`
"""
preset_info = self._resolve_preset(preset)
properties = self._resolve_properties(preset_info, properties)
if preset_info.instrument is not None:
# this will happen so long as there's a preset to resolve to
preset_info.instrument.play_chord(pitches, volume, length, properties=properties,
blocking=False, clock=clock, transcribe=False)
else:
logging.warning("MultiPresetInstrument {} does not have any presets. (Probably a mistake?)".
format(self.name))
self.notation_part.play_chord(pitches, volume, length, properties=properties, blocking=blocking, clock=clock)
def start_note(self, pitch: float, volume: float, properties: Union[str, dict, Sequence, NoteProperty] = None,
preset: str = None, clock: Clock = None, max_volume: float = 1) -> MultiNoteHandle:
"""
Start a note using this MultiPresetInstrument.
:param pitch: see :func:`~scamp.instruments.ScampInstrument.start_note`
:param volume: see :func:`~scamp.instruments.ScampInstrument.start_note`
:param properties: see :func:`~scamp.instruments.ScampInstrument.start_note`
:param preset: name of the preset to use for this note.
:param clock: see :func:`~scamp.instruments.ScampInstrument.start_note`
:param max_volume: see :func:`~scamp.instruments.ScampInstrument.start_note`
:return: a :class:`MultiNoteHandle` with which to later manipulate the note
"""
handles = []
preset_info = self._resolve_preset(preset)
properties = self._resolve_properties(preset_info, properties)
if preset_info.instrument is not None:
# this will happen so long as there's a preset to resolve to
handles.append(preset_info.instrument.start_note(pitch, volume, properties,
clock=clock, max_volume=max_volume, flags="no_transcribe"))
else:
logging.warning("MultiPresetInstrument {} does not have any presets. (Probably a mistake?)".
format(self.name))
handles.append(self.notation_part.start_note(pitch, volume, properties, clock=clock, max_volume=max_volume))
return MultiNoteHandle(handles)
def start_chord(self, pitches: Sequence[float], volume: float,
properties: Union[str, dict, Sequence, NoteProperty] = None, preset: str = None,
clock: Clock = None, max_volume: float = 1) -> MultiNoteHandle:
"""
        Start a chord using this MultiPresetInstrument.
:param pitches: see :func:`~scamp.instruments.ScampInstrument.start_chord`
:param volume: see :func:`~scamp.instruments.ScampInstrument.start_chord`
:param properties: see :func:`~scamp.instruments.ScampInstrument.start_chord`
:param preset: name of the preset to use for this note.
:param clock: see :func:`~scamp.instruments.ScampInstrument.start_chord`
:param max_volume: see :func:`~scamp.instruments.ScampInstrument.start_chord`
:return: a :class:`MultiNoteHandle` with which to later manipulate the chord
"""
handles = []
preset_info = self._resolve_preset(preset)
properties = self._resolve_properties(preset_info, properties)
if preset_info.instrument is not None:
# this will happen so long as there's a preset to resolve to
handles.append(preset_info.instrument.start_chord(pitches, volume, properties,
clock=clock, max_volume=max_volume, flags="no_transcribe"))
else:
logging.warning("MultiPresetInstrument {} does not have any presets. (Probably a mistake?)".
format(self.name))
handles.append(self.notation_part.start_chord(pitches, volume, properties, clock=clock, max_volume=max_volume))
return MultiNoteHandle(handles)
def send_midi_cc(self, cc_number: int, value_from_0_to_1: float) -> None:
"""
        Send a midi cc message to every :class:`~scamp.instruments.ScampInstrument` used by this MultiPresetInstrument.
:param cc_number: MIDI cc number
:param value_from_0_to_1: value to send (scaled from 0 to 1)
"""
for preset_info in self.presets:
preset_info.instrument.send_midi_cc(cc_number, value_from_0_to_1)
def end_all_notes(self) -> None:
"""
Ends all notes currently playing
"""
for preset_info in self.presets:
preset_info.instrument.end_all_notes()
def num_notes_playing(self) -> int:
"""
Returns the number of notes currently playing.
"""
return self.notation_part.num_notes_playing()
def set_max_pitch_bend(self, semitones: int) -> None:
"""
Set the max pitch bend for all midi playback implementations on this instrument
"""
for preset_info in self.presets:
preset_info.instrument.set_max_pitch_bend(semitones)
@property
def clef_preference(self):
"""
The clef preference for this instrument. See :class:`~scamp.instruments.ScampInstrument`
"""
return self.notation_part.clef_preference
@clef_preference.setter
def clef_preference(self, value):
self.notation_part.clef_preference = value
@property
def default_spelling_policy(self):
"""
The default spelling policy for notes played back by this instrument.
See :class:`~scamp.instruments.ScampInstrument`
"""
return self.notation_part.default_spelling_policy
@default_spelling_policy.setter
def default_spelling_policy(self, value: Union[SpellingPolicy, str]):
self.notation_part.default_spelling_policy = value
class MultiNoteHandle:
"""
    The equivalent of a :class:`~scamp.instruments.NoteHandle`, but for a :class:`MultiPresetInstrument`.
:param note_handles: a list of the NoteHandles for the underlying ScampInstruments. (One for the silent
notation part, and one for the active preset.)
"""
def __init__(self, note_handles: Sequence[Union[NoteHandle, ChordHandle]] = ()):
self.note_handles = note_handles
def change_parameter(self, param_name: str, target_value_or_values: Union[float, Sequence],
transition_length_or_lengths: Union[float, Sequence] = 0,
transition_curve_shape_or_shapes: Union[float, Sequence] = 0, clock: Clock = None) -> None:
"""
See :func:`~scamp.instruments.NoteHandle.change_parameter`
"""
for note_handle in self.note_handles:
note_handle.change_parameter(param_name, target_value_or_values, transition_length_or_lengths,
transition_curve_shape_or_shapes, clock)
def change_pitch(self, target_value_or_values: Union[float, Sequence],
transition_length_or_lengths: Union[float, Sequence] = 0,
transition_curve_shape_or_shapes: Union[float, Sequence] = 0, clock: Clock = None) -> None:
"""
See :func:`~scamp.instruments.NoteHandle.change_pitch`
"""
for note_handle in self.note_handles:
note_handle.change_pitch(target_value_or_values, transition_length_or_lengths,
transition_curve_shape_or_shapes, clock)
def change_volume(self, target_value_or_values: Union[float, Sequence],
transition_length_or_lengths: Union[float, Sequence] = 0,
transition_curve_shape_or_shapes: Union[float, Sequence] = 0, clock: Clock = None) -> None:
"""
See :func:`~scamp.instruments.NoteHandle.change_volume`
"""
for note_handle in self.note_handles:
note_handle.change_volume(target_value_or_values, transition_length_or_lengths,
transition_curve_shape_or_shapes, clock)
def split(self) -> None:
"""
See :func:`~scamp.instruments.NoteHandle.split`
"""
for note_handle in self.note_handles:
note_handle.split()
def end(self) -> None:
"""
See :func:`~scamp.instruments.NoteHandle.end`
"""
for note_handle in self.note_handles:
note_handle.end()
def __repr__(self):
return "MultiNoteHandle({})".format(self.note_handles)
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/playback/multi_preset_instrument.py | 0.919769 | 0.227266 | multi_preset_instrument.py | pypi |
from scamp.playback_implementations import OSCPlaybackImplementation
from .sc_lang import SCLangInstance
from scamp.instruments import ScampInstrument, Ensemble
from scamp.utilities import resolve_path
class SCPlaybackImplementation(OSCPlaybackImplementation):
"""
A subclass of :class:`~scamp.playback_implementations.OSCPlaybackImplementation` designed to communicate with
a running copy of SCLang (via an :class:`~scamp_extensions.supercollider.sc_lang.SCLangInstance`).
    :param synth_def: a string of SCLang code representing the SynthDef to run, or simply the name of an existing
        SynthDef. The SynthDef should take at least the arguments "freq" (to which the pitch is sent), "volume"
        (to which the note volume is sent), and "gate" (which is used to start and stop the note).
"""
sclang_instance = None
def __init__(self, synth_def: str):
if SCPlaybackImplementation.sclang_instance is None:
SCPlaybackImplementation.sclang_instance = SCLangInstance()
        if synth_def.isalnum() or (synth_def[0] == "\\" and synth_def[1:].isalnum()):
            # just the name of an existing synth_def (optionally preceded by a backslash)
def_name = synth_def.replace("\\", "")
compile_synth_def = False
else:
def_name = synth_def.split("\\")[1].split(",")[0].strip()
compile_synth_def = True
super().__init__(SCPlaybackImplementation.sclang_instance.port, ip_address="127.0.0.1", message_prefix=def_name)
if compile_synth_def:
SCPlaybackImplementation.sclang_instance.new_synth_def(synth_def)
def add_sc_extensions():
"""
Adds several new functions to the :class:`~scamp.instruments.ScampInstrument` class, as well as to the
:class:`~scamp.instruments.Ensemble` (and therefore :class:`~scamp.session.Session`).
New instance methods of `ScampInstrument`:
``add_supercollider_playback(self, synth_def: str)``: takes a string containing a SuperCollider SynthDef, and adds
a :class:`SCPlaybackImplementation` to this instrument that uses that SynthDef to synthesize sound. (This starts
up instances of sclang and scsynth in the background.)
``remove_supercollider_playback(self)``: removes the (most recently added) :class:`SCPlaybackImplementation` from
this instrument's playback_implementations.
New instance methods of `Ensemble` / `Session`:
``new_supercollider_part(self, name: str, synth_def: str)``: Similarly to any of the other "new_part" methods, this
adds and returns a newly created ScampInstrument that uses an :class:`SCPlaybackImplementation` based on the
given synth def string.
``get_sclang_instance(self)``: Returns the instance of :class:`SCLangInstance` that this ensemble is using for
supercollider playback (or creates one if none is running).
``start_recording_sc_output(self, path, num_channels=2)``: Tells SuperCollider to start recording the playback to
    an audio file at the given path, using the specified number of channels.
``stop_recording_sc_output(self)``: Stops recording SuperCollider playback to an audio file.
"""
def _add_supercollider_playback(self, synth_def):
self.playback_implementations.append(SCPlaybackImplementation(synth_def))
return self
def _remove_supercollider_playback(self):
for index in reversed(range(len(self.playback_implementations))):
if isinstance(self.playback_implementations[index], SCPlaybackImplementation):
self.playback_implementations.pop(index)
break
return self
ScampInstrument.add_supercollider_playback = _add_supercollider_playback
ScampInstrument.remove_supercollider_playback = _remove_supercollider_playback
def _new_supercollider_part(self, name=None, synth_def=None):
assert synth_def is not None
name = "Track " + str(len(self.instruments) + 1) if name is None else name
instrument = self.new_silent_part(name)
instrument.add_supercollider_playback(synth_def)
return instrument
Ensemble.new_supercollider_part = _new_supercollider_part
    def _get_sc_instance(self):
        if SCPlaybackImplementation in self.shared_resources:
            if "sclang_instance" in self.shared_resources[SCPlaybackImplementation]:
                return self.shared_resources[SCPlaybackImplementation]["sclang_instance"]
            else:
                # create a single instance, store it, and return that same instance
                new_sc_instance = SCLangInstance()
                self.shared_resources[SCPlaybackImplementation]["sclang_instance"] = new_sc_instance
        else:
            new_sc_instance = SCLangInstance()
            self.shared_resources[SCPlaybackImplementation] = {"sclang_instance": new_sc_instance}
        return new_sc_instance
Ensemble.get_sclang_instance = _get_sc_instance
def _start_recording_sc_output(self, path, num_channels=2):
self.get_sclang_instance().send_message("/recording/start", [resolve_path(path), num_channels])
def _stop_recording_sc_output(self):
self.get_sclang_instance().send_message("/recording/stop", 0)
Ensemble.start_recording_sc_output = _start_recording_sc_output
Ensemble.stop_recording_sc_output = _stop_recording_sc_output
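
# Hedged usage sketch (the SynthDef body is illustrative SCLang; assumes a
# working SuperCollider installation that scamp can launch in the background):
#
#     from scamp import Session
#     add_sc_extensions()
#     s = Session()
#     sine = s.new_supercollider_part("sine", r"""
#         SynthDef(\sine, { |freq=440, volume=0.1, gate=1|
#             var env = EnvGen.kr(Env.adsr(), gate, doneAction: 2);
#             Out.ar(0, SinOsc.ar(freq) * volume * env ! 2);
#         })
#     """)
#     sine.play_note(60, 0.5, 1)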
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/playback/supercollider/sc_playback_implementation.py | 0.864982 | 0.27064 | sc_playback_implementation.py | pypi |
from numbers import Real
from typing import List, Union, Sequence
from .metric_structure import MeterArithmeticGroup, INT_OR_FLOAT
def indispensability_array_from_expression(meter_arithmetic_expression: str, normalize: bool = False,
break_up_large_numbers: bool = False,
upbeats_before_group_length: bool = True) -> List[INT_OR_FLOAT]:
"""
Generates an array of indispensability values for a meter and subdivision, as expressed by a meter arithmetic
expression. Such expressions allow great flexibility in describing metric structure, making possible additive,
multiplicative, and hybrid metrical structures.
    :param meter_arithmetic_expression: a string expression representing a metric hierarchy (meter and subdivision
structure). For instance, "2 * 3" would create the eighth-note subdivisions of 6/8, and "2 + 3 + 2" would create
an additive meter (2+3+2)/8. "(2 + 3 + 2) * 3" would create a kind of hybrid of these: seven main beats in a
2 + 3 + 2 pattern, each of which is subdivided in 3. This might be notated as 6/8+9/8+6/8.
:param normalize: if True, indispensabilities range from 0 to 1. If false, they count up from 0.
:param break_up_large_numbers: if True, numbers greater than 3 are broken up into a sum of 2's
followed by one 3 if odd. This is the Barlow approach.
:param upbeats_before_group_length: see description in :func:`metric_structure.flatten_beat_groups`. Affects the
result when there are groups of uneven length at some level of metric structure. To achieve the standard
Barlowian result, set this to False. I think it works better as True, though.
:return: a list of indispensabilities for the pulses of the given meter.
"""
return MeterArithmeticGroup.parse(meter_arithmetic_expression) \
.to_metric_structure(break_up_large_numbers) \
.get_indispensability_array(normalize=normalize, upbeats_before_group_length=upbeats_before_group_length)
def indispensability_array_from_strata(*rhythmic_strata: Union[int, Sequence[int]], normalize: bool = False,
break_up_large_numbers: bool = False,
upbeats_before_group_length: bool = True) -> List[INT_OR_FLOAT]:
"""
Alternate implementation of :func:`~scamp_extensions.composers.barlicity.get_indispensability_array`, leveraging
the :class:`~scamp_extensions.rhythm.metric_structure.MetricStructure` class to do the calculations.
:param rhythmic_strata: can be either tuples, representing additive metric layers, or integers, representing simple
metric layers.
:param normalize: if True, indispensabilities range from 0 to 1. If false, they count up from 0.
:param break_up_large_numbers: if True, numbers greater than 3 are broken up into a sum of 2's
followed by one 3 if odd. This is the Barlow approach.
:param upbeats_before_group_length: see description in :func:`metric_structure.flatten_beat_groups`. Affects the
result when there are groups of uneven length at some level of metric structure. To achieve the standard
Barlowian result, set this to False. I think it works better as True, though.
:return: a list of indispensabilities for the pulses of the given meter.
"""
expression = "*".join(
("("+"+".join(str(y) for y in x)+")" if hasattr(x, "__len__") else str(x)) for x in rhythmic_strata
)
return indispensability_array_from_expression(
expression, normalize=normalize, break_up_large_numbers=break_up_large_numbers,
upbeats_before_group_length=upbeats_before_group_length
)
def barlow_style_indispensability_array(*rhythmic_strata: Union[int, Sequence[int]],
normalize: bool = False) -> List[INT_OR_FLOAT]:
"""
Alternate implementation of :func:`~scamp_extensions.composers.barlicity.get_standard_indispensability_array`,
leveraging the :class:`~scamp_extensions.rhythm.metric_structure.MetricStructure` class to do the calculations.
:param rhythmic_strata: can be either tuples, representing additive metric layers, or integers, representing simple
metric layers.
:param normalize: if True, indispensabilities range from 0 to 1. If false, they count up from 0.
:return: a list of indispensabilities for the pulses of the given meter.
"""
if not all(isinstance(x, int) for x in rhythmic_strata):
raise ValueError("Standard Barlow indispensability arrays must be based on from integer strata.")
return indispensability_array_from_expression("*".join(str(x) for x in rhythmic_strata), normalize=normalize,
break_up_large_numbers=True, upbeats_before_group_length=False)
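if __name__ == "__main__":
    # Usage sketch: compare the eighth-note indispensabilities of a compound
    # 6/8 grouping ("2 * 3") with an additive (2+3+2)/8 grouping.
    print(indispensability_array_from_expression("2 * 3"))
    print(indispensability_array_from_expression("2 + 3 + 2"))
    print(barlow_style_indispensability_array(2, 3, normalize=True))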
| /scamp_extensions-0.3.5.post1-py3-none-any.whl/scamp_extensions/rhythm/indispensability.py | 0.929368 | 0.633339 | indispensability.py | pypi |
from __future__ import unicode_literals, division, print_function
import logging
import struct
from .parsing import Parser
from .errors import InvalidFormat, EmptyRead
logger = logging.getLogger(__name__)
class WartsRecord(object):
"""Base class for a Warts record. This class should not be
instanciated directly, you should call the parsing factory
`WartsRecord.parse(file)` to obtain an instance of an appropriate
subclass.
"""
WARTS_HEADER_FORMAT = ">HHI"
# Mapping from types (as seen in the Warts object header) to parsing class
WARTS_TYPES = {}
def __init__(self, parser):
self.p = parser
@staticmethod
def register_warts_type(*args):
"""Decorator that can be used by a subclass to register its Warts type
to the parser. For instance:
@WartsRecord.register_warts_type(0x0042)
class MyRecordType(WartsRecord):
pass
"""
def _register_warts_type(cls):
WartsRecord.WARTS_TYPES[args[0]] = cls
return cls
return _register_warts_type
@classmethod
def parse(cls, fd):
"""
Given a file-like stream, parse the next record and return an instance
of the appropriate class. If the record is of an unknown type, an
instance of UnknownRecord is returned.
If the end of file is reached, return None.
If something goes wrong, a subclass of errors.ParseError is raised.
Except in case of serious errors (for instance an error when
reading from the input), the stream is always positioned at the
start of the next record.
This is roughly similar to a factory, producing an instance of a
subclass based on the type found in the file header.
"""
# TODO: handle I/O errors related to reading from a stream
header = fd.read(struct.calcsize(cls.WARTS_HEADER_FORMAT))
if len(header) == 0: # EOF
return None
magic, type_, length = struct.unpack(cls.WARTS_HEADER_FORMAT, header)
if magic != 0x1205:
raise InvalidFormat("Invalid magic header")
buf = fd.read(length)
p = Parser(buf)
# Use type to select the right class here
subclass = cls.WARTS_TYPES.get(type_, UnknownRecord)
logger.debug("Parsing a %s (%d bytes)", subclass.__name__,
length)
record = subclass(p)
record.type = type_
record.length = length
record.parse()
return record
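    # Usage sketch (hypothetical file name): read every record in a warts
    # capture; parse() returns None at end of file.
    #
    #     with open("trace.warts", "rb") as fd:
    #         while True:
    #             record = WartsRecord.parse(fd)
    #             if record is None:
    #                 break
    #             print(record)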
def parse_options(self, options):
"""Given a list of Option instances, parse them from the input.
For each option, if it is present in the input, we create the
corresponding attribute in the current Python object. The
attribute is set to None when the option is not present.
        Implementation note: the option format makes it possible to ignore
unknown options when parsing (for instance, when new options
are added to Scamper before the parsing library has
implemented them). This is only possible if the "position" of
new options (that is, the position in the bitmask) is strictly
increasing and there is no gap. Put simply, all unknown
options MUST be at the end, because we have no idea of the
length of an unknown option. This is precisely why most
network protocols use TLV encoding...
"""
flags = self.p.read_flags()
if flags == 0:
return
options_length = self.p.read_uint16()
expected_bytes_read = self.p.offset + options_length
        # Note: the warts(5) man page uses 1-based indexing to document
# the bit positions, but we use 0-based indexing for sanity.
for position, option in enumerate(options):
if not flags & (1 << position):
setattr(self, option.attr_name, None)
continue
value = option.parse_function(self.p)
if option.ignore:
continue
setattr(self, option.attr_name, value)
# Check that we haven't read too much
if self.p.offset > expected_bytes_read:
raise InvalidFormat("Inconsistent option length")
# Skip past unknown options
if self.p.offset < expected_bytes_read:
logger.debug("Skipping %d bytes worth of unknown options",
expected_bytes_read - self.p.offset)
self.p.offset = expected_bytes_read
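    # Usage sketch (hypothetical record layout): a concrete subclass would
    # typically declare its optional fields as Option instances (see
    # warts/parsing.py) and hand them to parse_options() from its parse()
    # method, for example:
    #
    #     OPTIONS = [
    #         Option('list_id', Parser.read_uint32),
    #         Option('hostname', Parser.read_string),
    #         Option('_old_address', Parser.read_address, ignore=True),
    #     ]
    #
    #     def parse(self):
    #         self.parse_options(self.OPTIONS)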
class UnknownRecord(WartsRecord):
"""Default class returned when we encounter a record with an unknown type.
The payload of the record is stored in [self.data], as a bytes object."""
def parse(self):
logger.info("Ignoring unknown record %s", self)
self.data = self.p.buf
def __str__(self):
return 'Unknown(type={}, length={})'.format(self.type, self.length)
|
/scamper-pywarts-0.2.1.tar.gz/scamper-pywarts-0.2.1/warts/base.py
| 0.734501 | 0.332771 |
base.py
|
pypi
|
from __future__ import unicode_literals, division, print_function
import struct
import ctypes
from collections import namedtuple
import logging
import socket
import six
from .errors import ParseError, InvalidFormat, EmptyRead, IncompleteRead, ReadError
logger = logging.getLogger(__name__)
# Represents an ICMP extension. [data] is a bunch of undecoded bytes.
IcmpExtension = namedtuple('IcmpExtension', ['class_', 'type_', 'data'])
class Parser(object):
"""Simple object that offers a number of parsing primitives on a buffer,
and records an offset into the buffer (i.e. the total number of bytes
parsed so far).
"""
def __init__(self, buf):
self.buf = buf
# Offset in bytes, i.e. the number of bytes parsed so far
self.offset = 0
self.addresses = list()
def read_from_format(self, format):
"""Decode data from the buffer, according to a format string suitable for
struct.unpack.
"""
size = struct.calcsize(format)
res = struct.unpack_from(format, self.buf, self.offset)
self.offset += size
return res
def read_uint8(self):
res = six.indexbytes(self.buf, self.offset)
self.offset += 1
return res
def read_uint16(self):
return self.read_from_format('>H')[0]
def read_uint32(self):
return self.read_from_format('>I')[0]
def read_timeval(self):
sec, usec = self.read_from_format('>II')
return sec + usec / 1000000
def read_bytes(self, length):
"""Returns a fixed-length chunk of the buffer, as a bytes() object."""
res = self.buf[self.offset:self.offset + length]
self.offset += length
return res
def read_address(self):
length = self.read_uint8()
if length > 0:
type_ = self.read_uint8()
addr = self.read_bytes(length)
            if type_ == 0x01:
                addr_str = socket.inet_ntop(socket.AF_INET, addr)
            elif type_ == 0x02:
                addr_str = socket.inet_ntop(socket.AF_INET6, addr)
            else:
                # Unknown address type: fail cleanly rather than hitting an
                # unbound addr_str below.
                raise InvalidFormat("Unknown address type: {}".format(type_))
self.addresses.append(addr_str)
# TODO: decode UTF-8 address when using python2
return addr_str
else:
id_ = self.read_uint32()
try:
return self.addresses[id_]
except IndexError:
raise InvalidFormat("Invalid referenced address")
def read_string(self):
"""Read a zero-terminated UTF-8 string from the buffer."""
# TODO: do we really need to make a copy?
s = bytes(ctypes.string_at(self.buf[self.offset:]))
# Seek to the end of the string (including the final zero char)
self.offset += len(s) + 1
return s.decode('utf-8')
def read_icmpext(self):
"""Read "ICMP extension data", which is turned into a list of
IcmpExtension instances."""
total_length = self.read_uint16()
extensions = list()
expected_bytes = self.offset + total_length
while self.offset < expected_bytes:
ext_length, ext_class, ext_type = self.read_from_format('>HBB')
ext_data = self.read_bytes(ext_length)
extensions.append(IcmpExtension(ext_class, ext_type, ext_data))
if self.offset > expected_bytes:
raise InvalidFormat("Inconsistent ICMP extension length")
return extensions
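    # Worked example (illustrative bytes): a total_length of 8 followed by
    # one extension header (ext_length=4, class=1, type=1) and 4 payload
    # bytes yields [IcmpExtension(1, 1, b"\x00\x01\x02\x03")].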
def read_flags(self):
"""Parse and return a bitmask representing the option flags"""
# We use a python integer as a bitmask (fast, especially for less than 32 flags)
flags = 0
bit_pos = 0
# See warts(5) to understand the weird encoding
while True:
byte = self.read_uint8()
flags |= ((byte & 0x7F) << bit_pos)
if byte & 0x80 == 0:
break
bit_pos += 7
return flags
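    # Worked example: the byte sequence 0x81 0x01 decodes as follows.
    # 0x81 contributes its low seven bits (0x01) at bit position 0, and its
    # continuation bit (0x80) is set, so another byte is read. 0x01 then
    # contributes 0x01 at bit position 7 and stops, giving
    # flags = 0x01 | (0x01 << 7) = 0x81, i.e. bits 0 and 7 set.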
class Option(object):
"""
Simple container for an optional field. It describes a parsing
function that should be called to parse the option, and an
attribute name used to store the resulting value in a Python
object.
The parse function should be a method from the Parser class (it will
be passed a Parser instance).
If [ignore] is True, the option should be parsed, but the value
should be thrown away instead of being recorded in a Python
object. This is mostly useful to ignore options related to the
deprecated address format.
"""
def __init__(self, attr_name, parse_function, ignore=False):
self.attr_name = attr_name
self.parse_function = parse_function
self.ignore = ignore
|
/scamper-pywarts-0.2.1.tar.gz/scamper-pywarts-0.2.1/warts/parsing.py
| 0.698329 | 0.352954 |
parsing.py
|
pypi
|