ext (stringclasses, 9 values) | sha (stringlengths, 40–40) | content (stringlengths, 3–1.04M)
---|---|---
py | 1a4492393cabcdab89bb19cef5af3173f7ee9c55 | """
Author: <REPLACE>
Project: 100DaysPython
File: module3_day29_fileManipulations.py
Creation Date: <REPLACE>
Description: <REPLACE>
"""
import os
# First change the working directory to point to the folder containing the files
os.chdir("./audio")
# The `.listdir()` function returns a list of the folder's contents. This can be iterated over to work with the files.
for file in os.listdir():
# The file names follow the format `title_module_day_track.mp3`
# The `os.path.splitext()` function separates the extension from the file name. This can be used to create a tuple
# of the file name and the file extension.
file_name, file_ext = os.path.splitext(file)
# Since the folder can contain files other than `.mp3`, the program will be told to ignore all other extensions.
if file_ext != ".mp3":
continue
# Similar to the method of splitting off the file extension, the title, module, day, and track can all be separated
# into a tuple by splitting on the underscore.
title, module, day, track = file_name.split("_")
# The track number includes the number sign which isn't ideal and needs to be removed. Additionally, since there are
# tracks in the double digits, the system will sort track 10 immediately after track 1. Therefore, padding also
# needs to be applied using the `.zfill()` method to ensure proper order.
track = track[1:].zfill(2)
# The `.rename()` function can then be used to rename the file with the desired format.
new_name = f"{track}-{title}{file_ext}"
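# For illustration only (hypothetical file name): "intro_module3_day29_#7.mp3"
# would become "07-intro.mp3" after the steps above.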
os.rename(file, new_name) |
py | 1a44932c46c43858b5d67b9f60230d42412f9b1e | #!/usr/bin/python
import datetime
from transformers import TFBertForSequenceClassification
import tensorflow as tf
from tensorflow.keras import Input
from tensorflow.keras import backend as K, initializers, regularizers, constraints
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Activation, Layer, Dropout, LSTM, Dense, InputLayer
from tensorflow.keras.losses import Loss
class Attention(Layer):
"""
SOURCE: https://gist.github.com/cbaziotis/6428df359af27d58078ca5ed9792bd6d
"""
def __init__(self,
W_regularizer=None, b_regularizer=None,
W_constraint=None, b_constraint=None,
bias=True,
return_attention=False,
**kwargs):
"""
Keras Layer that implements an Attention mechanism for temporal data.
Supports Masking.
Follows the work of Raffel et al. [https://arxiv.org/abs/1512.08756]
# Input shape
3D tensor with shape: `(samples, steps, features)`.
# Output shape
2D tensor with shape: `(samples, features)`.
:param kwargs:
Just put it on top of an RNN Layer (GRU/LSTM/SimpleRNN) with return_sequences=True.
The dimensions are inferred based on the output shape of the RNN.
Note: The layer has been tested with Keras 1.x
Example:
# 1
model.add(LSTM(64, return_sequences=True))
model.add(Attention())
# next add a Dense layer (for classification/regression) or whatever...
# 2 - Get the attention scores
hidden = LSTM(64, return_sequences=True)(words)
sentence, word_scores = Attention(return_attention=True)(hidden)
"""
self.supports_masking = True
self.return_attention = return_attention
self.init = initializers.get('glorot_uniform')
self.W_regularizer = regularizers.get(W_regularizer)
self.b_regularizer = regularizers.get(b_regularizer)
self.W_constraint = constraints.get(W_constraint)
self.b_constraint = constraints.get(b_constraint)
self.bias = bias
super(Attention, self).__init__(**kwargs)
def get_config(self):
config = super().get_config().copy()
config.update({
'supports_masking': self.supports_masking,
'return_attention': self.return_attention,
'init': self.init,
'W_regularizer': self.W_regularizer,
'b_regularizer': self.b_regularizer,
'W_constraint': self.W_constraint,
'b_constraint': self.b_constraint,
'bias': self.bias,
})
return config
def build(self, input_shape):
assert len(input_shape) == 3
self.W = self.add_weight(shape=(input_shape[-1],),
initializer=self.init,
name='{}_W'.format(self.name),
regularizer=self.W_regularizer,
constraint=self.W_constraint)
if self.bias:
self.b = self.add_weight(shape=(input_shape[1],),
initializer='zero',
name='{}_b'.format(self.name),
regularizer=self.b_regularizer,
constraint=self.b_constraint)
else:
self.b = None
self.built = True
def compute_mask(self, input, input_mask=None):
# do not pass the mask to the next layers
return None
def call(self, x, mask=None):
eij = dot_product(x, self.W)
if self.bias:
eij += self.b
eij = K.tanh(eij)
a = K.exp(eij)
# apply mask after the exp. will be re-normalized next
if mask is not None:
# Cast the mask to floatX to avoid float64 upcasting in theano
a *= K.cast(mask, K.floatx())
# in some cases especially in the early stages of training the sum may be almost zero
# and this results in NaN's. A workaround is to add a very small positive number ε to the sum.
# a /= K.cast(K.sum(a, axis=1, keepdims=True), K.floatx())
a /= K.cast(K.sum(a, axis=1, keepdims=True) + K.epsilon(), K.floatx())
weighted_input = x * K.expand_dims(a)
result = K.sum(weighted_input, axis=1)
if self.return_attention:
return [result, a]
return result
def compute_output_shape(self, input_shape):
if self.return_attention:
return [(input_shape[0], input_shape[-1]),
(input_shape[0], input_shape[1])]
else:
return input_shape[0], input_shape[-1]
class RankingError(Loss):
def __init__(self, batch_size):
super().__init__()
self.batch_size = batch_size
def call(self, y_true, y_diff):
pos = tf.constant([1.0 for i in range(self.batch_size)])
neg = tf.constant([-1.0 for i in range(self.batch_size)])
sign = tf.where(tf.equal(y_true,1.0), pos, neg)
return tf.math.maximum(0.0, 1.0 - sign * y_diff)
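# Minimal usage sketch for RankingError (values below are illustrative, not from this file):
#   loss_fn = RankingError(batch_size=2)
#   y_true = tf.constant([1.0, 0.0])   # 1.0 -> first input should outrank the second
#   y_diff = tf.constant([0.3, -0.2])  # model output: score(s1) - score(s2)
#   loss_fn(y_true, y_diff)            # per-sample hinge max(0, 1 - sign * y_diff), reduced by Keras to ~0.75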
def dot_product(x, kernel):
"""
SOURCE: https://gist.github.com/cbaziotis/6428df359af27d58078ca5ed9792bd6d
Wrapper for dot product operation, in order to be compatible with both
Theano and Tensorflow
Args:
x (): input
kernel (): weights
Returns:
"""
if K.backend() == 'tensorflow':
# todo: check that this is correct
return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)
else:
return K.dot(x, kernel)
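# Shape sketch (illustrative): x of shape (samples, steps, features) dotted with a
# kernel of shape (features,) yields attention scores of shape (samples, steps).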
def build_base_model(input_shape, hidden_units, dropout_prob, model_name='base'):
model = Sequential(name=model_name)
model.add(LSTM(hidden_units, input_shape=input_shape, return_sequences=True, name='lstm'))
model.add(Attention(name='attention'))
model.add(Dropout(dropout_prob))
model.add(Dense(1, activation='sigmoid', name='dense'))
return model
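# Example (hypothetical shapes): a classifier over 50-step sequences of 300-d features.
#   base = build_base_model(input_shape=(50, 300), hidden_units=64, dropout_prob=0.2)
#   base.summary()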
def build_ranking_model(base_forward_func, input1, input2):
out_s1 = base_forward_func(input1)
out_s1 = Layer(name='out_s1')(out_s1)
out_s2 = base_forward_func(input2)
out_diff = Layer(name='out_diff')(tf.math.subtract(out_s1, out_s2, name='out_diff'))
if isinstance(input1, list) and isinstance(input2, list):
total_inputs = input1 + input2
else:
total_inputs = [input1] + [input2]
return tf.keras.Model(inputs=total_inputs, outputs=[out_s1, out_diff], name='ranking')
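# Example wiring (hypothetical shapes; reuses `base` from the sketch above):
#   in1 = Input(shape=(50, 300), name='input_s1')
#   in2 = Input(shape=(50, 300), name='input_s2')
#   ranker = build_ranking_model(base, in1, in2)
#   ranker.compile(optimizer='adam',
#                  loss={'out_s1': 'binary_crossentropy', 'out_diff': RankingError(batch_size=32)})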
def load_bert_model(model_path):
cbert_model = TFBertForSequenceClassification.from_pretrained(model_path)
cbert_model.classifier.activation = tf.keras.activations.sigmoid
return cbert_model |
py | 1a4493a6267f2bba4cec49f925c3ca62c618ecfa | from .utils import *
from .models import *
from .opt import *
|
py | 1a44968d5bf0ebea7e93c0157943ae4c455ffe49 | from __future__ import annotations
import inspect
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Type, Union, cast
import numpy as np
from pandas._libs import (
Interval,
Period,
Timestamp,
algos as libalgos,
internals as libinternals,
lib,
writers,
)
from pandas._libs.internals import BlockPlacement
from pandas._libs.tslibs import conversion
from pandas._typing import ArrayLike, Dtype, DtypeObj, Scalar, Shape
from pandas.util._validators import validate_bool_kwarg
from pandas.core.dtypes.cast import (
astype_dt64_to_dt64tz,
astype_nansafe,
can_hold_element,
convert_scalar_for_putitemlike,
find_common_type,
infer_dtype_from,
infer_dtype_from_scalar,
maybe_downcast_numeric,
maybe_downcast_to_dtype,
maybe_promote,
maybe_upcast,
soft_convert_objects,
)
from pandas.core.dtypes.common import (
DT64NS_DTYPE,
TD64NS_DTYPE,
is_categorical_dtype,
is_datetime64_dtype,
is_datetime64tz_dtype,
is_dtype_equal,
is_extension_array_dtype,
is_integer,
is_list_like,
is_object_dtype,
is_re,
is_re_compilable,
is_sparse,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import CategoricalDtype, ExtensionDtype
from pandas.core.dtypes.generic import ABCDataFrame, ABCIndex, ABCPandasArray, ABCSeries
from pandas.core.dtypes.missing import isna
import pandas.core.algorithms as algos
from pandas.core.array_algos.putmask import (
putmask_inplace,
putmask_smart,
putmask_without_repeat,
)
from pandas.core.array_algos.replace import compare_or_regex_search, replace_regex
from pandas.core.array_algos.transforms import shift
from pandas.core.arrays import (
Categorical,
DatetimeArray,
ExtensionArray,
PandasArray,
PandasDtype,
TimedeltaArray,
)
from pandas.core.base import PandasObject
import pandas.core.common as com
from pandas.core.construction import extract_array
from pandas.core.indexers import (
check_setitem_lengths,
is_empty_indexer,
is_exact_shape_match,
is_scalar_indexer,
)
import pandas.core.missing as missing
from pandas.core.nanops import nanpercentile
if TYPE_CHECKING:
from pandas import Index
from pandas.core.arrays._mixins import NDArrayBackedExtensionArray
class Block(PandasObject):
"""
Canonical n-dimensional unit of homogeneous dtype contained in a pandas
data structure
Index-ignorant; let the container take care of that
"""
values: Union[np.ndarray, ExtensionArray]
__slots__ = ["_mgr_locs", "values", "ndim"]
is_numeric = False
is_float = False
is_datetime = False
is_datetimetz = False
is_timedelta = False
is_bool = False
is_object = False
is_extension = False
_can_hold_na = False
_can_consolidate = True
_validate_ndim = True
@classmethod
def _simple_new(
cls, values: ArrayLike, placement: BlockPlacement, ndim: int
) -> Block:
"""
Fastpath constructor, does *no* validation
"""
obj = object.__new__(cls)
obj.ndim = ndim
obj.values = values
obj._mgr_locs = placement
return obj
def __init__(self, values, placement, ndim: int):
"""
Parameters
----------
values : np.ndarray or ExtensionArray
placement : BlockPlacement (or castable)
ndim : int
1 for SingleBlockManager/Series, 2 for BlockManager/DataFrame
"""
# TODO(EA2D): ndim will be unnecessary with 2D EAs
self.ndim = self._check_ndim(values, ndim)
self.mgr_locs = placement
self.values = self._maybe_coerce_values(values)
if self._validate_ndim and self.ndim and len(self.mgr_locs) != len(self.values):
raise ValueError(
f"Wrong number of items passed {len(self.values)}, "
f"placement implies {len(self.mgr_locs)}"
)
def _maybe_coerce_values(self, values):
"""
Ensure we have correctly-typed values.
Parameters
----------
values : np.ndarray, ExtensionArray, Index
Returns
-------
np.ndarray or ExtensionArray
"""
return values
def _check_ndim(self, values, ndim):
"""
ndim inference and validation.
Infers ndim from 'values' if not provided to __init__.
Validates that values.ndim and ndim are consistent if and only if
the class variable '_validate_ndim' is True.
Parameters
----------
values : array-like
ndim : int or None
Returns
-------
ndim : int
Raises
------
ValueError : the number of dimensions does not match
"""
if ndim is None:
ndim = values.ndim
if self._validate_ndim and values.ndim != ndim:
raise ValueError(
"Wrong number of dimensions. "
f"values.ndim != ndim [{values.ndim} != {ndim}]"
)
return ndim
@property
def _holder(self):
"""
The array-like that can hold the underlying values.
None for 'Block', overridden by subclasses that don't
use an ndarray.
"""
return None
@property
def _consolidate_key(self):
return self._can_consolidate, self.dtype.name
@property
def is_view(self) -> bool:
""" return a boolean if I am possibly a view """
values = self.values
values = cast(np.ndarray, values)
return values.base is not None
@property
def is_categorical(self) -> bool:
return self._holder is Categorical
@property
def is_datelike(self) -> bool:
""" return True if I am a non-datelike """
return self.is_datetime or self.is_timedelta
def external_values(self):
"""
The array that Series.values returns (public attribute).
This has some historical constraints, and is overridden in block
subclasses to return the correct array (e.g. period returns
object ndarray and datetimetz a datetime64[ns] ndarray instead of
proper extension array).
"""
return self.values
def internal_values(self):
"""
The array that Series._values returns (internal values).
"""
return self.values
def array_values(self) -> ExtensionArray:
"""
The array that Series.array returns. Always an ExtensionArray.
"""
return PandasArray(self.values)
def get_values(self, dtype: Optional[Dtype] = None):
"""
return an internal format, currently just the ndarray
this is often overridden to handle to_dense like operations
"""
if is_object_dtype(dtype):
return self.values.astype(object)
return self.values
def get_block_values_for_json(self) -> np.ndarray:
"""
This is used in the JSON C code.
"""
# TODO(EA2D): reshape will be unnecessary with 2D EAs
return np.asarray(self.values).reshape(self.shape)
@property
def fill_value(self):
return np.nan
@property
def mgr_locs(self):
return self._mgr_locs
@mgr_locs.setter
def mgr_locs(self, new_mgr_locs):
if not isinstance(new_mgr_locs, libinternals.BlockPlacement):
new_mgr_locs = libinternals.BlockPlacement(new_mgr_locs)
self._mgr_locs = new_mgr_locs
def make_block(self, values, placement=None) -> Block:
"""
Create a new block, with type inference, propagating any values that are
not specified
"""
if placement is None:
placement = self.mgr_locs
if self.is_extension:
values = _block_shape(values, ndim=self.ndim)
return make_block(values, placement=placement, ndim=self.ndim)
def make_block_same_class(self, values, placement=None, ndim=None):
""" Wrap given values in a block of same type as self. """
if placement is None:
placement = self.mgr_locs
if ndim is None:
ndim = self.ndim
return type(self)(values, placement=placement, ndim=ndim)
def __repr__(self) -> str:
# don't want to print out all of the items here
name = type(self).__name__
if self.ndim == 1:
result = f"{name}: {len(self)} dtype: {self.dtype}"
else:
shape = " x ".join(str(s) for s in self.shape)
result = f"{name}: {self.mgr_locs.indexer}, {shape}, dtype: {self.dtype}"
return result
def __len__(self) -> int:
return len(self.values)
def __getstate__(self):
return self.mgr_locs.indexer, self.values
def __setstate__(self, state):
self.mgr_locs = libinternals.BlockPlacement(state[0])
self.values = state[1]
self.ndim = self.values.ndim
def _slice(self, slicer):
""" return a slice of my values """
return self.values[slicer]
def getitem_block(self, slicer, new_mgr_locs=None):
"""
Perform __getitem__-like, return result as block.
As of now, only supports slices that preserve dimensionality.
"""
if new_mgr_locs is None:
axis0_slicer = slicer[0] if isinstance(slicer, tuple) else slicer
new_mgr_locs = self.mgr_locs[axis0_slicer]
elif not isinstance(new_mgr_locs, BlockPlacement):
new_mgr_locs = BlockPlacement(new_mgr_locs)
new_values = self._slice(slicer)
if self._validate_ndim and new_values.ndim != self.ndim:
raise ValueError("Only same dim slicing is allowed")
return type(self)._simple_new(new_values, new_mgr_locs, self.ndim)
@property
def shape(self):
return self.values.shape
@property
def dtype(self):
return self.values.dtype
def iget(self, i):
return self.values[i]
def set_inplace(self, locs, values):
"""
Modify block values in-place with new item value.
Notes
-----
`set` never creates a new array or new Block, whereas `setitem` _may_
create a new array and always creates a new Block.
"""
self.values[locs] = values
def delete(self, loc) -> None:
"""
Delete given loc(-s) from block in-place.
"""
self.values = np.delete(self.values, loc, 0)
self.mgr_locs = self.mgr_locs.delete(loc)
def apply(self, func, **kwargs) -> List[Block]:
"""
apply the function to my values; return a block if we are not
one
"""
with np.errstate(all="ignore"):
result = func(self.values, **kwargs)
return self._split_op_result(result)
def reduce(self, func, ignore_failures: bool = False) -> List[Block]:
# We will apply the function and reshape the result into a single-row
# Block with the same mgr_locs; squeezing will be done at a higher level
assert self.ndim == 2
try:
result = func(self.values)
except (TypeError, NotImplementedError):
if ignore_failures:
return []
raise
if np.ndim(result) == 0:
# TODO(EA2D): special case not needed with 2D EAs
res_values = np.array([[result]])
else:
res_values = result.reshape(-1, 1)
nb = self.make_block(res_values)
return [nb]
def _split_op_result(self, result) -> List[Block]:
# See also: split_and_operate
if is_extension_array_dtype(result) and result.ndim > 1:
# TODO(EA2D): unnecessary with 2D EAs
# if we get a 2D ExtensionArray, we need to split it into 1D pieces
nbs = []
for i, loc in enumerate(self.mgr_locs):
vals = result[i]
block = self.make_block(values=vals, placement=[loc])
nbs.append(block)
return nbs
if not isinstance(result, Block):
result = self.make_block(result)
return [result]
def fillna(
self, value, limit=None, inplace: bool = False, downcast=None
) -> List[Block]:
"""
fillna on the block with the value. If we fail, then convert to
ObjectBlock and try again
"""
inplace = validate_bool_kwarg(inplace, "inplace")
mask = isna(self.values)
mask = _extract_bool_array(mask)
if limit is not None:
limit = libalgos.validate_limit(None, limit=limit)
mask[mask.cumsum(self.ndim - 1) > limit] = False
if not self._can_hold_na:
if inplace:
return [self]
else:
return [self.copy()]
if self._can_hold_element(value):
nb = self if inplace else self.copy()
putmask_inplace(nb.values, mask, value)
# TODO: should be nb._maybe_downcast?
return self._maybe_downcast([nb], downcast)
# we can't process the value, but nothing to do
if not mask.any():
return [self] if inplace else [self.copy()]
# operate column-by-column
def f(mask, val, idx):
block = self.coerce_to_target_dtype(value)
# slice out our block
if idx is not None:
# i.e. self.ndim == 2
block = block.getitem_block(slice(idx, idx + 1))
return block.fillna(value, limit=limit, inplace=inplace, downcast=None)
return self.split_and_operate(None, f, inplace)
def _split(self) -> List[Block]:
"""
Split a block into a list of single-column blocks.
"""
assert self.ndim == 2
new_blocks = []
for i, ref_loc in enumerate(self.mgr_locs):
vals = self.values[slice(i, i + 1)]
nb = self.make_block(vals, [ref_loc])
new_blocks.append(nb)
return new_blocks
def split_and_operate(
self, mask, f, inplace: bool, ignore_failures: bool = False
) -> List[Block]:
"""
split the block per-column, and apply the callable f
per-column, return a new block for each. Handle
masking which will not change a block unless needed.
Parameters
----------
mask : 2-d boolean mask
f : callable accepting (1d-mask, 1d values, indexer)
inplace : bool
ignore_failures : bool, default False
Returns
-------
list of blocks
"""
if mask is None:
mask = np.broadcast_to(True, shape=self.shape)
new_values = self.values
def make_a_block(nv, ref_loc):
if isinstance(nv, list):
assert len(nv) == 1, nv
assert isinstance(nv[0], Block)
block = nv[0]
else:
# Put back the dimension that was taken from it and make
# a block out of the result.
nv = _block_shape(nv, ndim=self.ndim)
block = self.make_block(values=nv, placement=ref_loc)
return block
# ndim == 1
if self.ndim == 1:
if mask.any():
nv = f(mask, new_values, None)
else:
nv = new_values if inplace else new_values.copy()
block = make_a_block(nv, self.mgr_locs)
return [block]
# ndim > 1
new_blocks = []
for i, ref_loc in enumerate(self.mgr_locs):
m = mask[i]
v = new_values[i]
# need a new block
if m.any() or m.size == 0:
# Apply our function; we may ignore_failures if this is a
# reduction that is dropping nuisance columns GH#37827
try:
nv = f(m, v, i)
except TypeError:
if ignore_failures:
continue
else:
raise
else:
nv = v if inplace else v.copy()
block = make_a_block(nv, [ref_loc])
new_blocks.append(block)
return new_blocks
def _maybe_downcast(self, blocks: List[Block], downcast=None) -> List[Block]:
# no need to downcast our float
# unless indicated
if downcast is None and (self.is_float or self.is_datelike):
return blocks
return extend_blocks([b.downcast(downcast) for b in blocks])
def downcast(self, dtypes=None) -> List[Block]:
""" try to downcast each item to the dict of dtypes if present """
# turn it off completely
if dtypes is False:
return [self]
values = self.values
if self.ndim == 1:
# try to cast all non-floats here
if dtypes is None:
dtypes = "infer"
nv = maybe_downcast_to_dtype(values, dtypes)
return [self.make_block(nv)]
# ndim > 1
if dtypes is None:
return [self]
if not (dtypes == "infer" or isinstance(dtypes, dict)):
raise ValueError(
"downcast must have a dictionary or 'infer' as its argument"
)
elif dtypes != "infer":
raise AssertionError("dtypes as dict is not supported yet")
# operate column-by-column
# this is expensive as it splits the blocks items-by-item
def f(mask, val, idx):
val = maybe_downcast_to_dtype(val, dtype="infer")
return val
return self.split_and_operate(None, f, False)
def astype(self, dtype, copy: bool = False, errors: str = "raise"):
"""
Coerce to the new dtype.
Parameters
----------
dtype : str, dtype convertible
copy : bool, default False
copy if indicated
errors : str, {'raise', 'ignore'}, default 'raise'
- ``raise`` : allow exceptions to be raised
- ``ignore`` : suppress exceptions. On error return original object
Returns
-------
Block
"""
errors_legal_values = ("raise", "ignore")
if errors not in errors_legal_values:
invalid_arg = (
"Expected value of kwarg 'errors' to be one of "
f"{list(errors_legal_values)}. Supplied value is '{errors}'"
)
raise ValueError(invalid_arg)
if inspect.isclass(dtype) and issubclass(dtype, ExtensionDtype):
msg = (
f"Expected an instance of {dtype.__name__}, "
"but got the class instead. Try instantiating 'dtype'."
)
raise TypeError(msg)
dtype = pandas_dtype(dtype)
try:
new_values = self._astype(dtype, copy=copy)
except (ValueError, TypeError):
# e.g. astype_nansafe can fail on object-dtype of strings
# trying to convert to float
if errors == "ignore":
new_values = self.values
else:
raise
newb = self.make_block(new_values)
if newb.is_numeric and self.is_numeric:
if newb.shape != self.shape:
raise TypeError(
f"cannot set astype for copy = [{copy}] for dtype "
f"({self.dtype.name} [{self.shape}]) to different shape "
f"({newb.dtype.name} [{newb.shape}])"
)
return newb
def _astype(self, dtype: DtypeObj, copy: bool) -> ArrayLike:
values = self.values
if is_datetime64tz_dtype(dtype) and is_datetime64_dtype(values.dtype):
return astype_dt64_to_dt64tz(values, dtype, copy, via_utc=True)
if is_dtype_equal(values.dtype, dtype):
if copy:
return values.copy()
return values
if isinstance(values, ExtensionArray):
values = values.astype(dtype, copy=copy)
else:
values = astype_nansafe(values, dtype, copy=copy)
return values
def convert(
self,
copy: bool = True,
datetime: bool = True,
numeric: bool = True,
timedelta: bool = True,
) -> List[Block]:
"""
attempt to coerce any object types to better types and return a copy
of the block (if copy = True); by definition we are not an ObjectBlock
here!
"""
return [self.copy()] if copy else [self]
def _can_hold_element(self, element: Any) -> bool:
""" require the same dtype as ourselves """
raise NotImplementedError("Implemented on subclasses")
def should_store(self, value: ArrayLike) -> bool:
"""
Should we set self.values[indexer] = value inplace or do we need to cast?
Parameters
----------
value : np.ndarray or ExtensionArray
Returns
-------
bool
"""
return is_dtype_equal(value.dtype, self.dtype)
def to_native_types(self, na_rep="nan", quoting=None, **kwargs):
""" convert to our native types format """
values = self.values
mask = isna(values)
itemsize = writers.word_len(na_rep)
if not self.is_object and not quoting and itemsize:
values = values.astype(str)
if values.dtype.itemsize / np.dtype("U1").itemsize < itemsize:
# enlarge for the na_rep
values = values.astype(f"<U{itemsize}")
else:
values = np.array(values, dtype="object")
values[mask] = na_rep
return self.make_block(values)
# block actions #
def copy(self, deep: bool = True):
""" copy constructor """
values = self.values
if deep:
values = values.copy()
return self.make_block_same_class(values, ndim=self.ndim)
def replace(
self,
to_replace,
value,
inplace: bool = False,
regex: bool = False,
) -> List[Block]:
"""
replace the to_replace value with value, possibly creating new
blocks here; this is just a call to putmask. regex is not used here.
It is used in ObjectBlocks. It is here for API compatibility.
"""
inplace = validate_bool_kwarg(inplace, "inplace")
original_to_replace = to_replace
if not self._can_hold_element(to_replace):
# We cannot hold `to_replace`, so we know immediately that
# replacing it is a no-op.
# Note: If to_replace were a list, NDFrame.replace would call
# replace_list instead of replace.
return [self] if inplace else [self.copy()]
values = self.values
mask = missing.mask_missing(values, to_replace)
if not mask.any():
# Note: we get here with test_replace_extension_other incorrectly
# bc _can_hold_element is incorrect.
return [self] if inplace else [self.copy()]
if not self._can_hold_element(value):
blk = self.astype(object)
return blk.replace(
to_replace=original_to_replace,
value=value,
inplace=True,
regex=regex,
)
blk = self if inplace else self.copy()
putmask_inplace(blk.values, mask, value)
blocks = blk.convert(numeric=False, copy=not inplace)
return blocks
def _replace_regex(
self,
to_replace,
value,
inplace: bool = False,
convert: bool = True,
mask=None,
) -> List[Block]:
"""
Replace elements by the given value.
Parameters
----------
to_replace : object or pattern
Scalar to replace or regular expression to match.
value : object
Replacement object.
inplace : bool, default False
Perform inplace modification.
convert : bool, default True
If true, try to coerce any object types to better types.
mask : array-like of bool, optional
True indicates the corresponding element is ignored.
Returns
-------
List[Block]
"""
if not self._can_hold_element(to_replace):
# i.e. only ObjectBlock, but could in principle include a
# String ExtensionBlock
return [self] if inplace else [self.copy()]
rx = re.compile(to_replace)
new_values = self.values if inplace else self.values.copy()
replace_regex(new_values, rx, value, mask)
block = self.make_block(new_values)
if convert:
nbs = block.convert(numeric=False)
else:
nbs = [block]
return nbs
def _replace_list(
self,
src_list: List[Any],
dest_list: List[Any],
inplace: bool = False,
regex: bool = False,
) -> List[Block]:
"""
See BlockManager._replace_list docstring.
"""
# Exclude anything that we know we won't contain
pairs = [
(x, y) for x, y in zip(src_list, dest_list) if self._can_hold_element(x)
]
if not len(pairs):
# shortcut, nothing to replace
return [self] if inplace else [self.copy()]
src_len = len(pairs) - 1
def comp(s: Scalar, mask: np.ndarray, regex: bool = False) -> np.ndarray:
"""
Generate a bool array by performing an equality check, or performing
element-wise regular expression matching
"""
if isna(s):
return ~mask
return compare_or_regex_search(self.values, s, regex, mask)
if self.is_object:
# Calculate the mask once, prior to the call of comp
# in order to avoid repeating the same computations
mask = ~isna(self.values)
masks = [comp(s[0], mask, regex) for s in pairs]
else:
# GH#38086 faster if we know we don't need to check for regex
masks = [missing.mask_missing(self.values, s[0]) for s in pairs]
masks = [_extract_bool_array(x) for x in masks]
rb = [self if inplace else self.copy()]
for i, (src, dest) in enumerate(pairs):
new_rb: List["Block"] = []
for blk in rb:
m = masks[i]
convert = i == src_len # only convert once at the end
result = blk._replace_coerce(
to_replace=src,
value=dest,
mask=m,
inplace=inplace,
regex=regex,
)
if convert and blk.is_object:
result = extend_blocks(
[b.convert(numeric=False, copy=True) for b in result]
)
new_rb.extend(result)
rb = new_rb
return rb
def setitem(self, indexer, value):
"""
Attempt self.values[indexer] = value, possibly creating a new array.
Parameters
----------
indexer : tuple, list-like, array-like, slice
The subset of self.values to set
value : object
The value being set
Returns
-------
Block
Notes
-----
`indexer` is a direct slice/positional indexer. `value` must
be a compatible shape.
"""
transpose = self.ndim == 2
if isinstance(indexer, np.ndarray) and indexer.ndim > self.ndim:
raise ValueError(f"Cannot set values with ndim > {self.ndim}")
# coerce None values, if appropriate
if value is None:
if self.is_numeric:
value = np.nan
# coerce if block dtype can store value
values = self.values
if not self._can_hold_element(value):
# current dtype cannot store value, coerce to common dtype
# TODO: can we just use coerce_to_target_dtype for all this
if hasattr(value, "dtype"):
dtype = value.dtype
elif lib.is_scalar(value) and not isna(value):
dtype, _ = infer_dtype_from_scalar(value, pandas_dtype=True)
else:
# e.g. we are bool dtype and value is nan
# TODO: watch out for case with listlike value and scalar/empty indexer
dtype, _ = maybe_promote(np.array(value).dtype)
return self.astype(dtype).setitem(indexer, value)
dtype = find_common_type([values.dtype, dtype])
assert not is_dtype_equal(self.dtype, dtype)
# otherwise should have _can_hold_element
return self.astype(dtype).setitem(indexer, value)
if self.dtype.kind in ["m", "M"]:
arr = self.array_values().T
arr[indexer] = value
return self
# value must be storable at this moment
if is_extension_array_dtype(getattr(value, "dtype", None)):
# We need to be careful not to allow through strings that
# can be parsed to EADtypes
is_ea_value = True
arr_value = value
else:
is_ea_value = False
arr_value = np.array(value)
if transpose:
values = values.T
# length checking
check_setitem_lengths(indexer, value, values)
exact_match = is_exact_shape_match(values, arr_value)
if is_empty_indexer(indexer, arr_value):
# GH#8669 empty indexers
pass
elif is_scalar_indexer(indexer, self.ndim):
# setting a single element for each dim and with a rhs that could
# be e.g. a list; see GH#6043
values[indexer] = value
elif exact_match and is_categorical_dtype(arr_value.dtype):
# GH25495 - If the current dtype is not categorical,
# we need to create a new categorical block
values[indexer] = value
if values.ndim == 2:
# TODO(EA2D): special case not needed with 2D EAs
if values.shape[-1] != 1:
# shouldn't get here (at least until 2D EAs)
raise NotImplementedError
values = values[:, 0]
return self.make_block(Categorical(values, dtype=arr_value.dtype))
elif exact_match and is_ea_value:
# GH#32395 if we're going to replace the values entirely, just
# substitute in the new array
return self.make_block(arr_value)
# if we are an exact match (ex-broadcasting),
# then use the resultant dtype
elif exact_match:
# We are setting _all_ of the array's values, so can cast to new dtype
values[indexer] = value
values = values.astype(arr_value.dtype, copy=False)
elif is_ea_value:
# GH#38952
if values.ndim == 1:
values[indexer] = value
else:
# TODO(EA2D): special case not needed with 2D EA
values[indexer] = value.to_numpy(values.dtype).reshape(-1, 1)
# set
else:
values[indexer] = value
if transpose:
values = values.T
block = self.make_block(values)
return block
def putmask(self, mask, new) -> List[Block]:
"""
putmask the data to the block; it is possible that we may create a
new dtype of block
Return the resulting block(s).
Parameters
----------
mask : np.ndarray[bool], SparseArray[bool], or BooleanArray
new : a ndarray/object
Returns
-------
List[Block]
"""
transpose = self.ndim == 2
mask = _extract_bool_array(mask)
assert not isinstance(new, (ABCIndex, ABCSeries, ABCDataFrame))
new_values = self.values # delay copy if possible.
# if we are passed a scalar None, convert it here
if not is_list_like(new) and isna(new) and not self.is_object:
# FIXME: make sure we have compatible NA
new = self.fill_value
if self._can_hold_element(new):
if self.dtype.kind in ["m", "M"]:
arr = self.array_values()
arr = cast("NDArrayBackedExtensionArray", arr)
if transpose:
arr = arr.T
arr.putmask(mask, new)
return [self]
if transpose:
new_values = new_values.T
putmask_without_repeat(new_values, mask, new)
return [self]
elif not mask.any():
return [self]
else:
# may need to upcast
if transpose:
mask = mask.T
if isinstance(new, np.ndarray):
new = new.T
# operate column-by-column
def f(mask, val, idx):
if idx is None:
# ndim==1 case.
n = new
else:
if isinstance(new, np.ndarray):
n = np.squeeze(new[idx % new.shape[0]])
else:
n = np.array(new)
# type of the new block
dtype, _ = maybe_promote(n.dtype)
# we need to explicitly astype here to make a copy
n = n.astype(dtype)
nv = putmask_smart(val, mask, n)
return nv
new_blocks = self.split_and_operate(mask, f, True)
return new_blocks
def coerce_to_target_dtype(self, other):
"""
coerce the current block to a dtype compatible with other
we will return a block, possibly object, and not raise
we can also safely try to coerce to the same dtype
and will receive the same block
"""
# if we cannot then coerce to object
dtype, _ = infer_dtype_from(other, pandas_dtype=True)
new_dtype = find_common_type([self.dtype, dtype])
return self.astype(new_dtype, copy=False)
def interpolate(
self,
method: str = "pad",
axis: int = 0,
index: Optional[Index] = None,
inplace: bool = False,
limit: Optional[int] = None,
limit_direction: str = "forward",
limit_area: Optional[str] = None,
fill_value: Optional[Any] = None,
coerce: bool = False,
downcast: Optional[str] = None,
**kwargs,
):
inplace = validate_bool_kwarg(inplace, "inplace")
if not self._can_hold_na:
# If there are no NAs, then interpolate is a no-op
return self if inplace else self.copy()
# a fill na type method
try:
m = missing.clean_fill_method(method)
except ValueError:
m = None
if m is not None:
if fill_value is not None:
# similar to validate_fillna_kwargs
raise ValueError("Cannot pass both fill_value and method")
return self._interpolate_with_fill(
method=m,
axis=axis,
inplace=inplace,
limit=limit,
limit_area=limit_area,
downcast=downcast,
)
# validate the interp method
m = missing.clean_interp_method(method, **kwargs)
assert index is not None # for mypy
return self._interpolate(
method=m,
index=index,
axis=axis,
limit=limit,
limit_direction=limit_direction,
limit_area=limit_area,
fill_value=fill_value,
inplace=inplace,
downcast=downcast,
**kwargs,
)
def _interpolate_with_fill(
self,
method: str = "pad",
axis: int = 0,
inplace: bool = False,
limit: Optional[int] = None,
limit_area: Optional[str] = None,
downcast: Optional[str] = None,
) -> List[Block]:
""" fillna but using the interpolate machinery """
inplace = validate_bool_kwarg(inplace, "inplace")
assert self._can_hold_na # checked by caller
values = self.values if inplace else self.values.copy()
values = missing.interpolate_2d(
values,
method=method,
axis=axis,
limit=limit,
limit_area=limit_area,
)
blocks = [self.make_block_same_class(values, ndim=self.ndim)]
return self._maybe_downcast(blocks, downcast)
def _interpolate(
self,
method: str,
index: Index,
fill_value: Optional[Any] = None,
axis: int = 0,
limit: Optional[int] = None,
limit_direction: str = "forward",
limit_area: Optional[str] = None,
inplace: bool = False,
downcast: Optional[str] = None,
**kwargs,
) -> List[Block]:
""" interpolate using scipy wrappers """
inplace = validate_bool_kwarg(inplace, "inplace")
data = self.values if inplace else self.values.copy()
# only deal with floats
if not self.is_float:
if self.dtype.kind not in ["i", "u"]:
return [self]
data = data.astype(np.float64)
if fill_value is None:
fill_value = self.fill_value
if method in ("krogh", "piecewise_polynomial", "pchip"):
if not index.is_monotonic:
raise ValueError(
f"{method} interpolation requires that the index be monotonic."
)
# process 1-d slices in the axis direction
def func(yvalues: np.ndarray) -> np.ndarray:
# process a 1-d slice, returning it
# should the axis argument be handled below in apply_along_axis?
# i.e. not an arg to missing.interpolate_1d
return missing.interpolate_1d(
xvalues=index,
yvalues=yvalues,
method=method,
limit=limit,
limit_direction=limit_direction,
limit_area=limit_area,
fill_value=fill_value,
bounds_error=False,
**kwargs,
)
# interp each column independently
interp_values = np.apply_along_axis(func, axis, data)
blocks = [self.make_block_same_class(interp_values)]
return self._maybe_downcast(blocks, downcast)
def take_nd(self, indexer, axis: int, new_mgr_locs=None, fill_value=lib.no_default):
"""
Take values according to indexer and return them as a block.
"""
# algos.take_nd dispatches for DatetimeTZBlock, CategoricalBlock
# so need to preserve types
# sparse is treated like an ndarray, but needs .get_values() shaping
values = self.values
if fill_value is lib.no_default:
fill_value = self.fill_value
allow_fill = False
else:
allow_fill = True
new_values = algos.take_nd(
values, indexer, axis=axis, allow_fill=allow_fill, fill_value=fill_value
)
# Called from three places in managers, all of which satisfy
# this assertion
assert not (axis == 0 and new_mgr_locs is None)
if new_mgr_locs is None:
new_mgr_locs = self.mgr_locs
if not is_dtype_equal(new_values.dtype, self.dtype):
return self.make_block(new_values, new_mgr_locs)
else:
return self.make_block_same_class(new_values, new_mgr_locs)
def diff(self, n: int, axis: int = 1) -> List[Block]:
""" return block for the diff of the values """
new_values = algos.diff(self.values, n, axis=axis, stacklevel=7)
return [self.make_block(values=new_values)]
def shift(self, periods: int, axis: int = 0, fill_value=None):
""" shift the block by periods, possibly upcast """
# convert integer to float if necessary. need to do a lot more than
# that, handle boolean etc also
new_values, fill_value = maybe_upcast(self.values, fill_value)
new_values = shift(new_values, periods, axis, fill_value)
return [self.make_block(new_values)]
def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]:
"""
evaluate the block; return result block(s) from the result
Parameters
----------
other : a ndarray/object
cond : np.ndarray[bool], SparseArray[bool], or BooleanArray
errors : str, {'raise', 'ignore'}, default 'raise'
- ``raise`` : allow exceptions to be raised
- ``ignore`` : suppress exceptions. On error return original object
axis : int, default 0
Returns
-------
List[Block]
"""
import pandas.core.computation.expressions as expressions
assert not isinstance(other, (ABCIndex, ABCSeries, ABCDataFrame))
assert errors in ["raise", "ignore"]
transpose = self.ndim == 2
values = self.values
orig_other = other
if transpose:
values = values.T
cond = _extract_bool_array(cond)
if cond.ravel("K").all():
result = values
else:
# see if we can operate on the entire block, or need item-by-item
# or if we are a single block (ndim == 1)
if (
(self.dtype.kind in ["b", "i", "u"])
and lib.is_float(other)
and np.isnan(other)
):
# GH#3733 special case to avoid object-dtype casting
# and go through numexpr path instead.
# In integer case, np.where will cast to floats
pass
elif not self._can_hold_element(other):
# we cannot coerce, return a compat dtype
# we are explicitly ignoring errors
block = self.coerce_to_target_dtype(other)
blocks = block.where(orig_other, cond, errors=errors, axis=axis)
return self._maybe_downcast(blocks, "infer")
if not (
(self.dtype.kind in ["b", "i", "u"])
and lib.is_float(other)
and np.isnan(other)
):
# convert datetime to datetime64, timedelta to timedelta64
other = convert_scalar_for_putitemlike(other, values.dtype)
# By the time we get here, we should have all Series/Index
# args extracted to ndarray
result = expressions.where(cond, values, other)
if self._can_hold_na or self.ndim == 1:
if transpose:
result = result.T
return [self.make_block(result)]
# might need to separate out blocks
axis = cond.ndim - 1
cond = cond.swapaxes(axis, 0)
mask = np.array([cond[i].all() for i in range(cond.shape[0])], dtype=bool)
result_blocks: List[Block] = []
for m in [mask, ~mask]:
if m.any():
result = cast(np.ndarray, result) # EABlock overrides where
taken = result.take(m.nonzero()[0], axis=axis)
r = maybe_downcast_numeric(taken, self.dtype)
nb = self.make_block(r.T, placement=self.mgr_locs[m])
result_blocks.append(nb)
return result_blocks
def _unstack(self, unstacker, fill_value, new_placement):
"""
Return a list of unstacked blocks of self
Parameters
----------
unstacker : reshape._Unstacker
fill_value : int
Only used in ExtensionBlock._unstack
Returns
-------
blocks : list of Block
New blocks of unstacked values.
mask : array_like of bool
The mask of columns of `blocks` we should keep.
"""
new_values, mask = unstacker.get_new_values(
self.values.T, fill_value=fill_value
)
mask = mask.any(0)
# TODO: in all tests we have mask.all(); can we rely on that?
new_values = new_values.T[mask]
new_placement = new_placement[mask]
blocks = [make_block(new_values, placement=new_placement)]
return blocks, mask
def quantile(self, qs, interpolation="linear", axis: int = 0):
"""
compute the quantiles of the block
Parameters
----------
qs: a scalar or list of the quantiles to be computed
interpolation: type of interpolation, default 'linear'
axis: axis to compute, default 0
Returns
-------
Block
"""
# We should always have ndim == 2 because Series dispatches to DataFrame
assert self.ndim == 2
values = self.get_values()
is_empty = values.shape[axis] == 0
orig_scalar = not is_list_like(qs)
if orig_scalar:
# make list-like, unpack later
qs = [qs]
if is_empty:
# create the array of na_values
# 2d len(values) * len(qs)
result = np.repeat(
np.array([self.fill_value] * len(qs)), len(values)
).reshape(len(values), len(qs))
else:
# asarray needed for Sparse, see GH#24600
mask = np.asarray(isna(values))
result = nanpercentile(
values,
np.array(qs) * 100,
axis=axis,
na_value=self.fill_value,
mask=mask,
ndim=values.ndim,
interpolation=interpolation,
)
result = np.array(result, copy=False)
result = result.T
if orig_scalar and not lib.is_scalar(result):
# result could be scalar in case with is_empty and self.ndim == 1
assert result.shape[-1] == 1, result.shape
result = result[..., 0]
result = lib.item_from_zerodim(result)
ndim = np.ndim(result)
return make_block(result, placement=np.arange(len(result)), ndim=ndim)
def _replace_coerce(
self,
to_replace,
value,
mask: np.ndarray,
inplace: bool = True,
regex: bool = False,
) -> List[Block]:
"""
Replace value corresponding to the given boolean array with another
value.
Parameters
----------
to_replace : object or pattern
Scalar to replace or regular expression to match.
value : object
Replacement object.
mask : np.ndarray[bool]
True indicates the corresponding element is ignored.
inplace : bool, default True
Perform inplace modification.
regex : bool, default False
If true, perform regular expression substitution.
Returns
-------
List[Block]
"""
if mask.any():
if not regex:
nb = self.coerce_to_target_dtype(value)
if nb is self and not inplace:
nb = nb.copy()
putmask_inplace(nb.values, mask, value)
return [nb]
else:
regex = _should_use_regex(regex, to_replace)
if regex:
return self._replace_regex(
to_replace,
value,
inplace=inplace,
convert=False,
mask=mask,
)
return self.replace(to_replace, value, inplace=inplace, regex=False)
return [self]
class ExtensionBlock(Block):
"""
Block for holding extension types.
Notes
-----
This holds all 3rd-party extension array types. It's also the immediate
parent class for our internal extension types' blocks, CategoricalBlock.
ExtensionArrays are limited to 1-D.
"""
_can_consolidate = False
_validate_ndim = False
is_extension = True
values: ExtensionArray
def __init__(self, values, placement, ndim: int):
"""
Initialize a non-consolidatable block.
'ndim' may be inferred from 'placement'.
This will continue to call __init__ for the other base
classes mixed in with this Mixin.
"""
# Placement must be converted to BlockPlacement so that we can check
# its length
if not isinstance(placement, libinternals.BlockPlacement):
placement = libinternals.BlockPlacement(placement)
# Maybe infer ndim from placement
if ndim is None:
if len(placement) != 1:
ndim = 1
else:
ndim = 2
super().__init__(values, placement, ndim=ndim)
if self.ndim == 2 and len(self.mgr_locs) != 1:
# TODO(EA2D): check unnecessary with 2D EAs
raise AssertionError("block.size != values.size")
@property
def shape(self):
# TODO(EA2D): override unnecessary with 2D EAs
if self.ndim == 1:
return (len(self.values),)
return len(self.mgr_locs), len(self.values)
def iget(self, col):
if self.ndim == 2 and isinstance(col, tuple):
# TODO(EA2D): unnecessary with 2D EAs
col, loc = col
if not com.is_null_slice(col) and col != 0:
raise IndexError(f"{self} only contains one item")
elif isinstance(col, slice):
if col != slice(None):
raise NotImplementedError(col)
return self.values[[loc]]
return self.values[loc]
else:
if col != 0:
raise IndexError(f"{self} only contains one item")
return self.values
def set_inplace(self, locs, values):
# NB: This is a misnomer; it is supposed to be inplace but is not,
# see GH#33457
assert locs.tolist() == [0]
self.values = values
def putmask(self, mask, new) -> List[Block]:
"""
See Block.putmask.__doc__
"""
mask = _extract_bool_array(mask)
new_values = self.values
if isinstance(new, (np.ndarray, ExtensionArray)) and len(new) == len(mask):
new = new[mask]
mask = safe_reshape(mask, new_values.shape)
new_values[mask] = new
return [self.make_block(values=new_values)]
def _maybe_coerce_values(self, values):
"""
Unbox to an extension array.
This will unbox an ExtensionArray stored in an Index or Series.
ExtensionArrays pass through. No dtype coercion is done.
Parameters
----------
values : Index, Series, ExtensionArray
Returns
-------
ExtensionArray
"""
return extract_array(values)
@property
def _holder(self):
# For extension blocks, the holder is values-dependent.
return type(self.values)
@property
def fill_value(self):
# Used in reindex_indexer
return self.values.dtype.na_value
@property
def _can_hold_na(self):
# The default ExtensionArray._can_hold_na is True
return self._holder._can_hold_na
@property
def is_view(self) -> bool:
"""Extension arrays are never treated as views."""
return False
@property
def is_numeric(self):
return self.values.dtype._is_numeric
def setitem(self, indexer, value):
"""
Attempt self.values[indexer] = value, possibly creating a new array.
This differs from Block.setitem by not allowing setitem to change
the dtype of the Block.
Parameters
----------
indexer : tuple, list-like, array-like, slice
The subset of self.values to set
value : object
The value being set
Returns
-------
Block
Notes
-----
`indexer` is a direct slice/positional indexer. `value` must
be a compatible shape.
"""
if not self._can_hold_element(value):
# This is only relevant for DatetimeTZBlock, which has a
# non-trivial `_can_hold_element`.
# https://github.com/pandas-dev/pandas/issues/24020
# Need a dedicated setitem until GH#24020 (type promotion in setitem
# for extension arrays) is designed and implemented.
return self.astype(object).setitem(indexer, value)
if isinstance(indexer, tuple):
# TODO(EA2D): not needed with 2D EAs
# we are always 1-D
indexer = indexer[0]
check_setitem_lengths(indexer, value, self.values)
self.values[indexer] = value
return self
def get_values(self, dtype: Optional[Dtype] = None):
# ExtensionArrays must be iterable, so this works.
# TODO(EA2D): reshape not needed with 2D EAs
return np.asarray(self.values).reshape(self.shape)
def array_values(self) -> ExtensionArray:
return self.values
def to_native_types(self, na_rep="nan", quoting=None, **kwargs):
"""override to use ExtensionArray astype for the conversion"""
values = self.values
mask = isna(values)
values = np.asarray(values.astype(object))
values[mask] = na_rep
# TODO(EA2D): reshape not needed with 2D EAs
# we are expected to return a 2-d ndarray
return self.make_block(values)
def take_nd(
self, indexer, axis: int = 0, new_mgr_locs=None, fill_value=lib.no_default
):
"""
Take values according to indexer and return them as a block.
"""
if fill_value is lib.no_default:
fill_value = None
# TODO(EA2D): special case not needed with 2D EAs
# axis doesn't matter; we are really a single-dim object
# but are passed the axis depending on the calling routine
# if it's REALLY axis 0, then this will be a reindex and not a take
new_values = self.values.take(indexer, fill_value=fill_value, allow_fill=True)
# Called from three places in managers, all of which satisfy
# this assertion
assert not (self.ndim == 1 and new_mgr_locs is None)
if new_mgr_locs is None:
new_mgr_locs = self.mgr_locs
return self.make_block_same_class(new_values, new_mgr_locs)
def _can_hold_element(self, element: Any) -> bool:
# TODO: We may need to think about pushing this onto the array.
# We're doing the same as CategoricalBlock here.
return True
def _slice(self, slicer):
"""
Return a slice of my values.
Parameters
----------
slicer : slice, ndarray[int], or a tuple of these
Valid (non-reducing) indexer for self.values.
Returns
-------
np.ndarray or ExtensionArray
"""
# return same dims as we currently have
if not isinstance(slicer, tuple) and self.ndim == 2:
# reached via getitem_block via _slice_take_blocks_ax0
# TODO(EA2D): won't be necessary with 2D EAs
slicer = (slicer, slice(None))
if isinstance(slicer, tuple) and len(slicer) == 2:
first = slicer[0]
if not isinstance(first, slice):
raise AssertionError(
"invalid slicing for a 1-ndim ExtensionArray", first
)
# GH#32959 only full-slicers along fake-dim0 are valid
# TODO(EA2D): won't be necessary with 2D EAs
new_locs = self.mgr_locs[first]
if len(new_locs):
# effectively slice(None)
slicer = slicer[1]
else:
raise AssertionError(
"invalid slicing for a 1-ndim ExtensionArray", slicer
)
return self.values[slicer]
def fillna(self, value, limit=None, inplace=False, downcast=None):
values = self.values if inplace else self.values.copy()
values = values.fillna(value=value, limit=limit)
return [
self.make_block_same_class(
values=values, placement=self.mgr_locs, ndim=self.ndim
)
]
def interpolate(
self, method="pad", axis=0, inplace=False, limit=None, fill_value=None, **kwargs
):
values = self.values if inplace else self.values.copy()
return self.make_block_same_class(
values=values.fillna(value=fill_value, method=method, limit=limit),
placement=self.mgr_locs,
)
def diff(self, n: int, axis: int = 1) -> List[Block]:
if axis == 0 and n != 0:
# n==0 case will be a no-op so let it fall through
# Since we only have one column, the result will be all-NA.
# Create this result by shifting along axis=0 past the length of
# our values.
return super().diff(len(self.values), axis=0)
if axis == 1:
# TODO(EA2D): unnecessary with 2D EAs
# we are by definition 1D.
axis = 0
return super().diff(n, axis)
def shift(
self, periods: int, axis: int = 0, fill_value: Any = None
) -> List[ExtensionBlock]:
"""
Shift the block by `periods`.
Dispatches to underlying ExtensionArray and re-boxes in an
ExtensionBlock.
"""
return [
self.make_block_same_class(
self.values.shift(periods=periods, fill_value=fill_value),
placement=self.mgr_locs,
ndim=self.ndim,
)
]
def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]:
cond = _extract_bool_array(cond)
assert not isinstance(other, (ABCIndex, ABCSeries, ABCDataFrame))
if isinstance(other, np.ndarray) and other.ndim == 2:
# TODO(EA2D): unnecessary with 2D EAs
assert other.shape[1] == 1
other = other[:, 0]
if isinstance(cond, np.ndarray) and cond.ndim == 2:
# TODO(EA2D): unnecessary with 2D EAs
assert cond.shape[1] == 1
cond = cond[:, 0]
if lib.is_scalar(other) and isna(other):
# The default `other` for Series / Frame is np.nan
# we want to replace that with the correct NA value
# for the type
other = self.dtype.na_value
if is_sparse(self.values):
# TODO(SparseArray.__setitem__): remove this if condition
# We need to re-infer the type of the data after doing the
# where, for cases where the subtypes don't match
dtype = None
else:
dtype = self.dtype
result = self.values.copy()
icond = ~cond
if lib.is_scalar(other):
set_other = other
else:
set_other = other[icond]
try:
result[icond] = set_other
except (NotImplementedError, TypeError):
# NotImplementedError for class not implementing `__setitem__`
# TypeError for SparseArray, which implements just to raise
# a TypeError
result = self._holder._from_sequence(
np.where(cond, self.values, other), dtype=dtype
)
return [self.make_block_same_class(result, placement=self.mgr_locs)]
def _unstack(self, unstacker, fill_value, new_placement):
# ExtensionArray-safe unstack.
# We override ObjectBlock._unstack, which unstacks directly on the
# values of the array. For EA-backed blocks, this would require
# converting to a 2-D ndarray of objects.
# Instead, we unstack an ndarray of integer positions, followed by
# a `take` on the actual values.
n_rows = self.shape[-1]
dummy_arr = np.arange(n_rows)
new_values, mask = unstacker.get_new_values(dummy_arr, fill_value=-1)
mask = mask.any(0)
# TODO: in all tests we have mask.all(); can we rely on that?
blocks = [
self.make_block_same_class(
self.values.take(indices, allow_fill=True, fill_value=fill_value),
[place],
)
for indices, place in zip(new_values.T, new_placement)
]
return blocks, mask
class HybridMixin:
"""
Mixin for Blocks backed (maybe indirectly) by ExtensionArrays.
"""
array_values: Callable
def _can_hold_element(self, element: Any) -> bool:
values = self.array_values()
try:
values._validate_setitem_value(element)
return True
except (ValueError, TypeError):
return False
class ObjectValuesExtensionBlock(HybridMixin, ExtensionBlock):
"""
Block providing backwards-compatibility for `.values`.
Used by PeriodArray and IntervalArray to ensure that
Series[T].values is an ndarray of objects.
"""
def external_values(self):
return self.values.astype(object)
class NumericBlock(Block):
__slots__ = ()
is_numeric = True
def _can_hold_element(self, element: Any) -> bool:
return can_hold_element(self.dtype, element)
@property
def _can_hold_na(self):
return self.dtype.kind not in ["b", "i", "u"]
@property
def is_bool(self):
return self.dtype.kind == "b"
class FloatBlock(NumericBlock):
__slots__ = ()
is_float = True
def to_native_types(
self, na_rep="", float_format=None, decimal=".", quoting=None, **kwargs
):
""" convert to our native types format """
values = self.values
# see gh-13418: no special formatting is desired at the
# output (important for appropriate 'quoting' behaviour),
# so do not pass it through the FloatArrayFormatter
if float_format is None and decimal == ".":
mask = isna(values)
if not quoting:
values = values.astype(str)
else:
values = np.array(values, dtype="object")
values[mask] = na_rep
return self.make_block(values)
from pandas.io.formats.format import FloatArrayFormatter
formatter = FloatArrayFormatter(
values,
na_rep=na_rep,
float_format=float_format,
decimal=decimal,
quoting=quoting,
fixed_width=False,
)
res = formatter.get_result_as_array()
return self.make_block(res)
class DatetimeLikeBlockMixin(HybridMixin, Block):
"""Mixin class for DatetimeBlock, DatetimeTZBlock, and TimedeltaBlock."""
@property
def _holder(self):
return DatetimeArray
@property
def fill_value(self):
return np.datetime64("NaT", "ns")
def get_values(self, dtype: Optional[Dtype] = None):
"""
return object dtype as boxed values, such as Timestamps/Timedelta
"""
if is_object_dtype(dtype):
# DTA/TDA constructor and astype can handle 2D
return self._holder(self.values).astype(object)
return self.values
def internal_values(self):
# Override to return DatetimeArray and TimedeltaArray
return self.array_values()
def array_values(self):
return self._holder._simple_new(self.values)
def iget(self, key):
# GH#31649 we need to wrap scalars in Timestamp/Timedelta
# TODO(EA2D): this can be removed if we ever have 2D EA
return self.array_values().reshape(self.shape)[key]
def diff(self, n: int, axis: int = 0) -> List[Block]:
"""
1st discrete difference.
Parameters
----------
n : int
Number of periods to diff.
axis : int, default 0
Axis to diff upon.
Returns
-------
A list with a new TimeDeltaBlock.
Notes
-----
The arguments here are mimicking shift so they are called correctly
by apply.
"""
# TODO(EA2D): reshape not necessary with 2D EAs
values = self.array_values().reshape(self.shape)
new_values = values - values.shift(n, axis=axis)
return [
TimeDeltaBlock(new_values, placement=self.mgr_locs.indexer, ndim=self.ndim)
]
def shift(self, periods, axis=0, fill_value=None):
# TODO(EA2D) this is unnecessary if these blocks are backed by 2D EAs
values = self.array_values()
new_values = values.shift(periods, fill_value=fill_value, axis=axis)
return self.make_block_same_class(new_values)
def to_native_types(self, na_rep="NaT", **kwargs):
""" convert to our native types format """
arr = self.array_values()
result = arr._format_native_types(na_rep=na_rep, **kwargs)
return self.make_block(result)
def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]:
# TODO(EA2D): reshape unnecessary with 2D EAs
arr = self.array_values().reshape(self.shape)
cond = _extract_bool_array(cond)
try:
res_values = arr.T.where(cond, other).T
except (ValueError, TypeError):
return super().where(other, cond, errors=errors, axis=axis)
# TODO(EA2D): reshape not needed with 2D EAs
res_values = res_values.reshape(self.values.shape)
nb = self.make_block_same_class(res_values)
return [nb]
class DatetimeBlock(DatetimeLikeBlockMixin):
__slots__ = ()
is_datetime = True
@property
def _can_hold_na(self):
return True
def _maybe_coerce_values(self, values):
"""
Input validation for values passed to __init__. Ensure that
we have datetime64ns, coercing if necessary.
Parameters
----------
values : array-like
Must be convertible to datetime64
Returns
-------
values : ndarray[datetime64ns]
Overridden by DatetimeTZBlock.
"""
if values.dtype != DT64NS_DTYPE:
values = conversion.ensure_datetime64ns(values)
if isinstance(values, DatetimeArray):
values = values._data
assert isinstance(values, np.ndarray), type(values)
return values
def set_inplace(self, locs, values):
"""
See Block.set.__doc__
"""
values = conversion.ensure_datetime64ns(values, copy=False)
self.values[locs] = values
class DatetimeTZBlock(ExtensionBlock, DatetimeBlock):
""" implement a datetime64 block with a tz attribute """
values: DatetimeArray
__slots__ = ()
is_datetimetz = True
is_extension = True
internal_values = Block.internal_values
_can_hold_element = DatetimeBlock._can_hold_element
to_native_types = DatetimeBlock.to_native_types
diff = DatetimeBlock.diff
fill_value = np.datetime64("NaT", "ns")
where = DatetimeBlock.where
array_values = ExtensionBlock.array_values
@property
def _holder(self):
return DatetimeArray
def _maybe_coerce_values(self, values):
"""
Input validation for values passed to __init__. Ensure that
we have datetime64TZ, coercing if necessary.
Parameters
----------
values : array-like
Must be convertible to datetime64
Returns
-------
values : DatetimeArray
"""
if not isinstance(values, self._holder):
values = self._holder(values)
if values.tz is None:
raise ValueError("cannot create a DatetimeTZBlock without a tz")
return values
@property
def is_view(self) -> bool:
""" return a boolean if I am possibly a view """
# check the ndarray values of the DatetimeIndex values
return self.values._data.base is not None
def get_values(self, dtype: Optional[Dtype] = None):
"""
Returns an ndarray of values.
Parameters
----------
dtype : np.dtype
Only `object`-like dtypes are respected here (not sure
why).
Returns
-------
values : ndarray
When ``dtype=object``, then an object-dtype ndarray of
boxed values is returned. Otherwise, an M8[ns] ndarray
is returned.
DatetimeArray is always 1-d. ``get_values`` will reshape
the return value to be the same dimensionality as the
block.
"""
values = self.values
if is_object_dtype(dtype):
values = values.astype(object)
# TODO(EA2D): reshape unnecessary with 2D EAs
# Ensure that our shape is correct for DataFrame.
# ExtensionArrays are always 1-D, even in a DataFrame when
# the analogous NumPy-backed column would be a 2-D ndarray.
return np.asarray(values).reshape(self.shape)
def external_values(self):
# NB: this is different from np.asarray(self.values), since that
# returns an object-dtype ndarray of Timestamps.
if self.is_datetimetz:
# avoid FutureWarning in .astype in casting from dt64t to dt64
return self.values._data
return np.asarray(self.values.astype("datetime64[ns]", copy=False))
def fillna(self, value, limit=None, inplace=False, downcast=None):
# We support filling a DatetimeTZ with a `value` whose timezone
# is different by coercing to object.
if self._can_hold_element(value):
return super().fillna(value, limit, inplace, downcast)
# different timezones, or a non-tz
return self.astype(object).fillna(
value, limit=limit, inplace=inplace, downcast=downcast
)
def quantile(self, qs, interpolation="linear", axis=0):
naive = self.values.view("M8[ns]")
# TODO(EA2D): kludge for 2D block with 1D values
naive = naive.reshape(self.shape)
blk = self.make_block(naive)
res_blk = blk.quantile(qs, interpolation=interpolation, axis=axis)
# TODO(EA2D): ravel is kludge for 2D block with 1D values, assumes column-like
aware = self._holder(res_blk.values.ravel(), dtype=self.dtype)
return self.make_block_same_class(aware, ndim=res_blk.ndim)
def _check_ndim(self, values, ndim):
"""
ndim inference and validation.
This is overridden by the DatetimeTZBlock to check the case of 2D
data (values.ndim == 2), which should only be allowed if ndim is
also 2.
The case of 1D array is still allowed with both ndim of 1 or 2, as
is the case for other EAs. Therefore, we are only checking
`values.ndim > ndim` instead of `values.ndim != ndim` as for
consolidated blocks.
"""
if ndim is None:
ndim = values.ndim
if values.ndim > ndim:
raise ValueError(
"Wrong number of dimensions. "
f"values.ndim != ndim [{values.ndim} != {ndim}]"
)
return ndim
class TimeDeltaBlock(DatetimeLikeBlockMixin):
__slots__ = ()
is_timedelta = True
_can_hold_na = True
is_numeric = False
fill_value = np.timedelta64("NaT", "ns")
def _maybe_coerce_values(self, values):
if values.dtype != TD64NS_DTYPE:
# non-nano we will convert to nano
if values.dtype.kind != "m":
# caller is responsible for ensuring timedelta64 dtype
raise TypeError(values.dtype) # pragma: no cover
values = TimedeltaArray._from_sequence(values)._data
if isinstance(values, TimedeltaArray):
values = values._data
assert isinstance(values, np.ndarray), type(values)
return values
@property
def _holder(self):
return TimedeltaArray
def fillna(self, value, **kwargs):
# TODO(EA2D): if we operated on array_values, TDA.fillna would handle
# raising here.
if is_integer(value):
# Deprecation GH#24694, GH#19233
raise TypeError(
"Passing integers to fillna for timedelta64[ns] dtype is no "
"longer supported. To obtain the old behavior, pass "
"`pd.Timedelta(seconds=n)` instead."
)
return super().fillna(value, **kwargs)
class ObjectBlock(Block):
__slots__ = ()
is_object = True
_can_hold_na = True
def _maybe_coerce_values(self, values):
if issubclass(values.dtype.type, (str, bytes)):
values = np.array(values, dtype=object)
return values
@property
def is_bool(self):
"""
we can be a bool if we have only bool values but are of type
object
"""
return lib.is_bool_array(self.values.ravel("K"))
def reduce(self, func, ignore_failures: bool = False) -> List[Block]:
"""
For object-dtype, we operate column-wise.
"""
assert self.ndim == 2
values = self.values
if len(values) > 1:
# split_and_operate expects func with signature (mask, values, inplace)
def mask_func(mask, values, inplace):
if values.ndim == 1:
values = values.reshape(1, -1)
return func(values)
return self.split_and_operate(
None, mask_func, False, ignore_failures=ignore_failures
)
try:
res = func(values)
except TypeError:
if not ignore_failures:
raise
return []
assert isinstance(res, np.ndarray)
assert res.ndim == 1
res = res.reshape(1, -1)
return [self.make_block_same_class(res)]
def convert(
self,
copy: bool = True,
datetime: bool = True,
numeric: bool = True,
timedelta: bool = True,
) -> List[Block]:
"""
attempt to cast any object types to better types return a copy of
the block (if copy = True) by definition we ARE an ObjectBlock!!!!!
"""
# operate column-by-column
def f(mask, val, idx):
shape = val.shape
values = soft_convert_objects(
val.ravel(),
datetime=datetime,
numeric=numeric,
timedelta=timedelta,
copy=copy,
)
if isinstance(values, np.ndarray):
# TODO(EA2D): allow EA once reshape is supported
values = values.reshape(shape)
return values
if self.ndim == 2:
blocks = self.split_and_operate(None, f, False)
else:
values = f(None, self.values.ravel(), None)
blocks = [self.make_block(values)]
return blocks
def _maybe_downcast(self, blocks: List[Block], downcast=None) -> List[Block]:
if downcast is not None:
return blocks
# split and convert the blocks
return extend_blocks([b.convert(datetime=True, numeric=False) for b in blocks])
def _can_hold_element(self, element: Any) -> bool:
return True
def replace(
self,
to_replace,
value,
inplace: bool = False,
regex: bool = False,
) -> List[Block]:
# Note: the checks we do in NDFrame.replace ensure we never get
# here with listlike to_replace or value, as those cases
# go through _replace_list
regex = _should_use_regex(regex, to_replace)
if regex:
return self._replace_regex(to_replace, value, inplace=inplace)
else:
return super().replace(to_replace, value, inplace=inplace, regex=False)
def _should_use_regex(regex: bool, to_replace: Any) -> bool:
"""
Decide whether to treat `to_replace` as a regular expression.
"""
if is_re(to_replace):
regex = True
regex = regex and is_re_compilable(to_replace)
# Don't use regex if the pattern is empty.
regex = regex and re.compile(to_replace).pattern != ""
return regex
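# Illustrative behaviour of _should_use_regex (hypothetical inputs, shown as comments):
#   _should_use_regex(False, re.compile("ab")) -> True   # a compiled pattern always forces regex
#   _should_use_regex(True, "a|b")             -> True   # compilable, non-empty pattern
#   _should_use_regex(True, "")                -> False  # an empty pattern is never treated as regex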
class CategoricalBlock(ExtensionBlock):
__slots__ = ()
def _replace_list(
self,
src_list: List[Any],
dest_list: List[Any],
inplace: bool = False,
regex: bool = False,
) -> List[Block]:
if len(algos.unique(dest_list)) == 1:
# We likely got here by tiling value inside NDFrame.replace,
# so un-tile here
return self.replace(src_list, dest_list[0], inplace, regex)
return super()._replace_list(src_list, dest_list, inplace, regex)
def replace(
self,
to_replace,
value,
inplace: bool = False,
regex: bool = False,
) -> List[Block]:
inplace = validate_bool_kwarg(inplace, "inplace")
result = self if inplace else self.copy()
result.values.replace(to_replace, value, inplace=True)
return [result]
# -----------------------------------------------------------------
# Constructor Helpers
def get_block_type(values, dtype: Optional[Dtype] = None):
"""
Find the appropriate Block subclass to use for the given values and dtype.
Parameters
----------
values : ndarray-like
dtype : numpy or pandas dtype
Returns
-------
cls : class, subclass of Block
"""
# We use vtype and kind checks because they are much more performant
# than is_foo_dtype
dtype = cast(np.dtype, pandas_dtype(dtype) if dtype else values.dtype)
vtype = dtype.type
kind = dtype.kind
cls: Type[Block]
if is_sparse(dtype):
# Need this first(ish) so that Sparse[datetime] is sparse
cls = ExtensionBlock
elif isinstance(dtype, CategoricalDtype):
cls = CategoricalBlock
elif vtype is Timestamp:
cls = DatetimeTZBlock
elif vtype is Interval or vtype is Period:
cls = ObjectValuesExtensionBlock
elif isinstance(dtype, ExtensionDtype):
# Note: need to be sure PandasArray is unwrapped before we get here
cls = ExtensionBlock
elif kind == "M":
cls = DatetimeBlock
elif kind == "m":
cls = TimeDeltaBlock
elif kind == "f":
cls = FloatBlock
elif kind in ["c", "i", "u", "b"]:
cls = NumericBlock
else:
cls = ObjectBlock
return cls
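# Illustrative dtype-to-class mapping produced by get_block_type (hypothetical inputs):
#   np.dtype("float64")        -> FloatBlock
#   np.dtype("int64"), "bool"  -> NumericBlock
#   np.dtype("M8[ns]")         -> DatetimeBlock
#   np.dtype("m8[ns]")         -> TimeDeltaBlock
#   CategoricalDtype()         -> CategoricalBlock
#   np.dtype("object")         -> ObjectBlock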
def make_block(values, placement, klass=None, ndim=None, dtype: Optional[Dtype] = None):
# Ensure that we don't allow PandasArray / PandasDtype in internals.
# For now, blocks should be backed by ndarrays when possible.
if isinstance(values, ABCPandasArray):
values = values.to_numpy()
if ndim and ndim > 1:
# TODO(EA2D): special case not needed with 2D EAs
values = np.atleast_2d(values)
if isinstance(dtype, PandasDtype):
dtype = dtype.numpy_dtype
if klass is None:
dtype = dtype or values.dtype
klass = get_block_type(values, dtype)
elif klass is DatetimeTZBlock and not is_datetime64tz_dtype(values.dtype):
# TODO: This is no longer hit internally; does it need to be retained
# for e.g. pyarrow?
values = DatetimeArray._simple_new(values, dtype=dtype)
return klass(values, ndim=ndim, placement=placement)
# -----------------------------------------------------------------
def extend_blocks(result, blocks=None):
""" return a new extended blocks, given the result """
if blocks is None:
blocks = []
if isinstance(result, list):
for r in result:
if isinstance(r, list):
blocks.extend(r)
else:
blocks.append(r)
else:
assert isinstance(result, Block), type(result)
blocks.append(result)
return blocks
def _block_shape(values: ArrayLike, ndim: int = 1) -> ArrayLike:
""" guarantee the shape of the values to be at least 1 d """
if values.ndim < ndim:
shape = values.shape
if not is_extension_array_dtype(values.dtype):
# TODO(EA2D): https://github.com/pandas-dev/pandas/issues/23023
# block.shape is incorrect for "2D" ExtensionArrays
# We can't, and don't need to, reshape.
# error: "ExtensionArray" has no attribute "reshape"
values = values.reshape(tuple((1,) + shape)) # type: ignore[attr-defined]
return values
def safe_reshape(arr: ArrayLike, new_shape: Shape) -> ArrayLike:
"""
Reshape `arr` to have shape `new_shape`, unless it is an ExtensionArray,
in which case it will be returned unchanged (see gh-13012).
Parameters
----------
arr : np.ndarray or ExtensionArray
new_shape : Tuple[int]
Returns
-------
np.ndarray or ExtensionArray
"""
if not is_extension_array_dtype(arr.dtype):
# Note: this will include TimedeltaArray and tz-naive DatetimeArray
# TODO(EA2D): special case will be unnecessary with 2D EAs
arr = np.asarray(arr).reshape(new_shape)
return arr
def _extract_bool_array(mask: ArrayLike) -> np.ndarray:
"""
If we have a SparseArray or BooleanArray, convert it to ndarray[bool].
"""
if isinstance(mask, ExtensionArray):
# We could have BooleanArray, Sparse[bool], ...
# Except for BooleanArray, this is equivalent to just
# np.asarray(mask, dtype=bool)
mask = mask.to_numpy(dtype=bool, na_value=False)
assert isinstance(mask, np.ndarray), type(mask)
assert mask.dtype == bool, mask.dtype
return mask
|
py | 1a44973f8c1681937fc9e2b28e8013ccdc58aa9b | """Switch platform for Advantage Air integration."""
from homeassistant.helpers.entity import ToggleEntity
from .const import (
ADVANTAGE_AIR_STATE_OFF,
ADVANTAGE_AIR_STATE_ON,
DOMAIN as ADVANTAGE_AIR_DOMAIN,
)
from .entity import AdvantageAirEntity
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up AdvantageAir toggle platform."""
instance = hass.data[ADVANTAGE_AIR_DOMAIN][config_entry.entry_id]
entities = []
for ac_key, ac_device in instance["coordinator"].data["aircons"].items():
if ac_device["info"]["freshAirStatus"] != "none":
entities.append(AdvantageAirFreshAir(instance, ac_key))
async_add_entities(entities)
class AdvantageAirFreshAir(AdvantageAirEntity, ToggleEntity):
"""Representation of Advantage Air fresh air control."""
_attr_icon = "mdi:air-filter"
def __init__(self, instance, ac_key):
"""Initialize an Advantage Air fresh air control."""
super().__init__(instance, ac_key)
self._attr_name = f'{self._ac["name"]} Fresh Air'
self._attr_unique_id = (
f'{self.coordinator.data["system"]["rid"]}-{ac_key}-freshair'
)
@property
def is_on(self):
"""Return the fresh air status."""
return self._ac["freshAirStatus"] == ADVANTAGE_AIR_STATE_ON
async def async_turn_on(self, **kwargs):
"""Turn fresh air on."""
await self.async_change(
{self.ac_key: {"info": {"freshAirStatus": ADVANTAGE_AIR_STATE_ON}}}
)
async def async_turn_off(self, **kwargs):
"""Turn fresh air off."""
await self.async_change(
{self.ac_key: {"info": {"freshAirStatus": ADVANTAGE_AIR_STATE_OFF}}}
)
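# Illustrative coordinator payload this entity reads (hypothetical keys and values):
#   {"system": {"rid": "uniqueid"},
#    "aircons": {"ac1": {"info": {"freshAirStatus": "on", ...}}}}
# async_setup_entry above only creates a fresh air switch for air conditioners whose
# "freshAirStatus" is not "none".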
|
py | 1a4497b47fca36450c86950c64ed8ce2abcec046 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
DOCUMENTATION = r"""
---
module: external_radius_server
short_description: Resource module for External Radius Server
description:
- Manage operations create, update and delete of the resource External Radius Server.
version_added: '1.0.0'
extends_documentation_fragment:
- cisco.ise.module
author: Rafael Campos (@racampos)
options:
accountingPort:
description: Valid Range 1 to 65535.
type: int
authenticationPort:
description: Valid Range 1 to 65535.
type: int
authenticatorKey:
description: The authenticatorKey is required only if enableKeyWrap is true, otherwise
it must be ignored or empty. The maximum length is 20 ASCII characters or 40 HEXADECIMAL
characters (depends on the selection in field 'keyInputFormat').
type: str
description:
description: External Radius Server's description.
type: str
enableKeyWrap:
description: KeyWrap may only be enabled if it is supported on the device. When
running in FIPS mode this option should be enabled for such devices.
type: bool
encryptionKey:
description: The encryptionKey is required only if enableKeyWrap is true, otherwise
it must be ignored or empty. The maximum length is 16 ASCII characters or 32 HEXADECIMAL
characters (depends on the selection in field 'keyInputFormat').
type: str
hostIP:
description: The IP of the host - must be a valid IPV4 address.
type: str
id:
description: External Radius Server's id.
type: str
keyInputFormat:
description: Specifies the format of the input for fields 'encryptionKey' and 'authenticatorKey'.
Allowed Values - ASCII - HEXADECIMAL.
type: str
name:
description: Resource Name. Allowed characters are alphanumeric and _ (underscore).
type: str
proxyTimeout:
description: Valid Range 1 to 600.
type: int
retries:
description: Valid Range 1 to 9.
type: int
sharedSecret:
description: Shared secret maximum length is 128 characters.
type: str
timeout:
description: Valid Range 1 to 120.
type: int
requirements:
- ciscoisesdk >= 1.1.0
- python >= 3.5
seealso:
# Reference by Internet resource
- name: External Radius Server reference
description: Complete reference of the External Radius Server object model.
link: https://ciscoisesdk.readthedocs.io/en/latest/api/api.html#v3-0-0-summary
"""
EXAMPLES = r"""
- name: Update by id
cisco.ise.external_radius_server:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
accountingPort: 0
authenticationPort: 0
authenticatorKey: string
description: string
enableKeyWrap: true
encryptionKey: string
hostIP: string
id: string
keyInputFormat: string
name: string
proxyTimeout: 0
retries: 0
sharedSecret: string
timeout: 0
- name: Delete by id
cisco.ise.external_radius_server:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: absent
id: string
- name: Create
cisco.ise.external_radius_server:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
accountingPort: 0
authenticationPort: 0
authenticatorKey: string
description: string
enableKeyWrap: true
encryptionKey: string
hostIP: string
keyInputFormat: string
name: string
proxyTimeout: 0
retries: 0
sharedSecret: string
timeout: 0
"""
RETURN = r"""
ise_response:
description: A dictionary or list with the response returned by the Cisco ISE Python SDK
returned: always
type: dict
sample: >
{
"id": "string",
"name": "string",
"description": "string",
"hostIP": "string",
"sharedSecret": "string",
"enableKeyWrap": true,
"encryptionKey": "string",
"authenticatorKey": "string",
"keyInputFormat": "string",
"authenticationPort": 0,
"accountingPort": 0,
"timeout": 0,
"retries": 0,
"proxyTimeout": 0,
"link": {
"rel": "string",
"href": "string",
"type": "string"
}
}
ise_update_response:
description: A dictionary or list with the response returned by the Cisco ISE Python SDK
returned: always
version_added: "1.1.0"
type: dict
sample: >
{
"UpdatedFieldsList": {
"updatedField": {
"field": "string",
"oldValue": "string",
"newValue": "string"
},
"field": "string",
"oldValue": "string",
"newValue": "string"
}
}
"""
|
py | 1a449815d70843d59824faf0989db0d95985ba6d | #coding=utf8
import os
import itchat
from NetEaseMusicApi import interact_select_song
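# HELP_MSG below is user-facing Chinese text, roughly: "Welcome to WeChat NetEase Cloud
# Music -- 帮助: show this help; 关闭: stop the current song; send a song name to play it."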
HELP_MSG = u'''\
欢迎使用微信网易云音乐
帮助: 显示帮助
关闭: 关闭歌曲
歌名: 按照引导播放音乐\
'''
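# An empty stop.mp3 is created below; "playing" it with os.startfile replaces the song
# that is currently playing, which is how close_music() stops playback (Windows-specific).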
with open('stop.mp3', 'w') as f: pass
def close_music():
os.startfile('stop.mp3')
@itchat.msg_register(itchat.content.TEXT)
def music_player(msg):
if msg['ToUserName'] != 'filehelper': return
if msg['Text'] == u'关闭':
close_music()
itchat.send(u'音乐已关闭', 'filehelper')
if msg['Text'] == u'帮助':
itchat.send(HELP_MSG, 'filehelper')
else:
itchat.send(interact_select_song(msg['Text']), 'filehelper')
itchat.auto_login(True, enableCmdQR=True)
itchat.send(HELP_MSG, 'filehelper')
itchat.run()
|
py | 1a44991fa0f84e25a12fb98814bc40962c1d1759 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class MessagePreventionRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'jaq', '2016-11-23', 'MessagePrevention')
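# Illustrative usage sketch (hypothetical credentials and values; assumes a configured
# aliyunsdkcore.client.AcsClient):
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = MessagePreventionRequest()
#   request.set_PhoneNumber('13800000000')
#   request.set_Ip('1.2.3.4')
#   response = client.do_action_with_exception(request)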
def get_CallerName(self):
return self.get_query_params().get('CallerName')
def set_CallerName(self,CallerName):
self.add_query_param('CallerName',CallerName)
def get_Ip(self):
return self.get_query_params().get('Ip')
def set_Ip(self,Ip):
self.add_query_param('Ip',Ip)
def get_ProtocolVersion(self):
return self.get_query_params().get('ProtocolVersion')
def set_ProtocolVersion(self,ProtocolVersion):
self.add_query_param('ProtocolVersion',ProtocolVersion)
def get_Source(self):
return self.get_query_params().get('Source')
def set_Source(self,Source):
self.add_query_param('Source',Source)
def get_PhoneNumber(self):
return self.get_query_params().get('PhoneNumber')
def set_PhoneNumber(self,PhoneNumber):
self.add_query_param('PhoneNumber',PhoneNumber)
def get_Email(self):
return self.get_query_params().get('Email')
def set_Email(self,Email):
self.add_query_param('Email',Email)
def get_UserId(self):
return self.get_query_params().get('UserId')
def set_UserId(self,UserId):
self.add_query_param('UserId',UserId)
def get_IdType(self):
return self.get_query_params().get('IdType')
def set_IdType(self,IdType):
self.add_query_param('IdType',IdType)
def get_CurrentUrl(self):
return self.get_query_params().get('CurrentUrl')
def set_CurrentUrl(self,CurrentUrl):
self.add_query_param('CurrentUrl',CurrentUrl)
def get_Agent(self):
return self.get_query_params().get('Agent')
def set_Agent(self,Agent):
self.add_query_param('Agent',Agent)
def get_Cookie(self):
return self.get_query_params().get('Cookie')
def set_Cookie(self,Cookie):
self.add_query_param('Cookie',Cookie)
def get_SessionId(self):
return self.get_query_params().get('SessionId')
def set_SessionId(self,SessionId):
self.add_query_param('SessionId',SessionId)
def get_MacAddress(self):
return self.get_query_params().get('MacAddress')
def set_MacAddress(self,MacAddress):
self.add_query_param('MacAddress',MacAddress)
def get_Referer(self):
return self.get_query_params().get('Referer')
def set_Referer(self,Referer):
self.add_query_param('Referer',Referer)
def get_UserName(self):
return self.get_query_params().get('UserName')
def set_UserName(self,UserName):
self.add_query_param('UserName',UserName)
def get_CompanyName(self):
return self.get_query_params().get('CompanyName')
def set_CompanyName(self,CompanyName):
self.add_query_param('CompanyName',CompanyName)
def get_Address(self):
return self.get_query_params().get('Address')
def set_Address(self,Address):
self.add_query_param('Address',Address)
def get_IDNumber(self):
return self.get_query_params().get('IDNumber')
def set_IDNumber(self,IDNumber):
self.add_query_param('IDNumber',IDNumber)
def get_BankCardNumber(self):
return self.get_query_params().get('BankCardNumber')
def set_BankCardNumber(self,BankCardNumber):
self.add_query_param('BankCardNumber',BankCardNumber)
def get_RegisterIp(self):
return self.get_query_params().get('RegisterIp')
def set_RegisterIp(self,RegisterIp):
self.add_query_param('RegisterIp',RegisterIp)
def get_RegisterDate(self):
return self.get_query_params().get('RegisterDate')
def set_RegisterDate(self,RegisterDate):
self.add_query_param('RegisterDate',RegisterDate)
def get_LoginIp(self):
return self.get_query_params().get('LoginIp')
def set_LoginIp(self,LoginIp):
self.add_query_param('LoginIp',LoginIp)
def get_LoginDate(self):
return self.get_query_params().get('LoginDate')
def set_LoginDate(self,LoginDate):
self.add_query_param('LoginDate',LoginDate)
def get_ExtendData(self):
return self.get_query_params().get('ExtendData')
def set_ExtendData(self,ExtendData):
self.add_query_param('ExtendData',ExtendData)
def get_PasswordHash(self):
return self.get_query_params().get('PasswordHash')
def set_PasswordHash(self,PasswordHash):
self.add_query_param('PasswordHash',PasswordHash)
def get_JsToken(self):
return self.get_query_params().get('JsToken')
def set_JsToken(self,JsToken):
self.add_query_param('JsToken',JsToken)
def get_SDKToken(self):
return self.get_query_params().get('SDKToken')
def set_SDKToken(self,SDKToken):
self.add_query_param('SDKToken',SDKToken) |
py | 1a4499f0b2a1e68fb2e8d4423883be84c84aa0b4 | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.11
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re
# python 2 and python 3 compatibility library
import six
from typing import List, Optional
from .. import models
class KMIPApi(object):
def __init__(self, api_client):
self.api_client = api_client
def api211_kmip_delete_with_http_info(
self,
authorization=None, # type: str
x_request_id=None, # type: str
names=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> None
"""Delete KMIP server object
Deletes KMIP server objects.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api211_kmip_delete_with_http_info(async_req=True)
>>> result = thread.get()
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
:param str x_request_id: Supplied by client during request or generated by server.
:param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/api/2.11/kmip', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api211_kmip_get_with_http_info(
self,
authorization=None, # type: str
x_request_id=None, # type: str
continuation_token=None, # type: str
filter=None, # type: str
limit=None, # type: int
names=None, # type: List[str]
offset=None, # type: int
sort=None, # type: List[str]
total_item_count=None, # type: bool
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.KmipGetResponse
"""List KMIP server objects
Displays the list of KMIP server objects.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api211_kmip_get_with_http_info(async_req=True)
>>> result = thread.get()
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
:param str x_request_id: Supplied by client during request or generated by server.
:param str continuation_token: A token used to retrieve the next page of data with some consistency guaranteed. The token is a Base64 encoded value. Set `continuation_token` to the system-generated token taken from the `x-next-token` header field of the response. A query has reached its last page when the response does not include a token. Pagination requires the `limit` and `continuation_token` query parameters.
:param str filter: Narrows down the results to only the response objects that satisfy the filter criteria.
:param int limit: Limits the size of the response to the specified number of objects on each page. To return the total number of resources, set `limit=0`. The total number of resources is returned as a `total_item_count` value. If the page size requested is larger than the system maximum limit, the server returns the maximum limit, disregarding the requested page size.
:param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
:param int offset: The starting position based on the results of the query in relation to the full set of response objects returned.
:param list[str] sort: Returns the response objects in the order specified. Set `sort` to the name in the response by which to sort. Sorting can be performed on any of the names in the response, and the objects can be sorted in ascending or descending order. By default, the response objects are sorted in ascending order. To sort in descending order, append the minus sign (`-`) to the name. A single request can be sorted on multiple objects. For example, you can sort all volumes from largest to smallest volume size, and then sort volumes of the same size in ascending order by volume name. To sort on multiple names, list the names as comma-separated values.
:param bool total_item_count: If set to `true`, the `total_item_count` matching the specified query parameters is calculated and returned in the response. If set to `false`, the `total_item_count` is `null` in the response. This may speed up queries where the `total_item_count` is large. If not specified, defaults to `false`.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: KmipGetResponse
If the method is called asynchronously,
returns the request thread.
"""
if names is not None:
if not isinstance(names, list):
names = [names]
if sort is not None:
if not isinstance(sort, list):
sort = [sort]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if 'limit' in params and params['limit'] < 1:
raise ValueError("Invalid value for parameter `limit` when calling `api211_kmip_get`, must be a value greater than or equal to `1`")
if 'offset' in params and params['offset'] < 0:
raise ValueError("Invalid value for parameter `offset` when calling `api211_kmip_get`, must be a value greater than or equal to `0`")
collection_formats = {}
path_params = {}
query_params = []
if 'continuation_token' in params:
query_params.append(('continuation_token', params['continuation_token']))
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
if 'offset' in params:
query_params.append(('offset', params['offset']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
collection_formats['sort'] = 'csv'
if 'total_item_count' in params:
query_params.append(('total_item_count', params['total_item_count']))
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/api/2.11/kmip', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='KmipGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
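# Illustrative call sketch (hypothetical token; assumes `api_client` is an already
# configured ApiClient instance for the target array):
#   kmip_api = KMIPApi(api_client)
#   result = kmip_api.api211_kmip_get_with_http_info(
#       authorization='Bearer <JWT>', limit=10)  # returns a KmipGetResponse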
def api211_kmip_patch_with_http_info(
self,
kmip=None, # type: models.KmipPatch
authorization=None, # type: str
x_request_id=None, # type: str
names=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.KmipResponse
"""Modify KMIP attributes
Modifies one or more attributes of KMIP server objects.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api211_kmip_patch_with_http_info(kmip, async_req=True)
>>> result = thread.get()
:param KmipPatch kmip: (required)
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
:param str x_request_id: Supplied by client during request or generated by server.
:param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: KmipResponse
If the method is called asynchronously,
returns the request thread.
"""
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
# verify the required parameter 'kmip' is set
if kmip is None:
raise TypeError("Missing the required parameter `kmip` when calling `api211_kmip_patch`")
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
if 'kmip' in params:
body_params = params['kmip']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/api/2.11/kmip', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='KmipResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api211_kmip_post_with_http_info(
self,
kmip=None, # type: models.KmipPost
authorization=None, # type: str
x_request_id=None, # type: str
names=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.KmipResponse
"""Create KMIP server object
Creates KMIP server objects.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api211_kmip_post_with_http_info(kmip, async_req=True)
>>> result = thread.get()
:param KmipPost kmip: (required)
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
:param str x_request_id: Supplied by client during request or generated by server.
:param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: KmipResponse
If the method is called asynchronously,
returns the request thread.
"""
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
# verify the required parameter 'kmip' is set
if kmip is None:
raise TypeError("Missing the required parameter `kmip` when calling `api211_kmip_post`")
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
if 'kmip' in params:
body_params = params['kmip']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/api/2.11/kmip', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='KmipResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api211_kmip_test_get_with_http_info(
self,
names=None, # type: List[str]
authorization=None, # type: str
x_request_id=None, # type: str
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.KmipTestResultGetResponse
"""Lists KMIP connection tests
Displays communication data between a FlashArray and KMIP server.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api211_kmip_test_get_with_http_info(names, async_req=True)
>>> result = thread.get()
:param list[str] names: Performs the operation on the unique name specified. For example, `name01`. Enter multiple names in comma-separated format. (required)
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
:param str x_request_id: Supplied by client during request or generated by server.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: KmipTestResultGetResponse
If the method is called asynchronously,
returns the request thread.
"""
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
# verify the required parameter 'names' is set
if names is None:
raise TypeError("Missing the required parameter `names` when calling `api211_kmip_test_get`")
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/api/2.11/kmip/test', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='KmipTestResultGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
|
py | 1a449a5243aaf49167ddd66ebe865515ae67bf63 | import pytest
from hypothesis import given, settings, HealthCheck
from hypothesis import reproduce_failure # pylint: disable=unused-import
from itertools import product
import numpy as np
from tests.hypothesis_helper import dfs_min2, dfs_no_min
from os import environ
if environ.get("TRAVIS"):
max_examples = 10
deadline = None
else:
max_examples = 100
deadline = None
strandedness = [False, "same", "opposite"]
binary_methods = [
"set_union", "set_intersect", "overlap", "nearest", "intersect",
"subtract", "join"
]
unary_methods = [
"merge", "sort", "cluster", "pc", "mpc", "spc", "drop_duplicate_positions",
"drop"
]
method_chain = product(binary_methods, binary_methods)
# cannot start with an operation that makes a PyRanges unstranded and then try a stranded op
strandedness_chain = list(product(["same", "opposite"], strandedness)) + list(
product(strandedness, [None]))
@pytest.mark.bedtools
@pytest.mark.parametrize("strandedness_chain,method_chain",
product(strandedness_chain, method_chain))
@settings(
max_examples=max_examples,
deadline=deadline,
print_blob=True,
suppress_health_check=HealthCheck.all())
@given(gr=dfs_no_min(), gr2=dfs_no_min(), gr3=dfs_no_min()) # pylint: disable=no-value-for-parameter
# @reproduce_failure('5.5.4', b'AXicY2RAA4xIJCoLygcAALIABg==') # test_three_in_a_row[strandedness_chain122-method_chain122]
# @reproduce_failure('5.5.4', b'AXicY2QAAUYGKGBkxM9nAAABEAAJ') # test_three_in_a_row[strandedness_chain45-method_chain45]
# @reproduce_failure('5.5.4', b'AXicY2RAA4xIJDY+AAC2AAY=') # test_three_in_a_row[strandedness_chain24-method_chain24]
def test_three_in_a_row(gr, gr2, gr3, strandedness_chain, method_chain):
s1, s2 = strandedness_chain
f1, f2 = method_chain
suffix_methods = ["nearest", "join"]
if f1 in suffix_methods and f2 in suffix_methods:
m1 = getattr(gr, f1)
gr2 = m1(gr2, strandedness=s1)
if len(gr2) > 0:
assert gr2.Start.dtype == np.int64
assert (gr2.Start >= 0).all() and (gr2.End >= 0).all()
m2 = getattr(gr2, f2)
gr3 = m2(gr3, strandedness=s2, suffix="_c")
print(gr3)
if len(gr3) > 0:
assert gr3.Start.dtype == np.int64
assert (gr3.Start >= 0).all() and (gr3.End >= 0).all()
else:
m1 = getattr(gr, f1)
gr2 = m1(gr2, strandedness=s1)
if len(gr2) > 0:
assert gr2.Start.dtype == np.int64
assert (gr2.Start >= 0).all() and (gr2.End >= 0).all()
m2 = getattr(gr2, f2)
gr3 = m2(gr3, strandedness=s2)
print(gr3)
if len(gr3) > 0:
assert gr3.Start.dtype == np.int64
assert (gr3.Start >= 0).all() and (gr3.End >= 0).all()
# @pytest.mark.bedtools
# @pytest.mark.parametrize("strandedness_chain,method_chain",
# product(strandedness_chain, method_chain))
# @settings(
# max_examples=max_examples,
# deadline=deadline,
# suppress_health_check=HealthCheck.all())
# @given(gr=dfs_no_min(), gr2=dfs_no_min(), gr3=dfs_no_min()) # pylint: disable=no-value-for-parameter
# def test_three_in_a_row(gr, gr2, gr3, strandedness_chain, method_chain):
# s1, s2 = strandedness_chain
# f1, f2 = method_chain
# # print(s1, s2)
# # print(f1, f2)
# m1 = getattr(gr, f1)
# gr2 = m1(gr2, strandedness=s1)
# m2 = getattr(gr2, f2)
# gr3 = m2(gr3, strandedness=s2)
|
py | 1a449a732ba81f995221b38250145fb2a3f0789c | import os
import shlex
import subprocess
import h5py
import numpy as np
import torch
import torch.utils.data as data
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def _get_data_files(list_filename):
with open(list_filename) as f:
return [line.rstrip() for line in f]
def _load_data_file(name):
f = h5py.File(name, "r")
data = f["data"][:]
label = f["label"][:]
return data, label
class Indoor3DSemSeg(data.Dataset):
def __init__(self, num_points, train=True, download=True, data_precent=1.0):
super().__init__()
self.data_precent = data_precent
self.folder = "indoor3d_sem_seg_hdf5_data"
self.data_dir = os.path.join(BASE_DIR, self.folder)
self.url = (
"https://shapenet.cs.stanford.edu/media/indoor3d_sem_seg_hdf5_data.zip"
)
if download and not os.path.exists(self.data_dir):
zipfile = os.path.join(BASE_DIR, os.path.basename(self.url))
subprocess.check_call(
shlex.split("curl {} -o {}".format(self.url, zipfile))
)
subprocess.check_call(
shlex.split("unzip {} -d {}".format(zipfile, BASE_DIR))
)
subprocess.check_call(shlex.split("rm {}".format(zipfile)))
self.train, self.num_points = train, num_points
all_files = _get_data_files(os.path.join(self.data_dir, "all_files.txt"))
room_filelist = _get_data_files(
os.path.join(self.data_dir, "room_filelist.txt")
)
data_batchlist, label_batchlist = [], []
for f in all_files:
data, label = _load_data_file(os.path.join(BASE_DIR, f))
data_batchlist.append(data)
label_batchlist.append(label)
data_batches = np.concatenate(data_batchlist, 0)
labels_batches = np.concatenate(label_batchlist, 0)
test_area = "Area_5"
train_idxs, test_idxs = [], []
for i, room_name in enumerate(room_filelist):
if test_area in room_name:
test_idxs.append(i)
else:
train_idxs.append(i)
if self.train:
self.points = data_batches[train_idxs, ...]
self.labels = labels_batches[train_idxs, ...]
else:
self.points = data_batches[test_idxs, ...]
self.labels = labels_batches[test_idxs, ...]
def __getitem__(self, idx):
pt_idxs = np.arange(0, self.num_points)
np.random.shuffle(pt_idxs)
current_points = torch.from_numpy(self.points[idx, pt_idxs].copy()).float()
current_labels = torch.from_numpy(self.labels[idx, pt_idxs].copy()).long()
return current_points, current_labels
def __len__(self):
return int(self.points.shape[0] * self.data_precent)
def set_num_points(self, pts):
self.num_points = pts
def randomize(self):
pass
if __name__ == "__main__":
dset = Indoor3DSemSeg(16, "./", train=True)
print(dset[0])
print(len(dset))
dloader = torch.utils.data.DataLoader(dset, batch_size=32, shuffle=True)
for i, data in enumerate(dloader, 0):
inputs, labels = data
if i == len(dloader) - 1:
print(inputs.size())
|
py | 1a449acb1d4b0c82d2f45029a8369908955315ff | __author__ = 'thauser'
from argh import arg
from six import iteritems
import logging
from pnc_cli import swagger_client
from pnc_cli import utils
from pnc_cli.pnc_api import pnc_api
@arg("-p", "--page-size", help="Limit the amount of builds returned")
@arg("--page-index", help="Select the index of page", type=int)
@arg("-s", "--sort", help="Sorting RSQL")
@arg("-q", help="RSQL query")
def list_builds(page_size=200, page_index=0, sort="", q=""):
"""
List all builds
:param page_size: number of builds returned per query
:param sort: RSQL sorting query
:param q: RSQL query
:return:
"""
response = utils.checked_api_call(pnc_api.builds_running, 'get_all', page_size=page_size, page_index=page_index, sort=sort, q=q)
if response:
return response.content
|
py | 1a449be89e3a5885223220ea3590f3c0b46f3357 | # coding=utf-8
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Learning rate decay functions."""
import math
from megatron import print_rank_0
class AnnealingLR(object):
"""Anneals the learning rate."""
def __init__(self, optimizer, start_lr,
warmup_iter, total_iters,
decay_style, last_iter, min_lr=0.0,
use_checkpoint_lr_scheduler=True,
override_lr_scheduler=False):
# Class values.
self.optimizer = optimizer
self.start_lr = start_lr
self.min_lr = min_lr
self.warmup_iter = warmup_iter
self.num_iters = last_iter
self.end_iter = total_iters
assert self.end_iter > 0
self.decay_style = decay_style
self.override_lr_scheduler = override_lr_scheduler
self.use_checkpoint_lr_scheduler = use_checkpoint_lr_scheduler
if self.override_lr_scheduler:
assert not self.use_checkpoint_lr_scheduler, 'both override and '\
'use-checkpoint are set.'
# Set the learning rate
self.step(self.num_iters)
print_rank_0('> learning rate decay style: {}'.format(self.decay_style))
def get_lr(self):
"""Learning rate decay functions from:
https://openreview.net/pdf?id=BJYwwY9ll pg. 4"""
num_iters_ = min(self.num_iters, self.end_iter - self.warmup_iter)
# Warmup.
if self.warmup_iter > 0 and self.num_iters <= self.warmup_iter:
return float(self.start_lr) * num_iters_ / self.warmup_iter
num_iters_ = num_iters_ - self.warmup_iter
if self.decay_style == 'linear':
lr = self.start_lr * (self.end_iter - num_iters_) / self.end_iter
elif self.decay_style == 'cosine':
lr = self.start_lr / 2.0 * (math.cos(
math.pi * num_iters_ / self.end_iter) + 1)
elif self.decay_style == 'exponential':
# exp(-0.693) = 1/2
lr = self.start_lr * math.exp(-0.693 * num_iters_ / self.end_iter)
else:
lr = self.start_lr
return max(lr, self.min_lr)
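# Worked example for the cosine branch above: halfway through decay
# (num_iters_ == end_iter / 2) cos(pi/2) == 0, so lr == start_lr / 2; at the end
# (num_iters_ == end_iter) cos(pi) == -1, so lr drops to 0 and min_lr is returned.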
def step(self, step_num=None):
"""Set lr for all parameters groups."""
if step_num is None:
step_num = self.num_iters + 1
self.num_iters = step_num
new_lr = self.get_lr()
for group in self.optimizer.param_groups:
group['lr'] = new_lr
def state_dict(self):
state_dict = {
'start_lr': self.start_lr,
'warmup_iter': self.warmup_iter,
'num_iters': self.num_iters,
'decay_style': self.decay_style,
'end_iter': self.end_iter,
'min_lr': self.min_lr
}
return state_dict
def _check_and_set(self, cls_value, sd_value, name):
"""Auxiliary function for checking the values in the checkpoint and
setting them."""
if self.override_lr_scheduler:
print_rank_0(' > overriding {} value to {}'.format(name, cls_value))
return cls_value
if not self.use_checkpoint_lr_scheduler:
assert cls_value == sd_value, 'AnnealingLR: class input value' \
'and checkpoint values for {} do not match'.format(name)
print_rank_0(' > using checkpoint value {} for {}'.format(sd_value,
name))
return sd_value
def load_state_dict(self, sd):
self.start_lr = self._check_and_set(self.start_lr, sd['start_lr'],
'learning rate')
self.min_lr = self._check_and_set(self.min_lr, sd['min_lr'],
'minimum learning rate')
self.warmup_iter = self._check_and_set(self.warmup_iter,
sd['warmup_iter'],
'warmup iterations')
self.end_iter = self._check_and_set(self.end_iter, sd['end_iter'],
'total number of iterations')
self.decay_style = self._check_and_set(self.decay_style,
sd['decay_style'],
'decay style')
self.num_iters = sd['num_iters']
self.step(self.num_iters)
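# Illustrative usage sketch (hypothetical values; assumes a torch optimizer `opt` and an
# initialized Megatron environment so that print_rank_0 works):
#   lr_scheduler = AnnealingLR(opt, start_lr=1.5e-4, warmup_iter=3200, total_iters=320000,
#                              decay_style='cosine', last_iter=0, min_lr=1e-5)
#   for iteration in range(320000):
#       ...  # run one training step, then
#       lr_scheduler.step()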
|
py | 1a449cc8227aa77d61666190007b681cb5d35b44 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Partially based on AboutMethods in the Ruby Koans
#
from runner.koan import *
def my_global_function(a,b):
return a + b
class AboutMethods(Koan):
def test_calling_a_global_function(self):
self.assertEqual(5, my_global_function(2,3))
# NOTE: Wrong number of arguments is not a SYNTAX error, but a
# runtime error.
def test_calling_functions_with_wrong_number_of_arguments(self):
try:
my_global_function()
except TypeError as exception:
msg = exception.args[0]
# Note, the text comparison works for Python 3.2
# It has changed in the past and may change in the future
self.assertRegex(msg,
r'my_global_function\(\) missing 2 required positional arguments')
try:
my_global_function(1, 2, 3)
except Exception as e:
msg = e.args[0]
# Note, watch out for parenthesis. They need slashes in front!
self.assertRegex(msg, r'my_global_function\(\) takes 2 positional arguments')
# ------------------------------------------------------------------
def pointless_method(self, a, b):
sum = a + b
def test_which_does_not_return_anything(self):
self.assertEqual(None, self.pointless_method(1, 2))
# Notice that methods accessed from class scope do not require
# you to pass the first "self" argument?
# ------------------------------------------------------------------
def method_with_defaults(self, a, b='default_value'):
return [a, b]
def test_calling_with_default_values(self):
self.assertEqual([1, 'default_value'], self.method_with_defaults(1))
self.assertEqual([1, 2], self.method_with_defaults(1, 2))
# ------------------------------------------------------------------
def method_with_var_args(self, *args):
return args
def test_calling_with_variable_arguments(self):
self.assertEqual((), self.method_with_var_args())
self.assertEqual(('one',), self.method_with_var_args('one'))
self.assertEqual(('one', 'two'), self.method_with_var_args('one', 'two'))
# ------------------------------------------------------------------
def function_with_the_same_name(self, a, b):
return a + b
def test_functions_without_self_arg_are_global_functions(self):
def function_with_the_same_name(a, b):
return a * b
self.assertEqual(12, function_with_the_same_name(3,4))
def test_calling_methods_in_same_class_with_explicit_receiver(self):
def function_with_the_same_name(a, b):
return a * b
self.assertEqual(7, self.function_with_the_same_name(3,4))
# ------------------------------------------------------------------
def another_method_with_the_same_name(self):
return 10
link_to_overlapped_method = another_method_with_the_same_name
def another_method_with_the_same_name(self):
return 42
def test_that_old_methods_are_hidden_by_redefinitions(self):
self.assertEqual(42, self.another_method_with_the_same_name())
def test_that_overlapped_method_is_still_there(self):
self.assertEqual(10, self.link_to_overlapped_method())
# ------------------------------------------------------------------
def empty_method(self):
pass
def test_methods_that_do_nothing_need_to_use_pass_as_a_filler(self):
self.assertEqual(None, self.empty_method())
def test_pass_does_nothing_at_all(self):
"You"
"shall"
"not"
pass
self.assertEqual(True, "Still got to this line" != None)
# ------------------------------------------------------------------
def one_line_method(self): return 'Madagascar'
def test_no_indentation_required_for_one_line_statement_bodies(self):
self.assertEqual("Madagascar", self.one_line_method())
# ------------------------------------------------------------------
def method_with_documentation(self):
"A string placed at the beginning of a function is used for documentation"
return "ok"
def test_the_documentation_can_be_viewed_with_the_doc_method(self):
self.assertRegex(self.method_with_documentation.__doc__, "A string placed at the beginning of a function is used for documentation")
# ------------------------------------------------------------------
class Dog:
def name(self):
return "Fido"
def _tail(self):
# Prefixing a method with an underscore implies private scope
return "wagging"
def __password(self):
return 'password' # Genius!
def test_calling_methods_in_other_objects(self):
rover = self.Dog()
self.assertEqual('Fido', rover.name())
def test_private_access_is_implied_but_not_enforced(self):
rover = self.Dog()
# This is a little rude, but legal
self.assertEqual('wagging', rover._tail())
def test_attributes_with_double_underscore_prefixes_are_subject_to_name_mangling(self):
rover = self.Dog()
with self.assertRaises(NameError): password = __password()
        # But the mangled name is still accessible!
self.assertEqual('password', rover._Dog__password())
# Name mangling exists to avoid name clash issues when subclassing.
# It is not for providing effective access protection
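        # Aside (added for illustration, not part of the original koan): the
        # mangled attribute shows up under its mangled name in introspection,
        # e.g. '_Dog__password' appears in dir(self.Dog()) while '__password'
        # does not.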
|
py | 1a449cfc41d508df372632b96fb8f9f8ecc55c0c |
import random
import numpy as np
import matplotlib.pyplot as plt
def plus_minus_one_generator(): # function that returns +1 or -1, with probability
    x = [-1,1,-1,1,-1,1] # of 50% for each.
return random.choice(x)
def main():
i =0
u = []
while(i < 50):
        u.append(plus_minus_one_generator()) ## builds the requested u
i+=1
print(u, end = "\n\n")
plt.ion()
    plt.plot(u) ## plot u
plt.show()
c = [1,0.7,-0.3]
h = [0.9,-0.5,0.5,-0.4,0.3,-0.3,0.2,-0.1]
y = np.convolve(c,u) ## y = c*u
print(y, end = "\n\n")
plt.ion()
    plt.plot(y) ## plot y
plt.show()
z = np.convolve(h,y) ## z = h*y
print(z)
plt.ion()
    plt.plot(z) ## plot z
plt.show()
    plt.xlabel('U(azul), Y(verde) e Z(laranja)') # configuring the appropriate
                                                 # legend labels
    ##e1 = [1,0,0,0,0,0,0,0] #test of the property e1*u = z,
    ##u = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] # approximately
##print(np.convolve(e1,u))
return
main()
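# Illustrative note (an addition, not from the original script): convolving a
# sequence with the unit impulse e1 = [1, 0, 0, ...] reproduces that sequence,
# up to the trailing zeros np.convolve appends as padding, e.g.:
# e1 = np.zeros(8); e1[0] = 1
# u_test = np.arange(1, 16)
# np.allclose(np.convolve(e1, u_test)[:len(u_test)], u_test)  # -> True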
|
py | 1a449d7879545a058594ec6fb9e6c20c3735f0bf | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
from openvino.tools.mo.front.common.partial_infer.eltwise import eltwise_infer
from openvino.tools.mo.graph.graph import Graph, Node
from openvino.tools.mo.ops.clamp import AttributedClamp
from openvino.tools.mo.ops.op import Op
activation_ops = ['Sigmoid', 'Tanh', 'ReLU6', 'Exp', 'Elu', 'LogicalNot', 'Floor', 'Ceiling']
class Activation(Op):
enabled = False
operation = None
op = None
version = 'opset1'
def __init__(self, graph: Graph, attrs: dict):
super().__init__(graph, {
'type': self.op,
'op': self.op,
'operation': self.operation,
'version': self.version,
'infer': self.infer,
'in_ports_count': 1,
'out_ports_count': 1,
}, attrs)
@classmethod
def infer(cls, node: Node):
return eltwise_infer(node, node.operation)
class Sigmoid(Activation):
op = 'Sigmoid'
operation = staticmethod(lambda x: 1 / (1 + np.ma.exp(-x)))
class Sin(Activation):
op = 'Sin'
operation = staticmethod(lambda x: np.ma.sin(x))
class Sinh(Activation):
op = 'Sinh'
operation = staticmethod(lambda x: np.ma.sinh(x))
class Asin(Activation):
op = 'Asin'
operation = staticmethod(lambda x: np.ma.arcsin(x))
class Asinh(Activation):
op = 'Asinh'
version = 'opset4'
operation = staticmethod(lambda x: np.arcsinh(x))
class Cos(Activation):
op = 'Cos'
operation = staticmethod(lambda x: np.ma.cos(x))
class Cosh(Activation):
op = 'Cosh'
operation = staticmethod(lambda x: np.ma.cosh(x))
class Acos(Activation):
op = 'Acos'
operation = staticmethod(lambda x: np.ma.arccos(x))
class Acosh(Activation):
op = 'Acosh'
version = 'opset4'
operation = staticmethod(lambda x: np.ma.arccosh(x))
class Tan(Activation):
op = 'Tan'
operation = staticmethod(lambda x: np.ma.tan(x))
class Tanh(Activation):
op = 'Tanh'
operation = staticmethod(lambda x: np.ma.tanh(x))
class Atan(Activation):
op = 'Atan'
operation = staticmethod(lambda x: np.ma.arctan(x))
class Atanh(Activation):
op = 'Atanh'
version = 'opset4'
operation = staticmethod(lambda x: np.ma.arctanh(x))
class ReLU6(AttributedClamp):
def __init__(self, graph: Graph, attrs: dict):
relu6_attrs = {'min': 0, 'max': 6}
relu6_attrs.update(attrs)
super().__init__(graph, relu6_attrs)
class Exp(Activation):
op = 'Exp'
operation = staticmethod(lambda x: np.ma.exp(x))
class ReLU(Activation):
op = 'ReLU'
operation = staticmethod(lambda x: np.ma.maximum(0, x))
class Erf(Activation):
op = 'Erf'
operation = None
class Floor(Activation):
op = 'Floor'
operation = staticmethod(lambda x: np.ma.floor(x))
class Ceiling(Activation):
op = 'Ceiling'
operation = staticmethod(lambda x: np.ma.ceil(x))
class Abs(Activation):
op = 'Abs'
operation = staticmethod(lambda x: np.ma.abs(x))
class Sign(Activation):
op = 'Sign'
operation = staticmethod(lambda x: np.sign(x))
class Elu(Activation):
op = 'Elu'
def __init__(self, graph: Graph, attrs):
elu_attrs = {'alpha': 1.0}
elu_attrs.update(attrs)
super().__init__(graph, elu_attrs)
@staticmethod
def elu(values: np.ndarray, alpha: float):
values = values.astype(float)
for index, x in np.ndenumerate(values):
if x < 0:
values[index] = alpha * (np.ma.exp(x) - 1)
return values
@classmethod
def infer(cls, node: Node):
return eltwise_infer(node, lambda x, alpha: Elu.elu(x, alpha), alpha=node.alpha)
def backend_attrs(self):
return ['alpha']
class ThresholdedRelu(Activation):
# The operation will be decomposed to primitive operations
op = 'ThresholdedRelu'
def __init__(self, graph: Graph, attrs):
trelu_attrs = {'alpha': 1.0, 'type': None}
trelu_attrs.update(attrs)
super().__init__(graph, trelu_attrs)
@staticmethod
def thresholded_relu(values: np.ndarray, alpha: float):
values = values.astype(float)
for index, x in np.ndenumerate(values):
values[index] = values[index] * (x > alpha)
return values
@classmethod
def infer(cls, node: Node):
return eltwise_infer(node, lambda x, alpha: ThresholdedRelu.thresholded_relu(x, alpha), alpha=node.alpha)
class LeakyReLU(Op):
op = 'LeakyReLU'
def __init__(self, graph: Graph, attrs: dict):
super().__init__(graph, {
'type': self.op,
'op': self.op,
'infer': self.infer,
'in_ports_count': 1,
'out_ports_count': 1,
}, attrs)
@staticmethod
def leaky_relu(values: np.ndarray, negative_slope: float):
for index, x in np.ndenumerate(values):
if x < 0:
values[index] = negative_slope * x
return values
@staticmethod
def infer(node: Node):
return eltwise_infer(node, lambda x, negative_slope: LeakyReLU.leaky_relu(x, negative_slope),
negative_slope=node.negative_slope)
def supported_attrs(self):
return ['negative_slope']
class LogicalNot(Activation):
op = 'LogicalNot'
enabled = False
def __init__(self, graph: Graph, attrs: dict):
not_attrs = {'type_infer': self.type_infer}
not_attrs.update(attrs)
super().__init__(graph, not_attrs)
operation = staticmethod(lambda x: np.ma.logical_not(x))
@staticmethod
def type_infer(node: Node):
node.out_port(0).set_data_type(np.bool)
class Log(Activation):
op = 'Log'
operation = staticmethod(lambda x: np.ma.log(x))
class SoftPlus(Activation):
op = 'SoftPlus'
version = 'opset4'
operation = staticmethod(lambda x: np.ma.log(np.ma.exp(x) + 1.0))
class Mish(Activation):
op = 'Mish'
version = 'opset4'
operation = staticmethod(lambda x: x * np.ma.tanh(np.ma.log(np.ma.exp(x) + 1.0)))
class HSwish(Activation):
op = 'HSwish'
version = 'opset4'
operation = staticmethod(lambda x: x * np.ma.minimum(np.ma.maximum(x + 3.0, 0.0), 6.0) / 6.0)
class HSigmoid(Activation):
op = 'HSigmoid'
version = 'opset5'
operation = staticmethod(lambda x: np.ma.minimum(np.ma.maximum(x + 3.0, 0.0), 6.0) / 6.0)
class Swish(Op):
op = 'Swish'
def __init__(self, graph: Graph, attrs: dict):
mandatory_props = {
'op': self.op,
'type': self.op,
'version': 'opset4',
'infer': self.infer,
'in_ports_count': 2,
'out_ports_count': 1,
}
super().__init__(graph, mandatory_props, attrs)
@staticmethod
def infer(node: Node):
node_name = node.soft_get('name', node.id)
node.out_port(0).data.set_shape(node.in_port(0).data.get_shape())
beta = 1.0
if node.is_in_port_connected(1):
beta = node.in_port(1).data.get_value()
if beta is not None:
assert beta.ndim == 0, 'The "beta" value for node {} must be a scalar'.format(node_name)
beta = beta.item()
input_value = node.in_port(1).data.get_value()
if input_value is not None and beta is not None:
node.out_port(0).data.set_value(input_value / (1.0 + np.exp(-input_value * beta)))
|
py | 1a449e8d07ecda5d5764e4f03596c174b2bfea9c | """The Rules and Setup for Chess."""
# Starting board for new game
starting_board = [
["R", "N", "B", "Q", "K", "B", "N", "R"],
["P", "P", "P", "P", "P", "P", "P", "P"],
["", "", "", "", "", "", "", ""],
["", "", "", "", "", "", "", ""],
["", "", "", "", "", "", "", ""],
["", "", "", "", "", "", "", ""],
["p", "p", "p", "p", "p", "p", "p", "p"],
["r", "n", "b", "q", "k", "b", "n", "r"],
]
# Group pieces by colour
white_pieces = ["P", "R", "N", "B", "Q", "K"]
black_pieces = ["p", "r", "n", "b", "q", "k"]
# Piece dictionary
piece_dictionary = {
"P": "Pawn",
"R": "Rook",
"N": "Knight",
"B": "Bishop",
"Q": "Queen",
"K": "King",
"p": "Pawn",
"r": "Rook",
"n": "Knight",
"b": "Bishop",
"q": "Queen",
"k": "King",
}
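# Illustrative helper (an addition, not part of the original rules module):
# combine the tables above to describe a piece symbol. Assumes the symbol is
# one of the keys defined in piece_dictionary.
def describe_piece(symbol):
    """Return a (colour, name) pair, e.g. ("White", "Knight") for "N"."""
    colour = "White" if symbol in white_pieces else "Black"
    return colour, piece_dictionary[symbol]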
|
py | 1a44a053e31cc772ed3c8aa4840b07e533384340 | import numpy as np
import os
import tensorflow as tf
from PIL import Image
import utility as Utility
from make_mnist_datasets import Make_mnist_datasets
#global variables
batchsize = 100
data_size = 6000
noise_num = 100
class_num = 10
n_epoch = 1000
l2_norm_lambda = 0.001
alpha_P = 0.5
alpha_pseudo = 0.1
alpha_apply_thr = 200
keep_prob_rate = 0.5
mnist_file_name = ["mnist_train_img.npy", "mnist_train_label.npy", "mnist_test_img.npy", "mnist_test_label.npy"]
board_dir_name = "data27" #directory for tensorboard
seed = 1234
np.random.seed(seed=seed)
# adam_b1_d = 0.5
# adam_b1_c = 0.5
# adam_b1_g = 0.5
out_image_dir = './out_images_tripleGAN' #output image file
out_model_dir = './out_models_tripleGAN' #output model file
try:
os.mkdir(out_image_dir)
os.mkdir(out_model_dir)
os.mkdir('./out_images_Debug') #for debug
except:
# print("mkdir error")
pass
make_mnist = Make_mnist_datasets(mnist_file_name, alpha_P)
def leaky_relu(x, alpha):
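    # (Added note) Algebraically this is the standard leaky ReLU:
    # relu(x) - alpha * relu(-x) equals x for x >= 0 and alpha * x for x < 0.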
return tf.nn.relu(x) - alpha * tf.nn.relu(-x)
def gaussian_noise(input, std): #used at discriminator
noise = tf.random_normal(shape=tf.shape(input), mean=0.0, stddev=std, dtype=tf.float32, seed=seed)
return input + noise
#generator------------------------------------------------------------------
def generator(y, z, reuse=False):
with tf.variable_scope('generator', reuse=reuse):
wg1 = tf.get_variable('wd1', [class_num + noise_num, 500], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bg1 = tf.get_variable('gb1', [500], initializer=tf.constant_initializer(0.0))
scaleg2 = tf.get_variable('sg2', [500], initializer=tf.constant_initializer(1.0))
betag2 = tf.get_variable('beg2', [500], initializer=tf.constant_initializer(0.0))
wg3 = tf.get_variable('wg3', [500, 500], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bg3 = tf.get_variable('bg3', [500], initializer=tf.constant_initializer(0.0))
scaleg4 = tf.get_variable('sg4', [500], initializer=tf.constant_initializer(1.0))
betag4 = tf.get_variable('beg4', [500], initializer=tf.constant_initializer(0.0))
wg5 = tf.get_variable('wg5', [500, 784], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bg5 = tf.get_variable('bg5', [784], initializer=tf.constant_initializer(0.0))
#concat label and noise
concat0 = tf.concat([y, z], axis=1, name='G_concat0')
#layer1 linear
fc1 = tf.matmul(concat0, wg1, name='G_matmul1') + bg1
#softplus function
sp1 = tf.log(tf.clip_by_value(1 + tf.exp(fc1), 1e-10, 1e+30), name='G_softmax1')
#layer2 batch normalization
batch_mean2, batch_var2 = tf.nn.moments(sp1, [0])
bn2 = tf.nn.batch_normalization(sp1, batch_mean2, batch_var2, betag2, scaleg2 , 0.0001, name='G_BN2')
#layer3 linear
fc3 = tf.matmul(bn2, wg3, name='G_matmul3') + bg3
#softplus function
sp3 = tf.log(tf.clip_by_value(1 + tf.exp(fc3), 1e-10, 1e+30), name='G_softmax3')
#layer4 batch normalization
batch_mean4, batch_var4 = tf.nn.moments(sp3, [0])
bn4 = tf.nn.batch_normalization(sp3, batch_mean4, batch_var4, betag4, scaleg4 , 0.0001, name='G_BN4')
#layer5 linear
fc5 = tf.matmul(bn4, wg5, name='G_matmul5') + bg5
#sigmoid function
sig5 = tf.nn.sigmoid(fc5, name='G_sigmoid5')
#reshape to 28x28 image
x_gen = tf.reshape(sig5, [-1, 28, 28, 1])
return x_gen, y
#discriminator-----------------------------------------------------------------
def discriminator(x, y, reuse=False):
with tf.variable_scope('discriminator', reuse=reuse):
wd1 = tf.get_variable('wd1', [794, 1000], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bd1 = tf.get_variable('bd1', [1000], initializer=tf.constant_initializer(0.0))
wd2 = tf.get_variable('wd2', [1000, 500], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bd2 = tf.get_variable('bd2', [500], initializer=tf.constant_initializer(0.0))
wd3 = tf.get_variable('wd3', [500, 250], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bd3 = tf.get_variable('bd3', [250], initializer=tf.constant_initializer(0.0))
wd4 = tf.get_variable('wd4', [250, 250], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bd4 = tf.get_variable('bd4', [250], initializer=tf.constant_initializer(0.0))
wd5 = tf.get_variable('wd5', [250, 250], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bd5 = tf.get_variable('bd5', [250], initializer=tf.constant_initializer(0.0))
wd6 = tf.get_variable('wd6', [250, 1], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bd6 = tf.get_variable('bd6', [1], initializer=tf.constant_initializer(0.0))
x_reshape = tf.reshape(x, [-1, 28 * 28])
# concat image and label
concat0 = tf.concat([x_reshape, y], axis=1, name='D_concat0')
# layer1 linear
#gaussian noise
gn1 = gaussian_noise(concat0, 0.3)
#fully-connected
fc1 = tf.matmul(gn1, wd1, name='D_matmul1') + bd1
# leakyReLU function
lr1 = leaky_relu(fc1, alpha=0.2)
# layer2 linear
#gaussian noise
gn2 = gaussian_noise(lr1, 0.5)
#fully-connected
fc2 = tf.matmul(gn2, wd2, name='D_matmul2') + bd2
# leakyReLU function
lr2 = leaky_relu(fc2, alpha=0.2)
# layer3 linear
#gaussian noise
gn3 = gaussian_noise(lr2, 0.5)
#fully-connected
fc3 = tf.matmul(gn3, wd3, name='D_matmul3') + bd3
# leakyReLU function
lr3 = leaky_relu(fc3, alpha=0.2)
# layer4 linear
#gaussian noise
gn4 = gaussian_noise(lr3, 0.5)
#fully-connected
fc4 = tf.matmul(gn4, wd4, name='D_matmul4') + bd4
# leakyReLU function
lr4 = leaky_relu(fc4, alpha=0.2)
# layer5 linear
#gaussian noise
gn5 = gaussian_noise(lr4, 0.5)
#fully-connected
fc5 = tf.matmul(gn5, wd5, name='D_matmul5') + bd5
# leakyReLU function
lr5 = leaky_relu(fc5, alpha=0.2)
# layer6 linear
#gaussian noise
gn6 = gaussian_noise(lr5, 0.5)
#fully-connected
fc6 = tf.matmul(gn6, wd6, name='D_matmul6') + bd6
        # sigmoid function
out_dis = tf.nn.sigmoid(fc6, name='D_sigmoid')
norm_L2 = tf.nn.l2_loss(wd1) + tf.nn.l2_loss(wd2) + tf.nn.l2_loss(wd3) + tf.nn.l2_loss(wd4) + tf.nn.l2_loss(wd5) \
+ tf.nn.l2_loss(wd6)
return out_dis, norm_L2
#classifier-----------------------------------------------------------------
def classifier(xc, keep_prob, reuse=False):
with tf.variable_scope('classifier', reuse=reuse):
wc1 = tf.get_variable('wc1', [5, 5, 1, 32], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bc1 = tf.get_variable('bc1', [32], initializer=tf.constant_initializer(0.0))
wc2 = tf.get_variable('wc2', [3, 3, 32, 64], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bc2 = tf.get_variable('bc2', [64], initializer=tf.constant_initializer(0.0))
wc3 = tf.get_variable('wc3', [3, 3, 64, 64], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bc3 = tf.get_variable('bc3', [64], initializer=tf.constant_initializer(0.0))
wc4 = tf.get_variable('wc4', [3, 3, 64, 128], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bc4 = tf.get_variable('bc4', [128], initializer=tf.constant_initializer(0.0))
wc5 = tf.get_variable('wc5', [3, 3, 128, 128], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bc5 = tf.get_variable('bc5', [128], initializer=tf.constant_initializer(0.0))
wc6 = tf.get_variable('wc6', [128, 10], initializer=tf.random_normal_initializer
(mean=0.0, stddev=0.05, seed=seed), dtype=tf.float32)
bc6 = tf.get_variable('bc6', [10], initializer=tf.constant_initializer(0.0))
#layer1 convolution
conv1 = tf.nn.conv2d(xc, wc1, strides=[1, 1, 1, 1], padding="SAME", name='C_conv1') + bc1
# relu function
conv1_relu = tf.nn.relu(conv1)
#max pooling
conv1_pool = tf.nn.max_pool(conv1_relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME")
#drop out
conv1_drop = tf.nn.dropout(conv1_pool, keep_prob)
#layer2 convolution
conv2 = tf.nn.conv2d(conv1_drop, wc2, strides=[1, 1, 1, 1], padding="SAME", name='C_conv2') + bc2
# relu function
conv2_relu = tf.nn.relu(conv2)
#layer3 convolution
conv3 = tf.nn.conv2d(conv2_relu, wc3, strides=[1, 1, 1, 1], padding="SAME", name='C_conv3') + bc3
# relu function
conv3_relu = tf.nn.relu(conv3)
#max pooling
conv3_pool = tf.nn.max_pool(conv3_relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME")
#drop out
conv3_drop = tf.nn.dropout(conv3_pool, keep_prob)
#layer4 convolution
conv4 = tf.nn.conv2d(conv3_drop, wc4, strides=[1, 1, 1, 1], padding="SAME", name='C_conv4') + bc4
# relu function
conv4_relu = tf.nn.relu(conv4)
#layer5 convolution
conv5 = tf.nn.conv2d(conv4_relu, wc5, strides=[1, 1, 1, 1], padding="SAME", name='C_conv5') + bc5
# relu function
conv5_relu = tf.nn.relu(conv5)
# conv6 = tf.nn.conv2d(conv5_relu, wc6, strides=[1, 1, 1, 1], padding="SAME") + bc6
# global average pooling.... reduce mean
ap5 = tf.reduce_mean(conv5_relu, axis=[1, 2], name='C_global_average')
        #layer6 fully-connected
fc6 = tf.matmul(ap5, wc6, name='C_matmul6') + bc6
#softmax
yc = tf.nn.softmax(fc6, name='C_softmax')
# tf.summary.histogram("Cconv1", conv1)
# tf.summary.histogram("Cconv2", conv2)
# tf.summary.histogram("Cconv3", conv3)
# tf.summary.histogram("Cconv4", conv4)
# tf.summary.histogram("Cconv5", conv5)
# tf.summary.histogram("Cap5", ap5)
tf.summary.histogram("Cfc6", fc6)
tf.summary.histogram("yc", yc)
return xc, yc
# placeholder
yg_ = tf.placeholder(tf.float32, [None, class_num], name='yg_') #label to generator
z_ = tf.placeholder(tf.float32, [None, noise_num], name='z_') #noise to generator
xc1_ = tf.placeholder(tf.float32, [None, 28, 28, 1], name='xc1_') #labeled image to classifier
xc2_ = tf.placeholder(tf.float32, [None, 28, 28, 1], name='xc2_') #unlabeled image to classifier
yd_ = tf.placeholder(tf.float32, [None, class_num], name='yd_') #label to discriminator
xd_ = tf.placeholder(tf.float32, [None, 28, 28, 1], name='xd_') #labeled image to discriminator
d_dis_g_ = tf.placeholder(tf.float32, [None, 1], name='d_dis_g_') #target of discriminator related to generator
d_dis_r_ = tf.placeholder(tf.float32, [None, 1], name='d_dis_r_') #target of discriminator related to real image
d_dis_c_ = tf.placeholder(tf.float32, [None, 1], name='d_dis_c_') #target of discriminator related to classifier
yc1_ = tf.placeholder(tf.float32, [None, class_num], name='yc1_') #target label of classifier related to real image
alpha_p_flag_ = tf.placeholder(tf.float32, name='alpha_p_flag_') #(0,1) apply alpha pseudo or not
keep_prob_ = tf.placeholder(tf.float32, name='keep_prob_') #dropout rate
# stream around generator
x_gen, y_gen = generator(yg_, z_, reuse=False)
# stream around classifier
x_cla_0, y_cla_0 = classifier(x_gen, keep_prob_, reuse=False) # from generator
x_cla_1, y_cla_1 = classifier(xc1_, keep_prob_, reuse=True) # real image labeled
x_cla_2, y_cla_2 = classifier(xc2_, keep_prob_, reuse=True) # real image unlabeled
# loss_RP = - tf.reduce_mean(y_gen * tf.log(y_cla_0)) #loss in case generated image
# loss_RL = - tf.reduce_mean(yc1_ * tf.log(y_cla_1)) #loss in case real image
loss_RP = - tf.reduce_mean(y_gen * tf.log(tf.clip_by_value(y_cla_0, 1e-10, 1e+30)), name='Loss_RP') #loss in case generated image
loss_RL = - tf.reduce_mean(yc1_ * tf.log(tf.clip_by_value(y_cla_1, 1e-10, 1e+30)), name='Loss_RL') #loss in case real image
#stream around discriminator
out_dis_g, normL2_1 = discriminator(x_gen, y_gen, reuse=False) #from generator
out_dis_r, normL2_2 = discriminator(xd_, yd_, reuse=True) #real image and label
out_dis_c, normL2_3 = discriminator(x_cla_2, y_cla_2, reuse=True) #from classifier
loss_dis_g = tf.reduce_mean(tf.square(out_dis_g - d_dis_g_), name='Loss_dis_gen') #loss related to generator
loss_dis_r = tf.reduce_mean(tf.square(out_dis_r - d_dis_r_), name='Loss_dis_rea') #loss related to real image
loss_dis_c = tf.reduce_mean(tf.square(out_dis_c - d_dis_c_), name='Loss_dis_cla') #loss related to classifier
norm_L2 = normL2_1 + normL2_2 + normL2_3
#total loss of discriminator
loss_dis_total = loss_dis_r + alpha_P * loss_dis_c + (1 - alpha_P) * loss_dis_g + l2_norm_lambda * norm_L2
#total loss of classifier
loss_cla_total = alpha_P * loss_dis_c + loss_RL + alpha_p_flag_ * alpha_pseudo * loss_RP
#total loss of generator
loss_gen_total = (1 - alpha_P) * loss_dis_g
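# (Added clarification) The discriminator mixes its three sample sources with
# the Triple-GAN weights used above: real pairs count with weight 1,
# classifier-labelled samples with alpha_P, and generator samples with
# (1 - alpha_P). Because each term is a mean squared difference against a 0/1
# target, these are least-squares (LSGAN-style) adversarial losses rather than
# the usual cross-entropy form.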
# tf.summary.scalar('loss_dis_total', loss_dis_total)
# tf.summary.histogram("wc1", wc1)
# # tf.summary.histogram("wc2", wc2)
# # tf.summary.histogram("wc3", wc3)
# # tf.summary.histogram("wc4", wc4)
# # tf.summary.histogram("wc5", wc5)
# # tf.summary.histogram("wc6", wc6)
# # tf.summary.histogram("bc1", bc1)
# # tf.summary.histogram("bc2", bc2)
# # tf.summary.histogram("bc3", bc3)
# # tf.summary.histogram("bc4", bc4)
# # tf.summary.histogram("bc5", bc5)
# tf.summary.histogram("bc6", bc6)
tf.summary.scalar('loss_cla_total', loss_cla_total)
tf.summary.scalar('loss_dis_c', loss_dis_c)
tf.summary.scalar('loss_RL', loss_RL)
tf.summary.scalar('loss_RP', loss_RP)
# tf.summary.scalar('loss_gen_total', loss_gen_total)
merged = tf.summary.merge_all()
# t_vars = tf.trainable_variables()
g_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="generator")
d_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="discriminator")
c_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="classifier")
# d_vars = [var for var in t_vars if 'd' in var.name]
# g_vars = [var for var in t_vars if 'g' in var.name]
# c_vars = [var for var in t_vars if 'c' in var.name]
train_dis = tf.train.AdamOptimizer(learning_rate=0.001, beta1=0.5).minimize(loss_dis_total, var_list=d_vars
# var_list=[wd1, wd2, wd3, wd4, wd5, wd6, bd1, bd2, bd3, bd4, bd5, bd6]
, name='Adam_dis')
train_gen = tf.train.AdamOptimizer(learning_rate=0.001, beta1=0.5).minimize(loss_gen_total, var_list=g_vars
# var_list=[wg1, wg3, wg5, bg1, bg3, bg5, betag2, scaleg2, betag4, scaleg4]
, name='Adam_gen')
train_cla = tf.train.AdamOptimizer(learning_rate=0.001, beta1=0.5).minimize(loss_cla_total, var_list=c_vars
# var_list=[wc1, wc2, wc3, wc4, wc5, wc6, bc1, bc2, bc3, bc4, bc5, bc6]
, name='Adam_cla')
# train_cla = tf.train.AdamOptimizer(learning_rate=0.001, beta1=0.5).minimize(loss_cla_total,
# var_list=[wc1, wc2, wc3, wc4, wc5, wc6, bc1, bc2, bc3, bc4, bc5, bc6,
# wg1, wg3, wg5, bg1, bg3, bg5, betag2, scaleg2, betag4, scaleg4]
# , name='Adam_cla')
sess = tf.Session()
sess.run(tf.global_variables_initializer())
summary_writer = tf.summary.FileWriter(board_dir_name, sess.graph)
#training loop
for epoch in range(0, n_epoch):
sum_loss_gen = np.float32(0)
sum_loss_dis = np.float32(0)
sum_loss_dis_r = np.float32(0)
sum_loss_dis_c0 = np.float32(0)
sum_loss_dis_g0 = np.float32(0)
sum_loss_cla = np.float32(0)
sum_accu_cla = np.float32(0)
sum_loss_dis_c1 = np.float32(0)
sum_loss_RL = np.float32(0)
sum_loss_RP = np.float32(0)
len_img_real = make_mnist.make_data_for_1_epoch()
#debug
# print("make_mnist.img_real_1epoch.shape = ", make_mnist.img_real_1epoch.shape)
# print("make_mnist.img_cla_1epoch.shape = ", make_mnist.img_cla_1epoch.shape)
for i in range(0, len_img_real, batchsize):
img_real_batch, img_cla_batch, label_real_batch = make_mnist.get_data_for_1_batch(i, batchsize, alpha_P)
#debug
# if epoch == 0 and i == 0:
# make_mnist.print_img_and_label(img_real_batch, label_real_batch, 7)
#cal each batchsize
len_real_batch = len(img_real_batch)
len_cla_batch = len(img_cla_batch)
len_gen_batch = int(len(img_real_batch) * alpha_P)
z = np.random.uniform(0, 1, len_gen_batch * noise_num)
z = z.reshape(-1, noise_num).astype(np.float32)
label_gen_int = np.random.randint(0, class_num, len_gen_batch)
label_gen = make_mnist.convert_to_10class_(label_gen_int)
d_dis_g_1_ = np.array([1.0], dtype=np.float32).reshape(1, 1)
d_dis_g_1 = np.tile(d_dis_g_1_, (len_gen_batch, 1))
d_dis_g_0_ = np.array([0.0], dtype=np.float32).reshape(1, 1)
d_dis_g_0 = np.tile(d_dis_g_0_, (len_gen_batch, 1))
d_dis_r_1 = np.array([1.0], dtype=np.float32).reshape(1, 1)
d_dis_r = np.tile(d_dis_r_1, (len_real_batch, 1))
d_dis_c_1_ = np.array([1.0], dtype=np.float32).reshape(1, 1)
d_dis_c_1 = np.tile(d_dis_c_1_, (len_cla_batch, 1))
d_dis_c_0_ = np.array([0.0], dtype=np.float32).reshape(1, 1)
d_dis_c_0 = np.tile(d_dis_c_0_, (len_cla_batch, 1))
#debug
# d_vars_ = sess.run(d_vars, feed_dict={z_:z, yg_:label_gen, yd_: label_real_batch, xd_: img_real_batch,
# xc2_: img_cla_batch, d_dis_g_: d_dis_g_0, d_dis_r_: d_dis_r_1,
# d_dis_c_:d_dis_c_0, keep_prob_:keep_prob_rate})
#
# print("d_vars =", d_vars)
#train discriminator
sess.run(train_dis, feed_dict={z_:z, yg_:label_gen, yd_: label_real_batch, xd_: img_real_batch,
xc2_: img_cla_batch, d_dis_g_: d_dis_g_0, d_dis_r_: d_dis_r_1,
d_dis_c_:d_dis_c_0, keep_prob_:keep_prob_rate})
#train classifier
if epoch > alpha_apply_thr:
sess.run(train_cla, feed_dict={z_:z, yg_:label_gen, xc1_: img_real_batch, xc2_: img_cla_batch,
yc1_: label_real_batch, d_dis_c_: d_dis_c_1,keep_prob_:keep_prob_rate,
alpha_p_flag_:1.0})
else:
sess.run(train_cla, feed_dict={z_: z, yg_: label_gen, xc1_: img_real_batch, xc2_: img_cla_batch,
yc1_: label_real_batch, d_dis_c_: d_dis_c_1, keep_prob_: keep_prob_rate,
alpha_p_flag_: 0.0})
#train generator
sess.run(train_gen, feed_dict={z_: z, yg_: label_gen, d_dis_g_: d_dis_g_1})
loss_gen_total_ = sess.run(loss_gen_total, feed_dict={z_:z, yg_:label_gen, d_dis_g_: d_dis_g_1})
loss_dis_total_, loss_dis_r_, loss_dis_g_0, loss_dis_c_0 = sess.run([loss_dis_total, loss_dis_r, loss_dis_g, loss_dis_c],
feed_dict={z_:z, yg_:label_gen, yd_: label_real_batch,
xd_: img_real_batch, xc2_: img_cla_batch, d_dis_g_: d_dis_g_0,
d_dis_r_: d_dis_r_1, d_dis_c_:d_dis_c_0, keep_prob_:1.0})
loss_cla_total_, loss_dis_c_1, loss_RL_, loss_RP_ = sess.run([loss_cla_total, loss_dis_c, loss_RL, loss_RP],
feed_dict={z_:z, yg_:label_gen, xc1_: img_real_batch, xc2_: img_cla_batch,
yc1_: label_real_batch, d_dis_c_: d_dis_c_1, keep_prob_:1.0, alpha_p_flag_: 0.0})
#for tensorboard
merged_ = sess.run(merged, feed_dict={z_:z, yg_:label_gen, xc1_: img_real_batch,
xc2_: img_cla_batch,yc1_: label_real_batch,
d_dis_c_: d_dis_c_1, keep_prob_:1.0, alpha_p_flag_: 0.0})
summary_writer.add_summary(merged_, epoch)
sum_loss_gen += loss_gen_total_
sum_loss_dis += loss_dis_total_
sum_loss_dis_r += loss_dis_r_
sum_loss_dis_c0 += loss_dis_c_0
sum_loss_dis_g0 += loss_dis_g_0
sum_loss_cla += loss_cla_total_
sum_loss_dis_c1 += loss_dis_c_1
sum_loss_RL += loss_RL_
sum_loss_RP += loss_RP_
print("-----------------------------------------------------")
print("epoch =", epoch , ", Total Loss of G =", sum_loss_gen, ", Total Loss of D =", sum_loss_dis,
", Total Loss of C =", sum_loss_cla)
print("Discriminator: Loss Real =", sum_loss_dis_r, ", Loss C =", sum_loss_dis_c0, ", Loss D =", sum_loss_dis_g0,)
print("Classifier: Loss adv =", sum_loss_dis_c1, ", Loss RL =", sum_loss_RL, ", Loss RP =", sum_loss_RP,)
if epoch % 10 == 0:
sample_num_h = 10
sample_num = sample_num_h ** 2
z_test = np.random.uniform(0, 1, sample_num_h * noise_num).reshape(1, sample_num_h, noise_num)
z_test = np.tile(z_test, (sample_num_h, 1, 1))
        z_test = z_test.reshape(-1, noise_num).astype(np.float32)
label_gen_int = np.arange(10).reshape(10, 1).astype(np.float32)
label_gen_int = np.tile(label_gen_int, (1, 10)).reshape(sample_num)
label_gen_test = make_mnist.convert_to_10class_(label_gen_int)
gen_images = sess.run(x_gen, feed_dict={z_:z_test, yg_:label_gen_test})
Utility.make_output_img(gen_images, sample_num_h, out_image_dir, epoch)
# z_only_1 = np.random.uniform(0, 1, noise_num).reshape(1, noise_num)
# label_gen_only_1 = np.array([4]).reshape(1, 1).astype(np.float32)
# label_gen_only_1_class = make_mnist.convert_to_10class_(label_gen_only_1)
# gen_image_1 = sess.run(x_gen, feed_dict={z_:z_only_1, yg_:label_gen_only_1_class})
#
# Utility.make_1_img(gen_image_1)
|
py | 1a44a0f466a9f2b92470190b4569c094e81285e8 | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from manager.vmtools import *
from models import *
# Create your views here.
@login_required(login_url="login/")
def home(request):
dash = Dashboard(manager_count=VMTools.manager_count(),host_count=VMTools.host_count(),vm_count=VMTools.vm_count(),total_backup=VMTools.total_backup_size(),success_rate=VMTools.backup_success_rate())
return render(request,"dashboard.html",{'dash': dash })
|
py | 1a44a22e0076b3d0659a4ac79344abca7f5fc6da | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network Hosts are responsible for allocating IPs and setting up network.
There are multiple backend drivers that handle specific types of networking
topologies. All of the network commands are issued to a subclass of
:class:`NetworkManager`.
"""
import collections
import datetime
import functools
import math
import re
import uuid
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_service import periodic_task
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import netutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from nova import context
from nova import exception
from nova.i18n import _, _LI, _LE, _LW
from nova import ipv6
from nova import manager
from nova.network import api as network_api
from nova.network import driver
from nova.network import floating_ips
from nova.network import model as network_model
from nova.network import rpcapi as network_rpcapi
from nova.network.security_group import openstack_driver
from nova import objects
from nova.objects import base as obj_base
from nova.objects import quotas as quotas_obj
from nova import servicegroup
from nova import utils
LOG = logging.getLogger(__name__)
network_opts = [
cfg.StrOpt('flat_network_bridge',
help='Bridge for simple network instances'),
cfg.StrOpt('flat_network_dns',
default='8.8.4.4',
help='DNS server for simple network'),
cfg.BoolOpt('flat_injected',
default=False,
help='Whether to attempt to inject network setup into guest'),
cfg.StrOpt('flat_interface',
help='FlatDhcp will bridge into this interface if set'),
cfg.IntOpt('vlan_start',
default=100,
min=1,
max=4094,
help='First VLAN for private networks'),
cfg.StrOpt('vlan_interface',
help='VLANs will bridge into this interface if set'),
cfg.IntOpt('num_networks',
default=1,
help='Number of networks to support'),
cfg.StrOpt('vpn_ip',
default='$my_ip',
help='Public IP for the cloudpipe VPN servers'),
cfg.IntOpt('vpn_start',
default=1000,
help='First Vpn port for private networks'),
cfg.IntOpt('network_size',
default=256,
help='Number of addresses in each private subnet'),
cfg.StrOpt('fixed_range_v6',
default='fd00::/48',
help='Fixed IPv6 address block'),
cfg.StrOpt('gateway',
help='Default IPv4 gateway'),
cfg.StrOpt('gateway_v6',
help='Default IPv6 gateway'),
cfg.IntOpt('cnt_vpn_clients',
default=0,
help='Number of addresses reserved for vpn clients'),
cfg.IntOpt('fixed_ip_disassociate_timeout',
default=600,
help='Seconds after which a deallocated IP is disassociated'),
cfg.IntOpt('create_unique_mac_address_attempts',
default=5,
help='Number of attempts to create unique mac address'),
cfg.BoolOpt('fake_call',
default=False,
help='If True, skip using the queue and make local calls'),
cfg.BoolOpt('teardown_unused_network_gateway',
default=False,
help='If True, unused gateway devices (VLAN and bridge) are '
'deleted in VLAN network mode with multi hosted '
'networks'),
cfg.BoolOpt('force_dhcp_release',
default=True,
help='If True, send a dhcp release on instance termination'),
cfg.BoolOpt('update_dns_entries',
default=False,
help='If True, when a DNS entry must be updated, it sends a '
'fanout cast to all network hosts to update their DNS '
'entries in multi host mode'),
cfg.IntOpt("dns_update_periodic_interval",
default=-1,
help='Number of seconds to wait between runs of updates to DNS '
'entries.'),
cfg.StrOpt('dhcp_domain',
default='novalocal',
help='Domain to use for building the hostnames'),
cfg.StrOpt('l3_lib',
default='nova.network.l3.LinuxNetL3',
help="Indicates underlying L3 management library"),
]
CONF = cfg.CONF
CONF.register_opts(network_opts)
CONF.import_opt('use_ipv6', 'nova.netconf')
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('network_topic', 'nova.network.rpcapi')
CONF.import_opt('fake_network', 'nova.network.linux_net')
CONF.import_opt('share_dhcp_address', 'nova.objects.network')
CONF.import_opt('network_device_mtu', 'nova.objects.network')
class RPCAllocateFixedIP(object):
"""Mixin class originally for FlatDCHP and VLAN network managers.
used since they share code to RPC.call allocate_fixed_ip on the
correct network host to configure dnsmasq
"""
servicegroup_api = None
def _allocate_fixed_ips(self, context, instance_id, host, networks,
**kwargs):
"""Calls allocate_fixed_ip once for each network."""
green_threads = []
vpn = kwargs.get('vpn')
requested_networks = kwargs.get('requested_networks')
addresses_by_network = {}
if requested_networks is not None:
for request in requested_networks:
addresses_by_network[request.network_id] = request.address
for network in networks:
if 'uuid' in network and network['uuid'] in addresses_by_network:
address = addresses_by_network[network['uuid']]
else:
address = None
# NOTE(vish): if we are not multi_host pass to the network host
# NOTE(tr3buchet): but if we are, host came from instance.host
if not network['multi_host']:
host = network['host']
# NOTE(vish): if there is no network host, set one
if host is None:
host = self.network_rpcapi.set_network_host(context,
network)
if host != self.host:
# need to call allocate_fixed_ip to correct network host
green_threads.append(utils.spawn(
self.network_rpcapi._rpc_allocate_fixed_ip,
context, instance_id, network['id'], address, vpn,
host))
else:
# i am the correct host, run here
self.allocate_fixed_ip(context, instance_id, network,
vpn=vpn, address=address)
# wait for all of the allocates (if any) to finish
for gt in green_threads:
gt.wait()
def _rpc_allocate_fixed_ip(self, context, instance_id, network_id,
**kwargs):
"""Sits in between _allocate_fixed_ips and allocate_fixed_ip to
perform network lookup on the far side of rpc.
"""
network = self._get_network_by_id(context, network_id)
return self.allocate_fixed_ip(context, instance_id, network, **kwargs)
def deallocate_fixed_ip(self, context, address, host=None, teardown=True,
instance=None):
"""Call the superclass deallocate_fixed_ip if i'm the correct host
otherwise call to the correct host
"""
fixed_ip = objects.FixedIP.get_by_address(
context, address, expected_attrs=['network'])
network = fixed_ip.network
# NOTE(vish): if we are not multi_host pass to the network host
# NOTE(tr3buchet): but if we are, host came from instance.host
if not network.multi_host:
host = network.host
if host == self.host:
# NOTE(vish): deallocate the fixed ip locally
return super(RPCAllocateFixedIP, self).deallocate_fixed_ip(context,
address, instance=instance)
if network.multi_host:
service = objects.Service.get_by_host_and_binary(
context, host, 'nova-network')
if not service or not self.servicegroup_api.service_is_up(service):
# NOTE(vish): deallocate the fixed ip locally but don't
# teardown network devices
return super(RPCAllocateFixedIP, self).deallocate_fixed_ip(
context, address, teardown=False, instance=instance)
self.network_rpcapi.deallocate_fixed_ip(context, address, host,
instance)
class NetworkManager(manager.Manager):
"""Implements common network manager functionality.
This class must be subclassed to support specific topologies.
host management:
hosts configure themselves for networks they are assigned to in the
table upon startup. If there are networks in the table which do not
have hosts, those will be filled in and have hosts configured
    as the hosts pick them up one at a time during their periodic task.
The one at a time part is to flatten the layout to help scale
"""
target = messaging.Target(version='1.16')
# If True, this manager requires VIF to create a bridge.
SHOULD_CREATE_BRIDGE = False
# If True, this manager requires VIF to create VLAN tag.
SHOULD_CREATE_VLAN = False
# if True, this manager leverages DHCP
DHCP = False
timeout_fixed_ips = True
required_create_args = []
def __init__(self, network_driver=None, *args, **kwargs):
self.driver = driver.load_network_driver(network_driver)
self.instance_dns_manager = importutils.import_object(
CONF.instance_dns_manager)
self.instance_dns_domain = CONF.instance_dns_domain
self.floating_dns_manager = importutils.import_object(
CONF.floating_ip_dns_manager)
self.network_api = network_api.API()
self.network_rpcapi = network_rpcapi.NetworkAPI()
self.security_group_api = (
openstack_driver.get_openstack_security_group_driver())
self.servicegroup_api = servicegroup.API()
l3_lib = kwargs.get("l3_lib", CONF.l3_lib)
self.l3driver = importutils.import_object(l3_lib)
self.quotas_cls = objects.Quotas
super(NetworkManager, self).__init__(service_name='network',
*args, **kwargs)
@staticmethod
def _uses_shared_ip(network):
shared = network.get('share_address') or CONF.share_dhcp_address
return not network.get('multi_host') or shared
@utils.synchronized('get_dhcp')
def _get_dhcp_ip(self, context, network_ref, host=None):
"""Get the proper dhcp address to listen on."""
# NOTE(vish): If we are sharing the dhcp_address then we can just
# return the dhcp_server from the database.
if self._uses_shared_ip(network_ref):
return network_ref.get('dhcp_server') or network_ref['gateway']
if not host:
host = self.host
network_id = network_ref['id']
try:
fip = objects.FixedIP.get_by_network_and_host(context,
network_id,
host)
return fip.address
except exception.FixedIpNotFoundForNetworkHost:
elevated = context.elevated()
fip = objects.FixedIP.associate_pool(elevated,
network_id,
host=host)
return fip.address
def get_dhcp_leases(self, ctxt, network_ref):
"""Broker the request to the driver to fetch the dhcp leases."""
LOG.debug('Get DHCP leases for network %s', network_ref['uuid'])
return self.driver.get_dhcp_leases(ctxt, network_ref)
def init_host(self):
"""Do any initialization that needs to be run if this is a
standalone service.
"""
# NOTE(vish): Set up networks for which this host already has
# an ip address.
ctxt = context.get_admin_context()
for network in objects.NetworkList.get_by_host(ctxt, self.host):
self._setup_network_on_host(ctxt, network)
if CONF.update_dns_entries:
LOG.debug('Update DNS on network %s for host %s',
network['uuid'], self.host)
dev = self.driver.get_dev(network)
self.driver.update_dns(ctxt, dev, network)
LOG.info(_LI('Configured network %(network)s on host %(host)s'),
{'network': network['uuid'], 'host': self.host})
@periodic_task.periodic_task
def _disassociate_stale_fixed_ips(self, context):
if self.timeout_fixed_ips:
now = timeutils.utcnow()
timeout = CONF.fixed_ip_disassociate_timeout
time = now - datetime.timedelta(seconds=timeout)
num = objects.FixedIP.disassociate_all_by_timeout(context,
self.host,
time)
if num:
LOG.debug('Disassociated %s stale fixed IP(s)', num)
def set_network_host(self, context, network_ref):
"""Safely sets the host of the network."""
# TODO(mriedem): Remove this compat shim when network RPC API version
# 1.0 is dropped.
if not isinstance(network_ref, obj_base.NovaObject):
network_ref = objects.Network._from_db_object(
context, objects.Network(), network_ref)
LOG.debug('Setting host %s for network %s', self.host,
network_ref.uuid, context=context)
network_ref.host = self.host
network_ref.save()
return self.host
def _do_trigger_security_group_members_refresh_for_instance(self,
instance_id):
# NOTE(francois.charlier): the instance may have been deleted already
# thus enabling `read_deleted`
admin_context = context.get_admin_context(read_deleted='yes')
instance = objects.Instance.get_by_uuid(admin_context, instance_id)
try:
# NOTE(vish): We need to make sure the instance info cache has been
# updated with new ip info before we trigger the
# security group refresh. This is somewhat inefficient
# but avoids doing some dangerous refactoring for a
# bug fix.
nw_info = self.get_instance_nw_info(admin_context, instance_id,
None, None)
ic = objects.InstanceInfoCache.new(admin_context, instance_id)
ic.network_info = nw_info
ic.save(update_cells=False)
except exception.InstanceInfoCacheNotFound:
pass
groups = instance.security_groups
group_ids = [group.id for group in groups]
self.security_group_api.trigger_members_refresh(admin_context,
group_ids)
# NOTE(hanlind): This method can be removed in version 2.0 of the RPC API
def get_instance_uuids_by_ip_filter(self, context, filters):
fixed_ip_filter = filters.get('fixed_ip')
ip_filter = re.compile(str(filters.get('ip')))
ipv6_filter = re.compile(str(filters.get('ip6')))
LOG.debug('Get instance uuids by IP filters. Fixed IP filter: %s. '
'IP filter: %s. IPv6 filter: %s', fixed_ip_filter,
str(filters.get('ip')), str(filters.get('ip6')))
# NOTE(jkoelker) Should probably figure out a better way to do
# this. But for now it "works", this could suck on
# large installs.
vifs = objects.VirtualInterfaceList.get_all(context)
results = []
for vif in vifs:
if vif.instance_uuid is None:
continue
network = self._get_network_by_id(context, vif.network_id)
fixed_ipv6 = None
if network['cidr_v6'] is not None:
fixed_ipv6 = ipv6.to_global(network['cidr_v6'],
vif.address,
context.project_id)
if fixed_ipv6 and ipv6_filter.match(fixed_ipv6):
results.append({'instance_uuid': vif.instance_uuid,
'ip': fixed_ipv6})
fixed_ips = objects.FixedIPList.get_by_virtual_interface_id(
context, vif.id)
for fixed_ip in fixed_ips:
if not fixed_ip or not fixed_ip.address:
continue
if str(fixed_ip.address) == fixed_ip_filter:
results.append({'instance_uuid': vif.instance_uuid,
'ip': fixed_ip.address})
continue
if ip_filter.match(str(fixed_ip.address)):
results.append({'instance_uuid': vif.instance_uuid,
'ip': fixed_ip.address})
continue
for floating_ip in fixed_ip.floating_ips:
if not floating_ip or not floating_ip.address:
continue
if ip_filter.match(str(floating_ip.address)):
results.append({'instance_uuid': vif.instance_uuid,
'ip': floating_ip.address})
continue
return results
def _get_networks_for_instance(self, context, instance_id, project_id,
requested_networks=None):
"""Determine & return which networks an instance should connect to."""
# TODO(tr3buchet) maybe this needs to be updated in the future if
# there is a better way to determine which networks
# a non-vlan instance should connect to
if requested_networks is not None and len(requested_networks) != 0:
network_uuids = [request.network_id
for request in requested_networks]
networks = self._get_networks_by_uuids(context, network_uuids)
else:
try:
networks = objects.NetworkList.get_all(context)
except exception.NoNetworksFound:
return []
# return only networks which are not vlan networks
return [network for network in networks if not network.vlan]
def allocate_for_instance(self, context, **kwargs):
"""Handles allocating the various network resources for an instance.
rpc.called by network_api
"""
instance_uuid = kwargs['instance_id']
if not uuidutils.is_uuid_like(instance_uuid):
instance_uuid = kwargs.get('instance_uuid')
host = kwargs['host']
project_id = kwargs['project_id']
rxtx_factor = kwargs['rxtx_factor']
requested_networks = kwargs.get('requested_networks')
if (requested_networks and
not isinstance(requested_networks,
objects.NetworkRequestList)):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest.from_tuple(t)
for t in requested_networks])
vpn = kwargs['vpn']
macs = kwargs['macs']
admin_context = context.elevated()
networks = self._get_networks_for_instance(context,
instance_uuid, project_id,
requested_networks=requested_networks)
networks_list = [self._get_network_dict(network)
for network in networks]
LOG.debug('Networks retrieved for instance: |%s|',
networks_list, context=context, instance_uuid=instance_uuid)
try:
self._allocate_mac_addresses(admin_context, instance_uuid,
networks, macs)
except Exception:
with excutils.save_and_reraise_exception():
# If we fail to allocate any one mac address, clean up all
# allocated VIFs
objects.VirtualInterface.delete_by_instance_uuid(
context, instance_uuid)
self._allocate_fixed_ips(admin_context, instance_uuid,
host, networks, vpn=vpn,
requested_networks=requested_networks)
if CONF.update_dns_entries:
network_ids = [network['id'] for network in networks]
self.network_rpcapi.update_dns(context, network_ids)
net_info = self.get_instance_nw_info(admin_context, instance_uuid,
rxtx_factor, host)
LOG.info(_LI("Allocated network: '%s' for instance"), net_info,
instance_uuid=instance_uuid,
context=context)
return net_info
def deallocate_for_instance(self, context, **kwargs):
"""Handles deallocating various network resources for an instance.
rpc.called by network_api
kwargs can contain fixed_ips to circumvent another db lookup
"""
# NOTE(francois.charlier): in some cases the instance might be
# deleted before the IPs are released, so we need to get deleted
# instances too
read_deleted_context = context.elevated(read_deleted='yes')
if 'instance' in kwargs:
instance = kwargs['instance']
instance_uuid = instance.uuid
host = instance.host
else:
instance_id = kwargs['instance_id']
if uuidutils.is_uuid_like(instance_id):
instance = objects.Instance.get_by_uuid(
read_deleted_context, instance_id)
else:
instance = objects.Instance.get_by_id(
read_deleted_context, instance_id)
# NOTE(russellb) in case instance_id was an ID and not UUID
instance_uuid = instance.uuid
host = kwargs.get('host')
try:
requested_networks = kwargs.get('requested_networks')
if requested_networks:
# NOTE(obondarev): Temporary and transitional
if isinstance(requested_networks, objects.NetworkRequestList):
requested_networks = requested_networks.as_tuples()
network_ids = set([net_id for (net_id, ip)
in requested_networks])
fixed_ips = [ip for (net_id, ip) in requested_networks if ip]
else:
fixed_ip_list = objects.FixedIPList.get_by_instance_uuid(
read_deleted_context, instance_uuid)
network_ids = set([str(fixed_ip.network_id) for fixed_ip
in fixed_ip_list])
fixed_ips = [str(ip.address) for ip in fixed_ip_list]
except exception.FixedIpNotFoundForInstance:
network_ids = set([])
fixed_ips = []
LOG.debug("Network deallocation for instance",
context=context, instance_uuid=instance_uuid)
# deallocate fixed ips
for fixed_ip in fixed_ips:
self.deallocate_fixed_ip(context, fixed_ip, host=host,
instance=instance)
if CONF.update_dns_entries:
self.network_rpcapi.update_dns(context, list(network_ids))
# deallocate vifs (mac addresses)
objects.VirtualInterface.delete_by_instance_uuid(
read_deleted_context, instance_uuid)
LOG.info(_LI("Network deallocated for instance (fixed IPs: '%s')"),
fixed_ips, context=context, instance_uuid=instance_uuid)
@messaging.expected_exceptions(exception.InstanceNotFound)
def get_instance_nw_info(self, context, instance_id, rxtx_factor,
host, instance_uuid=None, **kwargs):
"""Creates network info list for instance.
called by allocate_for_instance and network_api
context needs to be elevated
:returns: network info list [(network,info),(network,info)...]
where network = dict containing pertinent data from a network db object
and info = dict containing pertinent networking data
"""
if not uuidutils.is_uuid_like(instance_id):
instance_id = instance_uuid
instance_uuid = instance_id
LOG.debug('Get instance network info', instance_uuid=instance_uuid)
try:
fixed_ips = objects.FixedIPList.get_by_instance_uuid(
context, instance_uuid)
except exception.FixedIpNotFoundForInstance:
fixed_ips = []
LOG.debug('Found %d fixed IPs associated to the instance in the '
'database.',
len(fixed_ips), instance_uuid=instance_uuid)
nw_info = network_model.NetworkInfo()
vifs = collections.OrderedDict()
for fixed_ip in fixed_ips:
vif = fixed_ip.virtual_interface
if not vif:
LOG.warning(_LW('No VirtualInterface for FixedIP: %s'),
str(fixed_ip.address), instance_uuid=instance_uuid)
continue
if not fixed_ip.network:
LOG.warning(_LW('No Network for FixedIP: %s'),
str(fixed_ip.address), instance_uuid=instance_uuid)
continue
if vif.uuid in vifs:
current = vifs[vif.uuid]
else:
current = {
'id': vif.uuid,
'type': network_model.VIF_TYPE_BRIDGE,
'address': vif.address,
}
vifs[vif.uuid] = current
net_dict = self._get_network_dict(fixed_ip.network)
network = network_model.Network(**net_dict)
subnets = self._get_subnets_from_network(context,
fixed_ip.network,
host)
network['subnets'] = subnets
current['network'] = network
try:
current['rxtx_cap'] = (fixed_ip.network['rxtx_base'] *
rxtx_factor)
except (TypeError, KeyError):
pass
if fixed_ip.network.cidr_v6 and vif.address:
# NOTE(vish): I strongly suspect the v6 subnet is not used
# anywhere, but support it just in case
# add the v6 address to the v6 subnet
address = ipv6.to_global(fixed_ip.network.cidr_v6,
vif.address,
fixed_ip.network.project_id)
model_ip = network_model.FixedIP(address=address)
current['network']['subnets'][1]['ips'].append(model_ip)
# add the v4 address to the v4 subnet
model_ip = network_model.FixedIP(address=str(fixed_ip.address))
for ip in fixed_ip.floating_ips:
floating_ip = network_model.IP(address=str(ip['address']),
type='floating')
model_ip.add_floating_ip(floating_ip)
current['network']['subnets'][0]['ips'].append(model_ip)
for vif in vifs.values():
nw_info.append(network_model.VIF(**vif))
LOG.debug('Built network info: |%s|', nw_info,
instance_uuid=instance_uuid)
return nw_info
@staticmethod
def _get_network_dict(network):
"""Returns the dict representing necessary and meta network fields."""
# get generic network fields
network_dict = {'id': network['uuid'],
'bridge': network['bridge'],
'label': network['label'],
'tenant_id': network['project_id']}
# get extra information
if network.get('injected'):
network_dict['injected'] = network['injected']
return network_dict
@staticmethod
def _extract_subnets(network):
"""Returns information about the IPv4 and IPv6 subnets
associated with a Neutron Network UUID.
"""
subnet_v4 = {
'network_id': network.uuid,
'cidr': network.cidr,
'gateway': network.gateway,
'dhcp_server': getattr(network, 'dhcp_server'),
'broadcast': network.broadcast,
'netmask': network.netmask,
'version': 4,
'dns1': network.dns1,
'dns2': network.dns2}
# TODO(tr3buchet): I'm noticing we've assumed here that all dns is v4.
# this is probably bad as there is no way to add v6
# dns to nova
subnet_v6 = {
'network_id': network.uuid,
'cidr': network.cidr_v6,
'gateway': network.gateway_v6,
'dhcp_server': None,
'broadcast': None,
'netmask': network.netmask_v6,
'version': 6,
'dns1': None,
'dns2': None}
def ips_to_strs(net):
for key, value in net.items():
if isinstance(value, netaddr.ip.BaseIP):
net[key] = str(value)
return net
return [ips_to_strs(subnet_v4), ips_to_strs(subnet_v6)]
def _get_subnets_from_network(self, context, network, instance_host=None):
"""Returns the 1 or 2 possible subnets for a nova network."""
extracted_subnets = self._extract_subnets(network)
subnets = []
for subnet in extracted_subnets:
subnet_dict = {'cidr': subnet['cidr'],
'gateway': network_model.IP(
address=subnet['gateway'],
type='gateway')}
# deal with dhcp
if self.DHCP:
if network.get('multi_host'):
dhcp_server = self._get_dhcp_ip(context, network,
instance_host)
else:
dhcp_server = self._get_dhcp_ip(context, subnet)
subnet_dict['dhcp_server'] = dhcp_server
subnet_object = network_model.Subnet(**subnet_dict)
# add dns info
for k in ['dns1', 'dns2']:
if subnet.get(k):
subnet_object.add_dns(
network_model.IP(address=subnet[k], type='dns'))
subnet_object['ips'] = []
subnets.append(subnet_object)
return subnets
def _allocate_mac_addresses(self, context, instance_uuid, networks, macs):
"""Generates mac addresses and creates vif rows in db for them."""
# make a copy we can mutate
if macs is not None:
available_macs = set(macs)
for network in networks:
if macs is None:
self._add_virtual_interface(context, instance_uuid,
network['id'])
else:
try:
mac = available_macs.pop()
except KeyError:
raise exception.VirtualInterfaceCreateException()
self._add_virtual_interface(context, instance_uuid,
network['id'], mac)
def _add_virtual_interface(self, context, instance_uuid, network_id,
mac=None):
attempts = 1 if mac else CONF.create_unique_mac_address_attempts
for i in range(attempts):
try:
vif = objects.VirtualInterface(context)
vif.address = mac or utils.generate_mac_address()
vif.instance_uuid = instance_uuid
vif.network_id = network_id
vif.uuid = str(uuid.uuid4())
vif.create()
return vif
except exception.VirtualInterfaceCreateException:
# Try again up to max number of attempts
pass
raise exception.VirtualInterfaceMacAddressException()
def add_fixed_ip_to_instance(self, context, instance_id, host, network_id,
rxtx_factor=None):
"""Adds a fixed IP to an instance from specified network."""
if uuidutils.is_uuid_like(network_id):
network = self.get_network(context, network_id)
else:
network = self._get_network_by_id(context, network_id)
LOG.debug('Add fixed IP on network %s', network['uuid'],
instance_uuid=instance_id)
self._allocate_fixed_ips(context, instance_id, host, [network])
return self.get_instance_nw_info(context, instance_id, rxtx_factor,
host)
# NOTE(russellb) This method can be removed in 2.0 of this API. It is
# deprecated in favor of the method in the base API.
def get_backdoor_port(self, context):
"""Return backdoor port for eventlet_backdoor."""
return self.backdoor_port
def remove_fixed_ip_from_instance(self, context, instance_id, host,
address, rxtx_factor=None):
"""Removes a fixed IP from an instance from specified network."""
LOG.debug('Remove fixed IP %s', address, instance_uuid=instance_id)
fixed_ips = objects.FixedIPList.get_by_instance_uuid(context,
instance_id)
for fixed_ip in fixed_ips:
if str(fixed_ip.address) == address:
self.deallocate_fixed_ip(context, address, host)
# NOTE(vish): this probably isn't a dhcp ip so just
# deallocate it now. In the extremely rare
# case that this is a race condition, we
# will just get a warn in lease or release.
if not fixed_ip.leased:
fixed_ip.disassociate()
return self.get_instance_nw_info(context, instance_id,
rxtx_factor, host)
raise exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=instance_id, ip=address)
def _validate_instance_zone_for_dns_domain(self, context, instance):
if not self.instance_dns_domain:
return True
instance_domain = self.instance_dns_domain
domainref = objects.DNSDomain.get_by_domain(context, instance_domain)
if domainref is None:
LOG.warning(_LW('instance-dns-zone not found |%s|.'),
instance_domain, instance=instance)
return True
dns_zone = domainref.availability_zone
instance_zone = instance.get('availability_zone')
if dns_zone and (dns_zone != instance_zone):
LOG.warning(_LW('instance-dns-zone is |%(domain)s|, '
'which is in availability zone |%(zone)s|. '
'Instance is in zone |%(zone2)s|. '
'No DNS record will be created.'),
{'domain': instance_domain,
'zone': dns_zone,
'zone2': instance_zone},
instance=instance)
return False
else:
return True
def allocate_fixed_ip(self, context, instance_id, network, **kwargs):
"""Gets a fixed IP from the pool."""
# TODO(vish): when this is called by compute, we can associate compute
# with a network, or a cluster of computes with a network
# and use that network here with a method like
# network_get_by_compute_host
address = None
# NOTE(vish) This db query could be removed if we pass az and name
# (or the whole instance object).
instance = objects.Instance.get_by_uuid(context, instance_id)
LOG.debug('Allocate fixed IP on network %s', network['uuid'],
instance=instance)
# A list of cleanup functions to call on error
cleanup = []
# Check the quota; can't put this in the API because we get
# called into from other places
quotas = self.quotas_cls(context=context)
quota_project, quota_user = quotas_obj.ids_from_instance(context,
instance)
try:
quotas.reserve(fixed_ips=1, project_id=quota_project,
user_id=quota_user)
cleanup.append(functools.partial(quotas.rollback, context))
except exception.OverQuota as exc:
usages = exc.kwargs['usages']
used = (usages['fixed_ips']['in_use'] +
usages['fixed_ips']['reserved'])
LOG.warning(_LW("Quota exceeded for project %(pid)s, tried to "
"allocate fixed IP. %(used)s of %(allowed)s are "
"in use or are already reserved."),
{'pid': quota_project, 'used': used,
'allowed': exc.kwargs['quotas']['fixed_ips']},
instance_uuid=instance_id)
raise exception.FixedIpLimitExceeded()
try:
if network['cidr']:
# NOTE(mriedem): allocate the vif before associating the
# instance to reduce a race window where a previous instance
# was associated with the fixed IP and has released it, because
# release_fixed_ip will disassociate if allocated is False.
vif = objects.VirtualInterface.get_by_instance_and_network(
context, instance_id, network['id'])
if vif is None:
LOG.debug('vif for network %(network)s is used up, '
'trying to create new vif',
{'network': network['id']}, instance=instance)
vif = self._add_virtual_interface(context,
instance_id, network['id'])
address = kwargs.get('address', None)
if address:
LOG.debug('Associating instance with specified fixed IP '
'%(address)s in network %(network)s on subnet '
'%(cidr)s.' %
{'address': address, 'network': network['id'],
'cidr': network['cidr']},
instance=instance)
fip = objects.FixedIP.associate(
context, str(address), instance_id, network['id'],
vif_id=vif.id)
else:
LOG.debug('Associating instance with fixed IP from pool '
'in network %(network)s on subnet %(cidr)s.' %
{'network': network['id'],
'cidr': network['cidr']},
instance=instance)
fip = objects.FixedIP.associate_pool(
context.elevated(), network['id'], instance_id,
vif_id=vif.id)
LOG.debug('Associated instance with fixed IP: %s', fip,
instance=instance)
address = str(fip.address)
cleanup.append(functools.partial(fip.disassociate, context))
LOG.debug('Refreshing security group members for instance.',
instance=instance)
self._do_trigger_security_group_members_refresh_for_instance(
instance_id)
cleanup.append(functools.partial(
self._do_trigger_security_group_members_refresh_for_instance, # noqa
instance_id))
name = instance.display_name
if self._validate_instance_zone_for_dns_domain(context, instance):
self.instance_dns_manager.create_entry(
name, str(fip.address), "A", self.instance_dns_domain)
cleanup.append(functools.partial(
self.instance_dns_manager.delete_entry,
name, self.instance_dns_domain))
self.instance_dns_manager.create_entry(
instance_id, str(fip.address), "A",
self.instance_dns_domain)
cleanup.append(functools.partial(
self.instance_dns_manager.delete_entry,
instance_id, self.instance_dns_domain))
LOG.debug('Setting up network %(network)s on host %(host)s.' %
{'network': network['id'], 'host': self.host},
instance=instance)
self._setup_network_on_host(context, network)
cleanup.append(functools.partial(
self._teardown_network_on_host,
context, network))
quotas.commit()
if address is None:
# TODO(mriedem): should _setup_network_on_host return the addr?
LOG.debug('Fixed IP is setup on network %s but not returning '
'the specific IP from the base network manager.',
network['uuid'], instance=instance)
else:
LOG.debug('Allocated fixed IP %s on network %s', address,
network['uuid'], instance=instance)
return address
except Exception:
with excutils.save_and_reraise_exception():
for f in cleanup:
try:
f()
except Exception:
LOG.warning(_LW('Error cleaning up fixed IP '
'allocation. Manual cleanup may '
'be required.'), exc_info=True)
def deallocate_fixed_ip(self, context, address, host=None, teardown=True,
instance=None):
"""Returns a fixed IP to the pool."""
fixed_ip_ref = objects.FixedIP.get_by_address(
context, address, expected_attrs=['network'])
instance_uuid = fixed_ip_ref.instance_uuid
vif_id = fixed_ip_ref.virtual_interface_id
LOG.debug('Deallocate fixed IP %s', address,
instance_uuid=instance_uuid)
if not instance:
# NOTE(vish) This db query could be removed if we pass az and name
# (or the whole instance object).
# NOTE(danms) We can't use fixed_ip_ref.instance because
# instance may be deleted and the relationship
# doesn't extend to deleted instances
instance = objects.Instance.get_by_uuid(
context.elevated(read_deleted='yes'), instance_uuid)
quotas = self.quotas_cls(context=context)
quota_project, quota_user = quotas_obj.ids_from_instance(context,
instance)
try:
quotas.reserve(fixed_ips=-1, project_id=quota_project,
user_id=quota_user)
except Exception:
LOG.exception(_LE("Failed to update usages deallocating "
"fixed IP"))
try:
self._do_trigger_security_group_members_refresh_for_instance(
instance_uuid)
if self._validate_instance_zone_for_dns_domain(context, instance):
for n in self.instance_dns_manager.get_entries_by_address(
address, self.instance_dns_domain):
self.instance_dns_manager.delete_entry(n,
self.instance_dns_domain)
fixed_ip_ref.allocated = False
fixed_ip_ref.save()
if teardown:
network = fixed_ip_ref.network
if CONF.force_dhcp_release:
dev = self.driver.get_dev(network)
# NOTE(vish): The below errors should never happen, but
# there may be a race condition that is causing
# them per
# https://code.launchpad.net/bugs/968457,
# so we log a message to help track down
# the possible race.
if not vif_id:
LOG.info(_LI("Unable to release %s because vif "
"doesn't exist"), address)
return
vif = objects.VirtualInterface.get_by_id(context, vif_id)
if not vif:
LOG.info(_LI("Unable to release %s because vif "
"object doesn't exist"), address)
return
# NOTE(cfb): Call teardown before release_dhcp to ensure
# that the IP can't be re-leased after a release
# packet is sent.
self._teardown_network_on_host(context, network)
# NOTE(vish): This forces a packet so that the
# release_fixed_ip callback will
# get called by nova-dhcpbridge.
try:
self.driver.release_dhcp(dev, address, vif.address)
except exception.NetworkDhcpReleaseFailed:
LOG.error(_LE("Error releasing DHCP for IP %(address)s"
" with MAC %(mac_address)s"),
{'address': address,
'mac_address': vif.address},
instance=instance)
# NOTE(yufang521247): This is probably a failed dhcp fixed
# ip. DHCPRELEASE packet sent to dnsmasq would not trigger
# dhcp-bridge to run. Thus it is better to disassociate
# such fixed ip here.
fixed_ip_ref = objects.FixedIP.get_by_address(
context, address)
if (instance_uuid == fixed_ip_ref.instance_uuid and
not fixed_ip_ref.leased):
LOG.debug('Explicitly disassociating fixed IP %s from '
'instance.', address,
instance_uuid=instance_uuid)
fixed_ip_ref.disassociate()
else:
# We can't try to free the IP address so just call teardown
self._teardown_network_on_host(context, network)
except Exception:
with excutils.save_and_reraise_exception():
try:
quotas.rollback()
except Exception:
LOG.warning(_LW("Failed to rollback quota for "
"deallocate fixed IP: %s"), address,
instance=instance)
# Commit the reservations
quotas.commit()
def lease_fixed_ip(self, context, address):
"""Called by dhcp-bridge when IP is leased."""
LOG.debug('Leased IP |%s|', address, context=context)
fixed_ip = objects.FixedIP.get_by_address(context, address)
if fixed_ip.instance_uuid is None:
LOG.warning(_LW('IP %s leased that is not associated'), fixed_ip,
context=context)
return
fixed_ip.leased = True
fixed_ip.save()
if not fixed_ip.allocated:
LOG.warning(_LW('IP |%s| leased that isn\'t allocated'), fixed_ip,
context=context, instance_uuid=fixed_ip.instance_uuid)
def release_fixed_ip(self, context, address, mac=None):
"""Called by dhcp-bridge when IP is released."""
LOG.debug('Released IP |%s|', address, context=context)
fixed_ip = objects.FixedIP.get_by_address(context, address)
if fixed_ip.instance_uuid is None:
LOG.warning(_LW('IP %s released that is not associated'), fixed_ip,
context=context)
return
if not fixed_ip.leased:
LOG.warning(_LW('IP %s released that was not leased'), fixed_ip,
context=context, instance_uuid=fixed_ip.instance_uuid)
else:
fixed_ip.leased = False
fixed_ip.save()
if not fixed_ip.allocated:
# NOTE(mriedem): Sometimes allocate_fixed_ip will associate the
# fixed IP to a new instance while an old associated instance is
# being deallocated. So we check to see if the mac is for the VIF
# that is associated to the instance that is currently associated
# with the fixed IP because if it's not, we hit this race and
# should ignore the request so we don't disassociate the fixed IP
# from the wrong instance.
if mac:
LOG.debug('Checking to see if virtual interface with MAC '
'%(mac)s is still associated to instance.',
{'mac': mac}, instance_uuid=fixed_ip.instance_uuid)
vif = objects.VirtualInterface.get_by_address(context, mac)
if vif:
LOG.debug('Found VIF: %s', vif,
instance_uuid=fixed_ip.instance_uuid)
if vif.instance_uuid != fixed_ip.instance_uuid:
LOG.info(_LI("Ignoring request to release fixed IP "
"%(address)s with MAC %(mac)s since it "
"is now associated with a new instance "
"that is in the process of allocating "
"it's network."),
{'address': address, 'mac': mac},
instance_uuid=fixed_ip.instance_uuid)
return
else:
LOG.debug('No VIF was found for MAC: %s', mac,
instance_uuid=fixed_ip.instance_uuid)
LOG.debug('Disassociating fixed IP %s from instance.', address,
instance_uuid=fixed_ip.instance_uuid)
fixed_ip.disassociate()
@staticmethod
def _convert_int_args(kwargs):
int_args = ("network_size", "num_networks",
"vlan_start", "vpn_start")
for key in int_args:
try:
value = kwargs.get(key)
if value is None:
continue
kwargs[key] = int(value)
except ValueError:
raise exception.InvalidIntValue(key=key)
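    # Hedged example of the conversion above (not part of the original code):
    #   kwargs = {'network_size': '256', 'num_networks': None, 'vlan_start': '100'}
    #   _convert_int_args(kwargs)
    #   # -> {'network_size': 256, 'num_networks': None, 'vlan_start': 100}
    # A non-numeric value such as 'abc' raises InvalidIntValue instead.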
def create_networks(self, context,
label, cidr=None, multi_host=None, num_networks=None,
network_size=None, cidr_v6=None,
gateway=None, gateway_v6=None, bridge=None,
bridge_interface=None, dns1=None, dns2=None,
fixed_cidr=None, allowed_start=None,
allowed_end=None, **kwargs):
arg_names = ("label", "cidr", "multi_host", "num_networks",
"network_size", "cidr_v6",
"gateway", "gateway_v6", "bridge",
"bridge_interface", "dns1", "dns2",
"fixed_cidr", "allowed_start", "allowed_end")
if 'mtu' not in kwargs:
kwargs['mtu'] = CONF.network_device_mtu
if 'dhcp_server' not in kwargs:
kwargs['dhcp_server'] = gateway
if 'enable_dhcp' not in kwargs:
kwargs['enable_dhcp'] = True
if 'share_address' not in kwargs:
kwargs['share_address'] = CONF.share_dhcp_address
for name in arg_names:
kwargs[name] = locals()[name]
self._convert_int_args(kwargs)
# check for certain required inputs
# NOTE: We can remove this check after v2.0 API code is removed because
# jsonschema has checked already before this.
label = kwargs["label"]
if not label:
raise exception.NetworkNotCreated(req="label")
# Size of "label" column in nova.networks is 255, hence the restriction
# NOTE: We can remove this check after v2.0 API code is removed because
# jsonschema has checked already before this.
if len(label) > 255:
raise exception.LabelTooLong()
# NOTE: We can remove this check after v2.0 API code is removed because
# jsonschema has checked already before this.
if not (kwargs["cidr"] or kwargs["cidr_v6"]):
raise exception.NetworkNotCreated(req="cidr or cidr_v6")
kwargs["bridge"] = kwargs["bridge"] or CONF.flat_network_bridge
kwargs["bridge_interface"] = (kwargs["bridge_interface"] or
CONF.flat_interface)
for fld in self.required_create_args:
if not kwargs[fld]:
raise exception.NetworkNotCreated(req=fld)
if kwargs["cidr_v6"]:
# NOTE(vish): just for validation
try:
netaddr.IPNetwork(kwargs["cidr_v6"])
except netaddr.AddrFormatError:
raise exception.InvalidCidr(cidr=kwargs["cidr_v6"])
if kwargs["cidr"]:
try:
fixnet = netaddr.IPNetwork(kwargs["cidr"])
except netaddr.AddrFormatError:
raise exception.InvalidCidr(cidr=kwargs["cidr"])
kwargs["num_networks"] = kwargs["num_networks"] or CONF.num_networks
if not kwargs["network_size"]:
if kwargs["cidr"]:
each_subnet_size = fixnet.size / kwargs["num_networks"]
if each_subnet_size > CONF.network_size:
subnet = 32 - int(math.log(CONF.network_size, 2))
oversize_msg = _LW(
'Subnet(s) too large, defaulting to /%s.'
' To override, specify network_size flag.') % subnet
LOG.warning(oversize_msg)
kwargs["network_size"] = CONF.network_size
else:
kwargs["network_size"] = fixnet.size
else:
kwargs["network_size"] = CONF.network_size
kwargs["multi_host"] = (
CONF.multi_host
if kwargs["multi_host"] is None
else strutils.bool_from_string(kwargs["multi_host"]))
kwargs["vlan_start"] = kwargs.get("vlan_start") or CONF.vlan_start
kwargs["vpn_start"] = kwargs.get("vpn_start") or CONF.vpn_start
kwargs["dns1"] = kwargs["dns1"] or CONF.flat_network_dns
if kwargs["fixed_cidr"]:
try:
kwargs["fixed_cidr"] = netaddr.IPNetwork(kwargs["fixed_cidr"])
except netaddr.AddrFormatError:
raise exception.InvalidCidr(cidr=kwargs["fixed_cidr"])
# Subnet of fixed IPs must fall within fixed range
if kwargs["fixed_cidr"] not in fixnet:
raise exception.AddressOutOfRange(
address=kwargs["fixed_cidr"].network, cidr=fixnet)
LOG.debug('Create network: |%s|', kwargs)
return self._do_create_networks(context, **kwargs)
@staticmethod
def _index_of(subnet, ip):
try:
start = netaddr.IPAddress(ip)
except netaddr.AddrFormatError:
raise exception.InvalidAddress(address=ip)
index = start.value - subnet.value
if index < 0 or index >= subnet.size:
raise exception.AddressOutOfRange(address=ip, cidr=str(subnet))
return index
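    # Worked example for _index_of (illustrative only): with
    # subnet = netaddr.IPNetwork('10.0.0.0/24') and ip = '10.0.0.5',
    # start.value - subnet.value == 5, so index 5 is returned; an address
    # such as '10.0.1.5' falls outside the /24 and raises AddressOutOfRange.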
def _validate_cidr(self, context, nets, subnets_v4, fixed_net_v4):
used_subnets = [net.cidr for net in nets]
def find_next(subnet):
next_subnet = subnet.next()
while next_subnet in subnets_v4:
next_subnet = next_subnet.next()
if next_subnet in fixed_net_v4:
return next_subnet
for subnet in list(subnets_v4):
if subnet in used_subnets:
next_subnet = find_next(subnet)
if next_subnet:
subnets_v4.remove(subnet)
subnets_v4.append(next_subnet)
subnet = next_subnet
else:
raise exception.CidrConflict(cidr=subnet,
other=subnet)
for used_subnet in used_subnets:
if subnet in used_subnet:
raise exception.CidrConflict(cidr=subnet,
other=used_subnet)
if used_subnet in subnet:
next_subnet = find_next(subnet)
if next_subnet:
subnets_v4.remove(subnet)
subnets_v4.append(next_subnet)
subnet = next_subnet
else:
raise exception.CidrConflict(cidr=subnet,
other=used_subnet)
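    # Illustrative behaviour of _validate_cidr above (added comment, not
    # original code): if an existing network already uses 10.0.0.0/28 and the
    # requested range would carve out the same 10.0.0.0/28, find_next() walks
    # forward to the next free /28 (10.0.0.16/28, 10.0.0.32/28, ...) inside
    # fixed_net_v4; if no free subnet remains, CidrConflict is raised.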
def _do_create_networks(self, context,
label, cidr, multi_host, num_networks,
network_size, cidr_v6, gateway, gateway_v6, bridge,
bridge_interface, dns1=None, dns2=None,
fixed_cidr=None, mtu=None, dhcp_server=None,
enable_dhcp=None, share_address=None,
allowed_start=None, allowed_end=None, **kwargs):
"""Create networks based on parameters."""
# NOTE(jkoelker): these are dummy values to make sure iter works
# TODO(tr3buchet): disallow carving up networks
fixed_net_v4 = netaddr.IPNetwork('0/32')
fixed_net_v6 = netaddr.IPNetwork('::0/128')
subnets_v4 = []
subnets_v6 = []
if kwargs.get('ipam'):
if cidr_v6:
subnets_v6 = [netaddr.IPNetwork(cidr_v6)]
if cidr:
subnets_v4 = [netaddr.IPNetwork(cidr)]
else:
subnet_bits = int(math.ceil(math.log(network_size, 2)))
if cidr_v6:
fixed_net_v6 = netaddr.IPNetwork(cidr_v6)
prefixlen_v6 = 128 - subnet_bits
# smallest subnet in IPv6 ethernet network is /64
if prefixlen_v6 > 64:
prefixlen_v6 = 64
subnets_v6 = fixed_net_v6.subnet(prefixlen_v6,
count=num_networks)
if cidr:
fixed_net_v4 = netaddr.IPNetwork(cidr)
prefixlen_v4 = 32 - subnet_bits
subnets_v4 = list(fixed_net_v4.subnet(prefixlen_v4,
count=num_networks))
if cidr:
# NOTE(jkoelker): This replaces the _validate_cidrs call and
# prevents looping multiple times
try:
nets = objects.NetworkList.get_all(context)
except exception.NoNetworksFound:
nets = []
num_used_nets = len(nets)
self._validate_cidr(context, nets, subnets_v4, fixed_net_v4)
networks = objects.NetworkList(context=context, objects=[])
subnets = six.moves.zip_longest(subnets_v4, subnets_v6)
for index, (subnet_v4, subnet_v6) in enumerate(subnets):
net = objects.Network(context=context)
uuid = kwargs.get('uuid')
if uuid:
net.uuid = uuid
net.bridge = bridge
net.bridge_interface = bridge_interface
net.multi_host = multi_host
net.dns1 = dns1
net.dns2 = dns2
net.mtu = mtu
net.enable_dhcp = enable_dhcp
net.share_address = share_address
net.project_id = kwargs.get('project_id')
if num_networks > 1:
net.label = '%s_%d' % (label, index)
else:
net.label = label
bottom_reserved = self._bottom_reserved_ips
top_reserved = self._top_reserved_ips
extra_reserved = []
if cidr and subnet_v4:
current = subnet_v4[1]
if allowed_start:
val = self._index_of(subnet_v4, allowed_start)
current = netaddr.IPAddress(allowed_start)
bottom_reserved = val
if allowed_end:
val = self._index_of(subnet_v4, allowed_end)
top_reserved = subnet_v4.size - 1 - val
net.cidr = str(subnet_v4)
net.netmask = str(subnet_v4.netmask)
net.broadcast = str(subnet_v4.broadcast)
if gateway:
net.gateway = gateway
else:
net.gateway = current
current += 1
net.dhcp_server = dhcp_server or net.gateway
net.dhcp_start = current
current += 1
if net.dhcp_start == net.dhcp_server:
net.dhcp_start = current
extra_reserved.append(str(net.dhcp_server))
extra_reserved.append(str(net.gateway))
if cidr_v6 and subnet_v6:
net.cidr_v6 = str(subnet_v6)
if gateway_v6:
# use a pre-defined gateway if one is provided
net.gateway_v6 = str(gateway_v6)
else:
net.gateway_v6 = str(subnet_v6[1])
net.netmask_v6 = str(subnet_v6.netmask)
if CONF.network_manager == 'nova.network.manager.VlanManager':
vlan = kwargs.get('vlan', None)
if not vlan:
index_vlan = index + num_used_nets
vlan = kwargs['vlan_start'] + index_vlan
used_vlans = [x.vlan for x in nets]
if vlan in used_vlans:
# That vlan is used, try to get another one
used_vlans.sort()
vlan = used_vlans[-1] + 1
net.vpn_private_address = net.dhcp_start
extra_reserved.append(str(net.vpn_private_address))
net.dhcp_start = net.dhcp_start + 1
net.vlan = vlan
net.bridge = 'br%s' % vlan
# NOTE(vish): This makes ports unique across the cloud, a more
            #             robust solution would be to make them unique per ip
index_vpn = index + num_used_nets
net.vpn_public_port = kwargs['vpn_start'] + index_vpn
net.create()
networks.objects.append(net)
if cidr and subnet_v4:
self._create_fixed_ips(context, net.id, fixed_cidr,
extra_reserved, bottom_reserved,
top_reserved)
# NOTE(danms): Remove this in RPC API v2.0
return obj_base.obj_to_primitive(networks)
def delete_network(self, context, fixed_range, uuid,
require_disassociated=True):
# Prefer uuid but we'll also take cidr for backwards compatibility
elevated = context.elevated()
if uuid:
network = objects.Network.get_by_uuid(elevated, uuid)
elif fixed_range:
network = objects.Network.get_by_cidr(elevated, fixed_range)
LOG.debug('Delete network %s', network['uuid'])
if require_disassociated and network.project_id is not None:
raise exception.NetworkHasProject(project_id=network.project_id)
network.destroy()
@property
def _bottom_reserved_ips(self):
"""Number of reserved IPs at the bottom of the range."""
return 2 # network, gateway
@property
def _top_reserved_ips(self):
"""Number of reserved IPs at the top of the range."""
return 1 # broadcast
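    # Hedged example of how the reserved counts combine (comment added for
    # clarity): for a /24 network, _create_fixed_ips below marks the first two
    # addresses (network and gateway) and the last one (broadcast) as
    # reserved, plus anything listed in extra_reserved, leaving roughly 253 of
    # the 256 fixed IPs allocatable; VlanManager reserves one more at the
    # bottom for the VPN server and CONF.cnt_vpn_clients more at the top.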
def _create_fixed_ips(self, context, network_id, fixed_cidr=None,
extra_reserved=None, bottom_reserved=0,
top_reserved=0):
"""Create all fixed IPs for network."""
network = self._get_network_by_id(context, network_id)
if extra_reserved is None:
extra_reserved = []
if not fixed_cidr:
fixed_cidr = netaddr.IPNetwork(network['cidr'])
num_ips = len(fixed_cidr)
ips = []
for index in range(num_ips):
address = str(fixed_cidr[index])
if (index < bottom_reserved or num_ips - index <= top_reserved or
address in extra_reserved):
reserved = True
else:
reserved = False
ips.append({'network_id': network_id,
'address': address,
'reserved': reserved})
objects.FixedIPList.bulk_create(context, ips)
def _allocate_fixed_ips(self, context, instance_id, host, networks,
**kwargs):
"""Calls allocate_fixed_ip once for each network."""
raise NotImplementedError()
def setup_networks_on_host(self, context, instance_id, host, instance=None,
teardown=False):
"""calls setup/teardown on network hosts for an instance."""
green_threads = []
if teardown:
call_func = self._teardown_network_on_host
else:
call_func = self._setup_network_on_host
if instance is None:
instance = objects.Instance.get_by_id(context, instance_id)
vifs = objects.VirtualInterfaceList.get_by_instance_uuid(
context, instance.uuid)
LOG.debug('Setup networks on host', instance=instance)
for vif in vifs:
network = objects.Network.get_by_id(context, vif.network_id)
if not network.multi_host:
# NOTE (tr3buchet): if using multi_host, host is instance.host
host = network['host']
if self.host == host or host is None:
# at this point i am the correct host, or host doesn't
# matter -> FlatManager
call_func(context, network)
else:
# i'm not the right host, run call on correct host
green_threads.append(utils.spawn(
self.network_rpcapi.rpc_setup_network_on_host, context,
network.id, teardown, host))
# wait for all of the setups (if any) to finish
for gt in green_threads:
gt.wait()
def rpc_setup_network_on_host(self, context, network_id, teardown):
if teardown:
call_func = self._teardown_network_on_host
else:
call_func = self._setup_network_on_host
# subcall from original setup_networks_on_host
network = objects.Network.get_by_id(context, network_id)
call_func(context, network)
def _initialize_network(self, network):
if network.enable_dhcp:
is_ext = (network.dhcp_server is not None and
network.dhcp_server != network.gateway)
self.l3driver.initialize_network(network.cidr, is_ext)
self.l3driver.initialize_gateway(network)
def _setup_network_on_host(self, context, network):
"""Sets up network on this host."""
raise NotImplementedError()
def _teardown_network_on_host(self, context, network):
"""Sets up network on this host."""
raise NotImplementedError()
def validate_networks(self, context, networks):
"""check if the networks exists and host
is set to each network.
"""
LOG.debug('Validate networks')
if networks is None or len(networks) == 0:
return
for network_uuid, address in networks:
# check if the fixed IP address is valid and
# it actually belongs to the network
if address is not None:
if not netutils.is_valid_ip(address):
raise exception.FixedIpInvalid(address=address)
fixed_ip_ref = objects.FixedIP.get_by_address(
context, address, expected_attrs=['network'])
network = fixed_ip_ref.network
if network.uuid != network_uuid:
raise exception.FixedIpNotFoundForNetwork(
address=address, network_uuid=network_uuid)
if fixed_ip_ref.instance_uuid is not None:
raise exception.FixedIpAlreadyInUse(
address=address,
instance_uuid=fixed_ip_ref.instance_uuid)
def _get_network_by_id(self, context, network_id):
return objects.Network.get_by_id(context, network_id,
project_only='allow_none')
def _get_networks_by_uuids(self, context, network_uuids):
networks = objects.NetworkList.get_by_uuids(
context, network_uuids, project_only="allow_none")
networks.sort(key=lambda x: network_uuids.index(x.uuid))
return networks
def get_vifs_by_instance(self, context, instance_id):
"""Returns the vifs associated with an instance."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
instance = objects.Instance.get_by_id(context, instance_id)
LOG.debug('Get VIFs for instance', instance=instance)
# NOTE(russellb) No need to object-ify this since
# get_vifs_by_instance() is unused and set to be removed.
vifs = objects.VirtualInterfaceList.get_by_instance_uuid(context,
instance.uuid)
for vif in vifs:
if vif.network_id is not None:
network = self._get_network_by_id(context, vif.network_id)
vif.net_uuid = network.uuid
return [dict(vif) for vif in vifs]
def get_instance_id_by_floating_address(self, context, address):
"""Returns the instance id a floating IP's fixed IP is allocated to."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
LOG.debug('Get instance for floating address %s', address)
fixed_ip = objects.FixedIP.get_by_floating_address(context, address)
if fixed_ip is None:
return None
else:
return fixed_ip.instance_uuid
def get_network(self, context, network_uuid):
# NOTE(vish): used locally
return objects.Network.get_by_uuid(context.elevated(), network_uuid)
def get_all_networks(self, context):
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
try:
return obj_base.obj_to_primitive(
objects.NetworkList.get_all(context))
except exception.NoNetworksFound:
return []
def disassociate_network(self, context, network_uuid):
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
network = self.get_network(context, network_uuid)
network.disassociate(context, network.id)
def get_fixed_ip(self, context, id):
"""Return a fixed IP."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return objects.FixedIP.get_by_id(context, id)
def get_fixed_ip_by_address(self, context, address):
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return objects.FixedIP.get_by_address(context, address)
def get_vif_by_mac_address(self, context, mac_address):
"""Returns the vifs record for the mac_address."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
# NOTE(russellb) No need to object-ify this since
# get_vifs_by_instance() is unused and set to be removed.
vif = objects.VirtualInterface.get_by_address(context, mac_address)
if vif.network_id is not None:
network = self._get_network_by_id(context, vif.network_id)
vif.net_uuid = network.uuid
return vif
@periodic_task.periodic_task(
spacing=CONF.dns_update_periodic_interval)
def _periodic_update_dns(self, context):
"""Update local DNS entries of all networks on this host."""
networks = objects.NetworkList.get_by_host(context, self.host)
for network in networks:
dev = self.driver.get_dev(network)
self.driver.update_dns(context, dev, network)
def update_dns(self, context, network_ids):
"""Called when fixed IP is allocated or deallocated."""
if CONF.fake_network:
return
LOG.debug('Update DNS for network ids: %s', network_ids)
networks = [network for network in
objects.NetworkList.get_by_host(context, self.host)
if network.multi_host and network.id in network_ids]
for network in networks:
dev = self.driver.get_dev(network)
self.driver.update_dns(context, dev, network)
def add_network_to_project(self, ctxt, project_id, network_uuid):
raise NotImplementedError()
class FlatManager(NetworkManager):
"""Basic network where no vlans are used.
FlatManager does not do any bridge or vlan creation. The user is
responsible for setting up whatever bridges are specified when creating
networks through nova-manage. This bridge needs to be created on all
compute hosts.
The idea is to create a single network for the host with a command like:
nova-manage network create 192.168.0.0/24 1 256. Creating multiple
networks for one manager is currently not supported, but could be
added by modifying allocate_fixed_ip and get_network to get the network
with new logic. Arbitrary lists of addresses in a single network can
be accomplished with manual db editing.
If flat_injected is True, the compute host will attempt to inject network
config into the guest. It attempts to modify /etc/network/interfaces and
currently only works on debian based systems. To support a wider range of
OSes, some other method may need to be devised to let the guest know which
IP it should be using so that it can configure itself. Perhaps an attached
disk or serial device with configuration info.
Metadata forwarding must be handled by the gateway, and since nova does
not do any setup in this mode, it must be done manually. Requests to
169.254.169.254 port 80 will need to be forwarded to the api server.
"""
timeout_fixed_ips = False
required_create_args = ['bridge']
def _allocate_fixed_ips(self, context, instance_id, host, networks,
**kwargs):
"""Calls allocate_fixed_ip once for each network."""
requested_networks = kwargs.get('requested_networks')
addresses_by_network = {}
if requested_networks is not None:
for request in requested_networks:
addresses_by_network[request.network_id] = request.address
for network in networks:
if network['uuid'] in addresses_by_network:
address = addresses_by_network[network['uuid']]
else:
address = None
self.allocate_fixed_ip(context, instance_id,
network, address=address)
def deallocate_fixed_ip(self, context, address, host=None, teardown=True,
instance=None):
"""Returns a fixed IP to the pool."""
super(FlatManager, self).deallocate_fixed_ip(context, address, host,
teardown,
instance=instance)
objects.FixedIP.disassociate_by_address(context, address)
def _setup_network_on_host(self, context, network):
"""Setup Network on this host."""
# NOTE(tr3buchet): this does not need to happen on every ip
# allocation, this functionality makes more sense in create_network
# but we'd have to move the flat_injected flag to compute
network.injected = CONF.flat_injected
network.save()
def _teardown_network_on_host(self, context, network):
"""Tear down network on this host."""
pass
# NOTE(justinsb): The floating ip functions are stub-implemented.
# We were throwing an exception, but this was messing up horizon.
# Timing makes it difficult to implement floating ips here, in Essex.
def get_floating_ip(self, context, id):
"""Returns a floating IP as a dict."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return None
def get_floating_pools(self, context):
"""Returns list of floating pools."""
# NOTE(maurosr) This method should be removed in future, replaced by
# get_floating_ip_pools. See bug #1091668
return {}
def get_floating_ip_pools(self, context):
"""Returns list of floating IP pools."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return {}
def get_floating_ip_by_address(self, context, address):
"""Returns a floating IP as a dict."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return None
def get_floating_ips_by_project(self, context):
"""Returns the floating IPs allocated to a project."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return []
def get_floating_ips_by_fixed_address(self, context, fixed_address):
"""Returns the floating IPs associated with a fixed_address."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
return []
# NOTE(hanlind): This method can be removed in version 2.0 of the RPC API
def allocate_floating_ip(self, context, project_id, pool):
"""Gets a floating IP from the pool."""
return None
# NOTE(hanlind): This method can be removed in version 2.0 of the RPC API
def deallocate_floating_ip(self, context, address,
affect_auto_assigned):
"""Returns a floating IP to the pool."""
return None
# NOTE(hanlind): This method can be removed in version 2.0 of the RPC API
def associate_floating_ip(self, context, floating_address, fixed_address,
affect_auto_assigned=False):
"""Associates a floating IP with a fixed IP.
Makes sure everything makes sense then calls _associate_floating_ip,
rpc'ing to correct host if i'm not it.
"""
return None
# NOTE(hanlind): This method can be removed in version 2.0 of the RPC API
def disassociate_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Disassociates a floating IP from its fixed IP.
Makes sure everything makes sense then calls _disassociate_floating_ip,
rpc'ing to correct host if i'm not it.
"""
return None
def migrate_instance_start(self, context, instance_uuid,
floating_addresses,
rxtx_factor=None, project_id=None,
source=None, dest=None):
pass
def migrate_instance_finish(self, context, instance_uuid,
floating_addresses, host=None,
rxtx_factor=None, project_id=None,
source=None, dest=None):
pass
def update_dns(self, context, network_ids):
"""Called when fixed IP is allocated or deallocated."""
pass
class FlatDHCPManager(RPCAllocateFixedIP, floating_ips.FloatingIP,
NetworkManager):
"""Flat networking with dhcp.
FlatDHCPManager will start up one dhcp server to give out addresses.
It never injects network settings into the guest. It also manages bridges.
Otherwise it behaves like FlatManager.
"""
SHOULD_CREATE_BRIDGE = True
DHCP = True
required_create_args = ['bridge']
def init_host(self):
"""Do any initialization that needs to be run if this is a
standalone service.
"""
ctxt = context.get_admin_context()
networks = objects.NetworkList.get_by_host(ctxt, self.host)
self.driver.iptables_manager.defer_apply_on()
self.l3driver.initialize(fixed_range=False, networks=networks)
super(FlatDHCPManager, self).init_host()
self.init_host_floating_ips()
self.driver.iptables_manager.defer_apply_off()
def _setup_network_on_host(self, context, network):
"""Sets up network on this host."""
network.dhcp_server = self._get_dhcp_ip(context, network)
self._initialize_network(network)
# NOTE(vish): if dhcp server is not set then don't dhcp
if not CONF.fake_network and network.enable_dhcp:
dev = self.driver.get_dev(network)
# NOTE(dprince): dhcp DB queries require elevated context
elevated = context.elevated()
self.driver.update_dhcp(elevated, dev, network)
if CONF.use_ipv6:
self.driver.update_ra(context, dev, network)
gateway = utils.get_my_linklocal(dev)
network.gateway_v6 = gateway
network.save()
def _teardown_network_on_host(self, context, network):
# NOTE(vish): if dhcp server is not set then don't dhcp
if not CONF.fake_network and network.enable_dhcp:
network['dhcp_server'] = self._get_dhcp_ip(context, network)
dev = self.driver.get_dev(network)
# NOTE(dprince): dhcp DB queries require elevated context
elevated = context.elevated()
self.driver.update_dhcp(elevated, dev, network)
def _get_network_dict(self, network):
"""Returns the dict representing necessary and meta network fields."""
# get generic network fields
network_dict = super(FlatDHCPManager, self)._get_network_dict(network)
# get flat dhcp specific fields
if self.SHOULD_CREATE_BRIDGE:
network_dict['should_create_bridge'] = self.SHOULD_CREATE_BRIDGE
if network.get('bridge_interface'):
network_dict['bridge_interface'] = network['bridge_interface']
if network.get('multi_host'):
network_dict['multi_host'] = network['multi_host']
return network_dict
class VlanManager(RPCAllocateFixedIP, floating_ips.FloatingIP, NetworkManager):
"""Vlan network with dhcp.
VlanManager is the most complicated. It will create a host-managed
vlan for each project. Each project gets its own subnet. The networks
and associated subnets are created with nova-manage using a command like:
nova-manage network create 10.0.0.0/8 3 16. This will create 3 networks
of 16 addresses from the beginning of the 10.0.0.0 range.
A dhcp server is run for each subnet, so each project will have its own.
For this mode to be useful, each project will need a vpn to access the
instances in its subnet.
"""
SHOULD_CREATE_BRIDGE = True
SHOULD_CREATE_VLAN = True
DHCP = True
required_create_args = ['bridge_interface']
def __init__(self, network_driver=None, *args, **kwargs):
super(VlanManager, self).__init__(network_driver=network_driver,
*args, **kwargs)
# NOTE(cfb) VlanManager doesn't enforce quotas on fixed IP addresses
# because a project is assigned an entire network.
self.quotas_cls = objects.QuotasNoOp
def init_host(self):
"""Do any initialization that needs to be run if this is a
standalone service.
"""
LOG.debug('Setup network on host %s', self.host)
ctxt = context.get_admin_context()
networks = objects.NetworkList.get_by_host(ctxt, self.host)
self.driver.iptables_manager.defer_apply_on()
self.l3driver.initialize(fixed_range=False, networks=networks)
NetworkManager.init_host(self)
self.init_host_floating_ips()
self.driver.iptables_manager.defer_apply_off()
def allocate_fixed_ip(self, context, instance_id, network, **kwargs):
"""Gets a fixed IP from the pool."""
LOG.debug('Allocate fixed IP on network %s', network['uuid'],
instance_uuid=instance_id)
# NOTE(mriedem): allocate the vif before associating the
# instance to reduce a race window where a previous instance
# was associated with the fixed IP and has released it, because
# release_fixed_ip will disassociate if allocated is False.
vif = objects.VirtualInterface.get_by_instance_and_network(
context, instance_id, network['id'])
if vif is None:
LOG.debug('vif for network %(network)s and instance '
'%(instance_id)s is used up, '
'trying to create new vif',
{'network': network['id'],
'instance_id': instance_id})
vif = self._add_virtual_interface(context,
instance_id, network['id'])
if kwargs.get('vpn', None):
address = network['vpn_private_address']
fip = objects.FixedIP.associate(context, str(address),
instance_id, network['id'],
reserved=True,
vif_id=vif.id)
else:
address = kwargs.get('address', None)
if address:
fip = objects.FixedIP.associate(context, str(address),
instance_id,
network['id'],
vif_id=vif.id)
else:
fip = objects.FixedIP.associate_pool(
context, network['id'], instance_id,
vif_id=vif.id)
address = fip.address
if not kwargs.get('vpn', None):
self._do_trigger_security_group_members_refresh_for_instance(
instance_id)
# NOTE(vish) This db query could be removed if we pass az and name
# (or the whole instance object).
instance = objects.Instance.get_by_uuid(context, instance_id)
name = instance.display_name
if self._validate_instance_zone_for_dns_domain(context, instance):
self.instance_dns_manager.create_entry(name, address,
"A",
self.instance_dns_domain)
self.instance_dns_manager.create_entry(instance_id, address,
"A",
self.instance_dns_domain)
self._setup_network_on_host(context, network)
LOG.debug('Allocated fixed IP %s on network %s', address,
network['uuid'], instance=instance)
return address
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force adds another network to a project."""
LOG.debug('Add network %s to project %s', network_uuid, project_id)
if network_uuid is not None:
network_id = self.get_network(context, network_uuid).id
else:
network_id = None
objects.Network.associate(context, project_id, network_id, force=True)
def associate(self, context, network_uuid, associations):
"""Associate or disassociate host or project to network."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
LOG.debug('Associate network %s: |%s|', network_uuid, associations)
network = self.get_network(context, network_uuid)
network_id = network.id
if 'host' in associations:
host = associations['host']
if host is None:
network.disassociate(context, network_id,
host=True, project=False)
else:
network.host = self.host
network.save()
if 'project' in associations:
project = associations['project']
if project is None:
network.disassociate(context, network_id,
host=False, project=True)
else:
network.associate(context, project, network_id, force=True)
def _get_network_by_id(self, context, network_id):
# NOTE(vish): Don't allow access to networks with project_id=None as
# these are networks that haven't been allocated to a
# project yet.
return objects.Network.get_by_id(context, network_id,
project_only=True)
def _get_networks_by_uuids(self, context, network_uuids):
# NOTE(vish): Don't allow access to networks with project_id=None as
# these are networks that haven't been allocated to a
# project yet.
networks = objects.NetworkList.get_by_uuids(
context, network_uuids, project_only=True)
networks.sort(key=lambda x: network_uuids.index(x.uuid))
return networks
def _get_networks_for_instance(self, context, instance_id, project_id,
requested_networks=None):
"""Determine which networks an instance should connect to."""
# get networks associated with project
if requested_networks is not None and len(requested_networks) != 0:
network_uuids = [request.network_id
for request in requested_networks]
networks = self._get_networks_by_uuids(context, network_uuids)
else:
# NOTE(vish): Allocates network on demand so requires admin.
networks = objects.NetworkList.get_by_project(
context.elevated(), project_id)
return networks
def create_networks(self, context, **kwargs):
"""Create networks based on parameters."""
self._convert_int_args(kwargs)
kwargs["vlan_start"] = kwargs.get("vlan_start") or CONF.vlan_start
kwargs["num_networks"] = (kwargs.get("num_networks") or
CONF.num_networks)
kwargs["network_size"] = (kwargs.get("network_size") or
CONF.network_size)
# Check that num_networks + vlan_start is not > 4094, fixes lp708025
if kwargs["num_networks"] + kwargs["vlan_start"] > 4094:
            raise ValueError(_('The sum of the number of networks and'
                               ' the vlan start cannot be greater'
                               ' than 4094'))
# Check that vlan is not greater than 4094 or less then 1
vlan_num = kwargs.get("vlan", None)
if vlan_num is not None:
try:
vlan_num = int(vlan_num)
except ValueError:
raise ValueError(_("vlan must be an integer"))
if vlan_num > 4094:
raise ValueError(_('The vlan number cannot be greater than'
' 4094'))
if vlan_num < 1:
raise ValueError(_('The vlan number cannot be less than 1'))
# check that num networks and network size fits in fixed_net
fixed_net = netaddr.IPNetwork(kwargs['cidr'])
if fixed_net.size < kwargs['num_networks'] * kwargs['network_size']:
raise ValueError(_('The network range is not '
'big enough to fit %(num_networks)s networks. Network '
'size is %(network_size)s') % kwargs)
kwargs['bridge_interface'] = (kwargs.get('bridge_interface') or
CONF.vlan_interface)
LOG.debug('Create network: |%s|', kwargs)
return NetworkManager.create_networks(
self, context, vpn=True, **kwargs)
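    # Worked example of the size check above (illustrative, not original
    # code): cidr='10.0.0.0/16' gives fixed_net.size == 65536, so
    # num_networks=3 with network_size=16 fits easily, whereas
    # num_networks=300 with network_size=256 would need 76800 addresses and
    # raises the "network range is not big enough" ValueError.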
@utils.synchronized('setup_network', external=True)
def _setup_network_on_host(self, context, network):
"""Sets up network on this host."""
if not network.vpn_public_address:
address = CONF.vpn_ip
network.vpn_public_address = address
network.save()
else:
address = network.vpn_public_address
network.dhcp_server = self._get_dhcp_ip(context, network)
self._initialize_network(network)
# NOTE(vish): only ensure this forward if the address hasn't been set
# manually.
if address == CONF.vpn_ip and hasattr(self.driver,
"ensure_vpn_forward"):
self.l3driver.add_vpn(CONF.vpn_ip,
network.vpn_public_port,
network.vpn_private_address)
if not CONF.fake_network:
dev = self.driver.get_dev(network)
# NOTE(dprince): dhcp DB queries require elevated context
if network.enable_dhcp:
elevated = context.elevated()
self.driver.update_dhcp(elevated, dev, network)
if CONF.use_ipv6:
self.driver.update_ra(context, dev, network)
gateway = utils.get_my_linklocal(dev)
network.gateway_v6 = gateway
network.save()
@utils.synchronized('setup_network', external=True)
def _teardown_network_on_host(self, context, network):
if not CONF.fake_network:
network['dhcp_server'] = self._get_dhcp_ip(context, network)
dev = self.driver.get_dev(network)
            # NOTE(ethuleau): For multi-hosted networks, if the network is no
            # longer used on this host and the VPN forwarding rule isn't
            # handled by the host, we delete the network gateway.
vpn_address = network['vpn_public_address']
if (CONF.teardown_unused_network_gateway and
network['multi_host'] and vpn_address != CONF.vpn_ip and
not objects.Network.in_use_on_host(context, network['id'],
self.host)):
LOG.debug("Remove unused gateway %s", network['bridge'])
if network.enable_dhcp:
self.driver.kill_dhcp(dev)
self.l3driver.remove_gateway(network)
if not self._uses_shared_ip(network):
fip = objects.FixedIP.get_by_address(context,
network.dhcp_server)
fip.allocated = False
fip.host = None
fip.save()
# NOTE(vish): if dhcp server is not set then don't dhcp
elif network.enable_dhcp:
# NOTE(dprince): dhcp DB queries require elevated context
elevated = context.elevated()
self.driver.update_dhcp(elevated, dev, network)
def _get_network_dict(self, network):
"""Returns the dict representing necessary and meta network fields."""
# get generic network fields
network_dict = super(VlanManager, self)._get_network_dict(network)
# get vlan specific network fields
if self.SHOULD_CREATE_BRIDGE:
network_dict['should_create_bridge'] = self.SHOULD_CREATE_BRIDGE
if self.SHOULD_CREATE_VLAN:
network_dict['should_create_vlan'] = self.SHOULD_CREATE_VLAN
for k in ['vlan', 'bridge_interface', 'multi_host']:
if network.get(k):
network_dict[k] = network[k]
return network_dict
@property
def _bottom_reserved_ips(self):
"""Number of reserved IPs at the bottom of the range."""
return super(VlanManager, self)._bottom_reserved_ips + 1 # vpn server
@property
def _top_reserved_ips(self):
"""Number of reserved IPs at the top of the range."""
parent_reserved = super(VlanManager, self)._top_reserved_ips
return parent_reserved + CONF.cnt_vpn_clients
|
py | 1a44a496f7d8776b4a53e874ebf60277b6d78c42 | """
PipelineWise CLI - Pipelinewise class
"""
import logging
import os
import shutil
import signal
import sys
import json
import copy
import psutil
import pidfile
from datetime import datetime
from time import time
from typing import Dict, Optional, List
from joblib import Parallel, delayed, parallel_backend
from tabulate import tabulate
from . import utils
from . import commands
from .commands import TapParams, TargetParams, TransformParams
from .config import Config
from .alert_sender import AlertSender
from .alert_handlers.base_alert_handler import BaseAlertHandler
# pylint: disable=too-many-lines,too-many-instance-attributes,too-many-public-methods
class PipelineWise:
"""PipelineWise main Class"""
INCREMENTAL = 'INCREMENTAL'
LOG_BASED = 'LOG_BASED'
FULL_TABLE = 'FULL_TABLE'
STATUS_SUCCESS = 'SUCCESS'
STATUS_FAILED = 'FAILED'
TRANSFORM_FIELD_CONNECTOR_NAME = 'transform-field'
def __init__(self, args, config_dir, venv_dir, profiling_dir=None):
self.profiling_mode = args.profiler
self.profiling_dir = profiling_dir
self.drop_pg_slot = False
self.args = args
self.logger = logging.getLogger(__name__)
self.config_dir = config_dir
self.venv_dir = venv_dir
self.extra_log = args.extra_log
self.pipelinewise_bin = os.path.join(self.venv_dir, 'cli', 'bin', 'pipelinewise')
self.config_path = os.path.join(self.config_dir, 'config.json')
self.load_config()
self.alert_sender = AlertSender(self.config.get('alert_handlers'))
if args.tap != '*':
self.tap = self.get_tap(args.target, args.tap)
self.tap_bin = self.get_connector_bin(self.tap['type'])
self.tap_python_bin = self.get_connector_python_bin(self.tap['type'])
if args.target != '*':
self.target = self.get_target(args.target)
self.target_bin = self.get_connector_bin(self.target['type'])
self.target_python_bin = self.get_connector_python_bin(self.target['type'])
self.transform_field_bin = self.get_connector_bin(self.TRANSFORM_FIELD_CONNECTOR_NAME)
self.transform_field_python_bin = self.get_connector_python_bin(self.TRANSFORM_FIELD_CONNECTOR_NAME)
self.tap_run_log_file = None
# Catch SIGINT and SIGTERM to exit gracefully
for sig in [signal.SIGINT, signal.SIGTERM]:
signal.signal(sig, self._exit_gracefully)
def send_alert(self,
message: str,
level: str = BaseAlertHandler.ERROR,
exc: Exception = None) -> dict:
"""
Send alert messages to every alert handler if sender is not disabled for the tap
Args:
message: the alert message
level: alert level
exc: optional exception that triggered the alert
Returns:
Dictionary with number of successfully sent alerts
"""
stats = {'sent': 0}
send_alert = self.tap.get('send_alert', True)
if send_alert:
stats = self.alert_sender.send_to_all_handlers(message=message, level=level, exc=exc)
return stats
def create_consumable_target_config(self, target_config, tap_inheritable_config):
"""
Create consumable target config by appending "inheritable" config to the common target config
"""
dict_a, dict_b = {}, {}
try:
dict_a = utils.load_json(target_config)
dict_b = utils.load_json(tap_inheritable_config)
# Copy everything from dictB into dictA - Not a real merge
dict_a.update(dict_b)
# Save the new dict as JSON into a temp file
tempfile_path = utils.create_temp_file(dir=self.get_temp_dir(),
prefix='target_config_',
suffix='.json')[1]
utils.save_json(dict_a, tempfile_path)
return tempfile_path
except Exception as exc:
raise Exception(f'Cannot merge JSON files {dict_a} {dict_b} - {exc}') from exc
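    # Hedged example of the merge above (added for illustration): if
    # target_config holds {"dbname": "analytics", "default_target_schema": "public"}
    # and tap_inheritable_config holds {"default_target_schema": "tap_mysql"},
    # the temp file written out contains
    # {"dbname": "analytics", "default_target_schema": "tap_mysql"} -- the
    # tap-level keys simply overwrite the target-level ones.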
# pylint: disable=too-many-statements,too-many-branches,too-many-nested-blocks,too-many-locals,too-many-arguments
def create_filtered_tap_properties(self, target_type, tap_type, tap_properties, tap_state, filters,
create_fallback=False):
"""
Create a filtered version of tap properties file based on specific filter conditions.
Return values:
            1) A temporary JSON file in which only the tables that meet the
            filter criteria are selected to sync
2) List of tap_stream_ids where filter criteria matched
3) OPTIONAL when create_fallback is True:
                Temporary JSON file with tables that don't meet the
filter criteria
4) OPTIONAL when create_fallback is True:
List of tap_stream_ids where filter criteria don't match
"""
# Get filter conditions with default values from input dictionary
# Nothing selected by default
f_selected = filters.get('selected', None)
f_target_type = filters.get('target_type', None)
f_tap_type = filters.get('tap_type', None)
f_replication_method = filters.get('replication_method', None)
f_initial_sync_required = filters.get('initial_sync_required', None)
# Lists of tables that meet and don't meet the filter criteria
filtered_tap_stream_ids = []
fallback_filtered_stream_ids = []
self.logger.debug('Filtering properties JSON by conditions: %s', filters)
try:
# Load JSON files
properties = utils.load_json(tap_properties)
state = utils.load_json(tap_state)
# Create a dictionary for tables that don't meet filter criteria
fallback_properties = copy.deepcopy(properties) if create_fallback else {}
# Foreach stream (table) in the original properties
for stream_idx, stream in enumerate(properties.get('streams', tap_properties)):
initial_sync_required = False
# Collect required properties from the properties file
tap_stream_id = stream.get('tap_stream_id')
table_name = stream.get('table_name')
metadata = stream.get('metadata', [])
# Collect further properties from the properties file under the metadata key
table_meta = {}
meta_idx = 0
for meta_idx, meta in enumerate(metadata):
if isinstance(meta, dict) and len(meta.get('breadcrumb', [])) == 0:
table_meta = meta.get('metadata')
break
                # Make sure the stream has the right metadata:
                # to be safe, raise an exception if no suitable metadata entry was found.
if not table_meta:
self.logger.error('Stream %s has no metadata with no breadcrumbs: %s.', tap_stream_id, metadata)
raise Exception(f'Missing metadata in stream {tap_stream_id}')
selected = table_meta.get('selected', False)
replication_method = table_meta.get('replication-method', None)
# Detect if initial sync is required. Look into the state file, get the bookmark
# for the current stream (table) and if valid bookmark doesn't exist then
# initial sync is required
bookmarks = state.get('bookmarks', {}) if isinstance(state, dict) else {}
new_stream = False
# if stream not in bookmarks, then it's a new table
if tap_stream_id not in bookmarks:
new_stream = True
initial_sync_required = True
else:
stream_bookmark = bookmarks[tap_stream_id]
if self._is_initial_sync_required(replication_method, stream_bookmark):
initial_sync_required = True
# Compare actual values to the filter conditions.
# Set the "selected" key to True if actual values meet the filter criteria
# Set the "selected" key to False if the actual values don't meet the filter criteria
# pylint: disable=too-many-boolean-expressions
if (
(f_selected is None or selected == f_selected) and
(f_target_type is None or target_type in f_target_type) and
(f_tap_type is None or tap_type in f_tap_type) and
(f_replication_method is None or replication_method in f_replication_method) and
(f_initial_sync_required is None or initial_sync_required == f_initial_sync_required)
):
self.logger.debug("""Filter condition(s) matched:
Table : %s
Tap Stream ID : %s
Selected : %s
Replication Method : %s
Init Sync Required : %s
""", table_name, tap_stream_id, selected, replication_method, initial_sync_required)
# Filter condition matched: mark table as selected to sync
properties['streams'][stream_idx]['metadata'][meta_idx]['metadata']['selected'] = True
filtered_tap_stream_ids.append(tap_stream_id)
# Filter condition matched:
                    # if the stream is a new table and is a singer stream, then mark it as selected to sync in
                    # the fallback properties as well if the table is selected in the original properties.
# Otherwise, mark it as not selected
if create_fallback:
if new_stream and replication_method in [self.INCREMENTAL, self.LOG_BASED]:
fallback_properties['streams'][stream_idx]['metadata'][meta_idx]['metadata'][
'selected'] = True
if selected:
fallback_filtered_stream_ids.append(tap_stream_id)
else:
fallback_properties['streams'][stream_idx]['metadata'][meta_idx]['metadata'][
'selected'] = False
else:
# Filter condition didn't match: mark table as not selected to sync
properties['streams'][stream_idx]['metadata'][meta_idx]['metadata']['selected'] = False
# Filter condition didn't match: mark table as selected to sync in the fallback properties
# Fallback only if the table is selected in the original properties
if create_fallback and selected is True:
fallback_properties['streams'][stream_idx]['metadata'][meta_idx]['metadata']['selected'] = True
fallback_filtered_stream_ids.append(tap_stream_id)
# Save the generated properties file(s) and return
# Fallback required: Save filtered and fallback properties JSON
if create_fallback:
# Save to files: filtered and fallback properties
temp_properties_path = utils.create_temp_file(dir=self.get_temp_dir(),
prefix='properties_',
suffix='.json')[1]
utils.save_json(properties, temp_properties_path)
temp_fallback_properties_path = utils.create_temp_file(dir=self.get_temp_dir(),
prefix='properties_',
suffix='.json')[1]
utils.save_json(fallback_properties, temp_fallback_properties_path)
return temp_properties_path, \
filtered_tap_stream_ids, \
temp_fallback_properties_path, \
fallback_filtered_stream_ids
# Fallback not required: Save only the filtered properties JSON
temp_properties_path = utils.create_temp_file(dir=self.get_temp_dir(),
prefix='properties_',
suffix='.json')[1]
utils.save_json(properties, temp_properties_path)
return temp_properties_path, filtered_tap_stream_ids
except Exception as exc:
raise Exception(f'Cannot create JSON file - {exc}') from exc
def load_config(self):
"""
Load configuration
"""
self.logger.debug('Loading config at %s', self.config_path)
config = utils.load_json(self.config_path)
if config:
self.config = config
else:
self.config = {}
def get_temp_dir(self):
"""
Returns the tap specific temp directory
"""
return os.path.join(self.config_dir, 'tmp')
def get_tap_dir(self, target_id, tap_id):
"""
Get absolute path of a tap directory
"""
return os.path.join(self.config_dir, target_id, tap_id)
def get_tap_log_dir(self, target_id, tap_id):
"""
Get absolute path of a tap log directory
"""
return os.path.join(self.get_tap_dir(target_id, tap_id), 'log')
def get_target_dir(self, target_id):
"""
Get absolute path of a target directory
"""
return os.path.join(self.config_dir, target_id)
def get_connector_bin(self, connector_type):
"""
Get absolute path of a connector executable
"""
return os.path.join(self.venv_dir, connector_type, 'bin', connector_type)
def get_connector_python_bin(self, connector_type):
"""
Get absolute path of a connector python command
"""
return os.path.join(self.venv_dir, connector_type, 'bin', 'python')
@classmethod
def get_connector_files(cls, connector_dir):
"""
Get connector file paths
"""
return {
'config': os.path.join(connector_dir, 'config.json'),
'inheritable_config': os.path.join(connector_dir, 'inheritable_config.json'),
'properties': os.path.join(connector_dir, 'properties.json'),
'state': os.path.join(connector_dir, 'state.json'),
'transformation': os.path.join(connector_dir, 'transformation.json'),
'selection': os.path.join(connector_dir, 'selection.json'),
'pidfile': os.path.join(connector_dir, 'pipelinewise.pid')
}
def get_targets(self):
"""
Get every target
"""
self.logger.debug('Getting targets from %s', self.config_path)
self.load_config()
try:
targets = self.config.get('targets', [])
except Exception as exc:
raise Exception('Targets not defined') from exc
return targets
def get_target(self, target_id: str) -> Dict:
"""
Get target by id
"""
self.logger.debug('Getting %s target', target_id)
targets = self.get_targets()
target = next((item for item in targets if item['id'] == target_id), False)
if not target:
raise Exception(f'Cannot find {target_id} target')
target_dir = self.get_target_dir(target_id)
if os.path.isdir(target_dir):
target['files'] = self.get_connector_files(target_dir)
else:
raise Exception(f'Cannot find target at {target_dir}')
return target
def get_taps(self, target_id):
"""
Get every tap from a specific target
"""
self.logger.debug('Getting taps from %s target', target_id)
target = self.get_target(target_id)
try:
taps = target['taps']
# Add tap status
for tap_idx, tap in enumerate(taps):
taps[tap_idx]['status'] = self.detect_tap_status(target_id, tap['id'])
except Exception as exc:
raise Exception(f'No taps defined for {target_id} target') from exc
return taps
def get_tap(self, target_id, tap_id):
"""
Get tap by id from a specific target
"""
self.logger.debug('Getting %s tap from target %s', tap_id, target_id)
taps = self.get_taps(target_id)
tap = next((item for item in taps if item['id'] == tap_id), False)
if not tap:
raise Exception(f'Cannot find {tap_id} tap in {target_id} target')
tap_dir = self.get_tap_dir(target_id, tap_id)
if os.path.isdir(tap_dir):
tap['files'] = self.get_connector_files(tap_dir)
else:
raise Exception(f'Cannot find tap at {tap_dir}')
# Add target and status details
tap['target'] = self.get_target(target_id)
tap['status'] = self.detect_tap_status(target_id, tap_id)
return tap
# pylint: disable=too-many-branches,too-many-statements,too-many-nested-blocks,too-many-locals
def merge_schemas(self, old_schema, new_schema):
"""
Merge two schemas
"""
schema_with_diff = new_schema
if not old_schema:
schema_with_diff = new_schema
else:
new_streams = new_schema['streams']
old_streams = old_schema['streams']
for new_stream_idx, new_stream in enumerate(new_streams):
new_tap_stream_id = new_stream['tap_stream_id']
old_stream = next((item for item in old_streams if item['tap_stream_id'] == new_tap_stream_id), False)
# Is this a new stream?
if not old_stream:
new_schema['streams'][new_stream_idx]['is-new'] = True
# Copy stream selection from the old properties
else:
# Find table specific metadata entries in the old and new streams
new_stream_table_mdata_idx = 0
old_stream_table_mdata_idx = 0
try:
new_stream_table_mdata_idx = \
[i for i, md in enumerate(new_stream['metadata']) if md['breadcrumb'] == []][0]
old_stream_table_mdata_idx = \
[i for i, md in enumerate(old_stream['metadata']) if md['breadcrumb'] == []][0]
except Exception:
pass
# Copy is-new flag from the old stream
try:
new_schema['streams'][new_stream_idx]['is-new'] = old_stream['is-new']
except Exception:
pass
# Copy selected from the old stream
try:
new_schema['streams'][new_stream_idx]['metadata'][new_stream_table_mdata_idx]['metadata'][
'selected'] = old_stream['metadata'][old_stream_table_mdata_idx]['metadata']['selected']
except Exception:
pass
# Copy replication method from the old stream
try:
new_schema['streams'][new_stream_idx]['metadata'] \
[new_stream_table_mdata_idx]['metadata']['replication-method'] = \
old_stream['metadata'][old_stream_table_mdata_idx]['metadata']['replication-method']
except Exception:
pass
# Copy replication key from the old stream
try:
new_schema['streams'][new_stream_idx]['metadata'][new_stream_table_mdata_idx] \
['metadata']['replication-key'] = \
old_stream['metadata'][old_stream_table_mdata_idx]['metadata'][
'replication-key']
except Exception:
pass
# Is this a new or modified field?
new_fields = new_schema['streams'][new_stream_idx]['schema']['properties']
old_fields = old_stream['schema']['properties']
for new_field_key in new_fields:
new_field = new_fields[new_field_key]
new_field_mdata_idx = -1
# Find new field metadata index
for i, mdata in enumerate(new_schema['streams'][new_stream_idx]['metadata']):
if len(mdata['breadcrumb']) == 2 and mdata['breadcrumb'][0] == 'properties' and \
mdata['breadcrumb'][1] == new_field_key:
new_field_mdata_idx = i
# Field exists
if new_field_key in old_fields.keys():
old_field = old_fields[new_field_key]
old_field_mdata_idx = -1
# Find old field metadata index
for i, mdata in enumerate(old_stream['metadata']):
if len(mdata['breadcrumb']) == 2 and mdata['breadcrumb'][0] == 'properties' and \
mdata['breadcrumb'][1] == new_field_key:
old_field_mdata_idx = i
new_mdata = new_schema['streams'][new_stream_idx]['metadata'][new_field_mdata_idx][
'metadata']
old_mdata = old_stream['metadata'][old_field_mdata_idx]['metadata']
# Copy is-new flag from the old properties
try:
new_mdata['is-new'] = old_mdata['is-new']
except Exception:
pass
# Copy is-modified flag from the old properties
try:
new_mdata['is-modified'] = old_mdata['is-modified']
except Exception:
pass
# Copy field selection from the old properties
try:
new_mdata['selected'] = old_mdata['selected']
except Exception:
pass
# Field exists and type is the same - Do nothing more in the schema
if new_field == old_field:
self.logger.debug('Field exists in %s stream with the same type: %s: %s',
new_tap_stream_id, new_field_key, new_field)
# Field exists but types are different - Mark the field as modified in the metadata
else:
self.logger.debug('Field exists in %s stream but types are different: %s: %s',
new_tap_stream_id, new_field_key, new_field)
try:
new_schema['streams'][new_stream_idx]['metadata'][new_field_mdata_idx]['metadata'][
'is-modified'] = True
new_schema['streams'][new_stream_idx]['metadata'][new_field_mdata_idx]['metadata'][
'is-new'] = False
except Exception:
pass
# New field - Mark the field as new in the metadata
else:
self.logger.debug('New field in stream %s: %s: %s', new_tap_stream_id, new_field_key,
new_field)
try:
new_schema['streams'][new_stream_idx]['metadata'][new_field_mdata_idx]['metadata'][
'is-new'] = True
except Exception:
pass
schema_with_diff = new_schema
return schema_with_diff
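# For reference, a sketch of the singer catalog metadata shapes that merge_schemas walks
# (stream and column names below are hypothetical):
#
#     table-level entry : {'breadcrumb': [],
#                          'metadata': {'selected': True,
#                                       'replication-method': 'INCREMENTAL',
#                                       'replication-key': 'updated_at'}}
#     column-level entry: {'breadcrumb': ['properties', 'some_column'],
#                          'metadata': {'selected': True, 'is-new': False, 'is-modified': False}}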
def make_default_selection(self, schema, selection_file):
"""
Select the streams to sync in schema from a selection JSON file
"""
if os.path.isfile(selection_file):
self.logger.debug('Loading pre-defined selection from %s', selection_file)
tap_selection = utils.load_json(selection_file)
selection = tap_selection['selection']
streams = schema['streams']
for stream_idx, stream in enumerate(streams):
tap_stream_id = stream.get('tap_stream_id')
tap_stream_sel = False
for sel in selection:
if 'tap_stream_id' in sel and tap_stream_id.lower() == sel['tap_stream_id'].lower():
tap_stream_sel = sel
# Find table specific metadata entries in the old and new streams
try:
stream_table_mdata_idx = [i for i, md in enumerate(stream['metadata']) if md['breadcrumb'] == []][0]
except Exception as exc:
raise Exception(f'Metadata of stream {tap_stream_id} doesn\'t have an empty breadcrumb') from exc
if tap_stream_sel:
self.logger.debug('Mark %s tap_stream_id as selected with properties %s', tap_stream_id,
tap_stream_sel)
schema['streams'][stream_idx]['metadata'][stream_table_mdata_idx]['metadata']['selected'] = True
if 'replication_method' in tap_stream_sel:
schema['streams'][stream_idx]['metadata'][stream_table_mdata_idx]['metadata'][
'replication-method'] = tap_stream_sel['replication_method']
if 'replication_key' in tap_stream_sel:
schema['streams'][stream_idx]['metadata'][stream_table_mdata_idx]['metadata'][
'replication-key'] = tap_stream_sel['replication_key']
else:
self.logger.debug('Mark %s tap_stream_id as not selected', tap_stream_id)
schema['streams'][stream_idx]['metadata'][stream_table_mdata_idx]['metadata']['selected'] = False
return schema
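# A sketch of the selection JSON this method expects (stream names and replication keys
# are hypothetical; replication_method and replication_key are optional per stream):
#
#     {
#         "selection": [
#             {"tap_stream_id": "public-orders",
#              "replication_method": "INCREMENTAL",
#              "replication_key": "updated_at"},
#             {"tap_stream_id": "public-customers"}
#         ]
#     }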
def init(self):
"""
Initialise and create a sample project. The project will contain sample YAML configuration for every
supported tap and target connectors.
"""
self.logger.info('Initialising new project %s...', self.args.name)
project_dir = os.path.join(os.getcwd(), self.args.name)
# Create project dir if not exists
if os.path.exists(project_dir):
self.logger.error('Directory exists and cannot create new project: %s', self.args.name)
sys.exit(1)
else:
os.mkdir(project_dir)
for yaml in sorted(utils.get_sample_file_paths()):
yaml_basename = os.path.basename(yaml)
dst = os.path.join(project_dir, yaml_basename)
self.logger.info('Creating %s...', yaml_basename)
shutil.copyfile(yaml, dst)
def test_tap_connection(self):
"""
Test the tap connection. It will connect to the data source that is defined in the tap and will return
success if it’s available.
"""
tap_id = self.tap['id']
tap_type = self.tap['type']
target_id = self.target['id']
target_type = self.target['type']
self.logger.info('Testing %s (%s) tap connection in %s (%s) target', tap_id, tap_type, target_id, target_type)
# Generate and run the command to run the tap directly
# We will use the discover option to test connection
tap_config = self.tap['files']['config']
command = f'{self.tap_bin} --config {tap_config} --discover'
if self.profiling_mode:
dump_file = os.path.join(self.profiling_dir, f'tap_{tap_id}.pstat')
command = f'{self.tap_python_bin} -m cProfile -o {dump_file} {command}'
result = commands.run_command(command)
# Get output and errors from tap
# pylint: disable=unused-variable
returncode, new_schema, tap_output = result
if returncode != 0:
self.logger.error('Testing tap connection (%s - %s) FAILED', target_id, tap_id)
sys.exit(1)
# If the connection succeeded then the response needs to be a valid JSON string
if not utils.is_json(new_schema):
self.logger.error('Schema discovered by %s (%s) is not a valid JSON.', tap_id, tap_type)
sys.exit(1)
else:
self.logger.info('Testing tap connection (%s - %s) PASSED', target_id, tap_id)
# pylint: disable=too-many-locals,inconsistent-return-statements
def discover_tap(self, tap=None, target=None):
"""
Run a specific tap in discovery mode. Discovery mode is connecting to the data source
and collecting information that is required for running the tap.
"""
if tap is None:
tap = self.tap
if target is None:
target = self.target
# Define tap props
tap_id = tap.get('id')
tap_type = tap.get('type')
tap_config_file = tap.get('files', {}).get('config')
tap_properties_file = tap.get('files', {}).get('properties')
tap_selection_file = tap.get('files', {}).get('selection')
tap_bin = self.get_connector_bin(tap_type)
tap_python_bin = self.get_connector_python_bin(tap_type)
# Define target props
target_id = target.get('id')
target_type = target.get('type')
self.logger.info('Discovering %s (%s) tap in %s (%s) target...', tap_id, tap_type, target_id, target_type)
# Generate and run the command to run the tap directly
command = f'{tap_bin} --config {tap_config_file} --discover'
if self.profiling_mode:
dump_file = os.path.join(self.profiling_dir, f'tap_{tap_id}.pstat')
command = f'{tap_python_bin} -m cProfile -o {dump_file} {command}'
self.logger.debug('Discovery command: %s', command)
result = commands.run_command(command)
# Get output and errors from tap
# pylint: disable=unused-variable
returncode, new_schema, output = result
if returncode != 0:
return f'{target_id} - {tap_id}: {output}'
# Convert JSON string to object
try:
new_schema = json.loads(new_schema)
except Exception as exc:
self.logger.exception(exc)
return f'Schema discovered by {tap_id} ({tap_type}) is not a valid JSON.'
# Merge the old and new schemas and diff changes
old_schema = utils.load_json(tap_properties_file)
if old_schema:
schema_with_diff = self.merge_schemas(old_schema, new_schema)
else:
schema_with_diff = new_schema
# Make selection from selection.json if exists
try:
schema_with_diff = self.make_default_selection(schema_with_diff, tap_selection_file)
schema_with_diff = utils.delete_keys_from_dict(
self.make_default_selection(schema_with_diff, tap_selection_file),
# Removing multipleOf json schema validations from properties.json,
# that's causing run time issues
['multipleOf'])
except Exception as exc:
return f'Cannot load selection JSON at {tap_selection_file}. {str(exc)}'
# Post import checks
post_import_errors = self._run_post_import_tap_checks(tap, schema_with_diff, target_id)
if len(post_import_errors) > 0:
return f'Post import tap checks failed in tap {tap_id}: {post_import_errors}'
# Save the new catalog into the tap
try:
self.logger.info('Writing new properties file with changes into %s', tap_properties_file)
utils.save_json(schema_with_diff, tap_properties_file)
except Exception as exc:
return f'Cannot save file. {str(exc)}'
def detect_tap_status(self, target_id, tap_id):
"""
Detect status of a tap
"""
self.logger.debug('Detecting %s tap status in %s target', tap_id, target_id)
tap_dir = self.get_tap_dir(target_id, tap_id)
log_dir = self.get_tap_log_dir(target_id, tap_id)
connector_files = self.get_connector_files(tap_dir)
status = {
'currentStatus': 'unknown',
'lastStatus': 'unknown',
'lastTimestamp': None
}
# Tap exists but configuration not completed
if not os.path.isfile(connector_files['config']):
status['currentStatus'] = 'not-configured'
# Tap exists and has log in running status
elif os.path.isdir(log_dir) and len(utils.search_files(log_dir, patterns=['*.log.running'])) > 0:
status['currentStatus'] = 'running'
# Configured and not running
else:
status['currentStatus'] = 'ready'
# Get last run instance
if os.path.isdir(log_dir):
log_files = utils.search_files(log_dir, patterns=['*.log.success', '*.log.failed'], sort=True)
if len(log_files) > 0:
last_log_file = log_files[0]
log_attr = utils.extract_log_attributes(last_log_file)
status['lastStatus'] = log_attr['status']
status['lastTimestamp'] = log_attr['timestamp']
return status
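# Example of the status dict returned by detect_tap_status (values are illustrative; the
# exact timestamp format comes from utils.extract_log_attributes):
#
#     {'currentStatus': 'ready', 'lastStatus': 'success', 'lastTimestamp': '20210101_120000'}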
def status(self):
"""
Prints a status summary table of every imported pipeline with their tap and target.
"""
targets = self.get_targets()
tab_headers = [
'Tap ID',
'Tap Type',
'Target ID',
'Target Type',
'Enabled',
'Status',
'Last Sync',
'Last Sync Result'
]
tab_body = []
pipelines = 0
for target in targets:
taps = self.get_taps(target['id'])
for tap in taps:
tab_body.append([
tap.get('id', '<Unknown>'),
tap.get('type', '<Unknown>'),
target.get('id', '<Unknown>'),
target.get('type', '<Unknown>'),
tap.get('enabled', '<Unknown>'),
tap.get('status', {}).get('currentStatus', '<Unknown>'),
tap.get('status', {}).get('lastTimestamp', '<Unknown>'),
tap.get('status', {}).get('lastStatus', '<Unknown>')
])
pipelines += 1
print(tabulate(tab_body, headers=tab_headers, tablefmt='simple'))
print(f'{pipelines} pipeline(s)')
def run_tap_singer(self,
tap: TapParams,
target: TargetParams,
transform: TransformParams,
stream_buffer_size: int = 0) -> str:
"""
Generate and run piped shell command to sync tables using singer taps and targets
"""
# Build the piped executable command
command = commands.build_singer_command(tap=tap,
target=target,
transform=transform,
stream_buffer_size=stream_buffer_size,
stream_buffer_log_file=self.tap_run_log_file,
profiling_mode=self.profiling_mode,
profiling_dir=self.profiling_dir)
# Do not run if another instance is already running
log_dir = os.path.dirname(self.tap_run_log_file)
if os.path.isdir(log_dir) and len(utils.search_files(log_dir, patterns=['*.log.running'])) > 0:
self.logger.info(
'Failed to run. Another instance of the same tap is already running. '
'Log file detected in running status at %s', log_dir)
sys.exit(1)
start = None
state = None
def update_state_file(line: str) -> str:
# Update state variable with latest state
if utils.is_state_message(line):
# if it has been more than 2 seconds since we last updated the state file
# update it again with newly received state
nonlocal start, state
if start is None or time() - start >= 2:
with open(tap.state, 'w') as state_file:
state_file.write(line)
# Update start time to be the current time.
start = time()
# Keep track of state message so that we do one last file update at the end of the run_tap_singer
# function. This is to avoid the edge case where the last state message and the one before it are
# less than 2 sec apart.
state = line
return line
# Singer tap is running in subprocess.
# Collect the formatted logs and log it in the main PipelineWise process as well.
# Logs are already formatted at this stage so not using logging functions to avoid double formatting.
def update_state_file_with_extra_log(line: str) -> str:
sys.stdout.write(line)
return update_state_file(line)
# Run command with update_state_file as a callback to call for every stdout line
if self.extra_log:
commands.run_command(command, self.tap_run_log_file, update_state_file_with_extra_log)
else:
commands.run_command(command, self.tap_run_log_file, update_state_file)
# update the state file one last time to make sure it always has the last state message.
if state is not None:
with open(tap.state, 'w') as statefile:
statefile.write(state)
def run_tap_fastsync(self, tap: TapParams, target: TargetParams, transform: TransformParams):
"""
Generating and running shell command to sync tables using the native fastsync components
"""
# Build the fastsync executable command
command = commands.build_fastsync_command(tap=tap,
target=target,
transform=transform,
venv_dir=self.venv_dir,
temp_dir=self.get_temp_dir(),
tables=self.args.tables,
profiling_mode=self.profiling_mode,
profiling_dir=self.profiling_dir,
drop_pg_slot=self.drop_pg_slot)
# Do not run if another instance is already running
log_dir = os.path.dirname(self.tap_run_log_file)
if os.path.isdir(log_dir) and len(utils.search_files(log_dir, patterns=['*.log.running'])) > 0:
self.logger.info(
'Failed to run. Another instance of the same tap is already running. '
'Log file detected in running status at %s', log_dir)
sys.exit(1)
# Fastsync is running in subprocess.
# Collect the formatted logs and log it in the main PipelineWise process as well
# Logs are already formatted at this stage so not using logging functions to avoid double formatting.
def add_fastsync_output_to_main_logger(line: str) -> str:
sys.stdout.write(line)
return line
if self.extra_log:
# Run command and copy fastsync output to main logger
commands.run_command(command, self.tap_run_log_file, add_fastsync_output_to_main_logger)
else:
# Run command
commands.run_command(command, self.tap_run_log_file)
# pylint: disable=too-many-statements,too-many-locals
def run_tap(self):
"""
Generating command(s) to run tap to sync data from source to target
The generated commands can use one or multiple commands of:
1. Fastsync:
Native and optimised component to sync tables from a
specific type of tap into a specific type of target.
This command is used automatically when the FULL_TABLE
replication method is selected or when an initial sync is required.
2. Singer Taps and Targets:
Dynamic components following the singer specification to
sync tables from multiple sources to multiple targets.
This command is used automatically when the INCREMENTAL
or LOG_BASED replication method is selected. FULL_TABLE
replication does not use the singer components because
they are too slow to sync large tables.
"""
tap_id = self.tap['id']
tap_type = self.tap['type']
target_id = self.target['id']
target_type = self.target['type']
stream_buffer_size = self.tap.get('stream_buffer_size', commands.DEFAULT_STREAM_BUFFER_SIZE)
self.logger.info('Running %s tap in %s target', tap_id, target_id)
# Run only if tap enabled
if not self.tap.get('enabled', False):
self.logger.info('Tap %s is not enabled.', self.tap['name'])
sys.exit(1)
# Run only if not running
tap_status = self.detect_tap_status(target_id, tap_id)
if tap_status['currentStatus'] == 'running':
self.logger.info('Tap %s is currently running.', self.tap['name'])
sys.exit(1)
# Generate and run the command to run the tap directly
tap_config = self.tap['files']['config']
tap_inheritable_config = self.tap['files']['inheritable_config']
tap_properties = self.tap['files']['properties']
tap_state = self.tap['files']['state']
tap_transformation = self.tap['files']['transformation']
target_config = self.target['files']['config']
# Some target attributes can be passed and overridden by the tap (aka. inheritable config)
# We merge the two configs and use that with the target
cons_target_config = self.create_consumable_target_config(target_config, tap_inheritable_config)
# Output will be redirected into target and tap specific log directory
log_dir = self.get_tap_log_dir(target_id, tap_id)
current_time = datetime.utcnow().strftime('%Y%m%d_%H%M%S')
# Create fastsync and singer specific filtered tap properties that contain only
# the tables that need to be synced by the specific command
(
tap_properties_fastsync,
fastsync_stream_ids,
tap_properties_singer,
singer_stream_ids
) = self.create_filtered_tap_properties(
target_type,
tap_type,
tap_properties,
tap_state, {
'selected': True,
'target_type': ['target-snowflake', 'target-redshift', 'target-postgres'],
'tap_type': ['tap-mysql', 'tap-postgres', 'tap-s3-csv', 'tap-mongodb'],
'initial_sync_required': True
},
create_fallback=True)
start_time = datetime.now()
try:
with pidfile.PIDFile(self.tap['files']['pidfile']):
target_params = TargetParams(id=target_id,
type=target_type,
bin=self.target_bin,
python_bin=self.target_python_bin,
config=cons_target_config)
transform_params = TransformParams(bin=self.transform_field_bin,
python_bin=self.transform_field_python_bin,
config=tap_transformation,
tap_id=tap_id,
target_id=target_id)
# Run fastsync for FULL_TABLE replication method
if len(fastsync_stream_ids) > 0:
self.logger.info('Table(s) selected to sync by fastsync: %s', fastsync_stream_ids)
self.tap_run_log_file = os.path.join(log_dir, f'{target_id}-{tap_id}-{current_time}.fastsync.log')
tap_params = TapParams(id=tap_id,
type=tap_type,
bin=self.tap_bin,
python_bin=self.tap_python_bin,
config=tap_config,
properties=tap_properties_fastsync,
state=tap_state)
self.run_tap_fastsync(tap=tap_params,
target=target_params,
transform=transform_params)
else:
self.logger.info('No table available that needs to be synced by fastsync')
# Run singer tap for INCREMENTAL and LOG_BASED replication methods
if len(singer_stream_ids) > 0:
self.logger.info('Table(s) selected to sync by singer: %s', singer_stream_ids)
self.tap_run_log_file = os.path.join(log_dir, f'{target_id}-{tap_id}-{current_time}.singer.log')
tap_params = TapParams(id=tap_id,
type=tap_type,
bin=self.tap_bin,
python_bin=self.tap_python_bin,
config=tap_config,
properties=tap_properties_singer,
state=tap_state)
self.run_tap_singer(tap=tap_params,
target=target_params,
transform=transform_params,
stream_buffer_size=stream_buffer_size)
else:
self.logger.info('No table available that needs to be synced by singer')
except pidfile.AlreadyRunningError:
self.logger.error('Another instance of the tap is already running.')
utils.silentremove(cons_target_config)
utils.silentremove(tap_properties_fastsync)
utils.silentremove(tap_properties_singer)
sys.exit(1)
# Delete temp files if there are any
except commands.RunCommandException as exc:
self.logger.exception(exc)
utils.silentremove(cons_target_config)
utils.silentremove(tap_properties_fastsync)
utils.silentremove(tap_properties_singer)
self._print_tap_run_summary(self.STATUS_FAILED, start_time, datetime.now())
self.send_alert(message=f'{tap_id} tap failed', exc=exc)
sys.exit(1)
except Exception as exc:
utils.silentremove(cons_target_config)
utils.silentremove(tap_properties_fastsync)
utils.silentremove(tap_properties_singer)
self._print_tap_run_summary(self.STATUS_FAILED, start_time, datetime.now())
self.send_alert(message=f'{tap_id} tap failed', exc=exc)
raise exc
utils.silentremove(cons_target_config)
utils.silentremove(tap_properties_fastsync)
utils.silentremove(tap_properties_singer)
self._print_tap_run_summary(self.STATUS_SUCCESS, start_time, datetime.now())
def stop_tap(self):
"""
Stop running tap
The command finds the tap specific pidfile that was created by the run_tap command and sends
a SIGINT to the process. The SIGINT signal triggers the _exit_gracefully function automatically and
the tap stops running.
"""
pidfile_path = self.tap['files']['pidfile']
try:
with open(pidfile_path) as pidf:
pid = int(pidf.read())
parent = psutil.Process(pid)
# Terminate child processes
for child in parent.children(recursive=True):
self.logger.info('Sending SIGINT to child pid %s...', child.pid)
child.send_signal(signal.SIGINT)
# Terminate main process
self.logger.info('Sending SIGINT to main pid %s...', parent.pid)
parent.send_signal(signal.SIGINT)
except ProcessLookupError:
self.logger.error('Pid %s not found. Is the tap running on this machine? '
'Stopping taps remotely is not supported.', pid)
sys.exit(1)
except FileNotFoundError:
self.logger.error('No pidfile found at %s. Tap does not seem to be running.', pidfile_path)
sys.exit(1)
# pylint: disable=too-many-locals
def sync_tables(self):
"""
Sync every or a list of selected tables from a specific tap.
It performs an initial sync and resets the table bookmarks to their new location.
The function uses the fastsync components, hence it's only
available for taps and targets where the native and optimised
fastsync component is implemented.
"""
tap_id = self.tap['id']
tap_type = self.tap['type']
target_id = self.target['id']
target_type = self.target['type']
fastsync_bin = utils.get_fastsync_bin(self.venv_dir, tap_type, target_type)
self.logger.info('Syncing tables from %s (%s) to %s (%s)...', tap_id, tap_type, target_id, target_type)
# Run only if tap enabled
if not self.tap.get('enabled', False):
self.logger.info('Tap %s is not enabled.', self.tap['name'])
sys.exit(1)
# Run only if tap not running
tap_status = self.detect_tap_status(target_id, tap_id)
if tap_status['currentStatus'] == 'running':
self.logger.info('Tap %s is currently running and cannot sync. Stop the tap and try again.',
self.tap['name'])
sys.exit(1)
# Tap exists but configuration not completed
if not os.path.isfile(fastsync_bin):
self.logger.error('Table sync function is not implemented from %s datasources to %s type of targets',
tap_type, target_type)
sys.exit(1)
# Generate and run the command to run the tap directly
tap_config = self.tap['files']['config']
tap_inheritable_config = self.tap['files']['inheritable_config']
tap_properties = self.tap['files']['properties']
tap_state = self.tap['files']['state']
tap_transformation = self.tap['files']['transformation']
target_config = self.target['files']['config']
# Set drop_pg_slot to True if we want to sync the whole tap
# This flag will be used by FastSync when syncing from PG to (PG/SF/Redshift) targets
self.drop_pg_slot = bool(not self.args.tables)
# Some target attributes can be passed and overridden by the tap (aka. inheritable config)
# We merge the two configs and use that with the target
cons_target_config = self.create_consumable_target_config(target_config, tap_inheritable_config)
# Output will be redirected into target and tap specific log directory
log_dir = self.get_tap_log_dir(target_id, tap_id)
current_time = datetime.utcnow().strftime('%Y%m%d_%H%M%S')
# sync_tables command always using fastsync
try:
with pidfile.PIDFile(self.tap['files']['pidfile']):
self.tap_run_log_file = os.path.join(log_dir, f'{target_id}-{tap_id}-{current_time}.fastsync.log')
# Create parameters as NamedTuples
tap_params = TapParams(
id=tap_id,
type=tap_type,
bin=self.tap_bin,
python_bin=self.tap_python_bin,
config=tap_config,
properties=tap_properties,
state=tap_state)
target_params = TargetParams(
id=target_id,
type=target_type,
bin=self.target_bin,
python_bin=self.target_python_bin,
config=cons_target_config
)
transform_params = TransformParams(
bin=self.transform_field_bin,
config=tap_transformation,
python_bin=self.transform_field_python_bin,
tap_id=tap_id,
target_id=target_id
)
self.run_tap_fastsync(tap=tap_params,
target=target_params,
transform=transform_params)
except pidfile.AlreadyRunningError:
self.logger.error('Another instance of the tap is already running.')
utils.silentremove(cons_target_config)
sys.exit(1)
# Delete temp file if there is any
except commands.RunCommandException as exc:
self.logger.exception(exc)
utils.silentremove(cons_target_config)
self.send_alert(message=f'Failed to sync tables in {tap_id} tap', exc=exc)
sys.exit(1)
except Exception as exc:
utils.silentremove(cons_target_config)
self.send_alert(message=f'Failed to sync tables in {tap_id} tap', exc=exc)
raise exc
utils.silentremove(cons_target_config)
def validate(self):
"""
Validates a project directory with YAML tap and target files.
"""
yaml_dir = self.args.dir
self.logger.info('Searching YAML config files in %s', yaml_dir)
tap_yamls, target_yamls = utils.get_tap_target_names(yaml_dir)
self.logger.info('Detected taps: %s', tap_yamls)
self.logger.info('Detected targets: %s', target_yamls)
target_schema = utils.load_schema('target')
tap_schema = utils.load_schema('tap')
vault_secret = self.args.secret
target_ids = set()
# Validate target json schemas and that no duplicate IDs exist
for yaml_file in target_yamls:
self.logger.info('Started validating %s', yaml_file)
loaded_yaml = utils.load_yaml(os.path.join(yaml_dir, yaml_file), vault_secret)
utils.validate(loaded_yaml, target_schema)
if loaded_yaml['id'] in target_ids:
self.logger.error('Duplicate target found "%s"', loaded_yaml['id'])
sys.exit(1)
target_ids.add(loaded_yaml['id'])
self.logger.info('Finished validating %s', yaml_file)
tap_ids = set()
# Validate tap json schemas, check that every tap has valid 'target' and that no duplicate IDs exist
for yaml_file in tap_yamls:
self.logger.info('Started validating %s', yaml_file)
loaded_yaml = utils.load_yaml(os.path.join(yaml_dir, yaml_file), vault_secret)
utils.validate(loaded_yaml, tap_schema)
if loaded_yaml['id'] in tap_ids:
self.logger.error('Duplicate tap found "%s"', loaded_yaml['id'])
sys.exit(1)
if loaded_yaml['target'] not in target_ids:
self.logger.error("Can'f find the target with the ID '%s' referenced in '%s'. Available target IDs: %s",
loaded_yaml['target'], yaml_file, target_ids)
sys.exit(1)
tap_ids.add(loaded_yaml['id'])
self.logger.info('Finished validating %s', yaml_file)
self.logger.info('Validation successful')
def import_project(self):
"""
Take a list of YAML files from a directory and use it as the source to build
singer compatible json files and organise them into pipeline directory structure
"""
# Read the YAML config files and transform/save into singer compatible
# JSON files in a common directory structure
config = Config.from_yamls(self.config_dir, self.args.dir, self.args.secret)
config.save()
# Activating tap stream selections
#
# Run every tap in discovery mode to generate the singer specific
# properties.json files for the taps. The properties file is then
# updated to replicate only the tables that are defined in the YAML
# files and to use the required replication methods
#
# The tap discovery mode needs to connect to each source database and
# doing that sequentially is slow. For better performance we do it
# in parallel.
self.logger.info('ACTIVATING TAP STREAM SELECTIONS...')
total_targets = 0
total_taps = 0
discover_excs = []
# Import every tap from every target
start_time = datetime.now()
for target in config.targets.values():
total_targets += 1
total_taps += len(target.get('taps'))
with parallel_backend('threading', n_jobs=-1):
# Discover taps in parallel and return the list of exception of the failed ones
discover_excs.extend(list(filter(None,
Parallel(verbose=100)(delayed(self.discover_tap)(
tap=tap,
target=target
) for tap in target.get('taps')))))
# Log summary
end_time = datetime.now()
# pylint: disable=logging-too-many-args
self.logger.info(
"""
-------------------------------------------------------
IMPORTING YAML CONFIGS FINISHED
-------------------------------------------------------
Total targets to import : %s
Total taps to import : %s
Taps imported successfully : %s
Taps failed to import : %s
Runtime : %s
-------------------------------------------------------
""",
total_targets,
total_taps,
total_taps - len(discover_excs),
str(discover_excs),
end_time - start_time
)
if len(discover_excs) > 0:
sys.exit(1)
def encrypt_string(self):
"""
Encrypt the supplied string using the provided vault secret
"""
b_ciphertext = utils.vault_encrypt(self.args.string, self.args.secret)
yaml_text = utils.vault_format_ciphertext_yaml(b_ciphertext)
print(yaml_text)
print('Encryption successful')
def _is_initial_sync_required(self, replication_method: str, stream_bookmark: Dict) -> bool:
"""
Detects if a stream needs initial sync or not.
Initial sync is required for INCREMENTAL and LOG_BASED tables
where the state file has no valid bookmark.
Valid bookmark keys:
'replication_key_value' key created for INCREMENTAL tables
'log_pos' key created by MySQL LOG_BASED tables
'lsn' key created by PostgreSQL LOG_BASED tables
'modified_since' key created by CSV S3 INCREMENTAL tables
'token' key created by MongoDB LOG_BASED tables
FULL_TABLE replication method is taken as initial sync required
:param replication_method: stream replication method
:param stream_bookmark: stream state bookmark
:return: Boolean, True if needs initial sync, False otherwise
"""
return replication_method == self.FULL_TABLE \
or (replication_method == self.INCREMENTAL and
'replication_key_value' not in stream_bookmark and
'modified_since' not in stream_bookmark) \
or (replication_method == self.LOG_BASED and
'lsn' not in stream_bookmark and
'log_pos' not in stream_bookmark and
'token' not in stream_bookmark)
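# Hedged examples of how state bookmarks map onto this check (bookmark values are hypothetical):
#
#     self._is_initial_sync_required(self.FULL_TABLE,  {'lsn': 108197216})    -> True
#     self._is_initial_sync_required(self.INCREMENTAL, {})                    -> True
#     self._is_initial_sync_required(self.INCREMENTAL,
#                                    {'replication_key_value': '2021-01-01'}) -> False
#     self._is_initial_sync_required(self.LOG_BASED,   {'lsn': 108197216})    -> False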
# pylint: disable=unused-argument
def _exit_gracefully(self, sig, frame, exit_code=1):
self.logger.info('Stopping gracefully...')
# Rename log files from running to terminated status
if self.tap_run_log_file:
tap_run_log_file_running = f'{self.tap_run_log_file}.running'
tap_run_log_file_terminated = f'{self.tap_run_log_file}.terminated'
if os.path.isfile(tap_run_log_file_running):
os.rename(tap_run_log_file_running, tap_run_log_file_terminated)
sys.exit(exit_code)
def _print_tap_run_summary(self, status, start_time, end_time):
summary = f"""
-------------------------------------------------------
TAP RUN SUMMARY
-------------------------------------------------------
Status : {status}
Runtime : {end_time - start_time}
-------------------------------------------------------
"""
# Print summary to stdout
self.logger.info(summary)
# Add summary to tap run log file
if self.tap_run_log_file:
tap_run_log_file_success = f'{self.tap_run_log_file}.success'
tap_run_log_file_failed = f'{self.tap_run_log_file}.failed'
# Find which log file we need to write the summary
log_file_to_write_summary = None
if os.path.isfile(tap_run_log_file_success):
log_file_to_write_summary = tap_run_log_file_success
elif os.path.isfile(tap_run_log_file_failed):
log_file_to_write_summary = tap_run_log_file_failed
# Append the summary to the right log file
if log_file_to_write_summary:
with open(log_file_to_write_summary, 'a') as logfile:
logfile.write(summary)
# pylint: disable=unused-variable
def _run_post_import_tap_checks(self, tap: Dict, catalog: Dict, target_id: str) -> List:
"""
Run post import checks on a tap.
:param tap: dictionary containing all taps details
:param catalog: tap properties object
:param target_id: ID of the target used by the tap
:return: List of errors. If there are no errors, returns an empty list
"""
errors = []
error = self.__validate_transformations(
tap.get('files', {}).get('transformation'),
catalog,
tap['id'],
target_id)
if error:
errors.append(error)
# For each stream (table) in the original properties
for stream_idx, stream in enumerate(catalog.get('streams', catalog)):
# Collect required properties from the properties file
tap_stream_id = stream.get('tap_stream_id')
metadata = stream.get('metadata', [])
# Collect further properties from the tap and target properties
table_meta = {}
for meta_idx, meta in enumerate(metadata):
if isinstance(meta, dict) and len(meta.get('breadcrumb', [])) == 0:
table_meta = meta.get('metadata')
break
selected = table_meta.get('selected', False)
replication_method = table_meta.get('replication-method')
table_key_properties = table_meta.get('table-key-properties', [])
primary_key_required = tap.get('primary_key_required', True)
# Check if primary key is set for INCREMENTAL and LOG_BASED replications
if (selected and replication_method in [self.INCREMENTAL, self.LOG_BASED] and
len(table_key_properties) == 0 and primary_key_required):
errors.append(f'No primary key set for {tap_stream_id} stream ({replication_method})')
break
return errors
def __validate_transformations(
self,
transformation_file: str,
catalog: Dict,
tap_id: str,
target_id: str) -> Optional[str]:
"""
Run validation of transformation config
Args:
transformation_file: path to transformation config
catalog: Catalog object
tap_id: The ID of the tap to which the transformations belong
target_id: the ID of the target used by the tap
Returns: error as string
"""
if transformation_file:
# create a temp file with the content being the given catalog object
# we need this file to execute the validation cli command
temp_catalog_file = utils.create_temp_file(dir=self.get_temp_dir(),
prefix='properties_',
suffix='.json')[1]
utils.save_json(catalog, temp_catalog_file)
command = f"""
{self.transform_field_bin} --validate --config {transformation_file} --catalog {temp_catalog_file}
"""
if self.profiling_mode:
dump_file = os.path.join(self.profiling_dir, f'transformation_{tap_id}_{target_id}.pstat')
command = f'{self.transform_field_python_bin} -m cProfile -o {dump_file} {command}'
self.logger.debug('Transformation validation command: %s', command)
result = commands.run_command(command)
# Get output and errors from command
returncode, _, stderr = result
if returncode != 0:
return stderr
|
py | 1a44a4e81ade917b9628e340b702beda7fabadca | import pytest
from plenum.common.exceptions import RequestRejectedException, \
RequestNackedException
from indy_common.constants import POOL_RESTART, ACTION, START, DATETIME
from plenum.common.constants import TXN_TYPE
from plenum.test.helper import sdk_gen_request, sdk_sign_and_submit_req_obj, \
sdk_get_reply, sdk_get_and_check_replies
def test_fail_pool_restart_with_steward_role(
sdk_pool_handle, sdk_wallet_steward, looper):
op = {
TXN_TYPE: POOL_RESTART,
ACTION: START,
}
req_obj = sdk_gen_request(op, identifier=sdk_wallet_steward[1])
req = sdk_sign_and_submit_req_obj(looper,
sdk_pool_handle,
sdk_wallet_steward,
req_obj)
with pytest.raises(RequestRejectedException) as excinfo:
sdk_get_and_check_replies(looper, [req], 100)
assert excinfo.match("STEWARD cannot do action with type = " +
POOL_RESTART)
def test_fail_pool_restart_with_invalid_datetime(
sdk_pool_handle, sdk_wallet_steward, looper):
invalid_datetime = "12.05.2018 4/40"
op = {
TXN_TYPE: POOL_RESTART,
ACTION: START,
DATETIME: invalid_datetime
}
req_obj = sdk_gen_request(op, identifier=sdk_wallet_steward[1])
req = sdk_sign_and_submit_req_obj(looper,
sdk_pool_handle,
sdk_wallet_steward,
req_obj)
with pytest.raises(RequestNackedException) as excinfo:
sdk_get_and_check_replies(looper, [req], 100)
assert excinfo.match("datetime " + invalid_datetime + " is not valid")
|
py | 1a44a54bc8b7341b2ef814e49bc7010eed6d8cc0 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: common.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='common.proto',
package='buildbucket.v2',
syntax='proto3',
serialized_options=_b('Z4go.chromium.org/luci/buildbucket/proto;buildbucketpb'),
serialized_pb=_b('\n\x0c\x63ommon.proto\x12\x0e\x62uildbucket.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"8\n\nExecutable\x12\x14\n\x0c\x63ipd_package\x18\x01 \x01(\t\x12\x14\n\x0c\x63ipd_version\x18\x02 \x01(\t\"\xc3\x01\n\rStatusDetails\x12M\n\x13resource_exhaustion\x18\x03 \x01(\x0b\x32\x30.buildbucket.v2.StatusDetails.ResourceExhaustion\x12\x36\n\x07timeout\x18\x04 \x01(\x0b\x32%.buildbucket.v2.StatusDetails.Timeout\x1a\x14\n\x12ResourceExhaustion\x1a\t\n\x07TimeoutJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03\"2\n\x03Log\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08view_url\x18\x02 \x01(\t\x12\x0b\n\x03url\x18\x03 \x01(\t\"O\n\x0cGerritChange\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63hange\x18\x03 \x01(\x03\x12\x10\n\x08patchset\x18\x04 \x01(\x03\"Y\n\rGitilesCommit\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\t\x12\x0b\n\x03ref\x18\x04 \x01(\t\x12\x10\n\x08position\x18\x05 \x01(\r\"(\n\nStringPair\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"i\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"_\n\x12RequestedDimension\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12-\n\nexpiration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration*\x87\x01\n\x06Status\x12\x16\n\x12STATUS_UNSPECIFIED\x10\x00\x12\r\n\tSCHEDULED\x10\x01\x12\x0b\n\x07STARTED\x10\x02\x12\x0e\n\nENDED_MASK\x10\x04\x12\x0b\n\x07SUCCESS\x10\x0c\x12\x0b\n\x07\x46\x41ILURE\x10\x14\x12\x11\n\rINFRA_FAILURE\x10$\x12\x0c\n\x08\x43\x41NCELED\x10\x44*%\n\x07Trinary\x12\t\n\x05UNSET\x10\x00\x12\x07\n\x03YES\x10\x01\x12\x06\n\x02NO\x10\x02\x42\x36Z4go.chromium.org/luci/buildbucket/proto;buildbucketpbb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
_STATUS = _descriptor.EnumDescriptor(
name='Status',
full_name='buildbucket.v2.Status',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STATUS_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SCHEDULED', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STARTED', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ENDED_MASK', index=3, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUCCESS', index=4, number=12,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FAILURE', index=5, number=20,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INFRA_FAILURE', index=6, number=36,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANCELED', index=7, number=68,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=824,
serialized_end=959,
)
_sym_db.RegisterEnumDescriptor(_STATUS)
Status = enum_type_wrapper.EnumTypeWrapper(_STATUS)
_TRINARY = _descriptor.EnumDescriptor(
name='Trinary',
full_name='buildbucket.v2.Trinary',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='YES', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NO', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=961,
serialized_end=998,
)
_sym_db.RegisterEnumDescriptor(_TRINARY)
Trinary = enum_type_wrapper.EnumTypeWrapper(_TRINARY)
STATUS_UNSPECIFIED = 0
SCHEDULED = 1
STARTED = 2
ENDED_MASK = 4
SUCCESS = 12
FAILURE = 20
INFRA_FAILURE = 36
CANCELED = 68
UNSET = 0
YES = 1
NO = 2
_EXECUTABLE = _descriptor.Descriptor(
name='Executable',
full_name='buildbucket.v2.Executable',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='cipd_package', full_name='buildbucket.v2.Executable.cipd_package', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cipd_version', full_name='buildbucket.v2.Executable.cipd_version', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=97,
serialized_end=153,
)
_STATUSDETAILS_RESOURCEEXHAUSTION = _descriptor.Descriptor(
name='ResourceExhaustion',
full_name='buildbucket.v2.StatusDetails.ResourceExhaustion',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=308,
serialized_end=328,
)
_STATUSDETAILS_TIMEOUT = _descriptor.Descriptor(
name='Timeout',
full_name='buildbucket.v2.StatusDetails.Timeout',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=330,
serialized_end=339,
)
_STATUSDETAILS = _descriptor.Descriptor(
name='StatusDetails',
full_name='buildbucket.v2.StatusDetails',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_exhaustion', full_name='buildbucket.v2.StatusDetails.resource_exhaustion', index=0,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timeout', full_name='buildbucket.v2.StatusDetails.timeout', index=1,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_STATUSDETAILS_RESOURCEEXHAUSTION, _STATUSDETAILS_TIMEOUT, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=156,
serialized_end=351,
)
_LOG = _descriptor.Descriptor(
name='Log',
full_name='buildbucket.v2.Log',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='buildbucket.v2.Log.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='view_url', full_name='buildbucket.v2.Log.view_url', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url', full_name='buildbucket.v2.Log.url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=353,
serialized_end=403,
)
_GERRITCHANGE = _descriptor.Descriptor(
name='GerritChange',
full_name='buildbucket.v2.GerritChange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='host', full_name='buildbucket.v2.GerritChange.host', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='project', full_name='buildbucket.v2.GerritChange.project', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='change', full_name='buildbucket.v2.GerritChange.change', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='patchset', full_name='buildbucket.v2.GerritChange.patchset', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=405,
serialized_end=484,
)
_GITILESCOMMIT = _descriptor.Descriptor(
name='GitilesCommit',
full_name='buildbucket.v2.GitilesCommit',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='host', full_name='buildbucket.v2.GitilesCommit.host', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='project', full_name='buildbucket.v2.GitilesCommit.project', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='buildbucket.v2.GitilesCommit.id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ref', full_name='buildbucket.v2.GitilesCommit.ref', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position', full_name='buildbucket.v2.GitilesCommit.position', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=486,
serialized_end=575,
)
_STRINGPAIR = _descriptor.Descriptor(
name='StringPair',
full_name='buildbucket.v2.StringPair',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='buildbucket.v2.StringPair.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='buildbucket.v2.StringPair.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=577,
serialized_end=617,
)
_TIMERANGE = _descriptor.Descriptor(
name='TimeRange',
full_name='buildbucket.v2.TimeRange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='start_time', full_name='buildbucket.v2.TimeRange.start_time', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_time', full_name='buildbucket.v2.TimeRange.end_time', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=619,
serialized_end=724,
)
_REQUESTEDDIMENSION = _descriptor.Descriptor(
name='RequestedDimension',
full_name='buildbucket.v2.RequestedDimension',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='buildbucket.v2.RequestedDimension.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='buildbucket.v2.RequestedDimension.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='expiration', full_name='buildbucket.v2.RequestedDimension.expiration', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=726,
serialized_end=821,
)
_STATUSDETAILS_RESOURCEEXHAUSTION.containing_type = _STATUSDETAILS
_STATUSDETAILS_TIMEOUT.containing_type = _STATUSDETAILS
_STATUSDETAILS.fields_by_name['resource_exhaustion'].message_type = _STATUSDETAILS_RESOURCEEXHAUSTION
_STATUSDETAILS.fields_by_name['timeout'].message_type = _STATUSDETAILS_TIMEOUT
_TIMERANGE.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_TIMERANGE.fields_by_name['end_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_REQUESTEDDIMENSION.fields_by_name['expiration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
DESCRIPTOR.message_types_by_name['Executable'] = _EXECUTABLE
DESCRIPTOR.message_types_by_name['StatusDetails'] = _STATUSDETAILS
DESCRIPTOR.message_types_by_name['Log'] = _LOG
DESCRIPTOR.message_types_by_name['GerritChange'] = _GERRITCHANGE
DESCRIPTOR.message_types_by_name['GitilesCommit'] = _GITILESCOMMIT
DESCRIPTOR.message_types_by_name['StringPair'] = _STRINGPAIR
DESCRIPTOR.message_types_by_name['TimeRange'] = _TIMERANGE
DESCRIPTOR.message_types_by_name['RequestedDimension'] = _REQUESTEDDIMENSION
DESCRIPTOR.enum_types_by_name['Status'] = _STATUS
DESCRIPTOR.enum_types_by_name['Trinary'] = _TRINARY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Executable = _reflection.GeneratedProtocolMessageType('Executable', (_message.Message,), dict(
DESCRIPTOR = _EXECUTABLE,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.Executable)
))
_sym_db.RegisterMessage(Executable)
StatusDetails = _reflection.GeneratedProtocolMessageType('StatusDetails', (_message.Message,), dict(
ResourceExhaustion = _reflection.GeneratedProtocolMessageType('ResourceExhaustion', (_message.Message,), dict(
DESCRIPTOR = _STATUSDETAILS_RESOURCEEXHAUSTION,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.StatusDetails.ResourceExhaustion)
))
,
Timeout = _reflection.GeneratedProtocolMessageType('Timeout', (_message.Message,), dict(
DESCRIPTOR = _STATUSDETAILS_TIMEOUT,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.StatusDetails.Timeout)
))
,
DESCRIPTOR = _STATUSDETAILS,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.StatusDetails)
))
_sym_db.RegisterMessage(StatusDetails)
_sym_db.RegisterMessage(StatusDetails.ResourceExhaustion)
_sym_db.RegisterMessage(StatusDetails.Timeout)
Log = _reflection.GeneratedProtocolMessageType('Log', (_message.Message,), dict(
DESCRIPTOR = _LOG,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.Log)
))
_sym_db.RegisterMessage(Log)
GerritChange = _reflection.GeneratedProtocolMessageType('GerritChange', (_message.Message,), dict(
DESCRIPTOR = _GERRITCHANGE,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.GerritChange)
))
_sym_db.RegisterMessage(GerritChange)
GitilesCommit = _reflection.GeneratedProtocolMessageType('GitilesCommit', (_message.Message,), dict(
DESCRIPTOR = _GITILESCOMMIT,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.GitilesCommit)
))
_sym_db.RegisterMessage(GitilesCommit)
StringPair = _reflection.GeneratedProtocolMessageType('StringPair', (_message.Message,), dict(
DESCRIPTOR = _STRINGPAIR,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.StringPair)
))
_sym_db.RegisterMessage(StringPair)
TimeRange = _reflection.GeneratedProtocolMessageType('TimeRange', (_message.Message,), dict(
DESCRIPTOR = _TIMERANGE,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.TimeRange)
))
_sym_db.RegisterMessage(TimeRange)
RequestedDimension = _reflection.GeneratedProtocolMessageType('RequestedDimension', (_message.Message,), dict(
DESCRIPTOR = _REQUESTEDDIMENSION,
__module__ = 'common_pb2'
# @@protoc_insertion_point(class_scope:buildbucket.v2.RequestedDimension)
))
_sym_db.RegisterMessage(RequestedDimension)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
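# ---------------------------------------------------------------------------
# Usage sketch appended for illustration only; it is NOT emitted by protoc and
# the example values are arbitrary. It exercises the message classes registered
# above through the standard protobuf API (constructor kwargs, SerializeToString,
# FromString), guarded so that importing this module stays side-effect free.
if __name__ == '__main__':
  example_pair = StringPair(key='builder', value='linux-rel')
  example_range = TimeRange(
      start_time=google_dot_protobuf_dot_timestamp__pb2.Timestamp(seconds=0),
      end_time=google_dot_protobuf_dot_timestamp__pb2.Timestamp(seconds=3600),
  )
  wire = example_range.SerializeToString()
  assert TimeRange.FromString(wire) == example_range
  print(example_pair, example_range)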
|
py | 1a44a69f0894dc962f6eac6715bc7134803fdbcb | #!/usr/bin/env python
# -*- coding: utf-8 -*-
duel_map = (
'8aaaaaa',
'7wwwwww',
'6cccccc',
'5eeeeee',
'-------',
'.......',
'.......',
'.......',
'.......',
'.......',
'.......',
'.......',
'.......',
'-------',
'0eeeeee',
'1cccccc',
'2wwwwww',
'3aaaaaa',
)
duel_map_long = (
'||||||||||||||||||',
'awc0!........!5cwa',
'awce!........!ecwa',
'awce!........!ecwa',
'awce!........!ecwa',
'awce!........!ecwa',
'awce!........!ecwa',
'awce!........!ecwa',
'||||||||||||||||||',
)
duel_map_passage = (
'||||||||||||||||||||||||||||',
'aawwccee0!........!5eeccwwaa',
'aawwccee.!........!.eeccwwaa',
'||||||||||||||||||||||||||||',
)
duel_map_big = (
'aaaaaaaaaaaaaaa',
'wwwwwwwwwwwwwww',
'ccccccccccccccc',
'5eeeeeeeeeeeeee',
'---------------',
'...............',
'...............',
'...............',
'...............',
'...............',
'...............',
'...............',
'...............',
'---------------',
'0eeeeeeeeeeeeee',
'ccccccccccccccc',
'wwwwwwwwwwwwwww',
'aaaaaaaaaaaaaaa',
)
duel_map_ship = (
'eee',
'eee',
'5ce',
'---',
'0ce',
'eee',
'eee',
)
duel_map_ship_deck = (
# The Fancy's gun deck
'||||||||||||||||||||||||||||||',
'1awwcceee!..............!0.!5.',
'.awwcceee!..............!..!c.',
'2awwcceee!..............!..!..',
'.awwcceee!..............!c.!..',
'||||||||||||||||||||||||||||||',
)
duel_map_ship_gang = (
# The Gunsway's berth deck, the powder magazine; an attack by Tenzi's gang.
'||||||||||||||||||',
'.0......c!.ceee.5|',
'.1......c!.ceee.6|',
'.2......c!.ceee.7|',
'.3.......!.ceee..|',
'||||||||||||||||||',
)
duel_map_near = (
'5c',
'--',
'0c',
)
duel_map_range_near = (
'5c.',
'~~~',
'0c.',
)
duel_map_range_squad = (
'aaaaaaaaaaaaaaaaaaa',
'eeeeeeeeeeeeeeeeeee',
'.......5.c.........',
'~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~',
'.......0.c.........',
'eeeeeeeeeeeeeeeeeee',
'aaaaaaaaaaaaaaaaaaa',
)
duel_map_range_squad_vs_one = (
'........5c.........',
'~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~',
'eeeeeeee0ceeeeeeeee',
'aaaaaaaaaaaaaaaaaaa',
)
peace_map = (
# Healing the wounded, resting, rearming
'.....!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!.....',
'.....!..................................................!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!..................................................!.....',
'.....!--------------------------------------------------!.....',
'.....!..................................................!.....',
'.....!..................................................!.....',
'.....!--------------------------------------------------!.....',
'.....!.10.11.12.13.14.15.16.17.18.19....................!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!.....',
)
enslave_map = (
# Taking prisoners:
# fearless seek select_strongest enslave grapple -volley auto
# If they surrender:
# -attack -spellcast -volley -engage auto
'.....!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!.....',
'.....!..................................................!.....',
'.....!MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!.........................................9.8.7.6.5!.....',
'.....!--------------------------------------------------!.....',
'.....!..................................................!.....',
'.....!..................................................!.....',
'.....!--------------------------------------------------!.....',
'.....!.0.1.2.3.4........................................!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!.....',
)
battle_map_ship_deck = (
# Deck of the Ganj-i-Sawai
'||||||||||||||||||||||||||||||',
'11wwwce!.!eecwwwwwwwwwwwwww51.',
'12wwwce!.!eecwwwwwwwwwwwwww52.',
'13wwwce!.!eecwwwwwwwwwwwwww53.',
'14wwwce!.!eecwwwwwwwwwwwwww54.',
'15wwwce!.!eecwwwwwwwwwwwwww55.',
'16wwwce!.!eecwwwwwwwwwwwwww56.',
'||||||||||||||||||||||||||||||',
)
battle_map_ship_deck_surround = (
# Deck of the Ganj-i-Sawai
'||||||||||||||||||||||||||||||',
'11wwwce!.!eecwwwwwwwwwww51!21.',
'12wwwce!.!eecwwwwwwwwwww52!ww.',
'13wwwce!.!eecwwwwwwwwwww53!ww.',
'14wwwce!.!eecwwwwwwwwwww54!ww.',
'15wwwce!.!eecwwwwwwwwwww55!ww.',
'16wwwce!.!eecwwwwwwwwwww56!ww.',
'||||||||||||||||||||||||||||||',
)
battle_map_ship_deck_small = (
# Sloop deck
'h|||||||||||||||||||||',
'.51eec!.!eecwwwwwww11!',
'.52eec!.!eecwwwwwww12!',
'.53eec!.!eecwwwwwww13!',
'||||||||||||||||||||||',
)
battle_map_city_cats = (
# Our side (see the inspection sketch after this map):
# 2 -- a hundred archers with shortbows
# 1 -- a hundred light infantry with shortbows
# 0 -- cats, mystic tricksters
# 2,a -- archers, CR 1/8, AC 16 (studded leather, shields, shortswords and shortbows)
# 1,w -- infantry, CR 1/8, AC 16 (studded leather, shields, shortswords and shortbows)
# 1,e -- elites, CR 1/2, AC 19 (scale mail, shields, rapiers, daggers, and shortbows)
# 1,c -- officers, CR 1, AC 20 (the squad's sergeants and captain, breastplates, "Shield" rune)
# 0,e -- cat rogues (lvl 3), CR 1, AC 17 ("Mage_Armor" and "Shield" runes, 48 "Sleep" spells)
# 0,c -- Glim and Dink (lvl 9), CR 5, AC 18 ("Invisibility", "Hold_Person", "Phantasmal_Force")
'WWWWWWWWWWWWWWWWCCCCCCCCCWWWWWWWWWWWWWWW',
'WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW3',
'||||||||||||||||||||||||||||||||||||||||',
',,,,T,,,,,,T,,,,,,,T,,,,,T,,,,ttt,,,Tt,,',
'tt..ttttttt...tttttttt..ttttttttt..ttttt',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'2wwwwwwcee!eeee5!ceewwwwwwwwwwwwww7!ee..',
'wwwwwwwcee!eeee6!ceewwwwwwwwwwwwwww!ee..',
'wwwwwwwcee!eeee.!ceewwwwwwwwwwwwwww!eec.',
'wwwwwwwcee!eeeec!ceewwwwwwwwwwwwwww!eec.',
'wwwwwwwcee!eeeec!ceewwwwwwwwwwwwwww!ee..',
'wwwwwwwcee!eeeee!ceewwwwwwwwwwwwwww!ee..',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'tteeeece!.eeecee!ttttt..ttttttttt..ttttt',
',,eeeeee!.eeeeee!,,,T,..,,,,T,,,,..,,,,,',
'AAAAAC,,!AAACAA,!,,,^^^^^^,,,,,^^^^^^,,,',
'AAAAAA,,!AAAAAA,!,,,^^^^^^,,,,,^^^^^^,,,',
'^^^^^^,,!^^^^^^,!,,,^^^^^^,,,,,^^^^^^,,,',
'^^^^^^,0!^^^^^^1!,,,^^^^^^,,,,,^^^^^^,,,',
'tttttttt!ttttttt!ttttttttttttttttttttttt',
'||||||||||||||||||||||||||||||||||||||||',
'8aaaaaaaaaaaaaaaaaa!9aaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaCCCCCCaaaaaaaa!aaaaaaCCCCCCCaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
)
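# A minimal inspection sketch, not part of the original map data: it scans one of
# the map tuples above, records the spawn digits ('0'-'9') with their (x, y) tile
# coordinates, and tallies every other symbol so the legend comments can be checked
# against what is actually drawn. The name inspect_map is an assumption; note that
# two-character markers such as '51' on the deck maps are returned digit by digit.
from collections import Counter

def inspect_map(tile_map):
    spawn_points = {}
    symbol_counts = Counter()
    for y, row in enumerate(tile_map):
        for x, ch in enumerate(row):
            if ch.isdigit():
                spawn_points.setdefault(ch, []).append((x, y))
            else:
                symbol_counts[ch] += 1
    return spawn_points, symbol_counts

# Example (kept commented out so the module remains data-only on import):
# spawns, counts = inspect_map(battle_map_city_cats)
# counts['a']  -> number of archer tiles drawn on the city map above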
battle_map_city_cats_militia = (
# Militia fighting regulars.
# A 20-foot-wide street
'||||||||||||||||||||||||||||||||||||||||',
'0wcaaawwwwwwwwwwwee!....ceewwwwwwwwwwww5',
'wwcaaawwwwwwwwwwwee!....ceewwwwwwwwwwww.',
'wwcaaawwwwwwwwwwwee!....ceewwwwwwwwwwww.',
'wwcaaawwwwwwwwwwwee!....ceewwwwwwwwwwww.',
'||||||||||||||||||||||||||||||||||||||||',
'8aaaaaaaaaaaaaaaaaa!9aaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaCCCCCCaaaaaaaa!aaaaaaCCCCCCCaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaa!aaaaaaaaaaaaaaaaaaaa',
)
sea_map_hermione = (
# Concorde-class 12-pounder frigate
# https://en.wikipedia.org/wiki/French_frigate_Hermione_(2014)
# Tons burthen.......550 tons burthen (roughly 1160 tons displacement)
# Length.............44.2 m (145 ft)
# Beam...............11.24 m (36.9 ft)
# Draught............5.78 m (19.0 ft)
# 32 guns: 26 × 12-pounder long guns; 6 6-pounder long guns
# Complement: 6 officers, 10 guards, 292 seamen
# 2x2 тайла на пушку с каждого борта.
# На палубе место на 190 человек.
# 1200 кубометров трюма.
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____~~~~~~~~~~~~~~',
'~~~_____________________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~_______~~~~~~~~~~~~~',
'~~__===========================__~~~~~~~~~~~~~~~~~~~~~~~~~~__==O==__~~~~~~~~~~~~',
'~~_=...........................=__~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~__=...........................=__~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~__O.....O.......O.........O...O__~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~__=...........................=__~~~~~~~~~~~~~~~~~~~~~~~~~_=..O..=_~~~~~~~~~~~~',
'~~_=...........................=__~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~__===========================__~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~_____________________________~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~|||||||||||||||||||||||||||||~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~||...........................||~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~|.............................||~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~||.............................||~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~||O.....O.......O.........O...O||~~~~~~~~~~~~~~~~~~~~~~~~~_=..O..=_~~~~~~~~~~~~',
'~||.............................||~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~|.............................||~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~||...........................||~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~|||||||||||||||||||||||||||||~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~~|||||||||||||||||||||||||||||~~~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~||===========================||~~~~~~~~~~~~~~~~~~~~~~~~~_=..O..=_~~~~~~~~~~~~',
'~~~|=============================||~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~||=============================||~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~||O.....O.......O.........O...O||~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~||=============================||~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~|=============================||~~~~~~~~~~~~~~~~~~~~~~~~_=.....=_~~~~~~~~~~~~',
'~~~||===========================||~~~~~~~~~~~~~~~~~~~~~~~~~__==O==__~~~~~~~~~~~~',
'~~~~|||||||||||||||||||||||||||||~~~~~~~~~~~~~~~~~~~~~~~~~~~_______~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____~~~~~~~~~~~~~~',
)
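# A small cross-check sketch, not part of the original data. Two assumptions are
# made here that the file itself does not state: the 'O' marks on the hull
# sections above are read as gun ports / mast steps, and one tile is treated as a
# 5 ft square when converting the 145 ft hull quoted in the comment into tiles.
HERMIONE_O_MARKS = sum(row.count('O') for row in sea_map_hermione)
HERMIONE_HULL_TILES = 145 // 5  # ~29 squares bow to stern at an assumed 5 ft per tile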
sea_map_long = (
# Three triremes (150 feet):
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~aaaaaaaaaaaaaaaaaaaahhh~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~hhhaaaaaaaaaaaaaaaaaaaa~',
'0aaaaaaaaHHHHaaaaaaaahhh=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~5hhhaaaaaaHHHHaaaaaaaaaa=',
'~aaaaaaaaaaaaaaaaaaaahhh~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~hhhaaaaaaaaaaaaaaaaaaaa~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~aaaaaaaaaaaaaaaaaaaAAAA~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAaaaaaaaaaaaaaaaaaaa~',
'1EEEEEEEEECCCCEEEEEEAAAA=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~6AAAAEEEEEECCCCCEEEEEEEE=',
'~aaaaaaaaaaaaaaaaaaaAAAA~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAaaaaaaaaaaaaaaaaaaa~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~aaaaaaaaaaaaaaaaaaaAAAA~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAaaaaaaaaaaaaaaaaaaa~',
'2EEEEEEEEECCCCEEEEEEAAAA=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~7AAAAEEEEEECCCCCEEEEEEEE=',
'~aaaaaaaaaaaaaaaaaaaAAAA~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAaaaaaaaaaaaaaaaaaaa~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_animals = (
# Octopus test:
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~',
'5AAAAEEEEEEEEEEEEEEEEEEE=~~~~~~~~~',
'~AAAAAAAAACCCCCCAAAAAAAA~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~',
'0AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~',
'1eeeeeeeCCCCCCeeeeeeeeee~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_animals_underwater = (
# Octopus test:
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~',
'~~~~wwwwwwwwwwwwwwwwwwwwweeccc~~~~~~~~~~~~~~~ee0~~~~~~~~~~~~~~~~~~',
'~~~~wwwwwwwwwwwwwwwwwwwwweeeee~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~',
'~~~~wwwwwwwwwwwwwwwwwwwwweeccc5~~~~~~~~~~~~~~cc~~~~~~~~~~~~~~~~~~~',
'~~~~wwwwwwwwwwwwwwwwwwwwweeeee~~~~~~~~~~~~~~~cc~~~~~~~~~~~~~~~~~~~',
'~~~~wwwwwwwwwwwwwwwwwwwwweeccc~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~6~~~~~~~ee~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~ee~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaacaca~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_animals_flagship = (
# Octopus test:
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~AAAaaaaaaaaaaaaaaaaaaaaaaaaaaAAC~~ee0~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~ACAaaaaaaaaaaaaaaaaaaaaaaaaaaAAC~~cc~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~5ACAaaaaaaaaaaaaaaaaaaaaaaaaaaAAC~~cc~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~AAAaaaaaaaaaaaaaaaaaaaaaaaaaaAAC~~cc~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ee~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_monsters_surround_archers = (
# Sea monsters:
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~0~1~2~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~wwwwwwwwcwwwwwwwwwwwwcwwwwwwwwcwwwwwwwwww~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~weeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeew~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~cwe~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~eewc~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~we~5AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC~~eew~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~cwe~6ACAaaaaaaaaaaaaaaaaaaaaaaaaaaAAC~~ccwc~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~we~7ACAaaaaaaaaaaaaaaaaaaaaaaaaaaAAC~~ccw~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~we~8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC~~ccw~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~cwe~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~eewc~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~weeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeew~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~wwwwwwwcwwwwwwwwwwwcwwwwwwwwwwcwwwwwwwwww~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_monsters_surround_warriors = (
# Sea monsters:
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~0~1~2~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~wwwwwwwwcwwwwwwwwwwwwcwwwwwwwwcwwwwwwwwww~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~weeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeew~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~cwe~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~eewc~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~we~5EWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWE9~eew~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~cwe~6ECAAAwwwwcwwcwwwwwwwwwcwwwwAAACE~~ccwc~~~~~~~~~~~~~~~~~~~~',
'...........we~7ECAAAwwwwcwwwwwwwcwwwwwwcwwAAACE~~ccw.....................',
'~~~~~~~~~~~we~8EWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWE~~ccw~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~cwe~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~eewc~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~weeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeew~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~wwwwwwwcwwwwwwwwwwwcwwwwwwwwwwcwwwwwwwwww~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_animals_long = (
# Octopus test:
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'5AAAAEEEEEEEEEEEEEEEEEEE=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~AAAAAAAAACCCCCCAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAAAAAAA',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~0AAAAAAAAAAAAAAAAAAAAAAA',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~1eeeeeeeCCCCCCeeeeeeeeee',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_near = (
# Triremes at close quarters (50 feet):
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~aaaaaaaaaaaaaaaaaaaaaaa~',
'5EEEEEEEEEEEEEEEEEEEAAAA=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~6EEEEEEEEEEEEEEEEEEEAAAA=',
'~AAAAAAAAAAAAAAAAAAACCCC~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAAACCCC~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~CCCCAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~CCCCAAAAAAAAAAAAAAAAAAA~',
'0AAAAEEEEEEEEEEEEEEEEEEE=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~1AAAAEEEEEEEEEEEEEEEEEEE=',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~aaaaaaaaaaaaaaaaaaaaaaa~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_boarding_old = (
# Boarding
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~CCCCAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~',
'4AAAAAAAAAAAAAAAAAAAAAAA=~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~______________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~wwwwwwwwwwwwwwwwwwwwwww~~~~~~~~~~~~~~~~~~~~~~~~',
'0WCWWCWWCWWCWWCWWCWWCWWW=~~~~~~~~~~~~~~~~~~~~~~~',
'~EEEEEEEEEEEEEEEEEEEEEEE~~~~~~~~~~~~~~~~~~~~~~~~',
'~!!!!!!!!!!!!!!!!!!!!!!~~_____________________~~',
'~WWWWWWWWWWWWWWWWWWWWWW!EEEEwwwwwwwwwwwwwwwwwww~',
'5ECEECEECEECEECEECEECEE!EEEECCCCCEEEEEEEEEEEEEE2',
'~WWWWWWWWWWWWWWWWWWWWWW!EEEEwwwwwwwwwwwwwwwwwww~',
'~!!!!!!!!!!!!!!!!!!!!!!~~_____________________~~',
'~EEEEEEEEEEEEEEEEEEEEEEE~~~~~~~~~~~~~~~~~~~~~~~~',
'1WCWWCWWCWWCWWCWWCWWCWWW=~~~~~~~~~~~~~~~~~~~~~~~',
'~wwwwwwwwwwwwwwwwwwwwwww~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~CCCCAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~',
'3AAAAAAAAAAAAAAAAAAAAAAA=~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers = (
# Boarding
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~',
'3AAAAAAAAACCCCAAAAAAAAAA=~~~~~~~~~~~~~~~~~~~~~~~',
'~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~',
'0AAAAAAAAACCCCAAAAAAAAAA=~~~~~~~~~~~~~~~~~~~~~~~',
'~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~!!!!!!!!!!!!!!!!!!!!!!~~~~~~~~~~~~~~~~~~~~~~~~~',
'~WWWWWWWWWWWWWWWWWWWWWW!~~~~~~~~~~~~~~~~~~~~~~~~',
'5ECEECEECEECEECEECEECEE!~~~~~~~~~~~~~~~~~~~~~~~~',
'~WWWWWWWWWWWWWWWWWWWWWW!~~~~~~~~~~~~~~~~~~~~~~~~',
'~!!!!!!!!!!!!!!!!!!!!!!~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~',
'1AAAAAAAACCCCAAAAAAAAAAA=~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~~~',
'4AAAAAAAACCCCAAAAAAAAAAA=~~~~~~~~~~~~~~~~~~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaa~~~~~~~~~~~~~~~~~~~~~~~~',
'~~_____________________~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers_sentinel_cliff_dummies_long_ch1 = (
# Siege of "Sentinel Cliff". Exchange of fire with the fleet.
'=3|=2|=1!=0|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~___________!_________~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~WWCCWWWWWWWW!7AAAAAACCC',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~5WWCCWWWWWWWW!AAAAAAACCC',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~WWCCWWWWWWWW!AAAAAAACCC',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_________._!_________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_________._!_________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~WWCCWWWWWWWW!8AAAAAACCC',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~6WWCCWWWWWWWW!AAAAAAACCC',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~WWCCWWWWWWWW!AAAAAAACCC',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_________._!_________~',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_________._!_________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~WWCCWWWWWWWW!9AAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.WWCCWWWWWWWW!AAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~WWCCWWWWWWWW!AAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~___________!_________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~EEEWWWWWWWWWWWWWWCCWWWW',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~9EEEWWWWWWWWWWWWWWCCWWWW',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~EEEWWWWWWWWWWWWWWCCWWWW',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers_sentinel_cliff_dummies_volley_long_ch1 = (
# Siege of "Sentinel Cliff". Exchange of fire with the fleet.
'====3#!=1!=0|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~9A!AA7~~~~6~~~~~5~~',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'..cc.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_WWW_~_WWW_',
'..cc.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!!!_~~_WWW_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_WWW_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_WWW_~_WWW_',
'..aa.#!CE!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_WCW_~_WCW_',
'..aa.#!CE!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WCW_~_WCW_',
'..aa.#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_C!AA_~~_WCW_~_WCW_',
'!!!!!#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WCW_~_WCW_',
'====2#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WCW_~_WCW_',
'aaaaa#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_WWW_~_WWW_',
'ccccc#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~8~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers_sentinel_cliff_dummies_volley_short_ch1 = (
# Siege of "Sentinel Cliff". Exchange of fire with the fleet.
'====3#!=1!=0|~~~~~~~~~~~~~~~~~~~~~~~~9A!AA7~~~~6~~~~~~5~~',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'..cc.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_AAAA_~_WWW_',
'..cc.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!!!_~~_AAAA_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_AAAA_~_WWW_',
'..aa.#!EE!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_ACAA_~_WWW_',
'..aa.#!CE!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!CC_~~_ACAA_~_WCW_',
'..aa.#!CE!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_ACAA_~_WCW_',
'..aa.#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~_C!AA_~~_ACAA_~_WCW_',
'!!!!!#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_ACAA_~_WCW_',
'====2#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WCW_',
'aaaaa#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!CA!CA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~_AAAA_~_WWW_',
'ccccc#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~_A!AA_~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~8~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers_sentinel_cliff_long_ch1 = (
# Siege of "Sentinel Cliff". Exchange of fire with the fleet.
'=3|=2|=1!=0|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~CCCAAAAAAAAAAAAAAAAAAAA',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~5CCCAAAAAAAAAAAAAAAAAAAA',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~CCCAAAAAAAAAAAAAAAAAAAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~6AAAAAAAAAAAAAAAAAACCCAA',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~7AAAAAAAAAAAAAAAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~EEEWWWWWWWWWWWWWWCCWWWW',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~9EEEWWWWWWWWWWWWWWCCWWWW',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~EEEWWWWWWWWWWWWWWCCWWWW',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers_sentinel_cliff_long_near_ch1 = (
# Siege of "Sentinel Cliff". Exchange of fire with the fleet.
'=3|=2|=1!=0|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~CCCAAAAAAAAAAAAAAAAAAAA',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~5CCCAAAAAAAAAAAAAAAAAAAA',
'AA|AA|AA!CC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~CCCAAAAAAAAAAAAAAAAAAAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~6AAAAAAAAAAAAAAAAAACCCAA',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'CA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|CA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~7AAAAAAAAAAAAAAAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~AAAAAAAAAAAAAAAAAACCCAA',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~wwwwwwwwwwwwwwwwwwwwwww',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~9weeeeeeeecccceeeeeeeeee',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~wwwwwwwwwwwwwwwwwwwwwww',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_____________________~',
'AA|AA|CC!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'AA|AA|AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
sea_map_archers_sentinel_cliff_volley_coast_ch1 = (
# Siege of "Sentinel Cliff". Bombarding the beach by "Rocky Hill":
'====4#!32!10|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.6.7..8.9...',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..cc.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..cc.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~5~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'!!!!!!!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'=====#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_EWW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ECW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ECW__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ECW__~~~~~~Wecw!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ECW__~~~~~~Wecw!Wewwwwa',
'ccccc#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ECW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ECW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_EWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_WWW__~~~~~~Wwww!Wewwwwa',
)
sea_map_archers_sentinel_cliff_near_counter_ch1 = (
# Siege of "Sentinel Cliff". Bombarding the beach by "Rocky Hill":
'====4#!32!10|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.6.7..8.9...',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..cc.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..cc.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~5~~~~~~~~~Wwww!Wewwwwa',
'..aa.#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'!!!!!!!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'=====#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ACA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AC!AC|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ACA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ACA__~~~~~~Wecw!Wewcwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_ACA__~~~~~~Wecw!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wecw!Wewwwwa',
'ccccc#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
'aaaaa#!AA!AA|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~_AAA__~~~~~~Wwww!Wewwwwa',
)
sea_map_archers_sentinel_cliff_coast_near_ch1 = (
# Huntresses in the fortification on the "Coast" pick off fugitives from the ships:
'|||||||||||||.............................................',
'========0=AA|..................................5.6.7.8.9..',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAC|.................................wawawceawawa',
'........aaAC|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'........aaAA|.................................wawawceawawa',
'==========AA|.................................wawawceawawa',
'|||||||||||||.................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
'..............................................wawawceawawa',
)
sea_map_archers_sentinel_cliff_volley_coast_faram_ch1 = (
# Siege of "Sentinel Cliff". Exchange of fire with the fleet.
'=====#!==!==#~~~~~8~~~~~~.5.......6..!.............!www!....|||..1..........',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwwwwwwwwww!.............!wwc!....|CA.............',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwwwwwwwwwe!.............!wwc!....|CA.............',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwwwwwwwwww!.............!wwc!....####!!!!!!!!!!!!',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwwwwwwwwwe!.............!wwe!....#..aaaaa...0....',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwwwwwwwwww!.............!wwe!....#..aaaaa........',
'..cc.#!EE!AA#~~~_AAAA_~~~wwwcewwwwwwe!.............!wwe!....#..aaaaa........',
'..cc.#!EE!AA#~~~_AAAA_~~~wwwcewwwwwww!.............!wwe!....#..aaaaa........',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwcewwwwwwe!.............!wwe!....#..aaaaa........',
'..aa.#!EE!AA#~~~_AAAA_~~~wwwcewwwwwcw!.............!wwe!....#..aaaaa........',
'..aa.#!CE!CA#~~~_AAAA_~~~wwwcewwwwwce!.............!wwe!....#..aaaaa........',
'..aa.#!CE!CA#~~~_AACA_~~~wwwcewwwwwcw!.............!7we!....#..acaaa........',
'..aa.#!CA!CA#~~~_AACA_~~~wwwcewwwwwce!.............!wwe!....#..acaaa........',
'!!!!!!!!!!!!#~~~_AAAA_~~~wwwcewwwwwcw!.............!wwe!....#..acaaa........',
'=====#!=====#~~~_AAAA_~~~wwwcewwwwwwe!.............!wwe!....#..acaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwww!.............!wwe!....#..acaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwwe!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwww!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwwe!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwww!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwwe!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwcewwwwwww!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwwwwwwwwwe!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwwwwwwwwww!.............!wwe!....#..aaaaa........',
'aaaaa#!aaaaa#~~~_AAAA_~~~wwwwwwwwwwwe!.............!wwe!....#|||||||||||||||',
'ccccc#!cccca#~~~~~~~~~~~~!!!!!!!!!!!!!!!!!!!!!!!!!!!wwc!....#CA.............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!............!wwc!....#CA.............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwwe!wwc!....#||!!!!!!!!!!!!!',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwce!www!....#...............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwce!www!....###.............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwce!www!....|c..............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwce!www!....|c..............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwce!www!....###.............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwwe!...!....|...............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwwe!.7.!....|...............',
'aaaaa#!aaaaa#~~~~~~~~~~~~.............!wwwwwwwwwwwe!...!....|...............',
)
sea_map_archers_sentinel_cliff_south_coast_ch1 = (
# Capturing the fleet:
'aaaaaaa0a1!......!..9...~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~4....',
'aaaaaaaaae!......!......~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.....',
'aaaaaaaaae!......!......~~_____________________~~~~~~~~~~~~~~~~~~eeee.',
'aaaaaaacce!......!......~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~eeee.',
'aaaaaaacce!......!......5AAAAAAAAAAAAAAAAAACCCCA~~~~~~~~~~~~~~~~~eeee.',
'aaaaaaacce!......!.......AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~ccee.',
'aaaaaaacce!......!..............................~~~~~~~~~~~~~~~~~ccee.',
'aaaaaaacce!......!..............................~~~~~~~~~~~~~~~~~eeee.',
'aaaaaaacce!......!wwwwww........................~~~~~~~~~~~~~~~~~eeee.',
'aaaaaaaaae!......!wwwwww........................~~~~~~~~~~~~~~~~~eeee.',
'aaaaaaaaae!......!wwwwww........................~~~~~~~~~~~~~~~~~eeee.',
'!!!!!!!!!!!......!ewwwww........................~~~~~~~~~~~~~~~~~eeee.',
'wwwwwwwww2!......!ewwwww........................~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!ewwwww.AAAAAAAAAAAAAAAAAACCCAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!ewwwww6AAAAAAAAAAAAAAAAAACCCAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwcww.AAAAAAAAAAAAAAAAAACCCAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwcww........................~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwcww........................~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwwww........................~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwwww........................~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwwww........................~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!ewwwww.AAAAAAAAAAAAAAAAAACCCAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!ewwwww7AAAAAAAAAAAAAAAAAACCCAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!ewwwww.AAAAAAAAAAAAAAAAAACCCAA~~~~~~~~~~~~~~~~~~~~~~',
'!!!!!!!!!!!......!wwwwww.._____________________~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwww3!......!wwwwww~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!wwwwww~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!wwwwww~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!......~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!......~~_____________________~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!.....~~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!.....~8AAAAAAAACCCCCCCCAAAAAAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!.....~~AAAAAAAAAAAAAAAAAAAAAAA~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwce!......!......~~_____________________~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!......~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'wwwwwwwwwe!......!......~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
)
battle_map = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!.................6.5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!.................1.0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
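# A minimal validation sketch (the helper name `check_map_width` is
# hypothetical, not part of this module): hand-drawn maps like the ones
# above are easy to break with a single dropped or extra character, so a
# quick width check catches that early.
def check_map_width(ascii_map):
    widths = {len(row) for row in ascii_map}
    if len(widths) != 1:
        raise ValueError('inconsistent row widths: %s' % sorted(widths))
    return len(ascii_map), widths.pop()  # (rows, columns)

# Example: rows, cols = check_map_width(battle_map)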
battle_map_long_rotate = (
# Battlefield test (10x60):
# Same map, rotated 90 degrees
'....................................',
'....................................',
'....................................',
'....................................',
'....................................',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'aaacwwwwwwe0!..........!5ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe1!..........!6ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'aaacwwwwwwe.!..........!.ewwwwwwcaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'....................................',
'....................................',
'....................................',
'....................................',
'....................................',
)
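# A minimal sketch of a rotation helper (hypothetical, not from this
# module): the "long" variant above is drawn by hand, but an equal-width
# map could in principle be rotated 90 degrees clockwise like this.
def rotate_map_cw(ascii_map):
    # zip(*reversed(...)) turns rows into columns; rows must be equal length.
    return tuple(''.join(col) for col in zip(*reversed(ascii_map)))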
battle_map_long_rotate_elite_vs_elite = (
# Battlefield test (10x60):
# Same map, rotated 90 degrees
'....................................',
'....................................',
'....................................',
'....................................',
'....................................',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'aaaeecceeee0!..........!5eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee1!..........!6eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'aaaeecceeee.!..........!.eeecceeeaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'....................................',
'....................................',
'....................................',
'....................................',
'....................................',
)
battle_map_test_commanders = (
# Battlefield test (10x60):
'.....!cccccccccccccccccccc!.....',
'.....!cccccccccccccccccccc!.....',
'.....!cccccccccccccccccccc!.....',
'.....!cccccccccccccccccccc!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!................56..!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!................01..!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_test_commanders_vs_commanders = (
# Battlefield test (10x60):
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!................56..!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!................01..!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
)
battle_map_wide = (
# Battlefield test (20x60):
'...............!aaaaaaaaaaaaaaaaaaaa!...............',
'...............!aaaaaaaaaaaaaaaaaaaa!...............',
'...............!aaaaaaaaaaaaaaaaaaaa!...............',
'...............!cccccccccccccccccccc!...............',
'...............!wwwwwwwwwwwwwwwwwwww!...............',
'...............!wwwwwwwwwwwwwwwwwwww!...............',
'...............!wwwwwwwwwwwwwwwwwwww!...............',
'...............!wwwwwwwwwwwwwwwwwwww!...............',
'...............!wwwwwwwwwwwwwwwwwwww!...............',
'...............!wwwwwwwwwwwwwwwwwwww!...............',
'...............!eeeeeeeeeeeeeeeeeeee!...............',
'...............!.................6.5!...............',
'...............!--------------------!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'...............!....................!...............',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'.....!.............................0.1........!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccccccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_barricades = (
# Battlefield test (10x60):
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaa5!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!AAAAAAAAAAAAAAAAAAAA!.....',
'.....!AAAAAAAAAAAAAAAAAAAA!.....',
'||||||||||||||||||||||||||||||||',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!AAAAAAAAAAAAAAAAAAAA!.....',
'.....!AAAAAAAAAAAAAAAAAAAA!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_center = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!.......eeeeeee.....0!.....',
'.....!.......eeeeeee......!.....',
'.....!wwwwwwwcccccccwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_hero_center_band = (
# Battlefield test (10x60):
# The formation at the bottom is a "boar's snout" (wedge). Just the thing for barbarians:
# http://zamok.ucoz.net/publ/sostav_armij_srednevekovja/1-1-0-31
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!6eeeeeeeeeeeeeeeeeee!.....',
'.....!5eeeeeeeeeeeeeeeeeee!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!eeeeee!eeeee!eeeeeee!.....',
'.....!eeeeee!eeeee!eeeeeee!.....',
'.....!1wwwww!cccc0!wwwwwww!.....',
'.....!wwwwww!-----!wwwwwww!.....',
'.....!wwwwwwcccccccwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_hero_center = (
# Battlefield test (10x60):
# The formation at the bottom is a "boar's snout" (wedge). Just the thing for barbarians:
# http://zamok.ucoz.net/publ/sostav_armij_srednevekovja/1-1-0-31
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!6eeeeeeeeeeeeeeeeeee!.....',
'.....!5eeeeeeeeeeeeeeeeeee!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!eeeee!eeeeeee!eeeeee!.....',
'.....!eeeee!eeeeeee!eeeeee!.....',
'.....!wwwww!cccccc0!wwwwww!.....',
'.....!wwwww!-------!wwwwww!.....',
'.....!1wwwwwcccccccwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_hero_center_elite = (
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!6eeeeeeeeeeeeeeeeeee!.....',
'.....!5eeeeeeeeeeeeeeeeeee!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!eeeee!eeeeeee!eeeeee!.....',
'.....!eeeee!ccccccc!eeeeee!.....',
'.....!wwwww!cccccc0!wwwwww!.....',
'.....!wwwww!-------!wwwwww!.....',
'.....!1wwwwwcccccccwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_hero = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!6eeeeeeeeeeeeeeeeeee!.....',
'.....!--------------------!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!5eeeeeeeeeeeeeeeeeee!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!0eeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!--------------------!.....',
'.....!1eeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_hero_mix = (
# Battlefield test (10x60):
'.....!....................!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!6eeeeeeeeeeeeeeeeeee!.....',
'.....!5eeeeeeeeeeeeeeeeeee!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!0eeeeeeeeeeeeeeeeeee!.....',
'.....!1eeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!....................!.....',
)
battle_map_hero_flank = (
'.....!aaaaaaaaaaaaaaaaaaaa!......................................',
'.....!aaaaaaaaaaaaaaaaaaaa!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!...............................ceeee..',
'.....!cccccccccccccccccccc!...............................ceeee..',
'.....!wwwwwwwwwwwwwwwwwwww!...............................ceeee..',
'.....!6eeeeeeeeeeeeeeeeeee!...............................ceeee.0',
'.....!5eeeeeeeeeeeeeeeeeee!...............................ceeee..',
'.....!--------------------!--------------------------------------',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!....................!......................................',
'.....!--------------------!......................................',
'.....!eeeeeeeeeeeeeeeeeeee!......................................',
'.....!1eeeeeeeeeeeeeeeeeee!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!cccccccccc..........!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!wwwwwwwwwwwwwwwwwwww!......................................',
'.....!aaaaaaaaaaaaaaaaaaaa!......................................',
'.....!aaaaaaaaaaaaaaaaaaaa!......................................',
)
battle_map_hero_defence = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww......',
'.....!wwwwwwwwwwwwwwwwwww6!.....',
'.....!--------------------!.....',
'.....!5eeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaccccaaaaaaaaa!.....',
'.....!--------------------!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww......',
'.....!eeeeeeeeeeeeeeeeeee6!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!1eeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'......cccccccccccccccccccc!.....',
'.....!--------------------!.....',
'.....!0eeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaccccaaaaaaaaa!.....',
'.....!--------------------!.....',
'.....!1wwwwwwwwwwwwwwwwwww!.....',
'......wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_hero_cover = (
# Battlefield test (10x60):
'.....!6eeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaccccaaaaaaaaa!.....',
'.....!--------------------!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeee5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!0eeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!--------------------!.....',
'.....!1eeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaccccaaaaaaaaa!.....',
)
battle_map_elite_vs_elite = (
# Battlefield test (10x60):
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!................6..5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!................1..0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
)
battle_map_vs_elite = (
# Battlefield test (10x60):
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!................6..5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!................1..0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_surrounded = (
# Battlefield test (10x60):
'.........................!..........!....................!..........!..........................',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!cccccccccccccccccccc!..........!eewwcwwwwwww..............',
'.........65..wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..66..........',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!eeeeeeeeeeeeeeeeeeee!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!55..................!..........!eewwcwwwwwww..............',
'-------------------------!---------------------------------------------------------------------',
'.............wwwwwwwcwwee!..........!................11.0!..........!eewwcwwwwwww..............',
'........56...wwwwwwwcwwee!..........!eeeeeeeeeeeeeeeeeeee!..........!eewwcwwwwwww...57.........',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!cccccccccccccccccccc!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!wwwwwwwwwwwwwwwwwwww!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'-------------------------!----------!....................!----------!--------------------------',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'........75...wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww...76.........',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.............wwwwwwwcwwee!..........!....................!..........!eewwcwwwwwww..............',
'.........................!..........!....................!..........!..........................',
)
battle_map_elite_surrounded = (
# Battlefield test (10x60):
'.........................!..........!....................!..........!..........................',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!cccccccccccccccccccc!..........!eecwwwwwwwww..............',
'.........65..wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..66..........',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!eeeeeeeeeeeeeeeeeeee!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!55..................!..........!eecwwwwwwwww..............',
'-------------------------!---------------------------------------------------------------------',
'.............wwwwwwwwwcee!..........!................11.0!..........!eecwwwwwwwww..............',
'........56...wwwwwwwwwcee!..........!eeeeeeeeeeeeeeeeeeee!..........!eecwwwwwwwww...57.........',
'.............wwwwwwwwwcee!..........!eeeeeeeeeeeeeeeeeeee!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!eeeeeeeeeeeeeeeeeeee!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!cccccccccccccccccccc!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!cccccccccccccccccccc!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!eeeeeeeeeeeeeeeeeeee!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!wwwwwwwwwwwwwwwwwwww!..........!eecwwwwwwwww..............',
'-------------------------!----------!....................!----------!--------------------------',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'........75...wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww...76.........',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.............wwwwwwwwwcee!..........!....................!..........!eecwwwwwwwww..............',
'.........................!..........!....................!..........!..........................',
)
battle_map_elite_squad = (
# Battlefield test (10x60):
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!................6..5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!................1..0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
)
battle_map_brave = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_cover = (
# Battlefield test (10x60):
'.....!...................5!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!...................0!.....',
)
battle_map_cover_archers = (
# Battlefield test (10x60):
'.....!...................5!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!...................0!.....',
)
battle_map_near = (
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!MMMMMMMMMMMMMMMMMMMM!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!5.6.................!.....',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'.....!0.1.................!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!MMMMMMMMMMMMMMMMMMMM!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_near_archers = (
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!.....5.6.7............................................!.....',
'.....!------------------------------------------------------!.....',
'.....!.....0.1.2............................................!.....',
'.....!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccccccccccccccccccccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_near_defence = (
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_archers_volley = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwweeeeeewwwwwww!.....',
'.....!wwwwwwwccccccwwwwwww!.....',
'.....!wwwwwwweeeeeewwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!...................5!.....',
'################################',
'################################',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'################################',
'################################',
'.....!................01..!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!.....cccccccccc.....!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_palisade = (
# Tests of elementals and spells against walls.
'------------',
'!c.c.c.c.c.!',
'!..........!',
'!w.w.w.w.w.!',
'!..........!',
'!.........5!',
'!----------!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!----------!',
'!c.c.c.c.c.!',
'!..........!',
'!e.e.e.e.e.!',
'!..........!',
'!a.a.a.a.a.!',
'!..........!',
'!a.a.a.a.a.!',
'!.........0!',
'------------',
)
battle_map_palisade_volley = (
# Tests of elementals and spells against walls.
'------------',
'!c.c.c.c.c.!',
'!..........!',
'!w.w.w.w.w.!',
'!..........!',
'!e.e.e.e.e.!',
'!.........5!',
'!----------!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'############',
'############',
'############',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!..........!',
'!----------!',
'!a.a.a.a.a.!',
'!..........!',
'!a.a.c.a.a.!',
'!..........!',
'!a.a.a.a.a.!',
'!..........!',
'!..........!',
'!.........0!',
'------------',
)
sea_map_artillery = (
# Artillery tests
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~-WWWWWWWWWWWWWWWWWWWWWWWWWW8~~~~~~~~~',
'~~~~~7WWWWWWWWCCCCCCCCCCWWWWWWWW-~~~~~~~~~',
'~~~~~-WWWWWWWWEEEEEEEEEEWWWWWWWW-~~~~~~~~~',
'~~~~~-WWWWWWWWWWWWWWWWWWWWWWWWWW-~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~-AAAAAAAAAAAAAAAAAAAAAAAAAA6~~~~~~~~~',
'~~~~~5AAAAAAAACCCCCCCCCCAAAAAAAA-~~~~~~~~~',
'~~~~~-AAAAAAAAAAAAAAAAAAAAAAAAAA-~~~~~~~~~',
'~~~~~-AAAAAAAAAAAAAAAAAAAAAAAAAA-~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'##########################################',
'##########################################',
'..........!...................0!..........',
'..........!a.a.a.a.c.a.a.a.a.a.!..........',
'..........!....................!..........',
'..........!a.a.a.a.a.a.a.a.a.a.!..........',
'..........!--------------------!..........',
'..........!...................1!..........',
'..........!....................!..........',
'..........!....................!..........',
)
sea_map_artillery_ship = (
# Artillery tests
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~5.e.e.e.e.e.e.e.e.e.e.e.e.~~~~~~~~~~~~',
'~~~~c.........................c~~~~~~~~~~~',
'~~~~~.e.e.e.e.e.e.e.e.e.e.e.e.~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~6.w.w.w.w.w.w.w.w.w.w.w.w.~~~~~~~~~~~~',
'~~~~c.........................c~~~~~~~~~~~',
'~~~~~.w.w.w.w.w.w.w.w.w.w.w.w.~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'##########################################',
'##########################################',
'..........!.................1.0!..........',
'..........!a.a.a.a.c.c.a.a.a.a.!..........',
'..........!....................!..........',
'..........!a.a.a.a.a.a.a.a.a.a.!..........',
'..........!--------------------!..........',
)
sea_map_artillery_ship_all = (
# Artillery tests
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~5.e.e.e.e.e.e.e.e.e.e.e.e.~~~~~~~~~~~~',
'~~~~c.........................c~~~~~~~~~~~',
'~~~~~.e.e.e.e.e.e.e.e.e.e.e.e.~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~.~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~6.w.w.w.w.w.w.w.w.w.w.w.w.~~~~~~~~~~~~',
'~~~~c.........................c~~~~~~~~~~~',
'~~~~~.w.w.w.w.w.w.w.w.w.w.w.w.~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'##########################################',
'##########################################',
'..........!.................1.0!..........',
'..........!a.a.a.a.c.c.a.a.a.a.!..........',
'..........!....................!..........',
'..........!a.a.a.a.a.a.a.a.a.a.!..........',
'..........!--------------------!..........',
)
sea_map_artillery_easy = (
# Artillery tests
# Unprotected crew
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~-wwwwwwwwwwwwwwwwwwwwwwwwww8~~~~~~~~~',
'~~~~~7wwwwwwwwccccccccccwwwwwwww-~~~~~~~~~',
'~~~~~-wwwwwwwweeeeeeeeeewwwwwwww-~~~~~~~~~',
'~~~~~-wwwwwwwwwwwwwwwwwwwwwwwwww-~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~-aaaaaaaaaaaaaaaaaaaaaaaaaa6~~~~~~~~~',
'~~~~~5aaaaaaaaccccccccccaaaaaaaa-~~~~~~~~~',
'~~~~~-aaaaaaaaaaaaaaaaaaaaaaaaaa-~~~~~~~~~',
'~~~~~-aaaaaaaaaaaaaaaaaaaaaaaaaa-~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'##########################################',
'##########################################',
'..........!...................0!..........',
'..........!a.a.a.a.c.a.a.a.a.a.!..........',
'..........!....................!..........',
'..........!a.a.a.a.a.a.a.a.a.a.!..........',
'..........!--------------------!..........',
'..........!...................1!..........',
'..........!....................!..........',
'..........!....................!..........',
)
sea_map_artillery_archers = (
# Artillery tests
# Comparing with archers under the same conditions.
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~-wwwwwwwwwwwwwwwwwwwwwwwwww8~~~~~~~~~',
'~~~~~7wwwwwwwwccccccccccwwwwwwww-~~~~~~~~~',
'~~~~~-wwwwwwwweeeeeeeeeewwwwwwww-~~~~~~~~~',
'~~~~~-wwwwwwwwwwwwwwwwwwwwwwwwww-~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~-AAAAAAAAAAAAAAAAAAAAAAAAAA6~~~~~~~~~',
'~~~~~5AAAAAAAACCCCCCCCCCAAAAAAAA-~~~~~~~~~',
'~~~~~-AAAAAAAAAAAAAAAAAAAAAAAAAA-~~~~~~~~~',
'~~~~~-AAAAAAAAAAAAAAAAAAAAAAAAAA-~~~~~~~~~',
'~~~~~~~________________________~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'##########################################',
'##########################################',
'.....0....AAAAAAAAAAAAAAAAAAAAAAAAAA......',
'..........AAAAAAAACCCCCCCCCCAAAAAAAA......',
'..........AAAAAAAAAAAAAAAAAAAAAAAAAA......',
'..........AAAAAAAAAAAAAAAAAAAAAAAAAA......',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'.....1....aaaaaaaaaaaaaaaaaaaaaaaaaa......',
'..........aaaaaaaaccccccccccaaaaaaaa......',
'..........aaaaaaaaaaaaaaaaaaaaaaaaaa......',
'..........aaaaaaaaaaaaaaaaaaaaaaaaaa......',
)
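# Editor's note: a minimal illustrative sketch, not from the original project.
# Maps like the one above compare two forces "under the same conditions", so a
# quick tally of every symbol helps verify the composition of each deployment.
# `Counter` is from the standard library; `map_symbol_counts` is a made-up name.
from collections import Counter

def map_symbol_counts(battle_map):
    """Count every character across all rows of a map literal."""
    return Counter(ch for row in battle_map for ch in row)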
sea_map_artillery_magic = (
# Artillery tests
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~________________________~~~~~~',
'~wwwwwwwwwwwwwwwwwwwwwwwwww8~~~~',
'-wwwwwwwwwwwcwwwwwwwwwwwwww-~~~~',
'7wwwwwwwwweeeeewwwwwwwwwwww=~~~~',
'~wwwwwwwwwwwwwwwwwwwwwwwwww~~~~~',
'~~________________________~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~________________________~~~~~~',
'~aaaaaaaaaaaaaaaaaaaaaaaaaa6~~~~',
'-aaaaaaaaaccccaaaaaaaaaaaaa-~~~~',
'5aaaaaaaaaeeeeaaaaaaaaaaaaa=~~~~',
'~~________________________~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'||||||||||||||||||||||||||||||||',
'||||||||||||||||||||||||||||||||',
'.....!...................0!.....',
'.....!a.a.a.a.c.a.a.a.aa.a!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!...ccccccccccccc...1!.....',
'.....!....................!.....',
'.....!....................!.....',
)
battle_map_artillery_trap = (
# Trap tests
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'################################',
'################################',
'.....!...................0!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!.......aaccaa.......!.....',
'.....!.......aaccaa.......!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
)
battle_map_archers_vs_infantry = (
# Battlefield test (10x60):
'................................',
'................................',
'wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww',
'wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww',
'wwwwwwwwwwwwcccccccwwwwwwwwwwwww',
'wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww',
'eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
'.....!...................5!.....',
'!!!!!!--------------------!!!!!!',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaacccccccaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
)
battle_map_archers_vs_archers = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_archers = (
# Battlefield test (10x60):
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwccccccwwwwwww!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!.................01.!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!.....cccccccccc.....!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_archers_hero = (
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!AAAAAAAACCAAAAAAAAAA!.....',
'.....!AAAAAAAAAAAAAAAAAAAA!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!....................!.....',
'.....!........cc..........!.....',
'.....!........cc..........!.....',
'.....!.....eeeeeeee.......!.....',
'.....!.....eeeeeeee.......!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
'.....!....................!.....',
)
battle_map_city_2 = (
# Battlefield test (10x60):
'#####!aaaaaaaaaaaaaaaaaaaa!#####',
'#####!aaaaaaaaaaaaaaaaaaaa!#####',
'#####!aaaaaaaaaaaaaaaaaaaa!#####',
'#####!aaaaaaaaaaaaaaaaaaaa!#####',
'#####!wwwwwwwwwwwwwwwwwwww!#####',
'#####!wwwwwwwwwwwwwwwwwwww!#####',
'#####!wwwwwwwwwwwwwwwwwwww!#####',
'#####!cccccccccccccccccccc!#####',
'.....!eeeeeeeeeeeeeeeeeeee!#####',
'#####!eeeeeeeeeeeeeeeeeeee!#####',
'#####!...................5!.....',
'#####!--------------------!#####',
'.....!..#####|....#######.!#####',
'#####!..#####|....#######.!#####',
'#####!..#####|....#######.!#####',
'#####!..#####|......|.....!.....',
'#####!..#####|....#######.!#####',
'#####!..TT...|....#######.!#####',
'#####!..#####|....#######.!#####',
'#####!.T#####|......|..##.!#####',
'#####!..#####.......|..##.!#####',
'#####!..#####.......|...#.!#####',
'#####|..|||||......ttT||||!#####',
'#####!...####.....tttttttt!ttttt',
'.....!--------------------!ttttt',
'#####!...................0!ttttt',
'#####!eeeeeeeeeeeeeeeeeeee!.....',
'#####!eeeeeeeeeeeeeeeeeeee!#####',
'#####!cccccccccccccccccccc!#####',
'.....!wwwwwwwwwwwwwwwwwwww!#####',
'#####!wwwwwwwwwwwwwwwwwwww!#####',
'#####!wwwwwwwwwwwwwwwwwwww!.....',
'#####!aaaaaaaaaaaaaaaaaaaa!#####',
'#####!aaaaaaaaaaaaaaaaaaaa!#####',
'.....!aaaaaaaaaaaaaaaaaaaa!#####',
'.....!aaaaaaaaaaaaaaaaaaaa!#####',
)
battle_map_city = (
# Battlefield test (10x60):
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!cccccccccccccccccccc!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!...................5!.....',
'.....!--------------------!.....',
'.....!.........tttt.......!.....',
'#####!.###.....tttt..####.!.....',
'#####!.....T...tttt.......!.....',
'#####T.........tttt..ttt.T######',
'#####T..TT...#.......ttt.T######',
'#####T..TT...#...........T######',
'#####T.......#..####.....T######',
'#####T.TT....#..####...###!.....',
'#####T.................###!.....',
'#####!...TTT.....####...##!.....',
'#####!...TTT.......ttT....!.....',
'#####!.............ttT....!.....',
'.....!--------------------!.....',
'.....!...................0!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!eeeeeeeeeeeeeeeeeeee!.....',
'.....!cccccccccccccccccccc!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!wwwwwwwwwwwwwwwwwwww!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
'.....!aaaaaaaaaaaaaaaaaaaa!.....',
)
battle_map_forest = (
# Battlefield test (10x60):
'.##..!aaaaaaaaaaaaaaaaaaaa!.tt#.',
'.T#.#!aaaaaaaaaaaaTaaaaaaa!.##T#',
'#...#!cccccTcccccccccccccc!#....',
'..#.t!wwwwwwwwwwwwwwwwwwww!##tt#',
'.T#..!wwwwwwwwwwwwwwwwwwww!.....',
'..#..!wwwwwwwwTwwwwwwwwTww!.#Tt#',
'.t..T!wwwwwwwwwwwwwwwwwwww!.....',
'.##.#!wwwwwTwwwwwwwwwwwwww!#.t#.',
'.tt#.!wwwwwwwwwwwwwTwwwwww!#.t##',
'...#.!eeeeeTeeeeeeeeeeeeee!..tt.',
'.#T.#!...................5!T.##.',
'.#t.#!--------------------!...t#',
'..t..!..#.#T..#.t..#T..##t!#....',
'.##t.!#t...#t.##tt.#..tt..!.#.##',
'.tT##!##T#t.....T.......T#!..Tt.',
'.t.#.!......##.#..##tt.#.t!.tt.#',
'.....!.tT#..T..#tt...t....!....#',
'.T.##!..##...##..t..Tt.#t.!.t..#',
'.#..T!..#t.T..t......#.#.#!..#T.',
'..t#.!.#..........##t....#!.#tt#',
'...#.!##tTt...##T.....T.#.!.T...',
'.Tt..!.#t.#..T...tt#...##.!#..##',
'.##..!.....t...#...#t...tt!#..#.',
'#T.t.!#.t#.T....t##tT...t.!.t..T',
'...T.!--------------------!#...#',
'..#..!...................0!...##',
'.#t.T!eeeeeeeeeeeeeeeeeeee!#T...',
'.#t..!wwTwwwwwwwwwwwwTwwww!##.##',
'.T...!wwwwwwwwwwwwwwwwwwww!.#.t.',
'.#..#!wwwwwTwwwwwwwwwwwwww!#T...',
'...#T!wwwwwwwwwwwwTwwwwwww!....#',
'.tt.#!wwTwwwwTwwwwwwwwwwww!#t.t#',
'....#!wwwwwwwwwwwwwwTwwwww!#....',
'.T.#t!cccccccccccccccccccc!#t.T.',
'#t.t.!aaaaaaaTaaaaaaaaaaaa!..t.#',
'#..#t!aaaaaaaaaaaaaaaaaaaa!#..##',
)
battle_map_forest_archers = (
# Battlefield test (10x60):
'.##..!aaaaaaaaaaaaaaaaaaaa!.tt#.',
'.T#.#!aaaaaaaaaaaaTaaaaaaa!.##T#',
'..#.t!wwwwwwwwwwwwwwwwwwww!##tt#',
'.T#..!wwwwwwwwwwwwwwwwwwww!.....',
'..#..!wwwwwwwwTwwwwwwwwTww!.#Tt#',
'.t..T!wwwwwwwwwwwwwwwwwwww!.....',
'.##.#!wwwwwTwwwwwwwwwwwwww!#.t#.',
'.tt#.!wwwwwwwwwwwwwTwwwwww!#.t##',
'#...#!cccccTcccccccccccccc!#....',
'...#.!eeeeeTeeeeeeeeeeeeee!..tt.',
'.#T.#!...................5!T.##.',
'.#t.#!--------------------!...t#',
'..t..!..#.#T..#.t..#T..##t!#....',
'.##t.!#t...#t.##tt.#..tt..!.#.##',
'.tT##!##T#t.....T.......T#!..Tt.',
'.t.#.!......##.#..##tt.#.t!.tt.#',
'.....!.tT#..T..#tt...t....!....#',
'.T.##!..##...##..t..Tt.#t.!.t..#',
'.#..T!..#t.T..t......#.#.#!..#T.',
'..t#.!.#..........##t....#!.#tt#',
'...#.!##tTt...##T.....T.#.!.T...',
'.Tt..!.#t.#..T...tt#...##.!#..##',
'.##..!.....t...#...#t...tt!#..#.',
'#T.t.!#.t#.T....t##tT...t.!.t..T',
'...T.!--------------------!#...#',
'..#..!...................0!...##',
'#t.t.!aaaaaaaTaaaaaaaaaaaa!..t.#',
'#..#t!aaaaaaaaaaaaaaaaaaaa!#..##',
'.T.#t!cccccccccccccccccccc!#t.T.',
'#t.t.!aaaaaaaTaaaaaaaaaaaa!..t.#',
'#..#t!aaaaaaaaaaaaaaaaaaaa!#..##',
'#t.t.!aaaaaaaTaaaaaaaaaaaa!..t.#',
'#..#t!aaaaaaaaaaaaaaaaaaaa!#..##',
)
battle_map_long_road_ch1 = (
# Forest
# Battle on a forest road. Pausanias vs the warlocks Kema'Esh and Kara'Yam.
'.......ttT#..T..#tt...t..T#..T..#tt...t.....tT#..T..#tt...tT..#..T..#tt...t..t......#',
'.T.##....##...##..t..Tt.###...##..t..Tt.#t...##...##..t..T.#..#...##..t..Tt.#t.#.t..#',
'.#..T....#t.T..t......#.##t.T..t......#.#.#..#t.T..t.......#..t.T..t......#.##.#..#T.',
'-------------------------------------------------------------------------------------',
'aaaaaacaaaawwwwwweee.0!aaaaaawwwwwcee...3!......!7...aaacaaaaaa.!5.aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'----------------------!aaaaaawwwwwcee....!......!....aaacaaaaaa.!--------------------',
'aaaaaacaaaawwwwwweee.1!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'aaaaaacaaaawwwwwweee..!aaaaaawwwwwcee....!......!....aaacaaaaaa.!..aaaaaaacaaaaaaaaaa',
'-------------------------------------------------------------------------------------',
'.......ttT#..T..#tt...t..T#..T..#tt...t.....tT#..T..#tt...tT..#..T..#tt...t..t......#',
'.T.##....##...##..t..Tt.###...##..t..Tt.#t...##...##..t..T.#..#...##..t..Tt.#t.#.t..#',
'.#..T....#t.T..t......#.##t.T..t......#.#.#..#t.T..t.......#..t.T..t......#.##.#..#T.',
)
battle_map_long_archers_ch1 = (
# Enzif's archers on a hill vs Julius' hastati.
'aaaaaaaaaAAA..!..........................................................!.....................',
'aaaaaaaaaAAA..!..........................................................!.....................',
'aaaaaaaaaAAA..!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!.....................',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA.5!..........................................................!0.ewwwwwwwwwwaaaaaaaa',
'aaaaaCaaaAAA.6!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!1.ewwwwwwwwwcaaaaaaaa',
'aaaaaCaaaAAA..!..........................................................!..ewwwwwwwwwcaaaaaaaa',
'aaaaaCaaaAAA..!..........................................................!..ewwwwwwwwwcaaaaaaaa',
'aaaaaCaaaAAA..!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwcaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'aaaaaaaaaAAA..!..........................................................!---------------------',
'aaaaaaaaaAAA..!..........................................................!t..t..t..t..t..t..t..',
'aaaaaaaaaAAA..!......t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!t..t..t..t..t..t..t..',
'aaaaaaaaaAAA..!..........................................................!t..t..t..t..t..t..t..',
)
battle_map_long_archers_cliff_ch1 = (
# Enzif's archers on a hill vs Julius' hastati.
'aaaaaaaaaAAA..!||||......................................................!.....................',
'aaaaaaaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!.....................',
'aaaaaaaaaAAA..!||||......................................................!..eaaaaaaaaawa.......',
'aaaaaCaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaCaaaAAA..!||||......................................................!..eaaaaaaaaawa.......',
'aaaaaCaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaCaaaAAA.5!||||......................................................!0.eaaaaaaaaawa.......',
'aaaaaCaaaAAA.6!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!1.eaaaaaaaaaca.......',
'aaaaaCaaaAAA..!||||......................................................!..eaaaaaaaaaca.......',
'aaaaaCaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaCaaaAAA..!||||......................................................!..eaaaaaaaaaca.......',
'aaaaaCaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaaca.......',
'aaaaaCaaaAAA..!||||......................................................!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||......................................................!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||......................................................!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||......................................................!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..eaaaaaaaaawa.......',
'aaaaaaaaaAAA..!||||......................................................!---------------------',
'aaaaaaaaaAAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!t..t..t..t..t..t..t..',
)
battle_map_long_archers_enzif_ch1 = (
# Rangers firing on the praetorians
'AAA..!||||......................................................!.....................',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!.....................',
'AAA..!||||......................................................!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||......................................................!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'CCC.5!||||......................................................!0.ewwwwwwwwwwaaaaaaaa',
'CCC.6!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!1.ewwwwwwwwwcaaaaaaaa',
'CCC..!||||......................................................!..ewwwwwwwwwcaaaaaaaa',
'CCC..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'CCC..!||||......................................................!..ewwwwwwwwwcaaaaaaaa',
'CCC..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwcaaaaaaaa',
'AAA..!||||......................................................!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||......................................................!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||......................................................!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||......................................................!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!..ewwwwwwwwwwaaaaaaaa',
'AAA..!||||......................................................!---------------------',
'AAA..!||||..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t..t......!t..t..t..t..t..t..t..',
)
battle_map_long_camp_ch1 = (
# Battle at the "Western Cliffs". Axiothea's militia vs Kema'Esh's demons.
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'################################||||||||||||||||||||||||||||||||||#####',
'################################|1c!aaaa.a.aaca..0...............|#####',
'################################|!!!!!!!!!!aaaa..................|#####',
'################################|..cwwwwww!!aaa..................|#####',
'################################|wwwwwwwww!!aa!!!!!!!!!!!!!!!!...|#####',
'***********!********************|...wwwwwww!aa!..............!...|#####',
'***********!********************|5....wwwww!!!!..............!...|#####',
'aaaaaaaacc.!....................|.....wwwwwww.!..............!...|#####',
'aaaaaaaacc.!....................!!!!..wwww....!..............!...|#####',
'aaaaaaaacc6!....................!..!..wwww....!..............!...|#####',
'aaaaaaaacc7!....................!.a!..wwww....!..............!...|#####',
'aaaaaaaacc.!....................|aa!!!!ww!!!!!!..............!...|#####',
'aaaaaaaacc.!....................|aaaaa!ww!aa!................!...|#####',
'***********!********************|aaaaa!ww!aa!................!...|#####',
'***********!********************|aaaaa!!!!aa!................!...|#####',
'################################|aaaaaaaaaaa!................!...|#####',
'################################|aaaaaaaaaaa!................!...|#####',
'################################|.....aaaaaa!!!!!!!!!!!!!!!!!!...|#####',
'################################|................................|#####',
'################################|................................|#####',
'################################|................................|#####',
'################################||||||||||||||||||||||||||||||||||#####',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
'#######################################################################',
)
battle_map_long_run_enzif_ch1 = (
# Gaius Julius and "Pausanias' Goatherds" against Enzif's archers
'............!.eeeeecceeeeeeeecceeeeeeeeeeeeeeee!.........................',
'............!eeeeeecceeeeeeeecceeeeeeeeeeeeeeee!.........................',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!.........................',
'............!0wwwwwwwwwccwwwwwwwwwccwwwwwwwwwww!eeeeeee..................',
'............!1wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!eecceee...2..............',
'wwwwwwwwwwww!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!eecceee..................',
'wwwwwwwwwwww!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!eeeeeee..................',
'wwwwwwwwwwww!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!!!!!!!!!!!!!!!!!!!!!!!!!!',
'wwwwwwwwwwww!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwww4!eee..........',
'wwwwwwwwwwww!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwww!eee..........',
'wwwwwwwwwwww!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!wwwwwwwwwww!eee..........',
'wwwcwwwwwwww!5aaacaaaaaaaaacaaaaaaacaaaa!a6aa!a!wwwwwwwwcww!eeee.........',
'wwwcwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaa!acca!a!wwwwwwwwwww!eeee.........',
'wwwwwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaa!aaaa!a!wwwwwwwwwww!eeee.........',
'wwwwwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaa!aaaa!a!wwwwwwwwwww!ccee.........',
'wwwwwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaa!aaaa!a!wwwwwwwwwww!ccee.........',
'wwwcwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaa!!!!!!a!wwwwwwwwwww!eeee.........',
'wwwcwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwww!eeee.........',
'wwwwwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwww!eeee.........',
'wwwwwwwwwwww!aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwww!eeee.........',
'!!!!!!!!!!!!!!||||||||||||||...|||||||||||||||||!!!!!!!!!!!!!!!!!!!!!!!!!',
'^^^^^^^^^^^^^^|............................AAAA|wwwwwwwwww...............',
'^^^^^^^^^^^^^^|............................aaaA|wwwwwwwwww...............',
'^^^^^^^^^^^^^^|............................aaaA|wwwwwwwwww..............3',
'^^^^^^^^^^^^^^|....!!!!!!!!!!!!!!!!!!!!!!!!!aaa!wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aac!wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa!wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaA|wwwwwwwcww...............',
'^^^^^^^^^^^^^^|....!.......................!aaA|wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa|wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa!wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa!wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa!wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa|wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa|wwwwwwwwww...............',
'^^^^^^^^^^^^^^|....!.......................!aaa|wwwwwwwwww...............',
#............'^^|....!.......................!aaa|^^^^.....................',
#............'^^|....!!!!!!!!!!!!!!!!!!!!!!!!!aaa|^^^^.....................',
#............'^^|.............................aaa|^^^^.....................',
#............'^^|.............................aaa|^^^^.....................',
#............'^^|.............................aaa|^^^^.....................',
#............'^^||||||||||||||||||||||||||||||||||^^^^.....................',
#............'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^.....................',
#............'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^.....................',
#............'.............................................................',
)
battle_map_long_run_enzif_end_ch1 = (
# Pursuit of Enzif's archers.
'#########################################################################',
'#########################################################################',
'#########################################################################',
'#########################################################################',
'#########################################################################',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwwwwww...',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwwwwww...',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwwwwww...',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwcwwww...',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwcwwww...',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwwwwww...',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwwwwww..1',
't.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.!wwwwwwwwwwwww...',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwee..',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwee..',
'.......5.......aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwee..',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac!wwwwwwwwwwwwcc.2',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac!wwwwwwwcwwwwcc.0',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac!wwwwwwwcwwwwee..',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac!wwwwwwwwwwwwee..',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwee..',
'...............aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwee..',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'#########################################################################',
'#########################################################################',
'#########################################################################',
'#########################################################################',
)
battle_map_long_south_barrage_magor_ch1 = (
# Patroclus and Marcellius against Magor's sarissophoroi:
'.9..............!.8...........~~!6..................!7...................!.......................',
'................!.............~~!...................!....................!.......................',
'................!.............~~!...................!5...................!.......................',
'................!.............~~!...................!....................!.......................',
'................!...............!...................!....................!.......................',
'................!...............!...................!....................!.......................',
'aaaaaaaaaaaaaaaa!aaaaaaaaaaaaa~~!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!.......................',
'aaaaaaaaaaaaaaaa!aaaaaaaaaaaaa~~!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!.......................',
'aaaaaaaaaaaaaaaa!aaaaaaaaaaaaa~~!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!.......................',
'aaaaaaaaaaaaaaaa!aaaaaaaaaaaaa~~!wwwwwwccccccwwwwwww!wwwwccccccccccccwwww!.......................',
'aaaaaaaaaaaaaaaa!aaaaaaaaaaaaa~~!wwwwwwwwwwwwwwwwwww!eeeeeeeeeeeeeeeeeeee!.......................',
'AAAAAAAAAAACCAAA!AAAAAAAAAAACC~~!eeeeeeeeeeeeeeeeeee!eeeeeeeeeeeeeeeeeeee!.......................',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'~~~~~~~~~~~.................~~~~!...................!....................!.......................',
'~~~~~~~~~~~.................~~~~!eeeeeeeeecceeeeeeee!eeeeeeeeecceeeeeeeee!.......................',
'~~~......__||||!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'.........__|AAAAAAAAAAAAAAAAAAAA!eeeeeeeeeeeeeeeeeee!eeeeeeeeeeeeeeeeeeee!AAAAAAAAAAAACCAAAAAAAA.',
'.........__|AAAAAAAAAAAACCCCAAAA!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!AAAAAAAAAAAAAAAAAAAAAA.',
'.t.t.....__|aaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!aaaaaaaaaaaaaaaaaaaaaa.',
'.........__|aaaaaaaaaaaaaaaaaaaa!wwwwwcccccccccwwwww!wwwwwwcccccccccwwwww!aaaaaaaaaaaaaaaaaaaaaa.',
'.t.t.....__|aaaaaaaaaaaaaaaaaaaa!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!======================.',
'.........__|....................!wwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwwww!======================.',
'.t.t.....__|....................!1..................!2...................!3......................',
'.........__|!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!....................!.......................',
'.t.t.....__|0eeccceeeeeeeeeeeeee!.eeeeeecccceeeeeeee!....................!.......................',
'.........__|!!!!!!!!!!!!!!!!!!!!|||||||||||||||||||||!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'.t.t.....__|AAAAAAAAACCAAAAAAAAA.AAAAAAACCAAAAAAAAAA|....................!......................!',
'.........__|AAAAAAAAAAAAAAAAAAAA.AAAAAAAAAAAAAAAAAAA|..t.t.t.t.t.t.t.t.t.!..t.t.t.t.t.t.t.t.t...!',
'.t.t.....__|....................|aaaaaaaaaaaaaaaaaaa|....................!......................!',
'.........__|....................|aaaaaaaaaaaaaaaaaaa|..t.t.t.t.t.t.t.t.t.!..t.t.t.t.t.t.t.t.t...!',
'.t.t.....__|....................|aaaaaaaaaaaaaaaaaaa|....................!......................!',
'.........__|....................|...................|..t.t.t.t.t.t.t.t.t.!..t.t.t.t.t.t.t.t.t...!',
'.t.t.....__|..t.t.t.t.t.t.t.t.t.|...................|....................!......................!',
'.........__|....................|.4.................|..t.t.t.t.t.t.t.t.t.!..t.t.t.t.t.t.t.t.t...!',
'.t.t.....__|..t.t.t.t.t.t.t.t.t.|...................|....................!......................!',
'.........__|!!!!!!!!!!!!!!!!!!!!|||||||||||||||||||||....................!......................!',
)
battle_map_long_south_barrage_magor_end_ch1 = (
# Patroclus and Marcellius against Magor's sarissophoroi:
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc...2.!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;5.aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa.3!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;6.aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!caaaa..!...........',
'!.............||;;;;;;;;;;;7.aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!caaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!caaaa..!...........',
'!.............||;;;;;;;;;;;8.aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;9.aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!!!!!!!!!!!!!!!!!!!!!!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.0.1.!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa.4!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!caaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!caaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!.............||;;;;;;;;;;;..aaaaaaaaaaaaawwwwwweeeeccwewwwwwww.!ewwwwwc.....!aaaaa..!...........',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
)
battle_map_long_south_barrage_hill_ch1 = (
# Patroclus' crossbowmen on the "Earthen Rampart" vs Magor's sarissophoroi.
# The squad in zone 8 is veteran; Magor himself is in zone 5.
'aaaaaaaaaaaaa.6!t..t..t..t..t..t..t..t..t..t......!~~~~|=0!=1!=2!=3!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'aaaaaacaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaacaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa.7!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaacaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'aaaaaacaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!AA!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|CA!CA!CA!CA!..........',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|CA!CA!CA!CA!..........',
'wwwwwwcwwwwEE.8!t..t5!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!.....!............................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!eecc.!............................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!eecc.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!eecc.!............................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!.....!............................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwEE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE.9!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!AA!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!AA!..........',
)
battle_map_long_south_barrage_hill_volley_ch1 = (
# Patroclus' crossbowmen on the "Earthen Rampart" vs Magor's sarissophoroi.
# The squad in zone 8 is veteran; Magor himself is in zone 5.
'aaaaaaaaaaaaa.6!t..t..t..t..t..t..t..t..t..t......!~~~~|========!0aaaaaaaaaaaa',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaacaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaacaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaacaaaaaaa',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|AA!AA!AA!!!!!!!!!!!!!!',
'aaaaaaaaaaaaa.7!..................................!~~~~|AA!AA!AA!1aaaaaaaaaaaa',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaacaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaacaaaaaaa',
'aaaaacaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|CA!CA!CA!aaaaaaaaaaaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|CA!CA!CA!!!!!!!!!!!!!!',
'wwwwwwwwwwcWE.8!t..t5!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!2aaaaaaaaaaaa',
'wwwwwwwwwwcWE..!.....!............................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!eecc.!............................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!eecc.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaacaaaaaaa',
'wwwwwwwwwwcWE..!eecc.!............................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!.....!............................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|AA!AA!AA!!!!!!!!!!!!!!',
'wwwwwwwwwwcWE.9!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!3aaaaaaaaaaaa',
'wwwwwwwwwwcWE..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!..................................!~~~~|AA!AA!AA!aaaaacaaaaaaa',
'wwwwwwwwwwcWE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
'wwwwwwwwwwcWE..!..................................!~~~~|AA!AA!AA!aaaaaaaaaaaaa',
)
battle_map_long_south_barrage_hill_rear_ch1 = (
# Patroclus' crossbowmen on the "Earthen Rampart" vs Magor's sarissophoroi.
# The squad in zone 8 is veteran; Magor himself is in zone 5.
'aaaaaaaaaaaaa.6!t..t..t..t..t..t..t..t..t..t......!~~~~|=3!=2!=1!=0!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'aaaaaacaaaaaa..!..................................!~~~~|AA!AA!AA!aa!..........',
'aaaaaacaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aa!..........',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa.7!..................................!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aa!..........',
'aaaaaacaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!!!!!......',
'aaaaaacaaaaaa..!..................................!~~~~|AA!AA!AA!aa!ee5!......',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!eee!......',
'aaaaaaaaaaaaa..!..................................!~~~~|AA!AA!AA!aa!eee!......',
'aaaaaaaaaaaaa..!t..t..t..t..t..t..t..t..t..t......!~~~~|CA!CA!CA!ca!ccc!......',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|CA!CA!CA!ca!ccc!......',
'wwwwwwcwwwwEE.8!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!eee!......',
'wwwwwwcwwwwEE..!.....!............................!~~~~|AA!AA!AA!aa!eee!......',
'wwwwwwcwwwwEE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!eee!......',
'wwwwwwcwwwwEE..!eecc.!............................!~~~~|AA!AA!AA!aa!eee!......',
'wwwwwwcwwwwEE..!eecc.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!eee!......',
'wwwwwwcwwwwEE..!eecc.!............................!~~~~|AA!AA!AA!aa!!!!!......',
'wwwwwwcwwwwEE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwEE..!.....!............................!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwEE..!t..t.!t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE.9!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!t..t..t..t..t..t..t..t..t..t......!~~~~|AA!AA!AA!aa!..........',
'wwwwwwcwwwwwE..!..................................!~~~~|AA!AA!AA!aa!..........',
)
battle_map_long_hana_vam_end = (
'.........................!2.........................................!.......................',
'.........................!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwww....',
'.........................!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwww....',
'.........................!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwww....',
'.........................!wwwwwwwwccccccccccwwwwwwwwwwwwwwwwwwwwwwww!ccccccwwwwwwwwwwwww....',
'.........................!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!eeeeeeeewwwwwwwwwww....',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'...........!1.wwwwwwwwww.!.....!7.......!5a6aaaaaaaaaaaaa.!.8aaaa...!..............3..aaaaaa',
'...........!..wwwwwwwwww.!.....!........!aaaaaaaaaaaaaaaa.!..aaaa...!.................aaaaaa',
'...........!0.wwwwwwwwww.!.....!........!aaaaaaaaaaaaaaaa.!..aaaa...!.................aaaaaa',
'...........!..wwwweeewww.!.....!........!aaaaaaaaaaaaaaaa.!..aaaa...!!!!!!............caaaaa',
'...........!..wwwweeewww.!.....!.eeee...!ccccaaaaaaaaaaaa.!..aacc...!ee.4!............caaaaa',
'...........!..wwwwccewww.!.....!.eeee...!ccccaaaaaaaaaaaa.!..aacc...!ec..!............caaaaa',
'.......eee.!..wwwwccewww.!.....!.eeee...!ccccaaaaaaaaaaaa.!..aacc...!ec..!............caaaaa',
'.......eee.!..wwwwccewww.!.....!.eeee...!ccccaaaaaaaaaaaa.!..aacc...!ee..!............caaaaa',
'.......eee.!..wwwwccewww.!.....!.eeee...!ccccaaaaaaaaaaaa.!..aaaa...!!!!!!............aaaaaa',
'.......ecc.!..wwwwccewww.!.....!.eeee...!aaaaaaaaaaaaaaaa.!..aaaa...!.................aaaaaa',
'.......ecc.!..wwwweeewww.!.....!.eecc...!aaaaaaaaaaaaaaaa.!..aaaa...!.................aaaaaa',
'.......ecc.!..wwwweeewww.!.....!.eecc...!aaaaaaaaaaaaaaaa.!..aaaa...!.................aaaaaa',
'.......eee.!..wwwwwwwwww.!.....!.eecc...!aaaaaaaaaaaaaaaa.!..aaaa...!.................aaaaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'...........!..wwwwwwwwww.!..........................................!.......................',
'...........!..cwwwwwwwww.!..........................................!.......................',
'...........!..cwwwwwwwww.!..........................................!.......................',
'...........!..cwwwwwwwww.!..........................................!.......................',
'...........!..wwwwwwwwww.!..........................................!.......................',
)
battle_map_long_hana_vam_end_capture = (
'.........................!2.........................................!.......................',
'.........................!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwww....',
'.........................!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwww....',
'.........................!wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww!wwwwwwwwwwwwwwwwwww....',
'.........................!wwwwwwwwccccccccccwwwwwwwwwwwwwwwwwwwwwwww!ccccccwwwwwwwwwwwww....',
'.........................!eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee!eeeeeeeewwwwwwwwwww....',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'ttttttttttttttttttttttttt!tttttttttttttttttttttttttttttttttttttttttt!.......................',
'.........................!..........................................!.......................',
'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'...........!1.wwwwwwwwww.!..........................................!..............3..aaaaaa',
'...........!..wwwwwwwwww.!...5.6..7.8..9............................!.................aaaaaa',
'...........!0.wwwwwwwwww.!..........................................!.................aaaaaa',
'...........!..wwwweeewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!!!!!!............caaaaa',
'...........!..wwwweeewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!ee.4!............caaaaa',
'...........!..wwwwccewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!ec..!............caaaaa',
'.......eee.!..wwwwccewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!ec..!............caaaaa',
'.......eee.!..wwwwccewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!ee..!............caaaaa',
'.......eee.!..wwwwccewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!!!!!!............aaaaaa',
'.......ecc.!..wwwwccewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.................aaaaaa',
'.......ecc.!..wwwweeewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.................aaaaaa',
'.......ecc.!..wwwweeewww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.................aaaaaa',
'.......eee.!..wwwwwwwwww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.................aaaaaa',
'!!!!!!!!!!!!!!!!!!!!!!!!!!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!!!!!!!!!!!!!!!!!!!!!!!!',
'...........!..wwwwwwwwww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.......................',
'...........!..cwwwwwwwww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.......................',
'...........!..cwwwwwwwww.!...eeeecccMMMMMMMMMMaaaaaaaaaaaaaaa.......!.......................',
'...........!..cwwwwwwwww.!..........................................!.......................',
'...........!..wwwwwwwwww.!..........................................!.......................',
)
battle_map_long_near = (
'-------------------------------------------------------------------------------------',
'aaaaaacaaaawwwwwweee.0!...3aaaaaawwwwwcee!7..........eeecwwwwww.!5.aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'----------------------!....aaaaaawwwwwcee!...........eeecwwwwww.!--------------------',
'aaaaaacaaaawwwwwweee.1!....aaaaaawwwwwcee!...........eeecwwwwww.!6.aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'aaaaaacaaaawwwwwweee..!....aaaaaawwwwwcee!...........eeecwwwwww.!..aaaaeeecwwwwwwaaaa',
'-------------------------------------------------------------------------------------',
)
battle_map_long_near_militia_vs_elite = (
'-------------------------------------------------------------------------------------',
'!aaaaacaaaaaeee16!cwwwwwwwwwweee.1!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'-------------------------------------------------------------------------------------',
'!aaaaacaaaaaeee17!cwwwwwwwwwweee.2!cwwwwwe0!...ewwwwwc5!eeeeecc6!7.aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!----------------!----------------!cwwwwwe.!...ewwwwwc.!eeeeecc.!--------------------',
'!aaaaacaaaaaeee18!cwwwwwwwwwweee.3!cwwwwwe.!...ewwwwwc.!eeeeecc.!8.aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!cwwwwwe.!...ewwwwwc.!eeeeecc.!..aaaaeeecwwwwwwaaaa',
'-------------------------------------------------------------------------------------',
'!aaaaacaaaaaeee19!cwwwwwwwwwweee.4!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'!aaaaacaaaaaeee..!cwwwwwwwwwweee..!..................................................',
'-------------------------------------------------------------------------------------',
)
battle_map_long_mages = (
'1a2a3wcwawwe!.......................................................................!e5a6a7wcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'w0wawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'w11awwcwawwe!.......................................................................!ewawawwcwa..',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'w12awwcwawwe!.......................................................................!ewawawwcwa..',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'w13awwcwawwe!.......................................................................!ewawawwcwa..',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
'wawawwcwawwe!.......................................................................!ewawawwcwaww',
)
battle_map_long_birds = (
'......................................................................................!5.........',
'!!!!!!!!!!!!!.........................................................................!..........',
'0w1w2wwwwwEW!.........................................................................!..........',
'wwwwwwwwwwEW!.........................................................................!a.........',
'wwwwwwwwwwEW!.........................................................................!a.........',
'wwwwwwwwwwEW!.........................................................................!ac........',
'wwwwwwwwwwEW!.........................................................................!ac........',
'wwwwwwwwwwEW!.........................................................................!ac........',
'wwwwwwwwwwEW!.........................................................................!ac........',
'wwwwwwwwwwCW!.........................................................................!ac........',
'wwwwwwwwwwCW!.........................................................................!ac........',
'wwwwwwwwwwCW!.........................................................................!ac........',
'wwwwwwwwwwCW!.........................................................................!ac........',
'wwwwwwwwwwCW!.........................................................................!ac........',
'wwwwwwwwwwCW!.........................................................................!a.........',
'wwwwwwwwwwEW!.........................................................................!a.........',
'wwwwwwwwwwEW!.........................................................................!a.........',
'wwwwwwwwwwEW!.........................................................................!..........',
'wwwwwwwwwwEW!.........................................................................!..........',
'wwwwwwwwwwEW!.........................................................................!..........',
'wwwwwwwwwwEW!.........................................................................!..........',
'wwwwwwwwwwEW!.........................................................................!..........',
'!!!!!!!!!!!!!.........................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
'......................................................................................!..........',
)
battle_map_long_cavalry = (
'......................................................................................!5a6a7a8a9a',
'!!!!!!!!!!!!!.........................................................................!aaaaaaaaaa',
'0wwwwwwwwwwe!.........................................................................!aaaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!aaaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!aaaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!aaaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!aaaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!aaaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwcwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwcwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwcwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwcwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwcwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwcwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'wwwwwwwwwwwe!.........................................................................!caaaaaaaaa',
'!!!!!!!!!!!!!.........................................................................!caaaaaaaaa',
'......................................................................................!caaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
'......................................................................................!aaaaaaaaaa',
)
battle_map_long_line_infantry_volley = (
'a4aa!w2w3!w0e1!.................##..................!8w5w!9e6c!a7aa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!wcww!.................##..................!wwcw!eeec!acaa',
'acaa!ceee!wcww!.................##..................!wwcw!eeec!acaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!wcww!.................##..................!wwcw!eeec!acaa',
'acaa!ceee!wcww!.................##..................!wwcw!eeec!acaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!wcww!.................##..................!wwcw!eeec!acaa',
'acaa!ceee!wcww!.................##..................!wwcw!eeec!acaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'acaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
'aaaa!ceee!weww!.................##..................!wwew!eeec!aaaa',
)
artillery_map_ship_boarding = (
# Frigate gun deck -- 105 ft
# Distance between ships -- <100 ft
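# (Assuming the usual 5 ft per map square, the 105 ft gun deck corresponds to the 21 deck rows drawn below.)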
'4.!2.!1.!~~~~~~~!6.!7.!~~~~~~~~~~~~~~~~~~~~~~~~~!3.',
'..!..!0.!~~~~~~~!9.!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'ww!WW!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!~~~~~~5!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'cw!CW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!wc',
'we!WE!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'cw!CW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!wc',
'we!WE!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'cw!CW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!wc',
'we!WE!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!ce!.!ec!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'we!WE!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ew',
'ww!WW!CE!~~~~~~~!ec!ec!~~~~~~~~~~~~~~~~~~~~~~~~~!ww',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!~~~~~~~!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
'..!..!..!..!.!..!..!..!~~~~~~~~~~~~~~~~~~~~~~~~~!..',
)
artillery_map_ship_boarding_shatter = (
# Frigate gun deck -- 105 ft
# Distance between ships -- <100 ft
'~~~~~~~~~~~~~~',
'~~~===~~~~~~~~',
'~~=====~~~===~',
'~!!!!!!~~=====',
'~!25!21~~51!!!',
'~!CE!==~~ECeEE',
'~!W!!==~~ECeEE',
'~!W!EEE~~ECeEE',
'~!W!EEE~~ECeEE',
'~!W!CCE~~ECeEE',
'~!W!CCE~~ECeEE',
'~!W!CCE~~ECeEE',
'~!W!EEE~~eceEE',
'~!W!EEE~~eceEE',
'~!W!!==~~eceEE',
'~!CE!==~~eceEE',
'~!CE!24~~eceEE',
'~!WE!23~~eceEE',
'~!WE!22~~eceEE',
'~!WE!!!~~eceEE',
'~!WE!==~~eceEE',
'~!WE!==~~eceEE',
'~!WE!==~~!!!!!',
)
artillery_map_ship_boarding_ally = (
# Frigate gun deck -- 105 ft
# Distance between ships -- <100 ft
'~~~~~~~~~~~~~~~~~!!!!!!!!!!!!!!!!!!!',
'~~~===~~~~~~~~~~=!ecwcwwwwwwwwwww14!',
'~~=====~~~===~~~=!wewewewewewewewew!',
'~!!!!!!~~=====~~~!!!!!!!!!!!!!!!!!!!',
'~!24!21~~51!!!~~~~~~~~~~~~~~~~~~!15!',
'~!CE!==~~ECeEE~~~~~~~~~~~~~~~~~~!ww!',
'~!W!!==~~ECeEE~~~~~~~~~~~~~~~~~~!ew!',
'~!W!EEE~~ECeEE~~~~~~~~~~~~~~~~~~!ww!',
'~!W!EEE~~ECeEE~~~~~~~~~~~~~~~~~~!ew!',
'~!W!CCE~~ECeEE~~~~~~~~~~~~~~~~~~!wc!',
'~!W!CCE~~ECeEE~~~~~~~~~~~~~~~~~~!ew!',
'~!W!CCE~~ECeEE~~~~~~~~~~~~~~~~~~!ww!',
'~!W!EEE~~eceEE~~~~~~~~~~~~~~~~~~!ew!',
'~!W!EEE~~eceEE~~~~~~~~~~~~~~~~~~!ww!',
'~!W!!==~~eceEE~~~~~~~~~~~~~~~~~~!ew!',
'~!WE!==~~eceEE~~~~~~~~~~~~~~~~~~!ww!',
'~!WE!==~~eceEE~~~~~~~~~~~~~~~~~~!ew!',
'~!WE!23~~eceEE~~~~~~~~~~~~~~~~~~!wc!',
'~!WE!22~~eceEE~~~~~~~~~~~~~~~~~~!ew!',
'~!!!!!!~~eceEE~~~~~~~~~~~~~~~~~~!ww!',
'~!...==~~eceEE~~~~~~~~~~~~~~~~~~!!!!',
'~!...==~~eceEE~~~~~~~~~~~~~~~~~~!!!!',
'~!...==~~!!!!!~~~~~~~~~~~~~~~~~~!16!',
'~!...==~~52...~~~~~~~~~~~~~~~~~~!wc!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ww!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!wc!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ww!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!wc!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ww!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ww!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ew!',
'~!...==~~.....~~~~~~~~~~~~~~~~~~!ww!',
'~!!!!!!~~!!!!!~~~!!!!!!!!!!!!!!!!!!!',
'~~=====~~=====~~=!wewewewewewewewew!',
'~~~===~~~~===~~~=!wcwcwwwwwwwwwww17!',
'~~~~~~~~~~~~~~~~~!!!!!!!!!!!!!!!!!!!',
)
artillery_map_ship = (
# Frigate gun deck -- 105 ft
# Distance between ships -- <100 ft
'3.!2.!1.!0.!~!5.!6.!7.!8.',
'..!..!4.!.e!~!e.!9.!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'ww!ww!ww!..!~!..!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!..!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!..!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'wC!wC!wC!..!~!..!Cw!Cw!Cw',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!c.!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'wC!wC!wC!..!~!..!Cw!Cw!Cw',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!..!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'wC!wC!wC!..!~!..!Cw!Cw!Cw',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!..!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!..!ww!ww!ww',
'we!we!we!..!~!..!ew!ew!ew',
'ww!ww!ww!..!~!..!ww!ww!ww',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
'..!..!..!.e!~!e.!..!..!..',
)
artillery_map_ship_volley_short = (
# Frigate gun deck -- 105 ft
# Distance between ships -- <100 ft
'3.!2.!1.!0.!#!5.!6.!7.!8.',
'..!..!4.!..!#!..!9.!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'ww!ww!ww!..!#!..!ww!ww!ww',
'we!we!we!..!#!..!ew!ew!ew',
'ww!ww!ww!..!#!..!ww!ww!ww',
'we!we!we!..!#!..!ew!ew!ew',
'ww!ww!ww!ww!#!ww!ww!ww!ww',
'we!we!we!we!#!ew!ew!ew!ew',
'cw!cw!cw!ww!#!ww!wc!wc!wc',
'we!we!we!we!#!ew!ew!ew!ew',
'ww!ww!ww!wc!#!cw!ww!ww!ww',
'we!we!we!we!#!ew!ew!ew!ew',
'cw!cw!cw!ww!#!ww!wc!wc!wc',
'we!we!we!we!#!ew!ew!ew!ew',
'ww!ww!ww!wc!#!cw!ww!ww!ww',
'we!we!we!we!#!ew!ew!ew!ew',
'cw!cw!cw!ww!#!ww!wc!wc!wc',
'we!we!we!we!#!ew!ew!ew!ew',
'ww!ww!ww!ww!#!ww!ww!ww!ww',
'we!we!we!..!#!..!ew!ew!ew',
'ww!ww!ww!..!#!..!ww!ww!ww',
'we!we!we!..!#!..!ew!ew!ew',
'ww!ww!ww!..!#!..!ww!ww!ww',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
'..!..!..!..!#!..!..!..!..',
)
artillery_map_ship_volley_medium = (
# Frigate gun deck -- 105 ft
# Distance between ships -- 225-320 ft
'3.!2.!1.!0.!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!5.!6.!7.!8.',
'..!..!4.!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!9.!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ww!ww!ww!ww',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'cw!cw!cw!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ww!wc!wc!wc',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'ww!ww!ww!wc!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!cw!ww!ww!ww',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'cw!cw!cw!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ww!wc!wc!wc',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'ww!ww!ww!wc!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!cw!ww!ww!ww',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'cw!cw!cw!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ww!wc!wc!wc',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'ww!ww!ww!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!ww!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
)
artillery_map_ship_volley_range = (
# Frigate gun deck -- 105 ft
# Distance between ships -- 400-500 ft
'3.!2.!1.!0.!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!5.!6.!7.!8.',
'..!..!4.!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!9.!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ww!ww!ww!ww',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'cw!cw!cw!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ww!wc!wc!wc',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'ww!ww!ww!wc!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!cw!ww!ww!ww',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'cw!cw!cw!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ww!wc!wc!wc',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'ww!ww!ww!wc!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!cw!ww!ww!ww',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'cw!cw!cw!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ww!wc!wc!wc',
'we!we!we!we!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ew!ew!ew!ew',
'ww!ww!ww!ww!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!ww!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'we!we!we!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ew!ew!ew',
'ww!ww!ww!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!ww!ww!ww',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
'..!..!..!..!~~~~~~~~~~~~~~~~~~~~#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!..!..!..!..',
)
battle_map_long_line_infantry = (
'a4aa!w2w3!w0e1!...................................................................!8w5w!9e6c!a7aa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!wcww!...................................................................!wwcw!eeec!acaa',
'acaa!ceee!wcww!...................................................................!wwcw!eeec!acaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!wcww!...................................................................!wwcw!eeec!acaa',
'acaa!ceee!wcww!...................................................................!wwcw!eeec!acaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!wcww!...................................................................!wwcw!eeec!acaa',
'acaa!ceee!wcww!...................................................................!wwcw!eeec!acaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'acaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
'aaaa!ceee!weww!...................................................................!wwew!eeec!aaaa',
)
battle_map_long_hunting_plains = (
# Hunting wild animals in a dense tropical forest.
'aa23!ww01!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,#t.#.t.#..#T..##t.#....#.t......##.###.##t.#.t',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,....#...#.#..tt....#.###...t...tt.ttt.tt....#.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,T#...#.........T#...Tt...T.......T...T..T#...#',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,.t..tt..t##tt.#.t..tt.#.#.......#..##..#.t..tt',
'aaca!wcww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,..........TTt.........#.#tt.ttt...............',
'aaca!wcww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,t...t.....TTt.#t...t..##..t.ttt.#t.##t.#t...t.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,.#...#......#.#.#...#T.t........#.T##.T#.#...#',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,.#..##..###t....#..#tt#.T................#..##',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,#...T...T....T.#...T...##T.T...T.#T..#T.#...T.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,#..#...#.t#...##..#..##...t...t.##.###.##..#..',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,tt.#.t.#...t...tt.#..#..#....##..t...t..tt.#.t',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!5..MM..#.#..tt....#.###...t...tt.ttt.tt....#.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!MM.MM..MM...#.T#ww.Tt...Tww##...T...T..T#...#',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!MM.tt..MM#tt.eetwwtt.#.#.ww##..#..##..#.t..tt',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!.##MM.#..TTt.ee.ww.cc#.#tt.cct...............',
'aaca!wcww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!MM.MM.#..TTt.#t.wwtcc##wwt.cct.#t.##t.#t...t.',
'aaca!wcww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!MM..#..MM..#.ee#...#T.tww..##..#.T##.T#.#...#',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!#..MM..MM#t..ee#ww#tt#.T...#ee.....T....#..##',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!MM.MM..T....T.#.wwTcc.wwT.T.eeT.#T..#T.#...T.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!MM#...#MM#..ee#.ee.cc#ww.tee.t.##.#.#.##..#..',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!t.#.t.#MM#t.eettee..#..#..ee....t...t..tt.#.t',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,.#...#......#.#.#...#T.t.....#..#.T##.T#.#...#',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,.#..##..#.#t....#..#tt#.T................#..##',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,#...T...T....T.#...T...##T.T...T.#T..#T.#...T.',
'aaca!wcww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,#..#...#.t#...##..#..##...t.t.t.##.###.##..#..',
'aaca!wcww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,tt.#.t.#..#t...tt.#..#..#....T...t...t..tt.#.t',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,#t.#.t.#..#T..##t.#....#.t......##.###.##t.#.t',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,....#...#.#..tt....#.###...t...tt.ttt.tt....#.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,T#...#.........T#...Tt...T..T....T...T..T#...#',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,.t..tt..t##tt.#.t..tt.#.#.......#..##..#.t..tt',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,..#....#..#Tt.........#.#.t...t...............',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,t...t...t#tT...t...t..T..t#.###..t...t..t...t.',
'aaaa!weww!,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,tt.#.t.#..#t...tt.#..#..#........t...t..tt.#.t',
)
battle_map_long_hunting_forest = (
# Hunting wild animals in a dense tropical forest.
'aa23!ww01!#T.T..#.t..#T..##t.#.##t.#......#.t..##.##t.#.t.#..#T..##t.#....#.t......##.###.##t.#.t',
'aaaa!weww!.#t#t.##tt.#..tt....tt....##.##.##tttt.tt....#...#.#..tt....#.###...t...tt.ttt.tt....#.',
'aaaa!weww!t.......T.......T#....T#....Tt....T...T..T#...#.........T#...Tt...T.......T...T..T#...#',
'aaaa!weww!..#.##.#..##tt.#.t...#.t..t.t.##.#...#..#.t..tt..t##tt.#.t..tt.#.#.......#..##..#.t..tt',
'aaca!wcww!..T....#tt...t................#..#tt...............TTt.........#.#tt.ttt...............',
'aaca!wcww!.....##..t..Tt.#t....#t...t...###..t.#t.#t...t.....TTt.#t...t..##..t.ttt.#t.##t.#t...t.',
'aaaa!weww!.T....t......#.#.#TT.#.#....#T..t....#.T#.#...#......#.#.#...#T.t........#.T##.T#.#...#',
'aaaa!weww!..........##t....#TT...#..##tt#..T........#..##..###t....#..#tt#.T................#..##',
'aaaa!weww!t.....##T.....T.#...T.#...TT....##T.T.#T.#...T...T....T.#...T...##T.T...T.#T..#T.#...T.',
'aaaa!weww!#....T...tt#...##..#.##..#TT.##T...t.##.##..#...#.t#...##..#..##...t...t.##.###.##..#..',
'aaaa!weww!.t.t...#...#t...tt.#..tt.#...#...#....t..tt.#.t.#...t...tt.#..#..#....##..t...t..tt.#.t',
'aaaa!weww!#T.T..#.t..#T..#.t.#.##t.#......#.t..##.#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'aaaa!weww!.#t#t.##tt.#..tt....tt....##.##.##..t..tt!5..MM..#.#..tt....#.###...t...tt.ttt.tt....#.',
'aaaa!weww!t.......T.......T#....T#....Tt....T...T..!MM.MM..MM...#.T#ww.Tt...Tww##...T...T..T#...#',
'aaaa!weww!..#.##.#..##tt.#.t...#.t..t.t.##.#...#..#!MM.tt..MM#tt.eetwwtt.#.#.ww##..#..##..#.t..tt',
'aaaa!weww!..T....#tt...t................#..#tt.....!.##MM.#..TTt.ee.ww.cc#.#tt.cct...............',
'aaca!wcww!.....##..t..Tt.#t....#t...t...###..t.#t.#!MM.MM.#..TTt.#t.wwtcc##wwt.cct.#t.##t.#t...t.',
'aaca!wcww!.T....t......#.#.#...#.#....#T..t....#.T#!MM..#..MM..#.ee#...#T.tww..##..#.T##.T#.#...#',
'aaaa!weww!........#.##t....#.....#..##tt#..T.......!#..MM..MM#t..ee#ww#tt#.T...#ee.....T....#..##',
'aaaa!weww!t.....##T.....T.#...T.#...TT....##T.T.#T.!MM.MM..T....T.#.wwTcc.wwT.T.eeT.#T..#T.#...T.',
'aaaa!weww!#....T...tt#..###..#.##..#TT.##T...t.##.#!MM#...#MM#..ee#.ee.cc#ww.tee.t.##.#.#.##..#..',
'aaaa!weww!.t.t...#...#t.##tt.#..tt.#...#...#....t..!t.#.t.#MM#t.eettee..#..#..ee....t...t..tt.#.t',
'aaaa!weww!...T....t##tT...t.....t...tt..T...t#..t..!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'aaaa!weww!.T....t......#.#.#...#.#....#T..t....#.T#.#...#......#.#.#...#T.t.....#..#.T##.T#.#...#',
'aaaa!weww!..........##t....#.....#..##tt#..T........#..##..#.#t....#..#tt#.T................#..##',
'aaaa!weww!t.....##T.....T.#...T.#...TT....##T.T.#T.#...T...T....T.#...T...##T.T...T.#T..#T.#...T.',
'aaca!wcww!#....T...tt#...##..#.##..#TT.##T...t.##.##..#...#.t#...##..#..##...t.t.t.##.###.##..#..',
'aaca!wcww!.t.t...#...#t...tt.#..tt.#...#...#....t..tt.#.t.#..#t...tt.#..#..#....T...t...t..tt.#.t',
'aaaa!weww!#T.T..#.t..#T..##t.#.##t.#......#.t..##.##t.#.t.#..#T..##t.#....#.t......##.###.##t.#.t',
'aaaa!weww!.#t#t.##tt.#..tt....tt....##.##.##tttt.tt....#...#.#..tt....#.###...t...tt.ttt.tt....#.',
'aaaa!weww!t.......T.......T#....T#....Tt....T...T..T#...#.........T#...Tt...T..T....T...T..T#...#',
'aaaa!weww!..#.##.#..##tt.#.t...#.t..t.t.##.#...#..#.t..tt..t##tt.#.t..tt.#.#.......#..##..#.t..tt',
'aaaa!weww!..T....#tt...t................#..#tt....#..#....#..#Tt.........#.#.t...t...............',
'aaaa!weww!...T....t##tT...t.....t...tt..T...t#..t..t...t...t#tT...t...t..T..t#.###..t...t..t...t.',
'aaaa!weww!.t.t...#...#t...tt.#..tt.#...#...#....t..tt.#.t.#..#t...tt.#..#..#........t...t..tt.#.t',
)
battle_map_long_ilion = (
# Ilion city blocks, 300x150 feet (60x30)
# 400 blocks in total. 250 blocks make up the outer city.
# 3000 feet, 20 blocks from north to south (from the riverbank to the palisade); the attack comes from the east.
# 6000 feet, 20 blocks from east to west.
# The city holds 10 thousand militia, 2200 professional troops, and 7 heroes with their retinues.
# Up to 200 militiamen and 100 soldiers can be deployed per block (eastern side only).
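# (Sanity check of the figures above: 20 blocks x 150 ft = 3000 ft north-south,
# 20 blocks x 300 ft = 6000 ft east-west, and 20 x 20 = 400 blocks in total.)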
'-----------------------------------------------------------|-------##----------------------------',
'#||!||#||!||#||!||#||!||#||!||#||!||#||!||#||!||#...#||!||||!..!;;;##!7aaaaaaaaaaaaaaaaaaaaaaaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#...#AAAAAA!!..!;;;##!aaaaaaaaaaaaaaaaaaaaaaaaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#.1.#AAAAAA!!..!;;;##!aaaaaaaaaaacccccaaaaaaaaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AACAA#...#AAAAAA!!..!;;;##!aaaaaaaaaaacccccaaaaaaaaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^.AAAAA.AAAAA.....AAAAAA!!..!;;;##!aaaaaaaaaaaaaaaaaaaaaaaaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#...#AAAAAA!!..!;;;##!aaaaaaaaaaaaaaaaaaaaaaaaaaa',
'#||!||#||!||#||!||#||!||#||!||#||!||#||!||#||!||#...#||!||||!..!;;;##!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'....................................................!..WCWWE!..!;;;;;!55wwwwwwww!66wwwwwwww!9aaaa',
'....................................................!..WCWWE!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'....................................................!..WCWWE!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'....................................................!..WCWWE!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'#||!||#||!||#||!||#||!||#||!||#||!||#||!||#||!||#!!!#||.||||!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#AAAAA#...#WWWCWW!!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#AAAAA#...#WWWCWE!!..!;;;;;!ewwwwwccww!ewwwwccwww!aaaaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#AAAAA#...#WWWCWW!!..!;;;;;!ewwwwwccww!ewwwwccwww!aacaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^.AAAAA.AAAAA.AAAAA....#WWWCWE!!..!;;;;;!ewwwwwccww!ewwwwccwww!aacaa',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#AAAAA#...#WWWCWW!!..!;;;;;!ewwwwwccww!ewwwwccwww!aacaa',
'#||!||#||!||#||!||#||!||#||!||#||!||#||!||#||!||#!!!#||.||||!..!;;;;;!ewwwwwccww!ewwwwccwww!aacaa',
'|....t..........t.........t............T.......t..tt!0.wCWWW!..!;;;;;!ewwwwwccww!ewwwwccwww!aacaa',
'|.t.....T....T.........t.......t..T........T........!..wCWWE!..!;;;;;!ewwwwwccww!ewwwwccwww!aaaaa',
'|...................t..........................T....!..wCWWW!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'|ttttttttttttttttttttttttttttttttttttttttttttttttttt!..wCWWE!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'....................................................!..wCWWW!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'|ttttttttttttttttttttttttttttttttttttttttttttttttttt!..wCWWE!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'|....t..........t.........t............T.......t..tt!..wCWWW!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'|.t.....T....T.........t.......t..T........T........!..wCWWE!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'|...................t..........................T....!..wCWWW!..!;;;;;!ewwwwwwwww!ewwwwwwwww!aaaaa',
'#||!||#||!||#||!||#||!||#||!||#||!||#||!||#||.||#!!!#||!||||!..!;;;##!!!!!!!!!!!!!!!!!!!!!!!!!!!!',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#...#AAAAAA!!..!;;;##!8aaaaaaaaaaaaaaaaaaa.......',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA#...#AAAAAA!!..!;;;##!aaaaaaaaaaaaaaaaaaaa.......',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AACAA#..2#AAAAAA!!..!;;;##!aaaaaaaccccaaaaaaaaa.......',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^.AAAAA.AAAAA#...#AAAAAA!!..!;;;##!aaaaaaaccccaaaaaaaaa.......',
'#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#^^^^^#AAAAA#AAAAA.....AAAAAA!!..!;;;##!aaaaaaaaaaaaaaaaaaaa.......',
'#||!||#||!||#||!||#||!||#||!||#||!||#||!||#||!||#!!!#||!||||!..!;;;##!aaaaaaaaaaaaaaaaaaaa.......',
'-----------------------------------------------------------|-------##----------------------------',
)
|
py | 1a44a715876ae634ffae24579136d58400ae4cdb | # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
import pandas as pd
from scipy.stats import multivariate_normal
from pgmpy.factors.base import BaseFactor
class LinearGaussianCPD(BaseFactor):
"""
For X -> Y, the Linear Gaussian model assumes that the mean
of Y is a linear function of the mean of X and that the variance of Y does
not depend on X.
For example,
$ p(Y|X) = N(-2x + 0.9 ; 1) $
Here, $ x $ is the mean of the variable $ X $.
Let $ Y $ be a continuous variable with continuous parents
$ X1, X2, ..., Xk $. We say that $ Y $ has a linear Gaussian CPD
if there are parameters $ \beta_0, \beta_1, ..., \beta_k $
and $ \sigma^2 $ such that,
$ p(Y | x_1, x_2, ..., x_k) = \mathcal{N}(\beta_0 + \beta_1 x_1 + ... + \beta_k x_k ; \sigma^2) $
In vector notation,
$ p(Y | \mathbf{x}) = \mathcal{N}(\beta_0 + \boldsymbol{\beta}^T \mathbf{x} ; \sigma^2) $
Reference: https://cedar.buffalo.edu/~srihari/CSE574/Chap8/Ch8-PGM-GaussianBNs/8.5%20GaussianBNs.pdf
"""
def __init__(
self, variable, evidence_mean, evidence_variance, evidence=[], beta=None
):
"""
Parameters
----------
variable: any hashable python object
The variable whose CPD is defined.
evidence_mean: Mean vector (numpy array) of the joint distribution, X
evidence_variance: int, float
The variance of the multivariate gaussian, X = ['x1', 'x2', ..., 'xn']
evidence: iterable of any hashable python objects
An iterable of the parents of the variable. None if there are no parents.
beta (optional): iterable of int or float
An iterable representing the coefficient vector of the linear equation.
The first term represents the constant term in the linear equation.
Examples
--------
# For P(Y| X1, X2, X3) = N(-2x1 + 3x2 + 7x3 + 0.2; 9.6)
>>> cpd = LinearGaussianCPD('Y', [0.2, -2, 3, 7], 9.6, ['X1', 'X2', 'X3'])
>>> cpd.variable
'Y'
>>> cpd.evidence
['X1', 'X2', 'X3']
>>> cpd.beta_vector
[0.2, -2, 3, 7]
"""
self.variable = variable
self.mean = evidence_mean
self.variance = evidence_variance
self.evidence = evidence
self.sigma_yx = None
if beta is not None:
self.beta = beta
self.beta_0 = beta[0]
self.beta_vector = np.asarray(beta[1:])
if len(evidence) != len(beta) - 1:
raise ValueError(
"The number of variables in evidence must be one less than the length of the beta vector."
)
variables = [variable] + evidence
super(LinearGaussianCPD, self).__init__(
variables, pdf="gaussian", mean=self.mean, covariance=self.variance
)
def sum_of_product(self, xi, xj):
prod_xixj = xi * xj
return np.sum(prod_xixj)
def maximum_likelihood_estimator(self, data, states):
"""
Fit using MLE method.
Parameters
----------
data: pandas.DataFrame or 2D array
Dataframe of values containing samples from the conditional distribution, (Y|X)
and corresponding X values.
states: All the input states that are jointly gaussian.
Returns
-------
beta, variance (tuple): Returns estimated betas and the variance.
"""
x_df = pd.DataFrame(data, columns=states)
x_len = len(self.evidence)
sym_coefs = []
for i in range(0, x_len):
sym_coefs.append("b" + str(i + 1) + "_coef")
sum_x = x_df.sum()
x = [sum_x["(Y|X)"]]
coef_matrix = pd.DataFrame(columns=sym_coefs)
# First we compute just the coefficients of beta_1 to beta_N.
# Later we compute beta_0 and append it.
for i in range(0, x_len):
x.append(self.sum_of_product(x_df["(Y|X)"], x_df[self.evidence[i]]))
for j in range(0, x_len):
coef_matrix.loc[i, sym_coefs[j]] = self.sum_of_product(
x_df[self.evidence[i]], x_df[self.evidence[j]]
)
coef_matrix.insert(0, "b0_coef", sum_x[self.evidence].values)
row_1 = np.append([len(x_df)], sum_x[self.evidence].values)
coef_matrix.loc[-1] = row_1
coef_matrix.index = coef_matrix.index + 1 # shifting index
coef_matrix.sort_index(inplace=True)
beta_coef_matrix = np.matrix(coef_matrix.values, dtype="float")
coef_inv = np.linalg.inv(beta_coef_matrix)
beta_est = np.array(np.matmul(coef_inv, np.transpose(x)))
self.beta = beta_est[0]
sigma_est = 0
x_len_df = len(x_df)
for i in range(0, x_len):
for j in range(0, x_len):
sigma_est += (
self.beta[i + 1]
* self.beta[j + 1]
* (
self.sum_of_product(
x_df[self.evidence[i]], x_df[self.evidence[j]]
)
/ x_len_df
- np.mean(x_df[self.evidence[i]])
* np.mean(x_df[self.evidence[j]])
)
)
sigma_est = np.sqrt(
self.sum_of_product(x_df["(Y|X)"], x_df["(Y|X)"]) / x_len_df
- np.mean(x_df["(Y|X)"]) * np.mean(x_df["(Y|X)"])
- sigma_est
)
self.sigma_yx = sigma_est
return self.beta, self.sigma_yx
def fit(self, data, states, estimator=None, complete_samples_only=True, **kwargs):
"""
Determine βs from data
Parameters
----------
data: pandas.DataFrame
Dataframe containing samples from the conditional distribution, p(Y|X)
estimator: 'MLE' or 'MAP'
complete_samples_only: boolean (True or False)
Are they downsampled or complete? Defaults to True
"""
if estimator == "MLE":
mean, variance = self.maximum_likelihood_estimator(data, states)
elif estimator == "MAP":
raise NotImplementedError(
"fit method has not been implemented using Maximum A-Priori (MAP)"
)
return mean, variance
@property
def pdf(self):
def _pdf(*args):
# The first element of args is the value of the variable on which CPD is defined
# and the rest of the elements give the mean values of the parent
# variables.
mean = (
sum([arg * coeff for (arg, coeff) in zip(args[1:], self.beta_vector)])
+ self.beta_0
)
return multivariate_normal.pdf(
args[0], np.array(mean), np.array([[self.variance]])
)
return _pdf
def copy(self):
"""
Returns a copy of the distribution.
Returns
-------
LinearGaussianCPD: copy of the distribution
Examples
--------
>>> from pgmpy.factors.continuous import LinearGaussianCPD
>>> cpd = LinearGaussianCPD('Y', [0.2, -2, 3, 7], 9.6, ['X1', 'X2', 'X3'])
>>> copy_cpd = cpd.copy()
>>> copy_cpd.variable
'Y'
>>> copy_cpd.evidence
['X1', 'X2', 'X3']
"""
copy_cpd = LinearGaussianCPD(
self.variable, self.beta, self.variance, list(self.evidence)
)
return copy_cpd
def __str__(self):
if self.evidence and list(self.beta_vector):
# P(Y | X1, X2, X3) = N(-2*X1_mu + 3*X2_mu + 7*X3_mu + 0.2; 9.6)
rep_str = "P({node} | {parents}) = N({mu} + {b_0}; {sigma})".format(
node=str(self.variable),
parents=", ".join([str(var) for var in self.evidence]),
mu=" + ".join(
[
"{coeff}*{parent}".format(coeff=coeff, parent=parent)
for coeff, parent in zip(self.beta_vector, self.evidence)
]
),
b_0=str(self.beta_0),
sigma=str(self.variance),
)
else:
# P(X) = N(1; 4)
rep_str = "P({X}) = N({beta_0}; {variance})".format(
X=str(self.variable),
beta_0=str(self.beta_0),
variance=str(self.variance),
)
return rep_str
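# --- Editor's usage sketch (not part of the original module) ----------------
# A minimal illustration of the P(Y | X1, X2, X3) = N(0.2 - 2*x1 + 3*x2 + 7*x3; 9.6)
# example from the class docstring. It relies only on the LinearGaussianCPD
# class defined above; the parent values below are arbitrary placeholders.
def _example_linear_gaussian_cpd():
    cpd = LinearGaussianCPD('Y', [0.2, -2, 3, 7], 9.6, ['X1', 'X2', 'X3'])
    x1, x2, x3 = 1.0, 0.0, 0.0
    # conditional mean = beta_0 + beta . x = 0.2 + (-2)*1.0 = -1.8
    mean = cpd.beta_0 + sum(c * v for c, v in zip(cpd.beta_vector, (x1, x2, x3)))
    # density of Y evaluated at its own conditional mean, given the parent values
    return cpd.pdf(mean, x1, x2, x3)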
|
py | 1a44a7dc01f9db32378d7bf2f122702ff8935aa7 | from __future__ import unicode_literals, absolute_import, print_function
import click
import hashlib, os, sys, compileall, re
import frappe
from frappe import _
from frappe.commands import pass_context, get_site
from frappe.commands.scheduler import _is_scheduler_enabled
from frappe.limits import update_limits, get_limits
from frappe.installer import update_site_config
from frappe.utils import touch_file, get_site_path
from six import text_type
# imports - third-party imports
from pymysql.constants import ER
# imports - module imports
from frappe.exceptions import SQLError
@click.command('new-site')
@click.argument('site')
@click.option('--db-name', help='Database name')
@click.option('--mariadb-root-username', default='root', help='Root username for MariaDB')
@click.option('--mariadb-root-password', help='Root password for MariaDB')
@click.option('--admin-password', help='Administrator password for new site', default=None)
@click.option('--verbose', is_flag=True, default=False, help='Verbose')
@click.option('--force', help='Force restore if site/database already exists', is_flag=True, default=False)
@click.option('--source_sql', help='Initiate database with a SQL file')
@click.option('--install-app', multiple=True, help='Install app after installation')
def new_site(site, mariadb_root_username=None, mariadb_root_password=None, admin_password=None, verbose=False, install_apps=None, source_sql=None, force=None, install_app=None, db_name=None):
"Create a new site"
frappe.init(site=site, new_site=True)
_new_site(db_name, site, mariadb_root_username=mariadb_root_username, mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=verbose, install_apps=install_app, source_sql=source_sql, force=force)
if len(frappe.utils.get_sites()) == 1:
use(site)
def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=None, admin_password=None,
verbose=False, install_apps=None, source_sql=None,force=False, reinstall=False):
"""Install a new Frappe site"""
if not db_name:
db_name = hashlib.sha1(site.encode()).hexdigest()[:16]
from frappe.installer import install_db, make_site_dirs
from frappe.installer import install_app as _install_app
import frappe.utils.scheduler
frappe.init(site=site)
try:
# enable scheduler post install?
enable_scheduler = _is_scheduler_enabled()
except Exception:
enable_scheduler = False
make_site_dirs()
installing = None
try:
installing = touch_file(get_site_path('locks', 'installing.lock'))
install_db(root_login=mariadb_root_username, root_password=mariadb_root_password, db_name=db_name,
admin_password=admin_password, verbose=verbose, source_sql=source_sql,force=force, reinstall=reinstall)
apps_to_install = ['frappe'] + (frappe.conf.get("install_apps") or []) + (list(install_apps) or [])
for app in apps_to_install:
_install_app(app, verbose=verbose, set_as_patched=not source_sql)
frappe.utils.scheduler.toggle_scheduler(enable_scheduler)
frappe.db.commit()
scheduler_status = "disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled"
print("*** Scheduler is", scheduler_status, "***")
except frappe.exceptions.ImproperDBConfigurationError:
_drop_site(site, mariadb_root_username, mariadb_root_password, force=True)
finally:
if installing and os.path.exists(installing):
os.remove(installing)
frappe.destroy()
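# --- Editor's note (illustrative, not part of the original file) ------------
# Typical bench invocations for the command defined above; the site name, the
# passwords and the app name are placeholders, and the flag names are exactly
# the click options declared on `new_site`:
#
#   bench new-site mysite.local --mariadb-root-password <root-pw> \
#       --admin-password <admin-pw> --install-app <app>
#
# `--install-app` may be repeated, and `--force` allows restoring over an
# existing site/database.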
@click.command('restore')
@click.argument('sql-file-path')
@click.option('--mariadb-root-username', default='root', help='Root username for MariaDB')
@click.option('--mariadb-root-password', help='Root password for MariaDB')
@click.option('--db-name', help='Database name for site in case it is a new one')
@click.option('--admin-password', help='Administrator password for new site')
@click.option('--install-app', multiple=True, help='Install app after installation')
@click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file')
@click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file')
@pass_context
def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None):
"Restore site database from an sql file"
from frappe.installer import extract_sql_gzip, extract_tar_files
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
if not os.path.exists(sql_file_path):
sql_file_path = '../' + sql_file_path
if not os.path.exists(sql_file_path):
print('Invalid path {0}'.format(sql_file_path[3:]))
sys.exit(1)
if sql_file_path.endswith('sql.gz'):
sql_file_path = extract_sql_gzip(os.path.abspath(sql_file_path))
site = get_site(context)
frappe.init(site=site)
_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=context.verbose, install_apps=install_app, source_sql=sql_file_path,
force=context.force)
# Extract public and/or private files to the restored site, if user has given the path
if with_public_files:
public = extract_tar_files(site, with_public_files, 'public')
os.remove(public)
if with_private_files:
private = extract_tar_files(site, with_private_files, 'private')
os.remove(private)
@click.command('reinstall')
@click.option('--admin-password', help='Administrator Password for reinstalled site')
@click.option('--yes', is_flag=True, default=False, help='Pass --yes to skip confirmation')
@pass_context
def reinstall(context, admin_password=None, yes=False):
"Reinstall site ie. wipe all data and start over"
site = get_site(context)
_reinstall(site, admin_password, yes, verbose=context.verbose)
def _reinstall(site, admin_password=None, yes=False, verbose=False):
if not yes:
click.confirm('This will wipe your database. Are you sure you want to reinstall?', abort=True)
try:
frappe.init(site=site)
frappe.connect()
frappe.clear_cache()
installed = frappe.get_installed_apps()
frappe.clear_cache()
except Exception:
installed = []
finally:
if frappe.db:
frappe.db.close()
frappe.destroy()
frappe.init(site=site)
_new_site(frappe.conf.db_name, site, verbose=verbose, force=True, reinstall=True,
install_apps=installed, admin_password=admin_password)
@click.command('install-app')
@click.argument('app')
@pass_context
def install_app(context, app):
"Install a new app to site"
from frappe.installer import install_app as _install_app
for site in context.sites:
frappe.init(site=site)
frappe.connect()
try:
_install_app(app, verbose=context.verbose)
finally:
frappe.destroy()
@click.command('list-apps')
@pass_context
def list_apps(context):
"List apps in site"
site = get_site(context)
frappe.init(site=site)
frappe.connect()
print("\n".join(frappe.get_installed_apps()))
frappe.destroy()
@click.command('add-system-manager')
@click.argument('email')
@click.option('--first-name')
@click.option('--last-name')
@click.option('--send-welcome-email', default=False, is_flag=True)
@pass_context
def add_system_manager(context, email, first_name, last_name, send_welcome_email):
"Add a new system manager to a site"
import frappe.utils.user
for site in context.sites:
frappe.connect(site=site)
try:
frappe.utils.user.add_system_manager(email, first_name, last_name, send_welcome_email)
frappe.db.commit()
finally:
frappe.destroy()
@click.command('disable-user')
@click.argument('email')
@pass_context
def disable_user(context, email):
site = get_site(context)
with frappe.init_site(site):
frappe.connect()
user = frappe.get_doc("User", email)
user.enabled = 0
user.save(ignore_permissions=True)
frappe.db.commit()
@click.command('migrate')
@click.option('--rebuild-website', help="Rebuild webpages after migration")
@pass_context
def migrate(context, rebuild_website=False):
"Run patches, sync schema and rebuild files/translations"
from frappe.migrate import migrate
for site in context.sites:
print('Migrating', site)
frappe.init(site=site)
frappe.connect()
try:
migrate(context.verbose, rebuild_website=rebuild_website)
finally:
frappe.destroy()
compileall.compile_dir('../apps', quiet=1, rx=re.compile('.*node_modules.*'))
@click.command('run-patch')
@click.argument('module')
@pass_context
def run_patch(context, module):
"Run a particular patch"
import frappe.modules.patch_handler
for site in context.sites:
frappe.init(site=site)
try:
frappe.connect()
frappe.modules.patch_handler.run_single(module, force=context.force)
finally:
frappe.destroy()
@click.command('reload-doc')
@click.argument('module')
@click.argument('doctype')
@click.argument('docname')
@pass_context
def reload_doc(context, module, doctype, docname):
"Reload schema for a DocType"
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
frappe.reload_doc(module, doctype, docname, force=context.force)
frappe.db.commit()
finally:
frappe.destroy()
@click.command('reload-doctype')
@click.argument('doctype')
@pass_context
def reload_doctype(context, doctype):
"Reload schema for a DocType"
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
frappe.reload_doctype(doctype, force=context.force)
frappe.db.commit()
finally:
frappe.destroy()
@click.command('use')
@click.argument('site')
def _use(site, sites_path='.'):
"Set a default site"
use(site, sites_path=sites_path)
def use(site, sites_path='.'):
with open(os.path.join(sites_path, "currentsite.txt"), "w") as sitefile:
sitefile.write(site)
@click.command('backup')
@click.option('--with-files', default=False, is_flag=True, help="Take backup with files")
@pass_context
def backup(context, with_files=False, backup_path_db=None, backup_path_files=None,
backup_path_private_files=None, quiet=False):
"Backup"
from frappe.utils.backups import scheduled_backup
verbose = context.verbose
for site in context.sites:
frappe.init(site=site)
frappe.connect()
odb = scheduled_backup(ignore_files=not with_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, force=True)
if verbose:
from frappe.utils import now
print("database backup taken -", odb.backup_path_db, "- on", now())
if with_files:
print("files backup taken -", odb.backup_path_files, "- on", now())
print("private files backup taken -", odb.backup_path_private_files, "- on", now())
frappe.destroy()
@click.command('remove-from-installed-apps')
@click.argument('app')
@pass_context
def remove_from_installed_apps(context, app):
"Remove app from site's installed-apps list"
from frappe.installer import remove_from_installed_apps
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
remove_from_installed_apps(app)
finally:
frappe.destroy()
@click.command('uninstall-app')
@click.argument('app')
@click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True)
@click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False)
@pass_context
def uninstall(context, app, dry_run=False, yes=False):
"Remove app and linked modules from site"
from frappe.installer import remove_app
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
remove_app(app, dry_run, yes)
finally:
frappe.destroy()
@click.command('drop-site')
@click.argument('site')
@click.option('--root-login', default='root')
@click.option('--root-password')
@click.option('--archived-sites-path')
@click.option('--force', help='Force drop-site even if an error is encountered', is_flag=True, default=False)
def drop_site(site, root_login='root', root_password=None, archived_sites_path=None, force=False):
_drop_site(site, root_login, root_password, archived_sites_path, force)
def _drop_site(site, root_login='root', root_password=None, archived_sites_path=None, force=False):
"Remove site from database and filesystem"
from frappe.installer import get_root_connection
from frappe.model.db_schema import DbManager
from frappe.utils.backups import scheduled_backup
frappe.init(site=site)
frappe.connect()
try:
scheduled_backup(ignore_files=False, force=True)
except SQLError as err:
if err[0] == ER.NO_SUCH_TABLE:
if force:
pass
else:
click.echo("="*80)
click.echo("Error: The operation has stopped because backup of {s}'s database failed.".format(s=site))
click.echo("Reason: {reason}{sep}".format(reason=err[1], sep="\n"))
click.echo("Fix the issue and try again.")
click.echo(
"Hint: Use 'bench drop-site {s} --force' to force the removal of {s}".format(sep="\n", tab="\t", s=site)
)
sys.exit(1)
db_name = frappe.local.conf.db_name
frappe.local.db = get_root_connection(root_login, root_password)
dbman = DbManager(frappe.local.db)
dbman.delete_user(db_name)
dbman.drop_database(db_name)
if not archived_sites_path:
archived_sites_path = os.path.join(frappe.get_app_path('frappe'), '..', '..', '..', 'archived_sites')
if not os.path.exists(archived_sites_path):
os.mkdir(archived_sites_path)
move(archived_sites_path, site)
def move(dest_dir, site):
if not os.path.isdir(dest_dir):
raise Exception("destination is not a directory or does not exist")
frappe.init(site)
old_path = frappe.utils.get_site_path()
new_path = os.path.join(dest_dir, site)
# check if site dump of same name already exists
site_dump_exists = True
count = 0
while site_dump_exists:
final_new_path = new_path + (count and str(count) or "")
site_dump_exists = os.path.exists(final_new_path)
count = int(count or 0) + 1
os.rename(old_path, final_new_path)
frappe.destroy()
return final_new_path
@click.command('set-admin-password')
@click.argument('admin-password')
@click.option('--logout-all-sessions', help='Logout from all sessions', is_flag=True, default=False)
@pass_context
def set_admin_password(context, admin_password, logout_all_sessions=False):
"Set Administrator password for a site"
import getpass
from frappe.utils.password import update_password
for site in context.sites:
try:
frappe.init(site=site)
while not admin_password:
admin_password = getpass.getpass("Administrator's password for {0}: ".format(site))
frappe.connect()
update_password(user='Administrator', pwd=admin_password, logout_all_sessions=logout_all_sessions)
frappe.db.commit()
admin_password = None
finally:
frappe.destroy()
@click.command('set-limit')
@click.option('--site', help='site name')
@click.argument('limit')
@click.argument('value')
@pass_context
def set_limit(context, site, limit, value):
"""Sets user / space / email limit for a site"""
_set_limits(context, site, ((limit, value),))
@click.command('set-limits')
@click.option('--site', help='site name')
@click.option('--limit', 'limits', type=(text_type, text_type), multiple=True)
@pass_context
def set_limits(context, site, limits):
_set_limits(context, site, limits)
def _set_limits(context, site, limits):
import datetime
if not limits:
return
if not site:
site = get_site(context)
with frappe.init_site(site):
frappe.connect()
new_limits = {}
for limit, value in limits:
if limit not in ('daily_emails', 'emails', 'space', 'users', 'email_group',
'expiry', 'support_email', 'support_chat', 'upgrade_url'):
frappe.throw(_('Invalid limit {0}').format(limit))
if limit=='expiry' and value:
try:
datetime.datetime.strptime(value, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
elif limit=='space':
value = float(value)
elif limit in ('users', 'emails', 'email_group', 'daily_emails'):
value = int(value)
new_limits[limit] = value
update_limits(new_limits)
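# --- Editor's note (illustrative, not part of the original file) ------------
# Example invocations for the limit commands defined above; the site name and
# values are placeholders, and the accepted keys plus the YYYY-MM-DD expiry
# format follow the validation in `_set_limits`:
#
#   bench set-limit --site mysite.local users 5
#   bench set-limit --site mysite.local expiry 2025-12-31
#   bench set-limits --site mysite.local --limit space 1.5 --limit emails 3000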
@click.command('clear-limits')
@click.option('--site', help='site name')
@click.argument('limits', nargs=-1, type=click.Choice(['emails', 'space', 'users', 'email_group',
'expiry', 'support_email', 'support_chat', 'upgrade_url', 'daily_emails']))
@pass_context
def clear_limits(context, site, limits):
"""Clears given limit from the site config, and removes limit from site config if its empty"""
from frappe.limits import clear_limit as _clear_limit
if not limits:
return
if not site:
site = get_site(context)
with frappe.init_site(site):
_clear_limit(limits)
# Remove limits from the site_config, if it's empty
limits = get_limits()
if not limits:
update_site_config('limits', 'None', validate=False)
@click.command('set-last-active-for-user')
@click.option('--user', help="Setup last active date for user")
@pass_context
def set_last_active_for_user(context, user=None):
"Set users last active date to current datetime"
from frappe.core.doctype.user.user import get_system_users
from frappe.utils.user import set_last_active_to_now
site = get_site(context)
with frappe.init_site(site):
frappe.connect()
if not user:
user = get_system_users(limit=1)
if len(user) > 0:
user = user[0]
else:
return
set_last_active_to_now(user)
frappe.db.commit()
@click.command('publish-realtime')
@click.argument('event')
@click.option('--message')
@click.option('--room')
@click.option('--user')
@click.option('--doctype')
@click.option('--docname')
@click.option('--after-commit')
@pass_context
def publish_realtime(context, event, message, room, user, doctype, docname, after_commit):
"Publish realtime event from bench"
from frappe import publish_realtime
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
publish_realtime(event, message=message, room=room, user=user, doctype=doctype, docname=docname,
after_commit=after_commit)
frappe.db.commit()
finally:
frappe.destroy()
commands = [
add_system_manager,
backup,
drop_site,
install_app,
list_apps,
migrate,
new_site,
reinstall,
reload_doc,
reload_doctype,
remove_from_installed_apps,
restore,
run_patch,
set_admin_password,
uninstall,
set_limit,
set_limits,
clear_limits,
disable_user,
_use,
set_last_active_for_user,
publish_realtime,
]
|
py | 1a44a82adcd239be44539cc8ebb1b9bb31abb299 | # Copyright 2019, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import re
from copy import copy
logger = logging.getLogger(__name__)
OC_RESOURCE_TYPE = 'OC_RESOURCE_TYPE'
OC_RESOURCE_LABELS = 'OC_RESOURCE_LABELS'
# Matches anything outside ASCII 32-126 inclusive
_NON_PRINTABLE_ASCII = re.compile(
r'[^ !"#$%&\'()*+,\-./:;<=>?@\[\\\]^_`{|}~0-9a-zA-Z]')
# Label key/value tokens, may be quoted
_WORD_RES = r'(\'[^\']*\'|"[^"]*"|[^\s,=]+)'
_KV_RE = re.compile(r"""
\s* # ignore leading spaces
(?P<key>{word_re}) # capture the key word
\s*=\s*
(?P<val>{word_re}) # capture the value word
\s* # ignore trailing spaces
""".format(word_re=_WORD_RES), re.VERBOSE)
_LABELS_RE = re.compile(r"""
^\s*{word_re}\s*=\s*{word_re}\s* # _KV_RE without the named groups
(,\s*{word_re}\s*=\s*{word_re}\s*)* # more KV pairs, comma delimited
$
""".format(word_re=_WORD_RES), re.VERBOSE)
_UNQUOTE_RE = re.compile(r'^([\'"]?)([^\1]*)(\1)$')
def merge_resources(resource_list):
"""Merge multiple resources to get a new resource.
Resources earlier in the list take precedence: if multiple resources share
a label key, use the value from the first resource in the list with that
key. The combined resource's type will be the first non-null type in the
list.
:type resource_list: list(:class:`Resource`)
:param resource_list: The list of resources to combine.
:rtype: :class:`Resource`
:return: The new combined resource.
"""
if not resource_list:
raise ValueError
rtype = None
for rr in resource_list:
if rr.type:
rtype = rr.type
break
labels = {}
for rr in reversed(resource_list):
labels.update(rr.labels)
return Resource(rtype, labels)
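# Editor's illustration of the precedence rule documented above (not part of
# the original module): labels from earlier resources win, and the first
# non-null type in the list is kept.
#
#   merge_resources([Resource('host', {'az': 'us-east-1a'}),
#                    Resource('container', {'az': 'us-west-1b', 'pod': 'p1'})])
#   # -> type 'host', labels {'az': 'us-east-1a', 'pod': 'p1'}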
def check_ascii_256(string):
"""Check that `string` is printable ASCII and at most 256 chars.
Raise a `ValueError` if this check fails. Note that `string` itself doesn't
have to be ASCII-encoded.
:type string: str
:param string: The string to check.
"""
if string is None:
return
if len(string) > 256:
raise ValueError("Value is longer than 256 characters")
bad_char = _NON_PRINTABLE_ASCII.search(string)
if bad_char:
raise ValueError(u'Character "{}" at position {} is not printable '
'ASCII'
.format(
string[bad_char.start():bad_char.end()],
bad_char.start()))
class Resource(object):
"""A description of the entity for which signals are reported.
`type_` and `labels`' keys and values should contain only printable ASCII
and should be at most 256 characters.
See:
https://github.com/census-instrumentation/opencensus-specs/blob/master/resource/Resource.md
:type type_: str
:param type_: The resource type identifier.
:type labels: dict
:param labels: Key-value pairs that describe the entity.
""" # noqa
def __init__(self, type_=None, labels=None):
if type_ is not None and not type_:
raise ValueError("Resource type must not be empty")
check_ascii_256(type_)
if labels is None:
labels = {}
for key, value in labels.items():
if not key:
raise ValueError("Resource key must not be null or empty")
if value is None:
raise ValueError("Resource value must not be null")
check_ascii_256(key)
check_ascii_256(value)
self.type = type_
self.labels = copy(labels)
def get_type(self):
"""Get this resource's type.
:rtype: str
:return: The resource's type.
"""
return self.type
def get_labels(self):
"""Get this resource's labels.
:rtype: dict
:return: The resource's label dict.
"""
return copy(self.labels)
def merge(self, other):
"""Get a copy of this resource combined with another resource.
The combined resource will have the union of both resources' labels,
keeping this resource's label values if they conflict.
:type other: :class:`Resource`
:param other: The other resource to merge.
:rtype: :class:`Resource`
:return: The new combined resource.
"""
return merge_resources([self, other])
def unquote(string):
"""Strip quotes surrounding `string` if they exist.
>>> unquote('abc')
'abc'
>>> unquote('"abc"')
'abc'
>>> unquote("'abc'")
'abc'
>>> unquote('"a\\'b\\'c"')
"a'b'c"
"""
return _UNQUOTE_RE.sub(r'\2', string)
def parse_labels(labels_str):
"""Parse label keys and values following the Resource spec.
>>> parse_labels("k=v")
{'k': 'v'}
>>> parse_labels("k1=v1, k2=v2")
{'k1': 'v1', 'k2': 'v2'}
>>> parse_labels("k1='v1,=z1'")
{'k1': 'v1,=z1'}
"""
if not _LABELS_RE.match(labels_str):
return None
labels = {}
for kv in _KV_RE.finditer(labels_str):
gd = kv.groupdict()
key = unquote(gd['key'])
if key in labels:
logger.warning('Duplicate label key "%s"', key)
labels[key] = unquote(gd['val'])
return labels
def get_from_env():
"""Get a Resource from environment variables.
:rtype: :class:`Resource`
:return: A resource with type and labels from the environment.
"""
type_env = os.getenv(OC_RESOURCE_TYPE)
if type_env is None:
return None
type_env = type_env.strip()
labels_env = os.getenv(OC_RESOURCE_LABELS)
if labels_env is None:
return Resource(type_env)
labels = parse_labels(labels_env)
return Resource(type_env, labels)
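# Editor's sketch of driving `get_from_env` through the environment; the
# values below are placeholders and are not part of the original module:
#
#   export OC_RESOURCE_TYPE="k8s_container"
#   export OC_RESOURCE_LABELS='namespace="default", pod="pod-0"'
#
# With those set, get_from_env() returns Resource('k8s_container',
# {'namespace': 'default', 'pod': 'pod-0'}); leaving OC_RESOURCE_LABELS unset
# yields a Resource carrying the type only.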
|
py | 1a44a8ff6520248e31c400c92180cbc56e4530ce | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NatGatewaysOperations:
"""NatGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
nat_gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
nat_gateway_name: str,
**kwargs
) -> None:
"""Deletes the specified nat gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: None, or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
nat_gateway_name=nat_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'} # type: ignore
async def get(
self,
resource_group_name: str,
nat_gateway_name: str,
expand: Optional[str] = None,
**kwargs
) -> "models.NatGateway":
"""Gets the specified nat gateway in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NatGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.NatGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NatGateway"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NatGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
nat_gateway_name: str,
parameters: "models.NatGateway",
**kwargs
) -> "models.NatGateway":
cls = kwargs.pop('cls', None) # type: ClsType["models.NatGateway"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NatGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NatGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NatGateway', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NatGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
nat_gateway_name: str,
parameters: "models.NatGateway",
**kwargs
) -> "models.NatGateway":
"""Creates or updates a nat gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param parameters: Parameters supplied to the create or update nat gateway operation.
:type parameters: ~azure.mgmt.network.v2020_04_01.models.NatGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: NatGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.NatGateway
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.NatGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
nat_gateway_name=nat_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NatGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
nat_gateway_name: str,
parameters: "models.TagsObject",
**kwargs
) -> "models.NatGateway":
"""Updates nat gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param parameters: Parameters supplied to update nat gateway tags.
:type parameters: ~azure.mgmt.network.v2020_04_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NatGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.NatGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NatGateway"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NatGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'} # type: ignore
def list_all(
self,
**kwargs
) -> AsyncIterable["models.NatGatewayListResult"]:
"""Gets all the Nat Gateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NatGatewayListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.NatGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NatGatewayListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NatGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/natGateways'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["models.NatGatewayListResult"]:
"""Gets all nat gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NatGatewayListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.NatGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NatGatewayListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NatGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways'} # type: ignore
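# --- Editor's usage sketch (not part of the generated file) -----------------
# It assumes the usual autorest layout in which the service's async management
# client exposes this operation group as `client.nat_gateways`; the resource
# group, gateway name and request body below are placeholders.
#
#   poller = await client.nat_gateways.begin_create_or_update(
#       "my-rg", "my-natgw",
#       {"location": "eastus", "sku": {"name": "Standard"}})
#   nat_gateway = await poller.result()      # wait for the LRO to finish
#   async for gw in client.nat_gateways.list("my-rg"):
#       print(gw.name)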
|
py | 1a44a92b55eb6e4371a01c8acbe7da0d526758d0 | from flax.util.condition_tools import ConditionOpcode
def make_create_coin_condition(puzzle_hash, amount):
return [ConditionOpcode.CREATE_COIN, puzzle_hash, amount]
def make_assert_aggsig_condition(pubkey):
return [ConditionOpcode.AGG_SIG_UNSAFE, pubkey]
def make_assert_my_coin_id_condition(coin_name):
return [ConditionOpcode.ASSERT_MY_COIN_ID, coin_name]
def make_assert_absolute_height_exceeds_condition(block_index):
return [ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, block_index]
def make_assert_relative_height_exceeds_condition(block_index):
return [ConditionOpcode.ASSERT_HEIGHT_RELATIVE, block_index]
def make_assert_absolute_seconds_exceeds_condition(time):
return [ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, time]
def make_assert_relative_seconds_exceeds_condition(time):
return [ConditionOpcode.ASSERT_SECONDS_RELATIVE, time]
def make_reserve_fee_condition(fee):
return [ConditionOpcode.RESERVE_FEE, fee]
def make_assert_coin_announcement(announcement_hash):
return [ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, announcement_hash]
def make_assert_puzzle_announcement(announcement_hash):
return [ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, announcement_hash]
def make_create_coin_announcement(message):
return [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, message]
def make_create_puzzle_announcement(message):
return [ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, message]
def make_assert_my_parent_id(parent_id):
return [ConditionOpcode.ASSERT_MY_PARENT_ID, parent_id]
def make_assert_my_puzzlehash(puzzlehash):
return [ConditionOpcode.ASSERT_MY_PUZZLEHASH, puzzlehash]
def make_assert_my_amount(amount):
return [ConditionOpcode.ASSERT_MY_AMOUNT, amount]
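# Editor's sketch of how these helpers are typically combined into the
# condition list returned by a puzzle (illustrative only; `puzzle_hash`,
# `amount`, `fee` and `msg` are placeholders):
#
#   conditions = [
#       make_create_coin_condition(puzzle_hash, amount - fee),
#       make_reserve_fee_condition(fee),
#       make_create_coin_announcement(msg),
#   ]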
|
py | 1a44aa16dc887c5709e2f68391b9ed364a577502 | import matplotlib.pyplot as plt
import numpy as np
import vae.training
import vae.cvae_model
import vae.regression_model
import vae.dataman
import torch
import torch.nn as nn
dataset = vae.dataman.DataManager(
mappings={
'sigma': None,
'albedo': lambda a: np.power(1 - a, 1.0 / 6),
'g': None,
# 'logscat': None,
'output_z': None,
'output_b': None,
'output_a': None
},
blocks=(3, 3)
)
dataset.load_file("./DataSets/SphereScattersDataSet.npz") #, limit=1024*1024)
print('Loaded data... '+str(dataset.data.shape))
def test_dataset(sigma, albedo, g):
test_data = dataset.get_filtered_data({
'sigma': (sigma - 1, sigma + 2),
'albedo': (albedo - 0.001, albedo),
'g': (g - 0.2, g + 0.2)
})
if len(test_data) == 0:
print(f'[ERROR] Config {sigma},{albedo},{g} has no data.')
return
print(len(test_data))
plt.figure()
# drawing histograms from empirical z position distribution
z_pos = test_data[:, 3].cpu().numpy()
plt.hist(z_pos, density=True, bins=80)
# test_dataset(16.0, 0.99, 0.0)
# plt.show()
# exit()
def cvae_factory(epochs, batch_size):
print('Creating CVAE model...')
model = vae.cvae_model.CVAEModel(3, 3, 4, 8, 4, activation=nn.LeakyReLU)
optimizer = torch.optim.AdamW(model.parameters(), lr=0.001)
# optimizer = torch.optim.AdamW(model.parameters(), lr=0.001, weight_decay=0.0000001)
gamma = np.exp(np.log(0.02) / epochs)
scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma)
return model, optimizer, scheduler
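# Editor's note on the scheduler above: gamma is chosen so that the learning
# rate decays smoothly to 2% of its starting value over the whole run, because
# gamma**epochs = exp(log(0.02)) = 0.02. Quick sanity check (illustrative):
#
#   epochs = 8000
#   gamma = np.exp(np.log(0.02) / epochs)
#   assert abs(gamma ** epochs - 0.02) < 1e-9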
path = "./Running/sphere_cvae"
# vae.training.clear_training(path) # comment if you want to reuse from previous training
state = vae.training.start_training(path, dataset, cvae_factory, batch_size=128*1024, epochs=8000)
print('Training finished at epoch ...'+str(len(state.history)))
loss_history = np.array([h[0] for h in state.history])
epoch_list = np.arange(0, len(loss_history), 1)
plt.figure()
plt.plot(epoch_list, loss_history)
# testing model evaluation
dataset = vae.dataman.DataManager(
mappings={
'sigma': None,
'albedo': lambda a: np.power(1 - a, 1.0 / 6),
'g': None,
'output_z': None,
'output_b': None,
'output_a': None,
'logscat': None,
},
blocks=(3, 1, 3)
)
dataset.load_file("./DataSets/Test_SphereScattersDataSet.npz")
dataset.set_device(vae.training.DEFAULT_DEVICE)
print('Loaded data for testing... '+str(dataset.data.shape))
state.model.train(False)
def test_setting(sigma, albedo, g):
test_data = dataset.get_filtered_data({
'sigma': (sigma - .5, sigma + .5),
'albedo': (albedo - 0.0001, albedo),
'g': (g - 0.01, g + 0.01)
})
print('Testing data frame '+str(test_data.shape))
if len(test_data) == 0:
print(f'[ERROR] Config {sigma},{albedo},{g} has no data.')
return
plt.figure()
# drawing histograms from empirical z position distribution
weights = np.exp(test_data[:, 6].cpu().numpy())
z_pos = test_data[:, 3].cpu().numpy()
plt.hist(z_pos, weights=weights, density=True, bins=80)
# plt.hist(z_pos, density=True, bins=80)
# drawing histograms from model sampling distribution
internal_albedo = np.power(1.0 - albedo, 1.0/6)
sigma_test = torch.Tensor(10000, 1).fill_(sigma).to(vae.training.DEFAULT_DEVICE)
albedo_test = torch.Tensor(10000, 1).fill_(internal_albedo).to(vae.training.DEFAULT_DEVICE)
g_test = torch.Tensor(10000, 1).fill_(g).to(vae.training.DEFAULT_DEVICE)
y_test = state.model.conditional_sampling(torch.cat([sigma_test, albedo_test, g_test], dim=1))
plt.hist(torch.clamp(y_test[:, 0], -1, 1).cpu().detach().numpy(), density=True, bins=80, histtype='step')
plt.show()
testing_sigmas = [1.0, 4.0, 9.0, 20.0]
testing_albedos = [0.95]
# testing_albedos = [1.0, 0.999, 0.95, 0.8]
testing_gs = [-0.5, 0.0, 0.7, 0.875]
for sigma in testing_sigmas:
for albedo in testing_albedos:
for g in testing_gs:
test_setting(sigma, albedo, g)
# testing model evaluation
plt.show()
|
py | 1a44aa871a65e83444552a470c6511dca6a53c3d | #!/usr/bin/python2
import sys
import socket
import fcntl
import os
import select
import time
import dbglog as log
_lock_timeout = 60
def now():
return time.time()
class Locker2:
def __init__(self, impl):
self._impl = impl
self._in = sys.stdin.fileno()
self._out = sys.stdout.fileno()
# grab connection file descriptor
self._poll = select.poll()
self._outBuff = ""
self._inBuff = ""
self._updatePoll()
self._poll.register(self._in, select.EPOLLIN)
# TODO: make configurable
self._renewPeriod = _lock_timeout / 2.0
self._nextRenew = now() + self._renewPeriod
def __call__(self):
while (not self._handle(1)):
if (now() > self._nextRenew):
self._impl.renew()
self._nextRenew = now() + self._renewPeriod
def _handle(self, timeout):
events = self._poll.poll(timeout)
if not len(events): return False
for event in events:
if (event[0] not in (self._in, self._out)):
log.warn2("Unexpected event on fd={}.", event[0])
continue
if self._handleClientEvent(event[1]):
return True
return False
def _handleClientEvent(self, eventMask):
if (eventMask & select.EPOLLHUP):
return True
if (eventMask & select.EPOLLIN):
self._inBuff = self._inBuff + os.read(self._in, 1024)
self._processInput()
if (eventMask & select.EPOLLOUT):
sent = os.write(self._out, self._outBuff)
self._outBuff = self._outBuff[sent:]
self._updatePoll();
return False
def _updatePoll(self):
if (len(self._outBuff)):
self._poll.register(self._out, select.EPOLLOUT)
else:
try:
self._poll.unregister(self._out)
except KeyError:
pass
def _processInput(self):
if (not len(self._inBuff)): return
commands = self._inBuff.split('\n')
if (not len(commands[-1])):
# last element is empty -> last command was complete
self._inBuff = ""
else:
# last element non-empty -> last command was not complete
# remember it in input buffer
self._inBuff = commands[-1]
# get rid of the last command since it is either empty or incomplete
del commands[-1]
for command in commands:
self._processCommand(command.split(":"))
def _processCommand(self, command):
if command[0] == "L":
if (len(command) != 2):
return self._invalidCommand()
return self._lock(command[1])
elif command[0] == "U":
if (len(command) != 3):
return self._invalidCommand()
return self._unlock(command[1], command[2])
else:
return self._invalidCommand()
def _lock(self, sublock):
try:
self._send("L:" + self._impl.lock(sublock))
except ValueError:
self._send("X")
except Exception as e:
self._error("Cannot acquire lock <%s>: <%s>." % (sublock, str(e)))
def _unlock(self, sublock, value):
try:
self._impl.unlock(sublock, value)
self._send("U")
except Exception as e:
self._error("Cannot release lock <%s>: <%s>." % (sublock, str(e)))
def _invalidCommand(self):
self._error("invalid command")
def _error(self, what):
log.err2("Sending error: <{}>.", what)
self._send("E:%s" % (what, ))
def ok(self, what):
self._send("OK")
def _send(self, what):
self._outBuff = self._outBuff + what + '\n'
self._updatePoll()
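# Editor's sketch of the newline-delimited wire protocol handled by
# _processCommand above, shown with the _ExampleLocker implementation below
# (sub-lock names are placeholders):
#
#   client -> locker:  "L:resource-a\n"                  # acquire sub-lock
#   locker -> client:  "L:LOCKresource-a\n"              # value to present on unlock
#   client -> locker:  "U:resource-a:LOCKresource-a\n"
#   locker -> client:  "U\n"
#
# "X" is sent when the lock is already held, and "E:<reason>" on other errors.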
class _ExampleLocker:
def __init__(self):
self._locks = {}
def lock(self, sublock):
log.info3("Locking <{}>.", sublock)
if (self._locks.get(sublock) is not None):
raise ValueError, "Lock <%s> already locked." % (sublock, )
value = "LOCK%s" % (sublock, )
self._locks[sublock] = value
return value
def unlock(self, sublock, value):
log.info3("Unlocking <{}> (value: <{}>).", sublock, value)
heldValue = self._locks.get(sublock)
if (heldValue is None):
raise KeyError, "Lock <%s> not held." % (sublock, )
if (heldValue != value):
raise ValueError, "Lock <%s> value mismatch." % (sublock, )
del self._locks[sublock]
def renew(self):
for (lock, value) in self._locks.items():
log.info3("Renewing lock <{}>.", lock)
if (__name__ == "__main__"):
# Create simple locker and run it
log.thread_id("locker2")
log.info3("Locker2 starting.")
Locker2(impl = _ExampleLocker())()
log.info3("Locker2 terminating.")
|
py | 1a44aa904208259b3507db7da175a43a9795ea2f | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
Validation and classification
(train) : inner-kfold classifier
(train, test) : kfold classifier
(train, dev, test) : split classifier
"""
from __future__ import absolute_import, division, unicode_literals
import logging
import numpy as np
from enteval.tools.classifier import MLP
import enteval.tools.multiclassclassifier as multiclassclassifier
import sklearn
assert(sklearn.__version__ >= "0.18.0"), \
"need to update sklearn to version >= 0.18.0"
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import StratifiedKFold
def get_classif_name(classifier_config, usepytorch):
if not usepytorch:
modelname = 'sklearn-LogReg'
else:
nhid = classifier_config['nhid']
optim = 'adam' if 'optim' not in classifier_config else classifier_config['optim']
bs = 64 if 'batch_size' not in classifier_config else classifier_config['batch_size']
modelname = 'pytorch-MLP-nhid%s-%s-bs%s' % (nhid, optim, bs)
return modelname
# Pytorch version
class InnerKFoldClassifier(object):
"""
(train) split classifier : InnerKfold.
"""
def __init__(self, X, y, config):
self.X = X
self.y = y
self.featdim = X.shape[1]
self.nclasses = config['nclasses']
self.seed = config['seed']
self.devresults = []
self.testresults = []
self.usepytorch = config['usepytorch']
self.classifier_config = config['classifier']
self.modelname = get_classif_name(self.classifier_config, self.usepytorch)
self.k = 5 if 'kfold' not in config else config['kfold']
def run(self):
logging.info('Training {0} with (inner) {1}-fold cross-validation'
.format(self.modelname, self.k))
regs = [10**t for t in range(-5, -1)] if self.usepytorch else \
[2**t for t in range(-2, 4, 1)]
skf = StratifiedKFold(n_splits=self.k, shuffle=True, random_state=1111)
innerskf = StratifiedKFold(n_splits=self.k, shuffle=True,
random_state=1111)
count = 0
for train_idx, test_idx in skf.split(self.X, self.y):
count += 1
X_train, X_test = self.X[train_idx], self.X[test_idx]
y_train, y_test = self.y[train_idx], self.y[test_idx]
scores = []
for reg in regs:
regscores = []
for inner_train_idx, inner_test_idx in innerskf.split(X_train, y_train):
X_in_train, X_in_test = X_train[inner_train_idx], X_train[inner_test_idx]
y_in_train, y_in_test = y_train[inner_train_idx], y_train[inner_test_idx]
if self.usepytorch:
clf = MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=reg,
seed=self.seed)
clf.fit(X_in_train, y_in_train,
validation_data=(X_in_test, y_in_test))
else:
clf = LogisticRegression(C=reg, random_state=self.seed)
clf.fit(X_in_train, y_in_train)
regscores.append(clf.score(X_in_test, y_in_test))
scores.append(round(100*np.mean(regscores), 2))
optreg = regs[np.argmax(scores)]
logging.info('Best param found at split {0}: l2reg = {1} \
with score {2}'.format(count, optreg, np.max(scores)))
self.devresults.append(np.max(scores))
if self.usepytorch:
clf = MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=optreg,
seed=self.seed)
clf.fit(X_train, y_train, validation_split=0.05)
else:
clf = LogisticRegression(C=optreg, random_state=self.seed)
clf.fit(X_train, y_train)
self.testresults.append(round(100*clf.score(X_test, y_test), 2))
devaccuracy = round(np.mean(self.devresults), 2)
testaccuracy = round(np.mean(self.testresults), 2)
return devaccuracy, testaccuracy
class KFoldClassifier(object):
"""
(train, test) split classifier : cross-validation on train.
"""
def __init__(self, train, test, config):
self.train = train
self.test = test
self.featdim = self.train['X'].shape[1]
self.nclasses = config['nclasses']
self.seed = config['seed']
self.usepytorch = config['usepytorch']
self.classifier_config = config['classifier']
self.modelname = get_classif_name(self.classifier_config, self.usepytorch)
self.k = 5 if 'kfold' not in config else config['kfold']
def run(self):
# cross-validation
logging.info('Training {0} with {1}-fold cross-validation'
.format(self.modelname, self.k))
regs = [10**t for t in range(-5, -1)] if self.usepytorch else \
[2**t for t in range(-1, 6, 1)]
skf = StratifiedKFold(n_splits=self.k, shuffle=True,
random_state=self.seed)
scores = []
for reg in regs:
scanscores = []
for train_idx, test_idx in skf.split(self.train['X'],
self.train['y']):
# Split data
X_train, y_train = self.train['X'][train_idx], self.train['y'][train_idx]
X_test, y_test = self.train['X'][test_idx], self.train['y'][test_idx]
# Train classifier
if self.usepytorch:
clf = MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=reg,
seed=self.seed)
clf.fit(X_train, y_train, validation_data=(X_test, y_test))
else:
clf = LogisticRegression(C=reg, random_state=self.seed)
clf.fit(X_train, y_train)
score = clf.score(X_test, y_test)
scanscores.append(score)
# Append mean score
scores.append(round(100*np.mean(scanscores), 2))
# evaluation
logging.info([('reg:' + str(regs[idx]), scores[idx])
for idx in range(len(scores))])
optreg = regs[np.argmax(scores)]
devaccuracy = np.max(scores)
logging.info('Cross-validation : best param found is reg = {0} \
with score {1}'.format(optreg, devaccuracy))
logging.info('Evaluating...')
if self.usepytorch:
clf = MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=optreg,
seed=self.seed)
clf.fit(self.train['X'], self.train['y'], validation_split=0.05)
else:
clf = LogisticRegression(C=optreg, random_state=self.seed)
clf.fit(self.train['X'], self.train['y'])
yhat = clf.predict(self.test['X'])
testaccuracy = clf.score(self.test['X'], self.test['y'])
testaccuracy = round(100*testaccuracy, 2)
return devaccuracy, testaccuracy, yhat
class SplitClassifier(object):
"""
(train, valid, test) split classifier.
"""
def __init__(self, X, y, config):
self.X = X
self.y = y
self.nclasses = config['nclasses']
self.featdim = self.X['train'].shape[1]
self.seed = config['seed']
self.usepytorch = config['usepytorch']
self.classifier_config = config['classifier']
self.cudaEfficient = False if 'cudaEfficient' not in config else \
config['cudaEfficient']
self.modelname = get_classif_name(self.classifier_config, self.usepytorch)
self.noreg = False if 'noreg' not in config else config['noreg']
self.config = config
def run(self, return_score=False):
logging.info('Training {0} with standard validation..'
.format(self.modelname))
regs = [10**t for t in range(-5, -1)] if self.usepytorch else \
[2**t for t in range(-2, 4, 1)]
if self.noreg:
regs = [1e-9 if self.usepytorch else 1e9]
scores = []
for reg in regs:
if self.usepytorch:
clf = MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=reg,
seed=self.seed, cudaEfficient=self.cudaEfficient)
# TODO: Find a hack for reducing nb epoches in SNLI
clf.fit(self.X['train'], self.y['train'],
validation_data=(self.X['valid'], self.y['valid']))
else:
clf = LogisticRegression(C=reg, random_state=self.seed)
clf.fit(self.X['train'], self.y['train'])
scores.append(round(100*clf.score(self.X['valid'],
self.y['valid']), 2))
logging.info([('reg:'+str(regs[idx]), scores[idx])
for idx in range(len(scores))])
optreg = regs[np.argmax(scores)]
devaccuracy = np.max(scores)
logging.info('Validation : best param found is reg = {0} with score \
{1}'.format(optreg, devaccuracy))
clf = LogisticRegression(C=optreg, random_state=self.seed)
logging.info('Evaluating...')
if self.usepytorch:
clf = MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=optreg,
seed=self.seed, cudaEfficient=self.cudaEfficient)
# TODO: Find a hack for reducing nb epoches in SNLI
clf.fit(self.X['train'], self.y['train'],
validation_data=(self.X['valid'], self.y['valid']))
else:
clf = LogisticRegression(C=optreg, random_state=self.seed)
clf.fit(self.X['train'], self.y['train'])
logging.info("start predicting on test")
testaccuracy = clf.score(self.X['test'], self.y['test'], test=True, return_score=return_score)
if not return_score:
testaccuracy = round(100*testaccuracy, 2)
return devaccuracy, testaccuracy
class SplitMultiClassClassifier(object):
"""
(train, valid, test) split classifier.
"""
def __init__(self, X, y, config):
self.X = X
self.y = y
self.nclasses = config['nclasses']
self.featdim = self.X['train'].shape[-1]
self.seed = config['seed']
self.usepytorch = config['usepytorch']
self.classifier_config = config['classifier']
self.cudaEfficient = False if 'cudaEfficient' not in config else \
config['cudaEfficient']
self.modelname = get_classif_name(self.classifier_config, self.usepytorch)
self.noreg = False if 'noreg' not in config else config['noreg']
self.config = config
def run(self):
logging.info('Training {0} with standard validation..'
.format(self.modelname))
regs = [10**t for t in range(-5, -1)] if self.usepytorch else \
[2**t for t in range(-2, 4, 1)]
if self.noreg:
regs = [1e-9 if self.usepytorch else 1e9]
scores = []
for reg in regs:
if self.usepytorch:
clf = multiclassclassifier.MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=reg,
seed=self.seed, cudaEfficient=self.cudaEfficient)
# TODO: Find a hack for reducing nb epoches in SNLI
clf.fit(self.X['train'], self.y['train'],
validation_data=(self.X['valid'], self.y['valid']))
else:
clf = LogisticRegression(C=reg, random_state=self.seed)
clf.fit(self.X['train'], self.y['train'])
scores.append(round(100*clf.score(self.X['valid'],
self.y['valid']), 2))
logging.info([('reg:'+str(regs[idx]), scores[idx])
for idx in range(len(scores))])
optreg = regs[np.argmax(scores)]
devaccuracy = np.max(scores)
logging.info('Validation : best param found is reg = {0} with score \
{1}'.format(optreg, devaccuracy))
clf = LogisticRegression(C=optreg, random_state=self.seed)
logging.info('Evaluating...')
if self.usepytorch:
clf = multiclassclassifier.MLP(self.classifier_config, inputdim=self.featdim,
nclasses=self.nclasses, l2reg=optreg,
seed=self.seed, cudaEfficient=self.cudaEfficient)
# TODO: Find a hack for reducing nb epoches in SNLI
clf.fit(self.X['train'], self.y['train'],
validation_data=(self.X['valid'], self.y['valid']))
else:
loggint.info("have to use pytorch with the SplitMultiClassClassifier.")
exit(-1)
testaccuracy = clf.score(self.X['test'], self.y['test'])
testaccuracy = round(100*testaccuracy, 2)
return devaccuracy, testaccuracy
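# A minimal, self-contained sketch of driving one of these classifiers with
# random data. The feature dimension, sample count, and config values below are
# illustrative only; real callers pass task embeddings and a task-specific config.
if __name__ == "__main__":
    rng = np.random.RandomState(1111)
    X = rng.randn(200, 16)
    y = rng.randint(0, 2, size=200)
    config = {
        "nclasses": 2,
        "seed": 1111,
        "usepytorch": False,  # fall back to sklearn's LogisticRegression
        "classifier": {"nhid": 0},
        "kfold": 5,
    }
    dev_acc, test_acc = InnerKFoldClassifier(X, y, config).run()
    print("dev acc: %.2f  test acc: %.2f" % (dev_acc, test_acc))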
|
py | 1a44ac61f469835b89f5a773861d1119d02f1fb6 | # SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2020 Intel Corporation
"""REST inteface to model_runner."""
import io
from logging import exception
import sanic
from sanic import response
from logger import logger
from model_hub import ModelLoader
from model_hub import ImageProcessor
from model_hub import ModelRunner
app = sanic.Sanic("dlrs-torchub")
model_loader = ModelLoader()
@app.route("/")
async def index(request):
"""index"""
return response.json(
{
"info": "torch hub server on dlrs",
"urls": ["/", "/ping", "/serve", "/predict"],
}
)
@app.route("/ping")
async def ping(request):
"""heartbeat."""
return response.json({"status": "ok"})
@app.route("/serve", methods=["POST"])
async def load_model(request):
"""load model using process pool."""
global model_loader
req = request.json
if req is None:
return response.json(
{"status": "fail", "result": "model param json not provided"}
)
if req.get("path", None) and req.get("name", None):
if not model_loader:
model_loader = ModelLoader()
model_loader.init_model(req)
if model_loader.loaded:
return response.json(
{"status": "ok", "result": f"model {req['name']} loaded"}
)
try:
request.app.loop.run_in_executor(None, model_loader.load_model)
except RuntimeError:
return response.json(
{"status": "fail", "result": "model or path not retievable"}
)
return response.json(
{"status": "ok", "result": f"model {req['name']} loading in progress"}
)
else:
model_loader = None
raise sanic.exceptions.SanicException(
"model_path/model_name not given", status_code=401
)
@app.route("/predict", methods=["POST"])
async def predict(request):
"""return output of the model."""
img_str = request.files["img"]
image = io.BytesIO(img_str[0].body)
img_processor = ImageProcessor(image)
img_tensor = img_processor.transform()
try:
model_runner = ModelRunner(model_loader.model, img_tensor)
except AttributeError:
return response.json(
{"status": "failed", "result": "model not initiated, use serve model API"}
)
return response.json({"status": "ok", "result": model_runner.predict()})
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5550)
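# A minimal client-side sketch (assumes the server above is running on
# localhost:5550; the model name, hub path, and image filename are illustrative):
#
#   import requests
#   requests.post("http://localhost:5550/serve",
#                 json={"name": "resnet18", "path": "pytorch/vision"})
#   with open("cat.jpg", "rb") as f:
#       print(requests.post("http://localhost:5550/predict",
#                           files={"img": f}).json())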
|
py | 1a44aeebe0863a95062e70ac508c54e984102cae | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
__all__ = [
'MessageCountDetailsResponse',
'SkuResponse',
]
@pulumi.output_type
class MessageCountDetailsResponse(dict):
"""
Message Count Details.
"""
def __init__(__self__, *,
active_message_count: float,
dead_letter_message_count: float,
scheduled_message_count: float,
transfer_dead_letter_message_count: float,
transfer_message_count: float):
"""
Message Count Details.
:param float active_message_count: Number of active messages in the queue, topic, or subscription.
:param float dead_letter_message_count: Number of messages that are dead lettered.
:param float scheduled_message_count: Number of scheduled messages.
:param float transfer_dead_letter_message_count: Number of messages transferred into dead letters.
:param float transfer_message_count: Number of messages transferred to another queue, topic, or subscription.
"""
pulumi.set(__self__, "active_message_count", active_message_count)
pulumi.set(__self__, "dead_letter_message_count", dead_letter_message_count)
pulumi.set(__self__, "scheduled_message_count", scheduled_message_count)
pulumi.set(__self__, "transfer_dead_letter_message_count", transfer_dead_letter_message_count)
pulumi.set(__self__, "transfer_message_count", transfer_message_count)
@property
@pulumi.getter(name="activeMessageCount")
def active_message_count(self) -> float:
"""
Number of active messages in the queue, topic, or subscription.
"""
return pulumi.get(self, "active_message_count")
@property
@pulumi.getter(name="deadLetterMessageCount")
def dead_letter_message_count(self) -> float:
"""
Number of messages that are dead lettered.
"""
return pulumi.get(self, "dead_letter_message_count")
@property
@pulumi.getter(name="scheduledMessageCount")
def scheduled_message_count(self) -> float:
"""
Number of scheduled messages.
"""
return pulumi.get(self, "scheduled_message_count")
@property
@pulumi.getter(name="transferDeadLetterMessageCount")
def transfer_dead_letter_message_count(self) -> float:
"""
Number of messages transferred into dead letters.
"""
return pulumi.get(self, "transfer_dead_letter_message_count")
@property
@pulumi.getter(name="transferMessageCount")
def transfer_message_count(self) -> float:
"""
Number of messages transferred to another queue, topic, or subscription.
"""
return pulumi.get(self, "transfer_message_count")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SkuResponse(dict):
"""
SKU of the namespace.
"""
def __init__(__self__, *,
tier: str,
capacity: Optional[int] = None,
name: Optional[str] = None):
"""
SKU of the namespace.
:param str tier: The billing tier of this particular SKU.
:param int capacity: The specified messaging units for the tier.
:param str name: Name of this SKU.
"""
pulumi.set(__self__, "tier", tier)
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def tier(self) -> str:
"""
The billing tier of this particular SKU.
"""
return pulumi.get(self, "tier")
@property
@pulumi.getter
def capacity(self) -> Optional[int]:
"""
The specified messaging units for the tier.
"""
return pulumi.get(self, "capacity")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of this SKU.
"""
return pulumi.get(self, "name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
|
py | 1a44af4394240b5d2167e6f5bee9d3d81f4f5658 | """
## Cloud Assembly Schema
<!--BEGIN STABILITY BANNER-->---

---
<!--END STABILITY BANNER-->
This module is part of the [AWS Cloud Development Kit](https://github.com/aws/aws-cdk) project.
## Cloud Assembly
The *Cloud Assembly* is the output of the synthesis operation. It is produced as part of the
[`cdk synth`](https://github.com/aws/aws-cdk/tree/master/packages/aws-cdk#cdk-synthesize)
command, or the [`app.synth()`](https://github.com/aws/aws-cdk/blob/master/packages/@aws-cdk/core/lib/app.ts#L135) method invocation.
It's essentially a set of files and directories, one of which is the `manifest.json` file. It defines the set of instructions that are
needed in order to deploy the assembly directory.
> For example, when `cdk deploy` is executed, the CLI reads this file and performs its instructions:
>
> * Build container images.
> * Upload assets.
> * Deploy CloudFormation templates.
Therefore, the assembly is how the CDK class library and CDK CLI (or any other consumer) communicate. To ensure compatibility
between the assembly and its consumers, we treat the manifest file as a well defined, versioned schema.
## Schema
This module contains the typescript structs that comprise the `manifest.json` file, as well as the
generated [*json-schema*](./schema/cloud-assembly.schema.json).
## Versioning
The schema version is specified in the [`cloud-assembly.version.json`](./schema/cloud-assembly.schema.json) file, under the `version` property.
It follows semantic versioning, but with a small twist.
When we add instructions to the assembly, they are reflected in the manifest file and the *json-schema* accordingly.
Every such instruction is crucial for ensuring the correct deployment behavior. This means that to properly deploy a cloud assembly,
consumers must be aware of every such instruction modification.
For this reason, every change to the schema, even though it might not strictly break validation of the *json-schema* format,
is considered a `major` version bump.
## How to consume
If you'd like to consume the [schema file](./schema/cloud-assembly.schema.json) in order to do validations on `manifest.json` files,
simply download it from this repo and run it against standard *json-schema* validators, such as [jsonschema](https://www.npmjs.com/package/jsonschema).
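A minimal sketch of such a validation, here using the Python [jsonschema](https://pypi.org/project/jsonschema/) package
(the file names below are illustrative):

```python
import json
from jsonschema import validate  # raises ValidationError on mismatch

with open("cloud-assembly.schema.json") as schema_file:
    schema = json.load(schema_file)
with open("cdk.out/manifest.json") as manifest_file:
    manifest = json.load(manifest_file)

validate(instance=manifest, schema=schema)
```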
Consumers must take into account the `major` version of the schema they are consuming. They should reject cloud assemblies
with a `major` version that is higher than what they expect. While schema validation might pass on such assemblies, the deployment integrity
cannot be guaranteed because some instructions will be ignored.
> For example, if your consumer was built when the schema version was 2.0.0, you should reject deploying cloud assemblies with a
> manifest version of 3.0.0.
## Contributing
See [Contribution Guide](./CONTRIBUTING.md)
"""
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AmiContextQuery",
jsii_struct_bases=[],
name_mapping={
"account": "account",
"filters": "filters",
"region": "region",
"owners": "owners",
},
)
class AmiContextQuery:
def __init__(
self,
*,
account: builtins.str,
filters: typing.Mapping[builtins.str, typing.List[builtins.str]],
region: builtins.str,
owners: typing.Optional[typing.List[builtins.str]] = None,
) -> None:
"""Query to AMI context provider.
:param account: Account to query.
:param filters: Filters to DescribeImages call.
:param region: Region to query.
:param owners: Owners to DescribeImages call. Default: - All owners
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"filters": filters,
"region": region,
}
if owners is not None:
self._values["owners"] = owners
@builtins.property
def account(self) -> builtins.str:
"""Account to query."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def filters(self) -> typing.Mapping[builtins.str, typing.List[builtins.str]]:
"""Filters to DescribeImages call."""
result = self._values.get("filters")
assert result is not None, "Required property 'filters' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Region to query."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
@builtins.property
def owners(self) -> typing.Optional[typing.List[builtins.str]]:
"""Owners to DescribeImages call.
:default: - All owners
"""
result = self._values.get("owners")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AmiContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.ArtifactManifest",
jsii_struct_bases=[],
name_mapping={
"type": "type",
"dependencies": "dependencies",
"environment": "environment",
"metadata": "metadata",
"properties": "properties",
},
)
class ArtifactManifest:
def __init__(
self,
*,
type: "ArtifactType",
dependencies: typing.Optional[typing.List[builtins.str]] = None,
environment: typing.Optional[builtins.str] = None,
metadata: typing.Optional[typing.Mapping[builtins.str, typing.List["MetadataEntry"]]] = None,
properties: typing.Optional[typing.Union["AwsCloudFormationStackProperties", "AssetManifestProperties", "TreeArtifactProperties", "NestedCloudAssemblyProperties"]] = None,
) -> None:
"""A manifest for a single artifact within the cloud assembly.
:param type: The type of artifact.
:param dependencies: IDs of artifacts that must be deployed before this artifact. Default: - no dependencies.
        :param environment: The environment into which this artifact is deployed. Default: - no environment.
:param metadata: Associated metadata. Default: - no metadata.
:param properties: The set of properties for this artifact (depends on type). Default: - no properties.
"""
self._values: typing.Dict[str, typing.Any] = {
"type": type,
}
if dependencies is not None:
self._values["dependencies"] = dependencies
if environment is not None:
self._values["environment"] = environment
if metadata is not None:
self._values["metadata"] = metadata
if properties is not None:
self._values["properties"] = properties
@builtins.property
def type(self) -> "ArtifactType":
"""The type of artifact."""
result = self._values.get("type")
assert result is not None, "Required property 'type' is missing"
return result
@builtins.property
def dependencies(self) -> typing.Optional[typing.List[builtins.str]]:
"""IDs of artifacts that must be deployed before this artifact.
:default: - no dependencies.
"""
result = self._values.get("dependencies")
return result
@builtins.property
def environment(self) -> typing.Optional[builtins.str]:
"""The environment into which this artifact is deployed.
        :default: - no environment.
"""
result = self._values.get("environment")
return result
@builtins.property
def metadata(
self,
) -> typing.Optional[typing.Mapping[builtins.str, typing.List["MetadataEntry"]]]:
"""Associated metadata.
:default: - no metadata.
"""
result = self._values.get("metadata")
return result
@builtins.property
def properties(
self,
) -> typing.Optional[typing.Union["AwsCloudFormationStackProperties", "AssetManifestProperties", "TreeArtifactProperties", "NestedCloudAssemblyProperties"]]:
"""The set of properties for this artifact (depends on type).
:default: - no properties.
"""
result = self._values.get("properties")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ArtifactManifest(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.enum(jsii_type="@aws-cdk/cloud-assembly-schema.ArtifactMetadataEntryType")
class ArtifactMetadataEntryType(enum.Enum):
"""Type of artifact metadata entry."""
ASSET = "ASSET"
"""Asset in metadata."""
INFO = "INFO"
"""Metadata key used to print INFO-level messages by the toolkit when an app is syntheized."""
WARN = "WARN"
"""Metadata key used to print WARNING-level messages by the toolkit when an app is syntheized."""
ERROR = "ERROR"
"""Metadata key used to print ERROR-level messages by the toolkit when an app is syntheized."""
LOGICAL_ID = "LOGICAL_ID"
"""Represents the CloudFormation logical ID of a resource at a certain path."""
STACK_TAGS = "STACK_TAGS"
"""Represents tags of a stack."""
@jsii.enum(jsii_type="@aws-cdk/cloud-assembly-schema.ArtifactType")
class ArtifactType(enum.Enum):
"""Type of cloud artifact."""
NONE = "NONE"
"""Stub required because of JSII."""
AWS_CLOUDFORMATION_STACK = "AWS_CLOUDFORMATION_STACK"
"""The artifact is an AWS CloudFormation stack."""
CDK_TREE = "CDK_TREE"
"""The artifact contains the CDK application's construct tree."""
ASSET_MANIFEST = "ASSET_MANIFEST"
"""Manifest for all assets in the Cloud Assembly."""
NESTED_CLOUD_ASSEMBLY = "NESTED_CLOUD_ASSEMBLY"
"""Nested Cloud Assembly."""
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AssemblyManifest",
jsii_struct_bases=[],
name_mapping={
"version": "version",
"artifacts": "artifacts",
"missing": "missing",
"runtime": "runtime",
},
)
class AssemblyManifest:
def __init__(
self,
*,
version: builtins.str,
artifacts: typing.Optional[typing.Mapping[builtins.str, ArtifactManifest]] = None,
missing: typing.Optional[typing.List["MissingContext"]] = None,
runtime: typing.Optional["RuntimeInfo"] = None,
) -> None:
"""A manifest which describes the cloud assembly.
:param version: Protocol version.
:param artifacts: The set of artifacts in this assembly. Default: - no artifacts.
:param missing: Missing context information. If this field has values, it means that the cloud assembly is not complete and should not be deployed. Default: - no missing context.
:param runtime: Runtime information. Default: - no info.
"""
if isinstance(runtime, dict):
runtime = RuntimeInfo(**runtime)
self._values: typing.Dict[str, typing.Any] = {
"version": version,
}
if artifacts is not None:
self._values["artifacts"] = artifacts
if missing is not None:
self._values["missing"] = missing
if runtime is not None:
self._values["runtime"] = runtime
@builtins.property
def version(self) -> builtins.str:
"""Protocol version."""
result = self._values.get("version")
assert result is not None, "Required property 'version' is missing"
return result
@builtins.property
def artifacts(
self,
) -> typing.Optional[typing.Mapping[builtins.str, ArtifactManifest]]:
"""The set of artifacts in this assembly.
:default: - no artifacts.
"""
result = self._values.get("artifacts")
return result
@builtins.property
def missing(self) -> typing.Optional[typing.List["MissingContext"]]:
"""Missing context information.
If this field has values, it means that the
cloud assembly is not complete and should not be deployed.
:default: - no missing context.
"""
result = self._values.get("missing")
return result
@builtins.property
def runtime(self) -> typing.Optional["RuntimeInfo"]:
"""Runtime information.
:default: - no info.
"""
result = self._values.get("runtime")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AssemblyManifest(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
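# A minimal sketch of how these structs compose into a manifest object. The
# artifact id, environment string, version, and template file name below are
# illustrative only:
#
#   manifest = AssemblyManifest(
#       version="21.0.0",
#       artifacts={
#           "MyStack": ArtifactManifest(
#               type=ArtifactType.AWS_CLOUDFORMATION_STACK,
#               environment="aws://123456789012/us-east-1",
#               properties=AwsCloudFormationStackProperties(
#                   template_file="MyStack.template.json",
#               ),
#           )
#       },
#   )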
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AssetManifest",
jsii_struct_bases=[],
name_mapping={
"version": "version",
"docker_images": "dockerImages",
"files": "files",
},
)
class AssetManifest:
def __init__(
self,
*,
version: builtins.str,
docker_images: typing.Optional[typing.Mapping[builtins.str, "DockerImageAsset"]] = None,
files: typing.Optional[typing.Mapping[builtins.str, "FileAsset"]] = None,
) -> None:
"""Definitions for the asset manifest.
:param version: Version of the manifest.
:param docker_images: The Docker image assets in this manifest. Default: - No Docker images
:param files: The file assets in this manifest. Default: - No files
"""
self._values: typing.Dict[str, typing.Any] = {
"version": version,
}
if docker_images is not None:
self._values["docker_images"] = docker_images
if files is not None:
self._values["files"] = files
@builtins.property
def version(self) -> builtins.str:
"""Version of the manifest."""
result = self._values.get("version")
assert result is not None, "Required property 'version' is missing"
return result
@builtins.property
def docker_images(
self,
) -> typing.Optional[typing.Mapping[builtins.str, "DockerImageAsset"]]:
"""The Docker image assets in this manifest.
:default: - No Docker images
"""
result = self._values.get("docker_images")
return result
@builtins.property
def files(self) -> typing.Optional[typing.Mapping[builtins.str, "FileAsset"]]:
"""The file assets in this manifest.
:default: - No files
"""
result = self._values.get("files")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AssetManifest(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AssetManifestProperties",
jsii_struct_bases=[],
name_mapping={
"file": "file",
"requires_bootstrap_stack_version": "requiresBootstrapStackVersion",
},
)
class AssetManifestProperties:
def __init__(
self,
*,
file: builtins.str,
requires_bootstrap_stack_version: typing.Optional[jsii.Number] = None,
) -> None:
"""Artifact properties for the Asset Manifest.
:param file: Filename of the asset manifest.
:param requires_bootstrap_stack_version: Version of bootstrap stack required to deploy this stack. Default: - Version 1 (basic modern bootstrap stack)
"""
self._values: typing.Dict[str, typing.Any] = {
"file": file,
}
if requires_bootstrap_stack_version is not None:
self._values["requires_bootstrap_stack_version"] = requires_bootstrap_stack_version
@builtins.property
def file(self) -> builtins.str:
"""Filename of the asset manifest."""
result = self._values.get("file")
assert result is not None, "Required property 'file' is missing"
return result
@builtins.property
def requires_bootstrap_stack_version(self) -> typing.Optional[jsii.Number]:
"""Version of bootstrap stack required to deploy this stack.
:default: - Version 1 (basic modern bootstrap stack)
"""
result = self._values.get("requires_bootstrap_stack_version")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AssetManifestProperties(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AvailabilityZonesContextQuery",
jsii_struct_bases=[],
name_mapping={"account": "account", "region": "region"},
)
class AvailabilityZonesContextQuery:
def __init__(self, *, account: builtins.str, region: builtins.str) -> None:
"""Query to availability zone context provider.
:param account: Query account.
:param region: Query region.
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"region": region,
}
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AvailabilityZonesContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AwsCloudFormationStackProperties",
jsii_struct_bases=[],
name_mapping={
"template_file": "templateFile",
"assume_role_arn": "assumeRoleArn",
"cloud_formation_execution_role_arn": "cloudFormationExecutionRoleArn",
"parameters": "parameters",
"requires_bootstrap_stack_version": "requiresBootstrapStackVersion",
"stack_name": "stackName",
"stack_template_asset_object_url": "stackTemplateAssetObjectUrl",
"tags": "tags",
"termination_protection": "terminationProtection",
},
)
class AwsCloudFormationStackProperties:
def __init__(
self,
*,
template_file: builtins.str,
assume_role_arn: typing.Optional[builtins.str] = None,
cloud_formation_execution_role_arn: typing.Optional[builtins.str] = None,
parameters: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
requires_bootstrap_stack_version: typing.Optional[jsii.Number] = None,
stack_name: typing.Optional[builtins.str] = None,
stack_template_asset_object_url: typing.Optional[builtins.str] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
termination_protection: typing.Optional[builtins.bool] = None,
) -> None:
"""Artifact properties for CloudFormation stacks.
:param template_file: A file relative to the assembly root which contains the CloudFormation template for this stack.
:param assume_role_arn: The role that needs to be assumed to deploy the stack. Default: - No role is assumed (current credentials are used)
:param cloud_formation_execution_role_arn: The role that is passed to CloudFormation to execute the change set. Default: - No role is passed (currently assumed role/credentials are used)
:param parameters: Values for CloudFormation stack parameters that should be passed when the stack is deployed. Default: - No parameters
:param requires_bootstrap_stack_version: Version of bootstrap stack required to deploy this stack. Default: - No bootstrap stack required
:param stack_name: The name to use for the CloudFormation stack. Default: - name derived from artifact ID
:param stack_template_asset_object_url: If the stack template has already been included in the asset manifest, its asset URL. Default: - Not uploaded yet, upload just before deploying
:param tags: Values for CloudFormation stack tags that should be passed when the stack is deployed. Default: - No tags
:param termination_protection: Whether to enable termination protection for this stack. Default: false
"""
self._values: typing.Dict[str, typing.Any] = {
"template_file": template_file,
}
if assume_role_arn is not None:
self._values["assume_role_arn"] = assume_role_arn
if cloud_formation_execution_role_arn is not None:
self._values["cloud_formation_execution_role_arn"] = cloud_formation_execution_role_arn
if parameters is not None:
self._values["parameters"] = parameters
if requires_bootstrap_stack_version is not None:
self._values["requires_bootstrap_stack_version"] = requires_bootstrap_stack_version
if stack_name is not None:
self._values["stack_name"] = stack_name
if stack_template_asset_object_url is not None:
self._values["stack_template_asset_object_url"] = stack_template_asset_object_url
if tags is not None:
self._values["tags"] = tags
if termination_protection is not None:
self._values["termination_protection"] = termination_protection
@builtins.property
def template_file(self) -> builtins.str:
"""A file relative to the assembly root which contains the CloudFormation template for this stack."""
result = self._values.get("template_file")
assert result is not None, "Required property 'template_file' is missing"
return result
@builtins.property
def assume_role_arn(self) -> typing.Optional[builtins.str]:
"""The role that needs to be assumed to deploy the stack.
:default: - No role is assumed (current credentials are used)
"""
result = self._values.get("assume_role_arn")
return result
@builtins.property
def cloud_formation_execution_role_arn(self) -> typing.Optional[builtins.str]:
"""The role that is passed to CloudFormation to execute the change set.
:default: - No role is passed (currently assumed role/credentials are used)
"""
result = self._values.get("cloud_formation_execution_role_arn")
return result
@builtins.property
def parameters(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
"""Values for CloudFormation stack parameters that should be passed when the stack is deployed.
:default: - No parameters
"""
result = self._values.get("parameters")
return result
@builtins.property
def requires_bootstrap_stack_version(self) -> typing.Optional[jsii.Number]:
"""Version of bootstrap stack required to deploy this stack.
:default: - No bootstrap stack required
"""
result = self._values.get("requires_bootstrap_stack_version")
return result
@builtins.property
def stack_name(self) -> typing.Optional[builtins.str]:
"""The name to use for the CloudFormation stack.
:default: - name derived from artifact ID
"""
result = self._values.get("stack_name")
return result
@builtins.property
def stack_template_asset_object_url(self) -> typing.Optional[builtins.str]:
"""If the stack template has already been included in the asset manifest, its asset URL.
:default: - Not uploaded yet, upload just before deploying
"""
result = self._values.get("stack_template_asset_object_url")
return result
@builtins.property
def tags(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
"""Values for CloudFormation stack tags that should be passed when the stack is deployed.
:default: - No tags
"""
result = self._values.get("tags")
return result
@builtins.property
def termination_protection(self) -> typing.Optional[builtins.bool]:
"""Whether to enable termination protection for this stack.
:default: false
"""
result = self._values.get("termination_protection")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AwsCloudFormationStackProperties(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.AwsDestination",
jsii_struct_bases=[],
name_mapping={
"assume_role_arn": "assumeRoleArn",
"assume_role_external_id": "assumeRoleExternalId",
"region": "region",
},
)
class AwsDestination:
def __init__(
self,
*,
assume_role_arn: typing.Optional[builtins.str] = None,
assume_role_external_id: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
) -> None:
"""Destination for assets that need to be uploaded to AWS.
:param assume_role_arn: The role that needs to be assumed while publishing this asset. Default: - No role will be assumed
:param assume_role_external_id: The ExternalId that needs to be supplied while assuming this role. Default: - No ExternalId will be supplied
:param region: The region where this asset will need to be published. Default: - Current region
"""
self._values: typing.Dict[str, typing.Any] = {}
if assume_role_arn is not None:
self._values["assume_role_arn"] = assume_role_arn
if assume_role_external_id is not None:
self._values["assume_role_external_id"] = assume_role_external_id
if region is not None:
self._values["region"] = region
@builtins.property
def assume_role_arn(self) -> typing.Optional[builtins.str]:
"""The role that needs to be assumed while publishing this asset.
:default: - No role will be assumed
"""
result = self._values.get("assume_role_arn")
return result
@builtins.property
def assume_role_external_id(self) -> typing.Optional[builtins.str]:
"""The ExternalId that needs to be supplied while assuming this role.
:default: - No ExternalId will be supplied
"""
result = self._values.get("assume_role_external_id")
return result
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
"""The region where this asset will need to be published.
:default: - Current region
"""
result = self._values.get("region")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AwsDestination(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.ContainerImageAssetMetadataEntry",
jsii_struct_bases=[],
name_mapping={
"id": "id",
"packaging": "packaging",
"path": "path",
"source_hash": "sourceHash",
"build_args": "buildArgs",
"file": "file",
"image_name_parameter": "imageNameParameter",
"image_tag": "imageTag",
"repository_name": "repositoryName",
"target": "target",
},
)
class ContainerImageAssetMetadataEntry:
def __init__(
self,
*,
id: builtins.str,
packaging: builtins.str,
path: builtins.str,
source_hash: builtins.str,
build_args: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
file: typing.Optional[builtins.str] = None,
image_name_parameter: typing.Optional[builtins.str] = None,
image_tag: typing.Optional[builtins.str] = None,
repository_name: typing.Optional[builtins.str] = None,
target: typing.Optional[builtins.str] = None,
) -> None:
"""Metadata Entry spec for container images.
:param id: Logical identifier for the asset.
:param packaging: Type of asset.
:param path: Path on disk to the asset.
:param source_hash: The hash of the asset source.
:param build_args: Build args to pass to the ``docker build`` command. Default: no build args are passed
:param file: Path to the Dockerfile (relative to the directory). Default: - no file is passed
:param image_name_parameter: (deprecated) ECR Repository name and repo digest (separated by "@sha256:") where this image is stored. Default: undefined If not specified, ``repositoryName`` and ``imageTag`` are required because otherwise how will the stack know where to find the asset, ha?
        :param image_tag: The docker image tag to use for tagging pushed images. This field is required if ``imageParameterName`` is omitted (otherwise, the app won't be able to find the image). Default: - this parameter is REQUIRED after 1.21.0
:param repository_name: ECR repository name, if omitted a default name based on the asset's ID is used instead. Specify this property if you need to statically address the image, e.g. from a Kubernetes Pod. Note, this is only the repository name, without the registry and the tag parts. Default: - this parameter is REQUIRED after 1.21.0
:param target: Docker target to build to. Default: no build target
"""
self._values: typing.Dict[str, typing.Any] = {
"id": id,
"packaging": packaging,
"path": path,
"source_hash": source_hash,
}
if build_args is not None:
self._values["build_args"] = build_args
if file is not None:
self._values["file"] = file
if image_name_parameter is not None:
self._values["image_name_parameter"] = image_name_parameter
if image_tag is not None:
self._values["image_tag"] = image_tag
if repository_name is not None:
self._values["repository_name"] = repository_name
if target is not None:
self._values["target"] = target
@builtins.property
def id(self) -> builtins.str:
"""Logical identifier for the asset."""
result = self._values.get("id")
assert result is not None, "Required property 'id' is missing"
return result
@builtins.property
def packaging(self) -> builtins.str:
"""Type of asset."""
result = self._values.get("packaging")
assert result is not None, "Required property 'packaging' is missing"
return result
@builtins.property
def path(self) -> builtins.str:
"""Path on disk to the asset."""
result = self._values.get("path")
assert result is not None, "Required property 'path' is missing"
return result
@builtins.property
def source_hash(self) -> builtins.str:
"""The hash of the asset source."""
result = self._values.get("source_hash")
assert result is not None, "Required property 'source_hash' is missing"
return result
@builtins.property
def build_args(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
"""Build args to pass to the ``docker build`` command.
:default: no build args are passed
"""
result = self._values.get("build_args")
return result
@builtins.property
def file(self) -> typing.Optional[builtins.str]:
"""Path to the Dockerfile (relative to the directory).
:default: - no file is passed
"""
result = self._values.get("file")
return result
@builtins.property
def image_name_parameter(self) -> typing.Optional[builtins.str]:
"""(deprecated) ECR Repository name and repo digest (separated by "@sha256:") where this image is stored.
:default:
undefined If not specified, ``repositoryName`` and ``imageTag`` are
required because otherwise how will the stack know where to find the asset,
ha?
:deprecated:
specify ``repositoryName`` and ``imageTag`` instead, and then you
know where the image will go.
:stability: deprecated
"""
result = self._values.get("image_name_parameter")
return result
@builtins.property
def image_tag(self) -> typing.Optional[builtins.str]:
"""The docker image tag to use for tagging pushed images.
This field is
        required if ``imageParameterName`` is omitted (otherwise, the app won't be
able to find the image).
:default: - this parameter is REQUIRED after 1.21.0
"""
result = self._values.get("image_tag")
return result
@builtins.property
def repository_name(self) -> typing.Optional[builtins.str]:
"""ECR repository name, if omitted a default name based on the asset's ID is used instead.
Specify this property if you need to statically address the
image, e.g. from a Kubernetes Pod. Note, this is only the repository name,
without the registry and the tag parts.
:default: - this parameter is REQUIRED after 1.21.0
"""
result = self._values.get("repository_name")
return result
@builtins.property
def target(self) -> typing.Optional[builtins.str]:
"""Docker target to build to.
:default: no build target
"""
result = self._values.get("target")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ContainerImageAssetMetadataEntry(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.enum(jsii_type="@aws-cdk/cloud-assembly-schema.ContextProvider")
class ContextProvider(enum.Enum):
"""Identifier for the context provider."""
AMI_PROVIDER = "AMI_PROVIDER"
"""AMI provider."""
AVAILABILITY_ZONE_PROVIDER = "AVAILABILITY_ZONE_PROVIDER"
"""AZ provider."""
HOSTED_ZONE_PROVIDER = "HOSTED_ZONE_PROVIDER"
"""Route53 Hosted Zone provider."""
SSM_PARAMETER_PROVIDER = "SSM_PARAMETER_PROVIDER"
"""SSM Parameter Provider."""
VPC_PROVIDER = "VPC_PROVIDER"
"""VPC Provider."""
ENDPOINT_SERVICE_AVAILABILITY_ZONE_PROVIDER = "ENDPOINT_SERVICE_AVAILABILITY_ZONE_PROVIDER"
"""VPC Endpoint Service AZ Provider."""
LOAD_BALANCER_PROVIDER = "LOAD_BALANCER_PROVIDER"
"""Load balancer provider."""
LOAD_BALANCER_LISTENER_PROVIDER = "LOAD_BALANCER_LISTENER_PROVIDER"
"""Load balancer listener provider."""
SECURITY_GROUP_PROVIDER = "SECURITY_GROUP_PROVIDER"
"""Security group provider."""
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.DockerImageAsset",
jsii_struct_bases=[],
name_mapping={"destinations": "destinations", "source": "source"},
)
class DockerImageAsset:
def __init__(
self,
*,
destinations: typing.Mapping[builtins.str, "DockerImageDestination"],
source: "DockerImageSource",
) -> None:
"""A file asset.
:param destinations: Destinations for this file asset.
:param source: Source description for file assets.
"""
if isinstance(source, dict):
source = DockerImageSource(**source)
self._values: typing.Dict[str, typing.Any] = {
"destinations": destinations,
"source": source,
}
@builtins.property
def destinations(self) -> typing.Mapping[builtins.str, "DockerImageDestination"]:
"""Destinations for this file asset."""
result = self._values.get("destinations")
assert result is not None, "Required property 'destinations' is missing"
return result
@builtins.property
def source(self) -> "DockerImageSource":
"""Source description for file assets."""
result = self._values.get("source")
assert result is not None, "Required property 'source' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DockerImageAsset(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.DockerImageDestination",
jsii_struct_bases=[AwsDestination],
name_mapping={
"assume_role_arn": "assumeRoleArn",
"assume_role_external_id": "assumeRoleExternalId",
"region": "region",
"image_tag": "imageTag",
"repository_name": "repositoryName",
},
)
class DockerImageDestination(AwsDestination):
def __init__(
self,
*,
assume_role_arn: typing.Optional[builtins.str] = None,
assume_role_external_id: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
image_tag: builtins.str,
repository_name: builtins.str,
) -> None:
"""Where to publish docker images.
:param assume_role_arn: The role that needs to be assumed while publishing this asset. Default: - No role will be assumed
:param assume_role_external_id: The ExternalId that needs to be supplied while assuming this role. Default: - No ExternalId will be supplied
:param region: The region where this asset will need to be published. Default: - Current region
:param image_tag: Tag of the image to publish.
:param repository_name: Name of the ECR repository to publish to.
"""
self._values: typing.Dict[str, typing.Any] = {
"image_tag": image_tag,
"repository_name": repository_name,
}
if assume_role_arn is not None:
self._values["assume_role_arn"] = assume_role_arn
if assume_role_external_id is not None:
self._values["assume_role_external_id"] = assume_role_external_id
if region is not None:
self._values["region"] = region
@builtins.property
def assume_role_arn(self) -> typing.Optional[builtins.str]:
"""The role that needs to be assumed while publishing this asset.
:default: - No role will be assumed
"""
result = self._values.get("assume_role_arn")
return result
@builtins.property
def assume_role_external_id(self) -> typing.Optional[builtins.str]:
"""The ExternalId that needs to be supplied while assuming this role.
:default: - No ExternalId will be supplied
"""
result = self._values.get("assume_role_external_id")
return result
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
"""The region where this asset will need to be published.
:default: - Current region
"""
result = self._values.get("region")
return result
@builtins.property
def image_tag(self) -> builtins.str:
"""Tag of the image to publish."""
result = self._values.get("image_tag")
assert result is not None, "Required property 'image_tag' is missing"
return result
@builtins.property
def repository_name(self) -> builtins.str:
"""Name of the ECR repository to publish to."""
result = self._values.get("repository_name")
assert result is not None, "Required property 'repository_name' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DockerImageDestination(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.DockerImageSource",
jsii_struct_bases=[],
name_mapping={
"directory": "directory",
"docker_build_args": "dockerBuildArgs",
"docker_build_target": "dockerBuildTarget",
"docker_file": "dockerFile",
},
)
class DockerImageSource:
def __init__(
self,
*,
directory: builtins.str,
docker_build_args: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
docker_build_target: typing.Optional[builtins.str] = None,
docker_file: typing.Optional[builtins.str] = None,
) -> None:
"""Properties for how to produce a Docker image from a source.
:param directory: The directory containing the Docker image build instructions. This path is relative to the asset manifest location.
:param docker_build_args: Additional build arguments. Default: - No additional build arguments
:param docker_build_target: Target build stage in a Dockerfile with multiple build stages. Default: - The last stage in the Dockerfile
:param docker_file: The name of the file with build instructions. Default: "Dockerfile"
"""
self._values: typing.Dict[str, typing.Any] = {
"directory": directory,
}
if docker_build_args is not None:
self._values["docker_build_args"] = docker_build_args
if docker_build_target is not None:
self._values["docker_build_target"] = docker_build_target
if docker_file is not None:
self._values["docker_file"] = docker_file
@builtins.property
def directory(self) -> builtins.str:
"""The directory containing the Docker image build instructions.
This path is relative to the asset manifest location.
"""
result = self._values.get("directory")
assert result is not None, "Required property 'directory' is missing"
return result
@builtins.property
def docker_build_args(
self,
) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
"""Additional build arguments.
:default: - No additional build arguments
"""
result = self._values.get("docker_build_args")
return result
@builtins.property
def docker_build_target(self) -> typing.Optional[builtins.str]:
"""Target build stage in a Dockerfile with multiple build stages.
:default: - The last stage in the Dockerfile
"""
result = self._values.get("docker_build_target")
return result
@builtins.property
def docker_file(self) -> typing.Optional[builtins.str]:
"""The name of the file with build instructions.
:default: "Dockerfile"
"""
result = self._values.get("docker_file")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DockerImageSource(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
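# A minimal sketch of describing a single Docker image asset with these structs
# (the destination key, repository name, and tag below are illustrative only):
#
#   image_asset = DockerImageAsset(
#       source=DockerImageSource(directory="./docker", docker_file="Dockerfile"),
#       destinations={
#           "current_account-current_region": DockerImageDestination(
#               repository_name="my-repo",
#               image_tag="abcdef123456",
#           )
#       },
#   )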
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.EndpointServiceAvailabilityZonesContextQuery",
jsii_struct_bases=[],
name_mapping={
"account": "account",
"region": "region",
"service_name": "serviceName",
},
)
class EndpointServiceAvailabilityZonesContextQuery:
def __init__(
self,
*,
account: builtins.str,
region: builtins.str,
service_name: builtins.str,
) -> None:
"""Query to endpoint service context provider.
:param account: Query account.
:param region: Query region.
:param service_name: Query service name.
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"region": region,
"service_name": service_name,
}
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
@builtins.property
def service_name(self) -> builtins.str:
"""Query service name."""
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "EndpointServiceAvailabilityZonesContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.FileAsset",
jsii_struct_bases=[],
name_mapping={"destinations": "destinations", "source": "source"},
)
class FileAsset:
def __init__(
self,
*,
destinations: typing.Mapping[builtins.str, "FileDestination"],
source: "FileSource",
) -> None:
"""A file asset.
:param destinations: Destinations for this file asset.
:param source: Source description for file assets.
"""
if isinstance(source, dict):
source = FileSource(**source)
self._values: typing.Dict[str, typing.Any] = {
"destinations": destinations,
"source": source,
}
@builtins.property
def destinations(self) -> typing.Mapping[builtins.str, "FileDestination"]:
"""Destinations for this file asset."""
result = self._values.get("destinations")
assert result is not None, "Required property 'destinations' is missing"
return result
@builtins.property
def source(self) -> "FileSource":
"""Source description for file assets."""
result = self._values.get("source")
assert result is not None, "Required property 'source' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FileAsset(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.FileAssetMetadataEntry",
jsii_struct_bases=[],
name_mapping={
"artifact_hash_parameter": "artifactHashParameter",
"id": "id",
"packaging": "packaging",
"path": "path",
"s3_bucket_parameter": "s3BucketParameter",
"s3_key_parameter": "s3KeyParameter",
"source_hash": "sourceHash",
},
)
class FileAssetMetadataEntry:
def __init__(
self,
*,
artifact_hash_parameter: builtins.str,
id: builtins.str,
packaging: builtins.str,
path: builtins.str,
s3_bucket_parameter: builtins.str,
s3_key_parameter: builtins.str,
source_hash: builtins.str,
) -> None:
"""Metadata Entry spec for files.
:param artifact_hash_parameter: The name of the parameter where the hash of the bundled asset should be passed in.
:param id: Logical identifier for the asset.
:param packaging: Requested packaging style.
:param path: Path on disk to the asset.
:param s3_bucket_parameter: Name of parameter where S3 bucket should be passed in.
:param s3_key_parameter: Name of parameter where S3 key should be passed in.
:param source_hash: The hash of the asset source.
"""
self._values: typing.Dict[str, typing.Any] = {
"artifact_hash_parameter": artifact_hash_parameter,
"id": id,
"packaging": packaging,
"path": path,
"s3_bucket_parameter": s3_bucket_parameter,
"s3_key_parameter": s3_key_parameter,
"source_hash": source_hash,
}
@builtins.property
def artifact_hash_parameter(self) -> builtins.str:
"""The name of the parameter where the hash of the bundled asset should be passed in."""
result = self._values.get("artifact_hash_parameter")
assert result is not None, "Required property 'artifact_hash_parameter' is missing"
return result
@builtins.property
def id(self) -> builtins.str:
"""Logical identifier for the asset."""
result = self._values.get("id")
assert result is not None, "Required property 'id' is missing"
return result
@builtins.property
def packaging(self) -> builtins.str:
"""Requested packaging style."""
result = self._values.get("packaging")
assert result is not None, "Required property 'packaging' is missing"
return result
@builtins.property
def path(self) -> builtins.str:
"""Path on disk to the asset."""
result = self._values.get("path")
assert result is not None, "Required property 'path' is missing"
return result
@builtins.property
def s3_bucket_parameter(self) -> builtins.str:
"""Name of parameter where S3 bucket should be passed in."""
result = self._values.get("s3_bucket_parameter")
assert result is not None, "Required property 's3_bucket_parameter' is missing"
return result
@builtins.property
def s3_key_parameter(self) -> builtins.str:
"""Name of parameter where S3 key should be passed in."""
result = self._values.get("s3_key_parameter")
assert result is not None, "Required property 's3_key_parameter' is missing"
return result
@builtins.property
def source_hash(self) -> builtins.str:
"""The hash of the asset source."""
result = self._values.get("source_hash")
assert result is not None, "Required property 'source_hash' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FileAssetMetadataEntry(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.enum(jsii_type="@aws-cdk/cloud-assembly-schema.FileAssetPackaging")
class FileAssetPackaging(enum.Enum):
"""Packaging strategy for file assets."""
FILE = "FILE"
"""Upload the given path as a file."""
ZIP_DIRECTORY = "ZIP_DIRECTORY"
"""The given path is a directory, zip it and upload."""
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.FileDestination",
jsii_struct_bases=[AwsDestination],
name_mapping={
"assume_role_arn": "assumeRoleArn",
"assume_role_external_id": "assumeRoleExternalId",
"region": "region",
"bucket_name": "bucketName",
"object_key": "objectKey",
},
)
class FileDestination(AwsDestination):
def __init__(
self,
*,
assume_role_arn: typing.Optional[builtins.str] = None,
assume_role_external_id: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
bucket_name: builtins.str,
object_key: builtins.str,
) -> None:
"""Where in S3 a file asset needs to be published.
:param assume_role_arn: The role that needs to be assumed while publishing this asset. Default: - No role will be assumed
:param assume_role_external_id: The ExternalId that needs to be supplied while assuming this role. Default: - No ExternalId will be supplied
:param region: The region where this asset will need to be published. Default: - Current region
:param bucket_name: The name of the bucket.
:param object_key: The destination object key.
"""
self._values: typing.Dict[str, typing.Any] = {
"bucket_name": bucket_name,
"object_key": object_key,
}
if assume_role_arn is not None:
self._values["assume_role_arn"] = assume_role_arn
if assume_role_external_id is not None:
self._values["assume_role_external_id"] = assume_role_external_id
if region is not None:
self._values["region"] = region
@builtins.property
def assume_role_arn(self) -> typing.Optional[builtins.str]:
"""The role that needs to be assumed while publishing this asset.
:default: - No role will be assumed
"""
result = self._values.get("assume_role_arn")
return result
@builtins.property
def assume_role_external_id(self) -> typing.Optional[builtins.str]:
"""The ExternalId that needs to be supplied while assuming this role.
:default: - No ExternalId will be supplied
"""
result = self._values.get("assume_role_external_id")
return result
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
"""The region where this asset will need to be published.
:default: - Current region
"""
result = self._values.get("region")
return result
@builtins.property
def bucket_name(self) -> builtins.str:
"""The name of the bucket."""
result = self._values.get("bucket_name")
assert result is not None, "Required property 'bucket_name' is missing"
return result
@builtins.property
def object_key(self) -> builtins.str:
"""The destination object key."""
result = self._values.get("object_key")
assert result is not None, "Required property 'object_key' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FileDestination(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.FileSource",
jsii_struct_bases=[],
name_mapping={"path": "path", "packaging": "packaging"},
)
class FileSource:
def __init__(
self,
*,
path: builtins.str,
packaging: typing.Optional[FileAssetPackaging] = None,
) -> None:
"""Describe the source of a file asset.
:param path: The filesystem object to upload. This path is relative to the asset manifest location.
:param packaging: Packaging method. Default: FILE
"""
self._values: typing.Dict[str, typing.Any] = {
"path": path,
}
if packaging is not None:
self._values["packaging"] = packaging
@builtins.property
def path(self) -> builtins.str:
"""The filesystem object to upload.
This path is relative to the asset manifest location.
"""
result = self._values.get("path")
assert result is not None, "Required property 'path' is missing"
return result
@builtins.property
def packaging(self) -> typing.Optional[FileAssetPackaging]:
"""Packaging method.
:default: FILE
"""
result = self._values.get("packaging")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FileSource(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
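# Illustrative sketch of how the file-asset data classes above fit together:
# a FileSource describes what to upload, a FileDestination says where it goes,
# and a FileAsset ties one source to named destinations. The path, bucket,
# key, and destination id used here are hypothetical example values.
def _example_file_asset() -> FileAsset:
    source = FileSource(path="assets/app.zip", packaging=FileAssetPackaging.FILE)
    destination = FileDestination(
        bucket_name="example-asset-bucket",
        object_key="app.zip",
        region="us-east-1",
    )
    return FileAsset(source=source, destinations={"current-account": destination})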
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.HostedZoneContextQuery",
jsii_struct_bases=[],
name_mapping={
"account": "account",
"domain_name": "domainName",
"region": "region",
"private_zone": "privateZone",
"vpc_id": "vpcId",
},
)
class HostedZoneContextQuery:
def __init__(
self,
*,
account: builtins.str,
domain_name: builtins.str,
region: builtins.str,
private_zone: typing.Optional[builtins.bool] = None,
vpc_id: typing.Optional[builtins.str] = None,
) -> None:
"""Query to hosted zone context provider.
:param account: Query account.
        :param domain_name: The domain name to look up (e.g. example.com).
:param region: Query region.
:param private_zone: True if the zone you want to find is a private hosted zone. Default: false
        :param vpc_id: The VPC ID that the private zone must be associated with. If you provide a VPC ID and privateZone is false, this will return no results and raise an error. Default: - Required if privateZone=true
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"domain_name": domain_name,
"region": region,
}
if private_zone is not None:
self._values["private_zone"] = private_zone
if vpc_id is not None:
self._values["vpc_id"] = vpc_id
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def domain_name(self) -> builtins.str:
"""The domain name e.g. example.com to lookup."""
result = self._values.get("domain_name")
assert result is not None, "Required property 'domain_name' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
@builtins.property
def private_zone(self) -> typing.Optional[builtins.bool]:
"""True if the zone you want to find is a private hosted zone.
:default: false
"""
result = self._values.get("private_zone")
return result
@builtins.property
def vpc_id(self) -> typing.Optional[builtins.str]:
"""The VPC ID to that the private zone must be associated with.
If you provide VPC ID and privateZone is false, this will return no results
and raise an error.
:default: - Required if privateZone=true
"""
result = self._values.get("vpc_id")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "HostedZoneContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.LoadBalancerFilter",
jsii_struct_bases=[],
name_mapping={
"load_balancer_type": "loadBalancerType",
"load_balancer_arn": "loadBalancerArn",
"load_balancer_tags": "loadBalancerTags",
},
)
class LoadBalancerFilter:
def __init__(
self,
*,
load_balancer_type: "LoadBalancerType",
load_balancer_arn: typing.Optional[builtins.str] = None,
load_balancer_tags: typing.Optional[typing.List["Tag"]] = None,
) -> None:
"""Filters for selecting load balancers.
:param load_balancer_type: Filter load balancers by their type.
:param load_balancer_arn: Find by load balancer's ARN. Default: - does not search by load balancer arn
:param load_balancer_tags: Match load balancer tags. Default: - does not match load balancers by tags
"""
self._values: typing.Dict[str, typing.Any] = {
"load_balancer_type": load_balancer_type,
}
if load_balancer_arn is not None:
self._values["load_balancer_arn"] = load_balancer_arn
if load_balancer_tags is not None:
self._values["load_balancer_tags"] = load_balancer_tags
@builtins.property
def load_balancer_type(self) -> "LoadBalancerType":
"""Filter load balancers by their type."""
result = self._values.get("load_balancer_type")
assert result is not None, "Required property 'load_balancer_type' is missing"
return result
@builtins.property
def load_balancer_arn(self) -> typing.Optional[builtins.str]:
"""Find by load balancer's ARN.
:default: - does not search by load balancer arn
"""
result = self._values.get("load_balancer_arn")
return result
@builtins.property
def load_balancer_tags(self) -> typing.Optional[typing.List["Tag"]]:
"""Match load balancer tags.
:default: - does not match load balancers by tags
"""
result = self._values.get("load_balancer_tags")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LoadBalancerFilter(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.LoadBalancerListenerContextQuery",
jsii_struct_bases=[LoadBalancerFilter],
name_mapping={
"load_balancer_type": "loadBalancerType",
"load_balancer_arn": "loadBalancerArn",
"load_balancer_tags": "loadBalancerTags",
"account": "account",
"region": "region",
"listener_arn": "listenerArn",
"listener_port": "listenerPort",
"listener_protocol": "listenerProtocol",
},
)
class LoadBalancerListenerContextQuery(LoadBalancerFilter):
def __init__(
self,
*,
load_balancer_type: "LoadBalancerType",
load_balancer_arn: typing.Optional[builtins.str] = None,
load_balancer_tags: typing.Optional[typing.List["Tag"]] = None,
account: builtins.str,
region: builtins.str,
listener_arn: typing.Optional[builtins.str] = None,
listener_port: typing.Optional[jsii.Number] = None,
listener_protocol: typing.Optional["LoadBalancerListenerProtocol"] = None,
) -> None:
"""Query input for looking up a load balancer listener.
:param load_balancer_type: Filter load balancers by their type.
:param load_balancer_arn: Find by load balancer's ARN. Default: - does not search by load balancer arn
:param load_balancer_tags: Match load balancer tags. Default: - does not match load balancers by tags
:param account: Query account.
:param region: Query region.
:param listener_arn: Find by listener's arn. Default: - does not find by listener arn
:param listener_port: Filter listeners by listener port. Default: - does not filter by a listener port
:param listener_protocol: Filter by listener protocol. Default: - does not filter by listener protocol
"""
self._values: typing.Dict[str, typing.Any] = {
"load_balancer_type": load_balancer_type,
"account": account,
"region": region,
}
if load_balancer_arn is not None:
self._values["load_balancer_arn"] = load_balancer_arn
if load_balancer_tags is not None:
self._values["load_balancer_tags"] = load_balancer_tags
if listener_arn is not None:
self._values["listener_arn"] = listener_arn
if listener_port is not None:
self._values["listener_port"] = listener_port
if listener_protocol is not None:
self._values["listener_protocol"] = listener_protocol
@builtins.property
def load_balancer_type(self) -> "LoadBalancerType":
"""Filter load balancers by their type."""
result = self._values.get("load_balancer_type")
assert result is not None, "Required property 'load_balancer_type' is missing"
return result
@builtins.property
def load_balancer_arn(self) -> typing.Optional[builtins.str]:
"""Find by load balancer's ARN.
:default: - does not search by load balancer arn
"""
result = self._values.get("load_balancer_arn")
return result
@builtins.property
def load_balancer_tags(self) -> typing.Optional[typing.List["Tag"]]:
"""Match load balancer tags.
:default: - does not match load balancers by tags
"""
result = self._values.get("load_balancer_tags")
return result
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
@builtins.property
def listener_arn(self) -> typing.Optional[builtins.str]:
"""Find by listener's arn.
:default: - does not find by listener arn
"""
result = self._values.get("listener_arn")
return result
@builtins.property
def listener_port(self) -> typing.Optional[jsii.Number]:
"""Filter listeners by listener port.
:default: - does not filter by a listener port
"""
result = self._values.get("listener_port")
return result
@builtins.property
def listener_protocol(self) -> typing.Optional["LoadBalancerListenerProtocol"]:
"""Filter by listener protocol.
:default: - does not filter by listener protocol
"""
result = self._values.get("listener_protocol")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LoadBalancerListenerContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.enum(jsii_type="@aws-cdk/cloud-assembly-schema.LoadBalancerListenerProtocol")
class LoadBalancerListenerProtocol(enum.Enum):
"""The protocol for connections from clients to the load balancer."""
HTTP = "HTTP"
"""HTTP protocol."""
HTTPS = "HTTPS"
"""HTTPS protocol."""
TCP = "TCP"
"""TCP protocol."""
TLS = "TLS"
"""TLS protocol."""
UDP = "UDP"
"""UDP protocol."""
TCP_UDP = "TCP_UDP"
"""TCP and UDP protocol."""
@jsii.enum(jsii_type="@aws-cdk/cloud-assembly-schema.LoadBalancerType")
class LoadBalancerType(enum.Enum):
"""Type of load balancer."""
NETWORK = "NETWORK"
"""Network load balancer."""
APPLICATION = "APPLICATION"
"""Application load balancer."""
class Manifest(
metaclass=jsii.JSIIMeta,
jsii_type="@aws-cdk/cloud-assembly-schema.Manifest",
):
"""Protocol utility class."""
@jsii.member(jsii_name="load")
@builtins.classmethod
def load(cls, file_path: builtins.str) -> AssemblyManifest:
"""(deprecated) Deprecated.
:param file_path: -
:deprecated: use ``loadAssemblyManifest()``
:stability: deprecated
"""
return jsii.sinvoke(cls, "load", [file_path])
@jsii.member(jsii_name="loadAssemblyManifest")
@builtins.classmethod
def load_assembly_manifest(cls, file_path: builtins.str) -> AssemblyManifest:
"""Load and validates the cloud assembly manifest from file.
:param file_path: - path to the manifest file.
"""
return jsii.sinvoke(cls, "loadAssemblyManifest", [file_path])
@jsii.member(jsii_name="loadAssetManifest")
@builtins.classmethod
def load_asset_manifest(cls, file_path: builtins.str) -> AssetManifest:
"""Load and validates the asset manifest from file.
:param file_path: - path to the manifest file.
"""
return jsii.sinvoke(cls, "loadAssetManifest", [file_path])
@jsii.member(jsii_name="save")
@builtins.classmethod
def save(cls, manifest: AssemblyManifest, file_path: builtins.str) -> None:
"""(deprecated) Deprecated.
:param manifest: -
:param file_path: -
:deprecated: use ``saveAssemblyManifest()``
:stability: deprecated
"""
return jsii.sinvoke(cls, "save", [manifest, file_path])
@jsii.member(jsii_name="saveAssemblyManifest")
@builtins.classmethod
def save_assembly_manifest(
cls,
manifest: AssemblyManifest,
file_path: builtins.str,
) -> None:
"""Validates and saves the cloud assembly manifest to file.
:param manifest: - manifest.
:param file_path: - output file path.
"""
return jsii.sinvoke(cls, "saveAssemblyManifest", [manifest, file_path])
@jsii.member(jsii_name="saveAssetManifest")
@builtins.classmethod
def save_asset_manifest(
cls,
manifest: AssetManifest,
file_path: builtins.str,
) -> None:
"""Validates and saves the asset manifest to file.
:param manifest: - manifest.
:param file_path: - output file path.
"""
return jsii.sinvoke(cls, "saveAssetManifest", [manifest, file_path])
@jsii.member(jsii_name="version")
@builtins.classmethod
def version(cls) -> builtins.str:
"""Fetch the current schema version number."""
return jsii.sinvoke(cls, "version", [])
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.MetadataEntry",
jsii_struct_bases=[],
name_mapping={"type": "type", "data": "data", "trace": "trace"},
)
class MetadataEntry:
def __init__(
self,
*,
type: builtins.str,
data: typing.Optional[typing.Union[builtins.str, FileAssetMetadataEntry, ContainerImageAssetMetadataEntry, typing.List["Tag"]]] = None,
trace: typing.Optional[typing.List[builtins.str]] = None,
) -> None:
"""A metadata entry in a cloud assembly artifact.
:param type: The type of the metadata entry.
:param data: The data. Default: - no data.
:param trace: A stack trace for when the entry was created. Default: - no trace.
"""
self._values: typing.Dict[str, typing.Any] = {
"type": type,
}
if data is not None:
self._values["data"] = data
if trace is not None:
self._values["trace"] = trace
@builtins.property
def type(self) -> builtins.str:
"""The type of the metadata entry."""
result = self._values.get("type")
assert result is not None, "Required property 'type' is missing"
return result
@builtins.property
def data(
self,
) -> typing.Optional[typing.Union[builtins.str, FileAssetMetadataEntry, ContainerImageAssetMetadataEntry, typing.List["Tag"]]]:
"""The data.
:default: - no data.
"""
result = self._values.get("data")
return result
@builtins.property
def trace(self) -> typing.Optional[typing.List[builtins.str]]:
"""A stack trace for when the entry was created.
:default: - no trace.
"""
result = self._values.get("trace")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "MetadataEntry(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.MissingContext",
jsii_struct_bases=[],
name_mapping={"key": "key", "props": "props", "provider": "provider"},
)
class MissingContext:
def __init__(
self,
*,
key: builtins.str,
props: typing.Union[AmiContextQuery, AvailabilityZonesContextQuery, HostedZoneContextQuery, "SSMParameterContextQuery", "VpcContextQuery", EndpointServiceAvailabilityZonesContextQuery, "LoadBalancerContextQuery", LoadBalancerListenerContextQuery, "SecurityGroupContextQuery"],
provider: ContextProvider,
) -> None:
"""Represents a missing piece of context.
:param key: The missing context key.
:param props: A set of provider-specific options.
:param provider: The provider from which we expect this context key to be obtained.
"""
self._values: typing.Dict[str, typing.Any] = {
"key": key,
"props": props,
"provider": provider,
}
@builtins.property
def key(self) -> builtins.str:
"""The missing context key."""
result = self._values.get("key")
assert result is not None, "Required property 'key' is missing"
return result
@builtins.property
def props(
self,
) -> typing.Union[AmiContextQuery, AvailabilityZonesContextQuery, HostedZoneContextQuery, "SSMParameterContextQuery", "VpcContextQuery", EndpointServiceAvailabilityZonesContextQuery, "LoadBalancerContextQuery", LoadBalancerListenerContextQuery, "SecurityGroupContextQuery"]:
"""A set of provider-specific options."""
result = self._values.get("props")
assert result is not None, "Required property 'props' is missing"
return result
@builtins.property
def provider(self) -> ContextProvider:
"""The provider from which we expect this context key to be obtained."""
result = self._values.get("provider")
assert result is not None, "Required property 'provider' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "MissingContext(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.NestedCloudAssemblyProperties",
jsii_struct_bases=[],
name_mapping={"directory_name": "directoryName", "display_name": "displayName"},
)
class NestedCloudAssemblyProperties:
def __init__(
self,
*,
directory_name: builtins.str,
display_name: typing.Optional[builtins.str] = None,
) -> None:
"""Artifact properties for nested cloud assemblies.
:param directory_name: Relative path to the nested cloud assembly.
:param display_name: Display name for the cloud assembly. Default: - The artifact ID
"""
self._values: typing.Dict[str, typing.Any] = {
"directory_name": directory_name,
}
if display_name is not None:
self._values["display_name"] = display_name
@builtins.property
def directory_name(self) -> builtins.str:
"""Relative path to the nested cloud assembly."""
result = self._values.get("directory_name")
assert result is not None, "Required property 'directory_name' is missing"
return result
@builtins.property
def display_name(self) -> typing.Optional[builtins.str]:
"""Display name for the cloud assembly.
:default: - The artifact ID
"""
result = self._values.get("display_name")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "NestedCloudAssemblyProperties(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.RuntimeInfo",
jsii_struct_bases=[],
name_mapping={"libraries": "libraries"},
)
class RuntimeInfo:
def __init__(
self,
*,
libraries: typing.Mapping[builtins.str, builtins.str],
) -> None:
"""Information about the application's runtime components.
:param libraries: The list of libraries loaded in the application, associated with their versions.
"""
self._values: typing.Dict[str, typing.Any] = {
"libraries": libraries,
}
@builtins.property
def libraries(self) -> typing.Mapping[builtins.str, builtins.str]:
"""The list of libraries loaded in the application, associated with their versions."""
result = self._values.get("libraries")
assert result is not None, "Required property 'libraries' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RuntimeInfo(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.SSMParameterContextQuery",
jsii_struct_bases=[],
name_mapping={
"account": "account",
"parameter_name": "parameterName",
"region": "region",
},
)
class SSMParameterContextQuery:
def __init__(
self,
*,
account: builtins.str,
parameter_name: builtins.str,
region: builtins.str,
) -> None:
"""Query to SSM Parameter Context Provider.
:param account: Query account.
:param parameter_name: Parameter name to query.
:param region: Query region.
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"parameter_name": parameter_name,
"region": region,
}
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def parameter_name(self) -> builtins.str:
"""Parameter name to query."""
result = self._values.get("parameter_name")
assert result is not None, "Required property 'parameter_name' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "SSMParameterContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.SecurityGroupContextQuery",
jsii_struct_bases=[],
name_mapping={
"account": "account",
"region": "region",
"security_group_id": "securityGroupId",
},
)
class SecurityGroupContextQuery:
def __init__(
self,
*,
account: builtins.str,
region: builtins.str,
security_group_id: builtins.str,
) -> None:
"""Query input for looking up a security group.
:param account: Query account.
:param region: Query region.
:param security_group_id: Security group id.
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"region": region,
"security_group_id": security_group_id,
}
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
@builtins.property
def security_group_id(self) -> builtins.str:
"""Security group id."""
result = self._values.get("security_group_id")
assert result is not None, "Required property 'security_group_id' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "SecurityGroupContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.Tag",
jsii_struct_bases=[],
name_mapping={"key": "key", "value": "value"},
)
class Tag:
def __init__(self, *, key: builtins.str, value: builtins.str) -> None:
"""Metadata Entry spec for stack tag.
:param key: Tag key. (In the actual file on disk this will be cased as "Key", and the structure is patched to match this structure upon loading: https://github.com/aws/aws-cdk/blob/4aadaa779b48f35838cccd4e25107b2338f05547/packages/%40aws-cdk/cloud-assembly-schema/lib/manifest.ts#L137)
:param value: Tag value. (In the actual file on disk this will be cased as "Value", and the structure is patched to match this structure upon loading: https://github.com/aws/aws-cdk/blob/4aadaa779b48f35838cccd4e25107b2338f05547/packages/%40aws-cdk/cloud-assembly-schema/lib/manifest.ts#L137)
"""
self._values: typing.Dict[str, typing.Any] = {
"key": key,
"value": value,
}
@builtins.property
def key(self) -> builtins.str:
"""Tag key.
(In the actual file on disk this will be cased as "Key", and the structure is
patched to match this structure upon loading:
https://github.com/aws/aws-cdk/blob/4aadaa779b48f35838cccd4e25107b2338f05547/packages/%40aws-cdk/cloud-assembly-schema/lib/manifest.ts#L137)
"""
result = self._values.get("key")
assert result is not None, "Required property 'key' is missing"
return result
@builtins.property
def value(self) -> builtins.str:
"""Tag value.
(In the actual file on disk this will be cased as "Value", and the structure is
patched to match this structure upon loading:
https://github.com/aws/aws-cdk/blob/4aadaa779b48f35838cccd4e25107b2338f05547/packages/%40aws-cdk/cloud-assembly-schema/lib/manifest.ts#L137)
"""
result = self._values.get("value")
assert result is not None, "Required property 'value' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "Tag(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.TreeArtifactProperties",
jsii_struct_bases=[],
name_mapping={"file": "file"},
)
class TreeArtifactProperties:
def __init__(self, *, file: builtins.str) -> None:
"""Artifact properties for the Construct Tree Artifact.
:param file: Filename of the tree artifact.
"""
self._values: typing.Dict[str, typing.Any] = {
"file": file,
}
@builtins.property
def file(self) -> builtins.str:
"""Filename of the tree artifact."""
result = self._values.get("file")
assert result is not None, "Required property 'file' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TreeArtifactProperties(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.VpcContextQuery",
jsii_struct_bases=[],
name_mapping={
"account": "account",
"filter": "filter",
"region": "region",
"return_asymmetric_subnets": "returnAsymmetricSubnets",
"subnet_group_name_tag": "subnetGroupNameTag",
},
)
class VpcContextQuery:
def __init__(
self,
*,
account: builtins.str,
filter: typing.Mapping[builtins.str, builtins.str],
region: builtins.str,
return_asymmetric_subnets: typing.Optional[builtins.bool] = None,
subnet_group_name_tag: typing.Optional[builtins.str] = None,
) -> None:
"""Query input for looking up a VPC.
:param account: Query account.
:param filter: Filters to apply to the VPC. Filter parameters are the same as passed to DescribeVpcs.
:param region: Query region.
:param return_asymmetric_subnets: Whether to populate the subnetGroups field of the {@link VpcContextResponse}, which contains potentially asymmetric subnet groups. Default: false
:param subnet_group_name_tag: Optional tag for subnet group name. If not provided, we'll look at the aws-cdk:subnet-name tag. If the subnet does not have the specified tag, we'll use its type as the name. Default: 'aws-cdk:subnet-name'
"""
self._values: typing.Dict[str, typing.Any] = {
"account": account,
"filter": filter,
"region": region,
}
if return_asymmetric_subnets is not None:
self._values["return_asymmetric_subnets"] = return_asymmetric_subnets
if subnet_group_name_tag is not None:
self._values["subnet_group_name_tag"] = subnet_group_name_tag
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def filter(self) -> typing.Mapping[builtins.str, builtins.str]:
"""Filters to apply to the VPC.
Filter parameters are the same as passed to DescribeVpcs.
:see: https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpcs.html
"""
result = self._values.get("filter")
assert result is not None, "Required property 'filter' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
@builtins.property
def return_asymmetric_subnets(self) -> typing.Optional[builtins.bool]:
"""Whether to populate the subnetGroups field of the {@link VpcContextResponse}, which contains potentially asymmetric subnet groups.
:default: false
"""
result = self._values.get("return_asymmetric_subnets")
return result
@builtins.property
def subnet_group_name_tag(self) -> typing.Optional[builtins.str]:
"""Optional tag for subnet group name.
If not provided, we'll look at the aws-cdk:subnet-name tag.
If the subnet does not have the specified tag,
we'll use its type as the name.
:default: 'aws-cdk:subnet-name'
"""
result = self._values.get("subnet_group_name_tag")
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VpcContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-cdk/cloud-assembly-schema.LoadBalancerContextQuery",
jsii_struct_bases=[LoadBalancerFilter],
name_mapping={
"load_balancer_type": "loadBalancerType",
"load_balancer_arn": "loadBalancerArn",
"load_balancer_tags": "loadBalancerTags",
"account": "account",
"region": "region",
},
)
class LoadBalancerContextQuery(LoadBalancerFilter):
def __init__(
self,
*,
load_balancer_type: LoadBalancerType,
load_balancer_arn: typing.Optional[builtins.str] = None,
load_balancer_tags: typing.Optional[typing.List[Tag]] = None,
account: builtins.str,
region: builtins.str,
) -> None:
"""Query input for looking up a load balancer.
:param load_balancer_type: Filter load balancers by their type.
:param load_balancer_arn: Find by load balancer's ARN. Default: - does not search by load balancer arn
:param load_balancer_tags: Match load balancer tags. Default: - does not match load balancers by tags
:param account: Query account.
:param region: Query region.
"""
self._values: typing.Dict[str, typing.Any] = {
"load_balancer_type": load_balancer_type,
"account": account,
"region": region,
}
if load_balancer_arn is not None:
self._values["load_balancer_arn"] = load_balancer_arn
if load_balancer_tags is not None:
self._values["load_balancer_tags"] = load_balancer_tags
@builtins.property
def load_balancer_type(self) -> LoadBalancerType:
"""Filter load balancers by their type."""
result = self._values.get("load_balancer_type")
assert result is not None, "Required property 'load_balancer_type' is missing"
return result
@builtins.property
def load_balancer_arn(self) -> typing.Optional[builtins.str]:
"""Find by load balancer's ARN.
:default: - does not search by load balancer arn
"""
result = self._values.get("load_balancer_arn")
return result
@builtins.property
def load_balancer_tags(self) -> typing.Optional[typing.List[Tag]]:
"""Match load balancer tags.
:default: - does not match load balancers by tags
"""
result = self._values.get("load_balancer_tags")
return result
@builtins.property
def account(self) -> builtins.str:
"""Query account."""
result = self._values.get("account")
assert result is not None, "Required property 'account' is missing"
return result
@builtins.property
def region(self) -> builtins.str:
"""Query region."""
result = self._values.get("region")
assert result is not None, "Required property 'region' is missing"
return result
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LoadBalancerContextQuery(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
__all__ = [
"AmiContextQuery",
"ArtifactManifest",
"ArtifactMetadataEntryType",
"ArtifactType",
"AssemblyManifest",
"AssetManifest",
"AssetManifestProperties",
"AvailabilityZonesContextQuery",
"AwsCloudFormationStackProperties",
"AwsDestination",
"ContainerImageAssetMetadataEntry",
"ContextProvider",
"DockerImageAsset",
"DockerImageDestination",
"DockerImageSource",
"EndpointServiceAvailabilityZonesContextQuery",
"FileAsset",
"FileAssetMetadataEntry",
"FileAssetPackaging",
"FileDestination",
"FileSource",
"HostedZoneContextQuery",
"LoadBalancerContextQuery",
"LoadBalancerFilter",
"LoadBalancerListenerContextQuery",
"LoadBalancerListenerProtocol",
"LoadBalancerType",
"Manifest",
"MetadataEntry",
"MissingContext",
"NestedCloudAssemblyProperties",
"RuntimeInfo",
"SSMParameterContextQuery",
"SecurityGroupContextQuery",
"Tag",
"TreeArtifactProperties",
"VpcContextQuery",
]
publication.publish()
|
py | 1a44afbe414b9c9d52d7886f85627a01e362f238 | """
WSGI config for testappauto764_dev_23472 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'testappauto764_dev_23472.settings')
application = get_wsgi_application()
|
py | 1a44affaece7cc12d66441f45a1d78f394c167ff | # IMPORTATION STANDARD
# IMPORTATION THIRDPARTY
import requests
import pandas as pd
import pytest
# IMPORTATION INTERNAL
from gamestonk_terminal.stocks.due_diligence import ark_model
@pytest.fixture(scope="module")
def vcr_config():
return {
"filter_headers": [("User-Agent", None)],
"filter_query_parameters": [
("period1", "1598220000"),
("period2", "1635980400"),
],
}
@pytest.mark.default_cassette("test_get_ark_trades_by_ticker_TSLA")
@pytest.mark.vcr
def test_get_ark_trades_by_ticker(recorder):
result_df = ark_model.get_ark_trades_by_ticker(ticker="TSLA")
recorder.capture(result_df)
@pytest.mark.default_cassette("test_get_ark_trades_by_ticker_INVALID_TICKER")
@pytest.mark.vcr
def test_get_ark_trades_by_ticker_invalid_ticker():
result_df = ark_model.get_ark_trades_by_ticker(ticker="INVALID_TICKER")
assert result_df.empty
@pytest.mark.default_cassette("test_get_ark_trades_by_ticker_TSLA")
@pytest.mark.vcr(record_mode="none")
def test_get_ark_trades_by_ticker_invalid_json(mocker):
mocker.patch(
target="json.loads",
new=mocker.Mock(
return_value={
"props": {
"pageProps": [],
}
}
),
)
result_df = ark_model.get_ark_trades_by_ticker(ticker="TSLA")
assert result_df.empty
@pytest.mark.vcr(record_mode="none")
def test_get_ark_trades_by_ticker_invalid_status(mocker):
mock_response = requests.Response()
mock_response.status_code = 400
mocker.patch(
target="requests.get",
new=mocker.Mock(return_value=mock_response),
)
result_df = ark_model.get_ark_trades_by_ticker(ticker="TSLA")
assert result_df.empty
@pytest.mark.default_cassette("test_get_ark_trades_by_ticker_TSLA")
@pytest.mark.vcr(record_mode="none")
def test_get_ark_trades_by_ticker_json_normalize(mocker):
mock_df = pd.DataFrame()
mocker.patch(
target="pandas.json_normalize",
new=mocker.Mock(return_value=mock_df),
)
result_df = ark_model.get_ark_trades_by_ticker(ticker="TSLA")
assert result_df.empty
|
py | 1a44b12c501ba29393c6d43a47212ac55b511feb | # -*- encoding: utf-8 -*-
"""
Programa de Mentoria DSA 2021
"""
from flask import Blueprint
blueprint = Blueprint(
"home_blueprint",
__name__,
url_prefix="",
template_folder="templates",
static_folder="static",
)
|
py | 1a44b1a9162a1a0fd86419f9caad940e8e9e7a0d | # Python libraries
import argparse
import os
# Lib files
import lib.medloaders as medical_loaders
import lib.medzoo as medzoo
import lib.train as train
import lib.utils as utils
from lib.losses3D import DiceLoss
from lib.visual3D_temp import *
os.environ["CUDA_VISIBLE_DEVICES"] = "0,2"
seed = 1777777
def main():
args = get_arguments()
utils.reproducibility(args, seed)
utils.make_dirs(args.save)
training_generator, val_generator, full_volume, affine = medical_loaders.generate_datasets(args,
path='.././datasets')
model, optimizer = medzoo.create_model(args)
criterion = DiceLoss(classes=args.classes)
if args.cuda:
model = model.cuda()
print("Model transferred in GPU.....")
trainer = train.Trainer(args, model, criterion, optimizer, train_data_loader=training_generator,
valid_data_loader=val_generator, lr_scheduler=None)
print("START TRAINING...")
trainer.training()
visualize_3D_no_overlap_new(args, full_volume, affine, model, 10, args.dim)
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--batchSz', type=int, default=2)
parser.add_argument('--dataset_name', type=str, default="mrbrains4")
parser.add_argument('--dim', nargs="+", type=int, default=(64, 64, 32))
parser.add_argument('--nEpochs', type=int, default=1)
parser.add_argument('--classes', type=int, default=4)
parser.add_argument('--samples_train', type=int, default=20)
parser.add_argument('--samples_val', type=int, default=20)
parser.add_argument('--inChannels', type=int, default=3)
parser.add_argument('--inModalities', type=int, default=3)
parser.add_argument('--fold_id', default='1', type=str, help='Select subject for fold validation')
parser.add_argument('--lr', default=1e-3, type=float,
help='learning rate (default: 1e-3)')
parser.add_argument('--cuda', action='store_true', default=False)
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--model', type=str, default='DENSENET3',
choices=('VNET', 'VNET2', 'UNET3D', 'DENSENET1', 'DENSENET2', 'DENSENET3', 'HYPERDENSENET'))
parser.add_argument('--opt', type=str, default='sgd',
choices=('sgd', 'adam', 'rmsprop'))
parser.add_argument('--log_dir', type=str,
default='../runs/')
args = parser.parse_args()
args.save = '/data/hejy/MedicalZooPytorch/saved_models/' + args.model + '_checkpoints/' + args.model + '_{}_{}_'.format(
utils.datestr(), args.dataset_name)
return args
if __name__ == '__main__':
main()
|
py | 1a44b1e3aab90c6cbd68c2fa8e021f3da651fd92 | from mongoengine import *
from enum import Enum
class Effect(Enum):
FREEZE = 0
DOUBLE_TODO = 1
DOUBLE_IDEAS = 2
DOUBLE_24 = 3
class StoreItem(Document):
meta = {'collection': 'store_item'}
name = StringField(required=True)
price = IntField(required=True)
effect = StringField(required=True, default=Effect.FREEZE.name)
|
py | 1a44b252f6a3453e1b82d93c8fd750ce3e0631af | #@+leo-ver=5-thin
#@+node:ekr.20031218072017.3603: * @file leoUndo.py
'''Leo's undo/redo manager.'''
#@+<< How Leo implements unlimited undo >>
#@+node:ekr.20031218072017.2413: ** << How Leo implements unlimited undo >>
#@+at Think of the actions that may be Undone or Redone as a string of beads
# (g.Bunches) containing all information needed to undo _and_ redo an operation.
#
# A bead pointer points to the present bead. Undoing an operation moves the bead
# pointer backwards; redoing an operation moves the bead pointer forwards. The
# bead pointer points in front of the first bead when Undo is disabled. The bead
# pointer points at the last bead when Redo is disabled.
#
# The Undo command uses the present bead to undo the action, then moves the bead
# pointer backwards. The Redo command uses the bead after the present bead to redo
# the action, then moves the bead pointer forwards. The list of beads does not
# branch; all undoable operations (except the Undo and Redo commands themselves)
# delete any beads following the newly created bead.
#
# New in Leo 4.3: User (client) code should call u.beforeX and u.afterX methods to
# create a bead describing the operation that is being performed. (By convention,
# the code sets u = c.undoer for undoable operations.) Most u.beforeX methods
# return 'undoData' that the client code merely passes to the corresponding
# u.afterX method. This data contains the 'before' snapshot. The u.afterX methods
# then create a bead containing both the 'before' and 'after' snapshots.
#
# New in Leo 4.3: u.beforeChangeGroup and u.afterChangeGroup allow multiple calls
# to u.beforeX and u.afterX methods to be treated as a single undoable entry. See
# the code for the Replace All, Sort, Promote and Demote commands for examples.
# u.before/afterChangeGroup substantially reduce the number of u.before/afterX
# methods needed.
#
# New in Leo 4.3: It would be possible for plugins or other code to define their
# own u.before/afterX methods. Indeed, u.afterX merely needs to set the
# bunch.undoHelper and bunch.redoHelper ivars to the methods used to undo and redo
# the operation. See the code for the various u.before/afterX methods for
# guidance.
#
# New in Leo 4.3: p.setDirty and p.setAllAncestorAtFileNodesDirty now return a
# 'dirtyVnodeList' containing all vnodes that became dirty as the result of an
# operation. More than one list may be generated: client code is responsible for
# merging lists using the pattern dirtyVnodeList.extend(dirtyVnodeList2)
#
# I first saw this model of unlimited undo in the documentation for Apple's Yellow Box classes.
#@-<< How Leo implements unlimited undo >>
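# A minimal, self-contained sketch of the bead model described above. It is a
# simplified illustration only (the names here are hypothetical), not part of
# Leo's API; the real Undoer class below stores g.Bunch beads with much richer
# undo/redo information.
class _BeadStackSketch(object):
    '''Toy bead stack: undo and redo simply move the bead pointer.'''
    def __init__(self):
        self.beads = []  # One (undo_fn, redo_fn) pair per undoable operation.
        self.bead = -1   # Index of the present bead; -1 means nothing to undo.
    def push(self, undo_fn, redo_fn):
        # A new operation deletes any beads after the pointer, then appends.
        self.bead += 1
        self.beads[self.bead:] = [(undo_fn, redo_fn)]
    def undo(self):
        if self.bead >= 0:
            undo_fn, _ = self.beads[self.bead]
            undo_fn()
            self.bead -= 1  # Move the pointer backwards.
    def redo(self):
        if self.bead + 1 < len(self.beads):
            self.bead += 1  # Move the pointer forwards.
            _, redo_fn = self.beads[self.bead]
            redo_fn()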
import leo.core.leoGlobals as g
# pylint: disable=unpacking-non-sequence
#@+others
#@+node:ekr.20031218072017.3605: ** class Undoer
class Undoer(object):
"""A class that implements unlimited undo and redo."""
# pylint: disable=not-an-iterable
# pylint: disable=unsubscriptable-object
# So that ivars can be inited to None rather thatn [].
#@+others
#@+node:ekr.20150509193307.1: *3* u.Birth
#@+node:ekr.20031218072017.3606: *4* u.__init__ & reloadSettings
def __init__(self, c):
self.c = c
self.debug_Undoer = False # True: enable debugging code in new undo scheme.
self.debug_print = False # True: enable print statements in debug code.
self.granularity = None # Set in reloadSettings.
self.max_undo_stack_size = c.config.getInt('max_undo_stack_size') or 0
# Statistics comparing old and new ways (only if self.debug_Undoer is on).
self.new_mem = 0
self.old_mem = 0
# State ivars...
self.beads = [] # List of undo nodes.
self.bead = -1 # Index of the present bead: -1:len(beads)
self.undoType = "Can't Undo"
# These must be set here, _not_ in clearUndoState.
self.redoMenuLabel = "Can't Redo"
self.undoMenuLabel = "Can't Undo"
self.realRedoMenuLabel = "Can't Redo"
self.realUndoMenuLabel = "Can't Undo"
self.undoing = False # True if executing an Undo command.
self.redoing = False # True if executing a Redo command.
self.per_node_undo = False # True: v may contain undo_info ivar.
# New in 4.2...
self.optionalIvars = []
# Set the following ivars to keep pylint happy.
self.afterTree = None
self.beforeTree = None
self.children = None
self.deleteMarkedNodesData = None
self.dirtyVnodeList = None
self.followingSibs = None
self.inHead = None
self.kind = None
self.newBack = None
self.newBody = None
self.newChanged = None
self.newChildren = None
self.newHead = None
self.newMarked = None
self.newN = None
self.newP = None
self.newParent = None
self.newParent_v = None
self.newRecentFiles = None
self.newSel = None
self.newTree = None
self.newYScroll = None
self.oldBack = None
self.oldBody = None
self.oldChanged = None
self.oldChildren = None
self.oldHead = None
self.oldMarked = None
self.oldN = None
self.oldParent = None
self.oldParent_v = None
self.oldRecentFiles = None
self.oldSel = None
self.oldTree = None
self.oldYScroll = None
self.pasteAsClone = None
self.prevSel = None
self.sortChildren = None
self.verboseUndoGroup = None
self.reloadSettings()
def reloadSettings(self):
'''Undoer.reloadSettings.'''
c = self.c
self.granularity = c.config.getString('undo_granularity')
if self.granularity:
self.granularity = self.granularity.lower()
if self.granularity not in ('node', 'line', 'word', 'char'):
self.granularity = 'line'
def redoHelper(self):
pass
def undoHelper(self):
pass
#@+node:ekr.20150509193222.1: *4* u.cmd (decorator)
def cmd(name):
'''Command decorator for the Undoer class.'''
# pylint: disable=no-self-argument
return g.new_cmd_decorator(name, ['c', 'undoer', ])
#@+node:ekr.20050416092908.1: *3* u.Internal helpers
#@+node:ekr.20031218072017.3607: *4* u.clearOptionalIvars
def clearOptionalIvars(self):
u = self
u.p = None # The position/node being operated upon for undo and redo.
for ivar in u.optionalIvars:
setattr(u, ivar, None)
#@+node:ekr.20060127052111.1: *4* u.cutStack
def cutStack(self):
u = self; n = u.max_undo_stack_size
if n > 0 and u.bead >= n and not g.app.unitTesting:
# Do nothing if we are in the middle of creating a group.
i = len(u.beads) - 1
while i >= 0:
bunch = u.beads[i]
if hasattr(bunch, 'kind') and bunch.kind == 'beforeGroup':
return
i -= 1
            # This works regardless of how many items appear after bead n.
# g.trace('Cutting undo stack to %d entries' % (n))
u.beads = u.beads[-n:]
u.bead = n - 1
#@+node:ekr.20080623083646.10: *4* u.dumpBead
def dumpBead(self, n):
u = self
if n < 0 or n >= len(u.beads):
            return 'no bead: n = %s' % n
# bunch = u.beads[n]
result = []
result.append('-' * 10)
result.append('len(u.beads): %s, n: %s' % (len(u.beads), n))
for ivar in ('kind', 'newP', 'newN', 'p', 'oldN', 'undoHelper'):
result.append('%s = %s' % (ivar, getattr(self, ivar)))
return '\n'.join(result)
def dumpTopBead(self):
u = self
n = len(u.beads)
if n > 0:
return self.dumpBead(n - 1)
else:
return '<no top bead>'
#@+node:EKR.20040526150818: *4* u.getBead
def getBead(self, n):
'''Set Undoer ivars from the bunch at the top of the undo stack.'''
u = self
if n < 0 or n >= len(u.beads):
return None
bunch = u.beads[n]
self.setIvarsFromBunch(bunch)
return bunch
#@+node:EKR.20040526150818.1: *4* u.peekBead
def peekBead(self, n):
u = self
if n < 0 or n >= len(u.beads):
return None
else:
return u.beads[n]
#@+node:ekr.20060127113243: *4* u.pushBead
def pushBead(self, bunch):
u = self
# New in 4.4b2: Add this to the group if it is being accumulated.
bunch2 = u.bead >= 0 and u.bead < len(u.beads) and u.beads[u.bead]
if bunch2 and hasattr(bunch2, 'kind') and bunch2.kind == 'beforeGroup':
# Just append the new bunch the group's items.
bunch2.items.append(bunch)
else:
# Push the bunch.
u.bead += 1
u.beads[u.bead:] = [bunch]
# Recalculate the menu labels.
u.setUndoTypes()
#@+node:ekr.20050126081529: *4* u.recognizeStartOfTypingWord
def recognizeStartOfTypingWord(self,
old_lines, old_row, old_col, old_ch,
new_lines, new_row, new_col, new_ch,
prev_row, prev_col
):
''' A potentially user-modifiable method that should return True if the
typing indicated by the params starts a new 'word' for the purposes of
undo with 'word' granularity.
u.setUndoTypingParams calls this method only when the typing could possibly
continue a previous word. In other words, undo will work safely regardless
of the value returned here.
old_ch is the char at the given (Tk) row, col of old_lines.
new_ch is the char at the given (Tk) row, col of new_lines.
The present code uses only old_ch and new_ch. The other arguments are given
for use by more sophisticated algorithms.'''
# Start a word if new_ch begins whitespace + word
new_word_started = not old_ch.isspace() and new_ch.isspace()
# Start a word if the cursor has been moved since the last change
moved_cursor = new_row != prev_row or new_col != prev_col + 1
return new_word_started or moved_cursor
#@+node:ekr.20031218072017.3613: *4* u.redoMenuName, undoMenuName
def redoMenuName(self, name):
if name == "Can't Redo":
return name
else:
return "Redo " + name
def undoMenuName(self, name):
if name == "Can't Undo":
return name
else:
return "Undo " + name
#@+node:ekr.20060127070008: *4* u.setIvarsFromBunch
def setIvarsFromBunch(self, bunch):
u = self
u.clearOptionalIvars()
if 0: # Debugging.
g.pr('-' * 40)
for key in sorted(bunch):
g.trace(key, bunch.get(key))
g.pr('-' * 20)
# bunch is not a dict, so bunch.keys() is required.
for key in list(bunch.keys()):
val = bunch.get(key)
setattr(u, key, val)
if key not in u.optionalIvars:
u.optionalIvars.append(key)
#@+node:ekr.20031218072017.3614: *4* u.setRedoType
# These routines update both the ivar and the menu label.
def setRedoType(self, theType):
u = self; frame = u.c.frame
if not g.isString(theType):
g.trace('oops: expected string for command, got %s' % repr(theType))
g.trace(g.callers())
theType = '<unknown>'
menu = frame.menu.getMenu("Edit")
name = u.redoMenuName(theType)
if name != u.redoMenuLabel:
# Update menu using old name.
realLabel = frame.menu.getRealMenuName(name)
if realLabel == name:
underline = -1 if g.match(name, 0, "Can't") else 0
else:
underline = realLabel.find("&")
realLabel = realLabel.replace("&", "")
frame.menu.setMenuLabel(menu, u.realRedoMenuLabel, realLabel, underline=underline)
u.redoMenuLabel = name
u.realRedoMenuLabel = realLabel
#@+node:ekr.20091221145433.6381: *4* u.setUndoType
def setUndoType(self, theType):
u = self; frame = u.c.frame
if not g.isString(theType):
g.trace('oops: expected string for command, got %s' % repr(theType))
g.trace(g.callers())
theType = '<unknown>'
menu = frame.menu.getMenu("Edit")
name = u.undoMenuName(theType)
if name != u.undoMenuLabel:
# Update menu using old name.
realLabel = frame.menu.getRealMenuName(name)
if realLabel == name:
underline = -1 if g.match(name, 0, "Can't") else 0
else:
underline = realLabel.find("&")
realLabel = realLabel.replace("&", "")
frame.menu.setMenuLabel(menu, u.realUndoMenuLabel, realLabel, underline=underline)
u.undoType = theType
u.undoMenuLabel = name
u.realUndoMenuLabel = realLabel
#@+node:ekr.20031218072017.3616: *4* u.setUndoTypes
def setUndoTypes(self):
u = self
# Set the undo type and undo menu label.
bunch = u.peekBead(u.bead)
if bunch:
u.setUndoType(bunch.undoType)
else:
u.setUndoType("Can't Undo")
# Set only the redo menu label.
bunch = u.peekBead(u.bead + 1)
if bunch:
u.setRedoType(bunch.undoType)
else:
u.setRedoType("Can't Redo")
u.cutStack()
#@+node:EKR.20040530121329: *4* u.restoreTree & helpers
def restoreTree(self, treeInfo):
"""Use the tree info to restore all VNode data,
including all links."""
u = self
# This effectively relinks all vnodes.
for v, vInfo, tInfo in treeInfo:
u.restoreVnodeUndoInfo(vInfo)
u.restoreTnodeUndoInfo(tInfo)
#@+node:ekr.20050415170737.2: *5* u.restoreVnodeUndoInfo
def restoreVnodeUndoInfo(self, bunch):
"""Restore all ivars saved in the bunch."""
v = bunch.v
v.statusBits = bunch.statusBits
v.children = bunch.children
v.parents = bunch.parents
uA = bunch.get('unknownAttributes')
if uA is not None:
v.unknownAttributes = uA
v._p_changed = 1
#@+node:ekr.20050415170812.2: *5* u.restoreTnodeUndoInfo
def restoreTnodeUndoInfo(self, bunch):
v = bunch.v
v.h = bunch.headString
v.b = bunch.bodyString
v.statusBits = bunch.statusBits
uA = bunch.get('unknownAttributes')
if uA is not None:
v.unknownAttributes = uA
v._p_changed = 1
#@+node:EKR.20040528075307: *4* u.saveTree & helpers
def saveTree(self, p, treeInfo=None):
"""Return a list of tuples with all info needed to handle a general undo operation."""
# WARNING: read this before doing anything "clever"
#@+<< about u.saveTree >>
#@+node:EKR.20040530114124: *5* << about u.saveTree >>
#@+at The old code made a free-standing copy of the tree using v.copy and
# t.copy. This looks "elegant" and is WRONG. The problem is that it can
# not handle clones properly, especially when some clones were in the
# "undo" tree and some were not. Moreover, it required complex
# adjustments to t.vnodeLists.
#
# Instead of creating new nodes, the new code creates all information
# needed to properly restore the vnodes and tnodes. It creates a list of
        # tuples, one tuple for each VNode in the tree. Each tuple has the form,
        #
        # (v, vnodeInfo, tnodeInfo)
        #
        # where vnodeInfo and tnodeInfo are bunches containing all info needed to
        # recreate the nodes. The u.createVnodeUndoInfo and u.createTnodeUndoInfo
# methods correspond to the old v.copy and t.copy methods.
#
# Aside: Prior to 4.2 Leo used a scheme that was equivalent to the
# createUndoInfoDict info, but quite a bit uglier.
#@-<< about u.saveTree >>
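        # Illustrative sketch (hypothetical values, not from the code): for a
        # two-node tree the returned treeInfo looks roughly like
        #   [(v1, g.Bunch(v=v1, statusBits=..., parents=[...], children=[...]),
        #         g.Bunch(v=v1, headString='h1', bodyString='b1', statusBits=...)),
        #    (v2, ...)]
        # so restoreTree can relink every VNode in place instead of copying nodes.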
u = self; topLevel = (treeInfo is None)
if topLevel: treeInfo = []
# Add info for p.v. Duplicate tnode info is harmless.
data = (p.v, u.createVnodeUndoInfo(p.v), u.createTnodeUndoInfo(p.v))
treeInfo.append(data)
# Recursively add info for the subtree.
child = p.firstChild()
while child:
self.saveTree(child, treeInfo)
child = child.next()
return treeInfo
#@+node:ekr.20050415170737.1: *5* u.createVnodeUndoInfo
def createVnodeUndoInfo(self, v):
"""Create a bunch containing all info needed to recreate a VNode for undo."""
bunch = g.Bunch(
v=v,
statusBits=v.statusBits,
parents=v.parents[:],
children=v.children[:],
)
if hasattr(v, 'unknownAttributes'):
bunch.unknownAttributes = v.unknownAttributes
return bunch
#@+node:ekr.20050415170812.1: *5* u.createTnodeUndoInfo
def createTnodeUndoInfo(self, v):
"""Create a bunch containing all info needed to recreate a VNode."""
bunch = g.Bunch(
v=v,
headString=v.h,
bodyString=v.b,
statusBits=v.statusBits,
)
if hasattr(v, 'unknownAttributes'):
bunch.unknownAttributes = v.unknownAttributes
return bunch
#@+node:ekr.20050525151449: *4* u.trace
def trace(self):
ivars = ('kind', 'undoType')
for ivar in ivars:
g.pr(ivar, getattr(self, ivar))
#@+node:ekr.20050410095424: *4* u.updateMarks
def updateMarks(self, oldOrNew):
'''Update dirty and marked bits.'''
u = self; c = u.c
if oldOrNew not in ('new', 'old'):
g.trace("can't happen")
return
isOld = oldOrNew == 'old'
marked = u.oldMarked if isOld else u.newMarked
if marked: c.setMarked(u.p)
else: c.clearMarked(u.p)
# Bug fix: Leo 4.4.6: Undo/redo always set changed/dirty bits
# because the file may have been saved.
u.p.setDirty(setDescendentsDirty=False)
u.p.setAllAncestorAtFileNodesDirty(setDescendentsDirty=False) # Bug fix: Leo 4.4.6
u.c.setChanged(True)
#@+node:ekr.20031218072017.3608: *3* u.Externally visible entries
#@+node:ekr.20050318085432.4: *4* u.afterX...
#@+node:ekr.20050315134017.4: *5* u.afterChangeGroup
def afterChangeGroup(self, p, undoType, reportFlag=False, dirtyVnodeList=None):
'''Create an undo node for general tree operations using d created by beforeChangeGroup'''
u = self; c = self.c
w = c.frame.body.wrapper
if u.redoing or u.undoing:
return
if dirtyVnodeList is None: dirtyVnodeList = []
        if not u.beads:
            g.trace('oops: empty undo stack.')
            return
        bunch = u.beads[u.bead]
if bunch.kind == 'beforeGroup':
bunch.kind = 'afterGroup'
else:
g.trace('oops: expecting beforeGroup, got %s' % bunch.kind)
# Set the types & helpers.
bunch.kind = 'afterGroup'
bunch.undoType = undoType
# Set helper only for undo:
# The bead pointer will point to an 'beforeGroup' bead for redo.
bunch.undoHelper = u.undoGroup
bunch.redoHelper = u.redoGroup
bunch.dirtyVnodeList = dirtyVnodeList
bunch.newP = p.copy()
bunch.newSel = w.getSelectionRange()
# Tells whether to report the number of separate changes undone/redone.
bunch.reportFlag = reportFlag
if 0:
# Push the bunch.
u.bead += 1
u.beads[u.bead:] = [bunch]
# Recalculate the menu labels.
u.setUndoTypes()
#@+node:ekr.20050315134017.2: *5* u.afterChangeNodeContents
def afterChangeNodeContents(self, p, command, bunch, dirtyVnodeList=None, inHead=False):
'''Create an undo node using d created by beforeChangeNode.'''
u = self; c = self.c; w = c.frame.body.wrapper
if u.redoing or u.undoing:
return
if dirtyVnodeList is None: dirtyVnodeList = []
# Set the type & helpers.
bunch.kind = 'node'
bunch.undoType = command
bunch.undoHelper = u.undoNodeContents
bunch.redoHelper = u.redoNodeContents
bunch.dirtyVnodeList = dirtyVnodeList
bunch.inHead = inHead # 2013/08/26
bunch.newBody = p.b
bunch.newChanged = u.c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newHead = p.h
bunch.newMarked = p.isMarked()
# Bug fix 2017/11/12: don't use ternary operator.
if w:
bunch.newSel = w.getSelectionRange()
else:
bunch.newSel = 0, 0
bunch.newYScroll = w.getYScrollPosition() if w else 0
u.pushBead(bunch)
#@+node:ekr.20050315134017.3: *5* u.afterChangeTree
def afterChangeTree(self, p, command, bunch):
'''Create an undo node for general tree operations using d created by beforeChangeTree'''
u = self; c = self.c; w = c.frame.body.wrapper
if u.redoing or u.undoing: return
# Set the types & helpers.
bunch.kind = 'tree'
bunch.undoType = command
bunch.undoHelper = u.undoTree
bunch.redoHelper = u.redoTree
# Set by beforeChangeTree: changed, oldSel, oldText, oldTree, p
bunch.newSel = w.getSelectionRange()
bunch.newText = w.getAllText()
bunch.newTree = u.saveTree(p)
u.pushBead(bunch)
#@+node:ekr.20050424161505: *5* u.afterClearRecentFiles
def afterClearRecentFiles(self, bunch):
u = self
bunch.newRecentFiles = g.app.config.recentFiles[:]
bunch.undoType = 'Clear Recent Files'
bunch.undoHelper = u.undoClearRecentFiles
bunch.redoHelper = u.redoClearRecentFiles
u.pushBead(bunch)
return bunch
#@+node:ekr.20111006060936.15639: *5* u.afterCloneMarkedNodes
def afterCloneMarkedNodes(self, p):
u = self; c = u.c
if u.redoing or u.undoing:
return
bunch = u.createCommonBunch(p)
# Sets
# oldChanged = c.isChanged(),
# oldDirty = p.isDirty(),
# oldMarked = p.isMarked(),
# oldSel = w and w.getSelectionRange() or None,
# p = p.copy(),
# Set types & helpers
bunch.kind = 'clone-marked-nodes'
bunch.undoType = 'clone-marked-nodes'
# Set helpers
bunch.undoHelper = u.undoCloneMarkedNodes
bunch.redoHelper = u.redoCloneMarkedNodes
bunch.newP = p.next()
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
u.pushBead(bunch)
#@+node:ekr.20160502175451.1: *5* u.afterCopyMarkedNodes
def afterCopyMarkedNodes(self, p):
u = self; c = u.c
if u.redoing or u.undoing:
return
bunch = u.createCommonBunch(p)
# Sets
# oldChanged = c.isChanged(),
# oldDirty = p.isDirty(),
# oldMarked = p.isMarked(),
# oldSel = w and w.getSelectionRange() or None,
# p = p.copy(),
# Set types & helpers
bunch.kind = 'copy-marked-nodes'
bunch.undoType = 'copy-marked-nodes'
# Set helpers
bunch.undoHelper = u.undoCopyMarkedNodes
bunch.redoHelper = u.redoCopyMarkedNodes
bunch.newP = p.next()
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
u.pushBead(bunch)
#@+node:ekr.20050411193627.5: *5* u.afterCloneNode
def afterCloneNode(self, p, command, bunch, dirtyVnodeList=None):
u = self; c = u.c
if u.redoing or u.undoing: return
if dirtyVnodeList is None: dirtyVnodeList = []
# Set types & helpers
bunch.kind = 'clone'
bunch.undoType = command
# Set helpers
bunch.undoHelper = u.undoCloneNode
bunch.redoHelper = u.redoCloneNode
bunch.newBack = p.back() # 6/15/05
bunch.newParent = p.parent() # 6/15/05
bunch.newP = p.copy()
bunch.dirtyVnodeList = dirtyVnodeList
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
u.pushBead(bunch)
#@+node:ekr.20050411193627.6: *5* u.afterDehoist
def afterDehoist(self, p, command):
u = self
if u.redoing or u.undoing: return
bunch = u.createCommonBunch(p)
# Set types & helpers
bunch.kind = 'dehoist'
bunch.undoType = command
# Set helpers
bunch.undoHelper = u.undoDehoistNode
bunch.redoHelper = u.redoDehoistNode
u.pushBead(bunch)
#@+node:ekr.20050411193627.8: *5* u.afterDeleteNode
def afterDeleteNode(self, p, command, bunch, dirtyVnodeList=None):
u = self; c = u.c
if u.redoing or u.undoing:
return
if dirtyVnodeList is None: dirtyVnodeList = []
# Set types & helpers
bunch.kind = 'delete'
bunch.undoType = command
# Set helpers
bunch.undoHelper = u.undoDeleteNode
bunch.redoHelper = u.redoDeleteNode
bunch.newP = p.copy()
bunch.dirtyVnodeList = dirtyVnodeList
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
u.pushBead(bunch)
#@+node:ekr.20111005152227.15555: *5* u.afterDeleteMarkedNodes
def afterDeleteMarkedNodes(self, data, p):
u = self; c = u.c
if u.redoing or u.undoing: return
bunch = u.createCommonBunch(p)
# Set types & helpers
bunch.kind = 'delete-marked-nodes'
bunch.undoType = 'delete-marked-nodes'
# Set helpers
bunch.undoHelper = u.undoDeleteMarkedNodes
bunch.redoHelper = u.redoDeleteMarkedNodes
bunch.newP = p.copy()
bunch.deleteMarkedNodesData = data
# bunch.dirtyVnodeList = dirtyVnodeList
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
u.pushBead(bunch)
#@+node:ekr.20080425060424.8: *5* u.afterDemote
def afterDemote(self, p, followingSibs, dirtyVnodeList):
'''Create an undo node for demote operations.'''
u = self
bunch = u.createCommonBunch(p)
# Set types.
bunch.kind = 'demote'
bunch.undoType = 'Demote'
bunch.undoHelper = u.undoDemote
bunch.redoHelper = u.redoDemote
bunch.followingSibs = followingSibs
# Push the bunch.
u.bead += 1
u.beads[u.bead:] = [bunch]
# Recalculate the menu labels.
u.setUndoTypes()
#@+node:ekr.20050411193627.7: *5* u.afterHoist
def afterHoist(self, p, command):
u = self
if u.redoing or u.undoing:
return
bunch = u.createCommonBunch(p)
# Set types & helpers
bunch.kind = 'hoist'
bunch.undoType = command
# Set helpers
bunch.undoHelper = u.undoHoistNode
bunch.redoHelper = u.redoHoistNode
u.pushBead(bunch)
#@+node:ekr.20050411193627.9: *5* u.afterInsertNode
def afterInsertNode(self, p, command, bunch, dirtyVnodeList=None):
u = self; c = u.c
if u.redoing or u.undoing:
return
if dirtyVnodeList is None: dirtyVnodeList = []
# Set types & helpers
bunch.kind = 'insert'
bunch.undoType = command
# Set helpers
bunch.undoHelper = u.undoInsertNode
bunch.redoHelper = u.redoInsertNode
bunch.newP = p.copy()
bunch.dirtyVnodeList = dirtyVnodeList
bunch.newBack = p.back()
bunch.newParent = p.parent()
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
if bunch.pasteAsClone:
beforeTree = bunch.beforeTree
afterTree = []
for bunch2 in beforeTree:
v = bunch2.v
afterTree.append(
g.Bunch(v=v, head=v.h[:], body=v.b[:]))
bunch.afterTree = afterTree
u.pushBead(bunch)
#@+node:ekr.20050526124257: *5* u.afterMark
def afterMark(self, p, command, bunch, dirtyVnodeList=None):
'''Create an undo node for mark and unmark commands.'''
# 'command' unused, but present for compatibility with similar methods.
u = self
if u.redoing or u.undoing: return
if dirtyVnodeList is None: dirtyVnodeList = []
# Set the type & helpers.
bunch.undoHelper = u.undoMark
bunch.redoHelper = u.redoMark
bunch.dirtyVnodeList = dirtyVnodeList
bunch.newChanged = u.c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
u.pushBead(bunch)
#@+node:ekr.20050410110343: *5* u.afterMoveNode
def afterMoveNode(self, p, command, bunch, dirtyVnodeList=None):
u = self; c = u.c
if u.redoing or u.undoing: return
if dirtyVnodeList is None: dirtyVnodeList = []
# Set the types & helpers.
bunch.kind = 'move'
bunch.undoType = command
# Set helper only for undo:
# The bead pointer will point to an 'beforeGroup' bead for redo.
bunch.undoHelper = u.undoMove
bunch.redoHelper = u.redoMove
bunch.dirtyVnodeList = dirtyVnodeList
bunch.newChanged = c.isChanged()
bunch.newDirty = p.isDirty()
bunch.newMarked = p.isMarked()
bunch.newN = p.childIndex()
bunch.newParent_v = p._parentVnode()
bunch.newP = p.copy()
u.pushBead(bunch)
#@+node:ekr.20080425060424.12: *5* u.afterPromote
def afterPromote(self, p, children, dirtyVnodeList):
'''Create an undo node for demote operations.'''
u = self
bunch = u.createCommonBunch(p)
# Set types.
bunch.kind = 'promote'
bunch.undoType = 'Promote'
bunch.undoHelper = u.undoPromote
bunch.redoHelper = u.redoPromote
bunch.children = children
# Push the bunch.
u.bead += 1
u.beads[u.bead:] = [bunch]
# Recalculate the menu labels.
u.setUndoTypes()
#@+node:ekr.20080425060424.2: *5* u.afterSort
def afterSort(self, p, bunch, dirtyVnodeList):
'''Create an undo node for sort operations'''
u = self
# c = self.c
if u.redoing or u.undoing: return
bunch.dirtyVnodeList = dirtyVnodeList
# Recalculate the menu labels.
u.setUndoTypes()
#@+node:ekr.20050318085432.3: *4* u.beforeX...
#@+node:ekr.20050315134017.7: *5* u.beforeChangeGroup
def beforeChangeGroup(self, p, command, verboseUndoGroup=True):
'''Prepare to undo a group of undoable operations.'''
u = self
bunch = u.createCommonBunch(p)
# Set types.
bunch.kind = 'beforeGroup'
bunch.undoType = command
bunch.verboseUndoGroup = verboseUndoGroup
# Set helper only for redo:
# The bead pointer will point to an 'afterGroup' bead for undo.
bunch.undoHelper = u.undoGroup
bunch.redoHelper = u.redoGroup
bunch.items = []
# Push the bunch.
u.bead += 1
u.beads[u.bead:] = [bunch]
#@+node:ekr.20050315133212.2: *5* u.beforeChangeNodeContents
def beforeChangeNodeContents(self, p, oldBody=None, oldHead=None, oldYScroll=None):
'''Return data that gets passed to afterChangeNode'''
u = self
bunch = u.createCommonBunch(p)
bunch.oldBody = oldBody or p.b
bunch.oldHead = oldHead or p.h
bunch.oldYScroll = oldYScroll
return bunch
#@+node:ekr.20050315134017.6: *5* u.beforeChangeTree
def beforeChangeTree(self, p):
u = self; c = u.c
w = c.frame.body.wrapper
bunch = u.createCommonBunch(p)
bunch.oldSel = w.getSelectionRange()
bunch.oldText = w.getAllText()
bunch.oldTree = u.saveTree(p)
return bunch
#@+node:ekr.20050424161505.1: *5* u.beforeClearRecentFiles
def beforeClearRecentFiles(self):
u = self; p = u.c.p
bunch = u.createCommonBunch(p)
bunch.oldRecentFiles = g.app.config.recentFiles[:]
return bunch
#@+node:ekr.20050412080354: *5* u.beforeCloneNode
def beforeCloneNode(self, p):
u = self
bunch = u.createCommonBunch(p)
return bunch
#@+node:ekr.20050411193627.3: *5* u.beforeDeleteNode
def beforeDeleteNode(self, p):
u = self
bunch = u.createCommonBunch(p)
bunch.oldBack = p.back()
bunch.oldParent = p.parent()
return bunch
#@+node:ekr.20050411193627.4: *5* u.beforeInsertNode
def beforeInsertNode(self, p, pasteAsClone=False, copiedBunchList=None):
u = self
if copiedBunchList is None: copiedBunchList = []
bunch = u.createCommonBunch(p)
bunch.pasteAsClone = pasteAsClone
if pasteAsClone:
            # Save the list of bunches.
bunch.beforeTree = copiedBunchList
return bunch
#@+node:ekr.20050526131252: *5* u.beforeMark
def beforeMark(self, p, command):
u = self
bunch = u.createCommonBunch(p)
bunch.kind = 'mark'
bunch.undoType = command
return bunch
#@+node:ekr.20050410110215: *5* u.beforeMoveNode
def beforeMoveNode(self, p):
u = self
bunch = u.createCommonBunch(p)
bunch.oldN = p.childIndex()
bunch.oldParent_v = p._parentVnode()
return bunch
#@+node:ekr.20080425060424.3: *5* u.beforeSort
def beforeSort(self, p, undoType, oldChildren, newChildren, sortChildren):
'''Create an undo node for sort operations.'''
u = self
bunch = u.createCommonBunch(p)
# Set types.
bunch.kind = 'sort'
bunch.undoType = undoType
bunch.undoHelper = u.undoSort
bunch.redoHelper = u.redoSort
bunch.oldChildren = oldChildren
bunch.newChildren = newChildren
bunch.sortChildren = sortChildren # A bool
# Push the bunch.
u.bead += 1
u.beads[u.bead:] = [bunch]
return bunch
#@+node:ekr.20050318085432.2: *5* u.createCommonBunch
def createCommonBunch(self, p):
'''Return a bunch containing all common undo info.
This is mostly the info for recreating an empty node at position p.'''
u = self; c = u.c; w = c.frame.body.wrapper
return g.Bunch(
oldChanged=c.isChanged(),
oldDirty=p and p.isDirty(),
oldMarked=p and p.isMarked(),
oldSel=w and w.getSelectionRange() or None,
p=p and p.copy(),
)
#@+node:ekr.20031218072017.3610: *4* u.canRedo & canUndo
# Translation does not affect these routines.
def canRedo(self):
u = self
return u.redoMenuLabel != "Can't Redo"
def canUndo(self):
u = self
return u.undoMenuLabel != "Can't Undo"
#@+node:ekr.20031218072017.3609: *4* u.clearUndoState
def clearUndoState(self):
"""Clears then entire Undo state.
All non-undoable commands should call this method."""
u = self
u.clearOptionalIvars() # Do this first.
u.setRedoType("Can't Redo")
u.setUndoType("Can't Undo")
u.beads = [] # List of undo nodes.
u.bead = -1 # Index of the present bead: -1:len(beads)
#@+node:ekr.20031218072017.3611: *4* u.enableMenuItems
def enableMenuItems(self):
u = self; frame = u.c.frame
menu = frame.menu.getMenu("Edit")
if menu:
frame.menu.enableMenu(menu, u.redoMenuLabel, u.canRedo())
frame.menu.enableMenu(menu, u.undoMenuLabel, u.canUndo())
#@+node:ekr.20110519074734.6094: *4* u.onSelect & helpers
def onSelect(self, old_p, p):
u = self
if u.per_node_undo:
if old_p and u.beads:
u.putIvarsToVnode(old_p)
u.setIvarsFromVnode(p)
u.setUndoTypes()
#@+node:ekr.20110519074734.6096: *5* u.putIvarsToVnode
def putIvarsToVnode(self, p):
u = self; v = p.v
assert self.per_node_undo
bunch = g.bunch()
for key in self.optionalIvars:
bunch[key] = getattr(u, key)
# Put these ivars by hand.
for key in ('bead', 'beads', 'undoType',):
bunch[key] = getattr(u, key)
v.undo_info = bunch
#@+node:ekr.20110519074734.6095: *5* u.setIvarsFromVnode
def setIvarsFromVnode(self, p):
u = self; v = p.v
assert self.per_node_undo
u.clearUndoState()
if hasattr(v, 'undo_info'):
u.setIvarsFromBunch(v.undo_info)
#@+node:ekr.20031218072017.1490: *4* u.setUndoTypingParams
def setUndoTypingParams(self, p, undo_type, oldText, newText,
oldSel=None, newSel=None, oldYview=None,
):
'''
Save enough information to undo or redo typing operation.
Do nothing when called from the undo/redo logic because the Undo
and Redo commands merely reset the bead pointer.
'''
u = self; c = u.c
#@+<< return if there is nothing to do >>
#@+node:ekr.20040324061854: *5* << return if there is nothing to do >>
if u.redoing or u.undoing:
return None
if undo_type is None:
return None
if undo_type == "Can't Undo":
u.clearUndoState()
u.setUndoTypes() # Must still recalculate the menu labels.
return None
if oldText == newText:
u.setUndoTypes() # Must still recalculate the menu labels.
return None
#@-<< return if there is nothing to do >>
#@+<< init the undo params >>
#@+node:ekr.20040324061854.1: *5* << init the undo params >>
# Clear all optional params.
# for ivar in u.optionalIvars:
# setattr(u,ivar,None)
u.clearOptionalIvars()
# Set the params.
u.undoType = undo_type
u.p = p.copy()
#@-<< init the undo params >>
#@+<< compute leading, middle & trailing lines >>
#@+node:ekr.20031218072017.1491: *5* << compute leading, middle & trailing lines >>
#@+at Incremental undo typing is similar to incremental syntax coloring. We compute
# the number of leading and trailing lines that match, and save both the old and
# new middle lines. NB: the number of old and new middle lines may be different.
#@@c
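        # Worked example (hypothetical strings, not from the source): with
        # oldText = 'a\nb\nc\n' and newText = 'a\nx\nc\n' the loops below give
        # leading = 1, trailing = 2 (the 'c' line and the final empty line match),
        # old_middle_lines = ['b'] and new_middle_lines = ['x'].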
old_lines = oldText.split('\n')
new_lines = newText.split('\n')
new_len = len(new_lines)
old_len = len(old_lines)
min_len = min(old_len, new_len)
i = 0
while i < min_len:
if old_lines[i] != new_lines[i]:
break
i += 1
leading = i
if leading == new_len:
# This happens when we remove lines from the end.
# The new text is simply the leading lines from the old text.
trailing = 0
else:
i = 0
while i < min_len - leading:
if old_lines[old_len - i - 1] != new_lines[new_len - i - 1]:
break
i += 1
trailing = i
# NB: the number of old and new middle lines may be different.
if trailing == 0:
old_middle_lines = old_lines[leading:]
new_middle_lines = new_lines[leading:]
else:
old_middle_lines = old_lines[leading: -trailing]
new_middle_lines = new_lines[leading: -trailing]
# Remember how many trailing newlines in the old and new text.
i = len(oldText) - 1; old_newlines = 0
while i >= 0 and oldText[i] == '\n':
old_newlines += 1
i -= 1
i = len(newText) - 1; new_newlines = 0
while i >= 0 and newText[i] == '\n':
new_newlines += 1
i -= 1
#@-<< compute leading, middle & trailing lines >>
#@+<< save undo text info >>
#@+node:ekr.20031218072017.1492: *5* << save undo text info >>
#@+at This is the start of the incremental undo algorithm.
#
# We must save enough info to do _both_ of the following:
#
# Undo: Given newText, recreate oldText.
        # Redo: Given oldText, recreate newText.
#
# The "given" texts for the undo and redo routines are simply p.b.
#@@c
if u.debug_Undoer:
# Remember the complete text for comparisons...
u.oldText = oldText
u.newText = newText
# Compute statistics comparing old and new ways...
# The old doesn't often store the old text, so don't count it here.
u.old_mem += len(newText)
s1 = '\n'.join(old_middle_lines)
s2 = '\n'.join(new_middle_lines)
u.new_mem += len(s1) + len(s2)
else:
u.oldText = None
u.newText = None
u.leading = leading
u.trailing = trailing
u.oldMiddleLines = old_middle_lines
u.newMiddleLines = new_middle_lines
u.oldNewlines = old_newlines
u.newNewlines = new_newlines
#@-<< save undo text info >>
#@+<< save the selection and scrolling position >>
#@+node:ekr.20040324061854.2: *5* << save the selection and scrolling position >>
# Remember the selection.
u.oldSel = oldSel
u.newSel = newSel
# Remember the scrolling position.
if oldYview:
u.yview = oldYview
else:
u.yview = c.frame.body.wrapper.getYScrollPosition()
#@-<< save the selection and scrolling position >>
#@+<< adjust the undo stack, clearing all forward entries >>
#@+node:ekr.20040324061854.3: *5* << adjust the undo stack, clearing all forward entries >>
#@+at New in Leo 4.3. Instead of creating a new bead on every character, we
# may adjust the top bead:
#
# word granularity: adjust the top bead if the typing would continue the word.
# line granularity: adjust the top bead if the typing is on the same line.
# node granularity: adjust the top bead if the typing is anywhere on the same node.
#@@c
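        # Rough illustration (hypothetical): with 'word' granularity, typing the
        # five characters of 'hello' in one place keeps adjusting the same top
        # bead, so a single undo removes the whole word; with 'char' granularity
        # every keystroke would push its own bead instead.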
granularity = u.granularity
old_d = u.peekBead(u.bead)
old_p = old_d and old_d.get('p')
#@+<< set newBead if we can't share the previous bead >>
#@+node:ekr.20050125220613: *6* << set newBead if we can't share the previous bead >>
#@+at We must set newBead to True if undo_type is not 'Typing' so that commands that
# get treated like typing (by updateBodyPane and onBodyChanged) don't get lumped
# with 'real' typing.
#@@c
if (
not old_d or not old_p or
old_p.v != p.v or
old_d.get('kind') != 'typing' or
old_d.get('undoType') != 'Typing' or
undo_type != 'Typing'
):
newBead = True # We can't share the previous node.
elif granularity == 'char':
newBead = True # This was the old way.
elif granularity == 'node':
newBead = False # Always replace previous bead.
else:
assert granularity in ('line', 'word')
# Replace the previous bead if only the middle lines have changed.
newBead = (
old_d.get('leading', 0) != u.leading or
old_d.get('trailing', 0) != u.trailing
)
if granularity == 'word' and not newBead:
# Protect the method that may be changed by the user
try:
#@+<< set newBead if the change does not continue a word >>
#@+node:ekr.20050125203937: *7* << set newBead if the change does not continue a word >>
# Fix #653: undoer problem: be wary of the ternary operator here.
old_start = old_end = new_start = new_end = 0
if oldSel:
old_start, old_end = oldSel
if newSel:
new_start, new_end = newSel
prev_start, prev_end = u.prevSel
if old_start != old_end or new_start != new_end:
# The new and old characters are not contiguous.
newBead = True
else:
# 2011/04/01: Patch by Sam Hartsfield
old_row, old_col = g.convertPythonIndexToRowCol(oldText, old_start)
new_row, new_col = g.convertPythonIndexToRowCol(newText, new_start)
prev_row, prev_col = g.convertPythonIndexToRowCol(oldText, prev_start)
old_lines = g.splitLines(oldText)
new_lines = g.splitLines(newText)
# Recognize backspace, del, etc. as contiguous.
if old_row != new_row or abs(old_col - new_col) != 1:
# The new and old characters are not contiguous.
newBead = True
elif old_col == 0 or new_col == 0:
# py-lint: disable=W0511
# W0511:1362: TODO
                        # TODO: this is not always true; we might simply have entered a
                        # character at the beginning of an existing line.
pass # We have just inserted a line.
else:
# 2011/04/01: Patch by Sam Hartsfield
old_s = old_lines[old_row]
new_s = new_lines[new_row]
# New in 4.3b2:
# Guard against invalid oldSel or newSel params.
if old_col - 1 >= len(old_s) or new_col - 1 >= len(new_s):
newBead = True
else:
old_ch = old_s[old_col - 1]
new_ch = new_s[new_col - 1]
newBead = self.recognizeStartOfTypingWord(
old_lines, old_row, old_col, old_ch,
new_lines, new_row, new_col, new_ch,
prev_row, prev_col)
#@-<< set newBead if the change does not continue a word >>
except Exception:
g.error('Unexpected exception...')
g.es_exception()
newBead = True
#@-<< set newBead if we can't share the previous bead >>
# Save end selection as new "previous" selection
u.prevSel = u.newSel
if newBead:
# Push params on undo stack, clearing all forward entries.
bunch = g.Bunch(
p=p.copy(),
kind='typing',
undoType=undo_type,
undoHelper=u.undoTyping,
redoHelper=u.redoTyping,
oldText=u.oldText,
oldSel=u.oldSel,
oldNewlines=u.oldNewlines,
oldMiddleLines=u.oldMiddleLines,
)
u.pushBead(bunch)
else:
bunch = old_d
bunch.dirtyVnodeList = p.setAllAncestorAtFileNodesDirty()
# Bug fix: Leo 4.4.6: always add p to the list.
bunch.dirtyVnodeList.append(p.copy())
bunch.leading = u.leading
bunch.trailing = u.trailing
bunch.newNewlines = u.newNewlines
bunch.newMiddleLines = u.newMiddleLines
bunch.newSel = u.newSel
bunch.newText = u.newText
bunch.yview = u.yview
#@-<< adjust the undo stack, clearing all forward entries >>
if u.per_node_undo:
u.putIvarsToVnode(p)
return bunch # Never used.
#@+node:ekr.20031218072017.2030: *3* u.redo
@cmd('redo')
def redo(self, event=None):
'''Redo the operation undone by the last undo.'''
u = self; c = u.c
w = c.frame.body.wrapper
if not c.p:
return
# End editing *before* getting state.
c.endEditing()
if not u.canRedo():
return
if not u.getBead(u.bead + 1):
return
u.redoing = True
u.groupCount = 0
if u.redoHelper:
u.redoHelper()
else:
g.trace('no redo helper for %s %s' % (u.kind, u.undoType))
c.checkOutline()
# Redraw and recolor.
c.frame.body.updateEditors() # New in Leo 4.4.8.
if 0: # Don't do this: it interferes with selection ranges.
# This strange code forces a recomputation of the root position.
c.selectPosition(c.p)
else:
c.setCurrentPosition(c.p)
if u.newChanged is None: u.newChanged = True
c.setChanged(u.newChanged)
# Redrawing *must* be done here before setting u.undoing to False.
i, j = w.getSelectionRange()
ins = w.getInsertPoint()
c.redraw()
c.recolor()
if u.inHead: # 2013/08/26.
c.editHeadline()
u.inHead = False
else:
c.bodyWantsFocus()
w.setSelectionRange(i, j, insert=ins)
w.seeInsertPoint()
u.redoing = False
u.bead += 1
u.setUndoTypes()
#@+node:ekr.20110519074734.6092: *3* u.redo helpers
#@+node:ekr.20050424170219: *4* u.redoClearRecentFiles
def redoClearRecentFiles(self):
u = self; c = u.c
rf = g.app.recentFilesManager
rf.setRecentFiles(u.newRecentFiles[:])
rf.createRecentFilesMenuItems(c)
#@+node:ekr.20111005152227.15558: *4* u.redoCloneMarkedNodes
def redoCloneMarkedNodes(self):
u = self; c = u.c
c.selectPosition(u.p)
c.cloneMarked()
u.newP = c.p
u.newChanged = c.isChanged()
#@+node:ekr.20160502175557.1: *4* u.redoCopyMarkedNodes
def redoCopyMarkedNodes(self):
u = self; c = u.c
c.selectPosition(u.p)
c.copyMarked()
u.newP = c.p
u.newChanged = c.isChanged()
#@+node:ekr.20050412083057: *4* u.redoCloneNode
def redoCloneNode(self):
u = self; c = u.c; cc = c.chapterController
if cc: cc.selectChapterByName('main')
if u.newBack:
u.newP._linkAfter(u.newBack)
elif u.newParent:
u.newP._linkAsNthChild(u.newParent, 0)
else:
oldRoot = c.rootPosition()
u.newP._linkAsRoot(oldRoot)
for v in u.dirtyVnodeList:
v.setDirty()
c.selectPosition(u.newP)
#@+node:ekr.20111005152227.15559: *4* u.redoDeleteMarkedNodes
def redoDeleteMarkedNodes(self):
u = self; c = u.c
c.selectPosition(u.p)
c.deleteMarked()
c.selectPosition(u.newP)
u.newChanged = c.isChanged()
#@+node:EKR.20040526072519.2: *4* u.redoDeleteNode
def redoDeleteNode(self):
u = self; c = u.c
c.selectPosition(u.p)
c.deleteOutline()
c.selectPosition(u.newP)
#@+node:ekr.20080425060424.9: *4* u.redoDemote
def redoDemote(self):
u = self; c = u.c
parent_v = u.p._parentVnode()
n = u.p.childIndex()
# Move the demoted nodes from the old parent to the new parent.
parent_v.children = parent_v.children[: n + 1]
u.p.v.children.extend(u.followingSibs)
# Adjust the parent links of the moved nodes.
# There is no need to adjust descendant links.
for v in u.followingSibs:
v.parents.remove(parent_v)
v.parents.append(u.p.v)
c.setCurrentPosition(u.p)
#@+node:ekr.20050318085432.6: *4* u.redoGroup
def redoGroup(self):
'''Process beads until the matching 'afterGroup' bead is seen.'''
u = self
# Remember these values.
c = u.c
dirtyVnodeList = u.dirtyVnodeList or []
newSel = u.newSel
p = u.p.copy()
u.groupCount += 1
bunch = u.beads[u.bead]; count = 0
if not hasattr(bunch, 'items'):
g.trace('oops: expecting bunch.items. bunch.kind = %s' % bunch.kind)
g.trace(bunch)
else:
for z in bunch.items:
self.setIvarsFromBunch(z)
if z.redoHelper:
z.redoHelper(); count += 1
else:
g.trace('oops: no redo helper for %s %s' % (u.undoType, p.h))
u.groupCount -= 1
u.updateMarks('new') # Bug fix: Leo 4.4.6.
for v in dirtyVnodeList:
v.setDirty()
if not g.unitTesting and u.verboseUndoGroup:
g.es("redo", count, "instances")
c.selectPosition(p)
if newSel:
i, j = newSel
c.frame.body.wrapper.setSelectionRange(i, j)
#@+node:ekr.20050412085138.1: *4* u.redoHoistNode & redoDehoistNode
def redoHoistNode(self):
u = self; c = u.c
c.selectPosition(u.p)
c.hoist()
def redoDehoistNode(self):
u = self; c = u.c
c.selectPosition(u.p)
c.dehoist()
#@+node:ekr.20050412084532: *4* u.redoInsertNode
def redoInsertNode(self):
u = self; c = u.c; cc = c.chapterController
if cc:
cc.selectChapterByName('main')
if u.newBack:
u.newP._linkAfter(u.newBack)
elif u.newParent:
u.newP._linkAsNthChild(u.newParent, 0)
else:
oldRoot = c.rootPosition()
u.newP._linkAsRoot(oldRoot)
if u.pasteAsClone:
for bunch in u.afterTree:
v = bunch.v
if u.newP.v == v:
c.setBodyString(u.newP, bunch.body)
c.setHeadString(u.newP, bunch.head)
else:
v.setBodyString(bunch.body)
v.setHeadString(bunch.head)
c.selectPosition(u.newP)
#@+node:ekr.20050526125801: *4* u.redoMark
def redoMark(self):
u = self; c = u.c
u.updateMarks('new')
if u.groupCount == 0:
for v in u.dirtyVnodeList:
v.setDirty()
c.selectPosition(u.p)
#@+node:ekr.20050411111847: *4* u.redoMove
def redoMove(self):
u = self; c = u.c; cc = c.chapterController
v = u.p.v
assert(u.oldParent_v)
assert(u.newParent_v)
assert(v)
if cc: cc.selectChapterByName('main')
# Adjust the children arrays.
assert u.oldParent_v.children[u.oldN] == v
del u.oldParent_v.children[u.oldN]
parent_v = u.newParent_v
parent_v.children.insert(u.newN, v)
v.parents.append(u.newParent_v)
v.parents.remove(u.oldParent_v)
u.updateMarks('new')
for v in u.dirtyVnodeList:
v.setDirty()
c.selectPosition(u.newP)
#@+node:ekr.20050318085432.7: *4* u.redoNodeContents
def redoNodeContents(self):
u = self; c = u.c; w = c.frame.body.wrapper
# Restore the body.
u.p.setBodyString(u.newBody)
w.setAllText(u.newBody)
c.frame.body.recolor(u.p)
# Restore the headline.
u.p.initHeadString(u.newHead)
        # This is required. Otherwise redraw will revert the change!
c.frame.tree.setHeadline(u.p, u.newHead) # New in 4.4b2.
if u.groupCount == 0 and u.newSel:
i, j = u.newSel
w.setSelectionRange(i, j)
if u.groupCount == 0 and u.newYScroll is not None:
w.setYScrollPosition(u.newYScroll)
u.updateMarks('new')
for v in u.dirtyVnodeList:
v.setDirty()
#@+node:ekr.20080425060424.13: *4* u.redoPromote
def redoPromote(self):
u = self; c = u.c
parent_v = u.p._parentVnode()
# Add the children to parent_v's children.
n = u.p.childIndex() + 1
old_children = parent_v.children[:]
parent_v.children = old_children[: n]
# Add children up to the promoted nodes.
parent_v.children.extend(u.children)
# Add the promoted nodes.
parent_v.children.extend(old_children[n:])
            # Add the children after the promoted nodes.
# Remove the old children.
u.p.v.children = []
# Adjust the parent links in the moved children.
# There is no need to adjust descendant links.
for child in u.children:
child.parents.remove(u.p.v)
child.parents.append(parent_v)
c.setCurrentPosition(u.p)
#@+node:ekr.20080425060424.4: *4* u.redoSort
def redoSort(self):
u = self; c = u.c
parent_v = u.p._parentVnode()
parent_v.children = u.newChildren
p = c.setPositionAfterSort(u.sortChildren)
c.setCurrentPosition(p)
#@+node:ekr.20050318085432.8: *4* u.redoTree
def redoTree(self):
'''Redo replacement of an entire tree.'''
u = self; c = u.c
u.p = self.undoRedoTree(u.p, u.oldTree, u.newTree)
c.selectPosition(u.p) # Does full recolor.
if u.newSel:
i, j = u.newSel
c.frame.body.wrapper.setSelectionRange(i, j)
#@+node:EKR.20040526075238.5: *4* u.redoTyping
def redoTyping(self):
u = self; c = u.c; current = c.p
w = c.frame.body.wrapper
# selectPosition causes recoloring, so avoid if possible.
if current != u.p:
c.selectPosition(u.p)
self.undoRedoText(
u.p, u.leading, u.trailing,
u.newMiddleLines, u.oldMiddleLines,
u.newNewlines, u.oldNewlines,
tag="redo", undoType=u.undoType)
u.updateMarks('new')
for v in u.dirtyVnodeList:
v.setDirty()
if u.newSel:
c.bodyWantsFocus()
i, j = u.newSel
w.setSelectionRange(i, j, insert=j)
if u.yview:
c.bodyWantsFocus()
w.setYScrollPosition(u.yview)
#@+node:ekr.20031218072017.2039: *3* u.undo
@cmd('undo')
def undo(self, event=None):
"""Undo the operation described by the undo parameters."""
u = self; c = u.c
w = c.frame.body.wrapper
if not c.p:
return g.trace('no current position')
# End editing *before* getting state.
c.endEditing()
if u.per_node_undo: # 2011/05/19
u.setIvarsFromVnode(c.p)
if not u.canUndo():
return
if not u.getBead(u.bead):
return
u.undoing = True
u.groupCount = 0
if u.undoHelper:
u.undoHelper()
else:
g.trace('no undo helper for %s %s' % (u.kind, u.undoType))
c.checkOutline()
# Redraw and recolor.
c.frame.body.updateEditors() # New in Leo 4.4.8.
if 0: # Don't do this: it interferes with selection ranges.
# This strange code forces a recomputation of the root position.
c.selectPosition(c.p)
else:
c.setCurrentPosition(c.p)
if u.oldChanged is None: u.oldChanged = True
c.setChanged(u.oldChanged)
# Redrawing *must* be done here before setting u.undoing to False.
i, j = w.getSelectionRange()
ins = w.getInsertPoint()
c.redraw()
c.recolor()
if u.inHead:
c.editHeadline()
u.inHead = False
else:
c.bodyWantsFocus()
w.setSelectionRange(i, j, insert=ins)
w.seeInsertPoint()
u.undoing = False
u.bead -= 1
u.setUndoTypes()
#@+node:ekr.20110519074734.6093: *3* u.undo helpers
#@+node:ekr.20050424170219.1: *4* u.undoClearRecentFiles
def undoClearRecentFiles(self):
u = self; c = u.c
rf = g.app.recentFilesManager
rf.setRecentFiles(u.oldRecentFiles[:])
rf.createRecentFilesMenuItems(c)
#@+node:ekr.20111005152227.15560: *4* u.undoCloneMarkedNodes
def undoCloneMarkedNodes(self):
u = self
next = u.p.next()
assert next.h == 'Clones of marked nodes', (u.p, next.h)
next.doDelete()
u.p.setAllAncestorAtFileNodesDirty()
u.c.selectPosition(u.p)
#@+node:ekr.20160502175653.1: *4* u.undoCopyMarkedNodes
def undoCopyMarkedNodes(self):
u = self
next = u.p.next()
assert next.h == 'Copies of marked nodes', (u.p.h, next.h)
next.doDelete()
u.p.setAllAncestorAtFileNodesDirty()
u.c.selectPosition(u.p)
#@+node:ekr.20050412083057.1: *4* u.undoCloneNode
def undoCloneNode(self):
u = self; c = u.c; cc = c.chapterController
if cc: cc.selectChapterByName('main')
c.selectPosition(u.newP)
c.deleteOutline()
for v in u.dirtyVnodeList:
v.setDirty() # Bug fix: Leo 4.4.6
c.selectPosition(u.p)
#@+node:ekr.20111005152227.15557: *4* u.undoDeleteMarkedNodes
def undoDeleteMarkedNodes(self):
u = self; c = u.c
# Undo the deletes in reverse order
aList = u.deleteMarkedNodesData[:]
aList.reverse()
for p in aList:
if p.stack:
parent_v, junk = p.stack[-1]
else:
parent_v = c.hiddenRootNode
p.v._addLink(p._childIndex, parent_v)
u.p.setAllAncestorAtFileNodesDirty()
c.selectPosition(u.p)
#@+node:ekr.20050412084055: *4* u.undoDeleteNode
def undoDeleteNode(self):
u = self; c = u.c
if u.oldBack:
u.p._linkAfter(u.oldBack)
elif u.oldParent:
u.p._linkAsNthChild(u.oldParent, 0)
else:
oldRoot = c.rootPosition()
u.p._linkAsRoot(oldRoot)
u.p.setAllAncestorAtFileNodesDirty()
c.selectPosition(u.p)
#@+node:ekr.20080425060424.10: *4* u.undoDemote
def undoDemote(self):
u = self; c = u.c
parent_v = u.p._parentVnode()
n = len(u.followingSibs)
# Remove the demoted nodes from p's children.
u.p.v.children = u.p.v.children[: -n]
# Add the demoted nodes to the parent's children.
parent_v.children.extend(u.followingSibs)
# Adjust the parent links.
# There is no need to adjust descendant links.
for sib in u.followingSibs:
sib.parents.remove(u.p.v)
sib.parents.append(parent_v)
c.setCurrentPosition(u.p)
#@+node:ekr.20050318085713: *4* u.undoGroup
def undoGroup(self):
'''Process beads until the matching 'beforeGroup' bead is seen.'''
u = self
# Remember these values.
c = u.c
dirtyVnodeList = u.dirtyVnodeList or []
oldSel = u.oldSel
p = u.p.copy()
u.groupCount += 1
bunch = u.beads[u.bead]; count = 0
if not hasattr(bunch, 'items'):
g.trace('oops: expecting bunch.items. bunch.kind = %s' % bunch.kind)
g.trace(bunch)
else:
# Important bug fix: 9/8/06: reverse the items first.
reversedItems = bunch.items[:]
reversedItems.reverse()
for z in reversedItems:
self.setIvarsFromBunch(z)
if z.undoHelper:
z.undoHelper(); count += 1
else:
g.trace('oops: no undo helper for %s %s' % (u.undoType, p.v))
u.groupCount -= 1
u.updateMarks('old') # Bug fix: Leo 4.4.6.
for v in dirtyVnodeList:
v.setDirty() # Bug fix: Leo 4.4.6.
if not g.unitTesting and u.verboseUndoGroup:
g.es("undo", count, "instances")
c.selectPosition(p)
if oldSel:
i, j = oldSel
c.frame.body.wrapper.setSelectionRange(i, j)
#@+node:ekr.20050412083244: *4* u.undoHoistNode & undoDehoistNode
def undoHoistNode(self):
u = self; c = u.c
c.selectPosition(u.p)
c.dehoist()
def undoDehoistNode(self):
u = self; c = u.c
c.selectPosition(u.p)
c.hoist()
#@+node:ekr.20050412085112: *4* u.undoInsertNode
def undoInsertNode(self):
u = self; c = u.c; cc = c.chapterController
if cc: cc.selectChapterByName('main')
c.selectPosition(u.newP)
c.deleteOutline()
# Bug fix: 2016/03/30.
# This always selects the proper new position.
# c.selectPosition(u.p)
if u.pasteAsClone:
for bunch in u.beforeTree:
v = bunch.v
if u.p.v == v:
c.setBodyString(u.p, bunch.body)
c.setHeadString(u.p, bunch.head)
else:
v.setBodyString(bunch.body)
v.setHeadString(bunch.head)
#@+node:ekr.20050526124906: *4* u.undoMark
def undoMark(self):
u = self; c = u.c
u.updateMarks('old')
if u.groupCount == 0:
for v in u.dirtyVnodeList:
v.setDirty() # Bug fix: Leo 4.4.6.
c.selectPosition(u.p)
#@+node:ekr.20050411112033: *4* u.undoMove
def undoMove(self):
u = self; c = u.c; cc = c.chapterController
if cc: cc.selectChapterByName('main')
v = u.p.v
assert(u.oldParent_v)
assert(u.newParent_v)
assert(v)
# Adjust the children arrays.
assert u.newParent_v.children[u.newN] == v
del u.newParent_v.children[u.newN]
u.oldParent_v.children.insert(u.oldN, v)
# Recompute the parent links.
v.parents.append(u.oldParent_v)
v.parents.remove(u.newParent_v)
u.updateMarks('old')
for v in u.dirtyVnodeList:
v.setDirty()
c.selectPosition(u.p)
#@+node:ekr.20050318085713.1: *4* u.undoNodeContents
def undoNodeContents(self):
'''Undo all changes to the contents of a node,
including headline and body text, and marked bits.
'''
u = self; c = u.c
w = c.frame.body.wrapper
u.p.b = u.oldBody
w.setAllText(u.oldBody)
c.frame.body.recolor(u.p)
u.p.h = u.oldHead
# This is required. Otherwise c.redraw will revert the change!
c.frame.tree.setHeadline(u.p, u.oldHead)
if u.groupCount == 0 and u.oldSel:
i, j = u.oldSel
w.setSelectionRange(i, j)
if u.groupCount == 0 and u.oldYScroll is not None:
w.setYScrollPosition(u.oldYScroll)
u.updateMarks('old')
for v in u.dirtyVnodeList:
v.setDirty() # Bug fix: Leo 4.4.6.
#@+node:ekr.20080425060424.14: *4* u.undoPromote
def undoPromote(self):
u = self; c = u.c
        parent_v = u.p._parentVnode() # The parent of all the *promoted* nodes.
# Remove the promoted nodes from parent_v's children.
n = u.p.childIndex() + 1
# Adjust the old parents children
old_children = parent_v.children
parent_v.children = old_children[: n]
# Add the nodes before the promoted nodes.
parent_v.children.extend(old_children[n + len(u.children):])
# Add the nodes after the promoted nodes.
# Add the demoted nodes to v's children.
u.p.v.children = u.children[:]
# Adjust the parent links.
# There is no need to adjust descendant links.
for child in u.children:
child.parents.remove(parent_v)
child.parents.append(u.p.v)
c.setCurrentPosition(u.p)
#@+node:ekr.20031218072017.1493: *4* u.undoRedoText
def undoRedoText(self, p,
leading, trailing, # Number of matching leading & trailing lines.
oldMidLines, newMidLines, # Lists of unmatched lines.
oldNewlines, newNewlines, # Number of trailing newlines.
tag="undo", # "undo" or "redo"
undoType=None
):
'''Handle text undo and redo: converts _new_ text into _old_ text.'''
# newNewlines is unused, but it has symmetry.
u = self; c = u.c; w = c.frame.body.wrapper
#@+<< Compute the result using p's body text >>
#@+node:ekr.20061106105812.1: *5* << Compute the result using p's body text >>
# Recreate the text using the present body text.
body = p.b
body = g.toUnicode(body)
body_lines = body.split('\n')
s = []
if leading > 0:
s.extend(body_lines[: leading])
if oldMidLines:
s.extend(oldMidLines)
if trailing > 0:
s.extend(body_lines[-trailing:])
s = '\n'.join(s)
# Remove trailing newlines in s.
while s and s[-1] == '\n':
s = s[: -1]
# Add oldNewlines newlines.
if oldNewlines > 0:
s = s + '\n' * oldNewlines
result = s
if u.debug_print:
g.pr("body: ", body)
g.pr("result:", result)
#@-<< Compute the result using p's body text >>
p.setBodyString(result)
w.setAllText(result)
sel = u.oldSel if tag == 'undo' else u.newSel
if sel:
i, j = sel
w.setSelectionRange(i, j, insert=j)
c.frame.body.recolor(p)
w.seeInsertPoint() # 2009/12/21
#@+node:ekr.20050408100042: *4* u.undoRedoTree
def undoRedoTree(self, p, new_data, old_data):
'''Replace p and its subtree using old_data during undo.'''
# Same as undoReplace except uses g.Bunch.
u = self; c = u.c
if new_data is None:
# This is the first time we have undone the operation.
# Put the new data in the bead.
bunch = u.beads[u.bead]
bunch.newTree = u.saveTree(p.copy())
u.beads[u.bead] = bunch
# Replace data in tree with old data.
u.restoreTree(old_data)
c.setBodyString(p, p.b)
return p # Nothing really changes.
#@+node:ekr.20080425060424.5: *4* u.undoSort
def undoSort(self):
u = self; c = u.c
parent_v = u.p._parentVnode()
parent_v.children = u.oldChildren
p = c.setPositionAfterSort(u.sortChildren)
c.setCurrentPosition(p)
#@+node:ekr.20050318085713.2: *4* u.undoTree
def undoTree(self):
        '''Undo replacement of an entire tree.'''
u = self; c = u.c
u.p = self.undoRedoTree(u.p, u.newTree, u.oldTree)
c.selectPosition(u.p) # Does full recolor.
if u.oldSel:
i, j = u.oldSel
c.frame.body.wrapper.setSelectionRange(i, j)
#@+node:EKR.20040526090701.4: *4* u.undoTyping
def undoTyping(self):
u = self; c = u.c; current = c.p
w = c.frame.body.wrapper
# selectPosition causes recoloring, so don't do this unless needed.
if current != u.p:
c.selectPosition(u.p)
self.undoRedoText(
u.p, u.leading, u.trailing,
u.oldMiddleLines, u.newMiddleLines,
u.oldNewlines, u.newNewlines,
tag="undo", undoType=u.undoType)
u.updateMarks('old')
for v in u.dirtyVnodeList:
v.setDirty() # Bug fix: Leo 4.4.6.
if u.oldSel:
c.bodyWantsFocus()
i, j = u.oldSel
w.setSelectionRange(i, j, insert=j)
if u.yview:
c.bodyWantsFocus()
w.setYScrollPosition(u.yview)
#@-others
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo
|
py | 1a44b2dd82d2d17920b45c83995bdc23b67f8df0 | #! coding:utf-8
"""
The bottle module defines the Bottle class that is one element in
a water sort puzzle.
"""
# Import to do typing :Bottle inside class Bottle
from __future__ import annotations
from typing import Sequence, Optional, Set, Any
class BottleError(Exception):
"""Exception from the Bottle class."""
class Bottle:
"""
A bottle contains doses of colored water (up to Bottle.MAX_DOSES)
    The content of a bottle is a list of objects where each object identifies a color.
doses = [None, None, None, None] in case of empty bottle (nb_doses = 0)
doses = ['X', None, None, None] where the bottle contains only one dose of 'X' (nb_doses = 1)
doses = ['X', 'Y', 'Y', None] where the bottle contains one dose of 'X' at the bottom and
2 doses of 'Y' at the top (nb_doses = 3)
In this situation, the bottle contains 3 doses with 2 different colors
"""
# Speedup properties for this class
__slots__ = "doses", "nb_doses"
MAX_DOSES = 4
def __init__(self, doses: Sequence):
self.doses: list[Any] = [
None,
] * Bottle.MAX_DOSES
self.nb_doses = 0
for dose in doses:
if dose is not None:
self.doses[self.nb_doses] = dose
self.nb_doses += 1
@property
def is_empty(self) -> bool:
"""@return True if the bottle is empty."""
return self.nb_doses == 0
@property
def is_full(self) -> bool:
"""@return True if the bottle is full."""
return self.nb_doses == Bottle.MAX_DOSES
@property
def colors(self) -> Set[Any]:
"""@return Set of the different colors in the bottle."""
return set(self.doses[: self.nb_doses])
@property
def nb_different_colors(self) -> int:
"""Number of different colors in the bottle."""
return len(self.colors)
@property
def top_color(self) -> Optional[Any]:
"""Top color in the bottle."""
if self.nb_doses == 0:
return None
return self.doses[self.nb_doses - 1]
def iter_doses(self):
"""Iterator on every dose holding a color in the bottle."""
for i in range(self.nb_doses):
yield self.doses[i]
def is_same_as(self, other: Bottle) -> bool:
"""
@return True if bottles are the same.
        (same as __eq__ but not checking isinstance of the other bottle, to speed up computation)
"""
if self.nb_doses != other.nb_doses:
return False
for i in range(self.nb_doses):
if self.doses[i] != other.doses[i]:
return False
return True
def pop_dose(self) -> Any:
"""Pop the top dose in the bottle and return its color."""
if self.is_empty:
raise BottleError("Cannot pop dose from an empty bottle")
ret = self.doses[self.nb_doses - 1]
self.doses[self.nb_doses - 1] = None
self.nb_doses -= 1
return ret
def can_push_dose(self, color: Any) -> bool:
"""@return True if one dose of the color can be poured into the bottle."""
if self.nb_doses == 0:
return True
if self.nb_doses == Bottle.MAX_DOSES:
return False
return self.doses[self.nb_doses - 1] == color
def push_dose(self, color: Any) -> None:
"""Pour one dose of the color into the bottle."""
if not self.can_push_dose(color):
raise BottleError(f"Cannot pour {color} into {self}")
self.doses[self.nb_doses] = color
self.nb_doses += 1
def is_possible_to_pour_one_dose_into(self, destination: Bottle) -> bool:
"""
@return True if at least one dose of the top color can be poured into
the destination bottle.
"""
if self.nb_doses == 0:
return False
if destination.nb_doses == 0:
return True
if destination.nb_doses == Bottle.MAX_DOSES:
return False
# Same top colors ?
return (
self.doses[self.nb_doses - 1] == destination.doses[destination.nb_doses - 1]
)
def is_interesting_to_pour_into(self, destination: Bottle) -> bool:
"""
@return True if pouring into destination leads to an interesting situation.
(Quite the same as is_possible_to_pour_one_dose_into but also checking for
interesting resulting situation)
"""
if destination.nb_doses == Bottle.MAX_DOSES:
return False # destination is full
if self.nb_doses == 0:
return False # Source empty
if destination.nb_doses == 0:
if self.nb_different_colors == 1:
                return False # Because the resulting situation would be the same
return True
# Same top colors ?
return (
self.doses[self.nb_doses - 1] == destination.doses[destination.nb_doses - 1]
)
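        # Example of the pruning above (hypothetical): a bottle holding only 'X'
        # doses poured into an empty bottle yields an equivalent state, so a
        # solver can safely skip that move.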
def pour_into(self, destination: Bottle) -> int:
"""Pour all possible doses of top color into the destination bottle.
@return number of poured doses
"""
nb_doses = 0
while self.is_possible_to_pour_one_dose_into(destination):
color = self.pop_dose()
destination.push_dose(color)
nb_doses += 1
return nb_doses
def clone(self) -> Bottle:
"""@return Create a copy clone of the bottle."""
copy_list_doses = self.doses.copy()
return Bottle(copy_list_doses)
def __repr__(self):
return f"<{self.doses[:self.nb_doses]}>"
|
py | 1a44b2e78531b8c414a3705c515daf52e9b59e15 | import base64
import json
import logging
from html.parser import HTMLParser
from http.client import HTTPConnection
from markupsafe import escape
from sqlalchemy import (
and_,
desc,
)
from sqlalchemy.orm import (
joinedload,
lazyload,
undefer,
)
from sqlalchemy.sql import expression
from galaxy import (
model,
util,
web,
)
from galaxy.managers.sharable import SlugBuilder
from galaxy.managers.workflows import (
MissingToolsException,
WorkflowUpdateOptions,
)
from galaxy.model.item_attrs import UsesItemRatings
from galaxy.tools.parameters.basic import workflow_building_modes
from galaxy.util import (
FILENAME_VALID_CHARS,
unicodify,
)
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web import (
error,
url_for,
)
from galaxy.web.framework.helpers import (
grids,
time_ago,
)
from galaxy.webapps.base.controller import (
BaseUIController,
SharableMixin,
UsesStoredWorkflowMixin,
)
from galaxy.workflow.extract import (
extract_workflow,
summarize,
)
from galaxy.workflow.modules import (
load_module_sections,
module_factory,
)
from galaxy.workflow.render import (
STANDALONE_SVG_TEMPLATE,
WorkflowCanvas,
)
log = logging.getLogger(__name__)
class StoredWorkflowListGrid(grids.Grid):
class StepsColumn(grids.GridColumn):
def get_value(self, trans, grid, workflow):
return len(workflow.latest_workflow.steps)
# Grid definition
use_panels = True
title = "Saved Workflows"
model_class = model.StoredWorkflow
default_filter = {"name": "All", "tags": "All"}
default_sort_key = "-update_time"
columns = [
grids.TextColumn("Name", key="name", attach_popup=True, filterable="advanced"),
grids.IndividualTagsColumn(
"Tags",
"tags",
model_tag_association_class=model.StoredWorkflowTagAssociation,
filterable="advanced",
grid_name="StoredWorkflowListGrid",
),
StepsColumn("Steps"),
grids.GridColumn("Created", key="create_time", format=time_ago),
grids.GridColumn("Last Updated", key="update_time", format=time_ago),
]
columns.append(
grids.MulticolFilterColumn(
"Search",
cols_to_filter=[columns[0], columns[1]],
key="free-text-search",
visible=False,
filterable="standard",
)
)
operations = [
grids.GridOperation(
"Edit", allow_multiple=False, condition=(lambda item: not item.deleted), async_compatible=False
),
grids.GridOperation("Run", condition=(lambda item: not item.deleted), async_compatible=False),
grids.GridOperation("Copy", condition=(lambda item: not item.deleted), async_compatible=False),
grids.GridOperation("Rename", condition=(lambda item: not item.deleted), async_compatible=False),
grids.GridOperation("Sharing", condition=(lambda item: not item.deleted), async_compatible=False),
grids.GridOperation("Delete", condition=(lambda item: item.deleted), async_compatible=True),
]
def apply_query_filter(self, trans, query, **kwargs):
return query.filter_by(user=trans.user, deleted=False)
class StoredWorkflowAllPublishedGrid(grids.Grid):
title = "Published Workflows"
model_class = model.StoredWorkflow
default_sort_key = "update_time"
default_filter = dict(public_url="All", username="All", tags="All")
columns = [
grids.PublicURLColumn("Name", key="name", filterable="advanced", attach_popup=True),
grids.OwnerAnnotationColumn(
"Annotation",
key="annotation",
model_annotation_association_class=model.StoredWorkflowAnnotationAssociation,
filterable="advanced",
),
grids.OwnerColumn("Owner", key="username", model_class=model.User, filterable="advanced"),
grids.CommunityRatingColumn("Community Rating", key="rating"),
grids.CommunityTagsColumn(
"Community Tags",
key="tags",
model_tag_association_class=model.StoredWorkflowTagAssociation,
filterable="advanced",
grid_name="PublicWorkflowListGrid",
),
grids.ReverseSortColumn("Last Updated", key="update_time", format=time_ago),
]
columns.append(
grids.MulticolFilterColumn(
"Search name, annotation, owner, and tags",
cols_to_filter=[columns[0], columns[1], columns[2], columns[4]],
key="free-text-search",
visible=False,
filterable="standard",
)
)
operations = [
grids.GridOperation(
"Run",
condition=(lambda item: not item.deleted),
allow_multiple=False,
url_args=dict(controller="workflows", action="run"),
),
grids.GridOperation(
"Import", condition=(lambda item: not item.deleted), allow_multiple=False, url_args=dict(action="imp")
),
grids.GridOperation(
"Save as File",
condition=(lambda item: not item.deleted),
allow_multiple=False,
url_args=dict(action="export_to_file"),
),
]
num_rows_per_page = 50
use_paging = True
def build_initial_query(self, trans, **kwargs):
# See optimization description comments and TODO for tags in matching public histories query.
# In addition to that - be sure to lazyload the latest_workflow - it isn't needed and it causes all
# of its steps to be eagerly loaded.
return (
trans.sa_session.query(self.model_class)
.join("user")
.options(
lazyload("latest_workflow"),
joinedload("user").load_only("username"),
joinedload("annotations"),
undefer("average_rating"),
)
)
def apply_query_filter(self, trans, query, **kwargs):
# A public workflow is published, has a slug, and is not deleted.
return (
query.filter(self.model_class.published == expression.true())
.filter(self.model_class.slug.isnot(None))
.filter(self.model_class.deleted == expression.false())
)
# Simple HTML parser to get all content in a single tag.
class SingleTagContentsParser(HTMLParser):
def __init__(self, target_tag):
# Cannot use super() because HTMLParser is an old-style class in Python2
HTMLParser.__init__(self)
self.target_tag = target_tag
self.cur_tag = None
self.tag_content = ""
def handle_starttag(self, tag, attrs):
"""Called for each start tag."""
self.cur_tag = tag
def handle_data(self, text):
"""Called for each block of plain text."""
if self.cur_tag == self.target_tag:
self.tag_content += text
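# Illustrative use of the parser above (hypothetical input): feeding
# '<html><title>My workflow</title></html>' to SingleTagContentsParser('title')
# leaves parser.tag_content == 'My workflow'.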
class WorkflowController(BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesItemRatings):
stored_list_grid = StoredWorkflowListGrid()
published_list_grid = StoredWorkflowAllPublishedGrid()
slug_builder = SlugBuilder()
@web.expose
@web.require_login("use Galaxy workflows")
def list_grid(self, trans, **kwargs):
"""List user's stored workflows."""
# status = message = None
if "operation" in kwargs:
operation = kwargs["operation"].lower()
if operation == "rename":
return self.rename(trans, **kwargs)
history_ids = util.listify(kwargs.get("id", []))
if operation == "sharing":
return self.sharing(trans, id=history_ids)
return self.stored_list_grid(trans, **kwargs)
@web.expose
@web.require_login("use Galaxy workflows", use_panels=True)
def list(self, trans):
"""
Render workflow main page (management of existing workflows)
"""
# Take care of proxy prefix in url as well
redirect_url = f"{url_for('/')}workflow"
return trans.response.send_redirect(redirect_url)
@web.expose
@web.json
def list_published(self, trans, **kwargs):
return self.published_list_grid(trans, **kwargs)
@web.expose
def display_by_username_and_slug(self, trans, username, slug, format="html"):
"""
Display workflow based on a username and slug. Format can be html, json, or json-download.
"""
# Get workflow by username and slug. Security is handled by the display methods below.
session = trans.sa_session
user = session.query(model.User).filter_by(username=username).first()
if not user:
raise web.httpexceptions.HTTPNotFound()
stored_workflow = (
trans.sa_session.query(model.StoredWorkflow).filter_by(user=user, slug=slug, deleted=False).first()
)
if not stored_workflow:
raise web.httpexceptions.HTTPNotFound()
encoded_id = trans.security.encode_id(stored_workflow.id)
# Display workflow in requested format.
if format == "html":
return self._display(trans, stored_workflow)
elif format == "json":
return self.for_direct_import(trans, encoded_id)
elif format == "json-download":
return self.export_to_file(trans, encoded_id)
@web.expose
def display_by_id(self, trans, id):
"""Display workflow based on id."""
# Get workflow.
stored_workflow = self.get_stored_workflow(trans, id)
return self._display(trans, stored_workflow)
def _display(self, trans, stored_workflow):
"""Diplay workflow as HTML page."""
if stored_workflow is None:
raise web.httpexceptions.HTTPNotFound()
# Security check raises error if user cannot access workflow.
self.security_check(trans, stored_workflow, False, True)
# Get data for workflow's steps.
self.get_stored_workflow_steps(trans, stored_workflow)
# Get annotations.
stored_workflow.annotation = self.get_item_annotation_str(
trans.sa_session, stored_workflow.user, stored_workflow
)
for step in stored_workflow.latest_workflow.steps:
step.annotation = self.get_item_annotation_str(trans.sa_session, stored_workflow.user, step)
user_is_owner = True if trans.user == stored_workflow.user else False
# Get rating data.
user_item_rating = 0
if trans.get_user():
user_item_rating = self.get_user_item_rating(trans.sa_session, trans.get_user(), stored_workflow)
if user_item_rating:
user_item_rating = user_item_rating.rating
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data(trans.sa_session, stored_workflow)
return trans.fill_template_mako(
"workflow/display.mako",
item=stored_workflow,
item_data=stored_workflow.latest_workflow.steps,
user_item_rating=user_item_rating,
ave_item_rating=ave_item_rating,
num_ratings=num_ratings,
user_is_owner=user_is_owner,
)
@web.expose
def get_item_content_async(self, trans, id):
"""Returns item content in HTML format."""
stored = self.get_stored_workflow(trans, id, False, True)
if stored is None:
raise web.httpexceptions.HTTPNotFound()
# Get data for workflow's steps.
self.get_stored_workflow_steps(trans, stored)
# Get annotations.
stored.annotation = self.get_item_annotation_str(trans.sa_session, stored.user, stored)
for step in stored.latest_workflow.steps:
step.annotation = self.get_item_annotation_str(trans.sa_session, stored.user, step)
return trans.fill_template_mako(
"/workflow/item_content.mako", item=stored, item_data=stored.latest_workflow.steps
)
@web.expose
@web.require_login("use Galaxy workflows")
def share(self, trans, id, email="", use_panels=False):
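        """Share a stored workflow with the user identified by email, or render the share form."""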
msg = mtype = None
# Load workflow from database
stored = self.get_stored_workflow(trans, id)
if email:
other = (
trans.sa_session.query(model.User)
.filter(and_(model.User.table.c.email == email, model.User.table.c.deleted == expression.false()))
.first()
)
if not other:
mtype = "error"
msg = f"User '{escape(email)}' does not exist"
elif other == trans.get_user():
mtype = "error"
msg = "You cannot share a workflow with yourself"
elif (
trans.sa_session.query(model.StoredWorkflowUserShareAssociation)
.filter_by(user=other, stored_workflow=stored)
.count()
> 0
):
mtype = "error"
msg = f"Workflow already shared with '{escape(email)}'"
else:
share = model.StoredWorkflowUserShareAssociation()
share.stored_workflow = stored
share.user = other
session = trans.sa_session
session.add(share)
session.flush()
trans.set_message(f"Workflow '{escape(stored.name)}' shared with user '{escape(other.email)}'")
return trans.response.send_redirect(url_for(controller="workflow", action="sharing", id=id))
return trans.fill_template(
"/ind_share_base.mako", message=msg, messagetype=mtype, item=stored, email=email, use_panels=use_panels
)
@web.expose
@web.require_login("export Galaxy workflows")
def export(self, trans, id, **kwargs):
"""Handle workflow export."""
session = trans.sa_session
# Get session and workflow.
stored = self.get_stored_workflow(trans, id)
session.add(stored)
# Legacy issue: workflows made accessible before recent updates may not have a slug. Create slug for any workflows that need them.
if stored.importable and not stored.slug:
self._make_item_accessible(trans.sa_session, stored)
session.flush()
return trans.fill_template("/workflow/sharing.mako", use_panels=True, item=stored)
@web.expose
@web.require_login("to import a workflow", use_panels=True)
def imp(self, trans, id, **kwargs):
"""Imports a workflow shared by other users."""
# Set referer message.
referer = trans.request.referer
if referer and not referer.startswith(f"{trans.request.application_url}{url_for('/login')}"):
referer_message = f"<a href='{escape(referer)}'>return to the previous page</a>"
else:
referer_message = f"<a href='{url_for('/')}'>go to Galaxy's start page</a>"
# Do import.
stored = self.get_stored_workflow(trans, id, check_ownership=False)
if stored.importable is False:
return trans.show_error_message(
f"The owner of this workflow has disabled imports via this link.<br>You can {referer_message}",
use_panels=True,
)
elif stored.deleted:
return trans.show_error_message(
f"You can't import this workflow because it has been deleted.<br>You can {referer_message}",
use_panels=True,
)
self._import_shared_workflow(trans, stored)
# Redirect to load galaxy frames.
return trans.show_ok_message(
message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s."""
% (stored.name, web.url_for("/workflows/list"), referer_message)
)
@web.expose
@web.require_login("use Galaxy workflows")
def rename_async(self, trans, id, new_name=None, **kwargs):
stored = self.get_stored_workflow(trans, id)
if new_name:
san_new_name = sanitize_html(new_name)
stored.name = san_new_name
stored.latest_workflow.name = san_new_name
trans.sa_session.flush()
return stored.name
@web.expose
@web.require_login("use Galaxy workflows")
def annotate_async(self, trans, id, new_annotation=None, **kwargs):
stored = self.get_stored_workflow(trans, id)
if new_annotation:
# Sanitize annotation before adding it.
new_annotation = sanitize_html(new_annotation)
self.add_item_annotation(trans.sa_session, trans.get_user(), stored, new_annotation)
trans.sa_session.flush()
return new_annotation
@web.expose
@web.require_login("rate items")
@web.json
def rate_async(self, trans, id, rating):
"""Rate a workflow asynchronously and return updated community data."""
stored = self.get_stored_workflow(trans, id, check_ownership=False, check_accessible=True)
if not stored:
return trans.show_error_message("The specified workflow does not exist.")
# Rate workflow.
self.rate_item(trans.sa_session, trans.get_user(), stored, rating)
return self.get_ave_item_rating_data(trans.sa_session, stored)
@web.expose
def get_embed_html_async(self, trans, id):
"""Returns HTML for embedding a workflow in a page."""
# TODO: user should be able to embed any item he has access to. see display_by_username_and_slug for security code.
stored = self.get_stored_workflow(trans, id)
if stored:
return f"Embedded Workflow '{stored.name}'"
@web.expose
@web.json
@web.require_login("use Galaxy workflows")
def get_name_and_link_async(self, trans, id=None):
"""Returns workflow's name and link."""
stored = self.get_stored_workflow(trans, id)
return_dict = {
"name": stored.name,
"link": url_for(
controller="workflow",
action="display_by_username_and_slug",
username=stored.user.username,
slug=stored.slug,
),
}
return return_dict
@web.expose
@web.require_login("use Galaxy workflows")
def gen_image(self, trans, id):
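        """Render the stored workflow as a standalone SVG image."""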
stored = self.get_stored_workflow(trans, id, check_ownership=True)
try:
svg = self._workflow_to_svg_canvas(trans, stored)
except Exception:
status = "error"
message = (
"Galaxy is unable to create the SVG image. Please check your workflow, there might be missing tools."
)
return trans.fill_template(
"/workflow/sharing.mako", use_panels=True, item=stored, status=status, message=message
)
trans.response.set_content_type("image/svg+xml")
s = STANDALONE_SVG_TEMPLATE % svg.tostring()
return s.encode("utf-8")
@web.expose
@web.require_login("use Galaxy workflows")
def copy(self, trans, id, save_as_name=None):
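        """Create a copy of a workflow that is owned by or shared with the current user."""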
# Get workflow to copy.
stored = self.get_stored_workflow(trans, id, check_ownership=False)
user = trans.get_user()
if stored.user == user:
owner = True
else:
if (
trans.sa_session.query(model.StoredWorkflowUserShareAssociation)
.filter_by(user=user, stored_workflow=stored)
.count()
== 0
):
error("Workflow is not owned by or shared with current user")
owner = False
# Copy.
new_stored = model.StoredWorkflow()
if save_as_name:
new_stored.name = f"{save_as_name}"
else:
new_stored.name = f"Copy of {stored.name}"
new_stored.latest_workflow = stored.latest_workflow
# Copy annotation.
annotation_obj = self.get_item_annotation_obj(trans.sa_session, stored.user, stored)
if annotation_obj:
self.add_item_annotation(trans.sa_session, trans.get_user(), new_stored, annotation_obj.annotation)
new_stored.copy_tags_from(trans.user, stored)
if not owner:
new_stored.name += f" shared by {stored.user.email}"
new_stored.user = user
# Persist
session = trans.sa_session
session.add(new_stored)
session.flush()
# Display the management page
message = f"Created new workflow with name: {escape(new_stored.name)}"
trans.set_message(message)
return_url = f"{url_for('/')}workflow?status=done&message={escape(message)}"
trans.response.send_redirect(return_url)
@web.legacy_expose_api
def create(self, trans, payload=None, **kwd):
if trans.request.method == "GET":
return {
"title": "Create Workflow",
"inputs": [
{"name": "workflow_name", "label": "Name", "value": "Unnamed workflow"},
{
"name": "workflow_annotation",
"label": "Annotation",
"help": "A description of the workflow; annotation is shown alongside shared or published workflows.",
},
],
}
else:
user = trans.get_user()
workflow_name = payload.get("workflow_name")
workflow_annotation = payload.get("workflow_annotation")
if not workflow_name:
return self.message_exception(trans, "Please provide a workflow name.")
# Create the new stored workflow
stored_workflow = model.StoredWorkflow()
stored_workflow.name = workflow_name
stored_workflow.user = user
self.slug_builder.create_item_slug(trans.sa_session, stored_workflow)
# And the first (empty) workflow revision
workflow = model.Workflow()
workflow.name = workflow_name
workflow.stored_workflow = stored_workflow
stored_workflow.latest_workflow = workflow
# Add annotation.
workflow_annotation = sanitize_html(workflow_annotation)
self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation)
# Persist
session = trans.sa_session
session.add(stored_workflow)
session.flush()
return {
"id": trans.security.encode_id(stored_workflow.id),
"message": f"Workflow {workflow_name} has been created.",
}
@web.json
def save_workflow_as(self, trans, workflow_name, workflow_data, workflow_annotation="", from_tool_form=False):
"""
Creates a new workflow based on Save As command. It is a new workflow, but
is created with workflow_data already present.
"""
user = trans.get_user()
if workflow_name is not None:
workflow_contents_manager = self.app.workflow_contents_manager
stored_workflow = model.StoredWorkflow()
stored_workflow.name = workflow_name
stored_workflow.user = user
self.slug_builder.create_item_slug(trans.sa_session, stored_workflow)
workflow = model.Workflow()
workflow.name = workflow_name
workflow.stored_workflow = stored_workflow
stored_workflow.latest_workflow = workflow
# Add annotation.
workflow_annotation = sanitize_html(workflow_annotation)
self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation)
# Persist
session = trans.sa_session
session.add(stored_workflow)
session.flush()
workflow_update_options = WorkflowUpdateOptions(
update_stored_workflow_attributes=False, # taken care of above
from_tool_form=from_tool_form,
)
try:
workflow, errors = workflow_contents_manager.update_workflow_from_raw_description(
trans,
stored_workflow,
workflow_data,
workflow_update_options,
)
except MissingToolsException as e:
return dict(
name=e.workflow.name,
message=(
"This workflow includes missing or invalid tools. "
"It cannot be saved until the following steps are removed or the missing tools are enabled."
),
errors=e.errors,
)
return trans.security.encode_id(stored_workflow.id)
else:
# This is an error state, 'save as' must have a workflow_name
log.exception("Error in Save As workflow: no name.")
@web.expose
def delete(self, trans, id=None):
"""
Mark a workflow as deleted
"""
# Load workflow from database
stored = self.get_stored_workflow(trans, id)
# Mark as deleted and save
stored.deleted = True
trans.user.stored_workflow_menu_entries = [
entry for entry in trans.user.stored_workflow_menu_entries if entry.stored_workflow != stored
]
trans.sa_session.add(stored)
trans.sa_session.flush()
# Display the management page
message = f"Workflow deleted: {escape(stored.name)}"
trans.set_message(message)
return trans.response.send_redirect(f"{url_for('/')}workflow?status=done&message={escape(message)}")
@web.expose
@web.require_login("edit workflows")
def editor(self, trans, id=None, workflow_id=None, version=None):
"""
Render the main workflow editor interface. The canvas is embedded as
an iframe (necessary for scrolling to work properly), which is
rendered by `editor_canvas`.
"""
if not id:
if workflow_id:
stored_workflow = self.app.workflow_manager.get_stored_workflow(trans, workflow_id, by_stored_id=False)
self.security_check(trans, stored_workflow, True, False)
stored_workflow_id = trans.security.encode_id(stored_workflow.id)
return trans.response.send_redirect(f'{url_for("/")}workflow/editor?id={stored_workflow_id}')
error("Invalid workflow id")
stored = self.get_stored_workflow(trans, id)
# The following query loads all user-owned workflows,
        # so that they can be copied or inserted in the workflow editor.
workflows = (
trans.sa_session.query(model.StoredWorkflow)
.filter_by(user=trans.user, deleted=False, hidden=False)
.order_by(desc(model.StoredWorkflow.table.c.update_time))
.options(joinedload("latest_workflow").joinedload("steps"))
.all()
)
if version is None:
version = len(stored.workflows) - 1
else:
version = int(version)
# create workflow module models
module_sections = []
for module_section in load_module_sections(trans).values():
module_sections.append(
{
"title": module_section.get("title"),
"name": module_section.get("name"),
"elems": [
{"name": elem.get("name"), "title": elem.get("title"), "description": elem.get("description")}
for elem in module_section.get("modules")
],
}
)
# create data manager tool models
data_managers = []
if trans.user_is_admin and trans.app.data_managers.data_managers:
for data_manager_val in trans.app.data_managers.data_managers.values():
tool = data_manager_val.tool
if not tool.hidden:
data_managers.append(
{
"id": tool.id,
"name": tool.name,
"hidden": tool.hidden,
"description": tool.description,
"is_workflow_compatible": tool.is_workflow_compatible,
}
)
# create workflow models
workflows = [
{
"id": trans.security.encode_id(workflow.id),
"latest_id": trans.security.encode_id(workflow.latest_workflow.id),
"step_count": len(workflow.latest_workflow.steps),
"name": workflow.name,
}
for workflow in workflows
if workflow.id != stored.id
]
# identify item tags
item_tags = [tag for tag in stored.tags if tag.user == trans.user]
item_tag_names = []
for ta in item_tags:
item_tag_names.append(escape(ta.tag.name))
# build workflow editor model
editor_config = {
"id": trans.security.encode_id(stored.id),
"name": stored.name,
"tags": item_tag_names,
"initialVersion": version,
"annotation": self.get_item_annotation_str(trans.sa_session, trans.user, stored),
"moduleSections": module_sections,
"dataManagers": data_managers,
"workflows": workflows,
}
# parse to mako
return trans.fill_template("workflow/editor.mako", editor_config=editor_config)
@web.json
def load_workflow(self, trans, id, version=None):
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be read by the workflow editor
web interface.
"""
trans.workflow_building_mode = workflow_building_modes.ENABLED
stored = self.get_stored_workflow(trans, id, check_ownership=True, check_accessible=False)
workflow_contents_manager = self.app.workflow_contents_manager
return workflow_contents_manager.workflow_to_dict(trans, stored, style="editor", version=version)
@web.expose
@web.require_login("use workflows")
def export_to_myexp(self, trans, id, myexp_username, myexp_password):
"""
Exports a workflow to myExperiment website.
"""
trans.workflow_building_mode = workflow_building_modes.ENABLED
stored = self.get_stored_workflow(trans, id, check_ownership=False, check_accessible=True)
# Convert workflow to dict.
workflow_dict = self._workflow_to_dict(trans, stored)
#
# Create and submit workflow myExperiment request.
#
# Create workflow content JSON.
workflow_content = json.dumps(workflow_dict, indent=4, sort_keys=True)
# Create myExperiment request.
request_raw = trans.fill_template(
"workflow/myexp_export.mako",
workflow_name=workflow_dict["name"],
workflow_description=workflow_dict["annotation"],
workflow_content=workflow_content,
workflow_svg=self._workflow_to_svg_canvas(trans, stored).tostring(),
)
# strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
request = unicodify(request_raw.strip(), "utf-8")
# Do request and get result.
        auth_header = base64.b64encode(f"{myexp_username}:{myexp_password}".encode("utf-8")).decode("utf-8")
headers = {"Content-type": "text/xml", "Accept": "text/xml", "Authorization": f"Basic {auth_header}"}
myexp_url = trans.app.config.myexperiment_target_url
conn = HTTPConnection(myexp_url)
# NOTE: blocks web thread.
conn.request("POST", "/workflow.xml", request, headers)
response = conn.getresponse()
response_data = response.read()
conn.close()
# Do simple parse of response to see if export successful and provide user feedback.
parser = SingleTagContentsParser("id")
parser.feed(response_data)
myexp_workflow_id = parser.tag_content
workflow_list_str = f" <br>Return to <a href='{url_for(controller='workflows', action='list')}'>workflow list."
if myexp_workflow_id:
return trans.show_message(
"""Workflow '{}' successfully exported to myExperiment. <br/>
<a href="http://{}/workflows/{}">Click here to view the workflow on myExperiment</a> {}
""".format(
stored.name, myexp_url, myexp_workflow_id, workflow_list_str
),
use_panels=True,
)
else:
return trans.show_error_message(
"Workflow '%s' could not be exported to myExperiment. Error: %s %s"
% (stored.name, response_data, workflow_list_str),
use_panels=True,
)
@web.json_pretty
def for_direct_import(self, trans, id):
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
This has slightly different information than the above. In particular,
it does not attempt to decode forms and build UIs, it just stores
the raw state.
"""
stored = self.get_stored_workflow(trans, id, check_ownership=False, check_accessible=True)
return self._workflow_to_dict(trans, stored)
@web.json_pretty
def export_to_file(self, trans, id):
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
This has slightly different information than the above. In particular,
it does not attempt to decode forms and build UIs, it just stores
the raw state.
"""
# Get workflow.
stored = self.get_stored_workflow(trans, id, check_ownership=False, check_accessible=True)
# Stream workflow to file.
stored_dict = self._workflow_to_dict(trans, stored)
if not stored_dict:
# This workflow has a tool that's missing from the distribution
trans.response.status = 400
return "Workflow cannot be exported due to missing tools."
sname = stored.name
sname = "".join(c in FILENAME_VALID_CHARS and c or "_" for c in sname)[0:150]
trans.response.headers["Content-Disposition"] = f'attachment; filename="Galaxy-Workflow-{sname}.ga"'
trans.response.set_content_type("application/galaxy-archive")
return stored_dict
@web.expose
def build_from_current_history(
self,
trans,
job_ids=None,
dataset_ids=None,
dataset_collection_ids=None,
workflow_name=None,
dataset_names=None,
dataset_collection_names=None,
):
user = trans.get_user()
history = trans.get_history()
if not user:
return trans.show_error_message("Must be logged in to create workflows")
if (job_ids is None and dataset_ids is None) or workflow_name is None:
jobs, warnings = summarize(trans)
# Render
return trans.fill_template(
"workflow/build_from_current_history.mako", jobs=jobs, warnings=warnings, history=history
)
else:
# If there is just one dataset name selected or one dataset collection, these
# come through as string types instead of lists. xref #3247.
dataset_names = util.listify(dataset_names)
dataset_collection_names = util.listify(dataset_collection_names)
stored_workflow = extract_workflow(
trans,
user=user,
job_ids=job_ids,
dataset_ids=dataset_ids,
dataset_collection_ids=dataset_collection_ids,
workflow_name=workflow_name,
dataset_names=dataset_names,
dataset_collection_names=dataset_collection_names,
)
# Index page with message
workflow_id = trans.security.encode_id(stored_workflow.id)
return trans.show_message(
'Workflow "%s" created from current history. '
'You can <a href="%s" target="_parent">edit</a> or <a href="%s" target="_parent">run</a> the workflow.'
% (
escape(workflow_name),
url_for(controller="workflow", action="editor", id=workflow_id),
url_for(controller="workflows", action="run", id=workflow_id),
)
)
def get_item(self, trans, id):
return self.get_stored_workflow(trans, id)
def _workflow_to_svg_canvas(self, trans, stored):
workflow = stored.latest_workflow
workflow_canvas = WorkflowCanvas()
for step in workflow.steps:
# Load from database representation
module = module_factory.from_workflow_step(trans, step)
module_name = module.get_name()
module_data_inputs = module.get_data_inputs()
module_data_outputs = module.get_data_outputs()
workflow_canvas.populate_data_for_step(
step,
module_name,
module_data_inputs,
module_data_outputs,
)
workflow_canvas.add_steps()
return workflow_canvas.finish()
|
py | 1a44b4945767b27f44c0b9ed846e113c043f2a56 | from batou.component import Component
from batou.lib.appenv import AppEnv
from batou.lib.file import SyncDirectory, File
from batou.lib.supervisor import Program
from batou.utils import Address
class Django(Component):
def configure(self):
self.address = Address(self.host.fqdn, "8081")
self += AppEnv("3.8")
self += SyncDirectory("mysite", source="mysite")
self += File("foo", content="asdf\nbsdf\ncsdf")
self += Program(
"django",
command="bin/python",
deployment="cold",
options={"stopasgroup": "true"},
args=self.expand("mysite/manage.py runserver "
" {{component.address.listen}}"),
)
|
py | 1a44b4e2913b70e40d82ae350a650f8b18019949 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-03 20:38
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0061_auto_20201203_2338'),
]
operations = [
migrations.AlterField(
model_name='blog',
name='posted',
field=models.DateTimeField(db_index=True, default=datetime.datetime(2020, 12, 3, 23, 38, 49, 114243), verbose_name='Опубликована'),
),
migrations.AlterField(
model_name='catalog',
name='date',
field=models.DateTimeField(db_index=True, default=datetime.datetime(2020, 12, 3, 23, 38, 49, 115243), verbose_name='Дата'),
),
migrations.AlterField(
model_name='comment',
name='date',
field=models.DateTimeField(db_index=True, default=datetime.datetime(2020, 12, 3, 23, 38, 49, 115243), verbose_name='Дата'),
),
]
|
py | 1a44b7adef9d84728b8eb01201767c597d8f732e | from setuptools import setup
import os.path
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md')) as rdr:
long_description = rdr.read()
setup(name='pymonkey',
version='0.1.0',
description='Monkey interpreter',
long_description=long_description,
url='http://github.com/adamvinueza/pymonkey',
author='Adam Vinueza',
author_email='[email protected]',
license='Apache 2.0',
packages=['pymonkey'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
zip_safe=False)
|
py | 1a44b80bf8c87d17613b239fc4d7b36acb660985 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/9/21 14:06
# @Author : ganliang
# @File : ndcopy.py
# @Desc : data copying
import numpy as np
a = np.arange(10)
b = np.copy(a)
print ("修改之前:")
print (a)
print (id(a))
print (b)
print (id(b))
a[0] = 100
print ("修改之后:")
print (a)
print (id(a))
print (b)
print (id(b))
c=a.reshape(5, 2)
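# Quick check (assumes numpy >= 1.11): np.copy() produces an independent copy,
# while reshape() returns a view that shares the same underlying buffer as `a`.
print ("a and b share memory:", np.shares_memory(a, b))  # expected: False
print ("a and c share memory:", np.shares_memory(a, c))  # expected: True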
print (c) |
py | 1a44b82c8c71fcfa938840c8efd77c045fe3a0cc | import json
import jsonpickle
from decimal import Decimal
from flask import Blueprint
from farmsList.public.models import Parcel, Farmland, AdditionalLayer
from farmsList.database import db
from sqlalchemy import func
blueprint = Blueprint('api', __name__, url_prefix='/api',
static_folder="../static")
def pre_json_encode(obj):
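    """Prepare a SQLAlchemy model instance for JSON encoding.

    Decimal attributes are converted to float and the non-serializable
    _sa_instance_state reference is cleared so jsonpickle can flatten the object.
    """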
for key in obj.__dict__.keys():
if isinstance(obj.__dict__[key], Decimal):
obj.__dict__[key] = float(obj.__dict__[key])
obj.__dict__['_sa_instance_state'] = None
return obj
@blueprint.route("/parcel/", methods=["GET", "POST"])
def api_parcel():
farmlandData = Farmland.query.filter(Farmland.public == True).all()
for farmland in farmlandData:
farmland.geometry = db.session.query(func.ST_AsGeoJson(farmland.geometry)).all()[0][0]
db.session.close()
farmland.center = db.session.query(func.ST_AsGeoJson(farmland.center)).all()[0][0]
db.session.close()
farmland.center = json.loads(str(farmland.center))
farmland = pre_json_encode(farmland)
return jsonpickle.encode(farmlandData, unpicklable=False, make_refs=False)
@blueprint.route("/farmland/<int:farmlandId>", methods=["GET", "POST"])
def api_farmland_by_id(farmlandId):
farmlandData = Farmland.query.filter_by(id=farmlandId).all()[0]
farmlandData.center = db.session.query(func.ST_AsGeoJson(farmlandData.center)).all()[0][0]
db.session.close()
farmlandData.geometry = db.session.query(func.ST_AsGeoJson(farmlandData.geometry)).all()[0][0]
db.session.close()
farmlandData.center = json.loads(str(farmlandData.center))
farmlandData = pre_json_encode(farmlandData)
return jsonpickle.encode(farmlandData, unpicklable=False, make_refs=False)
@blueprint.route("/tax-incentive-zones", methods=["GET"])
def tax_incentive_zones():
taxIncentiveZones = AdditionalLayer.query.filter_by(name="taxIncentive").all()
for taxIncentiveZone in taxIncentiveZones:
taxIncentiveZone.geometry = db.session.query(func.ST_AsGeoJson(taxIncentiveZone.geom)).all()[0][0]
db.session.close()
return jsonpickle.encode(taxIncentiveZones, unpicklable=False, make_refs=False)
@blueprint.route("/food-deserts", methods=["GET"])
def food_desert_zones():
foodDeserts = AdditionalLayer.query.filter_by(name="foodDesert").all()
for taxIncentiveZone in foodDeserts:
taxIncentiveZone.geometry = db.session.query(func.ST_AsGeoJson(taxIncentiveZone.geom)).all()[0][0]
db.session.close()
return jsonpickle.encode(foodDeserts, unpicklable=False, make_refs=False)
|
py | 1a44b83a53d937116cf8c4bae65f89a44a4e8acd | import requests
import json
import time
import logging
log = logging.getLogger(__name__)
sh = logging.StreamHandler()
log.addHandler(sh)
log.setLevel(logging.INFO)
from nose.tools import with_setup
import pymongo
from bson.objectid import ObjectId
db = pymongo.MongoClient('mongodb://localhost:9001/scitran').get_default_database()
adm_user = '[email protected]'
base_url = 'http://localhost:8080/api'
test_data = type('',(object,),{})()
def setup_db():
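    """Create a test group, project, session and acquisition through the API."""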
global session
session = requests.Session()
session.params = {
'user': adm_user,
'root': True
}
test_data.group_id = 'test_group_' + str(int(time.time()*1000))
payload = {
'_id': test_data.group_id
}
payload = json.dumps(payload)
r = session.post(base_url + '/groups', data=payload)
assert r.ok
payload = {
'group': test_data.group_id,
'label': 'test_project',
'public': False
}
payload = json.dumps(payload)
r = session.post(base_url + '/projects', data=payload)
test_data.pid = json.loads(r.content)['_id']
assert r.ok
log.debug('pid = \'{}\''.format(test_data.pid))
payload = {
'project': test_data.pid,
'label': 'session_testing',
'public': False
}
payload = json.dumps(payload)
r = session.post(base_url + '/sessions', data=payload)
assert r.ok
test_data.sid = json.loads(r.content)['_id']
log.debug('sid = \'{}\''.format(test_data.sid))
payload = {
'session': test_data.sid,
'label': 'acq_testing',
'public': False
}
payload = json.dumps(payload)
r = session.post(base_url + '/acquisitions', data=payload)
assert r.ok
test_data.aid = json.loads(r.content)['_id']
log.debug('aid = \'{}\''.format(test_data.aid))
def teardown_db():
session.params['root'] = True
r = session.delete(base_url + '/acquisitions/' + test_data.aid)
assert r.ok
r = session.delete(base_url + '/sessions/' + test_data.sid)
assert r.ok
r = session.delete(base_url + '/projects/' + test_data.pid)
assert r.ok
@with_setup(setup_db, teardown_db)
def test_collections():
payload = {
'curator': adm_user,
'label': 'test_collection_'+ str(int(time.time())) ,
'public': False
}
session.params['root'] = False
r = session.post(base_url + '/collections', data=json.dumps(payload))
assert r.ok
_id = json.loads(r.content)['_id']
log.debug('_id = \'{}\''.format(_id))
r = session.get(base_url + '/collections/' + _id)
assert r.ok
payload = {
'contents':{
'nodes':
[{
'level': 'session',
'_id': test_data.sid
}],
'operation': 'add'
}
}
r = session.put(base_url + '/collections/' + _id, data=json.dumps(payload))
assert r.ok
r = session.get(base_url + '/collections/' + _id + '/acquisitions?session=' + test_data.sid)
assert r.ok
coll_acq_id= json.loads(r.content)[0]['_id']
assert coll_acq_id == test_data.aid
acq_ids = [ObjectId(test_data.aid)]
acs = db.acquisitions.find({'_id': {'$in': acq_ids}})
for ac in acs:
assert len(ac['collections']) == 1
assert ac['collections'][0] == ObjectId(_id)
r = session.delete(base_url + '/collections/' + _id)
assert r.ok
r = session.get(base_url + '/collections/' + _id)
assert r.status_code == 404
acs = db.acquisitions.find({'_id': {'$in': acq_ids}})
for ac in acs:
assert len(ac['collections']) == 0
|
py | 1a44b898cf11c0ee32cee389b2f392dbe846f27d | """empty message
Revision ID: 65edcc47e4ed
Revises: c9d6313461dd
Create Date: 2020-05-24 17:13:03.346660
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '65edcc47e4ed'
down_revision = 'c9d6313461dd'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('confirmed', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'confirmed')
# ### end Alembic commands ###
|
py | 1a44b8e9117ce39a6ac492cc1beba9f0e8937853 | import paho.mqtt.client as mqtt
from time import sleep
import random
broker="test.mosquitto.org"
topic_pub='v1/devices/me/telemetry'
client = mqtt.Client()
client.username_pw_set("$WIND_TURBINE_2_ACCESS_TOKEN")
client.connect('127.0.0.1', 1883, 1)
while True:
x = random.randrange(45, 51)
    print(x)
msg = '{"windSpeed":"'+ str(x) + '"}'
client.publish(topic_pub, msg)
sleep(0.1) |
py | 1a44ba292131b8243665ec34f42497f8da5a5f71 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Project: Tesis Lali
Experiment: Visual Field -> Perimetria
Created on Sun Feb 3 11:29:49 2019
@author: Aitor Matilla
Change history
version 2.0.0 method change: from inside out and from outside in (2 passes) instead of
version 2.0.1 renamed to Perimetria, added time stamp + version info inside the results
version 2.0.2 a perimetry results file with a fixed name must be created so that movilab can build the mask; two result files are created
ATTENTION: the version number moves forward, but for the glare-meter PC it is convenient that the file name is always the same, so from now on the file name does not indicate the version
3 passes from inside out.
"""
from tkinter import *
from tkinter import messagebox
import time
import datetime
import math
import shutil
CVVersion = "Perimetria Movilab v 2.0.2"
# First read from the file the size of the dot to show on screen
file = open('..\DotSize.txt', 'r')
if file.mode != 'r' :
    print ('Error reading DotSize.txt')
exit()
dotSize = file.readline().rstrip()
idPatient = file.readline().rstrip()
doRightEyeFile = file.readline().rstrip()
doLeftEyeFile = file.readline().rstrip()
doBothEyesFile = file.readline().rstrip()
nIterationsFile = file.readline().rstrip()
crossLineFile = file.readline().rstrip()
file.close()
# Create/open the file where the experiment data will be written
sep=';'
endl= '\n'
timetag = datetime.datetime.now().strftime("_%Y%m%d_%H%M%S")
resultsFilename = "..\Resultats\CampVisual_Resultats"+ timetag +".txt"
file = open(resultsFilename, 'w')
if file.mode != 'w' :
print ('Error reading ' + resultsFilename)
exit()
#file.write('-------------------------------------\n')
#file.write(' New experiment \n')
#file.write('-------------------------------------\n')
strAux = "Version" + sep + CVVersion + endl
file.write(strAux)
strAux = 'Initial Date & Time:' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + sep + endl
file.write(strAux)
strAux = 'Dot size' + sep + dotSize + sep + sep +sep + sep + sep+ endl
file.write(strAux)
strAux = 'ID patient' + sep + idPatient + endl
file.write(strAux)
file.close()
# Configuration variables
backgroundColor = "black" #"darkgray"
focusColor = "white"
centralColor = "orange"
dotSpeed = 1 #segons
# Execution flow variables
deg90 = 1
deg45 = 2
deg0 = 3
deg315 = 4
deg270 = 5
deg225 = 6
deg180 = 7
deg135 = 8
experimentDone = 9
currentStep = deg90
#numberOfLoops = int(nIterationsFile)
numberOfLoops = 2
eyeLeft=1
eyeRight=2
eyeBoth=3
applyEyeLeft=int(doLeftEyeFile)
applyEyeRight=int(doRightEyeFile)
applyEyeBoth=int(doBothEyesFile)
currentEye=eyeRight
currentLoop = 0
# numberKeysPressed must be zero so that the 1st loop goes from inside out
numberKeysPressed = 0
maxKeysPressed = 0
GoUp=100
GoDown=101
GoEnter=102
aStr=''
bStr=''
cStr=''
dStr=''
eStr=''
fStr=''
gStr=''
hStr=''
if (applyEyeRight == 0) and applyEyeLeft == 1:
currentEye = eyeLeft
elif applyEyeRight == 0 and applyEyeBoth == 1:
currentEye = eyeBoth
# Get the monitor resolution to position the center
#OS dependent TODO
wMonitorRes = 1920
hMonitorRes = 1080
xPosition = 0
yPosition = 0
def writeToFile (msg):
global resultsFilename
file = open(resultsFilename, 'a')
file.write (msg + '\n')
file.close()
#Key press action
def kp(event):
#if event.keysym == 'Return':
#verticalAxis(w, int(xLine/2), int(yLine/2), dotSizeNumber, 1)
if event.keysym == 'Up':
newMovement(w, GoUp)
elif event.keysym == 'Down':
newMovement(w, GoDown)
    elif event.keysym == 'space':
# spaceAction = 1
print ("space")
elif event.keysym == 'Return':
newMovement(w, GoEnter)
experimentProcedure()
elif event.keysym == 'Escape':
writeToFile('Experiment closed')
master.destroy()
# Draw the stimulus rectangle
def cursorRect(canvas, xPos, yPos, size, on) :
#print (xPos,yPos)
color = focusColor
if on == 0:
color = backgroundColor
elif on == 2:
color = centralColor
canvas.create_rectangle(xPos, yPos, xPos+size, yPos+size, fill=color, outline=focusColor)
# Draw the grid
def checkered(canvas, line_distance):
# vertical lines at an interval of "line_distance" pixel
for x in range(line_distance,canvas_width,line_distance):
canvas.create_line(x, 0, x, canvas_height, fill=focusColor)
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance,canvas_height,line_distance):
canvas.create_line(0, y, canvas_width, y, fill=focusColor)
# cross line if necessary
if (int(crossLineFile) == 1):
init_w = int(canvas_width/dotSizeNumber)*dotSizeNumber
print (init_w)
canvas.create_line (0,0,xDotCenter*2, yDotCenter*2, fill=focusColor)
canvas.create_line (init_w,0,0,yDotCenter*2, fill=focusColor)
#New movement to do
def newMovement(canvas, direction):
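    """Move the stimulus square one grid cell along the axis given by currentStep.

    direction is GoUp, GoDown or GoEnter; the previously shown cell is erased so
    that only one stimulus square is visible at a time.
    """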
global xLine
global yLine
#global isUpPressed
global numberKeysPressed
global dotSize
global currentStep
global xPosition
global yPosition
previousOffsetX = 0
previousOffsetY = 0
xLine1 = int(xLine/2)
yLine1 = int(yLine/2)
xLineCurrent = xLine1
yLineCurrent = yLine1
#print("newMovement (1) numberKeysPressed",numberKeysPressed)
prevnumberKeysPressed = numberKeysPressed
#if numberKeysPressed == 0 and direction == GoDown:
if currentLoop==0 and numberKeysPressed == 0 and direction == GoDown:
print("can't go close to center")
return
if currentLoop==1 and numberKeysPressed == maxKeysPressed and direction == GoUp:
print("can't go far from center")
return
if currentStep == deg90:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = 1
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = -1
yLine1 = yLine1+numberKeysPressed
yLineCurrent = yLine1
previousOffsetX = 0
elif currentStep == deg45:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = 1
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = -1
previousOffsetX = 2
yLine1 = yLine1+numberKeysPressed
yLineCurrent = yLine1
xLine1 = xLine1-numberKeysPressed
xLineCurrent = xLine1-1
elif currentStep == deg0:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = 0
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = 0
previousOffsetX = 2
xLine1 = xLine1-numberKeysPressed
xLineCurrent = xLine1-1
elif currentStep == deg315:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = -1
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = 1
previousOffsetX = 2
yLine1 = yLine1-numberKeysPressed-1+1
yLineCurrent = yLine1
xLine1 = xLine1-numberKeysPressed
xLineCurrent = xLine1-1
elif currentStep == deg270:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = -1
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = 1
yLine1 = yLine1-numberKeysPressed+1
yLineCurrent = yLine1-1
previousOffsetX = 0
elif currentStep == deg225:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = -1
previousOffsetX = 2
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = 1
previousOffsetX = 0
yLine1 = yLine1-numberKeysPressed-1+1
yLineCurrent = yLine1
xLine1 = xLine1+numberKeysPressed
xLineCurrent = xLine1-1
elif currentStep == deg180:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetX = 2
previousOffsetY = 0
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = 0
previousOffsetX = 0
xLine1 = xLine1+numberKeysPressed
xLineCurrent = xLine1-1
elif currentStep == deg135:
if direction == GoUp:
numberKeysPressed = numberKeysPressed -1
previousOffsetY = 1
previousOffsetX = 2
elif direction == GoDown:
numberKeysPressed = numberKeysPressed +1
previousOffsetY = -1
yLine1 = yLine1+numberKeysPressed
yLineCurrent = yLine1
xLine1 = xLine1+numberKeysPressed
xLineCurrent = xLine1-1
#print("newMovement (2) numberKeysPressed",numberKeysPressed,"xLine1",xLine1,"yLine1",yLine1)
xNextValue = (int)(xLine1-xDotPosition)
yNextValue = (int)(yLineCurrent-yDotPosition)
xNextPosition = xLine1 * dotSizeNumber
yNextPosition = yLineCurrent * dotSizeNumber
#print ('xNextPosition:',xNextPosition,',',yNextPosition,' yNextPosition')
if (xNextValue == 0 and yNextValue ==0) or (xNextPosition < 0) or (xNextPosition > wMonitorRes) or (yNextPosition < 0) or (yNextPosition > hMonitorRes):
numberKeysPressed = prevnumberKeysPressed;
return
xPosition = xLine1 * dotSizeNumber
yPosition = yLineCurrent * dotSizeNumber
xPrevPosition = (xLineCurrent+previousOffsetX) * dotSizeNumber
yPrevPosition = (yLineCurrent+previousOffsetY) * dotSizeNumber
xPrevValue = xLineCurrent+previousOffsetX-xDotPosition
yPrevValue = yLineCurrent+previousOffsetY-yDotPosition
#print ('curr:',xPosition,',',yPosition,' xLine1',xNextValue,' yCurrentLine',yNextValue)
focus=1
if direction == GoEnter:
focus = 0
cursorRect(canvas, xPosition, yPosition, dotSizeNumber, focus)
if xPrevValue != 0 or yPrevValue != 0:
cursorRect(canvas, xPrevPosition, yPrevPosition, dotSizeNumber, 0)
#print ('pre:',xPrevPosition,',',yPrevPosition)
master.update()
def clearCoords():
global aStr
global bStr
global cStr
global dStr
global eStr
global fStr
global gStr
global hStr
aStr=''
bStr=''
cStr=''
dStr=''
eStr=''
fStr=''
gStr=''
hStr=''
def writeCoords2Disk():
global aStr
global bStr
global cStr
global dStr
global eStr
global fStr
global gStr
global hStr
writeToFile ('A'+aStr)
writeToFile ('B'+bStr)
writeToFile ('C'+cStr)
writeToFile ('D'+dStr)
writeToFile ('E'+eStr)
writeToFile ('F'+fStr)
writeToFile ('G'+gStr)
writeToFile ('H'+hStr)
writeToFile ('Abis'+aStr)
# Drives the experiment procedure
def experimentProcedure():
global currentStep
global currentLoop
global numberKeysPressed
global xPosition
global yPosition
global currentEye
global applyEyeRight
global applyEyeBoth
global aStr
global bStr
global cStr
global dStr
global eStr
global fStr
global gStr
global hStr
xPosFile = (xPosition / dotSizeNumber) - xDotPosition
yPosFile = (yPosition / dotSizeNumber) - yDotPosition
    # Save info to disk
if currentStep == deg90:
aStr = aStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg45:
bStr = bStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg0:
cStr = cStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg315:
dStr = dStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg270:
eStr = eStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg225:
fStr = fStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg180:
gStr = gStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
elif currentStep == deg135:
hStr = hStr+sep+'%d'%xPosFile+sep+'%d'%yPosFile
#writeToFile ('XPosition = %d'%xPosition + ' YPosition = %d'%yPosition)
currentStep = currentStep + 1
if currentStep == experimentDone:
print("experimentProcedure -> experimentDone (iteration)")
currentLoop = currentLoop +1
print ('currentEye:',currentEye,', currentLoop',currentLoop)
        # prepare the start of the next loop
currentStep = deg90
maxKeysPressed = -1*yDotPosition-1
if currentLoop ==0:
numberKeysPressed = 0
else:
numberKeysPressed = maxKeysPressed
        # if this is the last iteration for the current eye
if currentLoop == numberOfLoops:
if ((currentEye == eyeRight and applyEyeLeft != 1 and applyEyeBoth != 1) or
(currentEye == eyeLeft and applyEyeBoth != 1) or
(currentEye == eyeBoth)):
                # end of the experiment
messagebox.showinfo("Perimetria", "Experiment done")
                # write the result for each axis and each iteration
writeCoords2Disk()
                # close the results file
writeToFile('Experiment Done at '+ datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
                # copy the file so the mask can be built from movilab
resultsFilename_mask = "..\Resultats\CampVisual_Resultats.txt"
shutil.copy(resultsFilename, resultsFilename_mask)
return
elif (currentEye==eyeRight and applyEyeLeft==1):
                # write the result for each axis and each iteration
writeCoords2Disk()
                # prepare for the next eye
clearCoords()
writeToFile('Eye'+sep+'Left'+sep+sep+sep)
writeToFile('Direction'+sep+'Col1'+sep+'Row1'+sep+'Col2'+sep+'Row2')
currentEye=eyeLeft
currentLoop=0
messagebox.showinfo("Perimetria", "Eye: Right")
elif ((currentEye==eyeRight and applyEyeBoth==1) or
(currentEye==eyeLeft and applyEyeBoth==1)):
                # write the result for each axis and each iteration
writeCoords2Disk()
                # prepare for the next eye
clearCoords()
writeToFile('Eye'+sep+'Both'+sep+sep+sep)
writeToFile('Direction'+sep+'Col1'+sep+'Row1'+sep+'Col2'+sep+'Row2')
currentEye=eyeBoth
currentLoop=0
messagebox.showinfo("Perimetria", "Eye: Both")
else:
print("experimentProcedure -> next step (axis)")
        # numberKeysPressed must be zero so that the 1st loop goes from inside out
if currentLoop ==0:
numberKeysPressed = 0
else:
numberKeysPressed = 0
if currentStep == deg90:
maxKeysPressed = -1*yDotPosition-1
elif currentStep == deg45:
maxKeysPressed = -1*yDotPosition-1
elif currentStep == deg0:
maxKeysPressed = -1*xDotPosition-1
elif currentStep == deg315:
maxKeysPressed = -1*yDotPosition-1
elif currentStep == deg270:
maxKeysPressed = -1*yDotPosition-1
elif currentStep == deg225:
maxKeysPressed = -1*yDotPosition-1
elif currentStep == deg180:
maxKeysPressed = -1*xDotPosition-1
elif currentStep == deg135:
maxKeysPressed = -1*yDotPosition-1
numberKeysPressed = maxKeysPressed
# Build the GUI
master = Tk()
master.title('Experiment: Perimetria')
master.bind_all('<KeyPress>', kp)
canvas_width = wMonitorRes
canvas_height = hMonitorRes
w = Canvas(master,
width=canvas_width,
height=canvas_height,
bg=backgroundColor)
w.pack()
dotSizeNumber = int(dotSize)
# Draw the central fixation dot
xLine = wMonitorRes / dotSizeNumber
yLine = hMonitorRes / dotSizeNumber
cursorRect (w,int(xLine/2)*dotSizeNumber, int(yLine/2)*dotSizeNumber, dotSizeNumber, 2)
xDotCenter = int(xLine/2)*dotSizeNumber+(dotSizeNumber/2)
yDotCenter = int(yLine/2)*dotSizeNumber+(dotSizeNumber/2)
xDotPosition = (int)(xLine/2)
yDotPosition = (int)(yLine/2)
# Draw the grid
checkered(w,dotSizeNumber)
# initial value for the 90deg axis
maxKeysPressed = -1*yDotPosition-1
#Initial message
strs='Left'
if currentEye==eyeRight:
strs='Right'
elif currentEye==eyeBoth:
strs ='Both'
# Write the header to the results file
messagebox.showinfo("Perimetria", 'Eye:'+strs)
writeToFile('Eye'+sep+strs+sep+sep+sep+sep+sep)
writeToFile('Direction'+sep+'Col1'+sep+'Row1'+sep+'Col2'+sep+'Row2')
mainloop()
|
py | 1a44ba47cfb15f6c9c04fac0c84d1b31e7d839a6 | from django.db import models
class BankAccountQuerySet(models.QuerySet):
def action_detailed_list(self):
return self.select_related("bank", ).order_by('id')
def action_detailed(self):
return self.select_related("bank", ).order_by('id')
|
py | 1a44ba57f2bb860c7676fa08aa291e2bee84e267 | #!/usr/bin/env python
__author__ = "XXX"
__email__ = "XXX"
import pytest
import pandas as pd
@pytest.fixture()
def dummy_dataset():
test_lst_ratings = [
{"userId": 156, "movieId": 1, "rating": 5.0, "timestamp": 1037739266},
{"userId": 156, "movieId": 2, "rating": 5.0, "timestamp": 1040937649},
{"userId": 156, "movieId": 4, "rating": 3.0, "timestamp": 1038801803},
{"userId": 156, "movieId": 5, "rating": 3.0, "timestamp": 1040944583},
{"userId": 156, "movieId": 6, "rating": 4.0, "timestamp": 1037822117},
]
test_df_ratings = pd.DataFrame(test_lst_ratings)
return test_df_ratings
def test_long_tail_plot(dummy_dataset):
from datasets.dataset_statistics import long_tail_plot
# GIVEN long_tail_plot inputs
# WHEN plots.long_tail_plot is run
    # todo: understand how to run test without generating plot
# long_tail_plot(
# df=dummy_dataset,
# item_id_column="movieId",
# interaction_type="movie ratings",
# percentage=0.5,
# x_labels=False,
# )
# THEN validate if plot is called
assert True
def test_get_dataset_stats(dummy_train_data):
from datasets.dataset_statistics import get_dataset_stats
dataset_stats = get_dataset_stats(dummy_train_data)
assert dataset_stats["# users"] == 6
assert dataset_stats["# items"] == 5
assert dataset_stats["# interactions"] == 9
assert dataset_stats["density"] == 9/30
|
py | 1a44bbf15cf6e2e73d47587a73761e3f4c5e0940 | from .annospan import AnnoSpan, SpanGroup
from .utils import flatten, merge_dicts
class MetaSpan(AnnoSpan):
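    """An AnnoSpan (or spaCy token) wrapper that carries a metadata dictionary."""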
def __init__(self, span=None, start=None, end=None, doc=None, metadata={}):
if span is None:
self.start = start
self.end = end
self.doc = doc
elif isinstance(span, AnnoSpan):
self.start = span.start
self.end = span.end
self.doc = span.doc
else: # We assume that span is a spaCy token
self.start = span.idx
self.end = span.idx + len(span)
self.token = span
self.doc = doc
self.label = self.text
self._metadata = span.metadata if isinstance(span, MetaSpan) else metadata
def __repr__(self):
return "MetaSpan(start={}, end={}, doc={}, metadata={})".format(self.start,
self.end,
self.doc,
self.metadata)
def __str__(self):
return "{}-{}: '{}' {}".format(self.start, self.end, self.text, self.metadata)
def to_dict(self):
result = super(MetaSpan, self).to_dict()
result.update(self.metadata)
result['text'] = self.text
return result
@property
def metadata(self):
return self._metadata
@metadata.setter
def metadata(self, value):
self._metadata = value
@metadata.deleter
def metadata(self):
del self._metadata
def update_metadata(self, metagen, *args, **kwargs):
result = metagen.generate(self, *args, **kwargs)
if isinstance(result, dict):
self._metadata = merge_dicts([result, self._metadata], unique=True)
return self.metadata
@property
def tokens(self):
tokens_tier = self.doc.tiers["spacy.tokens"]
tokens = [t.token for t in tokens_tier.spans_contained_by_span(self)]
return(tokens)
class MetaGroup(MetaSpan, SpanGroup):
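    """A span made of several base spans whose metadata are merged together."""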
def __init__(self, base_spans, label=None):
assert isinstance(base_spans, list)
assert len(base_spans) > 0
self.base_spans = [MetaSpan(span) for span in base_spans]
self.doc = base_spans[0].doc
self._label = label
self._metadata = {}
def __repr__(self):
return "MetaGroup(start={}, end={}, doc={}, metadata={})".format(self.start,
self.end,
self.doc,
self.metadata)
def __str__(self):
text = "merged text and metadata:\n {}-{}: '{}'\n {}".format(self.start,
self.end,
self.text,
self.metadata)
text += "\ngroup metadata:\n {}".format(self._metadata)
text += "\nbase text and metadata:"
for span in self.iterate_base_spans():
text += "\n {}-{}: '{}' {}".format(span.start,
span.end,
span.text,
span.metadata)
return(text)
def __iter__(self):
return(iter(flatten(self.base_spans)))
# def __next__(self):
# for span in flatten(self.base_spans):
# return span
# raise StopIteration
@property
def start(self):
return(min([s.start for s in self.base_spans]))
@property
def end(self):
return(max([s.end for s in self.base_spans]))
@property
def text(self):
return self.doc.text[self.start:self.end]
@property
def label(self):
if self._label is None:
return(self.text)
else:
return(self._label)
@property
def metadata(self, **kwargs):
metadata_list = [self._metadata] + [s.metadata for s in self.iterate_base_spans()]
metadata = merge_dicts(metadata_list, unique=True, **kwargs)
return(metadata)
def update_group_metadata(self, metagen, *args, **kwargs):
result = metagen.generate(self, *args, **kwargs)
if isinstance(result, dict):
self._metadata = merge_dicts([result, self._metadata], unique=True)
return self.metadata
def update_base_span_metadata(self, metagen, *args, **kwargs):
for span in self.iterate_base_spans():
span.update_metadata(metagen, *args, **kwargs)
return self.metadata
# I could be convinced that either way is better on this.
def update_metadata(self, metagen, *args, **kwargs):
self.update_base_span_metadata(metagen, *args, **kwargs)
# self.update_group_metadata(metagen, *args, **kwargs)
@property
def tokens(self):
tokens_tier = self.doc.tiers["spacy.tokens"]
tokens = []
for span in self.iterate_base_spans():
tokens.append([t.token for t in tokens_tier.spans_contained_by_span(span)])
tokens = flatten(tokens)
return(tokens)
def append(self, spans):
if isinstance(spans, AnnoSpan):
self.base_spans.append(spans)
elif isinstance(spans, list):
self.base_spans.extend(spans)
|
py | 1a44bc840b8ff95d165623020694b07b6448473d | from conans import ConanFile, CMake
class ValuePtrLiteConan(ConanFile):
version = "0.2.1"
name = "value-ptr-lite"
description = "A C++ smart-pointer with value semantics for C++98, C++11 and later"
license = "Boost Software License - Version 1.0. http://www.boost.org/LICENSE_1_0.txt"
url = "https://github.com/martinmoene/value-ptr-lite.git"
exports_sources = "include/nonstd/*", "CMakeLists.txt", "cmake/*", "LICENSE.txt"
settings = "compiler", "build_type", "arch"
build_policy = "missing"
author = "Martin Moene"
def build(self):
"""Avoid warning on build step"""
pass
def package(self):
"""Run CMake install"""
cmake = CMake(self)
cmake.definitions["VALUE_PTR_LITE_OPT_BUILD_TESTS"] = "OFF"
cmake.definitions["VALUE_PTR_LITE_OPT_BUILD_EXAMPLES"] = "OFF"
cmake.configure()
cmake.install()
def package_info(self):
self.info.header_only()
|
py | 1a44bd066a1fdea54447911e6e340f3cdd4bbe4e | from app import main
main() |
py | 1a44bd295ba375fdf75c265802bc5f1474338943 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import pandas as pd
import requests
UTAHAQ_API_BASE_URI = 'http://meso2.chpc.utah.edu/aq/cgi-bin/download_mobile_archive.cgi'
UTAHAQ_API_TOKEN = os.getenv('UTAHAQ_API_TOKEN')
def _utahaq_batch_get(stid: str,
yr: int,
mo: int,
datatype: str) -> pd.DataFrame:
"""Queries UtahAQ API endpoint for single month of data
For API reference, see
http://utahaq.chpc.utah.edu/aq/cgi-bin/mobile_archive.cgi
Args:
stid: unique station identifier
yr: year desired
mo: month desired
datatype: measurement dataset identifier, see reference
Returns:
pd.DataFrame: flattened time, stid, lat/lon, and relevant readings
"""
yr = str(yr).zfill(4)
mo = str(mo).zfill(2)
stid = stid.upper()
datatype = datatype.lower()
uri = (
f'{UTAHAQ_API_BASE_URI}'
f'?accesskey={UTAHAQ_API_TOKEN}'
f'&stid={stid}'
f'&yr={yr}'
f'&mo={mo}'
f'&datatype={datatype}'
)
try:
res = pd.read_csv(uri, skiprows=True)
except pd.errors.EmptyDataError:
return None
res = res[res.esampler_error_code == 0]
res.index = pd.to_datetime(res.Date + ' ' + res.TimeUTC, utc=True)
res = res.rename(columns={
'esampler_pm25_ugm3': 'pm25_ugm3',
'esampler_rh_pcent': 'rh_pct'
})
return res[['pm25_ugm3', 'rh_pct']]
def utahaq_api_get(stid: list,
start: pd.Timestamp,
end: pd.Timestamp,
datatype: list) -> pd.DataFrame:
"""Returns `pd.DataFrame` containing observations
For API reference, see
http://utahaq.chpc.utah.edu/aq/cgi-bin/mobile_archive.cgi
Args:
stid: unique station identifier
start: start timestamp for returned data
end: end timestamp for returned data
datatype: measurement dataset identifier, see reference
Returns:
pd.DataFrame: flattened time, stid, lat/lon, and relevant readings
Examples:
>>> utahaq_api_get(
'hawth',
pd.Timestamp('2019-01-02 00:00:00'),
pd.Timestamp('2019-01-02 00:00:30'),
'pm'
)
pm25_ugm3 rh_pct
2019-01-02 00:00:00+00:00 3.0 28.0
2019-01-02 00:00:10+00:00 3.0 28.0
2019-01-02 00:00:20+00:00 3.0 28.0
2019-01-02 00:00:30+00:00 2.0 28.0
"""
query_dates = pd.date_range(start=start, end=end, freq='MS')
if len(query_dates) == 0:
query_dates = [start]
df_list = []
for date in query_dates:
df_list.append(
_utahaq_batch_get(
stid=stid,
yr=date.year,
mo=date.month,
datatype=datatype
)
)
df = pd.concat(df_list)
return df[(df.index >= start) & (df.index <= end)]
|
py | 1a44bdc08ceabb0e76f1cd9d6b06068ba8b493f8 | #!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS = 512
MAX_SEEDS_PER_ASN = 2
MIN_BLOCKS = 337600
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
"130.211.129.106", "178.63.107.226",
"83.81.130.26", "88.198.17.7", "148.251.238.178", "176.9.46.6",
"54.173.72.127", "54.174.10.182", "54.183.64.54", "54.194.231.211",
"54.66.214.167", "54.66.220.137", "54.67.33.14", "54.77.251.214",
"54.94.195.96", "54.94.200.247"
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(
r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(
r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
# Used to only select nodes with a user agent string compatible with the
# BCC/UAHF specification.
PATTERN_AGENT = re.compile(
r"^(/BitcoinABC:0.15.(\d+)\(\S+\)/|/BitcoinXT:0.11.0G\(\S+\)/|/BUCash:1.1.(\d+)\(\S+\)/|/Classic:1.3.(\d+)\(\S+\)/)")
def parseline(line):
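    """Parse one line of the seeder dump into a dict of node attributes.

    Returns None for lines that are malformed or filtered out by the sanity checks.
    """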
sline = line.split()
if len(sline) < 11:
return None
# All BCC clients apart BU and Classic has a space in the useragent string
if len(sline) == 13:
sline[11] = sline[11] + sline[12]
if len(sline) == 14:
sline[11] = sline[11] + sline[12] + sline[13]
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0, 4):
if int(m.group(i + 2)) < 0 or int(m.group(i + 2)) > 255:
return None
ip = ip + (int(m.group(i + 2)) << (8 * (3 - i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(ips):
'''Filter out hosts with more nodes per IP'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key, value) in list(hist.items()) if len(value) == 1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
# Sift out ips by type
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv4 by ASN
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text()
for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
        except Exception:
sys.stderr.write(
'ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
    # Keep only entries with a valid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
# TODO set it back to 50% once nodes will have enough uptime.
ips = [ip for ip in ips if ip['uptime'] > 0]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x:
(x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
# TODO during this bootstrap phase we need any BCC full nodes
# active on the network, uncomment the following line once the
# BCC chain will be consolidated.
# ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
|
py | 1a44be24c0e6359f6507b1efee84d84c469b69ba | from ..db import *
from .. import currentUser
from ..accounting import UsageStatistics
from ..lib import logging
from ..lib.error import UserError
from ..generic import *
class Organization(Entity, BaseDocument):
name = StringField(unique=True, required=True)
totalUsage = ReferenceField(UsageStatistics, db_field='total_usage', required=True, reverse_delete_rule=DENY)
label = StringField(required=True)
homepageUrl = URLField(db_field='homepage_url')
imageUrl = URLField(db_field='image_url')
description = StringField()
meta = {
'ordering': ['name'],
'indexes': [
'name'
]
}
@property
def sites(self):
from .site import Site
return Site.objects(organization=self)
@property
def users(self):
from ..auth import User
return User.objects(organization=self)
def init(self, attrs):
self.totalUsage = UsageStatistics().save()
self.modify(attrs)
def checkPermissions(self, *args, **kwargs):
user = currentUser()
if user.hasFlag(Flags.GlobalAdmin):
return True
if user.hasFlag(Flags.OrgaAdmin) and user.organization == self:
return True
return False
def _checkRemove(self):
UserError.check(self.checkPermissions(), code=UserError.DENIED, message="Not enough permissions")
if self.id:
UserError.check(not self.sites, code=UserError.NOT_EMPTY, message="Organization still has sites")
UserError.check(not self.users, code=UserError.NOT_EMPTY, message="Organization still has users")
def _remove(self):
logging.logMessage("remove", category="organization", name=self.name)
if self.id:
self.delete()
self.totalUsage.remove()
def updateUsage(self):
self.totalUsage.updateFrom([user.totalUsage for user in self.users])
def __str__(self):
return self.name
def __repr__(self):
return "Organization(%s)" % self.name
@classmethod
def get(cls, name, **kwargs):
try:
return Organization.objects.get(name=name, **kwargs)
except Organization.DoesNotExist:
return None
@classmethod
def create(cls, name, label="", attrs=None):
if not attrs:
attrs = {}
UserError.check(currentUser().hasFlag(Flags.GlobalAdmin), code=UserError.DENIED, message="Not enough permissions")
UserError.check('/' not in name, code=UserError.INVALID_VALUE, message="Organization name may not include a '/'")
logging.logMessage("create", category="site", name=name, label=label)
organization = Organization(name=name, label=label)
try:
attrs_ = attrs.copy()
attrs_['name'] = name
attrs_['label'] = label
organization.init(attrs_)
organization.save()
except:
organization.remove()
raise
return organization
ACTIONS = {
Entity.REMOVE_ACTION: Action(fn=_remove, check=_checkRemove)
}
ATTRIBUTES = {
"name": Attribute(field=name, check=checkPermissions, schema=schema.Identifier()),
"label": Attribute(field=label, check=checkPermissions, schema=schema.String()),
"homepage_url": Attribute(field=homepageUrl, check=checkPermissions, schema=schema.URL(null=True)),
"image_url": Attribute(field=imageUrl, check=checkPermissions, schema=schema.URL(null=True)),
"description": Attribute(field=description, check=checkPermissions, schema=schema.String())
}
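	# Illustrative usage (a sketch, assuming a configured MongoDB backend and a
	# caller holding the GlobalAdmin flag; the values are hypothetical):
	#   org = Organization.create(name="exampleorg", label="Example Org")
	#   org.modify({"description": "Created for demonstration"})
	#   org.updateUsage()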
from ..auth import Flags |
py | 1a44c03a8f3db5959794222c8e8bf97756574bfe | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file performs Sling based entity linking on NQ.
The file iterates through entire train and dev set of NQ.
For every example it does entity linking on long answer candidates,
annotated long and short answers, and the question.
Every paragraph in the dataset is augmented with an entity map from
every token to its entity id.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import json
import os
import sling
import sling.flags as flags
import sling.task.entity as entity
import sling.task.workflow as workflow
import tensorflow.compat.v1 as tf
# Calling these 'args' to avoid conflicts with sling flags
args = tf.flags
ARGS = args.FLAGS
args.DEFINE_string("nq_dir", "", "NQ data location")
args.DEFINE_string("files_dir", "", "Preprocess files location")
args.DEFINE_string("output_data_dir", "", "Location to write augmented data to")
args.DEFINE_boolean("annotate_candidates", True, "Flag to annotate candidates")
args.DEFINE_boolean("annotate_long_answers", True,
"Flag to annotate long answer")
args.DEFINE_boolean("annotate_short_answers", True,
"Flag to annotate short answers")
args.DEFINE_boolean("annotate_question", True, "Flag to annotate questions")
def extract_and_tokenize_text(item, tokens):
"""Extracts the tokens in passage, tokenizes them using sling tokenizer."""
start_token = item["start_token"]
end_token = item["end_token"]
if start_token >= 0 and end_token >= 0:
non_html_tokens = [
x
for x in tokens[start_token:end_token]
if not x["html_token"]
]
answer = " ".join([x["token"] for x in non_html_tokens])
answer_map = [idx for idx, x in enumerate(non_html_tokens)]
doc = sling.tokenize(str(answer))
return answer, answer_map, doc
return "", [], None
def is_sling_entity(item):
  return (isinstance(item[0], sling.Frame) and "id" in item[0] and
          item[0]["id"].startswith("Q"))
def prepare_sling_input_corpus(nq_data, sling_input_corpus):
"""Parse each paragrapgh in NQ (LA candidate, LA, SA, question).
Prepare a sling corpus to do entity linking.
Args:
nq_data: A python dictionary containint NQ data of 1 train/dev shard
sling_input_corpus: A filename string to write the sling format documents
into
"""
corpus = sling.RecordWriter(sling_input_corpus)
for i in nq_data.keys():
tokens = nq_data[i]["document_tokens"]
if ARGS.annotate_candidates:
for idx, la_cand in enumerate(nq_data[i]["long_answer_candidates"]):
answer, answer_map, doc = extract_and_tokenize_text(la_cand, tokens)
if answer:
nq_data[i]["long_answer_candidates"][idx]["text_answer"] = answer
nq_data[i]["long_answer_candidates"][idx]["answer_map"] = answer_map
key = i + "|candidate|" + str(idx) + "|i"
corpus.write(key, doc.frame.data(binary=True))
if ARGS.annotate_short_answers:
for idx, ann in enumerate(nq_data[i]["annotations"]):
short_ans = ann["short_answers"]
if not short_ans:
continue
for sid in range(len(short_ans)):
ans = short_ans[sid]
answer, answer_map, doc = extract_and_tokenize_text(ans, tokens)
if answer:
nq_data[i]["annotations"][idx]["short_answers"][sid][
"text_answer"] = answer
nq_data[i]["annotations"][idx]["short_answers"][sid][
"answer_map"] = answer_map
key = i + "|annotated_short_answer|" + str(idx) + "|" + str(sid)
corpus.write(key, doc.frame.data(binary=True))
if ARGS.annotate_long_answers:
for idx, ann in enumerate(nq_data[i]["annotations"]):
long_ans = ann["long_answer"]
answer, answer_map, doc = extract_and_tokenize_text(long_ans, tokens)
if answer:
nq_data[i]["annotations"][idx]["long_answer"]["text_answer"] = answer
nq_data[i]["annotations"][idx]["long_answer"][
"answer_map"] = answer_map
key = i + "|annotated_long_answer|" + str(idx) + "|i"
corpus.write(key, doc.frame.data(binary=True))
if ARGS.annotate_question:
doc = sling.tokenize(str(nq_data[i]["question_text"]))
key = i + "|question|i|i"
corpus.write(key, doc.frame.data(binary=True))
corpus.close()
def sling_entity_link(sling_input_corpus, sling_output_corpus):
"""Does sling entity linking and created linked output corpus."""
labeler = entity.EntityWorkflow("wiki-label")
unannotated = labeler.wf.resource(
sling_input_corpus, format="records/document")
annotated = labeler.wf.resource(
sling_output_corpus, format="records/document")
labeler.label_documents(indocs=unannotated, outdocs=annotated)
workflow.run(labeler.wf)
def extract_entity_mentions(nq_data, labelled_record):
"""Parse ourput corpus and create map from tokens to entity ids.
Args:
nq_data: A python dictionary containint NQ data of 1 train/dev shard
labelled_record: Sling output document with labelled paragraphs
Returns:
nq_data: Original object augmented with entity maps
"""
recin = sling.RecordReader(labelled_record)
commons = sling.Store()
docschema = sling.DocumentSchema(commons)
commons.freeze()
cnt = 1
for key, value in recin:
store = sling.Store(commons)
doc = sling.Document(store.parse(value), store, docschema)
index, ans_type, idx, ans_id = key.decode("utf-8").split("|")
cnt += 1
entity_map = {}
# Parse entity mentions labelled by sling
for m in doc.mentions:
e = [i["is"] for i in m.evokes()]
if not e:
continue
if is_sling_entity(e):
e_val = e[0]["id"]
if m.begin in entity_map:
entity_map[m.begin].append((m.end, e_val))
else:
entity_map[m.begin] = [(m.end, e_val)]
if ans_type == "annotated_long_answer":
nq_data[index]["annotations"][int(
idx)]["long_answer"]["entity_map"] = entity_map
elif ans_type == "question":
nq_data[index]["question_entity_map"] = entity_map
elif ans_type == "annotated_short_answer":
nq_data[index]["annotations"][int(idx)]["short_answers"][int(
ans_id)]["entity_map"] = entity_map
else:
nq_data[index]["long_answer_candidates"][int(
idx)]["entity_map"] = entity_map
return nq_data
def extract_nq_data(nq_file):
"""Read nq shard file and return dict of nq_data."""
fp = gzip.GzipFile(fileobj=tf.gfile.Open(nq_file, "rb"))
lines = fp.readlines()
data = {}
counter = 0
for line in lines:
data[str(counter)] = json.loads(line.decode("utf-8"))
tok = []
for j in data[str(counter)]["document_tokens"]:
tok.append(j["token"])
data[str(counter)]["full_document_long"] = " ".join(tok)
counter += 1
return data
def get_shard(mode, task_id, shard_id):
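  # For example, get_shard("train", 1, 2) returns "nq-train-0102".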
return "nq-%s-%02d%02d" % (mode, task_id, shard_id)
def get_full_filename(data_dir, mode, task_id, shard_id):
return os.path.join(
data_dir, "%s/%s.jsonl.gz" % (mode, get_shard(mode, task_id, shard_id)))
def get_examples(data_dir, mode, task_id, shard_id):
"""Reads NQ data, does sling entity linking and returns augmented data."""
file_path = get_full_filename(data_dir, mode, task_id, shard_id)
tf.logging.info("Reading file: %d" % (file_path))
if not os.path.exists(file_path):
return None
nq_data = extract_nq_data(file_path)
tf.logging.info("NQ data Size: " + str(len(nq_data.keys())))
tf.logging.info("Preparing sling corpus: ")
sling_input_corpus = os.path.join(ARGS.files_dir, "sling_input_corpus.rec")
sling_output_corpus = os.path.join(ARGS.files_dir, "nq_labelled_output.rec")
prepare_sling_input_corpus(nq_data, sling_input_corpus)
tf.logging.info("Performing Sling NER Labeling")
sling_entity_link(sling_input_corpus, sling_output_corpus)
fact_extracted_data = extract_entity_mentions(nq_data, sling_output_corpus)
return fact_extracted_data
def main(_):
workflow.startup()
max_tasks = {"train": 50, "dev": 5}
max_shards = {"train": 6, "dev": 16}
for mode in ["train", "dev"]:
# Parse all shards in each mode
# Currently sequentially, can be parallelized later
for task_id in range(0, max_tasks[mode]):
for shard_id in range(0, max_shards[mode]):
nq_augmented_data = get_examples(ARGS.nq_dir, mode, task_id, shard_id)
if nq_augmented_data is None:
continue
path = get_full_filename(ARGS.output_data_dir, mode, task_id, shard_id)
with gzip.GzipFile(fileobj=tf.gfile.Open(path, "w")) as output_file:
for idx in nq_augmented_data.keys():
json_line = nq_augmented_data[idx]
output_file.write(json.dumps(json_line) + "\n")
workflow.shutdown()
if __name__ == "__main__":
# This will fail if non-sling CMDLine Args are given.
# Will modify sling separately to parse known args
flags.parse()
tf.app.run()
|
py | 1a44c1406d9d798e964da7bda7a859bcf0a4da96 | #
# PySNMP MIB module Storage-Management-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Storage-Management-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:14:42 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
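# Illustrative manager-side use (a sketch; assumes pysnmp is installed and this
# compiled module is on its MIB search path):
#   from pysnmp.smi import builder
#   mib_builder = builder.MibBuilder()
#   mib_builder.loadModules('Storage-Management-MIB')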
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
IpAddress, iso, ModuleIdentity, ObjectIdentity, Counter64, MibIdentifier, enterprises, Bits, Counter32, TimeTicks, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Unsigned32, NotificationType, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "iso", "ModuleIdentity", "ObjectIdentity", "Counter64", "MibIdentifier", "enterprises", "Bits", "Counter32", "TimeTicks", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Unsigned32", "NotificationType", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
sni = MibIdentifier((1, 3, 6, 1, 4, 1, 231))
sniProductMibs = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2))
sniStorMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20))
sniStorMgmtAvailInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 1))
sniStorMgmtProductInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1))
sniStorMgmtHsmsInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2))
sniStorMgmtMarenInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3))
sniStorMgmtTlsInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4))
sniStorMgmtRobarInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5))
sniStorMgmtResourceInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 2))
sniStorMgmtPubsetInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 3))
sniStorMgmtDiskInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 4))
sniStorMgmtGlobalData = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 18))
sniStorMgmtPubsetTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 20))
sniStorMgmtDiskTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 2, 20, 21))
storMgmtProductTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtProductTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductTabNum.setDescription('The number of entries in the table storMgmtProductTable')
storMgmtProductTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2), )
if mibBuilder.loadTexts: storMgmtProductTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductTable.setDescription('The Storage management information table')
storMgmtProductEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtProductIndex"))
if mibBuilder.loadTexts: storMgmtProductEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductEntry.setDescription('An entry in the table')
storMgmtProductIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtProductIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductIndex.setDescription('A unique value for each entry, its value ranges between 1 and the value of storMgmtProductTabNum')
storMgmtProductName = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtProductName.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductName.setDescription('The name of the product itself')
storMgmtProductVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtProductVersion.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductVersion.setDescription('The installed version of the specified product')
storMgmtProductState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 255))).clone(namedValues=NamedValues(("created", 1), ("not-created", 2), ("in-delete", 3), ("in-create", 4), ("in-resume", 5), ("in-hold", 6), ("not-resumed", 7), ("locked", 8), ("not-installed", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtProductState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductState.setDescription('Current state of the product')
storMgmtProductTimestamp = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 1, 2, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtProductTimestamp.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtProductTimestamp.setDescription('Status timestamp (only for started and stopped state)')
storMgmtHsmsOpmode = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("defineshow", 1), ("simulation", 2), ("operation", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtHsmsOpmode.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsOpmode.setDescription('The current operational mode specifies the scope of actions to be performed by HSMS')
storMgmtHsmsModeTimestamp = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtHsmsModeTimestamp.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsModeTimestamp.setDescription('Mode timestamp')
storMgmtHsmsServertask = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtHsmsServertask.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsServertask.setDescription('Number of current server tasks')
storMgmtHsmsWaitJobs = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsWaitJobs.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsWaitJobs.setDescription('Number of waiting jobs (state is ACCEPTED)')
storMgmtHsmsInstances = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsInstances.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsInstances.setDescription('')
storMgmtHsmsAcceptReqs = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsAcceptReqs.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsAcceptReqs.setDescription('')
storMgmtHsmsCompleteReqs = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsCompleteReqs.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsCompleteReqs.setDescription('')
storMgmtHsmsInterruptReqs = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsInterruptReqs.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsInterruptReqs.setDescription('')
storMgmtHsmsWaitReqsRead = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsWaitReqsRead.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsWaitReqsRead.setDescription('')
storMgmtHsmsWaitReqsWrite = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsWaitReqsWrite.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsWaitReqsWrite.setDescription('')
storMgmtHsmsWaitReqsExpress = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsWaitReqsExpress.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsWaitReqsExpress.setDescription('')
storMgmtHsmsStimeRead = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsStimeRead.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsStimeRead.setDescription('')
storMgmtHsmsStimeWrite = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsStimeWrite.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsStimeWrite.setDescription('')
storMgmtHsmsStimeExpress = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsStimeExpress.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsStimeExpress.setDescription('')
storMgmtHsmsNetload = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtHsmsNetload.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtHsmsNetload.setDescription('Netload (in Bytes) while performing a backup')
storMgmtHsmsS1Bottleneck = NotificationType((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 2) + (0,1))
if mibBuilder.loadTexts: storMgmtHsmsS1Bottleneck.setDescription('Sending a TRAP, if the agent sees the message HSM0468 for S1 bottleneck')
storMgmtMarenCPTask = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenCPTask.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenCPTask.setDescription('Is MARENCP running?')
storMgmtMarenUCPTask = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenUCPTask.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenUCPTask.setDescription('Is MARENUCP running?')
storMgmtMarenCatConn = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("spd", 1), ("rfalocal", 2), ("rfaremote", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenCatConn.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenCatConn.setDescription('Type of connection to the MAREN catalogue; the access to the catalogue is possible via a shared private disk or via a RFA connection (local or remote)')
storMgmtMarenConnState = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("inactive", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenConnState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenConnState.setDescription('Is connection to the MAREN catalogue active/inactive?')
storMgmtMarenLocTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenLocTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocTabNum.setDescription('The number of entries in the table storMgmtMarenLocTable')
storMgmtMarenLocTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 6), )
if mibBuilder.loadTexts: storMgmtMarenLocTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocTable.setDescription('List of available MAREN locations')
storMgmtMarenLocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 6, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtMarenLocIndex"))
if mibBuilder.loadTexts: storMgmtMarenLocEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocEntry.setDescription('An entry in the table')
storMgmtMarenLocIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenLocIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocIndex.setDescription('A unique value for each entry, its value ranges between 1 and the value of storMgmtMarenLocTabNum')
storMgmtMarenLocName = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 6, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenLocName.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocName.setDescription('Name of the location, e.g. ROBOTER1 or FIREARCH')
storMgmtMarenLocOpmode = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 6, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenLocOpmode.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocOpmode.setDescription('Operating mode e.g. TLS-1 or ROBAR-1')
storMgmtMarenLocFreevol = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 6, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenLocFreevol.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenLocFreevol.setDescription('Number of free volumes in the specified location; varied according to volumetypes TAPE-Cx')
storMgmtMarenNetworkTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenNetworkTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenNetworkTabNum.setDescription('The number of entries in the table storMgmtMarenNetworkTable')
storMgmtMarenNetworkTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 8), )
if mibBuilder.loadTexts: storMgmtMarenNetworkTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenNetworkTable.setDescription('List of hosts belonging to the same MAREN network as the current host')
storMgmtMarenNetworkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 8, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtMarenNetworkIndex"))
if mibBuilder.loadTexts: storMgmtMarenNetworkEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenNetworkEntry.setDescription('An entry in the table')
storMgmtMarenNetworkIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 8, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenNetworkIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenNetworkIndex.setDescription('A unique value for each entry, its value ranges between 1 and the value of storMgmtMarenNetworkTabNum')
storMgmtMarenNetworkHost = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3, 8, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtMarenNetworkHost.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtMarenNetworkHost.setDescription('Hostnames belonging to the same MAREN network')
storMgmtMarenNoVolume = NotificationType((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 3) + (0,1))
if mibBuilder.loadTexts: storMgmtMarenNoVolume.setDescription('Sending a TRAP, if the agent sees the message MARUP97 or MARUP98 (no more free volumes available)')
storMgmtTlsTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtTlsTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsTabNum.setDescription('The number of entries in the table storMgmtTlsTable')
storMgmtTlsTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2), )
if mibBuilder.loadTexts: storMgmtTlsTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsTable.setDescription('The Storage management information table')
storMgmtTlsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtTlsIndex"))
if mibBuilder.loadTexts: storMgmtTlsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsEntry.setDescription('An entry in the table')
storMgmtTlsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtTlsIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsIndex.setDescription('A unique value for each entry, its value ranges between 1 and the value of storMgmtTlsTabNum')
storMgmtTlsRobState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("automatic", 1), ("pause", 2), ("manual", 3), ("off", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtTlsRobState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsRobState.setDescription('Roboter state can be: automatic mode, pause mode, manual mode or off')
storMgmtTlsFreeboxno = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtTlsFreeboxno.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsFreeboxno.setDescription('Number of free storage boxes in the archive')
storMgmtTlsCleanno = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtTlsCleanno.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsCleanno.setDescription('Number of available cleaning cartridges')
storMgmtTlsLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtTlsLocation.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtTlsLocation.setDescription('Location(s) for devices supported by the robot system')
storMgmtTlsOffline = NotificationType((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 4) + (0,1))
if mibBuilder.loadTexts: storMgmtTlsOffline.setDescription('Sending a TRAP, if the agent sees the message EXC0858 (no connection to robot)')
storMgmtRobarTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarTabNum.setDescription('The number of entries in the table storMgmtRobarTab')
storMgmtRobarTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2), )
if mibBuilder.loadTexts: storMgmtRobarTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarTable.setDescription('The Storage management information table')
storMgmtRobarEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtRobarIndex"))
if mibBuilder.loadTexts: storMgmtRobarEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarEntry.setDescription('An entry in the table')
storMgmtRobarIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarIndex.setDescription('A unique value for each entry, its value ranges between 1 and the value of storMgmtRobarTabNum')
storMgmtRobarLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarLocation.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarLocation.setDescription('Current ROBAR location')
storMgmtRobarState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 255))).clone(namedValues=NamedValues(("running", 1), ("terminated", 2), ("aborted", 3), ("loaded", 4), ("in-hold", 5), ("not-installed", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarState.setDescription('Is ROBAR-DCAM running, stopped or even not installed?')
storMgmtRobarVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarVersion.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarVersion.setDescription('Current ROBAR version')
storMgmtRobarConnState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("active", 1), ("inactive", 2), ("not-available", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarConnState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarConnState.setDescription('Check connection between ROBAR-BS2000 and ROBAR-SINIX')
storMgmtRobarRobState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtRobarRobState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarRobState.setDescription('Roboter state is ON or OFF')
storMgmtRobarRouting = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5, 2, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtRobarRouting.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtRobarRouting.setDescription('Current routing codes for ROBAR-DCAM')
storMgmtRobarBs2Messages = NotificationType((1, 3, 6, 1, 4, 1, 231, 2, 20, 1, 5) + (0,1))
if mibBuilder.loadTexts: storMgmtRobarBs2Messages.setDescription('Sending a TRAP, if the agent sees predefined BS2000 messages like ROB0042 (ROB ON), ROB0043 (ROB OFF), ROB2000 (Connect ok), ROB2001 (Connect not ok), ...')
storMgmtResourcePubset = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtResourcePubset.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourcePubset.setDescription('Catalogue ID of the specified pubset')
storMgmtResourceSaturation = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("level-0", 1), ("level-1", 2), ("level-2", 3), ("level-3", 4), ("level-4", 5), ("level-5", 6), ("unknown-level", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtResourceSaturation.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourceSaturation.setDescription('Saturation level for the specified pubset')
storMgmtResourceCapacity = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtResourceCapacity.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourceCapacity.setDescription('Capacity of the specified pubset in half pages')
storMgmtResourceSpaceAllocated = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtResourceSpaceAllocated.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourceSpaceAllocated.setDescription('Allocated space of the specified pubset in half pages')
storMgmtResourceFragment = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtResourceFragment.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourceFragment.setDescription('Degree of fragmentation')
storMgmtResourceReusableS1 = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtResourceReusableS1.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourceReusableS1.setDescription('Information is: number of save files, number of occupied PAM pages according to save files, number of unused PAM pages')
storMgmtResourceSecureQueue = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 2, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtResourceSecureQueue.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtResourceSecureQueue.setDescription('The number of tasks waiting in the SECURE queue')
storMgmtPubsetTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetTabNum.setDescription('The number of entries in the table storMgmtPubsetTable')
storMgmtPubsetTabState = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))).clone(namedValues=NamedValues(("all", 1), ("paging", 2), ("local", 3), ("remote", 4), ("accessible", 5), ("local-accessible", 6), ("shared", 7), ("exclusive", 8), ("remote-accessible", 9), ("local-accessible-speedcat", 10), ("xcs-pubset", 11), ("hsms-supported", 12), ("single-feature", 13), ("system-managed", 14), ("volume-sets", 15), ("unused-volsets", 16), ("master-change-error", 17)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtPubsetTabState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetTabState.setDescription('State of Pubsets to be shown in the storMgmtPubsetTable ')
storMgmtPubsetTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10), )
if mibBuilder.loadTexts: storMgmtPubsetTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetTable.setDescription('The Pubset information table')
storMgmtPubsetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtPubsetIndex"))
if mibBuilder.loadTexts: storMgmtPubsetEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetEntry.setDescription('An entry in the table')
storMgmtPubsetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetIndex.setDescription('A unique value for each entry, its value is the CATID of the Pubset storMgmtPubsetIndex ')
storMgmtPubsetTyp = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 255))).clone(namedValues=NamedValues(("single-feature", 1), ("system-managed", 2), ("volumeset", 3), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetTyp.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetTyp.setDescription('Type of the Pubset')
storMgmtPubsetLocal = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("local", 1), ("remote", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetLocal.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetLocal.setDescription('Local / remote Pubset')
storMgmtPubsetHome = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("home", 1), ("imported", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetHome.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetHome.setDescription('Home / imported Pubset')
storMgmtPubsetShared = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("shared", 1), ("exclusive", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetShared.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetShared.setDescription('shared / exclusive Pubset')
storMgmtPubsetMaster = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("yes", 1), ("no", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetMaster.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetMaster.setDescription('Master / slave Pubset')
storMgmtPubsetAccessible = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("accessible", 1), ("inaccessible", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetAccessible.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetAccessible.setDescription('accessible / inaccessible Pubset')
storMgmtPubsetQuiet = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("yes", 1), ("no", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetQuiet.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetQuiet.setDescription('quiet Pubset')
storMgmtPubsetPaging = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("yes", 1), ("no", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetPaging.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetPaging.setDescription('paging Pubset')
storMgmtPubsetSize = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetSize.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetSize.setDescription('Size of Pubset')
storMgmtPubsetUsedSize = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetUsedSize.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetUsedSize.setDescription('used space of Pubset')
storMgmtPubsetSaturationLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 3, 10, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("level-0", 1), ("level-1", 2), ("level-2", 3), ("level-3", 4), ("level-4", 5), ("level-5", 6), ("unknown-level", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtPubsetSaturationLevel.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtPubsetSaturationLevel.setDescription('Saturation level for the specified pubset')
storMgmtDiskTabNum = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskTabNum.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskTabNum.setDescription('The number of entries in the table storMgmtDiskTable')
storMgmtDiskTabReconfState = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 255))).clone(namedValues=NamedValues(("all", 1), ("attached", 2), ("detached", 3), ("other", 255)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtDiskTabReconfState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskTabReconfState.setDescription('The reconfiguration state of disks to be shown ')
storMgmtDiskTable = MibTable((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10), )
if mibBuilder.loadTexts: storMgmtDiskTable.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskTable.setDescription('The Disk information table')
storMgmtDiskEntry = MibTableRow((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1), ).setIndexNames((0, "Storage-Management-MIB", "storMgmtDiskIndex"))
if mibBuilder.loadTexts: storMgmtDiskEntry.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskEntry.setDescription('An entry in the table')
storMgmtDiskIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskIndex.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskIndex.setDescription('A unique value for each entry, its value is the Mnemonic of the Disk ')
storMgmtDiskVSN = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskVSN.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskVSN.setDescription('VSN of Disk ')
storMgmtDiskDeviceAllocState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 255))).clone(namedValues=NamedValues(("free", 1), ("allocated", 2), ("exclusive-allocated", 3), ("current-public-assigned", 4), ("dms-used-volume-assigned", 5), ("drv-assigned", 6), ("drv-public-assigned", 7), ("drv-dms-used-volume-assigned", 8), ("invalid", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskDeviceAllocState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskDeviceAllocState.setDescription('Device Allocation State ')
storMgmtDiskSystemUse = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("paging-device", 1), ("public-device", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskSystemUse.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskSystemUse.setDescription('Type of the Disk')
storMgmtDiskPoolAttribut = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("shared-privat-disk", 1), ("device-switchable", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskPoolAttribut.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskPoolAttribut.setDescription('Local / remote Disk')
storMgmtDiskReconfState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 255))).clone(namedValues=NamedValues(("attached", 1), ("detach-pending", 2), ("detached", 3), ("assignment-in-progress", 4), ("invalid", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskReconfState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskReconfState.setDescription('Reconfiguration State ')
storMgmtDiskVolAllocState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 255))).clone(namedValues=NamedValues(("free", 1), ("task-exclusive", 2), ("task-sharable", 3), ("current-public", 4), ("current-paging", 5), ("volume-cancelled", 6), ("volume-allocated", 7), ("invalid", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskVolAllocState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskVolAllocState.setDescription('Volume Allocation State ')
storMgmtDiskPrivDiskRunState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 255))).clone(namedValues=NamedValues(("offline", 1), ("active", 2), ("inactive", 3), ("end-in-process", 4), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskPrivDiskRunState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskPrivDiskRunState.setDescription('Private Disk Run State ')
storMgmtDiskPhaseSet = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 255))).clone(namedValues=NamedValues(("online-only", 1), ("premounting", 2), ("mounting", 3), ("in-use", 4), ("not-specified", 5), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskPhaseSet.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskPhaseSet.setDescription('Phase Set')
storMgmtDiskActionState = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 255))).clone(namedValues=NamedValues(("no-action", 1), ("inoperable", 2), ("remount", 3), ("cancelled", 4), ("no-device-available", 5), ("recover", 6), ("positioning", 7), ("writepermission-missing", 8), ("unlock", 9), ("svl-update", 10), ("dismount", 11), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskActionState.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskActionState.setDescription('Action State')
storMgmtDiskUse = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 255))).clone(namedValues=NamedValues(("dms", 1), ("special", 2), ("work", 3), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskUse.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskUse.setDescription(' Disk Use ')
storMgmtDiskAssignTime = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 255))).clone(namedValues=NamedValues(("standard", 1), ("user", 2), ("user-by-default", 3), ("operator", 4), ("operator-by-default", 5), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskAssignTime.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskAssignTime.setDescription('Assign Time ')
storMgmtDiskUserAllocation = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 255))).clone(namedValues=NamedValues(("standard", 1), ("shared", 2), ("shared-by-default", 3), ("exclusive", 4), ("exclusive-by-default", 5), ("no", 6), ("no-by-default", 7), ("all", 8), ("all-by-default", 9), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskUserAllocation.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskUserAllocation.setDescription('User Allocation ')
storMgmtDiskOperatorControl = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 255))).clone(namedValues=NamedValues(("standard", 1), ("shared", 2), ("shared-by-default", 3), ("exclusive", 4), ("exclusive-by-default", 5), ("no", 6), ("no-by-default", 7), ("all", 8), ("all-by-default", 9), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskOperatorControl.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskOperatorControl.setDescription('Operator Control ')
storMgmtDiskSystemAllocation = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 255))).clone(namedValues=NamedValues(("shared", 1), ("exclusive", 2), ("all", 3), ("current-shared", 4), ("current-exclusive", 5), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskSystemAllocation.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskSystemAllocation.setDescription('System Allocation ')
storMgmtDiskAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 255))).clone(namedValues=NamedValues(("write", 1), ("ppd", 2), ("from-device", 3), ("write-from-device", 4), ("ppd-from-device", 5), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskAccess.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskAccess.setDescription('Disk Access ')
storMgmtDiskRecordingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 231, 2, 20, 4, 10, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("single-recording", 1), ("dual-recording-by-volume", 2), ("unknown", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtDiskRecordingMode.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtDiskRecordingMode.setDescription('Disk Recording Mode')
storMgmtGlobalDataVersion = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 18, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: storMgmtGlobalDataVersion.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtGlobalDataVersion.setDescription('Version of subagent ')
storMgmtGlobalDataInputFile = MibScalar((1, 3, 6, 1, 4, 1, 231, 2, 20, 18, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: storMgmtGlobalDataInputFile.setStatus('mandatory')
if mibBuilder.loadTexts: storMgmtGlobalDataInputFile.setDescription('Filename of input file ')
storMgmtPubsetSatLevTrap = NotificationType((1, 3, 6, 1, 4, 1, 231, 2, 20, 20) + (0,301)).setObjects(("Storage-Management-MIB", "storMgmtPubsetIndex"), ("Storage-Management-MIB", "storMgmtPubsetSaturationLevel"))
if mibBuilder.loadTexts: storMgmtPubsetSatLevTrap.setDescription('Saturation level x reached')
storMgmtDiskReconfStateTrap = NotificationType((1, 3, 6, 1, 4, 1, 231, 2, 20, 21) + (0,301)).setObjects(("Storage-Management-MIB", "storMgmtDiskIndex"), ("Storage-Management-MIB", "storMgmtDiskReconfState"))
if mibBuilder.loadTexts: storMgmtDiskReconfStateTrap.setDescription('DiskReconfstate x reached')
mibBuilder.exportSymbols("Storage-Management-MIB", storMgmtPubsetLocal=storMgmtPubsetLocal, storMgmtHsmsServertask=storMgmtHsmsServertask, storMgmtMarenCPTask=storMgmtMarenCPTask, sniStorMgmtDiskInfo=sniStorMgmtDiskInfo, storMgmtRobarRouting=storMgmtRobarRouting, storMgmtHsmsStimeRead=storMgmtHsmsStimeRead, storMgmtPubsetQuiet=storMgmtPubsetQuiet, storMgmtMarenLocOpmode=storMgmtMarenLocOpmode, storMgmtTlsIndex=storMgmtTlsIndex, storMgmtDiskActionState=storMgmtDiskActionState, sniStorMgmtGlobalData=sniStorMgmtGlobalData, storMgmtHsmsWaitReqsExpress=storMgmtHsmsWaitReqsExpress, storMgmtResourceSpaceAllocated=storMgmtResourceSpaceAllocated, storMgmtHsmsAcceptReqs=storMgmtHsmsAcceptReqs, storMgmtPubsetEntry=storMgmtPubsetEntry, storMgmtHsmsInterruptReqs=storMgmtHsmsInterruptReqs, storMgmtDiskPhaseSet=storMgmtDiskPhaseSet, storMgmtDiskVolAllocState=storMgmtDiskVolAllocState, storMgmtPubsetSatLevTrap=storMgmtPubsetSatLevTrap, storMgmtDiskAssignTime=storMgmtDiskAssignTime, sniStorMgmtTlsInfo=sniStorMgmtTlsInfo, storMgmtPubsetHome=storMgmtPubsetHome, storMgmtHsmsWaitReqsWrite=storMgmtHsmsWaitReqsWrite, storMgmtProductEntry=storMgmtProductEntry, storMgmtPubsetSaturationLevel=storMgmtPubsetSaturationLevel, storMgmtMarenConnState=storMgmtMarenConnState, storMgmtHsmsStimeWrite=storMgmtHsmsStimeWrite, storMgmtProductState=storMgmtProductState, storMgmtMarenNetworkHost=storMgmtMarenNetworkHost, storMgmtRobarTabNum=storMgmtRobarTabNum, storMgmtTlsTable=storMgmtTlsTable, storMgmtDiskPoolAttribut=storMgmtDiskPoolAttribut, storMgmtHsmsCompleteReqs=storMgmtHsmsCompleteReqs, storMgmtMarenLocName=storMgmtMarenLocName, storMgmtPubsetTyp=storMgmtPubsetTyp, storMgmtHsmsModeTimestamp=storMgmtHsmsModeTimestamp, sniStorMgmtProductInfo=sniStorMgmtProductInfo, storMgmtDiskSystemAllocation=storMgmtDiskSystemAllocation, storMgmtHsmsOpmode=storMgmtHsmsOpmode, storMgmtTlsLocation=storMgmtTlsLocation, storMgmtDiskIndex=storMgmtDiskIndex, sniProductMibs=sniProductMibs, storMgmtMarenNetworkIndex=storMgmtMarenNetworkIndex, storMgmtTlsRobState=storMgmtTlsRobState, storMgmtDiskPrivDiskRunState=storMgmtDiskPrivDiskRunState, storMgmtMarenNetworkTabNum=storMgmtMarenNetworkTabNum, storMgmtDiskOperatorControl=storMgmtDiskOperatorControl, storMgmtPubsetUsedSize=storMgmtPubsetUsedSize, storMgmtPubsetShared=storMgmtPubsetShared, storMgmtTlsCleanno=storMgmtTlsCleanno, storMgmtMarenLocTabNum=storMgmtMarenLocTabNum, storMgmtPubsetPaging=storMgmtPubsetPaging, storMgmtDiskTabNum=storMgmtDiskTabNum, storMgmtTlsOffline=storMgmtTlsOffline, storMgmtHsmsWaitJobs=storMgmtHsmsWaitJobs, storMgmtResourceSecureQueue=storMgmtResourceSecureQueue, storMgmtRobarIndex=storMgmtRobarIndex, storMgmtRobarEntry=storMgmtRobarEntry, storMgmtProductIndex=storMgmtProductIndex, storMgmtHsmsInstances=storMgmtHsmsInstances, sniStorMgmtDiskTraps=sniStorMgmtDiskTraps, storMgmtPubsetTable=storMgmtPubsetTable, storMgmtHsmsStimeExpress=storMgmtHsmsStimeExpress, storMgmtProductVersion=storMgmtProductVersion, sniStorMgmtRobarInfo=sniStorMgmtRobarInfo, storMgmtMarenLocTable=storMgmtMarenLocTable, sniStorMgmtPubsetInfo=sniStorMgmtPubsetInfo, storMgmtDiskUserAllocation=storMgmtDiskUserAllocation, storMgmtPubsetSize=storMgmtPubsetSize, storMgmtProductName=storMgmtProductName, storMgmtProductTabNum=storMgmtProductTabNum, storMgmtRobarRobState=storMgmtRobarRobState, storMgmtDiskAccess=storMgmtDiskAccess, storMgmtMarenLocEntry=storMgmtMarenLocEntry, storMgmtDiskSystemUse=storMgmtDiskSystemUse, storMgmtGlobalDataVersion=storMgmtGlobalDataVersion, 
storMgmtRobarConnState=storMgmtRobarConnState, sniStorMgmtAvailInfo=sniStorMgmtAvailInfo, storMgmtMarenNetworkEntry=storMgmtMarenNetworkEntry, storMgmtDiskTabReconfState=storMgmtDiskTabReconfState, storMgmtRobarState=storMgmtRobarState, storMgmtTlsTabNum=storMgmtTlsTabNum, storMgmtResourceFragment=storMgmtResourceFragment, storMgmtProductTimestamp=storMgmtProductTimestamp, sniStorMgmtMarenInfo=sniStorMgmtMarenInfo, storMgmtPubsetIndex=storMgmtPubsetIndex, storMgmtDiskReconfState=storMgmtDiskReconfState, storMgmtTlsEntry=storMgmtTlsEntry, storMgmtResourceSaturation=storMgmtResourceSaturation, storMgmtPubsetAccessible=storMgmtPubsetAccessible, storMgmtResourceCapacity=storMgmtResourceCapacity, storMgmtDiskVSN=storMgmtDiskVSN, storMgmtMarenNetworkTable=storMgmtMarenNetworkTable, storMgmtMarenCatConn=storMgmtMarenCatConn, storMgmtResourceReusableS1=storMgmtResourceReusableS1, storMgmtMarenLocIndex=storMgmtMarenLocIndex, sni=sni, storMgmtDiskReconfStateTrap=storMgmtDiskReconfStateTrap, sniStorMgmtPubsetTraps=sniStorMgmtPubsetTraps, storMgmtMarenNoVolume=storMgmtMarenNoVolume, storMgmtDiskUse=storMgmtDiskUse, storMgmtPubsetTabState=storMgmtPubsetTabState, storMgmtMarenLocFreevol=storMgmtMarenLocFreevol, storMgmtRobarBs2Messages=storMgmtRobarBs2Messages, sniStorMgmtHsmsInfo=sniStorMgmtHsmsInfo, sniStorMgmtResourceInfo=sniStorMgmtResourceInfo, storMgmtHsmsS1Bottleneck=storMgmtHsmsS1Bottleneck, storMgmtDiskTable=storMgmtDiskTable, storMgmtMarenUCPTask=storMgmtMarenUCPTask, storMgmtDiskRecordingMode=storMgmtDiskRecordingMode, storMgmtPubsetTabNum=storMgmtPubsetTabNum, storMgmtHsmsWaitReqsRead=storMgmtHsmsWaitReqsRead, storMgmtResourcePubset=storMgmtResourcePubset, storMgmtTlsFreeboxno=storMgmtTlsFreeboxno, storMgmtRobarVersion=storMgmtRobarVersion, sniStorMgmt=sniStorMgmt, storMgmtPubsetMaster=storMgmtPubsetMaster, storMgmtRobarTable=storMgmtRobarTable, storMgmtRobarLocation=storMgmtRobarLocation, storMgmtProductTable=storMgmtProductTable, storMgmtHsmsNetload=storMgmtHsmsNetload, storMgmtDiskDeviceAllocState=storMgmtDiskDeviceAllocState, storMgmtGlobalDataInputFile=storMgmtGlobalDataInputFile, storMgmtDiskEntry=storMgmtDiskEntry)
|
py | 1a44c26d554c453aed618256567108247e042930 | __copyright__ = "Copyright (C) 2018 CVision AI."
__license__ = "GPLv3"
# This file is part of OpenEM, released under GPLv3.
# OpenEM is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenEM. If not, see <http://www.gnu.org/licenses/>.
"""Defines interface to config file."""
import os
import glob
import configparser
from datetime import datetime
class ConfigInterface:
"""Interface to config file.
"""
def __init__(self, config_file):
"""Constructor.
# Arguments
config_file: Path to config file.
"""
self.config = configparser.ConfigParser()
self.config.read(config_file)
# Read in species info.
self._species = self.config.get('Data', 'Species').split(',')
        if not self.config.has_option('Data', 'LengthFormat'):
self._ratios = self.config.get('Data', 'AspectRatios').split(',')
self._ratios = [float(r) for r in self._ratios]
if len(self._ratios) != len(self._species):
msg = (
"Invalid config file! "
"Number of species and aspect ratios must match! "
"Number of species: {}, "
"Number of aspect ratios: {}")
                msg = msg.format(len(self._species), len(self._ratios))
raise ValueError(msg)
self._num_classes = len(self._species) + 1
def tensorboard_port(self):
""" Returns the backbone for retinanet to use """
if self.config.has_option('Tensorboard', 'Port'):
            return self.config.getint('Tensorboard', 'Port')
else:
return 10000
def model_dir(self):
"""Gets model directory.
"""
return os.path.join(self.config.get('Paths', 'ModelDir'), 'deploy')
def find_ruler_model_dir(self):
"""Gets find ruler model directory.
"""
return os.path.join(self.model_dir(), 'find_ruler')
def find_ruler_model_path(self):
"""Gets find ruler model file path.
"""
return os.path.join(self.find_ruler_model_dir(), 'find_ruler.pb')
def detect_model_dir(self):
"""Gets detection model directory.
"""
return os.path.join(self.model_dir(), 'detect')
def detect_model_path(self):
"""Gets detection file path.
"""
return os.path.join(self.detect_model_dir(), 'detect.pb')
def detect_retinanet_path(self):
"""Gets detection file path.
"""
return os.path.join(self.detect_model_dir(), 'detect_retinanet.pb')
def classify_model_dir(self):
"""Gets classification model directory.
"""
return os.path.join(self.model_dir(), 'classify')
def classify_model_path(self):
"""Gets classification file path.
"""
return os.path.join(self.classify_model_dir(), 'classify.pb')
def count_model_dir(self):
"""Gets count model directory.
"""
return os.path.join(self.model_dir(), 'count')
def count_model_path(self):
"""Gets count file path.
"""
return os.path.join(self.count_model_dir(), 'count.pb')
def work_dir(self):
"""Gets working directory.
"""
return self.config.get('Paths', 'WorkDir')
def train_dir(self):
"""Returns training directory.
"""
return self.config.get('Paths', 'TrainDir')
def test_dir(self):
"""Returns test directory.
"""
return self.config.get('Paths', 'TestDir')
def num_classes(self):
"""Returns number of classes, including null class.
"""
return self._num_classes
def species(self):
"""Returns list of species names.
"""
return self._species
def aspect_ratios(self):
"""Returns list of species aspect ratios.
"""
return self._ratios
def find_ruler_width(self):
"""Returns width of image used for finding ruler.
"""
return self.config.getint('FindRuler', 'Width')
def find_ruler_height(self):
"""Returns height of image used for finding ruler.
"""
return self.config.getint('FindRuler', 'Height')
def find_ruler_batch_size(self):
"""Returns batch size used for find ruler training.
"""
return self.config.getint('FindRuler', 'BatchSize')
def find_ruler_val_batch_size(self):
"""Returns batch size used for find ruler validation.
"""
return self.config.getint('FindRuler', 'ValBatchSize')
def find_ruler_num_epochs(self):
"""Returns number of epochs for find ruler training.
"""
return self.config.getint('FindRuler', 'NumEpochs')
def find_ruler_initial_epoch(self):
"""Returns initial epoch for find ruler training.
"""
return self.config.getint('FindRuler', 'InitialEpoch')
def find_ruler_steps_per_epoch(self):
"""Returns steps per epoch for ruler training if the key exists,
otherwise returns a default of 100.
"""
steps_per_epoch = 100
if self.config.has_option('FindRuler', 'StepsPerEpoch'):
steps_per_epoch = self.config.getint('FindRuler', 'StepsPerEpoch')
return steps_per_epoch
def find_ruler_num_channels(self):
"""Returns how many channels to include in model/input.
"""
num_channels = 3
if self.config.has_option('FindRuler', 'NumChannels'):
num_channels = self.config.getint('FindRuler', 'NumChannels')
return num_channels
def find_ruler_save_masks(self):
"""Returns whether to save output masks during inference.
"""
save_masks = False
if self.config.has_option('FindRuler', 'SaveMasks'):
save_masks = self.config.getboolean('FindRuler', 'SaveMasks')
return save_masks
    def detect_patience(self):
        """Returns learning rate patience for detection training, or None if unset.
        """
        try:
            return self.config.getint('Detect', 'LR_Patience')
        except (configparser.Error, ValueError):
            return None
def detect_width(self):
"""Returns width of ROI used for detection.
"""
return self.config.getint('Detect', 'Width')
def detect_height(self):
"""Returns height of ROI used for detection.
"""
return self.config.getint('Detect', 'Height')
def detect_batch_size(self):
"""Returns batch size used for detection training.
"""
return self.config.getint('Detect', 'BatchSize')
def detect_val_batch_size(self):
"""Returns batch size used for detection validation.
"""
return self.config.getint('Detect', 'ValBatchSize')
def detect_val_random_seed(self):
""" Returns the value to use for the random seed
to generate the validation population size.
"""
return self.config.getint('Detect', 'ValRandomSeed')
def detect_val_population(self):
""" Returns the population percentage for the validation set """
return self.config.getfloat('Detect', 'ValPopulation')
def detect_backbone(self):
""" Returns the backbone for retinanet to use """
if self.config.has_option('Detect', 'Backbone'):
return self.config.get('Detect', 'Backbone')
else:
return None
def detect_force_aspect(self):
""" Returns the backbone for retinanet to use """
if self.config.has_option('Detect', 'ForceAspect'):
return self.config.getfloat('Detect', 'ForceAspect')
else:
return None
def detect_num_epochs(self):
"""Returns number of epochs used for detection training.
"""
return self.config.getint('Detect', 'NumEpochs')
def detect_frame_jitter(self):
"""Returns frame jitter setting for detection training.
"""
return self.config.getint('Detect', 'FrameJitter')
def detect_initial_epoch(self):
"""Returns initial epoch for detection training.
"""
return self.config.getint('Detect', 'InitialEpoch')
def detect_steps_per_epoch(self):
"""Returns steps per epoch for detection traiing if the key exists,
otherwise returns None.
"""
steps_per_epoch = None
if self.config.has_option('Detect', 'StepsPerEpoch'):
steps_per_epoch = self.config.getint('Detect', 'StepsPerEpoch')
return steps_per_epoch
def detect_do_validation(self):
"""Returns whether to do validation if the key exists, otherwise
returns default value of True.
"""
do_validation = True
if self.config.has_option('Detect', 'DoValidation'):
do_validation = self.config.getboolean('Detect', 'DoValidation')
return do_validation
def classify_width(self):
"""Returns width of detections used for classification training.
"""
return self.config.getint('Classify', 'Width')
def classify_height(self):
"""Returns height of detections used for classification training.
"""
return self.config.getint('Classify', 'Height')
def classify_batch_size(self):
"""Returns batch size used for classification training.
"""
return self.config.getint('Classify', 'BatchSize')
def classify_val_batch_size(self):
"""Returns batch size used for classification validation.
"""
return self.config.getint('Classify', 'ValBatchSize')
def classify_num_epochs(self):
"""Returns number of epochs used for classification training.
"""
return self.config.getint('Classify', 'NumEpochs')
def classify_initial_epoch(self):
"""Returns initial epoch for classify training.
"""
return self.config.getint('Classify', 'InitialEpoch')
def classify_steps_per_epoch(self):
"""Returns steps per epoch for classication training if the key exists,
otherwise returns None.
"""
steps_per_epoch = None
if self.config.has_option('Classify', 'StepsPerEpoch'):
steps_per_epoch = self.config.getint('Classify', 'StepsPerEpoch')
return steps_per_epoch
def classify_do_validation(self):
"""Returns whether to do validation if the key exists, otherwise
returns default value of True.
"""
do_validation = True
if self.config.has_option('Classify', 'DoValidation'):
do_validation = self.config.getboolean('Classify', 'DoValidation')
return do_validation
def count_num_steps(self):
"""Returns number of timesteps used as input to count model.
"""
return self.config.getint('Count', 'NumSteps')
def count_num_steps_crop(self):
"""Returns number of timesteps to crop for count model.
"""
return self.config.getint('Count', 'NumStepsCrop')
def count_num_features(self):
"""Returns number of features used as input to count model.
"""
return self.config.getint('Count', 'NumFeatures')
def count_batch_size(self):
"""Returns batch size used for count training.
"""
return self.config.getint('Count', 'BatchSize')
def count_val_batch_size(self):
"""Returns batch size used for count validation.
"""
return self.config.getint('Count', 'ValBatchSize')
def count_num_epochs(self):
"""Returns number of epochs used for count training.
"""
return self.config.getint('Count', 'NumEpochs')
def count_initial_epoch(self):
"""Returns initial epoch for count training.
"""
return self.config.getint('Count', 'InitialEpoch')
def count_steps_per_epoch(self):
"""Returns steps per epoch for count traiing if the key exists,
otherwise returns None.
"""
steps_per_epoch = None
if self.config.has_option('Count', 'StepsPerEpoch'):
steps_per_epoch = self.config.getint('Count', 'StepsPerEpoch')
return steps_per_epoch
def count_do_validation(self):
"""Returns whether to do validation if the key exists, otherwise
returns default value of True.
"""
do_validation = True
if self.config.has_option('Count', 'DoValidation'):
do_validation = self.config.getboolean('Count', 'DoValidation')
return do_validation
def count_num_res_steps(self):
"""Returns number of timesteps after cropping.
"""
return self.count_num_steps() - self.count_num_steps_crop() * 2
def train_vids(self):
"""Returns list of paths to videos in training data.
"""
patt = os.path.join(self.train_dir(), 'videos', '*.mp4')
return glob.glob(patt)
def test_vids(self):
"""Returns list of paths to videos in test data.
"""
patt = os.path.join(self.test_dir(), 'videos', '*.mp4')
return glob.glob(patt)
def test_truth_files(self):
"""Returns list of paths to truth files in test data.
"""
patt = os.path.join(self.test_dir(), 'truth', '*.csv')
return glob.glob(patt)
def all_video_ids(self):
"""Gets all video IDs as a list.
"""
video_ids = []
for vid in self.train_vids():
_, f = os.path.split(vid)
vid_id, _ = os.path.splitext(f)
if vid_id not in video_ids:
video_ids.append(vid_id)
return video_ids
def length_path(self):
"""Returns path to length annotations.
"""
if self.config.has_option('Data', 'LengthFormat') and \
self.config.get('Data', 'LengthFormat') == "box":
return os.path.join(self.train_dir(), 'boxLength.csv')
else:
return os.path.join(self.train_dir(), 'length.csv')
def cover_path(self):
"""Returns path to cover annotations.
"""
return os.path.join(self.train_dir(), 'cover.csv')
def train_mask_imgs_dir(self):
"""Returns path to images used for training find ruler alg.
"""
return os.path.join(self.train_dir(), 'masks', 'images')
def train_mask_masks_dir(self):
"""Returns path to masks used for training find ruler alg.
"""
return os.path.join(self.train_dir(), 'masks', 'masks')
def train_imgs_dir(self):
"""Returns path to training images directory.
"""
return os.path.join(self.work_dir(), 'train_imgs')
def train_rois_dir(self):
"""Returns path to training roi images directory.
"""
return os.path.join(self.work_dir(), 'train_rois')
def train_dets_dir(self):
"""Returns path to training detection images directory.
"""
return os.path.join(self.work_dir(), 'train_dets')
def predict_masks_dir(self):
"""Returns path to generated mask images directory.
"""
return os.path.join(self.work_dir(), 'predict_masks')
def train_imgs(self):
"""Returns list of all training images.
"""
patt = os.path.join(self.train_imgs_dir(), '**', '*.png')
return glob.glob(patt, recursive=True)
def num_frames_path(self):
"""Returns path to csv containing number of frames per video.
"""
return os.path.join(self.train_imgs_dir(), 'num_frames.csv')
def train_roi_img(self, video_id, frame):
"""Returns a specific image.
"""
return os.path.join(
self.train_rois_dir(),
video_id,
"{:04d}.jpg".format(frame)
)
def train_rois(self):
"""Returns list of all training roi images.
"""
patt = os.path.join(self.train_rois_dir(), '**', '*.jpg')
patt2 = os.path.join(self.train_rois_dir(), '**', '*.png')
        return glob.glob(patt, recursive=True) + glob.glob(patt2, recursive=True)
def train_dets(self):
"""Returns list of all training detection images.
"""
patt = os.path.join(self.train_dets_dir(), '**', '*.jpg')
return glob.glob(patt, recursive=True)
def checkpoints_dir(self, model):
"""Returns path to checkpoints directory.
# Arguments
model: Which model this corresponds to, one of find_ruler,
detect, classify, count.
"""
return os.path.join(self.work_dir(), 'checkpoints', model)
def checkpoint_best(self, model):
"""Returns path to best checkpoint file.
The path is meant to be formatted with epoch and validation loss.
# Arguments
model: Which model this corresponds to, one of find_ruler,
detect, classify, count.
"""
fname = "checkpoint-best-{epoch:03d}-{loss:.4f}.hdf5"
return os.path.join(self.checkpoints_dir(model), fname)
def checkpoint_periodic(self, model):
"""Returns path to periodic checkpoint file.
The path is meant to be formatted with epoch and validation loss.
# Arguments
model: Which model this corresponds to, one of find_ruler,
detect, classify, count.
"""
fname = "checkpoint-periodic-{epoch:03d}-{loss:.4f}.hdf5"
return os.path.join(self.checkpoints_dir(model), fname)
def tensorboard_dir(self, model):
"""Returns path to tensorboard directory.
"""
name = datetime.now().strftime("%Y_%m_%d_")
name += model
return os.path.join(self.work_dir(), 'tensorboard', name)
def inference_dir(self):
"""Returns output path for inference results.
"""
return os.path.join(self.work_dir(), 'inference')
def find_ruler_inference_path(self):
"""Returns path to find ruler inference results.
"""
return os.path.join(self.inference_dir(), 'find_ruler.csv')
def detect_inference_path(self):
"""Returns path to detection inference results.
"""
return os.path.join(self.inference_dir(), 'detect.csv')
def classify_inference_path(self):
"""Returns path to classification inference results.
"""
return os.path.join(self.inference_dir(), 'classify.csv')
def test_output_dir(self):
"""Returns path to test outputs.
"""
return os.path.join(self.work_dir(), 'test')
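
# --- Usage sketch (added for illustration, not part of the original module) ---
# Minimal example of how ConfigInterface might be used; 'train.ini' is a
# hypothetical path and must contain at least the [Paths] and [Data] sections
# referenced above.
if __name__ == '__main__':
    cfg = ConfigInterface('train.ini')
    print("Species:", cfg.species())
    print("Model dir:", cfg.model_dir())
    print("Detect model path:", cfg.detect_model_path())
    print("Tensorboard port:", cfg.tensorboard_port())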
|
py | 1a44c2b733723d827ee70aafdcaea320cb585bd3 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @trojanzhex
from pyrogram import filters
from pyrogram import Client as trojanz
from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
from config import Config
from script import Script
from helpers.progress import PRGRS
from helpers.tools import clean_up
from helpers.download import download_file, DATA
from helpers.ffmpeg import extract_audio, extract_subtitle
@trojanz.on_callback_query()
async def cb_handler(client, query):
if query.data == "start_data":
await query.answer()
keyboard = InlineKeyboardMarkup([
[InlineKeyboardButton("HELP", callback_data="help_data"),
InlineKeyboardButton("ABOUT", callback_data="about_data")],
[InlineKeyboardButton("⭕️ JOIN OUR CHANNEL ⭕️", url="https://t.me/TroJanzHEX")]
])
await query.message.edit_text(
Script.START_MSG.format(query.from_user.mention),
reply_markup=keyboard,
disable_web_page_preview=True
)
return
elif query.data == "help_data":
await query.answer()
keyboard = InlineKeyboardMarkup([
[InlineKeyboardButton("BACK", callback_data="start_data"),
InlineKeyboardButton("ABOUT", callback_data="about_data")],
[InlineKeyboardButton("⭕️ SUPPORT ⭕️", url="https://t.me/TroJanzSupport")]
])
await query.message.edit_text(
Script.HELP_MSG,
reply_markup=keyboard,
disable_web_page_preview=True
)
return
elif query.data == "about_data":
await query.answer()
keyboard = InlineKeyboardMarkup([
[InlineKeyboardButton("BACK", callback_data="help_data"),
InlineKeyboardButton("START", callback_data="start_data")],
[InlineKeyboardButton("SOURCE CODE", url="https://github.com/TroJanzHEX/Streams-Extractor")]
])
await query.message.edit_text(
Script.ABOUT_MSG,
reply_markup=keyboard,
disable_web_page_preview=True
)
return
elif query.data == "download_file":
await query.answer()
await query.message.delete()
await download_file(client, query.message)
elif query.data == "progress_msg":
try:
msg = "Progress Details...\n\nCompleted : {current}\nTotal Size : {total}\nSpeed : {speed}\nProgress : {progress:.2f}%\nETA: {eta}"
await query.answer(
msg.format(
**PRGRS[f"{query.message.chat.id}_{query.message.message_id}"]
),
show_alert=True
)
except:
await query.answer(
"Processing your file...",
show_alert=True
)
elif query.data == "close":
await query.message.delete()
await query.answer(
"Cancelled...",
show_alert=True
)
elif query.data.startswith('audio'):
await query.answer()
try:
stream_type, mapping, keyword = query.data.split('_')
data = DATA[keyword][int(mapping)]
await extract_audio(client, query.message, data)
except:
await query.message.edit_text("**Details Not Found**")
elif query.data.startswith('subtitle'):
await query.answer()
try:
stream_type, mapping, keyword = query.data.split('_')
data = DATA[keyword][int(mapping)]
await extract_subtitle(client, query.message, data)
except:
await query.message.edit_text("**Details Not Found**")
elif query.data.startswith('cancel'):
try:
query_type, mapping, keyword = query.data.split('_')
data = DATA[keyword][int(mapping)]
await clean_up(data['location'])
await query.message.edit_text("**Cancelled...**")
await query.answer(
"Cancelled...",
show_alert=True
)
except:
await query.answer()
await query.message.edit_text("**Details Not Found**")
|
py | 1a44c33f087c592a6bfc3b054c579665086a22b6 | """!
This file contains some pair potentials.
\ingroup lammpstools
"""
import lammpstools
import dumpreader
import numpy as np
import math
import sys
def make_pair_table( fname, name, pair_pot, N, mode = "R", lo = 1.0, hi = 10.0 ):
"Dumps a LAMMPS-style pair table to given file."
if mode == "R":
dr = (hi - lo)/(N-1)
elif mode == "RSQ":
print >> sys.stderr, "Mode RSQ not supported!"
return -1
elif mode == "BITMAP":
print >> sys.stderr, "Mode BITMAP not supported!"
return -1
else:
print >> sys.stderr, "Mode ", mode, " not recognized!"
return -1
# First test the given potential:
if lammpstools.test_potential( pair_pot, lo, hi, 1e-4, 1e-8 ):
print >> sys.stderr, "Potential not consistent!"
# return -2
# Fill table:
fp = open(fname,"w")
use_fprime = False
if hasattr( pair_pot, "force_prime" ):
"use_fprime = True"
print >> fp, name
    if use_fprime:
        fplo = pair_pot.force_prime(lo)
        fphi = pair_pot.force_prime(hi)
        print >> fp, "N %d %s %e %e FPRIME %f %f" % (N, mode, lo, hi, fplo, fphi)
    else:
        print >> fp, "N %d %s %f %f" % (N, mode, lo, hi)
print >> fp, ""
for i in range(0,N):
r = lo + i*dr
E = pair_pot.energy(r)
f = pair_pot.force(r)
print >> fp, "%d %e %e %e" % (i,r,E,f)
return 0
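
# --- Usage sketch (added for illustration, not part of the original module) ---
# make_pair_table() only needs an object with energy(r) and force(r) methods
# (and optionally force_prime(r)). The Lennard-Jones class below is a
# hypothetical example potential, not something defined in lammpstools.
class LennardJones:
    def __init__(self, eps, sigma):
        self.eps = eps
        self.sigma = sigma
    def energy(self, r):
        sr6 = (self.sigma / r)**6
        return 4.0 * self.eps * (sr6 * sr6 - sr6)
    def force(self, r):
        # force = -dE/dr for the 12-6 Lennard-Jones potential
        sr6 = (self.sigma / r)**6
        return 24.0 * self.eps * (2.0 * sr6 * sr6 - sr6) / r
# Example call (commented out; writes lj.table with 1000 points on [0.9, 3.0]):
# make_pair_table("lj.table", "LJ", LennardJones(1.0, 1.0), 1000, "R", 0.9, 3.0)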
|
py | 1a44c341904611230a7bcae4900343064ace9fa0 | import math
from typing import Tuple
class Rect:
def __init__(
self, x=None, y=None, width=None, height=None, position=None, size=None
):
self.x = x
self.y = y
self.width = width
self.height = height
if position:
self.x, self.y = position
if size:
self.width, self.height = size
@property
def size(self) -> Tuple[float, float]:
return self.width, self.height
@property
def position(self) -> Tuple[float, float]:
return self.x, self.y
@property
def mid_x(self) -> float:
return self.x + self.width / 2
@property
def mid_y(self) -> float:
return self.y + self.height / 2
@property
def mid_point(self) -> Tuple[float, float]:
return self.mid_x, self.mid_y
@property
def x2(self):
return self.x + self.width
@property
def y2(self):
return self.y + self.height
def shrink(self, left=0, right=0, top=0, bottom=0):
return Rect(
self.x + left,
self.y + top,
self.width - left - right,
self.height - top - bottom,
)
def __eq__(self, other):
if not isinstance(other, Rect):
return False
return (
self.x == other.x
and self.y == other.y
and self.width == other.width
and self.height == other.height
)
def __repr__(self):
return "<Rect x={0.x} y={0.y} w={0.width} h={0.height}>".format(self)
def segment_delta(p1, p2):
return p2[0] - p1[0], p2[1] - p1[1]
def segment_len(p1, p2):
dx, dy = segment_delta(p1, p2)
return math.sqrt(dx * dx + dy * dy)
def segment_resize(p1, p2, len_change):
dx, dy = segment_delta(p1, p2)
ln = segment_len(p1, p2)
t = 1 + (len_change / ln)
return p1[0] + dx * t, p1[1] + dy * t
def find_centroid(points) -> Tuple[float, float]:
xs = sum([p[0] for p in points])
ys = sum([p[1] for p in points])
return (xs / len(points), ys / len(points))
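
# --- Usage sketch (added for illustration, not part of the original module) ---
# A few assumed examples of how the helpers above behave.
if __name__ == "__main__":
    r = Rect(position=(10, 20), size=(100, 50))
    assert r.mid_point == (60.0, 45.0)
    assert r.shrink(left=5, top=5) == Rect(15, 25, 95, 45)
    assert segment_len((0, 0), (3, 4)) == 5.0
    assert find_centroid([(0, 0), (2, 0), (2, 2), (0, 2)]) == (1.0, 1.0)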
|
py | 1a44c3439310f62d714b659efbe38ec28a382ebf | from cms.api import add_plugin
from cms.test_utils.testcases import CMSTestCase
from djangocms_bootstrap5.contrib.bootstrap5_badge.cms_plugins import (
Bootstrap5BadgePlugin,
)
from ..fixtures import B5TestFixture
class B5BadgePluginTestCase(B5TestFixture, CMSTestCase):
def test_plugin(self):
plugin = add_plugin(
placeholder=self.placeholder,
plugin_type=Bootstrap5BadgePlugin.__name__,
language=self.language,
badge_text="some text",
)
plugin.full_clean()
self.page.publish(self.language)
with self.login_user_context(self.superuser):
response = self.client.get(self.request_url)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<span class="badge badge-primary">some text</span>',
html=True,
)
# test with pills enabled
plugin = add_plugin(
placeholder=self.placeholder,
plugin_type=Bootstrap5BadgePlugin.__name__,
language=self.language,
badge_text="some text",
badge_pills=True,
)
plugin.full_clean()
self.page.publish(self.language)
with self.login_user_context(self.superuser):
response = self.client.get(self.request_url)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<span class="badge badge-pill badge-primary">some text</span>',
html=True,
)
|
py | 1a44c3859762c4bea191c8ecb73c2fc864829bd1 | # Author: Anantha Ravi Kiran
# Data : 06/04/14
# Readers and plotters for image sequence
#
# Copyright 2013-2015, by the California Institute of Technology. ALL
# RIGHTS RESERVED. United States Government Sponsorship
# acknowledged. Any commercial use must be negotiated with the Office
# of Technology Transfer at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws and
# regulations. By accepting this document, the user agrees to comply
# with all applicable U.S. export laws and regulations. User has the
# responsibility to obtain export licenses, or other export authority
# as may be required before exporting such information to foreign
# countries or providing access to foreign persons.
import os, sys, glob, pdb, scipy, scipy.misc
import numpy as N
from PIL import Image
import cv2 as cv2
import random
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib as mpl
import pylab
import pickle as pickle
from dataset import *
# For color_mask_img function
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.gridspec as gridspec
from skimage import data, color, io, img_as_float
#from mlabwrap import mlab
################## Image Sequence Data ################
class NavcamData(Dataset):
# Containes the load, init and plot functions
# for sequence of image dataset - uses sol number
_VL_SIFT_ = 0
def __init__(self, input_folder=None, sol_number=None, init_sols=None, scaleInvariant=None):
self.input_folder = None
self.sol_number = None
self.init_sols = None
self.dataset_folder = os.path.join(input_folder, 'sol%.4d' % sol_number)
self.datafiles = []
self.init_data_folder = []
self.data_files_count = 0
self.img_label_split = [0]
self.data_split = [0]
self.selections = []
self.init_sols = []
self.priority = 0
self.score = []
self.shadow_score = []
self.met_score = []
self.select_rect = []
self.rand_score = []
self.shadow_rand_score = []
self.met_rand_score = []
self.rec_features = {}
self.orig_features = {}
self.loc = {}
self.zoom_window = {}
# Variables from TCData
self.feature_string = ('dsift')
# Used for extracting sub images to extract features
self.winsize = 100
self.nbins = 101
self.scaleInvariant = scaleInvariant
        if input_folder is not None:
            self.input_folder = input_folder
        if sol_number is not None:
            self.sol_number = sol_number
        if init_sols is not None:
            self.init_sols = init_sols
        if scaleInvariant is not None:
            self.scaleInvariant = scaleInvariant
# Data folder for analysis
print('Input Data')
for i,data_file in enumerate(glob.glob('%s/*eff*.img.jpg'%(self.dataset_folder))):
print(data_file)
self.datafiles.append(data_file)
if not scaleInvariant:
pkl_file = data_file.split('.')[0] + '.pkl'
else:
pkl_file = data_file.split('.')[0] + '.si'
if not i:
# Initialized for the first run and extended thereafter
Dataset.__init__(self, pkl_file,
'tc-sol%d-prior%s' % (self.sol_number,
len(self.init_sols)))
# pkl_file[pkl_file.rfind('/')+1:pkl_file.rfind('_')+1])
if not scaleInvariant:
(self.data, self.labels, feature_string, self.width, self.height, \
self.winsize, self.nbins) = self.read_ppm(data_file, pkl_file)
else:
(self.data, self.labels, feature_string, self.width, self.height, \
self.winsize, self.nbins) = self.si_read_ppm(data_file, pkl_file)
self.npixels = self.width * self.height
self.xlabel = 'Grayscale intensity'
self.ylabel = 'Probability'
self.xvals = scipy.arange(self.data.shape[0]).reshape(-1,1)
self.img_label_split.extend([len(self.labels)])
self.data_split.extend([self.data.shape[1]])
self.selections.append(N.zeros((self.height, self.width)))
self.select_rect.append({})
self.width = N.array([self.width])
self.height = N.array([self.height])
self.xvals = N.array([self.xvals])
continue
if not scaleInvariant:
extracted_features = self.read_ppm(data_file, pkl_file)
else:
extracted_features = self.si_read_ppm(data_file, pkl_file)
self.extend(extracted_features)
self.data_files_count = self.data_files_count + 1
self.selections.append(N.zeros((self.height[i], self.width[i])))
self.select_rect.append({})
# Data folder for initialization
print('Init Data Folders')
for init_sol in init_sols:
init_dataset_folder = os.path.join(input_folder, 'sol%.4d' % init_sol)
print(init_dataset_folder)
if os.path.isdir(init_dataset_folder):
for init_data_file in glob.glob('%s/*eff*.img.jpg'%(init_dataset_folder)):
self.initfilename = init_data_file
if not scaleInvariant:
init_pkl_file = init_data_file.split('.')[0] + '.pkl'
else:
init_pkl_file = init_data_file.split('.')[0] + '.si'
if not scaleInvariant:
(initdata, labels, features_string, width, height, \
winsize, nbins) = self.read_ppm(init_data_file, init_pkl_file)
else:
(initdata, labels, features_string, width, height, \
winsize, nbins) = self.si_read_ppm(init_data_file, init_pkl_file)
if not len(self.initdata):
self.initdata = initdata
else:
self.initdata = N.concatenate((self.initdata, initdata),axis=1)
@classmethod
def extract_sift(cls, rawfilename, winsize, nbins):
"""read_ppm(rawfilename, filename)
Read in raw pixel data from rawfilename (.ppm).
Create a histogram around each pixel to become
the feature vector for that obsevation (pixel).
Pickle the result and save it to filename.
Note: does NOT update object fields.
Follow this with a call to readin().
"""
if cls._VL_SIFT_:
# VLSIFT matlab
im = Image.open(rawfilename)
(width, height) = im.size
mlab.bb_sift(N.array(im), 'temp.mat')
sift_features = scipy.io.loadmat('temp.mat')
kp = sift_features['f_']
sift_features = sift_features['d_']
sift_features = scipy.concatenate((sift_features.transpose(), kp[2:4].transpose()), 1).transpose()
labels = [];
for ikp in kp.transpose():
(x,y) = ikp[0:2]
labels += ['(%d,%d)' % (y,x)]
else:
#Opencv SIFT
img = cv2.imread(rawfilename)
gray= cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
height, width = gray.shape
# Computing SIFT
sift = cv2.SIFT(edgeThreshold = 3)
kp, des = sift.detectAndCompute(gray,None)
labels = []
sift_features = N.transpose(des)
scale_angle = []
for ikp in kp:
(x,y) = ikp.pt
scale_angle.append([ikp.size/12, ikp.angle])
labels += ['(%d,%d)' % (y,x)]
scale_angle = N.array(scale_angle)
sift_features = scipy.concatenate((sift_features.transpose(), scale_angle), 1).transpose()
return (sift_features, labels, width, height)
@classmethod
def extract_dsift(cls, rawfilename, winsize, nbins):
"""read_ppm(rawfilename, filename)
Read in raw pixel data from rawfilename (.ppm).
Create a histogram around each pixel to become
the feature vector for that obsevation (pixel).
Pickle the result and save it to filename.
Note: does NOT update object fields.
Follow this with a call to readin().
"""
im = Image.open(rawfilename)
(width, height) = im.size
# To be removed in the future
# Pick up all windows, stepping by half of the window size
labels = []
halfwin = int(winsize/2)
for y in range(halfwin, height-halfwin, int(halfwin/2)):
for x in range(halfwin, width-halfwin, int(halfwin/2)):
labels += ['(%d,%d)' % (y,x)]
mlab.bb_dsift(N.array(im), winsize, 'temp.mat')
sift_features = scipy.io.loadmat('temp.mat')
sift_features = sift_features['d_']
return (sift_features, labels, width, height)
@classmethod
def extract_hist(cls, rawfilename, winsize, nbins):
# This function extracts the histogram features from the image
im = Image.open(rawfilename)
(width, height) = im.size
npixels = width * height
pix = scipy.array(im)
# Generate one feature vector (histogram) per pixel
#winsize = 20 # for test.pgm
#winsize = 0 # for RGB
halfwin = int(winsize/2)
bins = scipy.linspace(0, 255, nbins)
# Only use windows that are fully populated
mywidth = width-winsize
myheight = height-winsize
#data = scipy.zeros((nbins-1, mywidth * myheight))
#data = scipy.zeros((3*winsize*winsize, mywidth * myheight))
data = []
labels = []
# Pick up all windows, stepping by half of the window size
for y in range(halfwin, height-halfwin, int(halfwin/2)):
for x in range(halfwin, width-halfwin, int(halfwin/2)):
# Read in data in row-major order
ind = (y-halfwin)*mywidth + (x-halfwin)
#data[:,ind] = \
# scipy.histogram(pix[y-halfwin:y+halfwin,
# x-halfwin:x+halfwin],
# bins)[0]
# Just RGB
#data[:,ind] = pix[y,x]
# RGB window
#data[:,ind] = pix[y-halfwin:y+halfwin,x-halfwin:x+halfwin].flat
                hist_features = cls.extract_hist_subimg(pix[y-halfwin:y+halfwin,x-halfwin:x+halfwin])
if data == []:
data = hist_features.reshape(-1,1)
else:
data = scipy.concatenate((data, hist_features.reshape(-1,1)),1)
labels += ['(%d,%d)' % (y,x)]
return (data, labels, width, height)
@staticmethod
def extract_hist_subimg(sub_image):
hist_bins = range(0,260,1)
hist_features = N.histogram(sub_image.ravel(), hist_bins)[0]
return hist_features
def si_read_ppm(self, rawfilename, filename):
# This function reads the ppm/jpg file and extracts the features if the
        # features pkl file doesn't exist. It also supports extending the
        # feature vector without recomputing features that are already present.
new_feature_string = []
updated_feature = 0
data = N.array([], dtype=int)
if os.path.exists(filename):
pkl_f = open(filename, 'r')
(data, labels, feature_string, width, height, winsize, nbins)= pickle.load(pkl_f)
self.winsize = winsize
self.nbins = nbins
new_feature_string = list(feature_string)
pkl_f.close()
if not new_feature_string.count('sift'):
updated_feature = 1
(sift_features, labels, width, height) = self.extract_sift(rawfilename, self.winsize, self.nbins)
if data.size:
data = scipy.concatenate((data.transpose(), sift_features.transpose()), 1).transpose()
else:
data = sift_features
new_feature_string.append('sift')
if updated_feature:
outf = open(filename, 'w')
pickle.dump((data, labels, new_feature_string, width, height, self.winsize, self.nbins),outf)
outf.close()
print 'Saved data to %s.' % filename
return (data, labels, new_feature_string, width, height, self.winsize, self.nbins)
def read_ppm(self, rawfilename, filename):
# This function reads the ppm/jpg file and extracts the features if the
        # features pkl file doesn't exist. It also supports extending the
        # feature vector without recomputing features that are already present.
new_feature_string = []
updated_feature = 0
data = N.array([], dtype=int)
if os.path.exists(filename):
pkl_f = open(filename, 'r')
(data, labels, feature_string, width, height, winsize, nbins)= pickle.load(pkl_f)
self.winsize = winsize
self.nbins = nbins
new_feature_string = list(feature_string)
pkl_f.close()
if not new_feature_string.count('dsift'):
updated_feature = 1
(sift_features, labels, width, height) = self.extract_dsift(rawfilename, self.winsize, self.nbins)
if data.size:
data = scipy.concatenate((data.transpose(), sift_features.transpose()), 1).transpose()
else:
data = sift_features
new_feature_string.append('dsift')
if not new_feature_string.count('histogram'):
updated_feature = 1
(hist_features, labels, width, height) = self.extract_hist(rawfilename, self.winsize, self.nbins)
hist_features = hist_features/(self.winsize)
if data.size:
data = scipy.concatenate((data.transpose(), hist_features.transpose()), 1).transpose()
else:
data = hist_features
new_feature_string.append('histogram')
'''
if not new_feature_string.count('position'):
updated_feature = 1
position_features = []
for label in labels:
(y,x) = map(int, label.strip('()').split(','))
position_features.append([x,y])
position_features = N.array(position_features)
if data.size:
data = scipy.concatenate((data.transpose(), position_features), 1).transpose()
else:
data = position_features
new_feature_string.append('position')
'''
if updated_feature:
outf = open(filename, 'w')
pickle.dump((data, labels, new_feature_string, width, height, self.winsize, self.nbins),outf)
outf.close()
print 'Saved data to %s.' % filename
return (data, labels, new_feature_string, width, height, self.winsize, self.nbins)
def extend(self, extracted_features):
# This method reads the pkl files in a folder and adds them to the
# existing data for processing in the TCData class.
(data, labels, feature_string, width, height, winsize, nbins) = extracted_features
npixels = width * height
xlabel = 'Grayscale intensity'
ylabel = 'Probability'
xvals = scipy.arange(self.data.shape[0]).reshape(-1,1)
self.data = N.concatenate((self.data, data),axis=1)
self.width = N.append(self.width, width)
self.height = N.append(self.height, height)
self.xvals = N.append(self.xvals, xvals)
self.labels.extend(labels)
self.img_label_split.extend([len(self.labels)])
self.data_split.extend([self.data.shape[1]])
def compute_score(self, img_idx, y, x, mask):
" Compute the score for deck or met with idx "
qtrwin = self.winsize/2
if mask==0:
mask_file = self.datafiles[img_idx].split('.')[0] + '.jpg'
elif mask==1:
mask_file = self.datafiles[img_idx].split('.')[0] + '.msk.jpg'
else:
mask_file = self.datafiles[img_idx].split('.')[0] + '.shadow.jpg'
selections_pad = N.zeros((self.height[img_idx] + self.winsize,
self.width[img_idx] + self.winsize))
mask_img = cv2.imread(mask_file, 0)
selections_pad[qtrwin:self.height[img_idx]+qtrwin,
qtrwin:self.width[img_idx]+qtrwin] = mask_img
csel_mask = selections_pad[y:y+self.winsize, x:x+self.winsize]
# Matches are pixels with intensity 255, so divide by this
# to get number of matching pixels.
return (csel_mask.sum()/255)
def save_rec(self, reconst_features, ind, orig_features, k):
img_idx = N.where(self.img_label_split > ind)[0][0] - 1
(y,x) = map(int, self.labels[ind].strip('()').split(','))
outdir = os.path.join('results', self.name)
        figfile = os.path.join(outdir,
                               '%s-priority-k-%d-%d.png' % (self.name, k, img_idx))
if figfile in self.rec_features:
self.rec_features[figfile].append(reconst_features)
self.orig_features[figfile].append(orig_features)
self.loc[figfile].append([x,y])
else:
self.rec_features[figfile]= [reconst_features]
self.orig_features[figfile]= [orig_features]
self.loc[figfile] = [[x,y]]
def plot_item(self, m, ind, x, r, k, label):
"""plot_item(self, m, ind, x, r, k, label)
Plot selection m (index ind, data in x) and its reconstruction r,
with k and label to annotate the plot.
"""
img_idx = N.where(self.img_label_split > ind)[0][0] - 1
img_data_file = self.datafiles[img_idx]
rand_ind = random.randint(0, self.img_label_split[-1])
rand_idx = N.where(self.img_label_split > rand_ind)[0][0] - 1
if x == [] or r == []:
print "Error: No data in x and/or r."
return
# im = Image.fromarray(x.reshape(self.winsize, self.winsize, 3))
outdir = os.path.join('results', self.name)
if not os.path.exists(outdir):
os.mkdir(outdir)
# figfile = '%s/%s-sel-%d-k-%d.pdf' % (outdir, self.name, m, k)
# im.save(figfile)
# print 'Wrote plot to %s' % figfile
# record the selections in order, at their x,y coords
# subtract selection number from n so first sels have high values
mywidth = self.width[img_idx] - self.winsize
myheight = self.height[img_idx] - self.winsize
# set all unselected items to a value 1 less than the latest
(y,x) = map(int, label.strip('()').split(','))
qtrwin = self.winsize/2
if y < qtrwin:
y = qtrwin
if x < qtrwin:
x = qtrwin
        if y + qtrwin > myheight:
            y = myheight - qtrwin
if x + qtrwin > mywidth:
x = mywidth - qtrwin
im = cv2.imread(img_data_file,0)
im1 = cv2.equalizeHist(im)
im1 = cv2.medianBlur(im1,5)
# Selection matrix manipulation
#self.selections[ind/mywidth, ind%myheight] = priority
self.priority = self.priority + 1
self.selections[img_idx][y-qtrwin:y+qtrwin, x-qtrwin:x+qtrwin] = self.priority
self.select_rect[img_idx][self.priority] = ((x-qtrwin, y-qtrwin), (x+qtrwin, y+qtrwin))
figfile = os.path.join(outdir,
'%s-priority-k-%d-%d.pdf' % (self.name, k, img_idx))
figfile_jpg = os.path.join(outdir,
                                   '%s-priority-k-%d-%d.png' % (self.name, k, img_idx))
(img_masked, cmap, num_classes)= self.color_mask_img(im1, im, self.selections[img_idx], self.select_rect[img_idx], self.priority, figfile, 0, 0)
# Saving the masked image separately
img_disp = plt.imshow(img_masked)
plt.axis('off')
plt.savefig(figfile_jpg, bbox_inches='tight')
self.zoom_window[len(self.score)] = im[y-qtrwin:y+qtrwin, x-qtrwin:x+qtrwin]
# Deck mask
score = self.compute_score(img_idx, y, x, 0) * 100.0 / self.winsize / self.winsize
print 'Deck score: %.2f%%' % score
self.score.append(score)
# Meteorite mask
met_score = self.compute_score(img_idx, y, x, 1) * 100.0 / self.winsize / self.winsize
print 'Meteorite score: %.2f%%' % met_score
self.met_score.append(met_score)
# Shadow mask
score = self.compute_score(img_idx, y, x, 2)
self.shadow_score.append(score)
# zoom pictures
(left_top, bottom_right) = ((x-qtrwin, y-qtrwin), (x+qtrwin, y+qtrwin))
zoom_file = os.path.join(outdir, '%d.png' % (self.priority-1))
f, (ax1, ax2) = plt.subplots(1,2)
ax1.imshow(img_masked)
ax1.set_title('Original Image with selected block')
ax1.axis('off')
ax2.imshow(im[y-qtrwin:y+qtrwin,x-qtrwin:x+qtrwin], cmap = cm.Greys_r)
ax2.set_title('Selected Block (Filtered)')
ax2.axis('off')
plt.savefig(zoom_file, bbox_inches='tight')
print 'writing selection to %s/sel-%d.png' % (outdir, self.priority-1)
scipy.misc.imsave(os.path.join(outdir, 'sel-%d.png' % (self.priority-1)),
im[y-qtrwin:y+qtrwin,x-qtrwin:x+qtrwin])
# rand choices
(y,x) = map(int, self.labels[rand_ind].strip('()').split(','))
score = self.compute_score(rand_idx, y, x, 0)
self.rand_score.append(score)
met_score = self.compute_score(rand_idx, y, x, 1)
self.met_rand_score.append(met_score)
score = self.compute_score(rand_idx, y, x, 2)
self.shadow_rand_score.append(score)
def plot_score(self, outdir):
# Summary scoring
print 'Avg deck score: %.2f%%' % N.mean(self.score)
print 'Avg meteorite score: %.2f%%' % N.mean(self.met_score)
# Deck scoring technique
pylab.clf()
pylab.scatter(range(0,len(self.score)),self.score)
pylab.xlabel('Iterations')
pylab.ylabel('Score')
pylab.title('Deck score')
figfile = os.path.join(outdir, 'deck_score.png')
pylab.savefig(figfile, bbox_inches='tight')
pylab.clf()
# Deck scoring technique
pylab.scatter(range(0,len(self.score)),self.met_score)
pylab.xlabel('Iterations')
pylab.ylabel('Score')
pylab.title('Meteorite Score')
figfile = os.path.join(outdir, 'met_score.png')
pylab.savefig(figfile, bbox_inches='tight')
pylab.clf()
# Deck scoring technique
pylab.scatter(range(0,len(self.score)),self.rand_score)
pylab.xlabel('Iterations')
pylab.ylabel('Score')
pylab.title('Random Deck Score')
figfile = os.path.join(outdir, 'deck_rand_score.png')
pylab.savefig(figfile, bbox_inches='tight')
pylab.clf()
# Deck scoring technique
pylab.clf()
pylab.scatter(range(0,len(self.score)),self.met_rand_score)
pylab.xlabel('Iterations')
pylab.ylabel('Score')
pylab.title('Random Meteorite Score')
figfile = os.path.join(outdir, 'met_rand_score.png')
pylab.savefig(figfile, bbox_inches='tight')
# Deck scoring technique
pylab.clf()
pylab.scatter(range(0,len(self.score)),self.shadow_score)
pylab.xlabel('Iterations')
pylab.ylabel('Score')
pylab.title('Shadow overlap Score')
figfile = os.path.join(outdir, 'shadow_score.png')
pylab.savefig(figfile, bbox_inches='tight')
# Deck scoring technique
pylab.clf()
pylab.scatter(range(0,len(self.met_score)),self.shadow_rand_score)
pylab.xlabel('Iterations')
pylab.ylabel('Score')
pylab.title('Random Shadow overlap Score')
figfile = os.path.join(outdir, 'shadow_rand_score.png')
pylab.savefig(figfile, bbox_inches='tight')
pylab.clf()
@staticmethod
def color_mask_img(img, original_img, mask, rect, idx, figfile = None, show_image = 0, hist_overlay = 0):
alpha = 0.6
img = img_as_float(img)
rows, cols = img.shape
classes = rect.keys()
num_classes = len(classes) + 1
# Construct a colour image to superimpose
colors = [(1.0,1.0,1.0,1.0)]
colors.extend(cm.jet(N.linspace(0,1,num_classes-1)[::-1]))
norm = mpl.colors.Normalize(vmin=0, vmax=num_classes - 1)
cmap = mpl.colors.ListedColormap(colors)
m = cm.ScalarMappable(norm=norm, cmap=cmap)
color_mask = m.to_rgba(mask)
color_mask = color_mask[:,:,0:3]
# Construct RGB version of grey-level image
img_color = N.dstack((img, img, img))
# Convert the input image and color mask to Hue Saturation Value (HSV)
# colorspace
img_hsv = color.rgb2hsv(img_color)
## Replace the hue and saturation of the original image
## with that of the color mask
img_masked = color.hsv2rgb(img_hsv)
img_masked_copy = img_masked.copy()
if not hist_overlay:
for i,keys in enumerate(rect):
(left_top, bottom_right) = rect[keys]
cv2.rectangle(img_masked, left_top, bottom_right,color=colors[i+1],thickness=3)
else:
color_mask_hsv = color.rgb2hsv(color_mask)
img_hsv[..., 0] = color_mask_hsv[..., 0]
img_hsv[..., 1] = color_mask_hsv[..., 1] * alpha
(left_top, bottom_right) = rect[idx]
cv2.rectangle(img_masked_copy, left_top, bottom_right,color=colors[-1],thickness=3)
# Width ratio is uneven because of the colorbar - image with colorbar seemed to be smaller othewise
gs = gridspec.GridSpec(1, 2,width_ratios=[1.12,1])
# Display image with overlayed demud output
fig = plt.figure()
a = fig.add_subplot(gs[0])
a.set_title('Demud Output')
img_disp = plt.imshow(img_masked, cmap = cmap, vmin=0, vmax=num_classes)
plt.setp( a.get_yticklabels(), visible=False)
plt.setp( a.get_xticklabels(), visible=False)
divider = make_axes_locatable(plt.gca())
cax = divider.append_axes("left", "8%", pad="5%")
cax = plt.colorbar(img_disp, ticks = N.linspace(0.5,num_classes-.5, num_classes), cax = cax)
cax.set_ticklabels(range(0,num_classes) )
cax.ax.tick_params(labelsize=5)
# Display original image as well
a = fig.add_subplot(gs[1])
original_img = cv2.cvtColor(original_img, cv2.COLOR_GRAY2RGB)
a.set_title('Original Image')
img_disp = plt.imshow(original_img)
plt.setp( a.get_yticklabels(), visible=False)
plt.setp( a.get_xticklabels(), visible=False)
if not (figfile is None):
plt.savefig(figfile, bbox_inches='tight')
print 'Wrote selection priority plot to %s' % figfile
# Display the output
if show_image:
plt.show()
plt.close('all')
return (img_masked_copy, cmap, num_classes)
|
py | 1a44c40ff23192489dd9702b2ee56ccffd6dd28c | #===============================================================================
# Copyright 2009 Matt Chaput
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
from collections import defaultdict
from threading import Lock
from whoosh.fields import UnknownFieldError
from whoosh.index import Index
from whoosh.ramdb.ramreading import RamIndexReader
from whoosh.util import protected
class RamIndex(Index):
def __init__(self, schema):
self.schema = schema
self.docnum = 0
self._sync_lock = Lock()
self.is_closed = False
self.clear()
def clear(self):
# Maps fieldname -> a sorted list of term texts in that field
self.termlists = defaultdict(list)
# Maps fieldnames to dictionaries of term -> posting list
self.invertedindex = {}
for fieldname in self.schema.names():
self.invertedindex[fieldname] = defaultdict(list)
# Maps terms -> index frequencies
self.indexfreqs = defaultdict(int)
# Maps docnum -> stored field dicts
self.storedfields = {}
# Maps (docnum, fieldname) -> field length
self.fieldlengths = defaultdict(int)
# Maps (docnum, fieldname) -> posting list
self.vectors = {}
# Contains docnums of deleted documents
self.deleted = set()
def close(self):
del self.termlists
del self.invertedindex
del self.indexfreqs
del self.storedfields
del self.fieldlengths
del self.vectors
del self.deleted
self.is_closed = True
def doc_count_all(self):
return len(self.storedfields)
def doc_count(self):
return len(self.storedfields) - len(self.deleted)
def field_length(self, fieldname):
return sum(l for docnum_fieldname, l in self.fieldlengths.iteritems()
if docnum_fieldname[1] == fieldname)
def max_field_length(self, fieldname):
return max(l for docnum_fieldname, l in self.fieldlengths.iteritems()
if docnum_fieldname[1] == fieldname)
def reader(self):
return RamIndexReader(self)
def writer(self):
return self
@protected
def add_field(self, *args, **kwargs):
self.schema.add_field(*args, **kwargs)
@protected
def remove_field(self, fieldname):
self.schema.remove_field(fieldname)
if fieldname in self.termlists:
del self.termlists[fieldname]
        for fn, text in list(self.indexfreqs.keys()):
if fn == fieldname:
del self.indexfreqs[(fn, text)]
for sfields in self.storedfields.itervalues():
if fieldname in sfields:
del sfields[fieldname]
        for docnum, fn in list(self.fieldlengths.keys()):
if fn == fieldname:
del self.fieldlengths[(docnum, fn)]
if fieldname in self.fieldlength_maxes:
del self.fieldlength_maxes[fieldname]
        for docnum, fn in list(self.vectors.keys()):
if fn == fieldname:
del self.vectors[(docnum, fn)]
@protected
def delete_document(self, docnum, delete=True):
if delete:
self.deleted.add(docnum)
else:
self.deleted.remove(docnum)
@protected
def delete_by_term(self, fieldname, text):
inv = self.invertedindex
if fieldname in inv:
terms = inv[fieldname]
if text in terms:
postings = terms[text]
for p in postings:
self.deleted.add(p[0])
@protected
def delete_by_query(self, q, searcher=None):
s = self.searcher()
for docnum in q.docs(s):
self.deleted.add(docnum)
def has_deletions(self):
return bool(self.deleted)
@protected
def optimize(self):
deleted = self.deleted
# Remove deleted documents from stored fields
storedfields = self.storedfields
for docnum in deleted:
del storedfields[docnum]
# Remove deleted documents from inverted index
removedterms = defaultdict(set)
for fieldname in self.schema.names():
inv = self.invertedindex[fieldname]
for term, postlist in inv.iteritems():
inv[term] = [x for x in postlist if x[0] not in deleted]
# Remove terms that no longer have any postings after the
# documents are deleted
for term in inv.keys():
if not inv[term]:
removedterms[fieldname].add(term)
del inv[term]
# If terms were removed as a result of document deletion,
# update termlists and indexfreqs
termlists = self.termlists
for fieldname, removed in removedterms.iteritems():
termlists[fieldname] = [t for t in termlists[fieldname]
if t not in removed]
for text in removed:
del self.indexfreqs[(fieldname, text)]
# Remove documents from field lengths
fieldlengths = self.fieldlengths
for docnum, fieldname in fieldlengths.keys():
if docnum in deleted:
del fieldlengths[(docnum, fieldname)]
# Remove documents from vectors
vectors = self.vectors
for docnum, fieldname in vectors.keys():
if docnum in deleted:
del vectors[(docnum, fieldname)]
# Reset deleted list
self.deleted = set()
@protected
def add_document(self, **fields):
schema = self.schema
invertedindex = self.invertedindex
indexfreqs = self.indexfreqs
fieldlengths = self.fieldlengths
fieldnames = [name for name in sorted(fields.keys())
if not name.startswith("_")]
storedvalues = {}
for name in fieldnames:
if name not in schema:
raise UnknownFieldError("There is no field named %r" % name)
for name in fieldnames:
value = fields.get(name)
if value:
field = schema[name]
newwords = set()
fielddict = invertedindex[name]
# If the field is indexed, add the words in the value to the
# index
if field.indexed:
# Count of all terms in the value
count = 0
# Count of UNIQUE terms in the value
unique = 0
for w, freq, weight, valuestring in field.index(value):
if w not in fielddict:
newwords.add(w)
fielddict[w].append((self.docnum, weight, valuestring))
indexfreqs[(name, w)] += freq
count += freq
unique += 1
self.termlists[name] = sorted(set(self.termlists[name]) | newwords)
if field.scorable:
fieldlengths[(self.docnum, name)] = count
vector = field.vector
if vector:
vlist = sorted((w, weight, valuestring) for w, freq, weight, valuestring
in vector.word_values(value))
self.vectors[(self.docnum, name)] = vlist
if field.stored:
storedname = "_stored_" + name
if storedname in fields:
stored_value = fields[storedname]
else :
stored_value = value
storedvalues[name] = stored_value
self.storedfields[self.docnum] = storedvalues
self.docnum += 1
@protected
def add_reader(self, reader):
startdoc = self.docnum
has_deletions = reader.has_deletions()
if has_deletions:
docmap = {}
fieldnames = set(self.schema.names())
for docnum in xrange(reader.doc_count_all()):
if (not has_deletions) or (not reader.is_deleted(docnum)):
d = dict(item for item
in reader.stored_fields(docnum).iteritems()
if item[0] in fieldnames)
self.storedfields[self.docnum] = d
if has_deletions:
docmap[docnum] = self.docnum
for fieldname, length in reader.doc_field_lengths(docnum):
if fieldname in fieldnames:
self.fieldlengths[(self.docnum, fieldname)] = length
for fieldname in reader.vector_names():
if (fieldname in fieldnames
and reader.has_vector(docnum, fieldname)):
vpostreader = reader.vector(docnum, fieldname)
self.vectors[(self.docnum, fieldname)] = list(vpostreader.all_items())
vpostreader.close()
self.docnum += 1
for fieldname, text, _, _ in reader:
if fieldname in fieldnames:
postreader = reader.postings(fieldname, text)
while postreader.is_active():
docnum = postreader.id()
valuestring = postreader.value()
weight = postreader.weight()
if has_deletions:
newdoc = docmap[docnum]
else:
newdoc = startdoc + docnum
self.invertedindex[fieldname][text].append((newdoc,
weight,
valuestring))
postreader.next()
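
# --- Usage sketch (added for illustration, not part of the original module) ---
# Minimal example of exercising RamIndex, assuming a whoosh 1.x install where
# whoosh.fields and whoosh.ramdb are available.
if __name__ == "__main__":
    from whoosh.fields import Schema, TEXT, ID
    schema = Schema(path=ID(stored=True), content=TEXT)
    ix = RamIndex(schema)
    ix.add_document(path=u"/a", content=u"hello world")
    ix.add_document(path=u"/b", content=u"hello whoosh")
    print(ix.doc_count())          # -> 2
    ix.delete_by_term("path", u"/a")
    print(ix.doc_count())          # -> 1
    ix.optimize()                  # physically removes the deleted document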
|
py | 1a44c428891aa01751cb51ea2b28ce747b895793 |
import requests
#import sys
from selenium import webdriver
import re
from bs4 import BeautifulSoup
#from bs4 import UnicodeDammit
#sys.stdout = codecs.getwriter("iso-8859-8")(sys.stdout, 'xmlcharrefreplace')
import os
project_dir = os.path.dirname(os.path.abspath(__file__))
#project_dir = project_dir.replace('\\','/')
#phantom_linuxdir = project_dir + '/phantom/linux/bin/phantomjs'
#phantom_linuxdir= '/app/getrw_tiki/phantom/linux/bin/phantomjs'
phantom_windir = project_dir + '/phantom/windows/bin/phantomjs'
#print phantom_linuxdir
#client = webdriver.PhantomJS(executable_path=r'/app/getrw_tiki/phantom/linux/bin/phantomjs') ### crawler js
client = webdriver.PhantomJS()
#client = webdriver.PhantomJS(phantom_windir) ### crawler js
class object:
def __init__(self,lnkweb,comment,lnkImg):
        # comment is a string
self.lnkweb = lnkweb
self.comment = comment
self.lnkImg = lnkImg
    def returnvalue(self):  # return the object's attribute values
        obj = list()  # create an empty list
        obj.append(self.lnkweb)  # append the attribute values to the list
        obj.append(self.comment)
        obj.append(self.lnkImg)
        return obj  # return the list holding the object's values
######### start lazada######################
def get_comment(link): #lazada
r= requests.get(link)
soup = BeautifulSoup(r.text,'html.parser')
fnd = soup.find_all("div","review_criteria")
#print len(fnd)
c = list()
if len(fnd)!= 0:
for i in fnd :
comment = i.text
c.append(comment)
return c
def search_vatgia(keywords):
k=keywords.replace(' ','+')
link = 'http://vatgia.com/home/quicksearch.php?keyword='+k+'&sort=5'
client.get(link)
soup = BeautifulSoup(client.page_source,"html.parser")
fclass = soup.find_all("a","picture_link",limit=5)
return fclass
'''
def get_src(input): # get link src tu html element lazada
rexp='(src=")(.*)"' #lay link cua the span
f = re.compile(str(rexp)).findall(str(input))
if f == []:
print "1"
rexp='(url\()(.*)\)'
f = re.compile(str(rexp)).findall(str(input))
return f[0][1]'''
#search_vatgia("iphone 5") |
py | 1a44c467906f883a49406c5135eca0629c0008a2 | import argparse
import os
import numpy as np
import torch
import torch.nn.functional as F
from PIL import Image
from Network import UNet
from utils import resize_and_crop, normalize, split_img_into_squares, hwc_to_chw, merge_masks
from utils import plot_img_and_mask
from torchvision import transforms
def predict_img(net,
full_img,
scale_factor=0.25,
out_threshold=0.5,
use_dense_crf=True,
use_gpu=True):
net.eval()
img_height = full_img.size[1]
img_width = full_img.size[0]
img = resize_and_crop(full_img, scale=scale_factor)
img = normalize(img)
left_square, right_square = split_img_into_squares(img)
left_square = hwc_to_chw(left_square)
right_square = hwc_to_chw(right_square)
X_left = torch.from_numpy(left_square).unsqueeze(0)
X_right = torch.from_numpy(right_square).unsqueeze(0)
if use_gpu:
X_left = X_left.cuda()
X_right = X_right.cuda()
with torch.no_grad():
output_left = net(X_left)
output_right = net(X_right)
left_probs = output_left.squeeze(0)
right_probs = output_right.squeeze(0)
tf = transforms.Compose(
[
transforms.ToPILImage(),
transforms.Resize(img_height),
transforms.ToTensor()
]
)
left_probs = tf(left_probs.cpu())
right_probs = tf(right_probs.cpu())
left_mask_np = left_probs.squeeze().cpu().numpy()
right_mask_np = right_probs.squeeze().cpu().numpy()
full_mask = merge_masks(left_mask_np, right_mask_np, img_width)
# if use_dense_crf:
# full_mask = dense_crf(np.array(full_img).astype(np.uint8), full_mask)
#
return full_mask > out_threshold
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--model', '-m', default='CP1.pth',
metavar='FILE',
help="Specify the file in which the model is stored"
" (default : 'CP1.pth')")
parser.add_argument('--input', '-i', metavar='INPUT', nargs='+',
help='filenames of input images', required=True)
parser.add_argument('--output', '-o', metavar='INPUT', nargs='+',
help='filenames of output images')
parser.add_argument('--cpu', '-c', action='store_true',
help="Do not use the cuda version of the net",
default=False)
parser.add_argument('--viz', '-v', action='store_true',
help="Visualize the images as they are processed",
default=False)
parser.add_argument('--no-save', '-n', action='store_true',
help="Do not save the output masks",
default=False)
parser.add_argument('--no-crf', '-r', action='store_true',
help="Do not use dense CRF postprocessing",
default=False)
parser.add_argument('--mask-threshold', '-t', type=float,
help="Minimum probability value to consider a mask pixel white",
default=0.5)
parser.add_argument('--scale', '-s', type=float,
help="Scale factor for the input images",
default=0.25)
return parser.parse_args()
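# Example invocation (the script name and file names are illustrative, assuming a
# trained checkpoint is available):
#   python predict.py --model CP1.pth --input img1.jpg img2.jpg --viz --scale 0.25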
def get_output_filenames(args):
in_files = args.input
out_files = []
if not args.output:
for f in in_files:
pathsplit = os.path.splitext(f)
out_files.append("{}_OUT{}".format(pathsplit[0], pathsplit[1]))
elif len(in_files) != len(args.output):
print("Error : Input files and output files are not of the same length")
raise SystemExit()
else:
out_files = args.output
return out_files
def mask_to_image(mask):
return Image.fromarray((mask * 255).astype(np.uint8))
if __name__ == "__main__":
args = get_args()
in_files = args.input
out_files = get_output_filenames(args)
net = UNet(n_channels=3, n_classes=1)
print("Loading model {}".format(args.model))
if not args.cpu:
print("Using CUDA version of the net, prepare your GPU !")
net.cuda()
net.load_state_dict(torch.load(args.model))
else:
net.cpu()
net.load_state_dict(torch.load(args.model, map_location='cpu'))
print("Using CPU version of the net, this may be very slow")
print("Model loaded !")
for i, fn in enumerate(in_files):
print("\nPredicting image {} ...".format(fn))
img = Image.open(fn)
if img.size[0] < img.size[1]:
print("Error: image height larger than the width")
mask = predict_img(net=net,
full_img=img,
scale_factor=args.scale,
out_threshold=args.mask_threshold,
use_dense_crf=not args.no_crf,
use_gpu=not args.cpu)
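# Debug block: the three lines below print the raw mask, plot it, and stop after the
# first image, so the --viz / save handling further down is never reached.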
print(mask)
plot_img_and_mask(img, mask)
exit(0)
if args.viz:
print("Visualizing results for image {}, close to continue ...".format(fn))
plot_img_and_mask(img, mask)
if not args.no_save:
out_fn = out_files[i]
result = mask_to_image(mask)
result.save(out_files[i])
print("Mask saved to {}".format(out_files[i])) |
py | 1a44c5bb0fc630a88efc8a29938742529e877786 | # coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.4425
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid.api_client import ApiClient
from lusid.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class PackagesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_package(self, scope, code, **kwargs): # noqa: E501
"""[EXPERIMENTAL] DeletePackage: Delete package # noqa: E501
Delete a package. Deletion will be valid from the package's creation datetime. This means that the package will no longer exist at any effective datetime from the asAt datetime of deletion. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_package(scope, code, async_req=True)
>>> result = thread.get()
:param scope: The package scope. (required)
:type scope: str
:param code: The package's code. This, together with the scope uniquely identifies the package to delete. (required)
:type code: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: DeletedEntityResponse
"""
kwargs['_return_http_data_only'] = True
return self.delete_package_with_http_info(scope, code, **kwargs) # noqa: E501
def delete_package_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EXPERIMENTAL] DeletePackage: Delete package # noqa: E501
Delete a package. Deletion will be valid from the package's creation datetime. This means that the package will no longer exist at any effective datetime from the asAt datetime of deletion. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_package_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param scope: The package scope. (required)
:type scope: str
:param code: The package's code. This, together with the scope uniquely identifies the package to delete. (required)
:type code: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(DeletedEntityResponse, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'scope',
'code'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_package" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'scope' is set
if self.api_client.client_side_validation and ('scope' not in local_var_params or # noqa: E501
local_var_params['scope'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `scope` when calling `delete_package`") # noqa: E501
# verify the required parameter 'code' is set
if self.api_client.client_side_validation and ('code' not in local_var_params or # noqa: E501
local_var_params['code'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `code` when calling `delete_package`") # noqa: E501
if self.api_client.client_side_validation and ('scope' in local_var_params and # noqa: E501
len(local_var_params['scope']) > 64): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_package`, length must be less than or equal to `64`") # noqa: E501
if self.api_client.client_side_validation and ('scope' in local_var_params and # noqa: E501
len(local_var_params['scope']) < 1): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_package`, length must be greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_package`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if self.api_client.client_side_validation and ('code' in local_var_params and # noqa: E501
len(local_var_params['code']) > 64): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_package`, length must be less than or equal to `64`") # noqa: E501
if self.api_client.client_side_validation and ('code' in local_var_params and # noqa: E501
len(local_var_params['code']) < 1): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_package`, length must be greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_package`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
header_params['Accept-Encoding'] = "gzip, deflate, br"
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
response_types_map = {
200: "DeletedEntityResponse",
400: "LusidValidationProblemDetails",
}
return self.api_client.call_api(
'/api/packages/{scope}/{code}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def get_package(self, scope, code, **kwargs): # noqa: E501
"""[EXPERIMENTAL] GetPackage: Get Package # noqa: E501
Fetch a Package that matches the specified identifier # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_package(scope, code, async_req=True)
>>> result = thread.get()
:param scope: The scope to which the package belongs. (required)
:type scope: str
:param code: The package's unique identifier. (required)
:type code: str
:param as_at: The asAt datetime at which to retrieve the package. Defaults to return the latest version of the package if not specified.
:type as_at: datetime
:param property_keys: A list of property keys from the \"Package\" domain to decorate onto the package. These take the format {domain}/{scope}/{code} e.g. \"Package/system/Name\".
:type property_keys: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: Package
"""
kwargs['_return_http_data_only'] = True
return self.get_package_with_http_info(scope, code, **kwargs) # noqa: E501
def get_package_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EXPERIMENTAL] GetPackage: Get Package # noqa: E501
Fetch a Package that matches the specified identifier # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_package_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param scope: The scope to which the package belongs. (required)
:type scope: str
:param code: The package's unique identifier. (required)
:type code: str
:param as_at: The asAt datetime at which to retrieve the package. Defaults to return the latest version of the package if not specified.
:type as_at: datetime
:param property_keys: A list of property keys from the \"Package\" domain to decorate onto the package. These take the format {domain}/{scope}/{code} e.g. \"Package/system/Name\".
:type property_keys: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(Package, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'scope',
'code',
'as_at',
'property_keys'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_package" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if self.api_client.client_side_validation and ('scope' in local_var_params and # noqa: E501
len(local_var_params['scope']) > 64): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_package`, length must be less than or equal to `64`") # noqa: E501
if self.api_client.client_side_validation and ('scope' in local_var_params and # noqa: E501
len(local_var_params['scope']) < 1): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_package`, length must be greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_package`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if self.api_client.client_side_validation and ('code' in local_var_params and # noqa: E501
len(local_var_params['code']) > 64): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_package`, length must be less than or equal to `64`") # noqa: E501
if self.api_client.client_side_validation and ('code' in local_var_params and # noqa: E501
len(local_var_params['code']) < 1): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_package`, length must be greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_package`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'as_at' in local_var_params and local_var_params['as_at'] is not None: # noqa: E501
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'property_keys' in local_var_params and local_var_params['property_keys'] is not None: # noqa: E501
query_params.append(('propertyKeys', local_var_params['property_keys'])) # noqa: E501
collection_formats['propertyKeys'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
header_params['Accept-Encoding'] = "gzip, deflate, br"
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
response_types_map = {
200: "Package",
400: "LusidValidationProblemDetails",
}
return self.api_client.call_api(
'/api/packages/{scope}/{code}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def list_packages(self, **kwargs): # noqa: E501
"""[EXPERIMENTAL] ListPackages: List Packages # noqa: E501
Fetch the last pre-AsAt date version of each package in scope (does not fetch the entire history). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_packages(async_req=True)
>>> result = thread.get()
:param as_at: The asAt datetime at which to retrieve the package. Defaults to return the latest version of the package if not specified.
:type as_at: datetime
:param page: The pagination token to use to continue listing packages from a previous call to list packages. This value is returned from the previous call. If a pagination token is provided the sortBy, filter, effectiveAt, and asAt fields must not have changed since the original request.
:type page: str
:param sort_by: Order the results by these fields. Use the '-' sign to denote descending order e.g. -MyFieldName.
:type sort_by: list[str]
:param limit: When paginating, limit the number of returned results to this many.
:type limit: int
:param filter: Expression to filter the result set. Read more about filtering results from LUSID here: https://support.lusid.com/filtering-results-from-lusid.
:type filter: str
:param property_keys: A list of property keys from the \"Package\" domain to decorate onto each package. These take the format {domain}/{scope}/{code} e.g. \"Package/system/Name\".
:type property_keys: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: PagedResourceListOfPackage
"""
kwargs['_return_http_data_only'] = True
return self.list_packages_with_http_info(**kwargs) # noqa: E501
def list_packages_with_http_info(self, **kwargs): # noqa: E501
"""[EXPERIMENTAL] ListPackages: List Packages # noqa: E501
Fetch the last pre-AsAt date version of each package in scope (does not fetch the entire history). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_packages_with_http_info(async_req=True)
>>> result = thread.get()
:param as_at: The asAt datetime at which to retrieve the package. Defaults to return the latest version of the package if not specified.
:type as_at: datetime
:param page: The pagination token to use to continue listing packages from a previous call to list packages. This value is returned from the previous call. If a pagination token is provided the sortBy, filter, effectiveAt, and asAt fields must not have changed since the original request.
:type page: str
:param sort_by: Order the results by these fields. Use the '-' sign to denote descending order e.g. -MyFieldName.
:type sort_by: list[str]
:param limit: When paginating, limit the number of returned results to this many.
:type limit: int
:param filter: Expression to filter the result set. Read more about filtering results from LUSID here: https://support.lusid.com/filtering-results-from-lusid.
:type filter: str
:param property_keys: A list of property keys from the \"Package\" domain to decorate onto each package. These take the format {domain}/{scope}/{code} e.g. \"Package/system/Name\".
:type property_keys: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(PagedResourceListOfPackage, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'as_at',
'page',
'sort_by',
'limit',
'filter',
'property_keys'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_packages" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if self.api_client.client_side_validation and ('page' in local_var_params and # noqa: E501
len(local_var_params['page']) > 500): # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `list_packages`, length must be less than or equal to `500`") # noqa: E501
if self.api_client.client_side_validation and ('page' in local_var_params and # noqa: E501
len(local_var_params['page']) < 1): # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `list_packages`, length must be greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'page' in local_var_params and not re.search(r'^[a-zA-Z0-9\+\/]*={0,3}$', local_var_params['page']): # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `list_packages`, must conform to the pattern `/^[a-zA-Z0-9\+\/]*={0,3}$/`") # noqa: E501
if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 5000: # noqa: E501
raise ApiValueError("Invalid value for parameter `limit` when calling `list_packages`, must be a value less than or equal to `5000`") # noqa: E501
if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `limit` when calling `list_packages`, must be a value greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and ('filter' in local_var_params and # noqa: E501
len(local_var_params['filter']) > 16384): # noqa: E501
raise ApiValueError("Invalid value for parameter `filter` when calling `list_packages`, length must be less than or equal to `16384`") # noqa: E501
if self.api_client.client_side_validation and ('filter' in local_var_params and # noqa: E501
len(local_var_params['filter']) < 0): # noqa: E501
raise ApiValueError("Invalid value for parameter `filter` when calling `list_packages`, length must be greater than or equal to `0`") # noqa: E501
if self.api_client.client_side_validation and 'filter' in local_var_params and not re.search(r'^[\s\S]*$', local_var_params['filter']): # noqa: E501
raise ApiValueError("Invalid value for parameter `filter` when calling `list_packages`, must conform to the pattern `/^[\s\S]*$/`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'as_at' in local_var_params and local_var_params['as_at'] is not None: # noqa: E501
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501
query_params.append(('sortBy', local_var_params['sort_by'])) # noqa: E501
collection_formats['sortBy'] = 'multi' # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'property_keys' in local_var_params and local_var_params['property_keys'] is not None: # noqa: E501
query_params.append(('propertyKeys', local_var_params['property_keys'])) # noqa: E501
collection_formats['propertyKeys'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
header_params['Accept-Encoding'] = "gzip, deflate, br"
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
response_types_map = {
200: "PagedResourceListOfPackage",
400: "LusidValidationProblemDetails",
}
return self.api_client.call_api(
'/api/packages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def upsert_packages(self, **kwargs): # noqa: E501
"""[EXPERIMENTAL] UpsertPackages: Upsert Package # noqa: E501
Upsert; update existing packages with given ids, or create new packages otherwise. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upsert_packages(async_req=True)
>>> result = thread.get()
:param package_set_request: The collection of package requests.
:type package_set_request: PackageSetRequest
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ResourceListOfPackage
"""
kwargs['_return_http_data_only'] = True
return self.upsert_packages_with_http_info(**kwargs) # noqa: E501
def upsert_packages_with_http_info(self, **kwargs): # noqa: E501
"""[EXPERIMENTAL] UpsertPackages: Upsert Package # noqa: E501
Upsert; update existing packages with given ids, or create new packages otherwise. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upsert_packages_with_http_info(async_req=True)
>>> result = thread.get()
:param package_set_request: The collection of package requests.
:type package_set_request: PackageSetRequest
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ResourceListOfPackage, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'package_set_request'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method upsert_packages" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'package_set_request' in local_var_params:
body_params = local_var_params['package_set_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
header_params['Accept-Encoding'] = "gzip, deflate, br"
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.4425'
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
response_types_map = {
201: "ResourceListOfPackage",
400: "LusidValidationProblemDetails",
}
return self.api_client.call_api(
'/api/packages', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
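# A minimal usage sketch (commented out; assumes an ApiClient already configured with
# credentials elsewhere, and illustrative scope/code values):
#
#   from lusid.api_client import ApiClient
#   api = PackagesApi(ApiClient())
#   packages = api.list_packages(limit=10)
#   pkg = api.get_package(scope="my-scope", code="pkg-001")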
|
py | 1a44c61ba5daea20d282098bce01b912d85b60fb | import numpy as np
import chainer
from chainer import serializers
from chainercv.links import SSD512
import chainercv.links as C
import chainercv
import onnx_chainer
# model_name = 'model/2020_9_18.npz'
# majomoji_label="A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"
# model = SSD512(n_fg_class=len(majomoji_label))
# serializers.load_npz(model_name,model)
model = C.VGG16(pretrained_model='imagenet')
# Prepare dummy input data to feed through the network
x = np.zeros((1, 3, 224, 224), dtype=np.float32)
# Switch to inference (evaluation) mode
chainer.config.train = False
onnx_chainer.export(model, x, filename='vgg16.onnx') |
py | 1a44c6deaa271d521f54bc956c557e0b16dc91a1 | import math, networkx as nx, timeit, unittest
class ConnectTheDotsBigDataTest(unittest.TestCase):
"""
Benchmarking suite for ConnectTheDots (large datasets)
"""
def test_bc_runtime(self):
"""
Test time needed to calculate betweenness centrality
"""
TEST_CASES = [] # add (V, E) tuples
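# e.g. TEST_CASES = [(1000, 5000), (5000, 25000)]  # illustrative (V, E) sizes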
NUM_TRIALS = 10
def generate_graph(V, E):
"""
Return a random Barabasi-Albert graph with V nodes and E edges
"""
m = (V - math.sqrt(V ** 2 - 4 * E)) / 2
return nx.barabasi_albert_graph(V, int(m))
def calculate_bc(G):
"""
Calculate betweenness centrality for graph G
"""
return nx.betweenness_centrality(G)
if len(TEST_CASES) > 0:
print('\n\n[ Runtime ]\n')
for (V, E) in TEST_CASES:
print('V = ' + str(V) + ', E = ' + str(E) + '\n')
G = generate_graph(V, E)
for i in range(NUM_TRIALS):
start = timeit.default_timer()
calculate_bc(G)
stop = timeit.default_timer()
print(stop - start)
print('')
def test_bc_estimation(self):
"""
Test accuracy of different k-values for betweenness centrality estimation
"""
TEST_CASES = [] # add (V, E, k) tuples
NUM_TRIALS = 1
def generate_graph(V, E):
"""
Return a random Barabasi-Albert graph with V nodes and E edges
"""
m = (V - math.sqrt(V ** 2 - 4 * E)) / 2
return nx.barabasi_albert_graph(V, int(m))
def calculate_bc(G, k=None):
"""
Calculate betweenness centrality for graph G using k pivots
"""
return nx.betweenness_centrality(G, k)
def round_float(n):
"""
Return string representation of float n rounded to six decimal places
"""
return '{:f}'.format(n)
def error_pct(error, actual):
"""
Return string representation of error % of estimate from actual
"""
if actual > 0:
return '{:.1%}'.format(error / actual)
else:
return '--'
if len(TEST_CASES) > 0:
print('\n\n[ Estimation ]\n')
for (V, E, k) in TEST_CASES:
print('V = ' + str(V) + ', E = ' + str(E) + ', k = ' + str(k) + '\n')
G = generate_graph(V, E)
bc = calculate_bc(G)
for i in range(NUM_TRIALS):
# estimate = calculate_bc(G, key)
# print 'node estimate actual error % error'
# print '---- -------- -------- -------- --------'
# for key, val in estimate.iteritems():
# error = abs(bc[key] - val)
# print ' '.join(['{:04}'.format(key), round_float(val), round_float(bc[key]), round_float(error), error_pct(error, bc[key])])
# print ''
start = timeit.default_timer()
estimate = calculate_bc(G, k)
stop = timeit.default_timer()
runtime = stop - start
max_error = 0
max_error_pct = 0
for key, val in estimate.items():
error = abs(bc[key] - val)
max_error = max(max_error, error)
if bc[key] > 0:
max_error_pct = max(max_error_pct, error / bc[key])
print(', ' .join([round_float(max_error), '{:.1%}'.format(max_error_pct), str(runtime)]))
print('')
|
py | 1a44c712f9e4bf7230f6bf0af9072811c7242efd | from datetime import datetime
from io import BytesIO
import os
import shutil
from behave import *
from tsserver import configutils
from tsserver.dtutils import datetime_to_str
from tsserver.features.testutils import (
open_resource, resource_path, table_to_database
)
from tsserver.photos.models import Photo
PHOTO_DETAIL_KEYS = {'id', 'filename', 'isPanorama', 'url', 'timestamp'}
@given("test photos in upload directory")
def step_impl(context):
src = os.path.join(resource_path(), 'deathvalley.jpg')
uploads = configutils.get_upload_dir()
for filename in {'test001.jpg', 'test002.jpg'}:
shutil.copyfile(src, os.path.join(uploads, filename))
@given("following photo data")
def step_impl(context):
table_to_database(context.table, Photo)
@then("list of {num:d} object with image details should be sent")
@then("list of {num:d} objects with image details should be sent")
def step_impl(context, num):
assert len(context.rv.json_data) == num
assert all(PHOTO_DETAIL_KEYS == set(x) for x in context.rv.json_data)
@when("I upload an image to {url}")
def step_impl(context, url):
data = {'timestamp': datetime_to_str(datetime.now()),
'photo': (open_resource('deathvalley.jpg', mode='rb'),
'TEST_ONLY_deathvalley.jpg')}
context.rv = context.request(url, 'POST', data=data)
@when("I upload a panorama via PUT to {url}")
def step_impl(context, url):
data = {'timestamp': datetime_to_str(datetime.now()),
'photo': (open_resource('deathvalley.jpg', mode='rb'),
'TEST_ONLY_deathvalley.jpg')}
context.rv = context.request(url, 'PUT', data=data)
@then("JSON with image details should be sent")
def step_impl(context):
assert PHOTO_DETAIL_KEYS == set(context.rv.json_data)
# Save the photo filename so it can be later removed
context.test_photo_url = context.rv.json_data['filename']
# For "Then the same JSON data should be sent" step
context.last_json_data = context.rv.json_data
@when('I request file from "{key}" key')
def step_impl(context, key):
context.rv = context.app.get(context.rv.json_data[key])
@when("I upload a file with '{ext}' extension to {url}")
def step_impl(context, ext, url):
data = {'timestamp': datetime_to_str(datetime.now()),
'photo': (BytesIO(b'test'), 'example.' + ext)}
context.rv = context.request(url, 'POST', data=data)
|
py | 1a44c7682f00e69b11b6fb1324d89ea8f63d9f61 | """Module for start_mappers function that starts all entity to table mappers"""
__all__ = ['start_mappers']
from entities.users import mapper as user_mapper
def start_mappers() -> None:
"""Start all mappers
Warning:
This function is intended to be called exactly once, during main application creation. Calling it
more than once raises an error because the tables the mappers create and map entities onto already exist.
"""
user_mapper.start_mapper()
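# A minimal usage sketch (commented out; the import path is illustrative and depends
# on where this module lives in the project):
#
#   from orm.mappers import start_mappers
#   start_mappers()  # call exactly once during application start-up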
|
py | 1a44c7e96c810790f68708e436000e3ec8e9a2bc | # import various
from . import dataproc
from . import generators
from . import callbacks
from . import plot
from . import metrics
from . import inits
from . import models
from . import utils
from . import layers
from . import vae_tools
from . import regularizers |
py | 1a44c8787645d66765d6e56af4001469d43f75df | #!/usr/bin/python
#coding=utf-8
# Test the extractor interface
import os
import sys
sys.path.append('/usr/lib/python2.7/site-packages/')
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.transport.TTransport import TFramedTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from thrift.server import TNonblockingServer
sys.path.append('../gen-py')
from conf_crawler import ExtractorService
from conf_crawler import StaticLinkBaseService
from conf_crawler.ttypes import *
SVR_IP = '127.0.0.1'
SVR_PORT = 44003
TIME_OUT = 100000
def LoadTemplate(url_template, template_type):
try:
global SVR_PORT
transport = TSocket.TSocket(SVR_IP, SVR_PORT)
transport.setTimeout(TIME_OUT)
framed_transport = TFramedTransport(transport)
framed_transport.open()
protocol = TBinaryProtocol.TBinaryProtocol(framed_transport)
service = ExtractorService.Client(protocol)
ret = service.load_template(url_template, template_type)
transport.close()
return True
except Exception as ex:
print "Error:%s" % (ex)
return True
def Extract(url_template, template_type, depth, body_file):
try:
global SVR_PORT
transport = TSocket.TSocket(SVR_IP, SVR_PORT)
transport.setTimeout(TIME_OUT)
framed_transport = TFramedTransport(transport)
framed_transport.open()
protocol = TBinaryProtocol.TBinaryProtocol(framed_transport)
service = ExtractorService.Client(protocol)
extract_item = ExtractItem()
extract_item.url = 'http://api.wap.58.com/api/info/infolist/bj/zufang/1/25/?pic=1'
extract_item.url_template = url_template
extract_item.depth = depth
extract_item.template_type = template_type
file_in = body_file
f = open(file_in, 'r')
extract_item.body = f.read()
f.close()
matched_result_item = service.extract_sync(extract_item)
print 'len(sub_result_list):%d' % len(matched_result_item.sub_result_list)
transport.close()
if matched_result_item.is_ok == False:
print 'Err:%s' % (matched_result_item.err_info)
for key, value in matched_result_item.self_result.iteritems():
for v in value:
print '[%s]\t%s' % (key, v)
for item in matched_result_item.sub_result_list:
print '-------------------'
for key, value in item.iteritems():
print '[%s]\t%s' % (key, value[0])
'''
transport = TSocket.TSocket(SVR_IP, SVR_PORT)
transport.setTimeout(TIME_OUT)
framed_transport = TFramedTransport(transport)
framed_transport.open()
protocol = TBinaryProtocol.TBinaryProtocol(framed_transport)
service = StaticLinkBaseService.Client(protocol)
service.upload_extract(extract_item, matched_result_item)
transport.close()
'''
return True
except Exception as ex:
print "Error:%s" % (ex)
return False
if __name__ == '__main__':
if len(sys.argv) < 5:
print 'Usage:%s template template_type depth body_file' % (sys.argv[0])
sys.exit(1)
url_template = sys.argv[1]
template_type = int(sys.argv[2])
depth = int(sys.argv[3])
body_file = sys.argv[4]
if template_type >= TemplateType.TEMPLATE_TYPE_MAX:
print 'invalid template type:%d' % (template_type)
sys.exit(1)
LoadTemplate(url_template, template_type)
for i in range(0, 1):
#while True:
if not Extract(url_template, template_type, depth, body_file):
print 'ERROR'
sys.exit(1)
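# Example invocation (the script name and file paths are illustrative):
#   python test_extractor.py list_page.template 0 1 body.html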
|
py | 1a44ca077b9bfabdf454bb89cc4e93ffde9f7e47 | import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import sys
import json
import gc
from tqdm import tqdm
from sklearn.cluster import KMeans
from encode import lstm_encoder
from dataprocess_tacred import data_sampler
from model import proto_softmax_layer
from dataprocess_tacred import get_data_loader
from transformers import BertTokenizer,BertModel
from util import set_seed,process_data,getnegfrombatch,select_similar_data_new_tac
import faiss
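# eval_model: score each test instance against the memorized relation prototypes and
# count it correct when the gold relation's score beats every sampled negative relation.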
def eval_model(config, basemodel, test_set, mem_relations):
print("One eval")
print("test data num is:\t",len(test_set))
basemodel.eval()
test_dataloader = get_data_loader(config, test_set, shuffle=False, batch_size=30)
allnum= 0.0
correctnum = 0.0
for step, (labels, neg_labels, sentences, firstent, firstentindex, secondent, secondentindex, headid, tailid, rawtext, lengths,
typelabels) in enumerate(test_dataloader):
logits, rep = basemodel(sentences, lengths)
distances = basemodel.get_mem_feature(rep)
short_logits = distances
#short_logits = logits
for index, logit in enumerate(logits):
score = short_logits[index] # logits[index] + short_logits[index] + long_logits[index]
allnum += 1.0
golden_score = score[labels[index]]
max_neg_score = -2147483647.0
for i in neg_labels[index]: # range(num_class):
if (i != labels[index]) and (score[i] > max_neg_score):
max_neg_score = score[i]
if golden_score > max_neg_score:
correctnum += 1
acc = correctnum / allnum
print(acc)
basemodel.train()
return acc
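# get_memory: encode every instance kept for a relation and average the features to
# produce one prototype vector per relation.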
def get_memory(config, model, proto_set):
memset = []
resset = []
rangeset= [0]
for i in proto_set:
#print(i)
memset += i
rangeset.append(rangeset[-1] + len(i))
data_loader = get_data_loader(config, memset, False, False)
features = []
for step, (labels, neg_labels, sentences, firstent, firstentindex, secondent, secondentindex, headid, tailid, rawtext, lengths,
typelabels) in enumerate(data_loader):
feature = model.get_feature(sentences, lengths)
features.append(feature)
features = np.concatenate(features)
protos = []
#print ("proto_instaces:%d"%len(features))
for i in range(len(proto_set)):
protos.append(torch.tensor(features[rangeset[i]:rangeset[i+1],:].mean(0, keepdims = True)))
protos = torch.cat(protos, 0)
#print(protos.shape)
return protos
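# select_data: choose representative instances per relation for the episodic memory,
# either the instances closest to K-Means cluster centers (selecttype 0) or the one
# nearest to the average embedding (selecttype 1); head/tail entities from another
# relation in the task are stored as that relation's negatives.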
def select_data(mem_set, proto_memory, config, model, divide_train_set, num_sel_data, current_relations, selecttype):
####select data according to selecttype
#selecttype is 0: cluster for every rel
#selecttype is 1: use ave embedding
rela_num = len(current_relations)
for i in range(0, rela_num):
thisrel = current_relations[i]
if thisrel in mem_set.keys():
#print("have set mem before")
mem_set[thisrel] = {'0': [], '1': {'h': [], 't': []}}
proto_memory[thisrel].pop()
else:
mem_set[thisrel] = {'0': [], '1': {'h': [], 't': []}}
thisdataset = divide_train_set[thisrel]
data_loader = get_data_loader(config, thisdataset, False, False)
features = []
for step, (labels, neg_labels, sentences, firstent, firstentindex, secondent, secondentindex, headid, tailid, rawtext, lengths,
typelabels) in enumerate(data_loader):
feature = model.get_feature(sentences, lengths)
features.append(feature)
features = np.concatenate(features)
#print(features.shape)
num_clusters = min(num_sel_data, len(thisdataset))
if selecttype == 0:
kmeans = KMeans(n_clusters=num_clusters, random_state=0)
distances = kmeans.fit_transform(features)
for i in range(num_clusters):
sel_index = np.argmin(distances[:, i])
instance = thisdataset[sel_index]
### change the type label
instance[11] = 3
###add to mem data
mem_set[thisrel]['0'].append(instance) ####positive sample
cluster_center = kmeans.cluster_centers_[i]
#print(cluster_center.shape)
proto_memory[thisrel].append(instance)
elif selecttype == 1:
#print("use average embedding")
samplenum = features.shape[0]
veclength = features.shape[1]
sumvec = np.zeros(veclength)
for j in range(samplenum):
sumvec += features[j]
sumvec /= samplenum
###find nearest sample
mindist = 100000000
minindex = -100
for j in range(samplenum):
dist = np.sqrt(np.sum(np.square(features[j] - sumvec)))
if dist < mindist:
minindex = j
mindist = dist
#print(minindex)
instance = thisdataset[minindex] # take the sample nearest to the average embedding
### change the type label
instance[11] = 3
mem_set[thisrel]['0'].append(instance)
proto_memory[thisrel].append(instance)
else:
print("error select type")
#####to get negative sample mem_set[thisrel]['1']
if rela_num > 1:
####we need to sample negative samples
allnegres = {}
for i in range(rela_num):
thisnegres = {'h':[],'t':[]}
currel = current_relations[i]
thisrelposnum = len(mem_set[currel]['0'])
#assert thisrelposnum == num_sel_data
#allnum = list(range(thisrelposnum))
for j in range(thisrelposnum):
thisnegres['h'].append(mem_set[currel]['0'][j][3])
thisnegres['t'].append(mem_set[currel]['0'][j][5])
allnegres[currel] = thisnegres
####get neg sample
for i in range(rela_num):
togetnegindex = (i + 1) % rela_num
togetnegrelname = current_relations[togetnegindex]
mem_set[current_relations[i]]['1']['h'].extend(allnegres[togetnegrelname]['h'])
mem_set[current_relations[i]]['1']['t'].extend(allnegres[togetnegrelname]['t'])
return mem_set
tempthre = 0.2
factorfor2 = 1.0
factorfor3 = 1.0
factorfor4 = 1.0
factorfor5 = 0.1
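# train_model_with_hard_neg: joint training on the new-task data plus replayed memory
# instances; for every memory instance in a batch, extra negative examples are built
# from the batch (getnegfrombatch) and loss5 adds a small-margin penalty whenever a
# generated negative's prototype score comes too close to the positive score.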
def train_model_with_hard_neg(config, model, mem_set, traindata, epochs, current_proto, ifnegtive=0):
print(len(traindata))
#print(len(train_set))
mem_data = []
if len(mem_set) != 0:
for key in mem_set.keys():
mem_data.extend(mem_set[key]['0'])
print(len(mem_data))
train_set = traindata + mem_data
#train_set.extend(mem_data) ######## NOTE: this would append the memory data a second time (duplicate mem)
print(len(train_set))
data_loader = get_data_loader(config, train_set, batch_size=config['batch_size_per_step'])
model.train()
criterion = nn.CrossEntropyLoss()
lossfn = nn.MultiMarginLoss(margin=0.2)
optimizer = optim.Adam(model.parameters(), config['learning_rate'])
for epoch_i in range(epochs):
model.set_memorized_prototypes(current_proto)
losses1 = []
losses2 = []
losses3 = []
losses4 = []
losses5 = []
lossesfactor1 = 0.0
lossesfactor2 = factorfor2
lossesfactor3 = factorfor3
lossesfactor4 = factorfor4
lossesfactor5 = factorfor5
for step, (labels, neg_labels, sentences, firstent, firstentindex, secondent, secondentindex, headid, tailid, rawtext, lengths,
typelabels) in enumerate(data_loader):
model.zero_grad()
#print(len(sentences))
labels = labels.to(config['device'])
typelabels = typelabels.to(config['device']) ####0:rel 1:pos(new train data) 2:neg 3:mem
numofmem = 0
numofnewtrain = 0
allnum = 0
memindex = []
for index,onetype in enumerate(typelabels):
if onetype == 1:
numofnewtrain += 1
if onetype == 3:
numofmem += 1
memindex.append(index)
allnum += 1
#print(numofmem)
#print(numofnewtrain)
getnegfromnum = 1
allneg = []
alllen = []
if numofmem > 0:
###select neg data for mem
for oneindex in memindex:
negres,lenres = getnegfrombatch(oneindex,firstent,firstentindex,secondent,secondentindex,sentences,lengths,getnegfromnum,allnum,labels,neg_labels)
for aa in negres:
allneg.append(torch.tensor(aa))
for aa in lenres:
alllen.append(torch.tensor(aa))
sentences.extend(allneg)
lengths.extend(alllen)
logits, rep = model(sentences, lengths)
#print(logits.shape)
#print(rep.shape)
logits_proto = model.mem_forward(rep)
#print(logits_proto.shape)
logitspos = logits[0:allnum,]
#print(logitspos.shape)
logits_proto_pos = logits_proto[0:allnum,]
#print(logits_proto_pos.shape)
if numofmem > 0:
logits_proto_neg = logits_proto[allnum:,]
logits = logitspos
logits_proto = logits_proto_pos
loss1 = criterion(logits, labels)
loss2 = criterion(logits_proto, labels)
loss4 = lossfn(logits_proto, labels)
loss3 = torch.tensor(0.0).to(config['device'])
for index, logit in enumerate(logits):
score = logits_proto[index]
preindex = labels[index]
maxscore = score[preindex]
size = score.shape[0]
secondmax = -100000
for j in range(size):
if j != preindex and score[j] > secondmax:
secondmax = score[j]
if secondmax - maxscore + tempthre > 0.0:
loss3 += (secondmax - maxscore + tempthre).to(config['device'])
loss3 /= logits.shape[0]
start = 0
loss5 = torch.tensor(0.0).to(config['device'])
allusenum = 0
for index in memindex:
onepos = logits_proto[index]
posindex = labels[index]
#poslabelscore = torch.exp(onepos[posindex])
poslabelscore = onepos[posindex]
negnum = getnegfromnum * 2
negscore = torch.tensor(0.0).to(config['device'])
for ii in range(start, start + negnum):
oneneg = logits_proto_neg[ii]
#negscore += torch.exp(oneneg[posindex])
negscore = oneneg[posindex]
if negscore - poslabelscore + 0.01 > 0.0 and negscore < poslabelscore:
loss5 += (negscore - poslabelscore + 0.01)
allusenum += 1
#loss5 += (-torch.log(poslabelscore/(poslabelscore+negscore)))
start += negnum
#print(len(memindex))
if len(memindex) == 0:
loss = loss1 * lossesfactor1 + loss2 * lossesfactor2 + loss3 * lossesfactor3 + loss4 * lossesfactor4
else:
#loss5 /= len(memindex)
loss5 = loss5 / allusenum
#loss = loss1 * lossesfactor1 + loss2 * lossesfactor2 + loss3 * lossesfactor3 + loss4 * lossesfactor4 ###no loss5
loss = loss1 * lossesfactor1 + loss2 * lossesfactor2 + loss3 * lossesfactor3 + loss4 * lossesfactor4 + loss5 * lossesfactor5 ###with loss5
loss.backward()
losses1.append(loss1.item())
losses2.append(loss2.item())
losses3.append(loss3.item())
losses4.append(loss4.item())
losses5.append(loss5.item())
#print("step:\t", step, "\tloss1:\t", loss1.item(), "\tloss2:\t", loss2.item(), "\tloss3:\t", loss3.item(),
# "\tloss4:\t", loss4.item(), "\tloss5:\t", loss5.item())
torch.nn.utils.clip_grad_norm_(model.parameters(), config['max_grad_norm'])
optimizer.step()
return model
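# train_simple_model: one training stage over the (optionally memory-augmented) train
# set, combining cross-entropy on the prototype logits, a multi-margin loss, and a
# triplet-style hinge between the gold prototype score and the best competing score.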
def train_simple_model(config, model, mem_set, train_set, epochs, current_proto, ifusemem=False):
if ifusemem:
mem_data = []
if len(mem_set)!=0:
for key in mem_set.keys():
mem_data.extend(mem_set[key]['0'])
train_set.extend(mem_data)
data_loader = get_data_loader(config, train_set, batch_size=config['batch_size_per_step'])
model.train()
criterion = nn.CrossEntropyLoss()
lossfn = nn.MultiMarginLoss(margin=0.2)
optimizer = optim.Adam(model.parameters(), config['learning_rate'])
for epoch_i in range(epochs):
model.set_memorized_prototypes(current_proto)
losses1 = []
losses2 = []
losses3 = []
losses4 = []
lossesfactor1 = 0.0
lossesfactor2 = factorfor2
lossesfactor3 = factorfor3
lossesfactor4 = factorfor4
for step, (labels, neg_labels, sentences, firstent, firstentindex, secondent, secondentindex, headid, tailid, rawtext,
lengths, typelabels) in enumerate(tqdm(data_loader)):
model.zero_grad()
logits, rep = model(sentences, lengths)
logits_proto = model.mem_forward(rep)
labels = labels.to(config['device'])
loss1 = criterion(logits, labels)
loss2 = criterion(logits_proto, labels)
loss4 = lossfn(logits_proto, labels)
loss3 = torch.tensor(0.0).to(config['device'])
###add triple loss
for index, logit in enumerate(logits):
score = logits_proto[index]
preindex = labels[index]
maxscore = score[preindex]
size = score.shape[0]
secondmax = -100000
for j in range(size):
if j != preindex and score[j] > secondmax:
secondmax = score[j]
if secondmax - maxscore + tempthre > 0.0:
loss3 += (secondmax - maxscore + tempthre).to(config['device'])
loss3 /= logits.shape[0]
loss = loss1 * lossesfactor1 + loss2 * lossesfactor2 + loss3 * lossesfactor3 + loss4 * lossesfactor4
loss.backward()
losses1.append(loss1.item())
losses2.append(loss2.item())
losses3.append(loss3.item())
losses4.append(loss4.item())
torch.nn.utils.clip_grad_norm_(model.parameters(), config['max_grad_norm'])
optimizer.step()
#print (np.array(losses).mean())
return model
if __name__ == '__main__':
select_thredsold_param = 0.65
select_num = 1
f = open("config/config_tacred.json", "r")
config = json.loads(f.read())
f.close()
config['device'] = torch.device('cuda' if torch.cuda.is_available() and config['use_gpu'] else 'cpu')
config['n_gpu'] = torch.cuda.device_count()
config['batch_size_per_step'] = int(config['batch_size'] / config["gradient_accumulation_steps"])
config['neg_sampling'] = False
root_path = '.'
word2id = json.load(open(os.path.join(root_path, 'glove/word2id.txt')))
word2vec = np.load(os.path.join(root_path, 'glove/word2vec.npy'))
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
donum = 1
distantpath = "data/distantdata/"
file1 = distantpath + "distant.json"
file2 = distantpath + "exclude_fewrel_distant.json"
list_data,entpair2scope = process_data(file1,file2)
topk = 16
max_sen_length_for_select = 64
max_sen_lstm_tokenize = 128
select_thredsold = select_thredsold_param
print("********* load from ckpt ***********")
ckptpath = "simmodelckpt"
print(ckptpath)
ckpt = torch.load(ckptpath)
SimModel = BertModel.from_pretrained('bert-base-uncased',state_dict=ckpt["bert-base"]).to(config["device"])
allunlabledata = np.load("allunlabeldata.npy").astype('float32')
d = 768 * 2
index = faiss.IndexFlatIP(d)
print(index.is_trained)
index.add(allunlabledata) # add vectors to the index
print(index.ntotal)
for m in range(donum):
print(m)
config["rel_cluster_label"] = "data/tacred/CFRLdata_10_100_10_10/rel_cluster_label_" + str(m) + ".npy"
config['training_file'] = "data/tacred/CFRLdata_10_100_10_10/train_" + str(m) + ".txt"
config['valid_file'] = "data/tacred/CFRLdata_10_100_10_10/valid_" + str(m) + ".txt"
config['test_file'] = "data/tacred/CFRLdata_10_100_10_10/test_" + str(m) + ".txt"
encoderforbase = lstm_encoder(token2id=word2id, word2vec=word2vec, word_size=len(word2vec[0]), max_length=128, pos_size=None,
hidden_size=config['hidden_size'], dropout=0, bidirectional=True, num_layers=1, config=config)
sampler = data_sampler(config, encoderforbase.tokenizer)
modelforbase = proto_softmax_layer(encoderforbase, num_class=len(sampler.id2rel), id2rel=sampler.id2rel, drop=0, config=config)
modelforbase = modelforbase.to(config["device"])
word2vec_back = word2vec.copy()
sequence_results = []
result_whole_test = []
for i in range(6):
num_class = len(sampler.id2rel)
print(config['random_seed'] + 10 * i)
set_seed(config, config['random_seed'] + 10 * i)
sampler.set_seed(config['random_seed'] + 10 * i)
            mem_set = {} #### mem_set = {rel_id:{'0':[positive samples],'1':[negative samples]}} swap in 5 alternative heads and 5 alternative tails
mem_relations = [] ###not include relation of current task
past_relations = []
savetest_all_data = None
saveseen_relations = []
proto_memory = []
for i in range(len(sampler.id2rel)):
proto_memory.append([sampler.id2rel_pattern[i]])
oneseqres = []
##################################
whichdataselecct = 1
ifnorm = True
##################################
for steps, (training_data, valid_data, test_data, test_all_data, seen_relations, current_relations) in enumerate(sampler):
#print(steps)
print("------------------------")
print(len(training_data))
#for aa in range(20):
# print(training_data[aa])
savetest_all_data = test_all_data
saveseen_relations = seen_relations
currentnumber = len(current_relations)
print(currentnumber)
print(current_relations)
divide_train_set = {}
for relation in current_relations:
divide_train_set[relation] = [] ##int
for data in training_data:
divide_train_set[data[0]].append(data)
print(len(divide_train_set))
####select most similar sentence for new task, not for base task
                #### step == 0 is the base-model training stage
if steps == 0:
##train base model
print("train base model,not select most similar")
else:
print("train new model,select most similar")
selectdata = select_similar_data_new_tac(training_data, tokenizer, entpair2scope, topk,
max_sen_length_for_select,list_data, config, SimModel,
select_thredsold,max_sen_lstm_tokenize,encoderforbase.tokenizer,index,ifnorm,select_num)
print(len(selectdata))
training_data.extend(selectdata)
print(len(training_data))
#'''
current_proto = get_memory(config, modelforbase, proto_memory)
modelforbase = train_simple_model(config, modelforbase, mem_set, training_data, 1,
current_proto, False)
select_data(mem_set, proto_memory, config, modelforbase, divide_train_set,
config['rel_memory_size'], current_relations, 0) ##config['rel_memory_size'] == 1
for j in range(2):
current_proto = get_memory(config, modelforbase, proto_memory)
modelforbase = train_model_with_hard_neg(config, modelforbase, mem_set, training_data, 1,
current_proto, ifnegtive=0)
current_proto = get_memory(config, modelforbase, proto_memory)
modelforbase.set_memorized_prototypes(current_proto)
mem_relations.extend(current_relations)
currentalltest = []
for mm in range(len(test_data)):
currentalltest.extend(test_data[mm])
#eval_model(config, modelforbase, test_data[mm], mem_relations)
thisstepres = eval_model(config, modelforbase, currentalltest, mem_relations)
print("step:\t",steps,"\taccuracy:\t",thisstepres)
oneseqres.append(thisstepres)
sequence_results.append(np.array(oneseqres))
#def eval_both_model(config, newmodel, basemodel, test_set, mem_relations, baserelation, newrelation, proto_embed):
allres = eval_model(config, modelforbase, savetest_all_data, saveseen_relations)
result_whole_test.append(allres)
print("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&")
print("after one epoch allres:\t",allres)
print(result_whole_test)
# initialize the models
modelforbase = modelforbase.to('cpu')
del modelforbase
gc.collect()
if config['device'] == 'cuda':
torch.cuda.empty_cache()
encoderforbase = lstm_encoder(token2id=word2id, word2vec=word2vec_back.copy(), word_size=len(word2vec[0]),max_length=128, pos_size=None,
hidden_size=config['hidden_size'], dropout=0, bidirectional=True, num_layers=1, config=config)
modelforbase = proto_softmax_layer(encoderforbase, num_class=len(sampler.id2rel), id2rel=sampler.id2rel,
drop=0, config=config)
modelforbase.to(config["device"])
# output the final avg result
print("Final result!")
print(result_whole_test)
for one in sequence_results:
for item in one:
sys.stdout.write('%.4f, ' % item)
print('')
avg_result_all_test = np.average(sequence_results, 0)
for one in avg_result_all_test:
sys.stdout.write('%.4f, ' % one)
print('')
print("Finish training............................")
#'''
|
py | 1a44ca0f64754361b5a9f9e551e3daa711344ffd | from abaqusConstants import *
from .LoadState import LoadState
class ConcentratedHeatFluxState(LoadState):
"""The ConcentratedHeatFluxState object stores the propagating data of a concentrated heat
flux load in a step. One instance of this object is created internally by the
ConcentratedHeatFlux object for each step. The instance is also deleted internally by
the ConcentratedHeatFlux object.
The ConcentratedHeatFluxState object has no constructor or methods.
The ConcentratedHeatFluxState object is derived from the LoadState object.
Attributes
----------
magnitude: float
A Float specifying the load magnitude.
magnitudeState: SymbolicConstant
A SymbolicConstant specifying the propagation state of the load magnitude. Possible
values are UNSET, SET, UNCHANGED, and FREED.
amplitudeState: SymbolicConstant
A SymbolicConstant specifying the propagation state of the **amplitude** member. Possible
values are UNSET, SET, UNCHANGED, and FREED.
status: SymbolicConstant
A SymbolicConstant specifying the propagation state of the :py:class:`~abaqus.Load.LoadState.LoadState` object. Possible
values are:
- NOT_YET_ACTIVE
- CREATED
- PROPAGATED
- MODIFIED
- DEACTIVATED
- NO_LONGER_ACTIVE
- TYPE_NOT_APPLICABLE
- INSTANCE_NOT_APPLICABLE
- BUILT_INTO_BASE_STATE
amplitude: str
A String specifying the name of the amplitude reference. The String is empty if the load
has no amplitude reference.
Notes
-----
This object can be accessed by:
.. code-block:: python
import load
mdb.models[name].steps[name].loadStates[name]
The corresponding analysis keywords are:
- CFLUX
"""
# A Float specifying the load magnitude.
magnitude: float = None
# A SymbolicConstant specifying the propagation state of the load magnitude. Possible
# values are UNSET, SET, UNCHANGED, and FREED.
magnitudeState: SymbolicConstant = None
# A SymbolicConstant specifying the propagation state of the *amplitude* member. Possible
# values are UNSET, SET, UNCHANGED, and FREED.
amplitudeState: SymbolicConstant = None
# A SymbolicConstant specifying the propagation state of the LoadState object. Possible
# values are:
# - NOT_YET_ACTIVE
# - CREATED
# - PROPAGATED
# - MODIFIED
# - DEACTIVATED
# - NO_LONGER_ACTIVE
# - TYPE_NOT_APPLICABLE
# - INSTANCE_NOT_APPLICABLE
# - BUILT_INTO_BASE_STATE
status: SymbolicConstant = None
# A String specifying the name of the amplitude reference. The String is empty if the load
# has no amplitude reference.
amplitude: str = ''
|
py | 1a44ca1a2dcd203742e9355bd9c2eab1af060c99 | import asyncio
import logging
import pathlib
import random
import tempfile
from concurrent.futures.process import ProcessPoolExecutor
from typing import IO, List, Tuple, Optional
from chia.consensus.block_record import BlockRecord
from chia.consensus.constants import ConsensusConstants
from chia.full_node.weight_proof import (
_validate_sub_epoch_summaries,
vars_to_bytes,
validate_sub_epoch_sampling,
_validate_sub_epoch_segments,
_validate_recent_blocks_and_get_records,
chunks,
_validate_vdf_batch,
)
from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.weight_proof import (
WeightProof,
)
from chia.util.ints import uint32
log = logging.getLogger(__name__)
def _create_shutdown_file() -> IO:
return tempfile.NamedTemporaryFile(prefix="chia_wallet_weight_proof_handler_executor_shutdown_trigger")
class WalletWeightProofHandler:
LAMBDA_L = 100
C = 0.5
MAX_SAMPLES = 20
def __init__(
self,
constants: ConsensusConstants,
):
self._constants = constants
self._num_processes = 4
self._executor_shutdown_tempfile: IO = _create_shutdown_file()
self._executor: ProcessPoolExecutor = ProcessPoolExecutor(self._num_processes)
self._weight_proof_tasks: List[asyncio.Task] = []
def cancel_weight_proof_tasks(self):
for task in self._weight_proof_tasks:
if not task.done():
task.cancel()
self._weight_proof_tasks = []
self._executor_shutdown_tempfile.close()
self._executor.shutdown(wait=True)
async def validate_weight_proof(
self, weight_proof: WeightProof, skip_segment_validation=False
) -> Tuple[bool, uint32, List[SubEpochSummary], List[BlockRecord]]:
task: asyncio.Task = asyncio.create_task(
self._validate_weight_proof_inner(weight_proof, skip_segment_validation)
)
self._weight_proof_tasks.append(task)
valid, fork_point, summaries, block_records = await task
self._weight_proof_tasks.remove(task)
return valid, fork_point, summaries, block_records
async def _validate_weight_proof_inner(
self, weight_proof: WeightProof, skip_segment_validation: bool
) -> Tuple[bool, uint32, List[SubEpochSummary], List[BlockRecord]]:
assert len(weight_proof.sub_epochs) > 0
if len(weight_proof.sub_epochs) == 0:
return False, uint32(0), [], []
peak_height = weight_proof.recent_chain_data[-1].reward_chain_block.height
log.info(f"validate weight proof peak height {peak_height}")
summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self._constants, weight_proof)
if summaries is None:
log.error("weight proof failed sub epoch data validation")
return False, uint32(0), [], []
seed = summaries[-2].get_hash()
rng = random.Random(seed)
if not validate_sub_epoch_sampling(rng, sub_epoch_weight_list, weight_proof):
log.error("failed weight proof sub epoch sample validation")
return False, uint32(0), [], []
constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
self._constants, summaries, weight_proof
)
vdf_tasks: List[asyncio.Future] = []
recent_blocks_validation_task: asyncio.Future = asyncio.get_running_loop().run_in_executor(
self._executor,
_validate_recent_blocks_and_get_records,
constants,
wp_recent_chain_bytes,
summary_bytes,
pathlib.Path(self._executor_shutdown_tempfile.name),
)
try:
if not skip_segment_validation:
segments_validated, vdfs_to_validate = _validate_sub_epoch_segments(
constants, rng, wp_segment_bytes, summary_bytes
)
if not segments_validated:
return False, uint32(0), [], []
vdf_chunks = chunks(vdfs_to_validate, self._num_processes)
for chunk in vdf_chunks:
byte_chunks = []
for vdf_proof, classgroup, vdf_info in chunk:
byte_chunks.append((bytes(vdf_proof), bytes(classgroup), bytes(vdf_info)))
vdf_task: asyncio.Future = asyncio.get_running_loop().run_in_executor(
self._executor,
_validate_vdf_batch,
constants,
byte_chunks,
pathlib.Path(self._executor_shutdown_tempfile.name),
)
vdf_tasks.append(vdf_task)
for vdf_task in vdf_tasks:
validated = await vdf_task
if not validated:
return False, uint32(0), [], []
valid_recent_blocks, records_bytes = await recent_blocks_validation_task
finally:
recent_blocks_validation_task.cancel()
for vdf_task in vdf_tasks:
vdf_task.cancel()
if not valid_recent_blocks:
log.error("failed validating weight proof recent blocks")
# Verify the data
return False, uint32(0), [], []
records = [BlockRecord.from_bytes(b) for b in records_bytes]
# TODO fix find fork point
return True, uint32(0), summaries, records
def get_fork_point(self, old_wp: Optional[WeightProof], new_wp: WeightProof) -> uint32:
"""
        Iterate through sub-epoch summaries to find the fork point. This method is conservative: it does not
        necessarily return the actual fork point and may return a height that lies before it.
"""
if old_wp is None:
return uint32(0)
old_ses = set()
for ses in old_wp.sub_epochs:
old_ses.add(ses.reward_chain_hash)
overflow = 0
count = 0
for idx, new_ses in enumerate(new_wp.sub_epochs):
if new_ses.reward_chain_hash in old_ses:
count += 1
overflow += new_ses.num_blocks_overflow
continue
else:
break
# Try to find an exact fork point
if new_wp.recent_chain_data[0].height >= old_wp.recent_chain_data[0].height:
left_wp = old_wp
right_wp = new_wp
else:
left_wp = new_wp
right_wp = old_wp
r_index = 0
l_index = 0
while r_index < len(right_wp.recent_chain_data) and l_index < len(left_wp.recent_chain_data):
if right_wp.recent_chain_data[r_index].header_hash == left_wp.recent_chain_data[l_index].header_hash:
r_index += 1
continue
# Keep incrementing left pointer until we find a match
l_index += 1
if r_index != 0:
# We found a matching block, this is the last matching block
return right_wp.recent_chain_data[r_index - 1].height
# Just return the matching sub epoch height
return uint32((self._constants.SUB_EPOCH_BLOCKS * count) - overflow)
|
py | 1a44ca1f50e33db7a6942284bb6f05e4f0fc69d2 | from logs import sonarlog
from twisted.internet import reactor
import conf_nodes
import configuration
import domain_provision
import metrichandler
import model
import msgpump
import scoreboard
import conf_controller as conf
# Setup logging
logger = sonarlog.getLogger('controller')
class Controller(object):
def __init__(self):
# New message pump with heartbeat to keep it spinning
def heartbeat(pump):
pump.callLater(10 * 60, heartbeat, pump)
self.pump = msgpump.Pump(heartbeat)
# Create scoreboard
self.scoreboard = scoreboard.Scoreboard(self.pump)
# Create new domain and node model
self.model = model.Model(self.pump, self.scoreboard)
# Create notification handler
self.handler = metrichandler.MetricHandler(self.model)
# Setup the re-allocation strategy
self.strategy_reallocation = self.__build_stragegy_reallocation()
# Setup initial placement strategy
self.strategy_initial_placement = self.__build_strategy_initial_placement()
# Setup the placement strategy
self.strategy_placement = self.__build_strategy_placement()
def __build_strategy_initial_placement(self):
if conf.STRATEGY_INITIAL_PLACEMENT == 'firstFit':
import initial_placement_firstfit
return initial_placement_firstfit.FirstFitPlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'firstFitVector':
import initial_placement_ffvector
return initial_placement_ffvector.FFVPlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'dotProduct':
import initial_placement_dotproduct
return initial_placement_dotproduct.DotProductPlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'cosine':
import initial_placement_cosine
return initial_placement_cosine.CosinePlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'ssapv':
import initial_placement_ssapv
return initial_placement_ssapv.SSAPvPlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'cssapv':
import initial_placement_cssapv
return initial_placement_cssapv.CSSAPvPlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'dsap':
if conf.STRATEGY_REALLOCATION != 'dsap':
print 'FATAL: STRATEGY REALLOCATION needs to be dsap'
import initial_placement_dsap
return initial_placement_dsap.DSAPPlacement(self.strategy_reallocation)
if conf.STRATEGY_INITIAL_PLACEMENT == 'file':
import initial_placement_file
return initial_placement_file.FilePlacement()
if conf.STRATEGY_INITIAL_PLACEMENT == 'round':
import initial_placement_rr
return initial_placement_rr.RRPlacement()
else:
print 'No initial placement strategy defined'
return None
def __build_strategy_placement(self):
if conf.STRATEGY_PLACEMENT == 'static':
import placement_static
return placement_static.Static(self.model)
if conf.STRATEGY_PLACEMENT == 'firstNode':
import placement
return placement.PlacementBase(self.model)
elif conf.STRATEGY_PLACEMENT == 'random':
import placement_random
return placement_random.RandomPlacement(self.model)
elif conf.STRATEGY_PLACEMENT == 'firstFit':
import placement_firstfit
return placement_firstfit.FirstFit(self.model)
elif conf.STRATEGY_PLACEMENT == 'firstFitDemand':
import placement_firstfit_demand
return placement_firstfit_demand.FirstFitDemand(self.model)
elif conf.STRATEGY_PLACEMENT == 'bestFit':
import placement_bestfit
return placement_bestfit.BestFit(self.model)
elif conf.STRATEGY_PLACEMENT == 'bestFitDemand':
import placement_bestfit_demand
return placement_bestfit_demand.BestFitDemand(self.model)
elif conf.STRATEGY_PLACEMENT == 'worstFit':
import placement_worstfit
return placement_worstfit.WorstFit(self.model)
elif conf.STRATEGY_PLACEMENT == 'worstFitDemand':
import placement_worstfit_demand
return placement_worstfit_demand.WorstFitDemand(self.model)
elif conf.STRATEGY_PLACEMENT == 'nextFit':
import placement_nextfit
return placement_nextfit.NextFit(self.model)
elif conf.STRATEGY_PLACEMENT == 'nextFitDemand':
import placement_nextfit_demand
return placement_nextfit_demand.NextFitDemand(self.model)
elif conf.STRATEGY_PLACEMENT == 'dotProduct':
import placement_dotproduct
return placement_dotproduct.DotProduct(self.model)
elif conf.STRATEGY_PLACEMENT == 'dotProductDemand':
import placement_dotproduct_demand
return placement_dotproduct_demand.DotProductDemand(self.model)
elif conf.STRATEGY_PLACEMENT == 'l2':
import placement_L2
return placement_L2.L2(self.model)
elif conf.STRATEGY_PLACEMENT == 'l2Demand':
import placement_L2_demand
return placement_L2_demand.L2Demand(self.model)
elif conf.STRATEGY_PLACEMENT == 'harmonic':
import placement_harmonic
return placement_harmonic.Harmonic(self.model)
elif conf.STRATEGY_PLACEMENT == 'round':
import placement_round
return placement_round.RoundRobin(self.model)
else:
print 'No placement strategy defined'
return
def __build_stragegy_reallocation(self):
# Create controller based on the strategy
if conf.STRATEGY_REALLOCATION == 'tcontrol':
import strategy_t
return strategy_t.TTestStrategy(self.scoreboard, self.pump, self.model)
if conf.STRATEGY_REALLOCATION == 'kmcontrol':
import strategy_km
return strategy_km.Strategy(self.scoreboard, self.pump, self.model)
elif conf.STRATEGY_REALLOCATION == 'dsap':
import strategy_dsap
return strategy_dsap.Strategy(self.scoreboard, self.pump, self.model)
elif conf.STRATEGY_REALLOCATION == 'dsapp':
import strategy_dsapp
return strategy_dsapp.Strategy(self.scoreboard, self.pump, self.model)
elif conf.STRATEGY_REALLOCATION == 'none':
import strategy_none
return strategy_none.Strategy(self.scoreboard, self.pump, self.model)
else:
print 'No controller defined'
return
def start(self):
# Build model - This has to be executed at start not in init
# The setup routine creates a Controller instance and then
# calculates and establishes the initial placement.
#
# The model initialize method queries the libvirt for the
# current domain placement. This placement is established after
# creating a Controller instance and its __imit__ method call.
#
# Hence, if the model was initialized in the init method it would
# acquire the infrastructure state before the calculated initial
# placement was established
if not self.__model_initialize():
print 'Exiting because of error in initial placement'
return
# Exit after intial placement
if conf.EXIT_AFTER_INITIAL_PLACEMENT:
return
# Production mode connects with sonar
if configuration.PRODUCTION:
print 'RUNNING IN PRODUCTION MODE'
if conf.is_start_reallocation():
# Connect with Sonar for metric readings
import connector
connector.connect_sonar(self.model, self.handler)
# Start reallocation strategy
self.strategy_reallocation.dump()
self.strategy_reallocation.start()
if conf.is_start_placement():
# Create domain provisioner (the concrete one in this case)
pv = domain_provision.ConcreteProvisioning(self.model, self.pump, self.strategy_placement)
# Start infrastructure service with a reference to the provisioner
import infrastructure_service
infrastructure_service.start(pv)
else:
print 'RUNNING IN SIMULATION MODE'
# Start load driver that simulations Sonar
import driver_load
driver_load = driver_load.Driver(self.scoreboard, self.pump,
self.model, self.handler,
not conf.is_start_placement())
driver_load.start()
if conf.is_start_reallocation():
print 'Starting reallocation strategy...'
# Start reallocation strategy
self.strategy_reallocation.dump()
self.strategy_reallocation.start()
if conf.is_start_placement():
print 'Starting placement strategy...'
# Create domain provisioner (the model one in this case)
pv = domain_provision.Provisioning(self.model, self.pump, self.strategy_placement)
# Start domain driver that simulates Rain domain provisioning
import driver_domains
driver_domains = driver_domains.Driver(self.pump, self.scoreboard, pv)
driver_domains.start()
# Update scoreboard
self.scoreboard.start()
def __model_initialize(self):
# Run configuration
if configuration.PRODUCTION:
# Load current infrastructure state into model
self.model.model_from_current_allocation()
else:
# Calculate initial placement
migrations, servers = self.strategy_initial_placement.execute()
# Update scoreboard
self.scoreboard.set_initial_placement_duration()
# Check for valid initial placement
if migrations is None and servers is None:
print 'Invalid initial placement'
self.scoreboard.add_active_info(conf_nodes.NODES, 0)
return False
else:
# Get execution duration
self.model.model_from_migrations(migrations)
# Dump model
self.model.dump()
# Update empty counts
self.model.log_active_server_info();
# Initialize all control variables
self.model.reset()
return True
def start():
###########################################################
### START CONTROLLER ######################################
###########################################################
# Create a new controller instance and start the controller
controller = Controller()
controller.start()
###########################################################
# Start reactor (is stopped in the message pump)
if not conf.EXIT_AFTER_INITIAL_PLACEMENT:
print 'Running reactor...'
reactor.run()
###########################################################
# Check if exited with an error
if controller.pump.exit_on_error:
raise controller.pump.exit_on_error
# After the simulation get the scoreboard results
print 'Reading scoreboard...'
controller.scoreboard.dump()
names, res = controller.scoreboard.get_result_line()
# Return results only
return names, res
if __name__ == '__main__':
start()
|
py | 1a44ca93ed193a70b8fdfd666130b26bf110a7ee | import urllib.request
import unittest
import time
import dewpoint.aws
class TestAWSAuthHandlerV4(unittest.TestCase):
def setUp(self):
self.auth_handler = dewpoint.aws.AWSAuthHandlerV4(
key='AKIDEXAMPLE',
secret='wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY',
region='us-east-1',
service='iam')
def test_canonical_request(self):
req = urllib.request.Request('https://iam.amazonaws.com/?Action=ListUsers&Version=2010-05-08', headers={
'Content-type': 'application/x-www-form-urlencoded; charset=utf-8',
'Host': 'iam.amazonaws.com',
'x-amz-date': '20150830T123600Z',
})
chash = dewpoint.aws.canonical_hash(req)
self.assertEqual(chash, 'f536975d06c0309214f805bb90ccff089219ecd68b2577efef23edd43b7e1a59')
def test_signing_key(self):
scope = '{date}/{region}/{service}/aws4_request'.format(
date='20150830',
region='us-east-1',
service='iam')
skey = self.auth_handler.signing_key(scope)
self.assertEqual(skey, bytes.fromhex('c4afb1cc5771d871763a393e44b703571b55cc28424d1a5e86da6ed3c154a4b9'))
def test_signature(self):
req = urllib.request.Request('https://iam.amazonaws.com/?Action=ListUsers&Version=2010-05-08', headers={
'Content-type': 'application/x-www-form-urlencoded; charset=utf-8',
'Host': 'iam.amazonaws.com',
'x-amz-date': '20150830T123600Z',
})
req.timestamp = time.localtime(1440963360.0)
self.auth_handler.sign(req)
self.assertEqual(req.headers['Authorization'],
'AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/iam/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=5d672d79c15b13162d9279b0855cfba6789a8edb4c82c400e06b5924a6f2b5d7')
|
py | 1a44cad5ba211229e0d5c9e8239582152964c7bd | from flask import current_app, Blueprint, request
from assemblyline_ui.api.base import api_login, make_api_response
from assemblyline_ui.config import config
API_PREFIX = "/api/v4"
apiv4 = Blueprint("apiv4", __name__, url_prefix=API_PREFIX)
apiv4._doc = "Version 4 Api Documentation"
#####################################
# API DOCUMENTATION
# noinspection PyProtectedMember,PyBroadException
@apiv4.route("/")
@api_login(audit=False, required_priv=['R', 'W'],
require_type=["user", "signature_importer", "signature_manager", "admin"])
def get_api_documentation(**kwargs):
"""
Full API doc.
Loop through all registered API paths and display their documentation.
    Returns a list of API definitions.
Variables:
None
Arguments:
None
Data Block:
None
Result example:
[ # LIST of:
{'name': "Api Doc", # Name of the api
'path': "/api/path/<variable>/", # API path
'ui_only': false, # Is UI only API
'methods': ["GET", "POST"], # Allowed HTTP methods
'description': "API doc.", # API documentation
'id': "api_doc", # Unique ID for the API
'function': "apiv4.api_doc", # Function called in the code
'protected': False, # Does the API require login?
'require_type': ['user'], # Type of users allowed to use API
'complete' : True}, # Is the API stable?
...]
"""
user_types = kwargs['user']['type']
api_blueprints = {}
api_list = []
for rule in current_app.url_map.iter_rules():
if rule.rule.startswith(request.path):
methods = []
for item in rule.methods:
if item != "OPTIONS" and item != "HEAD":
methods.append(item)
func = current_app.view_functions[rule.endpoint]
require_type = func.__dict__.get('require_type', ['user'])
allow_readonly = func.__dict__.get('allow_readonly', True)
if config.ui.read_only and not allow_readonly:
continue
for u_type in user_types:
if u_type in require_type:
doc_string = func.__doc__
func_title = " ".join([x.capitalize()
for x in rule.endpoint[rule.endpoint.rindex(".") + 1:].split("_")])
blueprint = rule.endpoint[:rule.endpoint.rindex(".")]
if blueprint == "apiv4":
blueprint = "documentation"
if blueprint not in api_blueprints:
try:
doc = current_app.blueprints[rule.endpoint[:rule.endpoint.rindex(".")]]._doc
except Exception:
doc = ""
api_blueprints[blueprint] = doc
try:
description = "\n".join([x[4:] for x in doc_string.splitlines()])
except Exception:
description = "[INCOMPLETE]\n\nTHIS API HAS NOT BEEN DOCUMENTED YET!"
api_id = rule.endpoint.replace("apiv4.", "").replace(".", "_")
api_list.append({
"protected": func.__dict__.get('protected', False),
"require_type": require_type,
"name": func_title,
"id": api_id,
"function": f"api.v4.{rule.endpoint}",
"path": rule.rule, "ui_only": rule.rule.startswith("%sui/" % request.path),
"methods": methods, "description": description,
"complete": "[INCOMPLETE]" not in description,
"required_priv": func.__dict__.get('required_priv', "")
})
break
return make_api_response({"apis": api_list, "blueprints": api_blueprints})
|
py | 1a44cb1902ed7c3f7e982f1f2847f5190bdbdd90 | mappp = [list(map(int, input().split())) for _ in range(9)]
pos = []
for i in range(9):
for j in range(9):
if mappp[i][j] == 0:
pos.append([i, j])
enddd = False
def back_dfs(idx):
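    # Backtracking search: fill the empty cells collected in `pos` one at a time,
    # trying every digit that is still legal for the cell's row, column and 3x3 box,
    # and undoing the choice (resetting the cell to 0) when a branch dead-ends.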
global enddd
if enddd:
return
if idx == len(pos):
for i in range(9):
for j in range(9):
print(mappp[i][j], end=" ")
print()
enddd = True
return
else:
x = pos[idx][0]
y = pos[idx][1]
arr = [i for i in range(1, 10)]
for a in range(9):
if mappp[x][a] in arr:
arr.remove(mappp[x][a])
if mappp[a][y] in arr:
arr.remove(mappp[a][y])
start_i = (x // 3) * 3
start_j = (y // 3) * 3
for a in range(start_i, start_i + 3):
for b in range(start_j, start_j + 3):
if mappp[a][b] in arr:
arr.remove(mappp[a][b])
for a in arr:
mappp[x][y] = a
back_dfs(idx + 1)
mappp[x][y] = 0
back_dfs(0) |
py | 1a44cb54fb86070977efd6fd9dd2138c444974f2 | # -*- coding: utf-8 -*-
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Kite document requests handlers and senders."""
from collections import defaultdict
import logging
import hashlib
import os
import os.path as osp
from qtpy.QtCore import QMutexLocker
from spyder.plugins.completion.kite.decorators import send_request, handles
from spyder.plugins.completion.manager.api import (
LSPRequestTypes, CompletionItemKind)
# Kite can return e.g. "int | str", so we make the default hint VALUE.
KITE_DOCUMENT_TYPES = defaultdict(lambda: CompletionItemKind.VALUE, {
'function': CompletionItemKind.FUNCTION,
'type': CompletionItemKind.CLASS,
'module': CompletionItemKind.MODULE,
'descriptor': CompletionItemKind.PROPERTY,
'union': CompletionItemKind.VALUE,
'unknown': CompletionItemKind.TEXT,
'keyword': CompletionItemKind.KEYWORD,
'call': CompletionItemKind.FUNCTION,
})
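# Because of the defaultdict factory above, bracket access on an unseen hint such as
# KITE_DOCUMENT_TYPES['int | str'] falls back to CompletionItemKind.VALUE.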
KITE_COMPLETION = 'Kite'
logger = logging.getLogger(__name__)
def convert_text_snippet(snippet_info):
text = snippet_info['text']
text_builder = []
prev_pos = 0
next_pos = None
num_placeholders = len(snippet_info['placeholders'])
total_placeholders = num_placeholders + 1
for i, placeholder in enumerate(snippet_info['placeholders']):
placeholder_begin = placeholder['begin']
placeholder_end = placeholder['end']
next_pos = placeholder_begin
standard_text = text[prev_pos:next_pos]
snippet_text = text[next_pos:placeholder_end]
prev_pos = placeholder['end']
text_builder.append(standard_text)
placeholder_number = (i + 1) % total_placeholders
if snippet_text:
snippet = '${%d:%s}' % (placeholder_number, snippet_text)
else:
snippet = '$%d' % (placeholder_number)
text_builder.append(snippet)
text_builder.append(text[prev_pos:])
if num_placeholders > 0:
text_builder.append('$0')
return ''.join(text_builder)
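# Illustrative sketch (example payload, not from the original module): given a Kite
# snippet shaped like the one below, convert_text_snippet() numbers each placeholder
# and appends a trailing $0 tab stop:
#
#     snippet_info = {'text': 'foo(bar)',
#                     'placeholders': [{'begin': 4, 'end': 7}]}
#     convert_text_snippet(snippet_info)   # -> 'foo(${1:bar})$0'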
class DocumentProvider:
@send_request(method=LSPRequestTypes.DOCUMENT_DID_OPEN)
def document_did_open(self, params):
request = {
'source': 'spyder',
'filename': osp.realpath(params['file']),
'text': params['text'],
'action': 'focus',
'selections': [{
'start': params['selection_start'],
'end': params['selection_end'],
'encoding': 'utf-16',
}],
}
with QMutexLocker(self.mutex):
self.get_status(params['file'])
self.opened_files[params['file']] = params['text']
return request
@send_request(method=LSPRequestTypes.DOCUMENT_DID_CHANGE)
def document_did_change(self, params):
request = {
'source': 'spyder',
'filename': osp.realpath(params['file']),
'text': params['text'],
'action': 'edit',
'selections': [{
'start': params['selection_start'],
'end': params['selection_end'],
'encoding': 'utf-16',
}],
}
with QMutexLocker(self.mutex):
self.opened_files[params['file']] = params['text']
return request
@send_request(method=LSPRequestTypes.DOCUMENT_CURSOR_EVENT)
def document_cursor_event(self, params):
request = {
'source': 'spyder',
'filename': osp.realpath(params['file']),
'text': params['text'],
'action': 'edit',
'selections': [{
'start': params['selection_start'],
'end': params['selection_end'],
'encoding': 'utf-16',
}],
}
return request
@send_request(method=LSPRequestTypes.DOCUMENT_COMPLETION)
def request_document_completions(self, params):
text = self.opened_files[params['file']]
request = {
'filename': osp.realpath(params['file']),
'editor': 'spyder',
'no_snippets': not self.enable_code_snippets,
'text': text,
'position': {
'begin': params['selection_start'],
'end': params['selection_end'],
},
'offset_encoding': 'utf-16',
}
return request
@handles(LSPRequestTypes.DOCUMENT_COMPLETION)
def convert_completion_request(self, response):
# The response schema is tested via mocking in
# spyder/plugins/editor/widgets/tests/test_introspection.py
logger.debug(response)
if response is None:
return {'params': []}
spyder_completions = []
completions = response['completions']
if completions is not None:
for i, completion in enumerate(completions):
entry = {
'kind': KITE_DOCUMENT_TYPES.get(
completion['hint'], CompletionItemKind.TEXT),
'label': completion['display'],
'textEdit': {
'newText': convert_text_snippet(completion['snippet']),
'range': {
'start': completion['replace']['begin'],
'end': completion['replace']['end'],
},
},
'filterText': '',
# Use the returned ordering
'sortText': (i, 0),
'documentation': completion['documentation']['text'],
'provider': KITE_COMPLETION,
}
spyder_completions.append(entry)
if 'children' in completion:
for j, child in enumerate(completion['children']):
child_entry = {
'kind': KITE_DOCUMENT_TYPES.get(
child['hint'], CompletionItemKind.TEXT),
'label': ' '*2 + child['display'],
'textEdit': {
'newText': convert_text_snippet(
child['snippet']),
'range': {
'start': child['replace']['begin'],
'end': child['replace']['end'],
},
},
'insertText': convert_text_snippet(
child['snippet']),
'filterText': '',
# Use the returned ordering
'sortText': (i, j+1),
'documentation': child['documentation']['text'],
'provider': KITE_COMPLETION,
}
spyder_completions.append(child_entry)
return {'params': spyder_completions}
@send_request(method=LSPRequestTypes.DOCUMENT_HOVER)
def request_hover(self, params):
text = self.opened_files.get(params['file'], "")
md5 = hashlib.md5(text.encode('utf-8')).hexdigest()
path = params['file']
path = path.replace(osp.sep, ':')
logger.debug(path)
if os.name == 'nt':
path = path.replace('::', ':')
path = ':windows:' + path
request = {
'filename': path,
'hash': md5,
'cursor_runes': params['offset'],
'offset_encoding': 'utf-16',
}
return None, request
@handles(LSPRequestTypes.DOCUMENT_HOVER)
def process_hover(self, response):
# logger.debug(response)
text = None
logger.debug(response)
if response is not None:
report = response['report']
text = report['description_text']
if len(text) == 0:
text = None
else:
text = None
return {'params': text}
@send_request(method=LSPRequestTypes.DOCUMENT_SIGNATURE)
def request_signature(self, request):
text = self.opened_files.get(request['file'], "")
response = {
'editor': 'spyder',
'filename': request['file'],
'text': text,
'cursor_runes': request['offset'],
'offset_encoding': 'utf-16',
}
return response
@handles(LSPRequestTypes.DOCUMENT_SIGNATURE)
def process_signature(self, response):
params = None
if response is not None:
calls = response['calls']
if len(calls) > 0:
call = calls[0]
callee = call['callee']
documentation = callee['synopsis']
call_label = callee['repr']
signatures = call['signatures']
arg_idx = call['arg_index']
parameters = []
names = []
logger.debug(signatures)
if len(signatures) > 0:
signature = signatures[0]
logger.debug(signature)
if signature['args'] is not None:
for arg in signature['args']:
parameters.append({
'label': arg['name'],
'documentation': ''
})
names.append(arg['name'])
func_args = ', '.join(names)
call_label = '{0}({1})'.format(call_label, func_args)
base_signature = {
'label': call_label,
'documentation': documentation,
'parameters': parameters
}
# doc_signatures.append(base_signature)
params = {
'signatures': base_signature,
'activeSignature': 0,
'activeParameter': arg_idx,
'provider': KITE_COMPLETION
}
return {'params': params}
|
py | 1a44cd2f9a4306442112756cb4d29e9e97aa3011 | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Copyright (c) 2017 The Raven Core developers
# Copyright (c) 2018 The Rito Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Testing asset mempool use cases
"""
from test_framework.test_framework import RitoTestFramework
from test_framework.util import *
import string
class AssetMempoolTest(RitoTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def activate_assets(self):
self.log.info("Generating RITO and activating assets...")
n0, n1 = self.nodes[0], self.nodes[1]
n0.generate(1)
self.sync_all()
n0.generate(216)
self.sync_all()
n1.generate(216)
self.sync_all()
assert_equal("active", n0.getblockchaininfo()['bip9_softforks']['assets']['status'])
def issue_mempool_test(self):
self.log.info("Testing issue mempool...")
n0, n1 = self.nodes[0], self.nodes[1]
disconnect_all_nodes(self.nodes)
asset_name = "MEMPOOL"
# Issue asset on chain 1 and mine it into the blocks
n0.issue(asset_name)
n0.generate(15)
# Issue asset on chain 2 but keep it in the mempool. No mining
txid = n1.issue(asset_name)
print(txid)
connect_all_nodes_bi(self.nodes)
assert_equal(n0.getblockcount(), n1.getblockcount())
assert_equal(n0.getbestblockhash(), n1.getbestblockhash())
def run_test(self):
self.activate_assets()
self.issue_mempool_test()
if __name__ == '__main__':
AssetMempoolTest().main()
|
py | 1a44cd54e482e05878ca595535a27ba38bbb7f5a | """
Copyright 2019 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import logging
from collections import namedtuple
from enum import Enum, IntEnum
from functools import wraps
from typing import Optional, Union, List
import pandas as pd
from gs_quant.api.gs.data import QueryType
def _create_enum(name, members):
return Enum(name, {n.upper(): n.lower() for n in members}, module=__name__)
def _create_int_enum(name, mappings):
return IntEnum(name, {k.upper(): v for k, v in mappings.items()})
Interpolate = _create_enum('Interpolate', ['intersect', 'step', 'nan', 'zero', 'time'])
Returns = _create_enum('Returns', ['simple', 'logarithmic'])
SeriesType = _create_enum('SeriesType', ['prices', 'returns'])
Window = namedtuple('Window', ['w', 'r'])
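# In Window(w, r), `w` is the window size and `r` is the ramp-up length whose
# leading points are discarded by apply_ramp() below.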
def _check_window(x: pd.Series, window: Window):
if len(x) > 0:
if window.w <= 0:
raise ValueError('Window value must be greater than zero.')
if window.r > len(x) or window.r < 0:
            raise ValueError('Ramp value must be between zero and the length of the series.')
def apply_ramp(x: pd.Series, window: Window) -> pd.Series:
_check_window(x, window)
return x[window.r:] if window.w <= len(x) else pd.Series([])
def normalize_window(x: pd.Series, window: Union[Window, int, None], default_window: int = None) -> Window:
if default_window is None:
default_window = x.size
if isinstance(window, int):
window = Window(w=window, r=window)
else:
if window is None:
window = Window(w=default_window, r=0)
else:
if window.w and window.r is None:
window_size = window.w
window = Window(w=window_size, r=window_size)
elif window.w is None and window.r >= 0:
window = Window(w=default_window, r=window.r)
_check_window(x, window)
return window
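# Minimal usage sketch (hypothetical series, not part of the original module):
#
#     s = pd.Series(range(10))
#     normalize_window(s, 5)              # -> Window(w=5, r=5)
#     normalize_window(s, None)           # -> Window(w=10, r=0)
#     apply_ramp(s, Window(w=5, r=5))     # drops the first 5 ramp-up points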
def plot_function(fn):
# Indicates that fn should be exported to plottool as a pure function.
fn.plot_function = True
return fn
def plot_measure(asset_class: Optional[tuple] = None, asset_type: Optional[tuple] = None,
dependencies: Optional[List[QueryType]] = []):
# Indicates that fn should be exported to plottool as a member function / pseudo-measure.
# Set category to None for no restrictions, else provide a tuple of allowed values.
def decorator(fn):
assert asset_class is None or isinstance(asset_class, tuple)
assert asset_type is None or isinstance(asset_type, tuple)
fn.plot_measure = True
fn.asset_class = asset_class
fn.asset_type = asset_type
fn.dependencies = dependencies
return fn
return decorator
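# Hypothetical usage sketch (placeholder measure name and values, not from the
# original module); the decorator only records metadata on the function object:
#
#     @plot_measure(asset_class=('Equity',), asset_type=None)
#     def my_measure(asset, *, window=None):
#         ...
#
# afterwards my_measure.plot_measure is True and my_measure.asset_class == ('Equity',).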
def log_return(logger: logging.Logger, message):
def outer(fn):
@wraps(fn)
def inner(*args, **kwargs):
response = fn(*args, **kwargs)
logger.debug('%s: %s', message, response)
return response
return inner
return outer
|
py | 1a44cd7b40d13575475cad18c184b4065a9e6789 | from decimal import Decimal
from unittest.mock import patch
from django.core.management import call_command
from django.urls import reverse
from prices import Money
from saleor.discount import DiscountValueType
from saleor.product.models import Product, ProductVariant
from saleor.product.tasks import (
update_all_products_minimal_variant_prices_task,
update_products_minimal_variant_prices_of_catalogues,
update_products_minimal_variant_prices_task,
)
from saleor.product.utils.variant_prices import update_product_minimal_variant_price
def test_update_product_minimal_variant_price(product):
variant = product.variants.first()
variant.price_override = Money("4.99", "USD")
variant.save()
assert product.minimal_variant_price == product.price == Money("10", "USD")
update_product_minimal_variant_price(product)
assert product.minimal_variant_price == variant.price_override
def test_update_products_minimal_variant_prices_of_catalogues_for_product(product):
variant = ProductVariant(
product=product,
sku="SKU_MINIMAL_VARIANT_PRICE",
price_override=Money("0.99", "USD"),
)
variant.save()
product.refresh_from_db()
assert product.minimal_variant_price == product.price == Money("10", "USD")
update_products_minimal_variant_prices_of_catalogues(product_ids=[product.pk])
product.refresh_from_db()
assert product.minimal_variant_price == variant.price_override
def test_update_products_minimal_variant_prices_of_catalogues_for_category(
category, product
):
variant = ProductVariant(
product=product,
sku="SKU_MINIMAL_VARIANT_PRICE",
price_override=Money("0.89", "USD"),
)
variant.save()
product.refresh_from_db()
assert product.minimal_variant_price == product.price == Money("10", "USD")
update_products_minimal_variant_prices_of_catalogues(
category_ids=[product.category_id]
)
product.refresh_from_db()
assert product.minimal_variant_price == variant.price_override
def test_update_products_minimal_variant_prices_of_catalogues_for_collection(
collection, product
):
variant = ProductVariant(
product=product,
sku="SKU_MINIMAL_VARIANT_PRICE",
price_override=Money("0.79", "USD"),
)
variant.save()
product.refresh_from_db()
collection.products.add(product)
assert product.minimal_variant_price == product.price == Money("10", "USD")
update_products_minimal_variant_prices_of_catalogues(collection_ids=[collection.pk])
product.refresh_from_db()
assert product.minimal_variant_price == variant.price_override
def test_update_all_products_minimal_variant_prices_task(product_list):
price_override = Money("0.01", "USD")
for product in product_list:
assert product.minimal_variant_price > price_override
variant = product.variants.first()
variant.price_override = price_override
variant.save()
# Check that "variant.save()" doesn't update the "minimal_variant_price"
assert product.minimal_variant_price > price_override
update_all_products_minimal_variant_prices_task.apply()
for product in product_list:
product.refresh_from_db()
assert product.minimal_variant_price == price_override
def test_update_products_minimal_variant_prices_task(product_list):
price_override = Money("0.01", "USD")
for product in product_list:
assert product.minimal_variant_price > price_override
variant = product.variants.first()
variant.price_override = price_override
variant.save()
# Check that "variant.save()" doesn't update the "minimal_variant_price"
assert product.minimal_variant_price > price_override
update_products_minimal_variant_prices_task.apply(
kwargs={"product_ids": [product.pk for product in product_list]}
)
for product in product_list:
product.refresh_from_db()
assert product.minimal_variant_price == price_override
def test_product_objects_create_sets_default_minimal_variant_price(
product_type, category
):
product1 = Product.objects.create(
name="Test product 1",
price=Money("10.00", "USD"),
category=category,
product_type=product_type,
is_published=True,
)
assert product1.minimal_variant_price
assert product1.price == product1.minimal_variant_price == Money("10", "USD")
product2 = Product.objects.create(
name="Test product 2",
price=Money("10.00", "USD"),
minimal_variant_price=Money("20.00", "USD"),
category=category,
product_type=product_type,
is_published=True,
)
assert product2.minimal_variant_price
assert product2.price != product2.minimal_variant_price
assert product2.minimal_variant_price == Money("20", "USD")
def test_product_objects_bulk_create_sets_default_minimal_variant_price(
product_type, category
):
[product1, product2] = Product.objects.bulk_create(
[
Product(
name="Test product 1",
price=Money("10.00", "USD"),
category=category,
product_type=product_type,
is_published=True,
),
Product(
name="Test product 2",
price=Money("10.00", "USD"),
minimal_variant_price=Money("20.00", "USD"),
category=category,
product_type=product_type,
is_published=True,
),
]
)
assert product1.minimal_variant_price
assert product1.price == product1.minimal_variant_price == Money("10", "USD")
assert product2.minimal_variant_price
assert product2.price != product2.minimal_variant_price
assert product2.minimal_variant_price == Money("20", "USD")
def test_product_variant_objects_create_updates_minimal_variant_price(product):
assert product.minimal_variant_price == Money("10.00", "USD")
ProductVariant.objects.create(
product=product, sku="1", price_override=Money("1.00", "USD"), quantity=1
)
product.refresh_from_db()
assert product.minimal_variant_price == Money("1.00", "USD")
def test_product_variant_objects_bulk_create_updates_minimal_variant_price(product):
assert product.minimal_variant_price == Money("10.00", "USD")
ProductVariant.objects.bulk_create(
[
ProductVariant(
product=product,
sku="1",
price_override=Money("1.00", "USD"),
quantity=1,
),
ProductVariant(
product=product,
sku="2",
price_override=Money("5.00", "USD"),
quantity=1,
),
]
)
product.refresh_from_db()
assert product.minimal_variant_price == Money("1.00", "USD")
def test_dashboard_product_create_view_sets_minimal_variant_price(
admin_client, product_type, category
):
url = reverse("dashboard:product-add", kwargs={"type_pk": product_type.pk})
data = {
"name": "Product name",
"description": "Description.",
"price_0": "9.99",
"price_1": "USD",
"category": category.pk,
}
response = admin_client.post(url, data)
assert response.status_code == 302
assert Product.objects.count() == 1
product = Product.objects.get()
assert product.minimal_variant_price == product.price == Money("9.99", "USD")
def test_dashboard_product_variant_create_view_updates_minimal_variant_price(
admin_client, product
):
url = reverse("dashboard:variant-add", kwargs={"product_pk": product.pk})
data = {
"sku": "ACME/1/2/3",
"price_override_0": "4.99",
"price_override_1": product.currency,
"quantity": 1,
}
response = admin_client.post(url, data)
assert response.status_code == 302
product.refresh_from_db()
assert product.variants.count() == 2
assert product.minimal_variant_price != product.price
assert product.minimal_variant_price == Money("4.99", "USD")
def test_dashboard_product_variant_delete_view_updates_minimal_variant_price(
admin_client, product
):
# Set "price_override" on the variant to lower the "minimal_variant_price"
assert product.minimal_variant_price == product.price == Money("10", "USD")
variant = product.variants.get()
variant.price_override = Money("4.99", "USD")
variant.save()
update_product_minimal_variant_price(product)
product.refresh_from_db()
assert product.minimal_variant_price == variant.price_override
url = reverse(
"dashboard:variant-delete",
kwargs={"product_pk": product.pk, "variant_pk": variant.pk},
)
response = admin_client.post(url)
assert response.status_code == 302
product.refresh_from_db()
assert product.minimal_variant_price == product.price
def test_dashboard_sale_of_product_create_view_updates_minimal_variant_price(
admin_client, product
):
# Store the old "minimal_variant_price"
old_minimal_variant_price = product.minimal_variant_price
# Create the "Sale" object of the given product
url = reverse("dashboard:sale-add")
data = {
"name": "Half price products",
"type": DiscountValueType.PERCENTAGE,
"value": "50",
"start_date": "2019-07-11 00:00:01",
"products": [product.pk],
}
response = admin_client.post(url, data)
assert response.status_code == 302
product.refresh_from_db()
assert product.minimal_variant_price == Decimal("0.5") * old_minimal_variant_price
def test_dashboard_sale_of_category_create_view_updates_minimal_variant_price(
admin_client, product
):
category = product.category
# Store the old "minimal_variant_price"
old_minimal_variant_price = product.minimal_variant_price
# Create the "Sale" object of the given category
url = reverse("dashboard:sale-add")
data = {
"name": "Half price category",
"type": DiscountValueType.PERCENTAGE,
"value": "50",
"start_date": "2019-07-11 00:00:01",
"categories": [category.pk],
}
response = admin_client.post(url, data)
assert response.status_code == 302
product.refresh_from_db()
assert product.minimal_variant_price == Decimal("0.5") * old_minimal_variant_price
def test_dashboard_sale_of_collection_create_view_updates_minimal_variant_price(
admin_client, product, collection
):
collection.products.add(product)
# Store the old "minimal_variant_price"
old_minimal_variant_price = product.minimal_variant_price
# Create the "Sale" object of the given collection
url = reverse("dashboard:sale-add")
data = {
"name": "Half price collection",
"type": DiscountValueType.PERCENTAGE,
"value": "50",
"start_date": "2019-07-11 00:00:01",
"collections": [collection.pk],
}
response = admin_client.post(url, data)
assert response.status_code == 302
product.refresh_from_db()
assert product.minimal_variant_price == Decimal("0.5") * old_minimal_variant_price
@patch(
"saleor.product.management.commands"
".update_all_products_minimal_variant_prices"
".update_product_minimal_variant_price"
)
def test_management_commmand_update_all_products_minimal_variant_price(
mock_update_product_minimal_variant_price, product_list
):
call_command("update_all_products_minimal_variant_prices")
call_args_list = mock_update_product_minimal_variant_price.call_args_list
for (args, kwargs), product in zip(call_args_list, product_list):
assert args[0] == product
|
py | 1a44cdf305dfaabff2e5b8c14546509ae87c96a3 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-17 18:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ConnectModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=25, verbose_name='Phone')),
('email', models.EmailField(max_length=25, verbose_name='Email')),
('phone', models.CharField(max_length=50, verbose_name='Address')),
('text', models.TextField(max_length=1000, verbose_name='In use')),
('time', models.DateTimeField(auto_now_add=True, verbose_name='Application time')),
],
options={
'verbose_name': 'Звернення',
'verbose_name_plural': 'Звернення',
},
),
migrations.AlterField(
model_name='mycontactmodel',
name='phone',
field=models.CharField(blank=True, default=None, max_length=20, null=True, verbose_name='Phone'),
),
]
|
py | 1a44ce714eab543edde1ad1dde73edbdfd067375 | import pytest
from lkmltools.google_auth_helper import GoogleAuthHelper
import os
import json
@pytest.fixture(scope="module")
def get_raw_json():
raw_json = {
"type": "service_account",
"project_id": "someproject",
"private_key_id": "xxx",
"private_key": "-----BEGIN PRIVATE KEY-----\nxxx-----END PRIVATE KEY-----\n",
"client_email": "[email protected]",
"client_id": "1234567890",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/someuser%40appspot.gserviceaccount.com",
}
return raw_json
@pytest.fixture(scope="module")
def get_encoded_json():
# this is the encoded version of the raw_json above, so doesn't contain any proper secrets.
    # The unit tests below confirm that decoding this byte string matches the JSON above
return b"eyd0eXBlJzogJ3NlcnZpY2VfYWNjb3VudCcsICdwcm9qZWN0X2lkJzogJ3NvbWVwcm9qZWN0JywgJ3ByaXZhdGVfa2V5X2lkJzogJ3h4eCcsICdwcml2YXRlX2tleSc6ICctLS0tLUJFR0lOIFBSSVZBVEUgS0VZLS0tLS1cbnh4eC0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS1cbicsICdjbGllbnRfZW1haWwnOiAnc29tZXVzZXJAYXBwc3BvdC5nc2VydmljZWFjY291bnQuY29tJywgJ2NsaWVudF9pZCc6ICcxMjM0NTY3ODkwJywgJ2F1dGhfdXJpJzogJ2h0dHBzOi8vYWNjb3VudHMuZ29vZ2xlLmNvbS9vL29hdXRoMi9hdXRoJywgJ3Rva2VuX3VyaSc6ICdodHRwczovL29hdXRoMi5nb29nbGVhcGlzLmNvbS90b2tlbicsICdhdXRoX3Byb3ZpZGVyX3g1MDlfY2VydF91cmwnOiAnaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vb2F1dGgyL3YxL2NlcnRzJywgJ2NsaWVudF94NTA5X2NlcnRfdXJsJzogJ2h0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3JvYm90L3YxL21ldGFkYXRhL3g1MDkvc29tZXVzZXIlNDBhcHBzcG90LmdzZXJ2aWNlYWNjb3VudC5jb20nfQ=="
def test_encode_service_account():
helper = GoogleAuthHelper()
encoded_json = helper.encode_service_account(get_raw_json())
assert encoded_json == get_encoded_json()
def test_decode_service_account():
helper = GoogleAuthHelper()
decoded_json = helper.decode_service_account(get_encoded_json())
assert decoded_json == get_raw_json()
def test_write_decoded_sa_json_to_file():
helper = GoogleAuthHelper()
filename = "tmp_test_decoded.json"
if os.path.exists(filename):
os.remove(filename)
helper.write_decoded_sa_json_to_file(get_encoded_json(), filename=filename)
assert os.path.exists(filename)
with open(filename, "r") as f:
data = json.load(f)
assert data == get_raw_json()
if os.path.exists(filename):
os.remove(filename)
|
py | 1a44cfcecf82251515046033c6ca8537473cb1dc | import pytest
import json
import os
from faker import Faker
from functools import partial
from utils import retry
@pytest.fixture(scope='module')
def module_config(request):
fake = Faker()
return {
'masters': [
{
'config': {
'container__config__salt_config__sls': [
'tests/sls/latest.sls',
'tests/sls/latest-again.sls',
'tests/sls/downloaded.sls',
'tests/sls/patches-downloaded.sls',
'tests/sls/pipes.sls',
'tests/sls/bsc1098394.sls',
]
},
'minions': [
{
'config': {
"container__config__name": 'minion_{0}_{1}_{2}'.format(fake.word(), fake.word(), os.environ.get('ST_JOB_ID', '')), # pylint: disable=no-member
}
},
{
'config': {
"container__config__name": 'minion_{0}_{1}_{2}'.format(fake.word(), fake.word(), os.environ.get('ST_JOB_ID', '')), # pylint: disable=no-member
}
}
]
}
]
}
@pytest.mark.skiptags('ubuntu')
def test_pkg_latest_version(setup):
config, initconfig = setup
master = config['masters'][0]
minion = master['minions'][0]
def test(master, minion):
try:
resp = master['fixture'].salt(minion['id'], 'state.apply latest')
assert resp
assert minion['id'] in resp
assert resp[minion['id']][
'pkg_|-latest-version_|-test-package_|-latest']['result'] is True
return True
except TypeError:
return False
assert retry(partial(test, master, minion))
@pytest.mark.skiptags('ubuntu')
def test_pkg_latest_version_already_installed(setup):
config, initconfig = setup
master = config['masters'][0]
minion = master['minions'][1]
def test(master, minion):
try:
resp = master['fixture'].salt(minion['id'], 'state.apply latest-again')
assert resp
assert minion['id'] in resp
assert resp[minion['id']][
'pkg_|-latest-version_|-test-package_|-latest']['result'] is True
return True
except TypeError:
return False
assert retry(partial(test, master, minion))
@pytest.mark.skiptags('ubuntu')
def test_pkg_installed_downloadonly(setup):
config, initconfig = setup
master = config['masters'][0]
minion = master['minions'][0]
list_pkgs_pre = master['fixture'].salt(minion['id'], 'pkg.list_pkgs')
resp = master['fixture'].salt(minion['id'], 'state.apply downloaded')
list_pkgs_post = master['fixture'].salt(minion['id'], 'pkg.list_pkgs')
assert resp[minion['id']][
'pkg_|-test-pkg-downloaded_|-test-package_|-installed']['result'] is True
assert list_pkgs_pre == list_pkgs_post
@pytest.mark.xfail
@pytest.mark.tags('sles', 'opensuse')
def test_patches_installed_downloadonly_sles(setup):
config, initconfig = setup
master = config['masters'][0]
minion = master['minions'][0]
patches = master['fixture'].salt(minion['id'],
'cmd.run "zypper --quiet patches | cut -d\'|\' -f2 | cut -d\' \' -f2"'
)[minion['id']].encode('utf-8').split(os.linesep)
patches = {"patches": filter(lambda x: "SUSE-SLE-SERVER" in x, patches)[:2]}
list_pkgs_pre = master['fixture'].salt(minion['id'], 'pkg.list_pkgs')
resp = master['fixture'].salt(minion['id'], 'state.apply patches-downloaded pillar=\'{0}\''.format(patches))
list_pkgs_post = master['fixture'].salt(minion['id'], 'pkg.list_pkgs')
assert resp[minion['id']]['pkg_|-test-patches-downloaded_|-test-patches-downloaded_|-installed']['result'] is True
assert list_pkgs_pre == list_pkgs_post
@pytest.mark.xfail
@pytest.mark.tags('rhel')
def test_patches_installed_downloadonly_rhel(setup):
config, initconfig = setup
master = config['masters'][0]
minion = master['minions'][0]
patches = master['fixture'].salt(minion['id'],
'cmd.run "yum info-sec | grep \'Update ID\' | cut -d\' \' -f6"'
)[minion['id']].encode('utf-8').split(os.linesep)
patches = {"patches": filter(lambda x: "RHBA" in x, patches)[:2]}
list_pkgs_pre = master['fixture'].salt(minion['id'], 'pkg.list_pkgs')
resp = master['fixture'].salt(minion['id'], 'state.apply patches-downloaded pillar=\'{0}\''.format(patches))
list_pkgs_post = master['fixture'].salt(minion['id'], 'pkg.list_pkgs')
assert resp[minion['id']]['pkg_|-test-patches-downloaded_|-test-patches-downloaded_|-installed']['result'] is True
assert list_pkgs_pre == list_pkgs_post
def test_pipes(setup, master):
res = master['container'].run('salt-call --local --output json --file-root=/etc/salt/sls state.apply pipes')
assert json.loads(str(res.decode()))["local"]["cmd_|-reboot_|-echo 'shutdown'_|-run"]['changes'] == {}
@pytest.mark.skiptags('ubuntu')
@pytest.mark.skip("skip it until the patch will be in the package")
def test_file_managed_bsc1098394(setup, master, minion):
master['container']['config']['client'].copy_to(
master,
'tests/data/1098394/master/rhn-org-trusted-ssl-cert-osimage-1.0-1.noarch.rpm',
'/etc/salt/sls/')
minion['container'].run('mkdir -p /tmp/bsc1098394/repo')
minion['container']['config']['client'].copy_to(
minion,
'tests/data/1098394/minion/rhn-org-trusted-ssl-cert-osimage-1.0-1.noarch.rpm',
'/tmp/bsc1098394/repo')
resp = master.salt(minion['id'], 'state.apply bsc1098394')
assert resp[minion['id']][
'file_|-/tmp/bsc1098394/repo/rhn-org-trusted-ssl-cert-osimage-1.0-1.noarch.rpm_|-/tmp/bsc1098394/repo/rhn-org-trusted-ssl-cert-osimage-1.0-1.noarch.rpm_|-managed'
]['result'] is True
|
py | 1a44cfe7f44f7374c64023c992f970665cc233c4 | """
27. Default manipulators
Each model gets an AddManipulator and ChangeManipulator by default.
"""
from django.db import models
class Musician(models.Model):
first_name = models.CharField(maxlength=30)
last_name = models.CharField(maxlength=30)
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
class Album(models.Model):
name = models.CharField(maxlength=100)
musician = models.ForeignKey(Musician)
release_date = models.DateField(blank=True, null=True)
def __str__(self):
return self.name
__test__ = {'API_TESTS':"""
>>> from django.utils.datastructures import MultiValueDict
# Create a Musician object via the default AddManipulator.
>>> man = Musician.AddManipulator()
>>> data = MultiValueDict({'first_name': ['Ella'], 'last_name': ['Fitzgerald']})
>>> man.get_validation_errors(data)
{}
>>> man.do_html2python(data)
>>> m1 = man.save(data)
# Verify it worked.
>>> Musician.objects.all()
[<Musician: Ella Fitzgerald>]
>>> [m1] == list(Musician.objects.all())
True
# Attempt to add a Musician without a first_name.
>>> man.get_validation_errors(MultiValueDict({'last_name': ['Blakey']}))
{'first_name': ['This field is required.']}
# Attempt to add a Musician without a first_name and last_name.
>>> man.get_validation_errors(MultiValueDict({}))
{'first_name': ['This field is required.'], 'last_name': ['This field is required.']}
# Attempt to create an Album without a name or musician.
>>> man = Album.AddManipulator()
>>> man.get_validation_errors(MultiValueDict({}))
{'musician': ['This field is required.'], 'name': ['This field is required.']}
# Attempt to create an Album with an invalid musician.
>>> man.get_validation_errors(MultiValueDict({'name': ['Sallies Fforth'], 'musician': ['foo']}))
{'musician': ["Select a valid choice; 'foo' is not in ['', '1']."]}
# Attempt to create an Album with an invalid release_date.
>>> man.get_validation_errors(MultiValueDict({'name': ['Sallies Fforth'], 'musician': ['1'], 'release_date': 'today'}))
{'release_date': ['Enter a valid date in YYYY-MM-DD format.']}
# Create an Album without a release_date (because it's optional).
>>> data = MultiValueDict({'name': ['Ella and Basie'], 'musician': ['1']})
>>> man.get_validation_errors(data)
{}
>>> man.do_html2python(data)
>>> a1 = man.save(data)
# Verify it worked.
>>> Album.objects.all()
[<Album: Ella and Basie>]
>>> Album.objects.get().musician
<Musician: Ella Fitzgerald>
# Create an Album with a release_date.
>>> data = MultiValueDict({'name': ['Ultimate Ella'], 'musician': ['1'], 'release_date': ['2005-02-13']})
>>> man.get_validation_errors(data)
{}
>>> man.do_html2python(data)
>>> a2 = man.save(data)
# Verify it worked.
>>> Album.objects.order_by('name')
[<Album: Ella and Basie>, <Album: Ultimate Ella>]
>>> a2 = Album.objects.get(pk=2)
>>> a2
<Album: Ultimate Ella>
>>> a2.release_date
datetime.date(2005, 2, 13)
"""}
|
py | 1a44d232af9bb0e211fa0d69b5c2085cf10f69ba | """ Cisco_IOS_XR_lpts_pa_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR lpts\-pa package operational data.
This module contains definitions
for the following management objects\:
lpts\-pa\: lpts pre\-ifib data
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class LptsPa(Entity):
"""
lpts pre\-ifib data
.. attribute:: entry_xr
lpts pa bindings
**type**\: :py:class:`EntryXr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lpts_pa_oper.LptsPa.EntryXr>`
.. attribute:: entries
lpts pa clients
**type**\: :py:class:`Entries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lpts_pa_oper.LptsPa.Entries>`
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa, self).__init__()
self._top_entity = None
self.yang_name = "lpts-pa"
self.yang_parent_name = "Cisco-IOS-XR-lpts-pa-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("entry-xr", ("entry_xr", LptsPa.EntryXr)), ("entries", ("entries", LptsPa.Entries))])
self._leafs = OrderedDict()
self.entry_xr = LptsPa.EntryXr()
self.entry_xr.parent = self
self._children_name_map["entry_xr"] = "entry-xr"
self.entries = LptsPa.Entries()
self.entries.parent = self
self._children_name_map["entries"] = "entries"
self._segment_path = lambda: "Cisco-IOS-XR-lpts-pa-oper:lpts-pa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa, [], name, value)
class EntryXr(Entity):
"""
lpts pa bindings
.. attribute:: entry
Data for single PA Binding
**type**\: list of :py:class:`Entry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lpts_pa_oper.LptsPa.EntryXr.Entry>`
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa.EntryXr, self).__init__()
self.yang_name = "entry-xr"
self.yang_parent_name = "lpts-pa"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("entry", ("entry", LptsPa.EntryXr.Entry))])
self._leafs = OrderedDict()
self.entry = YList(self)
self._segment_path = lambda: "entry-xr"
self._absolute_path = lambda: "Cisco-IOS-XR-lpts-pa-oper:lpts-pa/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa.EntryXr, [], name, value)
class Entry(Entity):
"""
Data for single PA Binding
.. attribute:: entry (key)
Single Binding entry
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: ctime
Creation Time
**type**\: :py:class:`Ctime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lpts_pa_oper.LptsPa.EntryXr.Entry.Ctime>`
.. attribute:: utime
Update Time
**type**\: :py:class:`Utime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lpts_pa_oper.LptsPa.EntryXr.Entry.Utime>`
.. attribute:: location
Rack/slot/instance
**type**\: int
**range:** 0..4294967295
.. attribute:: client_id
Client ID
**type**\: int
**range:** 0..4294967295
.. attribute:: vid
VR/VRF ID
**type**\: int
**range:** 0..4294967295
.. attribute:: cookie
Cookie
**type**\: int
**range:** 0..4294967295
.. attribute:: l3protocol
Layer 3 protocol
**type**\: int
**range:** 0..4294967295
.. attribute:: l4protocol
Layer 4 protocol
**type**\: int
**range:** 0..4294967295
.. attribute:: smask
Filter operation
**type**\: int
**range:** 0..4294967295
.. attribute:: ifs
Ifhandle
**type**\: int
**range:** 0..4294967295
.. attribute:: ptype
Packet type
**type**\: int
**range:** 0..4294967295
.. attribute:: local_ip
Local address
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
.. attribute:: remote_ip
Remote address
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
.. attribute:: local_len
Local address length
**type**\: int
**range:** 0..255
.. attribute:: remote_len
Remote address length
**type**\: int
**range:** 0..255
.. attribute:: local_port
Local port
**type**\: int
**range:** 0..65535
.. attribute:: remote_port
Remote port
**type**\: int
**range:** 0..65535
.. attribute:: packet_misc
L5 info
**type**\: int
**range:** 0..4294967295
.. attribute:: scope
Scope
**type**\: int
**range:** 0..4294967295
.. attribute:: client_flags
Client flags
**type**\: int
**range:** 0..4294967295
.. attribute:: min_ttl
Minimum TTL
**type**\: int
**range:** 0..255
.. attribute:: lazy_bindq_delay
lazy binding queue delay
**type**\: int
**range:** 0..4294967295
.. attribute:: ptq_delay
pending transactions queue delay
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa.EntryXr.Entry, self).__init__()
self.yang_name = "entry"
self.yang_parent_name = "entry-xr"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['entry']
self._child_classes = OrderedDict([("ctime", ("ctime", LptsPa.EntryXr.Entry.Ctime)), ("utime", ("utime", LptsPa.EntryXr.Entry.Utime))])
self._leafs = OrderedDict([
('entry', (YLeaf(YType.str, 'entry'), ['str'])),
('location', (YLeaf(YType.uint32, 'location'), ['int'])),
('client_id', (YLeaf(YType.uint32, 'client-id'), ['int'])),
('vid', (YLeaf(YType.uint32, 'vid'), ['int'])),
('cookie', (YLeaf(YType.uint32, 'cookie'), ['int'])),
('l3protocol', (YLeaf(YType.uint32, 'l3protocol'), ['int'])),
('l4protocol', (YLeaf(YType.uint32, 'l4protocol'), ['int'])),
('smask', (YLeaf(YType.uint32, 'smask'), ['int'])),
('ifs', (YLeaf(YType.uint32, 'ifs'), ['int'])),
('ptype', (YLeaf(YType.uint32, 'ptype'), ['int'])),
('local_ip', (YLeaf(YType.str, 'local-ip'), ['str'])),
('remote_ip', (YLeaf(YType.str, 'remote-ip'), ['str'])),
('local_len', (YLeaf(YType.uint8, 'local-len'), ['int'])),
('remote_len', (YLeaf(YType.uint8, 'remote-len'), ['int'])),
('local_port', (YLeaf(YType.uint16, 'local-port'), ['int'])),
('remote_port', (YLeaf(YType.uint16, 'remote-port'), ['int'])),
('packet_misc', (YLeaf(YType.uint32, 'packet-misc'), ['int'])),
('scope', (YLeaf(YType.uint32, 'scope'), ['int'])),
('client_flags', (YLeaf(YType.uint32, 'client-flags'), ['int'])),
('min_ttl', (YLeaf(YType.uint8, 'min-ttl'), ['int'])),
('lazy_bindq_delay', (YLeaf(YType.uint32, 'lazy-bindq-delay'), ['int'])),
('ptq_delay', (YLeaf(YType.uint32, 'ptq-delay'), ['int'])),
])
self.entry = None
self.location = None
self.client_id = None
self.vid = None
self.cookie = None
self.l3protocol = None
self.l4protocol = None
self.smask = None
self.ifs = None
self.ptype = None
self.local_ip = None
self.remote_ip = None
self.local_len = None
self.remote_len = None
self.local_port = None
self.remote_port = None
self.packet_misc = None
self.scope = None
self.client_flags = None
self.min_ttl = None
self.lazy_bindq_delay = None
self.ptq_delay = None
self.ctime = LptsPa.EntryXr.Entry.Ctime()
self.ctime.parent = self
self._children_name_map["ctime"] = "ctime"
self.utime = LptsPa.EntryXr.Entry.Utime()
self.utime.parent = self
self._children_name_map["utime"] = "utime"
self._segment_path = lambda: "entry" + "[entry='" + str(self.entry) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-lpts-pa-oper:lpts-pa/entry-xr/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa.EntryXr.Entry, ['entry', u'location', u'client_id', u'vid', u'cookie', u'l3protocol', u'l4protocol', u'smask', u'ifs', u'ptype', u'local_ip', u'remote_ip', u'local_len', u'remote_len', u'local_port', u'remote_port', u'packet_misc', u'scope', u'client_flags', u'min_ttl', u'lazy_bindq_delay', u'ptq_delay'], name, value)
class Ctime(Entity):
"""
Creation Time
.. attribute:: tv_sec
Time Sec
**type**\: int
**range:** 0..4294967295
.. attribute:: tv_nsec
Time Nanosec
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa.EntryXr.Entry.Ctime, self).__init__()
self.yang_name = "ctime"
self.yang_parent_name = "entry"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('tv_sec', (YLeaf(YType.uint32, 'tv-sec'), ['int'])),
('tv_nsec', (YLeaf(YType.uint32, 'tv-nsec'), ['int'])),
])
self.tv_sec = None
self.tv_nsec = None
self._segment_path = lambda: "ctime"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa.EntryXr.Entry.Ctime, [u'tv_sec', u'tv_nsec'], name, value)
class Utime(Entity):
"""
Update Time
.. attribute:: tv_sec
Time Sec
**type**\: int
**range:** 0..4294967295
.. attribute:: tv_nsec
Time Nanosec
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa.EntryXr.Entry.Utime, self).__init__()
self.yang_name = "utime"
self.yang_parent_name = "entry"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('tv_sec', (YLeaf(YType.uint32, 'tv-sec'), ['int'])),
('tv_nsec', (YLeaf(YType.uint32, 'tv-nsec'), ['int'])),
])
self.tv_sec = None
self.tv_nsec = None
self._segment_path = lambda: "utime"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa.EntryXr.Entry.Utime, [u'tv_sec', u'tv_nsec'], name, value)
class Entries(Entity):
"""
lpts pa clients
.. attribute:: entry
Data for single PA Client
**type**\: list of :py:class:`Entry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lpts_pa_oper.LptsPa.Entries.Entry>`
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa.Entries, self).__init__()
self.yang_name = "entries"
self.yang_parent_name = "lpts-pa"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("entry", ("entry", LptsPa.Entries.Entry))])
self._leafs = OrderedDict()
self.entry = YList(self)
self._segment_path = lambda: "entries"
self._absolute_path = lambda: "Cisco-IOS-XR-lpts-pa-oper:lpts-pa/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa.Entries, [], name, value)
class Entry(Entity):
"""
Data for single PA Client
.. attribute:: entry (key)
Single Client entry
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: flags
Client flags
**type**\: int
**range:** 0..4294967295
.. attribute:: open_flags
Open flags
**type**\: int
**range:** 0..4294967295
.. attribute:: location
Rack/slot/instance
**type**\: int
**range:** 0..4294967295
.. attribute:: client_id
Client ID
**type**\: int
**range:** 0..4294967295
.. attribute:: times
Transaction statisitics
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
_prefix = 'lpts-pa-oper'
_revision = '2015-11-09'
def __init__(self):
super(LptsPa.Entries.Entry, self).__init__()
self.yang_name = "entry"
self.yang_parent_name = "entries"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['entry']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('entry', (YLeaf(YType.str, 'entry'), ['str'])),
('flags', (YLeaf(YType.uint32, 'flags'), ['int'])),
('open_flags', (YLeaf(YType.uint32, 'open-flags'), ['int'])),
('location', (YLeaf(YType.uint32, 'location'), ['int'])),
('client_id', (YLeaf(YType.uint32, 'client-id'), ['int'])),
('times', (YLeaf(YType.str, 'times'), ['str'])),
])
self.entry = None
self.flags = None
self.open_flags = None
self.location = None
self.client_id = None
self.times = None
self._segment_path = lambda: "entry" + "[entry='" + str(self.entry) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-lpts-pa-oper:lpts-pa/entries/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(LptsPa.Entries.Entry, ['entry', u'flags', u'open_flags', u'location', u'client_id', u'times'], name, value)
def clone_ptr(self):
self._top_entity = LptsPa()
return self._top_entity
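# Minimal usage sketch (not part of the generated model): reading the lpts-pa
# operational data with ydk's CRUDService over NETCONF. The device address and
# credentials below are placeholders, and the provider signature can vary
# slightly between ydk releases.
if __name__ == "__main__":
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider

    provider = NetconfServiceProvider(address="192.0.2.1",   # placeholder device
                                      port=830,
                                      username="admin",       # placeholder credentials
                                      password="admin")
    crud = CRUDService()
    lpts_pa = crud.read(provider, LptsPa())                   # fetch operational data
    for client in lpts_pa.entries.entry:
        print("%s client-id=%s location=%s" % (client.entry, client.client_id, client.location))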
|
py | 1a44d3cbba5d4d2c24171f8b31f6f456d739cd6b | #!/usr/bin/env python
import sys, getopt
import glob, os
from streaming_eigenhashes import StreamingEigenhashes
help_message = 'usage example: python kmer_clusters.py -i /project/home/hashed_reads/ -o /project/home/cluster_vectors/ -p 16'
if __name__ == "__main__":
singleInstance = False
try:
opts, args = getopt.getopt(sys.argv[1:],'hi:o:p:s',["inputdir=","outputdir=","numproc"])
except:
print help_message
sys.exit(2)
for opt, arg in opts:
if opt in ('-h','--help'):
print help_message
sys.exit()
elif opt in ('-i','--inputdir'):
inputdir = arg
if inputdir[-1] != '/':
inputdir += '/'
elif opt in ('-o','--outputdir'):
outputdir = arg
if outputdir[-1] != '/':
outputdir += '/'
elif opt in ('-p','--numproc'):
num_proc = int(arg)
elif opt in ('-s','--single'):
singleInstance = True
### use -p option for multiprocessing
num_proc = -1
###
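# Pipeline: wrap the conditioned k-mer count hashes in a StreamingEigenhashes
# object, stream them as a corpus, and train an LSI model whose dimensionality
# is set to roughly 4/5 of the number of input hash files.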
hashobject = StreamingEigenhashes(inputdir,outputdir,get_pool=num_proc)
Kmer_Hash_Count_Files = glob.glob(os.path.join(hashobject.input_path,'*.count.hash.conditioned'))
hashobject.path_dict = {}
for i in range(len(Kmer_Hash_Count_Files)):
hashobject.path_dict[i] = Kmer_Hash_Count_Files[i]
corpus = hashobject.kmer_corpus_from_disk()
# This is a hack. Should do a better job choosing num_dims
lsi = hashobject.train_kmer_lsi(corpus,num_dims=len(hashobject.path_dict)*4/5,single=singleInstance)
lsi.save(hashobject.output_path+'kmer_lsi.gensim')
|
py | 1a44d3ed1acd6cf8d7976a3d4d626f7b111c4167 | # Copyright 2007-2010 by Peter Cock. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from __future__ import print_function
from Bio._py3k import basestring
import os
import warnings
try:
from StringIO import StringIO # Python 2
# Can't use cStringIO, quoting the documentation,
# "Unlike the StringIO module, this module is not able to accept
# Unicode strings that cannot be encoded as plain ASCII strings."
# Therefore can't use from Bio._py3k import StringIO
except ImportError:
from io import StringIO # Python 3
from io import BytesIO
from Bio import BiopythonWarning, BiopythonParserWarning
from Bio import SeqIO
from Bio import AlignIO
from Bio.SeqRecord import SeqRecord
from Bio.Seq import Seq, UnknownSeq
from Bio import Alphabet
from Bio.Align import MultipleSeqAlignment
# TODO - Convert this to using unittest, and check desired warnings
# are issued. Used to do that by capturing warnings to stdout and
# verifying via the print-and-compare check. However, there was some
# frustrating cross-platform inconsistency I couldn't resolve.
protein_alphas = [Alphabet.generic_protein]
dna_alphas = [Alphabet.generic_dna]
rna_alphas = [Alphabet.generic_rna]
nucleotide_alphas = [Alphabet.generic_nucleotide,
Alphabet.Gapped(Alphabet.generic_nucleotide)]
no_alpha_formats = ["fasta", "clustal", "phylip", "phylip-relaxed",
"phylip-sequential", "tab", "ig",
"stockholm", "emboss", "fastq", "fastq-solexa",
"fastq-illumina", "qual"]
possible_unknown_seq_formats = ["qual", "genbank", "gb", "embl", "imgt"]
#List of formats including alignment only file formats we can read AND write.
#The list is initially hard coded to preserve the original order of the unit
#test output, with any new formats added since appended to the end.
test_write_read_alignment_formats = ["fasta", "clustal", "phylip", "stockholm",
"phylip-relaxed"]
for format in sorted(SeqIO._FormatToWriter):
if format not in test_write_read_alignment_formats:
test_write_read_alignment_formats.append(format)
for format in sorted(AlignIO._FormatToWriter):
if format not in test_write_read_alignment_formats:
test_write_read_alignment_formats.append(format)
test_write_read_alignment_formats.remove("gb") # an alias for genbank
test_write_read_alignment_formats.remove("fastq-sanger") # an alias for fastq
# test_files is a list of tuples containing:
# - string: file format
# - boolean: alignment (requires all seqs be same length)
# - string: relative filename
# - integer: number of sequences
test_files = [
("sff", False, 'Roche/E3MFGYR02_random_10_reads.sff', 10),
#Following examples are also used in test_Clustalw.py
("clustal", True, 'Clustalw/cw02.aln', 2),
("clustal", True, 'Clustalw/opuntia.aln', 7),
("clustal", True, 'Clustalw/hedgehog.aln', 5),
("clustal", True, 'Clustalw/odd_consensus.aln', 2),
#Following nucleic examples are also used in test_SeqIO_FastaIO.py
("fasta", False, 'Fasta/lupine.nu', 1),
("fasta", False, 'Fasta/elderberry.nu', 1),
("fasta", False, 'Fasta/phlox.nu', 1),
("fasta", False, 'Fasta/centaurea.nu', 1),
("fasta", False, 'Fasta/wisteria.nu', 1),
("fasta", False, 'Fasta/sweetpea.nu', 1),
("fasta", False, 'Fasta/lavender.nu', 1),
#Following protein examples are also used in test_SeqIO_FastaIO.py
("fasta", False, 'Fasta/aster.pro', 1),
("fasta", False, 'Fasta/loveliesbleeding.pro', 1),
("fasta", False, 'Fasta/rose.pro', 1),
("fasta", False, 'Fasta/rosemary.pro', 1),
#Following examples are also used in test_BioSQL_SeqIO.py
("fasta", False, 'Fasta/f001', 1), # Protein
("fasta", False, 'Fasta/f002', 3), # DNA
#("fasta", False, 'Fasta/f003', 2), # Protein with comments
("fasta", False, 'Fasta/fa01', 2), # Protein with gaps
#Following are also used in test_SeqIO_features.py, see also NC_005816.gb
("fasta", False, 'GenBank/NC_005816.fna', 1),
("fasta", False, 'GenBank/NC_005816.ffn', 10),
("fasta", False, 'GenBank/NC_005816.faa', 10),
("fasta", False, 'GenBank/NC_000932.faa', 85),
("tab", False, 'GenBank/NC_005816.tsv', 10), # FASTA -> Tabbed
#Following examples are also used in test_GFF.py
("fasta", False, 'GFF/NC_001802.fna', 1), # upper case
("fasta", False, 'GFF/NC_001802lc.fna', 1), # lower case
("fasta", True, 'GFF/multi.fna', 3), # Trivial nucleotide alignment
#Following example is also used in test_registry.py
("fasta", False, 'Registry/seqs.fasta', 2), # contains blank line
#Following example is also used in test_Nexus.py
("nexus", True, 'Nexus/test_Nexus_input.nex', 9),
#Following examples are also used in test_SwissProt.py
("swiss", False, 'SwissProt/sp001', 1),
("swiss", False, 'SwissProt/sp002', 1),
("swiss", False, 'SwissProt/sp003', 1),
("swiss", False, 'SwissProt/sp004', 1),
("swiss", False, 'SwissProt/sp005', 1),
("swiss", False, 'SwissProt/sp006', 1),
("swiss", False, 'SwissProt/sp007', 1),
("swiss", False, 'SwissProt/sp008', 1),
("swiss", False, 'SwissProt/sp009', 1),
("swiss", False, 'SwissProt/sp010', 1),
("swiss", False, 'SwissProt/sp011', 1),
("swiss", False, 'SwissProt/sp012', 1),
("swiss", False, 'SwissProt/sp013', 1),
("swiss", False, 'SwissProt/sp014', 1),
("swiss", False, 'SwissProt/sp015', 1),
("swiss", False, 'SwissProt/sp016', 1),
#Following example is also used in test_registry.py
("swiss", False, 'Registry/EDD_RAT.dat', 1),
#Following examples are also used in test_Uniprot.py
("uniprot-xml", False, 'SwissProt/uni001', 1),
("uniprot-xml", False, 'SwissProt/uni002', 3),
("uniprot-xml", False, 'SwissProt/Q13639.xml', 1),
("swiss", False, 'SwissProt/Q13639.txt', 1),
#Following examples are also used in test_GenBank.py
("genbank", False, 'GenBank/noref.gb', 1),
("genbank", False, 'GenBank/cor6_6.gb', 6),
("genbank", False, 'GenBank/iro.gb', 1),
("genbank", False, 'GenBank/pri1.gb', 1),
("genbank", False, 'GenBank/arab1.gb', 1),
("genbank", False, 'GenBank/protein_refseq.gb', 1), # Old version
("genbank", False, 'GenBank/protein_refseq2.gb', 1), # Revised version
("genbank", False, 'GenBank/extra_keywords.gb', 1),
("genbank", False, 'GenBank/one_of.gb', 1),
("genbank", False, 'GenBank/NT_019265.gb', 1), # contig, no sequence
("genbank", False, 'GenBank/origin_line.gb', 1),
("genbank", False, 'GenBank/blank_seq.gb', 1),
("genbank", False, 'GenBank/dbsource_wrap.gb', 1),
("genbank", False, 'GenBank/NC_005816.gb', 1), # See also AE017046.embl
("genbank", False, 'GenBank/NC_000932.gb', 1),
("genbank", False, 'GenBank/pBAD30.gb', 1), # Odd LOCUS line from Vector NTI
# The next example is a truncated copy of gbvrl1.seq from
# ftp://ftp.ncbi.nih.gov/genbank/gbvrl1.seq.gz
# This includes an NCBI header, and the first three records:
("genbank", False, 'GenBank/gbvrl1_start.seq', 3),
#Following files are also used in test_GFF.py
("genbank", False, 'GFF/NC_001422.gbk', 1),
#Generated with Entrez.efetch("protein", id="16130152", rettype="gbwithparts")
("genbank", False, 'GenBank/NP_416719.gbwithparts', 1),
#GenPept file with nasty bond locations,
("genbank", False, 'GenBank/1MRR_A.gp', 1),
#Following files are currently only used here or in test_SeqIO_index.py:
("embl", False, 'EMBL/epo_prt_selection.embl', 9), # proteins
("embl", False, 'EMBL/patents.embl', 4), # more proteins, but no seq
("embl", False, 'EMBL/TRBG361.embl', 1),
("embl", False, 'EMBL/DD231055_edited.embl', 1),
("embl", False, 'EMBL/DD231055_edited2.embl', 1), #Partial ID line
("embl", False, 'EMBL/SC10H5.embl', 1), # Pre 2006 style ID line
("embl", False, 'EMBL/U87107.embl', 1), # Old ID line with SV line
("embl", False, 'EMBL/AAA03323.embl', 1), # 2008, PA line but no AC
("embl", False, 'EMBL/AE017046.embl', 1), # See also NC_005816.gb
("embl", False, 'EMBL/Human_contigs.embl', 2), # contigs, no sequences
("embl", False, 'EMBL/location_wrap.embl', 1), # wrapped locations and unspecified type
("embl", False, 'EMBL/A04195.imgt', 1), # features over indented for EMBL
("imgt", False, 'EMBL/A04195.imgt', 1), # features over indented for EMBL
("stockholm", True, 'Stockholm/simple.sth', 2),
("stockholm", True, 'Stockholm/funny.sth', 6),
#Following PHYLIP files are currently only used here and in test_AlignIO.py,
#and are mostly from Joseph Felsenstein's PHYLIP v3.6 documentation:
("phylip", True, 'Phylip/reference_dna.phy', 6),
("phylip", True, 'Phylip/reference_dna2.phy', 6),
("phylip", True, 'Phylip/hennigian.phy', 10),
("phylip", True, 'Phylip/horses.phy', 10),
("phylip", True, 'Phylip/random.phy', 10),
("phylip", True, 'Phylip/interlaced.phy', 3),
("phylip", True, 'Phylip/interlaced2.phy', 4),
#Following are EMBOSS simple or pairs format alignments
("emboss", True, 'Emboss/alignret.txt', 4),
("emboss", False, 'Emboss/needle.txt', 10),
("emboss", True, 'Emboss/water.txt', 2),
#Following PHD (PHRAP) sequencing files are also used in test_Phd.py
("phd", False, 'Phd/phd1', 3),
("phd", False, 'Phd/phd2', 1),
("phd", False, 'Phd/phd_solexa', 2),
("phd", False, 'Phd/phd_454', 1),
#Following ACE assembly files are also used in test_Ace.py
("ace", False, 'Ace/contig1.ace', 2),
("ace", False, 'Ace/consed_sample.ace', 1),
("ace", False, 'Ace/seq.cap.ace', 1),
#Following IntelliGenetics / MASE files are also used in test_intelligenetics.py
("ig", False, 'IntelliGenetics/TAT_mase_nuc.txt', 17),
("ig", True, 'IntelliGenetics/VIF_mase-pro.txt', 16),
#This next file is a MASE alignment but sequence O_ANT70 is shorter than
#the others (so loading it as an alignment will fail). Perhaps MASE doesn't
#write trailing gaps?
("ig", False, 'IntelliGenetics/vpu_nucaligned.txt', 9),
#Following NBRD-PIR files are used in test_nbrf.py
("pir", False, 'NBRF/B_nuc.pir', 444),
("pir", False, 'NBRF/Cw_prot.pir', 111),
("pir", False, 'NBRF/DMA_nuc.pir', 4),
("pir", False, 'NBRF/DMB_prot.pir', 6),
("pir", True, 'NBRF/clustalw.pir', 2),
#Following quality files are also used in the Bio.SeqIO.QualityIO doctests:
("fasta", True, 'Quality/example.fasta', 3),
("qual", False, 'Quality/example.qual', 3),
("fastq", True, 'Quality/example.fastq', 3), #Unix new lines
("fastq", True, 'Quality/example_dos.fastq', 3), #DOS/Windows new lines
("fastq", True, 'Quality/tricky.fastq', 4),
("fastq", False, 'Quality/sanger_faked.fastq', 1),
("fastq", False, 'Quality/sanger_93.fastq', 1),
("fastq-illumina", False, 'Quality/illumina_faked.fastq', 1),
("fastq-solexa", False, 'Quality/solexa_faked.fastq', 1),
("fastq-solexa", True, 'Quality/solexa_example.fastq', 5),
#Following examples are also used in test_SeqXML.py
("seqxml", False, 'SeqXML/dna_example.xml', 4),
("seqxml", False, 'SeqXML/rna_example.xml', 5),
("seqxml", False, 'SeqXML/protein_example.xml', 5),
#Following examples are also used in test_SeqIO_AbiIO.py
("abi", False, 'Abi/310.ab1', 1),
("abi", False, 'Abi/3100.ab1', 1),
("abi", False, 'Abi/3730.ab1', 1),
]
class ForwardOnlyHandle(object):
"""Mimic a network handle without seek and tell methods etc."""
def __init__(self, handle):
self._handle = handle
def __iter__(self):
return iter(self._handle)
def read(self, length=None):
if length is None:
return self._handle.read()
else:
return self._handle.read(length)
def readline(self):
return self._handle.readline()
def close(self):
return self._handle.close()
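# ForwardOnlyHandle is used further down (see the "forward-only-handle"
# torture test) to confirm the parsers never rely on seek()/tell(), i.e. that
# they can consume data arriving from a network-style stream.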
def compare_record(record_one, record_two):
"""This is meant to be a strict comparison for exact agreement..."""
assert isinstance(record_one, SeqRecord)
assert isinstance(record_two, SeqRecord)
assert record_one.seq is not None
assert record_two.seq is not None
if record_one.id != record_two.id:
return False
if record_one.name != record_two.name:
return False
if record_one.description != record_two.description:
return False
if len(record_one) != len(record_two):
return False
if isinstance(record_one.seq, UnknownSeq) \
and isinstance(record_two.seq, UnknownSeq):
#Jython didn't like us comparing the string of very long UnknownSeq
#object (out of heap memory error)
if record_one.seq._character != record_two.seq._character:
return False
elif str(record_one.seq) != str(record_two.seq):
return False
#TODO - check features and annotation (see code for BioSQL tests)
for key in set(record_one.letter_annotations).intersection(
record_two.letter_annotations):
if record_one.letter_annotations[key] != \
record_two.letter_annotations[key]:
return False
return True
def record_summary(record, indent=" "):
"""Returns a concise summary of a SeqRecord object as a string"""
if record.id == record.name:
answer = "%sID and Name='%s',\n%sSeq='" % (indent, record.id, indent)
else:
answer = "%sID = '%s', Name='%s',\n%sSeq='" % (indent, record.id, record.name, indent)
if record.seq is None:
answer += "None"
else:
if len(record.seq) > 50:
answer += str(record.seq[:40]) + "..." + str(record.seq[-7:])
else:
answer += str(record.seq)
answer += "', length=%i" % (len(record.seq))
return answer
def col_summary(col_text):
if len(col_text) < 65:
return col_text
else:
return col_text[:60] + "..." + col_text[-5:]
def alignment_summary(alignment, index=" "):
"""Returns a concise summary of an Alignment object as a string"""
answer = []
alignment_len = alignment.get_alignment_length()
rec_count = len(alignment)
for i in range(min(5, alignment_len)):
answer.append(index + col_summary(alignment[:, i])
+ " alignment column %i" % i)
if alignment_len > 5:
i = alignment_len - 1
answer.append(index + col_summary("|" * rec_count)
+ " ...")
answer.append(index + col_summary(alignment[:, i])
+ " alignment column %i" % i)
return "\n".join(answer)
def check_simple_write_read(records, indent=" "):
#print(indent+"Checking we can write and then read back these records")
for format in test_write_read_alignment_formats:
if format not in possible_unknown_seq_formats \
and isinstance(records[0].seq, UnknownSeq) \
and len(records[0].seq) > 100:
#Skipping for speed. Some of the unknown sequences are
#rather long, and it seems a bit pointless to record them.
continue
print(indent+"Checking can write/read as '%s' format" % format)
#Going to write to a handle...
if format in SeqIO._BinaryFormats:
handle = BytesIO()
else:
handle = StringIO()
try:
with warnings.catch_warnings():
#e.g. data loss
warnings.simplefilter("ignore", BiopythonWarning)
c = SeqIO.write(sequences=records, handle=handle, format=format)
assert c == len(records)
except (TypeError, ValueError) as e:
#This is often expected to happen, for example when we try and
#write sequences of different lengths to an alignment file.
if "len()" in str(e):
#Python 2.4.3,
#>>> len(None)
#...
#TypeError: len() of unsized object
#
#Python 2.5.2,
#>>> len(None)
#...
#TypeError: object of type 'NoneType' has no len()
print("Failed: Probably len() of None")
else:
print(indent+"Failed: %s" % str(e))
if records[0].seq.alphabet.letters is not None:
assert format != t_format, \
"Should be able to re-write in the original format!"
#Carry on to the next format:
continue
handle.flush()
handle.seek(0)
#Now ready to read back from the handle...
try:
records2 = list(SeqIO.parse(handle=handle, format=format))
except ValueError as e:
#This is BAD. We can't read our own output.
#I want to see the output when called from the test harness,
#run_tests.py (which can be funny about new lines on Windows)
handle.seek(0)
raise ValueError("%s\n\n%s\n\n%s"
% (str(e), repr(handle.read()), repr(records)))
assert len(records2) == t_count
for r1, r2 in zip(records, records2):
#Check the bare minimum (ID and sequence) as
#many formats can't store more than that.
assert len(r1) == len(r2)
#Check the sequence
if format in ["gb", "genbank", "embl", "imgt"]:
#The GenBank/EMBL parsers will convert to upper case.
if isinstance(r1.seq, UnknownSeq) \
and isinstance(r2.seq, UnknownSeq):
#Jython didn't like us comparing the string of very long
#UnknownSeq object (out of heap memory error)
assert r1.seq._character.upper() == r2.seq._character
else:
assert str(r1.seq).upper() == str(r2.seq)
elif format == "qual":
assert isinstance(r2.seq, UnknownSeq)
assert len(r2) == len(r1)
else:
assert str(r1.seq) == str(r2.seq)
#Beware of different quirks and limitations in the
#valid character sets and the identifier lengths!
if format in ["phylip", "phylip-sequential"]:
assert r1.id.replace("[", "").replace("]", "")[:10] == r2.id, \
"'%s' vs '%s'" % (r1.id, r2.id)
elif format=="phylip-relaxed":
assert r1.id.replace(" ", "").replace(':', '|') == r2.id, \
"'%s' vs '%s'" % (r1.id, r2.id)
elif format=="clustal":
assert r1.id.replace(" ", "_")[:30] == r2.id, \
"'%s' vs '%s'" % (r1.id, r2.id)
elif format=="stockholm":
assert r1.id.replace(" ", "_") == r2.id, \
"'%s' vs '%s'" % (r1.id, r2.id)
elif format=="fasta":
assert r1.id.split()[0] == r2.id
else:
assert r1.id == r2.id, \
"'%s' vs '%s'" % (r1.id, r2.id)
if len(records)>1:
#Try writing just one record (passing a SeqRecord, not a list)
if format in SeqIO._BinaryFormats:
handle = BytesIO()
else:
handle = StringIO()
SeqIO.write(records[0], handle, format)
assert handle.getvalue() == records[0].format(format)
#Check parsers can cope with an empty file
for t_format in SeqIO._FormatToIterator:
if t_format in SeqIO._BinaryFormats or \
t_format in ("uniprot-xml", "pdb-seqres", "pdb-atom"):
#Not allowed empty SFF files.
continue
handle = StringIO()
records = list(SeqIO.parse(handle, t_format))
assert len(records) == 0
for (t_format, t_alignment, t_filename, t_count) in test_files:
if t_format in SeqIO._BinaryFormats:
mode = "rb"
else:
mode = "r"
print("Testing reading %s format file %s" % (t_format, t_filename))
assert os.path.isfile(t_filename), t_filename
with warnings.catch_warnings():
# e.g. BiopythonParserWarning: Dropping bond qualifier in feature location
warnings.simplefilter("ignore", BiopythonParserWarning)
#Try as an iterator using handle
h = open(t_filename, mode)
records = list(SeqIO.parse(handle=h, format=t_format))
h.close()
assert len(records) == t_count, \
"Found %i records but expected %i" % (len(records), t_count)
#Try using the iterator with a for loop, and a filename not handle
records2 = []
for record in SeqIO.parse(t_filename, format=t_format):
records2.append(record)
assert len(records2) == t_count
#Try using the iterator with the next() method
records3 = []
h = open(t_filename, mode)
seq_iterator = SeqIO.parse(handle=h, format=t_format)
while True:
try:
record = next(seq_iterator)
except StopIteration:
break
assert record is not None, "Should raise StopIteration not return None"
records3.append(record)
h.close()
#Try a mixture of next() and list (a torture test!)
h = open(t_filename, mode)
seq_iterator = SeqIO.parse(handle=h, format=t_format)
try:
record = next(seq_iterator)
except StopIteration:
record = None
if record is not None:
records4 = [record]
records4.extend(list(seq_iterator))
else:
records4 = []
assert len(records4) == t_count
h.close()
#Try a mixture of next() and for loop (a torture test!)
#with a forward-only-handle
if t_format == "abi":
#Temp hack
h = open(t_filename, mode)
else:
h = ForwardOnlyHandle(open(t_filename, mode))
seq_iterator = SeqIO.parse(h, format=t_format)
try:
record = next(seq_iterator)
except StopIteration:
record = None
if record is not None:
records5 = [record]
for record in seq_iterator:
records5.append(record)
else:
records5 = []
assert len(records5) == t_count
h.close()
for i in range(t_count):
record = records[i]
#Check returned expected object type
assert isinstance(record, SeqRecord)
if t_format in possible_unknown_seq_formats:
assert isinstance(record.seq, Seq) or \
isinstance(record.seq, UnknownSeq)
else:
assert isinstance(record.seq, Seq)
assert isinstance(record.id, basestring)
assert isinstance(record.name, basestring)
assert isinstance(record.description, basestring)
assert record.id != ""
if "accessions" in record.annotations:
accs = record.annotations["accessions"]
#Check for blanks, or entries with leading/trailing spaces
for acc in accs:
assert acc and acc == acc.strip(), \
"Bad accession in annotations: %s" % repr(acc)
assert len(set(accs)) == len(accs), \
"Repeated accession in annotations: %s" % repr(accs)
for ref in record.dbxrefs:
assert ref and ref == ref.strip(), \
"Bad cross reference in dbxrefs: %s" % repr(ref)
assert len(record.dbxrefs) == len(record.dbxrefs), \
"Repeated cross reference in dbxrefs: %s" % repr(record.dbxrefs)
#Check the lists obtained by the different methods agree
assert compare_record(record, records2[i])
assert compare_record(record, records3[i])
assert compare_record(record, records4[i])
assert compare_record(record, records5[i])
if i < 3:
print(record_summary(record))
# Only the first three records (0, 1, 2) were printed above
if t_count > 4:
print(" ...")
if t_count > 3:
print(record_summary(records[-1]))
# Check Bio.SeqIO.read(...)
if t_count == 1:
record = SeqIO.read(t_filename, format=t_format)
assert isinstance(record, SeqRecord)
else:
try:
record = SeqIO.read(t_filename, t_format)
assert False, "Bio.SeqIO.read(...) should have failed"
except ValueError:
#Expected to fail
pass
# Check alphabets
for record in records:
base_alpha = Alphabet._get_base_alphabet(record.seq.alphabet)
if isinstance(base_alpha, Alphabet.SingleLetterAlphabet):
if t_format in no_alpha_formats:
assert base_alpha == Alphabet.single_letter_alphabet # Too harsh?
else:
base_alpha = None
if base_alpha is None:
good = []
bad =[]
given_alpha=None
elif isinstance(base_alpha, Alphabet.ProteinAlphabet):
good = protein_alphas
bad = dna_alphas + rna_alphas + nucleotide_alphas
elif isinstance(base_alpha, Alphabet.RNAAlphabet):
good = nucleotide_alphas + rna_alphas
bad = protein_alphas + dna_alphas
elif isinstance(base_alpha, Alphabet.DNAAlphabet):
good = nucleotide_alphas + dna_alphas
bad = protein_alphas + rna_alphas
elif isinstance(base_alpha, Alphabet.NucleotideAlphabet):
good = nucleotide_alphas
bad = protein_alphas
else:
assert t_format in no_alpha_formats, "Got %s from %s file" \
% (repr(base_alpha), t_format)
good = protein_alphas + dna_alphas + rna_alphas + nucleotide_alphas
bad = []
for given_alpha in good:
#These should all work...
given_base = Alphabet._get_base_alphabet(given_alpha)
for record in SeqIO.parse(t_filename, t_format, given_alpha):
base_alpha = Alphabet._get_base_alphabet(record.seq.alphabet)
assert isinstance(base_alpha, given_base.__class__)
assert base_alpha == given_base
if t_count == 1:
h = open(t_filename, mode)
record = SeqIO.read(h, t_format, given_alpha)
h.close()
assert isinstance(base_alpha, given_base.__class__)
assert base_alpha == given_base
for given_alpha in bad:
#These should all fail...
h = open(t_filename, mode)
try:
print(next(SeqIO.parse(h, t_format, given_alpha)))
h.close()
assert False, "Forcing wrong alphabet, %s, should fail (%s)" \
% (repr(given_alpha), t_filename)
except ValueError:
#Good - should fail
pass
h.close()
del good, bad, given_alpha, base_alpha
if t_alignment:
print("Testing reading %s format file %s as an alignment" \
% (t_format, t_filename))
alignment = MultipleSeqAlignment(SeqIO.parse(
handle=t_filename, format=t_format))
assert len(alignment) == t_count
alignment_len = alignment.get_alignment_length()
#Check the record order agrees, and double check the
#sequence lengths all agree too.
for i in range(t_count):
assert compare_record(records[i], alignment[i])
assert len(records[i].seq) == alignment_len
print(alignment_summary(alignment))
#Some alignment file formats have magic characters which mean
#use the letter in this position in the first sequence.
#They should all have been converted by the parser, but if
#not, reversing the record order might expose an error. Maybe.
records.reverse()
check_simple_write_read(records)
print("Finished tested reading files")
|
py | 1a44d41b3afc49facc82f01a0c4576ebb60ed07b | #
# PySNMP MIB module EQLACCESS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/EQLACCESS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:50:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
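# Usage note (illustrative, not part of the generated module): a compiled MIB
# module like this is normally executed by pysnmp's MibBuilder rather than
# imported directly, since `mibBuilder` is injected into its namespace at load
# time. A rough sketch of how an application would load it to resolve symbols:
#
#   from pysnmp.smi import builder, view
#   mibBuilder = builder.MibBuilder()
#   mibBuilder.loadModules('EQLACCESS-MIB')
#   mibView = view.MibViewController(mibBuilder)
#   eqlAccessGroupName, = mibBuilder.importSymbols('EQLACCESS-MIB', 'eqlAccessGroupName')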
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint")
UTFString, eqlStorageGroupAdminAccountIndex, eqlGroupId = mibBuilder.importSymbols("EQLGROUP-MIB", "UTFString", "eqlStorageGroupAdminAccountIndex", "eqlGroupId")
ACLAppliesTo, eqliscsiVolumeIndex, eqliscsiLocalMemberId = mibBuilder.importSymbols("EQLVOLUME-MIB", "ACLAppliesTo", "eqliscsiVolumeIndex", "eqliscsiLocalMemberId")
equalLogic, = mibBuilder.importSymbols("EQUALLOGIC-SMI", "equalLogic")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
IpAddress, Integer32, enterprises, Counter64, TimeTicks, Bits, Counter32, iso, Unsigned32, NotificationType, Gauge32, ObjectIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "Integer32", "enterprises", "Counter64", "TimeTicks", "Bits", "Counter32", "iso", "Unsigned32", "NotificationType", "Gauge32", "ObjectIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity")
TextualConvention, TruthValue, RowStatus, RowPointer, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "RowStatus", "RowPointer", "DisplayString")
eqlAccessModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 12740, 24))
eqlAccessModule.setRevisions(('2012-05-01 00:00',))
if mibBuilder.loadTexts: eqlAccessModule.setLastUpdated('201403121459Z')
if mibBuilder.loadTexts: eqlAccessModule.setOrganization('Dell Inc.')
eqlAccessObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 12740, 24, 1))
eqlAccessNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 12740, 24, 2))
eqlAccessConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 12740, 24, 3))
eqlAccessGroupTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1), )
if mibBuilder.loadTexts: eqlAccessGroupTable.setStatus('current')
eqlAccessGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"))
if mibBuilder.loadTexts: eqlAccessGroupEntry.setStatus('current')
eqlAccessGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 1), Unsigned32())
if mibBuilder.loadTexts: eqlAccessGroupIndex.setStatus('current')
eqlAccessGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupRowStatus.setStatus('current')
eqlAccessGroupUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 3), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupUUID.setStatus('current')
eqlAccessGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 4), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupName.setStatus('current')
eqlAccessGroupKeyName = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 5), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessGroupKeyName.setStatus('current')
eqlAccessGroupDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 6), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupDescription.setStatus('current')
eqlAccessGroupAdminKey = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 7), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupAdminKey.setStatus('current')
eqlAccessGroupType = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access-group", 1), ("access-record", 2))).clone('access-record')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupType.setStatus('current')
eqlAccessGroupPrivacyFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("public", 1), ("private", 2))).clone('private')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupPrivacyFlag.setStatus('current')
eqlAccessGroupByTypeTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 2), )
if mibBuilder.loadTexts: eqlAccessGroupByTypeTable.setStatus('current')
eqlAccessGroupByTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 2, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupType"), (0, "EQLACCESS-MIB", "eqlAccessGroupIndex"))
if mibBuilder.loadTexts: eqlAccessGroupByTypeEntry.setStatus('current')
eqlAccessGroupByTypeUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 2, 1, 1), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupByTypeUUID.setStatus('current')
eqlAccessGroupByTypeName = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 2, 1, 2), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupByTypeName.setStatus('current')
eqlAccessGroupByTypeDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 2, 1, 3), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupByTypeDescription.setStatus('current')
eqlAccessGroupByTypeAdminKey = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 2, 1, 4), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupByTypeAdminKey.setStatus('current')
eqlAccessGroupMemberTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 3), )
if mibBuilder.loadTexts: eqlAccessGroupMemberTable.setStatus('current')
eqlAccessGroupMemberEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 3, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"), (0, "EQLACCESS-MIB", "eqlAccessGroupChildIndex"))
if mibBuilder.loadTexts: eqlAccessGroupMemberEntry.setStatus('current')
eqlAccessGroupChildIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 3, 1, 1), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupChildIndex.setStatus('current')
eqlAccessGroupMemberRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupMemberRowStatus.setStatus('current')
eqlAccessPointTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4), )
if mibBuilder.loadTexts: eqlAccessPointTable.setStatus('current')
eqlAccessPointEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"), (0, "EQLACCESS-MIB", "eqlAccessPointIndex"))
if mibBuilder.loadTexts: eqlAccessPointEntry.setStatus('current')
eqlAccessPointIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4, 1, 1), Unsigned32())
if mibBuilder.loadTexts: eqlAccessPointIndex.setStatus('current')
eqlAccessPointRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointRowStatus.setStatus('current')
eqlAccessPointInitiatorName = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4, 1, 3), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 223))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointInitiatorName.setStatus('current')
eqlAccessPointInitiatorCHAPUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4, 1, 4), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointInitiatorCHAPUserName.setStatus('current')
eqlAccessPointDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 4, 1, 5), UTFString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointDescription.setStatus('current')
eqlAccessPointAddrTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5), )
if mibBuilder.loadTexts: eqlAccessPointAddrTable.setStatus('current')
eqlAccessPointAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"), (0, "EQLACCESS-MIB", "eqlAccessPointIndex"), (0, "EQLACCESS-MIB", "eqlAccessPointAddrIndex"))
if mibBuilder.loadTexts: eqlAccessPointAddrEntry.setStatus('current')
eqlAccessPointAddrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1, 1), Unsigned32())
if mibBuilder.loadTexts: eqlAccessPointAddrIndex.setStatus('current')
eqlAccessPointAddrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointAddrRowStatus.setStatus('current')
eqlAccessPointAddrInitiatorAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1, 3), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointAddrInitiatorAddrType.setStatus('current')
eqlAccessPointAddrInitiatorAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1, 4), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointAddrInitiatorAddr.setStatus('current')
eqlAccessPointAddrInitiatorAddrWildcardType = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1, 5), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointAddrInitiatorAddrWildcardType.setStatus('current')
eqlAccessPointAddrInitiatorAddrWildcard = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 5, 1, 6), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessPointAddrInitiatorAddrWildcard.setStatus('current')
eqlAccessGroupObjectAssocTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6), )
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocTable.setStatus('current')
eqlAccessGroupObjectAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"), (0, "EQLACCESS-MIB", "eqlAccessGroupObjectAssocIndex"))
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocEntry.setStatus('current')
eqlAccessGroupObjectAssocIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6, 1, 1), Unsigned32())
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocIndex.setStatus('current')
eqlAccessGroupObjectAssocRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocRowStatus.setStatus('current')
eqlAccessGroupObjectAssocOID = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6, 1, 3), RowPointer()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocOID.setStatus('current')
eqlAccessGroupObjectAssocFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6, 1, 4), ACLAppliesTo()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocFlag.setStatus('current')
eqlAccessGroupObjectAssocCreator = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 6, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vCenter", 1), ("gui", 2), ("cli", 3), ("other", 4))).clone('other')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: eqlAccessGroupObjectAssocCreator.setStatus('current')
eqlAccessGroupVolumeAssocTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 7), )
if mibBuilder.loadTexts: eqlAccessGroupVolumeAssocTable.setStatus('current')
eqlAccessGroupVolumeAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 7, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"), (0, "EQLVOLUME-MIB", "eqliscsiLocalMemberId"), (0, "EQLVOLUME-MIB", "eqliscsiVolumeIndex"))
if mibBuilder.loadTexts: eqlAccessGroupVolumeAssocEntry.setStatus('current')
eqlAccessGroupVolumeAssocFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 7, 1, 1), ACLAppliesTo()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessGroupVolumeAssocFlag.setStatus('current')
eqlAccessGroupVolumeAssocObjectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 7, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessGroupVolumeAssocObjectIndex.setStatus('current')
eqlVolumeAccessGroupAssocTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 8), )
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocTable.setStatus('current')
eqlVolumeAccessGroupAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 8, 1), ).setIndexNames((0, "EQLVOLUME-MIB", "eqliscsiLocalMemberId"), (0, "EQLVOLUME-MIB", "eqliscsiVolumeIndex"), (0, "EQLACCESS-MIB", "eqlAccessGroupIndex"))
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocEntry.setStatus('current')
eqlVolumeAccessGroupAssocFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 8, 1, 1), ACLAppliesTo()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocFlag.setStatus('current')
eqlVolumeAccessGroupAssocObjectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 8, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocObjectIndex.setStatus('current')
eqlAccessGroupSharedVolumeAssocTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 9), )
if mibBuilder.loadTexts: eqlAccessGroupSharedVolumeAssocTable.setStatus('current')
eqlAccessGroupSharedVolumeAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 9, 1), ).setIndexNames((0, "EQLACCESS-MIB", "eqlAccessGroupIndex"), (0, "EQLVOLUME-MIB", "eqliscsiLocalMemberId"), (0, "EQLVOLUME-MIB", "eqliscsiVolumeIndex"))
if mibBuilder.loadTexts: eqlAccessGroupSharedVolumeAssocEntry.setStatus('current')
eqlAccessGroupSharedVolumeAssocFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 9, 1, 1), ACLAppliesTo()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessGroupSharedVolumeAssocFlag.setStatus('current')
eqlAccessGroupSharedVolumeAssocObjectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 9, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessGroupSharedVolumeAssocObjectIndex.setStatus('current')
eqlSharedVolumeAccessGroupAssocTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 10), )
if mibBuilder.loadTexts: eqlSharedVolumeAccessGroupAssocTable.setStatus('current')
eqlSharedVolumeAccessGroupAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 10, 1), ).setIndexNames((0, "EQLVOLUME-MIB", "eqliscsiLocalMemberId"), (0, "EQLVOLUME-MIB", "eqliscsiVolumeIndex"), (0, "EQLACCESS-MIB", "eqlAccessGroupIndex"))
if mibBuilder.loadTexts: eqlSharedVolumeAccessGroupAssocEntry.setStatus('current')
eqlSharedVolumeAccessGroupAssocFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 10, 1, 1), ACLAppliesTo()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlSharedVolumeAccessGroupAssocFlag.setStatus('current')
eqlSharedVolumeAccessGroupAssocObjectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 10, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlSharedVolumeAccessGroupAssocObjectIndex.setStatus('current')
eqlAdminAccountAccessGroupTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 11), )
if mibBuilder.loadTexts: eqlAdminAccountAccessGroupTable.setStatus('current')
eqlAdminAccountAccessGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 11, 1), ).setIndexNames((0, "EQLGROUP-MIB", "eqlGroupId"), (0, "EQLGROUP-MIB", "eqlStorageGroupAdminAccountIndex"), (0, "EQLACCESS-MIB", "eqlAccessGroupIndex"))
if mibBuilder.loadTexts: eqlAdminAccountAccessGroupEntry.setStatus('current')
eqlAdminAccountAccessGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 11, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAdminAccountAccessGroupRowStatus.setStatus('current')
eqlAdminAccountAccessGroupAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("read-only", 1), ("read-write", 2))).clone('read-only')).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAdminAccountAccessGroupAccess.setStatus('current')
eqlACLCountTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12), )
if mibBuilder.loadTexts: eqlACLCountTable.setStatus('current')
eqlACLCountEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1), ).setIndexNames((0, "EQLGROUP-MIB", "eqlGroupId"))
if mibBuilder.loadTexts: eqlACLCountEntry.setStatus('current')
eqlACLCountUserDefined = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlACLCountUserDefined.setStatus('current')
eqlACLCountMPIO = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlACLCountMPIO.setStatus('current')
eqlACLCountTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlACLCountTotal.setStatus('current')
eqlMaxAccessGroupCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlMaxAccessGroupCount.setStatus('current')
eqlMaxAccessRecordCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlMaxAccessRecordCount.setStatus('current')
eqlMaxAccessPointCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlMaxAccessPointCount.setStatus('current')
eqlMaxAccessPointIPAddrCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlMaxAccessPointIPAddrCount.setStatus('current')
eqlMaxAssociationCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlMaxAssociationCount.setStatus('current')
eqlAccessGroupCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessGroupCount.setStatus('current')
eqlAccessRecordCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 10), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessRecordCount.setStatus('current')
eqlAccessPointCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessPointCount.setStatus('current')
eqlAccessPointIPAddrCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAccessPointIPAddrCount.setStatus('current')
eqlAssociationCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 12, 1, 13), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlAssociationCount.setStatus('current')
eqlVolumeAccessGroupAssocCountTable = MibTable((1, 3, 6, 1, 4, 1, 12740, 24, 1, 13), )
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocCountTable.setStatus('current')
eqlVolumeAccessGroupAssocCountEntry = MibTableRow((1, 3, 6, 1, 4, 1, 12740, 24, 1, 13, 1), ).setIndexNames((0, "EQLVOLUME-MIB", "eqliscsiLocalMemberId"), (0, "EQLVOLUME-MIB", "eqliscsiVolumeIndex"))
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocCountEntry.setStatus('current')
eqlVolumeAccessGroupAssocCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 13, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlVolumeAccessGroupAssocCount.setStatus('current')
eqlVolumeAccessRecordAssocCount = MibTableColumn((1, 3, 6, 1, 4, 1, 12740, 24, 1, 13, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eqlVolumeAccessRecordAssocCount.setStatus('current')
mibBuilder.exportSymbols("EQLACCESS-MIB", eqlVolumeAccessGroupAssocFlag=eqlVolumeAccessGroupAssocFlag, eqlMaxAccessRecordCount=eqlMaxAccessRecordCount, eqlAdminAccountAccessGroupTable=eqlAdminAccountAccessGroupTable, eqlAccessGroupByTypeAdminKey=eqlAccessGroupByTypeAdminKey, eqlAccessGroupSharedVolumeAssocEntry=eqlAccessGroupSharedVolumeAssocEntry, eqlACLCountUserDefined=eqlACLCountUserDefined, eqlAccessGroupAdminKey=eqlAccessGroupAdminKey, eqlAdminAccountAccessGroupEntry=eqlAdminAccountAccessGroupEntry, eqlAccessPointAddrInitiatorAddrWildcard=eqlAccessPointAddrInitiatorAddrWildcard, eqlAccessPointAddrRowStatus=eqlAccessPointAddrRowStatus, eqlAccessGroupPrivacyFlag=eqlAccessGroupPrivacyFlag, eqlAccessPointAddrInitiatorAddr=eqlAccessPointAddrInitiatorAddr, eqlAccessGroupDescription=eqlAccessGroupDescription, eqlAccessGroupByTypeName=eqlAccessGroupByTypeName, eqlAccessModule=eqlAccessModule, eqlAccessPointCount=eqlAccessPointCount, eqlAccessPointAddrInitiatorAddrWildcardType=eqlAccessPointAddrInitiatorAddrWildcardType, eqlAccessGroupByTypeUUID=eqlAccessGroupByTypeUUID, eqlSharedVolumeAccessGroupAssocEntry=eqlSharedVolumeAccessGroupAssocEntry, eqlAccessPointIPAddrCount=eqlAccessPointIPAddrCount, eqlACLCountTotal=eqlACLCountTotal, eqlAdminAccountAccessGroupAccess=eqlAdminAccountAccessGroupAccess, eqlAccessGroupSharedVolumeAssocTable=eqlAccessGroupSharedVolumeAssocTable, eqlAccessGroupVolumeAssocObjectIndex=eqlAccessGroupVolumeAssocObjectIndex, eqlAccessGroupType=eqlAccessGroupType, eqlAccessGroupObjectAssocEntry=eqlAccessGroupObjectAssocEntry, eqlVolumeAccessGroupAssocCountEntry=eqlVolumeAccessGroupAssocCountEntry, eqlAccessConformance=eqlAccessConformance, eqlVolumeAccessGroupAssocCountTable=eqlVolumeAccessGroupAssocCountTable, eqlAccessGroupRowStatus=eqlAccessGroupRowStatus, eqlSharedVolumeAccessGroupAssocTable=eqlSharedVolumeAccessGroupAssocTable, eqlAccessGroupMemberEntry=eqlAccessGroupMemberEntry, eqlMaxAccessPointIPAddrCount=eqlMaxAccessPointIPAddrCount, eqlAccessGroupByTypeEntry=eqlAccessGroupByTypeEntry, eqlVolumeAccessRecordAssocCount=eqlVolumeAccessRecordAssocCount, PYSNMP_MODULE_ID=eqlAccessModule, eqlAccessGroupSharedVolumeAssocFlag=eqlAccessGroupSharedVolumeAssocFlag, eqlAccessGroupEntry=eqlAccessGroupEntry, eqlAccessGroupObjectAssocOID=eqlAccessGroupObjectAssocOID, eqlAccessGroupMemberTable=eqlAccessGroupMemberTable, eqlAssociationCount=eqlAssociationCount, eqlAccessPointIndex=eqlAccessPointIndex, eqlAccessGroupByTypeDescription=eqlAccessGroupByTypeDescription, eqlAccessPointAddrEntry=eqlAccessPointAddrEntry, eqlAccessObjects=eqlAccessObjects, eqlACLCountMPIO=eqlACLCountMPIO, eqlAccessPointAddrInitiatorAddrType=eqlAccessPointAddrInitiatorAddrType, eqlAccessGroupVolumeAssocEntry=eqlAccessGroupVolumeAssocEntry, eqlACLCountEntry=eqlACLCountEntry, eqlVolumeAccessGroupAssocEntry=eqlVolumeAccessGroupAssocEntry, eqlAccessPointAddrTable=eqlAccessPointAddrTable, eqlVolumeAccessGroupAssocCount=eqlVolumeAccessGroupAssocCount, eqlMaxAccessGroupCount=eqlMaxAccessGroupCount, eqlAccessGroupByTypeTable=eqlAccessGroupByTypeTable, eqlAccessPointInitiatorCHAPUserName=eqlAccessPointInitiatorCHAPUserName, eqlSharedVolumeAccessGroupAssocFlag=eqlSharedVolumeAccessGroupAssocFlag, eqlAccessGroupObjectAssocCreator=eqlAccessGroupObjectAssocCreator, eqlAccessGroupIndex=eqlAccessGroupIndex, eqlAccessGroupChildIndex=eqlAccessGroupChildIndex, eqlAccessGroupVolumeAssocFlag=eqlAccessGroupVolumeAssocFlag, eqlAccessGroupObjectAssocRowStatus=eqlAccessGroupObjectAssocRowStatus, eqlACLCountTable=eqlACLCountTable, 
eqlAccessGroupCount=eqlAccessGroupCount, eqlAccessPointAddrIndex=eqlAccessPointAddrIndex, eqlMaxAssociationCount=eqlMaxAssociationCount, eqlAccessPointDescription=eqlAccessPointDescription, eqlAccessGroupSharedVolumeAssocObjectIndex=eqlAccessGroupSharedVolumeAssocObjectIndex, eqlMaxAccessPointCount=eqlMaxAccessPointCount, eqlAdminAccountAccessGroupRowStatus=eqlAdminAccountAccessGroupRowStatus, eqlAccessPointInitiatorName=eqlAccessPointInitiatorName, eqlAccessGroupName=eqlAccessGroupName, eqlAccessGroupObjectAssocIndex=eqlAccessGroupObjectAssocIndex, eqlAccessPointTable=eqlAccessPointTable, eqlAccessGroupKeyName=eqlAccessGroupKeyName, eqlAccessGroupObjectAssocTable=eqlAccessGroupObjectAssocTable, eqlVolumeAccessGroupAssocObjectIndex=eqlVolumeAccessGroupAssocObjectIndex, eqlAccessRecordCount=eqlAccessRecordCount, eqlAccessGroupTable=eqlAccessGroupTable, eqlVolumeAccessGroupAssocTable=eqlVolumeAccessGroupAssocTable, eqlAccessPointEntry=eqlAccessPointEntry, eqlAccessPointRowStatus=eqlAccessPointRowStatus, eqlSharedVolumeAccessGroupAssocObjectIndex=eqlSharedVolumeAccessGroupAssocObjectIndex, eqlAccessGroupMemberRowStatus=eqlAccessGroupMemberRowStatus, eqlAccessGroupObjectAssocFlag=eqlAccessGroupObjectAssocFlag, eqlAccessGroupUUID=eqlAccessGroupUUID, eqlAccessGroupVolumeAssocTable=eqlAccessGroupVolumeAssocTable, eqlAccessNotifications=eqlAccessNotifications)
|
py | 1a44d660d279b04a34a25123fdddfae5ccb901a9 | class Solution:
    def compareVersion(self, version1: str, version2: str) -> int:
        """Compare two dotted version strings: return 1 if version1 > version2,
        -1 if version1 < version2, and 0 if they are equal."""
        l1 = [int(s) for s in version1.split(".")]
        l2 = [int(s) for s in version2.split(".")]
        # Pad the shorter revision list with zeros so e.g. "1.0" compares equal to "1.0.0".
        len1, len2 = len(l1), len(l2)
        if len1 > len2:
            l2 += [0] * (len1 - len2)
        elif len1 < len2:
            l1 += [0] * (len2 - len1)
        # Lists compare lexicographically, so this expression yields 1, -1, or 0.
        return (l1 > l2) - (l1 < l2)
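# Hedged usage sketch (not part of the original solution file; the calls below are
# illustrative assumptions about how the class would be exercised):
#   Solution().compareVersion("1.01", "1.001")  # -> 0, numeric revisions are equal
#   Solution().compareVersion("1.0", "1.0.0")   # -> 0, missing revisions count as 0
#   Solution().compareVersion("0.1", "1.1")     # -> -1, version1 < version2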
|
py | 1a44d67bbf8cb1c6a7b6d4e1226cf04fdbfde67e | import unittest
from unittest import mock
from datetime import datetime,timedelta
from shutil import rmtree
import os
import json
import dotenv
# project modules
from logs.config.logging import logs_config
from locations import paths, dirs, root_dir, test_dir
from modules.email import email_notification, login_to_gmail_and_send
# LOGGING
logs_config(paths["logs_config_test"])
# ENV VARS
dotenv.load_dotenv(root_dir / ".dev.env")
# MOCK VARS
mock_dirs = {
"payload_email": test_dir / "fixtures/payload_email/",
"email_template": dirs["email_template"],
"email_final": test_dir / "output/email_final/",
"payload_csv": test_dir / "fixtures/payload_csv/",
}
mock_paths = {
"payload_email": mock_dirs["payload_email"] / "email-homicide1.html",
"email_final": mock_dirs["email_final"] / "email.html",
"payload_csv": mock_dirs["payload_csv"] / "dockets_murder_and_hom.csv",
}
class TestEmailHomicideAndMurder(unittest.TestCase):
def setUp(self) -> None:
# clean up
if mock_dirs["email_final"].is_dir():
rmtree(mock_dirs["email_final"])
mock_dirs["email_final"].mkdir(parents=True, exist_ok=True)
# vars
self.scrape_start_datetime = datetime.now() - timedelta(hours=1)
self.scrape_end_datetime = datetime.now()
self.target_scrape_day = "yesterday"
self.county_list = ["Cumberland", "Perry", "York", "Lancaster"]
def tearDown(self) -> None:
pass
@mock.patch.dict(paths, mock_paths, clear=True)
@mock.patch.dict(dirs, mock_dirs, clear=True)
def test_email_with_homicide_and_murder_sends(self):
"""
        Test that email notification successfully detects that a homicide
        and a murder are included in the CSV payload and responds accordingly.
"""
email_notification(
self.scrape_start_datetime, self.scrape_end_datetime,
self.target_scrape_day, self.county_list
)
if __name__ == "__main__":
unittest.main()
|
py | 1a44d81fea62905aba68378b7a67fd4794e8335b | import os
import json
import logging
def load_mock_data(filename):
    """Load a JSON fixture from the test_data directory next to this module.

    Returns the parsed JSON, or an empty list if the file cannot be read.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    resource_file = os.path.join(base_dir, 'test_data/%s' % filename)
    json_text = '[]'
    try:
        with open(resource_file, 'r') as f:
            json_text = f.read()
    except IOError:
        logging.exception('could not load file %s' % filename)
    return json.loads(json_text)
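# Hedged usage sketch (assumes a test_data/users.json fixture exists alongside this
# module; the filename is illustrative, not part of the original file):
#   users = load_mock_data('users.json')  # parsed JSON list, or [] if the file is unreadable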
|