Dataset columns (one record per function):

| column | type |
|---|---|
| body_hash | string, 64 chars |
| body | string, 23–109k chars |
| docstring | string, 1–57k chars |
| path | string, 4–198 chars |
| name | string, 1–115 chars |
| repository_name | string, 7–111 chars |
| repository_stars | float64, 0–191k |
| lang | string, 1 class |
| body_without_docstring | string, 14–108k chars |
| unified | string, 45–133k chars |

Each record below is shown as `body_hash | path | name | repository_name | repository_stars | lang`, followed by `body` as a fenced code block. The `docstring` column is the function's docstring, already contained in `body`; `body_without_docstring` and `unified` repeat the same code.
6be99ed2e1a6b36e7495619ec9dd2bd18da700838324994ffc94de6f38b61a69 | isi_sdk/models/nodes_lnn_hardware_node.py | family_code | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def family_code(self):
    """
    Gets the family_code of this NodesLnnHardwareNode.
    Family code of this node (X, S, NL, etc.).

    :return: The family_code of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._family_code
```

23333d2c94ed74c0a1cf99532ba646728b24129017c5883e69124841657dbb08 | isi_sdk/models/nodes_lnn_hardware_node.py | family_code | Atomicology/isilon_sdk_python | 0 | python

```python
@family_code.setter
def family_code(self, family_code):
    """
    Sets the family_code of this NodesLnnHardwareNode.
    Family code of this node (X, S, NL, etc.).

    :param family_code: The family_code of this NodesLnnHardwareNode.
    :type: str
    """
    self._family_code = family_code
```
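Every property in this model follows the same swagger-codegen pattern: a read-only `@property` backed by a private attribute, plus a matching `@<name>.setter`. A minimal, self-contained sketch of how these generated accessors behave; the constructor signature and the demo values are illustrative assumptions, not part of the generated file:

```python
class NodesLnnHardwareNode(object):
    """Minimal sketch of the swagger-codegen property pattern above."""

    def __init__(self, family_code=None):
        # Generated models keep every field in a private backing attribute.
        self._family_code = family_code

    @property
    def family_code(self):
        """Gets the family_code of this NodesLnnHardwareNode."""
        return self._family_code

    @family_code.setter
    def family_code(self, family_code):
        """Sets the family_code of this NodesLnnHardwareNode."""
        self._family_code = family_code


node = NodesLnnHardwareNode()
node.family_code = "X"      # invokes the setter
print(node.family_code)     # invokes the getter -> "X"
```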
5d45e99c5194a0ab9d54eaf00e9c838b6c233ca16cc805d495ff2218db5cb680 | isi_sdk/models/nodes_lnn_hardware_node.py | flash_drive | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def flash_drive(self):
    """
    Gets the flash_drive of this NodesLnnHardwareNode.
    Manufacturer, model, and device id of this node's flash drive.

    :return: The flash_drive of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._flash_drive
```

e770336da85208520b20905110a4debdaddff6ea80d5225da211ee21708d1c7d | isi_sdk/models/nodes_lnn_hardware_node.py | flash_drive | Atomicology/isilon_sdk_python | 0 | python

```python
@flash_drive.setter
def flash_drive(self, flash_drive):
    """
    Sets the flash_drive of this NodesLnnHardwareNode.
    Manufacturer, model, and device id of this node's flash drive.

    :param flash_drive: The flash_drive of this NodesLnnHardwareNode.
    :type: str
    """
    self._flash_drive = flash_drive
```
64d0bfece499ddb6b3f7c4653f8aeeef3996eec83f6dc5f1b8f76dcd6cac658f | isi_sdk/models/nodes_lnn_hardware_node.py | generation_code | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def generation_code(self):
    """
    Gets the generation_code of this NodesLnnHardwareNode.
    Generation code of this node.

    :return: The generation_code of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._generation_code
```

142dd80adedd425301c2b4897fffdf00becbfa1505263dc683d2f8cbaf808674 | isi_sdk/models/nodes_lnn_hardware_node.py | generation_code | Atomicology/isilon_sdk_python | 0 | python

```python
@generation_code.setter
def generation_code(self, generation_code):
    """
    Sets the generation_code of this NodesLnnHardwareNode.
    Generation code of this node.

    :param generation_code: The generation_code of this NodesLnnHardwareNode.
    :type: str
    """
    self._generation_code = generation_code
```
d0dd0f8322340372c5174864250d1b12be9885e75045a75f9720f7fa7ebd4b4b | isi_sdk/models/nodes_lnn_hardware_node.py | hwgen | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def hwgen(self):
    """
    Gets the hwgen of this NodesLnnHardwareNode.
    Isilon hardware generation name.

    :return: The hwgen of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._hwgen
```

d9f4ff1885a6d0513f6d9f6828ecffb65e7e9c243b563636ec555b95ceaf831e | isi_sdk/models/nodes_lnn_hardware_node.py | hwgen | Atomicology/isilon_sdk_python | 0 | python

```python
@hwgen.setter
def hwgen(self, hwgen):
    """
    Sets the hwgen of this NodesLnnHardwareNode.
    Isilon hardware generation name.

    :param hwgen: The hwgen of this NodesLnnHardwareNode.
    :type: str
    """
    self._hwgen = hwgen
```
48949e473d3e673cc6d83fb2fa803da231202e8e73868bf2ff7a7b18d09ed629 | isi_sdk/models/nodes_lnn_hardware_node.py | id | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def id(self):
    """
    Gets the id of this NodesLnnHardwareNode.
    Node ID (Device Number) of this node.

    :return: The id of this NodesLnnHardwareNode.
    :rtype: int
    """
    return self._id
```

8b479496924b8abcc76d47180e0035c36c32df120b09fa8e069134e8396d5b44 | isi_sdk/models/nodes_lnn_hardware_node.py | id | Atomicology/isilon_sdk_python | 0 | python

```python
@id.setter
def id(self, id):
    """
    Sets the id of this NodesLnnHardwareNode.
    Node ID (Device Number) of this node.

    :param id: The id of this NodesLnnHardwareNode.
    :type: int
    """
    self._id = id
```
03bc3d7567e2d0df9eb2291e06da16aa635a957698e9ededfe0c7b1a33c059ed | isi_sdk/models/nodes_lnn_hardware_node.py | imb_version | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def imb_version(self):
    """
    Gets the imb_version of this NodesLnnHardwareNode.
    Version of this node's Isilon Management Board.

    :return: The imb_version of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._imb_version
```

bdeede46b0e27d8bd8dd5dafe376cc4c9027cc91dfdca800fe825267037461aa | isi_sdk/models/nodes_lnn_hardware_node.py | imb_version | Atomicology/isilon_sdk_python | 0 | python

```python
@imb_version.setter
def imb_version(self, imb_version):
    """
    Sets the imb_version of this NodesLnnHardwareNode.
    Version of this node's Isilon Management Board.

    :param imb_version: The imb_version of this NodesLnnHardwareNode.
    :type: str
    """
    self._imb_version = imb_version
```
d72d765983f34f9047f6d132e1008f0d41c2d68c004243bfdf8d8a32aa7d65e4 | isi_sdk/models/nodes_lnn_hardware_node.py | infiniband | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def infiniband(self):
    """
    Gets the infiniband of this NodesLnnHardwareNode.
    Infiniband card type.

    :return: The infiniband of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._infiniband
```

dbeee21175e4c6355913fb164556929deda56ac20f8a7adc9e43a6354734ef58 | isi_sdk/models/nodes_lnn_hardware_node.py | infiniband | Atomicology/isilon_sdk_python | 0 | python

```python
@infiniband.setter
def infiniband(self, infiniband):
    """
    Sets the infiniband of this NodesLnnHardwareNode.
    Infiniband card type.

    :param infiniband: The infiniband of this NodesLnnHardwareNode.
    :type: str
    """
    self._infiniband = infiniband
```
9e0a5ef4d2a5ef0d3ec54a7361d0b4abde506dbb711d7add6f6b9396ab4290b2 | isi_sdk/models/nodes_lnn_hardware_node.py | lcd_version | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def lcd_version(self):
    """
    Gets the lcd_version of this NodesLnnHardwareNode.
    Version of the LCD panel.

    :return: The lcd_version of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._lcd_version
```

cf5f4fdafdba5870f15e5890f69f2827efe7c0183d93b296a3d90ffd67fab88f | isi_sdk/models/nodes_lnn_hardware_node.py | lcd_version | Atomicology/isilon_sdk_python | 0 | python

```python
@lcd_version.setter
def lcd_version(self, lcd_version):
    """
    Sets the lcd_version of this NodesLnnHardwareNode.
    Version of the LCD panel.

    :param lcd_version: The lcd_version of this NodesLnnHardwareNode.
    :type: str
    """
    self._lcd_version = lcd_version
```
cdad082be9207dcdea2c6ec6451997ac928bc16a88a918b71c1a05fe439e4063 | isi_sdk/models/nodes_lnn_hardware_node.py | lnn | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def lnn(self):
    """
    Gets the lnn of this NodesLnnHardwareNode.
    Logical Node Number (LNN) of this node.

    :return: The lnn of this NodesLnnHardwareNode.
    :rtype: int
    """
    return self._lnn
```

0c73a71b629f4ad5e1771b64a7398f8559a3f85bf9d65d456de3921b1c478234 | isi_sdk/models/nodes_lnn_hardware_node.py | lnn | Atomicology/isilon_sdk_python | 0 | python

```python
@lnn.setter
def lnn(self, lnn):
    """
    Sets the lnn of this NodesLnnHardwareNode.
    Logical Node Number (LNN) of this node.

    :param lnn: The lnn of this NodesLnnHardwareNode.
    :type: int
    """
    self._lnn = lnn
```
4ad9b5eaf6a00a055e6d47321e055b0be8db50233521393106c8a9d2c3c2c4e4 | isi_sdk/models/nodes_lnn_hardware_node.py | motherboard | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def motherboard(self):
    """
    Gets the motherboard of this NodesLnnHardwareNode.
    Manufacturer and model of this node's motherboard.

    :return: The motherboard of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._motherboard
```

7262a843e31eb2278692629291f4e2291a78a4058ba1b6750c8052f5e8a99407 | isi_sdk/models/nodes_lnn_hardware_node.py | motherboard | Atomicology/isilon_sdk_python | 0 | python

```python
@motherboard.setter
def motherboard(self, motherboard):
    """
    Sets the motherboard of this NodesLnnHardwareNode.
    Manufacturer and model of this node's motherboard.

    :param motherboard: The motherboard of this NodesLnnHardwareNode.
    :type: str
    """
    self._motherboard = motherboard
```
8f0e4aa205030402227aaad1bbcb5607980e653f825c1f956833b992efa40b4a | isi_sdk/models/nodes_lnn_hardware_node.py | net_interfaces | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def net_interfaces(self):
    """
    Gets the net_interfaces of this NodesLnnHardwareNode.
    Description of all this node's network interfaces.

    :return: The net_interfaces of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._net_interfaces
```

e00ac68829eae33532131896b109f50a352db28fcf444cb0858fb934d6a24f62 | isi_sdk/models/nodes_lnn_hardware_node.py | net_interfaces | Atomicology/isilon_sdk_python | 0 | python

```python
@net_interfaces.setter
def net_interfaces(self, net_interfaces):
    """
    Sets the net_interfaces of this NodesLnnHardwareNode.
    Description of all this node's network interfaces.

    :param net_interfaces: The net_interfaces of this NodesLnnHardwareNode.
    :type: str
    """
    self._net_interfaces = net_interfaces
```
a97037e8fb996ab5456b927b45c346d7cbd782052ebd7b1721193c9da6d38a4f | isi_sdk/models/nodes_lnn_hardware_node.py | nvram | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def nvram(self):
    """
    Gets the nvram of this NodesLnnHardwareNode.
    Manufacturer and model of this node's NVRAM board.

    :return: The nvram of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._nvram
```

d01b6c9de772fe5af341723ac3013cbecde3ff4af2926ae4fc934bc960df58cf | isi_sdk/models/nodes_lnn_hardware_node.py | nvram | Atomicology/isilon_sdk_python | 0 | python

```python
@nvram.setter
def nvram(self, nvram):
    """
    Sets the nvram of this NodesLnnHardwareNode.
    Manufacturer and model of this node's NVRAM board.

    :param nvram: The nvram of this NodesLnnHardwareNode.
    :type: str
    """
    self._nvram = nvram
```
94e3fee03e02c2aa1b4da4bab2f7182cb781937214728b00501f424309acc5d9 | isi_sdk/models/nodes_lnn_hardware_node.py | powersupplies | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def powersupplies(self):
    """
    Gets the powersupplies of this NodesLnnHardwareNode.
    Description strings for each power supply on this node.

    :return: The powersupplies of this NodesLnnHardwareNode.
    :rtype: list[str]
    """
    return self._powersupplies
```

eb018790c63d192c76caa98cab10ab3389705b243b0139e652f318cec06f2cd5 | isi_sdk/models/nodes_lnn_hardware_node.py | powersupplies | Atomicology/isilon_sdk_python | 0 | python

```python
@powersupplies.setter
def powersupplies(self, powersupplies):
    """
    Sets the powersupplies of this NodesLnnHardwareNode.
    Description strings for each power supply on this node.

    :param powersupplies: The powersupplies of this NodesLnnHardwareNode.
    :type: list[str]
    """
    self._powersupplies = powersupplies
```
14bc58a2b3d432ebc0c699dcfda3656e5047ea4a0d6cf711a3d45224f32e054e | isi_sdk/models/nodes_lnn_hardware_node.py | processor | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def processor(self):
    """
    Gets the processor of this NodesLnnHardwareNode.
    Number of processors and cores on this node.

    :return: The processor of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._processor
```

cb08bb3ba675f317633a7bf13fa020f61fb532839caf722d7d06cca7d03796ba | isi_sdk/models/nodes_lnn_hardware_node.py | processor | Atomicology/isilon_sdk_python | 0 | python

```python
@processor.setter
def processor(self, processor):
    """
    Sets the processor of this NodesLnnHardwareNode.
    Number of processors and cores on this node.

    :param processor: The processor of this NodesLnnHardwareNode.
    :type: str
    """
    self._processor = processor
```
710032d912f67cdcb0b618c93429f1cee8a864e096bb61faa0af21cf938b8a52 | isi_sdk/models/nodes_lnn_hardware_node.py | product | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def product(self):
    """
    Gets the product of this NodesLnnHardwareNode.
    Isilon product name.

    :return: The product of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._product
```

82b992f4588232e82d8074ba9d0cc25f464cb0510289e2f27fd0fdbbdd9d5876 | isi_sdk/models/nodes_lnn_hardware_node.py | product | Atomicology/isilon_sdk_python | 0 | python

```python
@product.setter
def product(self, product):
    """
    Sets the product of this NodesLnnHardwareNode.
    Isilon product name.

    :param product: The product of this NodesLnnHardwareNode.
    :type: str
    """
    self._product = product
```
4843bbd32209abec72dcc3c9fb6c993f07b684ef07496fca697ede6b07938506 | isi_sdk/models/nodes_lnn_hardware_node.py | ram | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def ram(self):
    """
    Gets the ram of this NodesLnnHardwareNode.
    Size of RAM in bytes.

    :return: The ram of this NodesLnnHardwareNode.
    :rtype: int
    """
    return self._ram
```

10b1eaa4eae270f31752ec4cfde44e517d788015150a5ea5f9f8a96afa87872a | isi_sdk/models/nodes_lnn_hardware_node.py | ram | Atomicology/isilon_sdk_python | 0 | python

```python
@ram.setter
def ram(self, ram):
    """
    Sets the ram of this NodesLnnHardwareNode.
    Size of RAM in bytes.

    :param ram: The ram of this NodesLnnHardwareNode.
    :type: int
    """
    self._ram = ram
```
8ac45b661d44b3bd9a7a281c1e913f29af81c669d277b79033ee63a74f4bb7ae | isi_sdk/models/nodes_lnn_hardware_node.py | serial_number | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def serial_number(self):
    """
    Gets the serial_number of this NodesLnnHardwareNode.
    Serial number of this node.

    :return: The serial_number of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._serial_number
```

41cc538dd9657c1a7d354c787615b76e4a41729a08fb270b66f064bf2ac00a2f | isi_sdk/models/nodes_lnn_hardware_node.py | serial_number | Atomicology/isilon_sdk_python | 0 | python

```python
@serial_number.setter
def serial_number(self, serial_number):
    """
    Sets the serial_number of this NodesLnnHardwareNode.
    Serial number of this node.

    :param serial_number: The serial_number of this NodesLnnHardwareNode.
    :type: str
    """
    self._serial_number = serial_number
```
e1171deddf79bc7cd04028a4f956b2aa20af719a2b81f71a370c396dab29222f | isi_sdk/models/nodes_lnn_hardware_node.py | series | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def series(self):
    """
    Gets the series of this NodesLnnHardwareNode.
    Series of this node (X, I, NL, etc.).

    :return: The series of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._series
```

9013102c7d8289ad4a030aa3dc3b0e8ad87c900430dc9c48c3d3db63d1300029 | isi_sdk/models/nodes_lnn_hardware_node.py | series | Atomicology/isilon_sdk_python | 0 | python

```python
@series.setter
def series(self, series):
    """
    Sets the series of this NodesLnnHardwareNode.
    Series of this node (X, I, NL, etc.).

    :param series: The series of this NodesLnnHardwareNode.
    :type: str
    """
    self._series = series
```
3f8bc68e752a6f2182245ee98f85a37805b19ecd82561fd8922f2b72792e9b2c | isi_sdk/models/nodes_lnn_hardware_node.py | storage_class | Atomicology/isilon_sdk_python | 0 | python

```python
@property
def storage_class(self):
    """
    Gets the storage_class of this NodesLnnHardwareNode.
    Storage class of this node (storage or diskless).

    :return: The storage_class of this NodesLnnHardwareNode.
    :rtype: str
    """
    return self._storage_class
```

e72d70ef0d702c249b9f2b86fc94844779476a96742af7bc8042351e8c346dfc | isi_sdk/models/nodes_lnn_hardware_node.py | storage_class | Atomicology/isilon_sdk_python | 0 | python

```python
@storage_class.setter
def storage_class(self, storage_class):
    """
    Sets the storage_class of this NodesLnnHardwareNode.
    Storage class of this node (storage or diskless).

    :param storage_class: The storage_class of this NodesLnnHardwareNode.
    :type: str
    """
    self._storage_class = storage_class
```
f82945678aa8239ce359f4d053efe9cd26bebb743866b0d3ca2cc7dcb18ed12b | isi_sdk/models/nodes_lnn_hardware_node.py | to_dict | Atomicology/isilon_sdk_python | 0 | python

```python
def to_dict(self):
    """
    Returns the model properties as a dict
    """
    result = {}
    for attr, _ in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, 'to_dict') else x,
                value))
        elif hasattr(value, 'to_dict'):
            result[attr] = value.to_dict()
        else:
            result[attr] = value
    return result
```
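`to_dict` walks the generated `swagger_types` map (attribute name to swagger type string) and recursively serializes nested models and lists of models. Below is a standalone sketch of the same traversal; the `Disk` and `Node` classes and their `swagger_types` entries are made up for illustration and are not part of the SDK:

```python
from pprint import pformat


class Disk(object):
    """Toy nested model (illustrative only)."""

    def __init__(self, name):
        self.name = name

    def to_dict(self):
        return {'name': self.name}


class Node(object):
    """Toy model using the same to_dict traversal as the generated code."""

    # attribute name -> swagger type string, as in generated models
    swagger_types = {'lnn': 'int', 'disks': 'list[Disk]'}

    def __init__(self, lnn, disks):
        self.lnn = lnn
        self.disks = disks

    def to_dict(self):
        result = {}
        # The generated code uses six.iteritems for py2/py3 compatibility;
        # plain dict iteration is equivalent here.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, 'to_dict') else x
                                for x in value]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            else:
                result[attr] = value
        return result


node = Node(3, [Disk('da0'), Disk('da1')])
print(pformat(node.to_dict()))
# {'disks': [{'name': 'da0'}, {'name': 'da1'}], 'lnn': 3}
```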
c373d87dd29c1e96dce460ab571bff86e58edb298ba83c85d8cc7603a6505de4 | isi_sdk/models/nodes_lnn_hardware_node.py | to_str | Atomicology/isilon_sdk_python | 0 | python

```python
def to_str(self):
    """
    Returns the string representation of the model
    """
    return pformat(self.to_dict())
```

1034ff7dd2eef24d21e3c2fa7409b793ab5cbb8cd75a2eb0ab3e62604b26264d | isi_sdk/models/nodes_lnn_hardware_node.py | __repr__ | Atomicology/isilon_sdk_python | 0 | python

```python
def __repr__(self):
    """
    For `print` and `pprint`
    """
    return self.to_str()
```
a43b3ce7478646f0122f200e4de04f4f5ed99329a4b75930eecef4ff54a23351 | isi_sdk/models/nodes_lnn_hardware_node.py | __eq__ | Atomicology/isilon_sdk_python | 0 | python

```python
def __eq__(self, other):
    """
    Returns true if both objects are equal
    """
    return self.__dict__ == other.__dict__
```

2a0b1bfcb00f209de59fbfddd6b5ec2568f26333e15ce6f21b98dbda512b87fd | isi_sdk/models/nodes_lnn_hardware_node.py | __ne__ | Atomicology/isilon_sdk_python | 0 | python

```python
def __ne__(self, other):
    """
    Returns true if both objects are not equal
    """
    return not (self == other)
```
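Note that the generated `__eq__` reads `other.__dict__` unconditionally, so comparing a model against a non-model operand (for example `node == 5`) raises `AttributeError` rather than returning `False`. A more defensive variant is sketched below; this is an illustrative alternative, not what swagger-codegen emits:

```python
class Model(object):
    def __init__(self, **fields):
        self.__dict__.update(fields)

    def __eq__(self, other):
        # Guard against non-model operands instead of touching
        # other.__dict__ blindly.
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        result = self.__eq__(other)
        return result if result is NotImplemented else not result


assert Model(lnn=1) == Model(lnn=1)
assert Model(lnn=1) != Model(lnn=2)
assert Model(lnn=1) != 5   # no AttributeError; Python falls back to identity
```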
746de7dc9c9cff2118e8029f27cfb1f767fe573f8c08253df6f65d337d577270 | leetcode/0807 Max Increase to Keep City Skyline.py | maxIncreaseKeepingSkyline | jaredliw/python-question-bank | 1 | python

```python
def maxIncreaseKeepingSkyline(self, grid):
    """
    :type grid: List[List[int]]
    :rtype: int
    """
    size = len(grid)
    row_max = [0] * size
    col_max = [0] * size
    for row in range(size):
        for col in range(size):
            if grid[row][col] > row_max[row]:
                row_max[row] = grid[row][col]
            if grid[row][col] > col_max[col]:
                col_max[col] = grid[row][col]
    sum_increased = 0
    for row in range(size):
        for col in range(size):
            sum_increased += min(row_max[row], col_max[col]) - grid[row][col]
    return sum_increased
```
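The idea: each building can rise to the smaller of its row maximum (the west/east skyline) and its column maximum (the north/south skyline) without changing either skyline, so the answer is the sum of those headrooms. An equivalent standalone sketch using builtins, run on the example grid from the LeetCode problem statement:

```python
def max_increase_keeping_skyline(grid):
    """Same algorithm as the record above, with max()/zip() for the maxima."""
    row_max = [max(row) for row in grid]          # skyline seen from west/east
    col_max = [max(col) for col in zip(*grid)]    # skyline seen from north/south
    return sum(
        min(row_max[r], col_max[c]) - grid[r][c]
        for r in range(len(grid))
        for c in range(len(grid))
    )


grid = [[3, 0, 8, 4],
        [2, 4, 5, 7],
        [9, 2, 6, 3],
        [0, 3, 1, 0]]
print(max_increase_keeping_skyline(grid))  # 35
```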
0c37dd463d9c383e57d2f39b25c250728a77a4ffd8e2e99562de20c801edec6f | python/paddle/text/datasets/wmt14.py | get_dict | tangzhiyi11/Paddle | 17,085 | python

```python
def get_dict(self, reverse=False):
    """
    Get the source and target dictionary.

    Args:
        reverse (bool): whether to reverse key and value in dictionary,
            i.e. key: value to value: key.

    Returns:
        Two dictionaries, the source and target dictionary.

    Examples:

        .. code-block:: python

            from paddle.text.datasets import WMT14
            wmt14 = WMT14(mode='train', dict_size=50)
            src_dict, trg_dict = wmt14.get_dict()
    """
    src_dict, trg_dict = self.src_dict, self.trg_dict
    if reverse:
        src_dict = {v: k for k, v in six.iteritems(src_dict)}
        trg_dict = {v: k for k, v in six.iteritems(trg_dict)}
    return src_dict, trg_dict
```
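With `reverse=True` the returned dictionaries map ids back to tokens, which is useful for decoding model output. A short usage sketch, assuming the `paddle` package is installed and the WMT14 files can be downloaded in your environment:

```python
from paddle.text.datasets import WMT14

wmt14 = WMT14(mode='train', dict_size=50)

# Forward dictionaries map token -> id; reverse=True swaps keys and
# values so they map id -> token instead.
src_dict, trg_dict = wmt14.get_dict(reverse=True)
print(src_dict[0])  # token whose id is 0 (typically a special symbol)
```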
88468e7950663cb4115f7b7d24379c1dab9ce04d811d7c1afefa125d06842bb1 | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | is_active | Ravichandramanupati/openshift | 1 | python

```python
def is_active(self):
    """Skip hosts with unsupported deployment types."""
    deployment_type = self.get_var('openshift_deployment_type')
    has_valid_deployment_type = deployment_type in DEPLOYMENT_IMAGE_INFO
    return super(DockerImageAvailability, self).is_active() and has_valid_deployment_type
```
b8c302020288666b735685512346218740594b2886a6f3175d6e6bd3e49ad1d5 | def required_images(self):
"\n Determine which images we expect to need for this host.\n Returns: a set of required images like 'openshift/origin:v3.6'\n\n The thorny issue of determining the image names from the variables is under consideration\n via https://github.com/openshift/openshift-ansible/issues/4415\n\n For now we operate as follows:\n * For containerized components (master, node, ...) we look at the deployment type and\n use openshift/origin or openshift3/ose as the base for those component images. The\n version is openshift_image_tag as determined by the openshift_version role.\n * For OpenShift-managed infrastructure (router, registry...) we use oreg_url if\n it is defined; otherwise we again use the base that depends on the deployment type.\n Registry is not included in constructed images. It may be in oreg_url or etcd image.\n "
required = set()
deployment_type = self.get_var('openshift_deployment_type')
host_groups = self.get_var('group_names')
image_tag = self.get_var('openshift_image_tag', default='latest')
image_info = DEPLOYMENT_IMAGE_INFO[deployment_type]
image_url = '{}/{}-{}:{}'.format(image_info['namespace'], image_info['name'], '${component}', '${version}')
image_url = (self.get_var('oreg_url', default='') or image_url)
if ('oo_nodes_to_config' in host_groups):
for suffix in NODE_IMAGE_SUFFIXES:
required.add(image_url.replace('${component}', suffix).replace('${version}', image_tag))
required.add(image_info['registry_console_image'])
if self.get_var('openshift', 'common', 'is_containerized'):
components = set()
if ('oo_nodes_to_config' in host_groups):
components.update(['node', 'openvswitch'])
if ('oo_masters_to_config' in host_groups):
components.add(image_info['name'])
for component in components:
required.add('{}/{}:{}'.format(image_info['namespace'], component, image_tag))
if ('oo_etcd_to_config' in host_groups):
required.add('registry.access.redhat.com/rhel7/etcd')
return required | Determine which images we expect to need for this host.
Returns: a set of required images like 'openshift/origin:v3.6'
The thorny issue of determining the image names from the variables is under consideration
via https://github.com/openshift/openshift-ansible/issues/4415
For now we operate as follows:
* For containerized components (master, node, ...) we look at the deployment type and
use openshift/origin or openshift3/ose as the base for those component images. The
version is openshift_image_tag as determined by the openshift_version role.
* For OpenShift-managed infrastructure (router, registry...) we use oreg_url if
it is defined; otherwise we again use the base that depends on the deployment type.
Registry is not included in constructed images. It may be in oreg_url or etcd image. | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | required_images | Ravichandramanupati/openshift | 1 | python | def required_images(self):
"\n Determine which images we expect to need for this host.\n Returns: a set of required images like 'openshift/origin:v3.6'\n\n The thorny issue of determining the image names from the variables is under consideration\n via https://github.com/openshift/openshift-ansible/issues/4415\n\n For now we operate as follows:\n * For containerized components (master, node, ...) we look at the deployment type and\n use openshift/origin or openshift3/ose as the base for those component images. The\n version is openshift_image_tag as determined by the openshift_version role.\n * For OpenShift-managed infrastructure (router, registry...) we use oreg_url if\n it is defined; otherwise we again use the base that depends on the deployment type.\n Registry is not included in constructed images. It may be in oreg_url or etcd image.\n "
required = set()
deployment_type = self.get_var('openshift_deployment_type')
host_groups = self.get_var('group_names')
image_tag = self.get_var('openshift_image_tag', default='latest')
image_info = DEPLOYMENT_IMAGE_INFO[deployment_type]
image_url = '{}/{}-{}:{}'.format(image_info['namespace'], image_info['name'], '${component}', '${version}')
image_url = (self.get_var('oreg_url', default='') or image_url)
if ('oo_nodes_to_config' in host_groups):
for suffix in NODE_IMAGE_SUFFIXES:
required.add(image_url.replace('${component}', suffix).replace('${version}', image_tag))
required.add(image_info['registry_console_image'])
if self.get_var('openshift', 'common', 'is_containerized'):
components = set()
if ('oo_nodes_to_config' in host_groups):
components.update(['node', 'openvswitch'])
if ('oo_masters_to_config' in host_groups):
components.add(image_info['name'])
for component in components:
required.add('{}/{}:{}'.format(image_info['namespace'], component, image_tag))
if ('oo_etcd_to_config' in host_groups):
required.add('registry.access.redhat.com/rhel7/etcd')
return required | def required_images(self):
"\n Determine which images we expect to need for this host.\n Returns: a set of required images like 'openshift/origin:v3.6'\n\n The thorny issue of determining the image names from the variables is under consideration\n via https://github.com/openshift/openshift-ansible/issues/4415\n\n For now we operate as follows:\n * For containerized components (master, node, ...) we look at the deployment type and\n use openshift/origin or openshift3/ose as the base for those component images. The\n version is openshift_image_tag as determined by the openshift_version role.\n * For OpenShift-managed infrastructure (router, registry...) we use oreg_url if\n it is defined; otherwise we again use the base that depends on the deployment type.\n Registry is not included in constructed images. It may be in oreg_url or etcd image.\n "
required = set()
deployment_type = self.get_var('openshift_deployment_type')
host_groups = self.get_var('group_names')
image_tag = self.get_var('openshift_image_tag', default='latest')
image_info = DEPLOYMENT_IMAGE_INFO[deployment_type]
image_url = '{}/{}-{}:{}'.format(image_info['namespace'], image_info['name'], '${component}', '${version}')
image_url = (self.get_var('oreg_url', default='') or image_url)
if ('oo_nodes_to_config' in host_groups):
for suffix in NODE_IMAGE_SUFFIXES:
required.add(image_url.replace('${component}', suffix).replace('${version}', image_tag))
required.add(image_info['registry_console_image'])
if self.get_var('openshift', 'common', 'is_containerized'):
components = set()
if ('oo_nodes_to_config' in host_groups):
components.update(['node', 'openvswitch'])
if ('oo_masters_to_config' in host_groups):
components.add(image_info['name'])
for component in components:
required.add('{}/{}:{}'.format(image_info['namespace'], component, image_tag))
if ('oo_etcd_to_config' in host_groups):
required.add('registry.access.redhat.com/rhel7/etcd')
return required<|docstring|>Determine which images we expect to need for this host.
Returns: a set of required images like 'openshift/origin:v3.6'
The thorny issue of determining the image names from the variables is under consideration
via https://github.com/openshift/openshift-ansible/issues/4415
For now we operate as follows:
* For containerized components (master, node, ...) we look at the deployment type and
use openshift/origin or openshift3/ose as the base for those component images. The
version is openshift_image_tag as determined by the openshift_version role.
* For OpenShift-managed infrastructure (router, registry...) we use oreg_url if
it is defined; otherwise we again use the base that depends on the deployment type.
Registry is not included in constructed images. It may be in oreg_url or etcd image.<|endoftext|> |
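The `${component}`/`${version}` placeholders above are resolved by plain string replacement, not shell expansion. A minimal sketch of that construction; the namespace, name, and suffix values are hypothetical stand-ins, not the real DEPLOYMENT_IMAGE_INFO or NODE_IMAGE_SUFFIXES contents:

```python
# Hypothetical stand-ins for one DEPLOYMENT_IMAGE_INFO entry and two
# node-image suffixes; only the string mechanics come from the record.
image_info = {"namespace": "openshift", "name": "origin"}
image_tag = "v3.6"

image_url = "{}/{}-{}:{}".format(
    image_info["namespace"], image_info["name"], "${component}", "${version}")
# image_url == "openshift/origin-${component}:${version}"

for suffix in ("pod", "deployer"):
    print(image_url.replace("${component}", suffix).replace("${version}", image_tag))
# openshift/origin-pod:v3.6
# openshift/origin-deployer:v3.6
```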
b2b76ec7902b57e38a5d8409a9326e07ce3200086ab49ec7b5a56449521a3aa5 | def local_images(self, images):
'Filter a list of images and return those available locally.'
found_images = []
for image in images:
imglist = ([image] + [((reg + '/') + image) for reg in self.registries['configured']])
if self.is_image_local(imglist):
found_images.append(image)
return found_images | Filter a list of images and return those available locally. | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | local_images | Ravichandramanupati/openshift | 1 | python | def local_images(self, images):
found_images = []
for image in images:
imglist = ([image] + [((reg + '/') + image) for reg in self.registries['configured']])
if self.is_image_local(imglist):
found_images.append(image)
return found_images | def local_images(self, images):
found_images = []
for image in images:
imglist = ([image] + [((reg + '/') + image) for reg in self.registries['configured']])
if self.is_image_local(imglist):
found_images.append(image)
return found_images<|docstring|>Filter a list of images and return those available locally.<|endoftext|> |
66bf697cc2319c6ac963f64c403347f7499cb5dd9a4589719cac50cc0d8b8226 | def is_image_local(self, image):
'Check if image is already in local docker index.'
result = self.execute_module('docker_image_facts', {'name': image})
return (bool(result.get('images')) and (not result.get('failed'))) | Check if image is already in local docker index. | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | is_image_local | Ravichandramanupati/openshift | 1 | python | def is_image_local(self, image):
result = self.execute_module('docker_image_facts', {'name': image})
return (bool(result.get('images')) and (not result.get('failed'))) | def is_image_local(self, image):
result = self.execute_module('docker_image_facts', {'name': image})
return (bool(result.get('images')) and (not result.get('failed')))<|docstring|>Check if image is already in local docker index.<|endoftext|> |
0a3e552597dbe9c5c8c9ab8b7628125fb21521e125c2b8e5c75ae6c3cddc452b | def ensure_list(self, registry_param):
'Return the task var as a list.'
registry = self.get_var(registry_param, default=[])
if (not isinstance(registry, six.string_types)):
return list(registry)
return self.normalize(registry) | Return the task var as a list. | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | ensure_list | Ravichandramanupati/openshift | 1 | python | def ensure_list(self, registry_param):
registry = self.get_var(registry_param, default=[])
if (not isinstance(registry, six.string_types)):
return list(registry)
return self.normalize(registry) | def ensure_list(self, registry_param):
registry = self.get_var(registry_param, default=[])
if (not isinstance(registry, six.string_types)):
return list(registry)
return self.normalize(registry)<|docstring|>Return the task var as a list.<|endoftext|> |
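The string branch above delegates to a `normalize` helper that this record does not show. A self-contained approximation, assuming `normalize` splits a comma-separated registry string (that splitting rule is an assumption, not taken from the source):

```python
def ensure_list(value):
    # Pass real sequences through unchanged; split a comma-separated
    # string (assumed behaviour of the unshown normalize helper).
    if not isinstance(value, str):
        return list(value)
    return [item.strip() for item in value.split(",") if item.strip()]

print(ensure_list("registry.example.com, docker.io"))  # ['registry.example.com', 'docker.io']
print(ensure_list(["docker.io"]))                      # ['docker.io']
```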
e6bc028fbe78664ca9605cafbb5edd8647e506ba3f5a94cb217492365804ca90 | def available_images(self, images):
'Search remotely for images. Returns: list of images found.'
return [image for image in images if self.is_available_skopeo_image(image)] | Search remotely for images. Returns: list of images found. | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | available_images | Ravichandramanupati/openshift | 1 | python | def available_images(self, images):
return [image for image in images if self.is_available_skopeo_image(image)] | def available_images(self, images):
return [image for image in images if self.is_available_skopeo_image(image)]<|docstring|>Search remotely for images. Returns: list of images found.<|endoftext|> |
e46096cddc88a9b08c30a24e4d0f07c46cd3dfd8775f90465188f7a89c6ba0f1 | def is_available_skopeo_image(self, image):
'Use Skopeo to determine if required image exists in known registry(s).'
registries = self.registries['configured']
if (image.count('/') > 1):
(registry, image) = image.split('/', 1)
registries = [registry]
for registry in registries:
if (registry in self.registries['blocked']):
continue
if (registry not in self.reachable_registries):
self.reachable_registries[registry] = self.connect_to_registry(registry)
if (not self.reachable_registries[registry]):
continue
args = dict(registry=registry, image=image)
args['tls'] = ('false' if (registry in self.registries['insecure']) else 'true')
args['creds'] = (self.skopeo_command_creds if (registry == self.registries['oreg']) else '')
result = self.execute_module_with_retries('command', {'_raw_params': self.skopeo_command.format(**args)})
if ((result.get('rc', 0) == 0) and (not result.get('failed'))):
return True
if (result.get('rc') == 124):
self.reachable_registries[registry] = False
return False | Use Skopeo to determine if required image exists in known registry(s). | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | is_available_skopeo_image | Ravichandramanupati/openshift | 1 | python | def is_available_skopeo_image(self, image):
registries = self.registries['configured']
if (image.count('/') > 1):
(registry, image) = image.split('/', 1)
registries = [registry]
for registry in registries:
if (registry in self.registries['blocked']):
continue
if (registry not in self.reachable_registries):
self.reachable_registries[registry] = self.connect_to_registry(registry)
if (not self.reachable_registries[registry]):
continue
args = dict(registry=registry, image=image)
args['tls'] = ('false' if (registry in self.registries['insecure']) else 'true')
args['creds'] = (self.skopeo_command_creds if (registry == self.registries['oreg']) else '')
result = self.execute_module_with_retries('command', {'_raw_params': self.skopeo_command.format(**args)})
if ((result.get('rc', 0) == 0) and (not result.get('failed'))):
return True
if (result.get('rc') == 124):
self.reachable_registries[registry] = False
return False | def is_available_skopeo_image(self, image):
registries = self.registries['configured']
if (image.count('/') > 1):
(registry, image) = image.split('/', 1)
registries = [registry]
for registry in registries:
if (registry in self.registries['blocked']):
continue
if (registry not in self.reachable_registries):
self.reachable_registries[registry] = self.connect_to_registry(registry)
if (not self.reachable_registries[registry]):
continue
args = dict(registry=registry, image=image)
args['tls'] = ('false' if (registry in self.registries['insecure']) else 'true')
args['creds'] = (self.skopeo_command_creds if (registry == self.registries['oreg']) else '')
result = self.execute_module_with_retries('command', {'_raw_params': self.skopeo_command.format(**args)})
if ((result.get('rc', 0) == 0) and (not result.get('failed'))):
return True
if (result.get('rc') == 124):
self.reachable_registries[registry] = False
return False<|docstring|>Use Skopeo to determine if required image exists in known registry(s).<|endoftext|> |
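The registry handling above hinges on one heuristic: an image name containing more than one `/` carries an explicit registry host as its first segment. That check in isolation (the example image names are illustrative):

```python
def split_registry(image, default_registries):
    # More than one '/' means the first segment is a registry host,
    # not a namespace, so only that registry is searched.
    if image.count('/') > 1:
        registry, rest = image.split('/', 1)
        return [registry], rest
    return default_registries, image

print(split_registry("registry.access.redhat.com/rhel7/etcd", ["docker.io"]))
# (['registry.access.redhat.com'], 'rhel7/etcd')
print(split_registry("openshift/origin-pod:v3.6", ["docker.io"]))
# (['docker.io'], 'openshift/origin-pod:v3.6')
```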
24825f1f37cf62811e5102fe923cd5f76a7331cc3d5b69df654a06abe0e3b24f | def connect_to_registry(self, registry):
'Use ansible wait_for module to test connectivity from host to registry. Returns bool.'
(host, _, port) = registry.partition(':')
port = (port or 443)
args = dict(host=host, port=port, state='started', timeout=30)
result = self.execute_module('wait_for', args)
return ((result.get('rc', 0) == 0) and (not result.get('failed'))) | Use ansible wait_for module to test connectivity from host to registry. Returns bool. | roles/openshift_health_checker/openshift_checks/docker_image_availability.py | connect_to_registry | Ravichandramanupati/openshift | 1 | python | def connect_to_registry(self, registry):
(host, _, port) = registry.partition(':')
port = (port or 443)
args = dict(host=host, port=port, state='started', timeout=30)
result = self.execute_module('wait_for', args)
return ((result.get('rc', 0) == 0) and (not result.get('failed'))) | def connect_to_registry(self, registry):
(host, _, port) = registry.partition(':')
port = (port or 443)
args = dict(host=host, port=port, state='started', timeout=30)
result = self.execute_module('wait_for', args)
return ((result.get('rc', 0) == 0) and (not result.get('failed')))<|docstring|>Use ansible wait_for module to test connectivity from host to registry. Returns bool.<|endoftext|> |
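A quick look at the host/port split used above: `str.partition(':')` yields an empty port string when no port is given, so `port or 443` falls back to HTTPS. The registry names below are illustrative:

```python
for registry in ("registry.example.com:5000", "registry.example.com"):
    host, _, port = registry.partition(':')
    print(host, port or 443)
# registry.example.com 5000
# registry.example.com 443
```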
acfefa687098da548b52f0051504b5cc956c735c3bac9662661807acd5a36450 | def decodeString(self, s):
'\n :type s: str\n :rtype: str\n '
stack = []
cur_str = ''
i = 0
while (i < len(s)):
num = 0
while ((i < len(s)) and s[i].isdigit()):
num = (((num * 10) + ord(s[i])) - ord('0'))
i += 1
if (num > 0):
stack.append(cur_str)
stack.append(num)
cur_str = ''
if (s[i] == '['):
pass
elif (s[i] == ']'):
cur_str *= int(stack.pop())
while ((len(stack) > 0) and (not isinstance(stack[(- 1)], int))):
cur_str = (stack.pop() + cur_str)
else:
cur_str += s[i]
i += 1
return cur_str | :type s: str
:rtype: str | LeetCode/394. Decode String.py | decodeString | ctc316/algorithm-python | 0 | python | def decodeString(self, s):
'\n :type s: str\n :rtype: str\n '
stack = []
cur_str = ''
i = 0
while (i < len(s)):
num = 0
while ((i < len(s)) and s[i].isdigit()):
num = (((num * 10) + ord(s[i])) - ord('0'))
i += 1
if (num > 0):
stack.append(cur_str)
stack.append(num)
cur_str = ''
if (s[i] == '['):
pass
elif (s[i] == ']'):
cur_str *= int(stack.pop())
while ((len(stack) > 0) and (not isinstance(stack[(- 1)], int))):
cur_str = (stack.pop() + cur_str)
else:
cur_str += s[i]
i += 1
return cur_str | def decodeString(self, s):
'\n :type s: str\n :rtype: str\n '
stack = []
cur_str = ''
i = 0
while (i < len(s)):
num = 0
while ((i < len(s)) and s[i].isdigit()):
num = (((num * 10) + ord(s[i])) - ord('0'))
i += 1
if (num > 0):
stack.append(cur_str)
stack.append(num)
cur_str = ''
if (s[i] == '['):
pass
elif (s[i] == ']'):
cur_str *= int(stack.pop())
while ((len(stack) > 0) and (not isinstance(stack[(- 1)], int))):
cur_str = (stack.pop() + cur_str)
else:
cur_str += s[i]
i += 1
return cur_str<|docstring|>:type s: str
:rtype: str<|endoftext|> |
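Illustrative calls for the stack-based decoder above, assuming the usual LeetCode `Solution` wrapper class (the class name is not shown in this record, and the inputs are chosen here, not taken from it):

```python
s = Solution()
print(s.decodeString("3[a]2[bc]"))      # aaabcbc
print(s.decodeString("3[a2[c]]"))       # accaccacc
print(s.decodeString("2[abc]3[cd]ef"))  # abcabccdcdcdef
```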
d8c4ccfa2d12d400eecfd1ee9ff94a3ad43522f304f350fade9499e0b17dc98e | def setup(hass, config):
'Activate Tahoma component.'
from tahoma_api import TahomaApi
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
exclude = conf.get(CONF_EXCLUDE)
try:
api = TahomaApi(username, password)
except RequestException:
_LOGGER.exception('Error when trying to log in to the Tahoma API')
return False
try:
api.get_setup()
devices = api.get_devices()
scenes = api.get_action_groups()
except RequestException:
_LOGGER.exception('Error when getting devices from the Tahoma API')
return False
hass.data[DOMAIN] = {'controller': api, 'devices': defaultdict(list), 'scenes': []}
for device in devices:
_device = api.get_device(device)
if all(((ext not in _device.type) for ext in exclude)):
device_type = map_tahoma_device(_device)
if (device_type is None):
_LOGGER.warning('Unsupported type %s for Tahoma device %s', _device.type, _device.label)
continue
hass.data[DOMAIN]['devices'][device_type].append(_device)
for scene in scenes:
hass.data[DOMAIN]['scenes'].append(scene)
for component in TAHOMA_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True | Activate Tahoma component. | homeassistant/components/tahoma.py | setup | spacesuitdiver/home-assistant | 2 | python | def setup(hass, config):
from tahoma_api import TahomaApi
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
exclude = conf.get(CONF_EXCLUDE)
try:
api = TahomaApi(username, password)
except RequestException:
_LOGGER.exception('Error when trying to log in to the Tahoma API')
return False
try:
api.get_setup()
devices = api.get_devices()
scenes = api.get_action_groups()
except RequestException:
_LOGGER.exception('Error when getting devices from the Tahoma API')
return False
hass.data[DOMAIN] = {'controller': api, 'devices': defaultdict(list), 'scenes': []}
for device in devices:
_device = api.get_device(device)
if all(((ext not in _device.type) for ext in exclude)):
device_type = map_tahoma_device(_device)
if (device_type is None):
_LOGGER.warning('Unsupported type %s for Tahoma device %s', _device.type, _device.label)
continue
hass.data[DOMAIN]['devices'][device_type].append(_device)
for scene in scenes:
hass.data[DOMAIN]['scenes'].append(scene)
for component in TAHOMA_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True | def setup(hass, config):
from tahoma_api import TahomaApi
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
exclude = conf.get(CONF_EXCLUDE)
try:
api = TahomaApi(username, password)
except RequestException:
_LOGGER.exception('Error when trying to log in to the Tahoma API')
return False
try:
api.get_setup()
devices = api.get_devices()
scenes = api.get_action_groups()
except RequestException:
_LOGGER.exception('Error when getting devices from the Tahoma API')
return False
hass.data[DOMAIN] = {'controller': api, 'devices': defaultdict(list), 'scenes': []}
for device in devices:
_device = api.get_device(device)
if all(((ext not in _device.type) for ext in exclude)):
device_type = map_tahoma_device(_device)
if (device_type is None):
_LOGGER.warning('Unsupported type %s for Tahoma device %s', _device.type, _device.label)
continue
hass.data[DOMAIN]['devices'][device_type].append(_device)
for scene in scenes:
hass.data[DOMAIN]['scenes'].append(scene)
for component in TAHOMA_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True<|docstring|>Activate Tahoma component.<|endoftext|> |
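The device filter above keeps a device only when none of the excluded fragments occurs in its type string. Reduced to plain Python with made-up type strings and a hypothetical exclude list:

```python
exclude = ["RTS"]  # hypothetical CONF_EXCLUDE value
device_types = ["rts:RollerShutterRTSComponent", "io:LightIOSystemSensor"]
kept = [t for t in device_types if all(ext not in t for ext in exclude)]
print(kept)  # ['io:LightIOSystemSensor']
```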
5edb8bc8beb95ab5b221fa16add3007e5e1b208d38c9dad492656a7e2db5c844 | def map_tahoma_device(tahoma_device):
'Map Tahoma device types to Home Assistant components.'
return TAHOMA_TYPES.get(tahoma_device.type) | Map Tahoma device types to Home Assistant components. | homeassistant/components/tahoma.py | map_tahoma_device | spacesuitdiver/home-assistant | 2 | python | def map_tahoma_device(tahoma_device):
return TAHOMA_TYPES.get(tahoma_device.type) | def map_tahoma_device(tahoma_device):
return TAHOMA_TYPES.get(tahoma_device.type)<|docstring|>Map Tahoma device types to Home Assistant components.<|endoftext|> |
dc5139db639a0c2c196549209b77ae5278a667e558ea8b6f5d8fd80a91fa4515 | def __init__(self, tahoma_device, controller):
'Initialize the device.'
self.tahoma_device = tahoma_device
self.controller = controller
self._name = self.tahoma_device.label | Initialize the device. | homeassistant/components/tahoma.py | __init__ | spacesuitdiver/home-assistant | 2 | python | def __init__(self, tahoma_device, controller):
self.tahoma_device = tahoma_device
self.controller = controller
self._name = self.tahoma_device.label | def __init__(self, tahoma_device, controller):
self.tahoma_device = tahoma_device
self.controller = controller
self._name = self.tahoma_device.label<|docstring|>Initialize the device.<|endoftext|> |
959514ff3ad36bfa3d9e493ec80719a74614a7e62c186c5799451855e3d3f810 | @property
def name(self):
'Return the name of the device.'
return self._name | Return the name of the device. | homeassistant/components/tahoma.py | name | spacesuitdiver/home-assistant | 2 | python | @property
def name(self):
return self._name | @property
def name(self):
return self._name<|docstring|>Return the name of the device.<|endoftext|> |
ab5053a293614909330db551f622231e2b11d0748cea457a389b7f78da0c4956 | @property
def device_state_attributes(self):
'Return the state attributes of the device.'
return {'tahoma_device_id': self.tahoma_device.url} | Return the state attributes of the device. | homeassistant/components/tahoma.py | device_state_attributes | spacesuitdiver/home-assistant | 2 | python | @property
def device_state_attributes(self):
return {'tahoma_device_id': self.tahoma_device.url} | @property
def device_state_attributes(self):
return {'tahoma_device_id': self.tahoma_device.url}<|docstring|>Return the state attributes of the device.<|endoftext|> |
accc07b441f37ed65e4e652c6ea4ef6c7adb6c9e0c10b1d44e6b3381c909d6c6 | def apply_action(self, cmd_name, *args):
'Apply Action to Device.'
from tahoma_api import Action
action = Action(self.tahoma_device.url)
action.add_command(cmd_name, *args)
self.controller.apply_actions('HomeAssistant', [action]) | Apply Action to Device. | homeassistant/components/tahoma.py | apply_action | spacesuitdiver/home-assistant | 2 | python | def apply_action(self, cmd_name, *args):
from tahoma_api import Action
action = Action(self.tahoma_device.url)
action.add_command(cmd_name, *args)
self.controller.apply_actions('HomeAssistant', [action]) | def apply_action(self, cmd_name, *args):
from tahoma_api import Action
action = Action(self.tahoma_device.url)
action.add_command(cmd_name, *args)
self.controller.apply_actions('HomeAssistant', [action])<|docstring|>Apply Action to Device.<|endoftext|> |
85d882424c17c603d93835e84692346d50c4a24fde4038ae91137fcc00cb5a70 | @abstractmethod
def _train_epoch(self, epoch):
'\n Training logic for an epoch\n\n :param epoch: Current epoch number\n '
raise NotImplementedError | Training logic for an epoch
:param epoch: Current epoch number | base/base_trainer.py | _train_epoch | dll-ncai/AI-ForestWatch | 2 | python | @abstractmethod
def _train_epoch(self, epoch):
'\n Training logic for an epoch\n\n :param epoch: Current epoch number\n '
raise NotImplementedError | @abstractmethod
def _train_epoch(self, epoch):
'\n Training logic for an epoch\n\n :param epoch: Current epoch number\n '
raise NotImplementedError<|docstring|>Training logic for an epoch
:param epoch: Current epoch number<|endoftext|> |
8af4bfe45ee20e8ae379c5f0cffbbf6eb48e0d9582ff07bdaf814abcf60e0a1d | def train(self):
'\n Full training logic\n '
not_improved_count = 0
for epoch in range(self.start_epoch, (self.epochs + 1)):
result = self._train_epoch(epoch)
log = {'epoch': epoch}
log.update(result)
for (key, value) in log.items():
self.logger.info(' {:15s}: {}'.format(str(key), value))
best = False
if (self.mnt_mode != 'off'):
try:
improved = (((self.mnt_mode == 'min') and (log[self.mnt_metric] <= self.mnt_best)) or ((self.mnt_mode == 'max') and (log[self.mnt_metric] >= self.mnt_best)))
except KeyError:
self.logger.warning("Warning: Metric '{}' is not found. Model performance monitoring is disabled.".format(self.mnt_metric))
self.mnt_mode = 'off'
improved = False
if improved:
self.mnt_best = log[self.mnt_metric]
not_improved_count = 0
best = True
else:
not_improved_count += 1
if (not_improved_count > self.early_stop):
self.logger.info("Validation performance didn't improve for {} epochs. Training stops.".format(self.early_stop))
break
if ((epoch % self.save_period) == 0):
self._save_checkpoint(epoch, save_best=best) | Full training logic | base/base_trainer.py | train | dll-ncai/AI-ForestWatch | 2 | python | def train(self):
'\n \n '
not_improved_count = 0
for epoch in range(self.start_epoch, (self.epochs + 1)):
result = self._train_epoch(epoch)
log = {'epoch': epoch}
log.update(result)
for (key, value) in log.items():
self.logger.info(' {:15s}: {}'.format(str(key), value))
best = False
if (self.mnt_mode != 'off'):
try:
improved = (((self.mnt_mode == 'min') and (log[self.mnt_metric] <= self.mnt_best)) or ((self.mnt_mode == 'max') and (log[self.mnt_metric] >= self.mnt_best)))
except KeyError:
self.logger.warning("Warning: Metric '{}' is not found. Model performance monitoring is disabled.".format(self.mnt_metric))
self.mnt_mode = 'off'
improved = False
if improved:
self.mnt_best = log[self.mnt_metric]
not_improved_count = 0
best = True
else:
not_improved_count += 1
if (not_improved_count > self.early_stop):
self.logger.info("Validation performance didn't improve for {} epochs. Training stops.".format(self.early_stop))
break
if ((epoch % self.save_period) == 0):
self._save_checkpoint(epoch, save_best=best) | def train(self):
'\n \n '
not_improved_count = 0
for epoch in range(self.start_epoch, (self.epochs + 1)):
result = self._train_epoch(epoch)
log = {'epoch': epoch}
log.update(result)
for (key, value) in log.items():
self.logger.info(' {:15s}: {}'.format(str(key), value))
best = False
if (self.mnt_mode != 'off'):
try:
improved = (((self.mnt_mode == 'min') and (log[self.mnt_metric] <= self.mnt_best)) or ((self.mnt_mode == 'max') and (log[self.mnt_metric] >= self.mnt_best)))
except KeyError:
self.logger.warning("Warning: Metric '{}' is not found. Model performance monitoring is disabled.".format(self.mnt_metric))
self.mnt_mode = 'off'
improved = False
if improved:
self.mnt_best = log[self.mnt_metric]
not_improved_count = 0
best = True
else:
not_improved_count += 1
if (not_improved_count > self.early_stop):
self.logger.info("Validation performance didn't improve for {} epochs. Training stops.".format(self.early_stop))
break
if ((epoch % self.save_period) == 0):
self._save_checkpoint(epoch, save_best=best)<|docstring|>Full training logic<|endoftext|> |
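The early-stopping condition above reduces to one comparison whose direction flips with the monitor mode: 'min' treats lower metric values as better (e.g. a loss), 'max' treats higher values as better (e.g. an accuracy). A sketch of just that rule, with example numbers chosen here:

```python
def improved(mnt_mode, current, best):
    # Mirrors the non-strict <= / >= comparison used in train() above.
    return ((mnt_mode == 'min' and current <= best) or
            (mnt_mode == 'max' and current >= best))

print(improved('min', 0.21, 0.25))  # True  - validation loss dropped
print(improved('max', 0.88, 0.91))  # False - accuracy below the best so far
```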
1010dd8d9e78d374e5d48a8fcb65bf6a7391a0a1fc4c2cafb03d1f1a7e2eaac5 | def _save_checkpoint(self, epoch, save_best=False):
"\n Saving checkpoints\n\n :param epoch: current epoch number\n :param log: logging information of the epoch\n :param save_best: if True, rename the saved checkpoint to 'model_best.pth'\n "
arch = type(self.model).__name__
state = {'arch': arch, 'epoch': epoch, 'state_dict': self.model.state_dict(), 'optimizer': self.optimizer.state_dict(), 'monitor_best': self.mnt_best, 'config': self.config}
filename = str((self.checkpoint_dir / 'checkpoint-epoch{}.pth'.format(epoch)))
torch.save(state, filename)
self.logger.info('Saving checkpoint: {} ...'.format(filename))
if save_best:
best_path = str((self.checkpoint_dir / 'model_best.pth'))
torch.save(state, best_path)
self.logger.info('Saving current best: model_best.pth ...') | Saving checkpoints
:param epoch: current epoch number
:param log: logging information of the epoch
:param save_best: if True, rename the saved checkpoint to 'model_best.pth' | base/base_trainer.py | _save_checkpoint | dll-ncai/AI-ForestWatch | 2 | python | def _save_checkpoint(self, epoch, save_best=False):
"\n Saving checkpoints\n\n :param epoch: current epoch number\n :param log: logging information of the epoch\n :param save_best: if True, rename the saved checkpoint to 'model_best.pth'\n "
arch = type(self.model).__name__
state = {'arch': arch, 'epoch': epoch, 'state_dict': self.model.state_dict(), 'optimizer': self.optimizer.state_dict(), 'monitor_best': self.mnt_best, 'config': self.config}
filename = str((self.checkpoint_dir / 'checkpoint-epoch{}.pth'.format(epoch)))
torch.save(state, filename)
self.logger.info('Saving checkpoint: {} ...'.format(filename))
if save_best:
best_path = str((self.checkpoint_dir / 'model_best.pth'))
torch.save(state, best_path)
self.logger.info('Saving current best: model_best.pth ...') | def _save_checkpoint(self, epoch, save_best=False):
"\n Saving checkpoints\n\n :param epoch: current epoch number\n :param log: logging information of the epoch\n :param save_best: if True, rename the saved checkpoint to 'model_best.pth'\n "
arch = type(self.model).__name__
state = {'arch': arch, 'epoch': epoch, 'state_dict': self.model.state_dict(), 'optimizer': self.optimizer.state_dict(), 'monitor_best': self.mnt_best, 'config': self.config}
filename = str((self.checkpoint_dir / 'checkpoint-epoch{}.pth'.format(epoch)))
torch.save(state, filename)
self.logger.info('Saving checkpoint: {} ...'.format(filename))
if save_best:
best_path = str((self.checkpoint_dir / 'model_best.pth'))
torch.save(state, best_path)
self.logger.info('Saving current best: model_best.pth ...')<|docstring|>Saving checkpoints
:param epoch: current epoch number
:param log: logging information of the epoch
:param save_best: if True, rename the saved checkpoint to 'model_best.pth'<|endoftext|> |
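For reference, a checkpoint written by the method above can be read back with `torch.load`; this sketch assumes a matching `model` and `optimizer` already exist, and the checkpoint path is hypothetical:

```python
import torch

checkpoint = torch.load("saved/checkpoint-epoch10.pth")  # hypothetical path
print(checkpoint["arch"], checkpoint["epoch"], checkpoint["monitor_best"])
model.load_state_dict(checkpoint["state_dict"])      # assumes `model` exists
optimizer.load_state_dict(checkpoint["optimizer"])   # assumes `optimizer` exists
```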
af3f372c9b3974ed607b0f56f02ad89e2fd386c3cb85be38cf6d8044d10650c2 | def _resume_checkpoint(self, resume_path):
'\n Resume from saved checkpoints\n\n :param resume_path: Checkpoint path to be resumed\n '
resume_path = str(resume_path)
self.logger.info('Loading checkpoint: {} ...'.format(resume_path))
checkpoint = torch.load(resume_path)
if (not ('epoch' in checkpoint)):
self.model.load_state_dict(torch.load(resume_path), strict=False)
else:
self.start_epoch = (checkpoint['epoch'] + 1)
self.mnt_best = checkpoint['monitor_best']
if (checkpoint['config']['arch'] != self.config['arch']):
self.logger.warning('Warning: Architecture configuration given in config file is different from that of checkpoint. This may yield an exception while state_dict is being loaded.')
self.model.load_state_dict(checkpoint['state_dict'])
if (checkpoint['config']['optimizer']['type'] != self.config['optimizer']['type']):
self.logger.warning('Warning: Optimizer type given in config file is different from that of checkpoint. Optimizer parameters not being resumed.')
else:
self.optimizer.load_state_dict(checkpoint['optimizer'])
self.logger.info('Checkpoint loaded. Resume training from epoch {}'.format(self.start_epoch)) | Resume from saved checkpoints
:param resume_path: Checkpoint path to be resumed | base/base_trainer.py | _resume_checkpoint | dll-ncai/AI-ForestWatch | 2 | python | def _resume_checkpoint(self, resume_path):
'\n Resume from saved checkpoints\n\n :param resume_path: Checkpoint path to be resumed\n '
resume_path = str(resume_path)
self.logger.info('Loading checkpoint: {} ...'.format(resume_path))
checkpoint = torch.load(resume_path)
if (not ('epoch' in checkpoint)):
self.model.load_state_dict(torch.load(resume_path), strict=False)
else:
self.start_epoch = (checkpoint['epoch'] + 1)
self.mnt_best = checkpoint['monitor_best']
if (checkpoint['config']['arch'] != self.config['arch']):
self.logger.warning('Warning: Architecture configuration given in config file is different from that of checkpoint. This may yield an exception while state_dict is being loaded.')
self.model.load_state_dict(checkpoint['state_dict'])
if (checkpoint['config']['optimizer']['type'] != self.config['optimizer']['type']):
self.logger.warning('Warning: Optimizer type given in config file is different from that of checkpoint. Optimizer parameters not being resumed.')
else:
self.optimizer.load_state_dict(checkpoint['optimizer'])
self.logger.info('Checkpoint loaded. Resume training from epoch {}'.format(self.start_epoch)) | def _resume_checkpoint(self, resume_path):
'\n Resume from saved checkpoints\n\n :param resume_path: Checkpoint path to be resumed\n '
resume_path = str(resume_path)
self.logger.info('Loading checkpoint: {} ...'.format(resume_path))
checkpoint = torch.load(resume_path)
if (not ('epoch' in checkpoint)):
self.model.load_state_dict(torch.load(resume_path), strict=False)
else:
self.start_epoch = (checkpoint['epoch'] + 1)
self.mnt_best = checkpoint['monitor_best']
if (checkpoint['config']['arch'] != self.config['arch']):
self.logger.warning('Warning: Architecture configuration given in config file is different from that of checkpoint. This may yield an exception while state_dict is being loaded.')
self.model.load_state_dict(checkpoint['state_dict'])
if (checkpoint['config']['optimizer']['type'] != self.config['optimizer']['type']):
self.logger.warning('Warning: Optimizer type given in config file is different from that of checkpoint. Optimizer parameters not being resumed.')
else:
self.optimizer.load_state_dict(checkpoint['optimizer'])
self.logger.info('Checkpoint loaded. Resume training from epoch {}'.format(self.start_epoch))<|docstring|>Resume from saved checkpoints
:param resume_path: Checkpoint path to be resumed<|endoftext|> |
7ffc895412f4ab1d780b805109ba038f0c87a49f084f5f0208162863ef84f70e | @classmethod
def _from_json_data(cls, client: 'BotClient', json_data: Mapping[(str, Any)]) -> 'Role':
'Converts JSON data received from the API into a valid :class:`Role` instance. For internal use only.\n\n See Also:\n :meth:`JsonAPIModel.from_json_data`\n\n Examples:\n .. doctest::\n\n >>> from serpcord.utils.model import compare_attributes\n >>> role_data = {\n ... "id": "41771983423143936",\n ... "name": "WE DEM BOYZZ!!!!!!",\n ... "color": 3447003,\n ... "hoist": True,\n ... "icon": "cf3ced8600b777c9486c6d8d84fb4327",\n ... "unicode_emoji": None,\n ... "position": 1,\n ... "permissions": "66321471",\n ... "managed": False,\n ... "mentionable": False\n ... }\n >>> role = Role._from_json_data(client, role_data)\n >>> compare_attributes(\n ... role,\n ... Role(\n ... client, Snowflake(41771983423143936), name="WE DEM BOYZZ!!!!!!",\n ... color_int=3447003, is_hoisted=True, icon_hash="cf3ced8600b777c9486c6d8d84fb4327",\n ... unicode_emoji=None, position=1, permissions=PermissionFlags(66321471),\n ... is_managed=False, is_mentionable=False\n ... )\n ... )\n True\n '
return _init_model_from_mapping_json_data(cls, client, json_data, rename=dict(id='roleid', color='color_int', hoist='is_hoisted', icon='icon_hash', managed='is_managed', mentionable='is_mentionable'), type_check_types=True) | Converts JSON data received from the API into a valid :class:`Role` instance. For internal use only.
See Also:
:meth:`JsonAPIModel.from_json_data`
Examples:
.. doctest::
>>> from serpcord.utils.model import compare_attributes
>>> role_data = {
... "id": "41771983423143936",
... "name": "WE DEM BOYZZ!!!!!!",
... "color": 3447003,
... "hoist": True,
... "icon": "cf3ced8600b777c9486c6d8d84fb4327",
... "unicode_emoji": None,
... "position": 1,
... "permissions": "66321471",
... "managed": False,
... "mentionable": False
... }
>>> role = Role._from_json_data(client, role_data)
>>> compare_attributes(
... role,
... Role(
... client, Snowflake(41771983423143936), name="WE DEM BOYZZ!!!!!!",
... color_int=3447003, is_hoisted=True, icon_hash="cf3ced8600b777c9486c6d8d84fb4327",
... unicode_emoji=None, position=1, permissions=PermissionFlags(66321471),
... is_managed=False, is_mentionable=False
... )
... )
True | serpcord/models/permissions.py | _from_json_data | PgBiel/serpcord | 0 | python | @classmethod
def _from_json_data(cls, client: 'BotClient', json_data: Mapping[(str, Any)]) -> 'Role':
'Converts JSON data received from the API into a valid :class:`Role` instance. For internal use only.\n\n See Also:\n :meth:`JsonAPIModel.from_json_data`\n\n Examples:\n .. doctest::\n\n >>> from serpcord.utils.model import compare_attributes\n >>> role_data = {\n ... "id": "41771983423143936",\n ... "name": "WE DEM BOYZZ!!!!!!",\n ... "color": 3447003,\n ... "hoist": True,\n ... "icon": "cf3ced8600b777c9486c6d8d84fb4327",\n ... "unicode_emoji": None,\n ... "position": 1,\n ... "permissions": "66321471",\n ... "managed": False,\n ... "mentionable": False\n ... }\n >>> role = Role._from_json_data(client, role_data)\n >>> compare_attributes(\n ... role,\n ... Role(\n ... client, Snowflake(41771983423143936), name="WE DEM BOYZZ!!!!!!",\n ... color_int=3447003, is_hoisted=True, icon_hash="cf3ced8600b777c9486c6d8d84fb4327",\n ... unicode_emoji=None, position=1, permissions=PermissionFlags(66321471),\n ... is_managed=False, is_mentionable=False\n ... )\n ... )\n True\n '
return _init_model_from_mapping_json_data(cls, client, json_data, rename=dict(id='roleid', color='color_int', hoist='is_hoisted', icon='icon_hash', managed='is_managed', mentionable='is_mentionable'), type_check_types=True) | @classmethod
def _from_json_data(cls, client: 'BotClient', json_data: Mapping[(str, Any)]) -> 'Role':
'Converts JSON data received from the API into a valid :class:`Role` instance. For internal use only.\n\n See Also:\n :meth:`JsonAPIModel.from_json_data`\n\n Examples:\n .. doctest::\n\n >>> from serpcord.utils.model import compare_attributes\n >>> role_data = {\n ... "id": "41771983423143936",\n ... "name": "WE DEM BOYZZ!!!!!!",\n ... "color": 3447003,\n ... "hoist": True,\n ... "icon": "cf3ced8600b777c9486c6d8d84fb4327",\n ... "unicode_emoji": None,\n ... "position": 1,\n ... "permissions": "66321471",\n ... "managed": False,\n ... "mentionable": False\n ... }\n >>> role = Role._from_json_data(client, role_data)\n >>> compare_attributes(\n ... role,\n ... Role(\n ... client, Snowflake(41771983423143936), name="WE DEM BOYZZ!!!!!!",\n ... color_int=3447003, is_hoisted=True, icon_hash="cf3ced8600b777c9486c6d8d84fb4327",\n ... unicode_emoji=None, position=1, permissions=PermissionFlags(66321471),\n ... is_managed=False, is_mentionable=False\n ... )\n ... )\n True\n '
return _init_model_from_mapping_json_data(cls, client, json_data, rename=dict(id='roleid', color='color_int', hoist='is_hoisted', icon='icon_hash', managed='is_managed', mentionable='is_mentionable'), type_check_types=True)<|docstring|>Converts JSON data received from the API into a valid :class:`Role` instance. For internal use only.
See Also:
:meth:`JsonAPIModel.from_json_data`
Examples:
.. doctest::
>>> from serpcord.utils.model import compare_attributes
>>> role_data = {
... "id": "41771983423143936",
... "name": "WE DEM BOYZZ!!!!!!",
... "color": 3447003,
... "hoist": True,
... "icon": "cf3ced8600b777c9486c6d8d84fb4327",
... "unicode_emoji": None,
... "position": 1,
... "permissions": "66321471",
... "managed": False,
... "mentionable": False
... }
>>> role = Role._from_json_data(client, role_data)
>>> compare_attributes(
... role,
... Role(
... client, Snowflake(41771983423143936), name="WE DEM BOYZZ!!!!!!",
... color_int=3447003, is_hoisted=True, icon_hash="cf3ced8600b777c9486c6d8d84fb4327",
... unicode_emoji=None, position=1, permissions=PermissionFlags(66321471),
... is_managed=False, is_mentionable=False
... )
... )
True<|endoftext|> |
cc5168e9137a6ea6586f536f0ea34c8c672f5e8fc97d9d4ddf16f2df0f060e63 | def ip_int_to_str(val):
'\n    That function takes a 0..2**32 integer and converts it into a string IP\n    address.\n    For example: 16909060 aka (1<<24)+(2<<16)+(3<<8)+4 will return 1.2.3.4\n    '
if (not isinstance(val, int)):
raise TypeError('ip_int_to_str expects a number')
if ((val < 0) or (val >= (1 << 32))):
raise ValueError('Out of range')
return '{}.{}.{}.{}'.format(((val >> 24) & 255), ((val >> 16) & 255), ((val >> 8) & 255), (val & 255)) | That function takes a 0..2**32 integer and converts it into a string IP
address.
For example: 16909060 aka (1<<24)+(2<<16)+(3<<8)+4 will return 1.2.3.4 | extensions/vpn/management/commands/vpnconfig.py | ip_int_to_str | nirgal/ngw | 0 | python | def ip_int_to_str(val):
'\n    That function takes a 0..2**32 integer and converts it into a string IP\n    address.\n    For example: 16909060 aka (1<<24)+(2<<16)+(3<<8)+4 will return 1.2.3.4\n    '
if (not isinstance(val, int)):
raise TypeError('ip_int_to_str expects a number')
if ((val < 0) or (val >= (1 << 32))):
raise ValueError('Out of range')
return '{}.{}.{}.{}'.format(((val >> 24) & 255), ((val >> 16) & 255), ((val >> 8) & 255), (val & 255)) | def ip_int_to_str(val):
'\n    That function takes a 0..2**32 integer and converts it into a string IP\n    address.\n    For example: 16909060 aka (1<<24)+(2<<16)+(3<<8)+4 will return 1.2.3.4\n    '
if (not isinstance(val, int)):
raise TypeError('ip_int_to_str expects a number')
if ((val < 0) or (val >= (1 << 32))):
raise ValueError('Out of range')
return '{}.{}.{}.{}'.format(((val >> 24) & 255), ((val >> 16) & 255), ((val >> 8) & 255), (val & 255))<|docstring|>That function takes a 0..2**32 integer and converts it into a string IP
address.
For example: 16909060 aka (1<<24)+(2<<16)+(3<<8)+4 will return 1.2.3.4<|endoftext|> |
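The shift/mask arithmetic above, worked through for the docstring's own example value:

```python
val = (1 << 24) + (2 << 16) + (3 << 8) + 4   # 16909060
print((val >> 24) & 255, (val >> 16) & 255, (val >> 8) & 255, val & 255)
# 1 2 3 4
print('{}.{}.{}.{}'.format((val >> 24) & 255, (val >> 16) & 255,
                           (val >> 8) & 255, val & 255))
# 1.2.3.4
```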
c4301f9d6af2ba79e428114e164e31a289dffe5284fcb8b302d80201ef0d822f | def ip_str_to_int(val):
'\n    That function takes a string with an IP address and converts it to an integer\n    For example: 1.2.3.4 returns 16909060 aka (1<<24)+(2<<16)+(3<<8)+4\n    '
split = re.fullmatch('(\\d+)\\.(\\d+)\\.(\\d+)\\.(\\d+)', val)
if (split is None):
raise ValueError('Not an IP address')
result = 0
for idx in range(1, 5):
frag = int(split.group(idx))
if ((frag < 0) or (frag > 255)):
raise ValueError('Out of range')
result = ((result << 8) + frag)
return result | That function takes a string with an IP address and converts it to an integer
For example: 1.2.3.4 returns 16909060 aka (1<<24)+(2<<16)+(3<<8)+4 | extensions/vpn/management/commands/vpnconfig.py | ip_str_to_int | nirgal/ngw | 0 | python | def ip_str_to_int(val):
'\n    That function takes a string with an IP address and converts it to an integer\n    For example: 1.2.3.4 returns 16909060 aka (1<<24)+(2<<16)+(3<<8)+4\n    '
split = re.fullmatch('(\\d+)\\.(\\d+)\\.(\\d+)\\.(\\d+)', val)
if (split is None):
raise ValueError('Not an IP address')
result = 0
for idx in range(1, 5):
frag = int(split.group(idx))
if ((frag < 0) or (frag > 255)):
raise ValueError('Out of range')
result = ((result << 8) + frag)
return result | def ip_str_to_int(val):
'\n    That function takes a string with an IP address and converts it to an integer\n    For example: 1.2.3.4 returns 16909060 aka (1<<24)+(2<<16)+(3<<8)+4\n    '
split = re.fullmatch('(\\d+)\\.(\\d+)\\.(\\d+)\\.(\\d+)', val)
if (split is None):
raise ValueError('Not an IP address')
result = 0
for idx in range(1, 5):
frag = int(split.group(idx))
if ((frag < 0) or (frag > 255)):
raise ValueError('Out of range')
result = ((result << 8) + frag)
return result<|docstring|>That function takes a string with a IP address and converts it to a integer
For example: 1.2.3.4 returns 16909060 aka (1<<24)+(2<<16)+(3<<8)+4<|endoftext|> |
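And the inverse accumulation, traced octet by octet for '1.2.3.4': each step shifts the running result left by 8 bits and adds the next fragment.

```python
result = 0
for frag in (1, 2, 3, 4):
    result = (result << 8) + frag
    print(result)
# 1
# 258       (1*256 + 2)
# 66051     (258*256 + 3)
# 16909060  (66051*256 + 4)
```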
b2d4384dd720c2582290e8a4a5b84951d57e914730008261305347a7fcc3da03 | def emoji_of_status(status: str) -> str:
'Returns the emoji associated with a docker container status.\n\n    The emojis are as follows:\n    * ``exited``: ⏹,\n    * ``paused``: ⏸,\n    * ``restarting``: ↩,\n    * ``running``: ▶,\n    * otherwise: ❓.\n    '
return {'exited': '⏹', 'paused': '⏸', 'restarting': '↩', 'running': '▶'}.get(status, '❓') | Returns the emoji associated with a docker container status.
The emojis are as follows:
* ``exited``: ⏹,
* ``paused``: ⏸,
* ``restarting``: ↩,
* ``running``: ▶,
* otherwise: ❓. | src/docker_utils.py | emoji_of_status | altaris/docker-telegram-bot | 0 | python | def emoji_of_status(status: str) -> str:
'Returns the emoji associated with a docker container status.\n\n    The emojis are as follows:\n    * ``exited``: ⏹,\n    * ``paused``: ⏸,\n    * ``restarting``: ↩,\n    * ``running``: ▶,\n    * otherwise: ❓.\n    '
return {'exited': '⏹', 'paused': '⏸', 'restarting': '↩', 'running': '▶'}.get(status, '❓') | def emoji_of_status(status: str) -> str:
'Returns the emoji associated with a docker container status.\n\n    The emojis are as follows:\n    * ``exited``: ⏹,\n    * ``paused``: ⏸,\n    * ``restarting``: ↩,\n    * ``running``: ▶,\n    * otherwise: ❓.\n    '
return {'exited': '⏹', 'paused': '⏸', 'restarting': '↩', 'running': '▶'}.get(status, '❓')<|docstring|>Returns the emoji associated with a docker container status.
The emojis are as follows:
* ``exited``: ⏹,
* ``paused``: ⏸,
* ``restarting``: ↩,
* ``running``: ▶,
* otherwise: ❓.<|endoftext|> |
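Illustrative lookups against the mapping above; 'created' is a real Docker status that this function deliberately routes to the fallback:

```python
for status in ("running", "exited", "created"):
    print(status, emoji_of_status(status))
# running ▶
# exited ⏹
# created ❓   (any status outside the four mapped ones falls back)
```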
881d7084d3cff91221a61c2c6807e9a402c9e3a709437d7d2e07a14c43467a62 | def get_container(self, container_name: str) -> Optional[Container]:
'Gets a container.\n\n    If the container does not exist, returns ``None`` and reports the error.\n    '
container = None
try:
container = self.docker_client.containers.get(container_name)
except docker.errors.NotFound:
self.reply_error(f'Container "{container_name}" not found.')
return container | Gets a container.
If the container does not exist, returns ``None`` and reports the error. | src/docker_utils.py | get_container | altaris/docker-telegram-bot | 0 | python | def get_container(self, container_name: str) -> Optional[Container]:
'Gets a container.\n\n    If the container does not exist, returns ``None`` and reports the error.\n    '
container = None
try:
container = self.docker_client.containers.get(container_name)
except docker.errors.NotFound:
self.reply_error(f'Container "{container_name}" not found.')
return container | def get_container(self, container_name: str) -> Optional[Container]:
'Gets a container.\n\n    If the container does not exist, returns ``None`` and reports the error.\n    '
container = None
try:
container = self.docker_client.containers.get(container_name)
except docker.errors.NotFound:
self.reply_error(f'Container "{container_name}" not found.')
return container<|docstring|>Gets a container.
If the container does not exist, returns ``None`` and reports the error.<|endoftext|>
3cf41b45c5b1303c57c2aa866b1fabe58cf9249fb2309e54b6bdcf1784b3c50a | @property
def docker_client(self) -> DockerClient:
'Returns the ``docker.DockerClient`` of this command.\n '
client = self._args_dict.get('docker_client', None)
if (not isinstance(client, DockerClient)):
raise ValueError('A DockerCommand must have a DockerClient as default value for key "docker_client"')
return client | Returns the ``docker.DockerClient`` of this command. | src/docker_utils.py | docker_client | altaris/docker-telegram-bot | 0 | python | @property
def docker_client(self) -> DockerClient:
'\n '
client = self._args_dict.get('docker_client', None)
if (not isinstance(client, DockerClient)):
raise ValueError('A DockerCommand must have a DockerClient as default value for key "docker_client"')
return client | @property
def docker_client(self) -> DockerClient:
'\n '
client = self._args_dict.get('docker_client', None)
if (not isinstance(client, DockerClient)):
raise ValueError('A DockerCommand must have a DockerClient as default value for key "docker_client"')
return client<|docstring|>Returns the ``docker.DockerClient`` of this command.<|endoftext|> |
fe2a8863acd6d7cd5fe9a1854f86aa5302425404340cb318bcdea51cafac9260 | def _streams(source):
'Reimplemented from otio burnins to be able to use a full path to ffprobe\n    :param str source: source media file\n    :rtype: [{}, ...]\n    '
command = (FFPROBE % {'source': source})
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
out = proc.communicate()[0]
if (proc.returncode != 0):
raise RuntimeError(('Failed to run: %s' % command))
return json.loads(out)['streams'] | Reimplemented from otio burnins to be able to use a full path to ffprobe
:param str source: source media file
:rtype: [{}, ...] | pype/scripts/otio_burnin.py | _streams | tokejepsen/pype | 0 | python | def _streams(source):
'Reimplemented from otio burnins to be able to use a full path to ffprobe\n    :param str source: source media file\n    :rtype: [{}, ...]\n    '
command = (FFPROBE % {'source': source})
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
out = proc.communicate()[0]
if (proc.returncode != 0):
raise RuntimeError(('Failed to run: %s' % command))
return json.loads(out)['streams'] | def _streams(source):
'Reimplemented from otio burnins to be able to use a full path to ffprobe\n    :param str source: source media file\n    :rtype: [{}, ...]\n    '
command = (FFPROBE % {'source': source})
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
out = proc.communicate()[0]
if (proc.returncode != 0):
raise RuntimeError(('Failed to run: %s' % command))
return json.loads(out)['streams']<|docstring|>Reimplemented from otio burnins to be able to use a full path to ffprobe
:param str source: source media file
:rtype: [{}, ...]<|endoftext|> |
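Possible use of the helper above; the media path is hypothetical, and the keys read are standard ffprobe stream fields:

```python
streams = _streams("/tmp/review.mov")  # hypothetical source file
for stream in streams:
    print(stream.get("codec_type"), stream.get("width"), stream.get("height"))
```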
7608ee337bd4e2468b7581f0d2cf0fa5b6d9c719e5272c7412852a581c126eca | def burnins_from_data(input_path, output_path, data, codec_data=None, overwrite=True):
'\n    This method adds burnins to a video/image file based on preset settings.\n    Extension of output MUST be the same as input. (mov -> mov, avi -> avi,...)\n\n    :param input_path: full path to input file where burnins should be added\n    :type input_path: str\n    :param codec_data: all codec related arguments in list\n    :type codec_data: list\n    :param output_path: full path to output file where output will be rendered\n    :type output_path: str\n    :param data: data required for burnin settings (more info below)\n    :type data: dict\n    :param overwrite: output will be overridden if already exists, defaults to True\n    :type overwrite: bool\n\n    Presets must be set separately. Should be dict with 2 keys:\n    - "options" - sets look of burnins - colors, opacity,...(more info: ModifiedBurnins doc)\n    - *OPTIONAL* default values are used when not included\n    - "burnins" - contains dictionary with burnins settings\n    - *OPTIONAL* burnins won\'t be added (easier is not to use this)\n    - each key of "burnins" represents Alignment, there are 6 possibilities:\n    TOP_LEFT TOP_CENTERED TOP_RIGHT\n    BOTTOM_LEFT BOTTOM_CENTERED BOTTOM_RIGHT\n    - value must be string with text you want to burn-in\n    - text may contain specific formatting keys (explained below)\n\n    Requirement of *data* keys is based on presets.\n    - "frame_start" - is required when "timecode" or "current_frame" is in keys\n    - "frame_start_tc" - when "timecode" should start with different frame\n    - *keys for static text*\n\n    EXAMPLE:\n    preset = {\n        "options": {*OPTIONS FOR LOOK*},\n        "burnins": {\n            "TOP_LEFT": "static_text",\n            "TOP_RIGHT": "{shot}",\n            "BOTTOM_LEFT": "TC: {timecode}",\n            "BOTTOM_RIGHT": "{frame_start}{current_frame}"\n        }\n    }\n\n    For this preset we\'ll need at least this data:\n    data = {\n        "frame_start": 1001,\n        "shot": "sh0010"\n    }\n\n    When Timecode should start from 1 then data needs:\n    data = {\n        "frame_start": 1001,\n        "frame_start_tc": 1,\n        "shot": "sh0010"\n    }\n    '
presets = config.get_presets().get('tools', {}).get('burnins', {})
options_init = presets.get('options')
burnin = ModifiedBurnins(input_path, options_init=options_init)
frame_start = data.get('frame_start')
frame_end = data.get('frame_end')
frame_start_tc = data.get('frame_start_tc', frame_start)
stream = burnin._streams[0]
if ('resolution_width' not in data):
data['resolution_width'] = stream.get('width', MISSING_KEY_VALUE)
if ('resolution_height' not in data):
data['resolution_height'] = stream.get('height', MISSING_KEY_VALUE)
if ('fps' not in data):
data['fps'] = get_fps(stream.get('r_frame_rate', '0/0'))
if (frame_start is not None):
data[CURRENT_FRAME_KEY[1:(- 1)]] = CURRENT_FRAME_SPLITTER
if (frame_start_tc is not None):
data[TIMECODE_KEY[1:(- 1)]] = TIMECODE_KEY
source_timecode = stream.get('timecode')
if (source_timecode is None):
source_timecode = stream.get('tags', {}).get('timecode')
if (source_timecode is not None):
data[SOURCE_TIMECODE_KEY[1:(- 1)]] = SOURCE_TIMECODE_KEY
for (align_text, value) in presets.get('burnins', {}).items():
if (not value):
continue
if isinstance(value, (dict, list, tuple)):
raise TypeError('Expected string or number type. Got: {} - "{}" (Make sure you have new burnin presets).'.format(str(type(value)), str(value)))
align = None
align_text = align_text.strip().lower()
if (align_text == 'top_left'):
align = ModifiedBurnins.TOP_LEFT
elif (align_text == 'top_centered'):
align = ModifiedBurnins.TOP_CENTERED
elif (align_text == 'top_right'):
align = ModifiedBurnins.TOP_RIGHT
elif (align_text == 'bottom_left'):
align = ModifiedBurnins.BOTTOM_LEFT
elif (align_text == 'bottom_centered'):
align = ModifiedBurnins.BOTTOM_CENTERED
elif (align_text == 'bottom_right'):
align = ModifiedBurnins.BOTTOM_RIGHT
has_timecode = (TIMECODE_KEY in value)
if ((frame_start_tc is None) and has_timecode):
has_timecode = False
log.warning('`frame_start` and `frame_start_tc` are not set in entered data.')
value = value.replace(TIMECODE_KEY, MISSING_KEY_VALUE)
has_source_timecode = (SOURCE_TIMECODE_KEY in value)
if ((source_timecode is None) and has_source_timecode):
has_source_timecode = False
log.warning('Source does not have set timecode value.')
value = value.replace(SOURCE_TIMECODE_KEY, MISSING_KEY_VALUE)
key_pattern = re.compile('(\\{.*?[^{0]*\\})')
missing_keys = []
for group in key_pattern.findall(value):
try:
group.format(**data)
except (TypeError, KeyError):
missing_keys.append(group)
missing_keys = list(set(missing_keys))
for key in missing_keys:
value = value.replace(key, MISSING_KEY_VALUE)
if has_source_timecode:
args = [align, frame_start, frame_end, source_timecode]
if (not value.startswith(SOURCE_TIMECODE_KEY)):
value_items = value.split(SOURCE_TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
burnin.add_timecode(*args)
continue
if has_timecode:
args = [align, frame_start, frame_end, frame_start_tc]
if (not value.startswith(TIMECODE_KEY)):
value_items = value.split(TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
burnin.add_timecode(*args)
continue
text = value.format(**data)
burnin.add_text(text, align, frame_start, frame_end)
codec_args = ''
if codec_data:
codec_args = ' '.join(codec_data)
burnin.render(output_path, args=codec_args, overwrite=overwrite, **data) | This method adds burnins to a video/image file based on preset settings.
Extension of output MUST be the same as input. (mov -> mov, avi -> avi,...)
:param input_path: full path to input file where burnins should be added
:type input_path: str
:param codec_data: all codec related arguments in list
:type codec_data: list
:param output_path: full path to output file where output will be rendered
:type output_path: str
:param data: data required for burnin settings (more info below)
:type data: dict
:param overwrite: output will be overridden if already exists, defaults to True
:type overwrite: bool
Presets must be set separately. Should be dict with 2 keys:
- "options" - sets look of burnins - colors, opacity,...(more info: ModifiedBurnins doc)
- *OPTIONAL* default values are used when not included
- "burnins" - contains dictionary with burnins settings
- *OPTIONAL* burnins won't be added (easier is not to use this)
- each key of "burnins" represents Alignment, there are 6 possibilities:
TOP_LEFT TOP_CENTERED TOP_RIGHT
BOTTOM_LEFT BOTTOM_CENTERED BOTTOM_RIGHT
- value must be string with text you want to burn-in
- text may contain specific formatting keys (explained below)
Requirement of *data* keys is based on presets.
- "frame_start" - is required when "timecode" or "current_frame" ins keys
- "frame_start_tc" - when "timecode" should start with different frame
- *keys for static text*
EXAMPLE:
preset = {
"options": {*OPTIONS FOR LOOK*},
"burnins": {
"TOP_LEFT": "static_text",
"TOP_RIGHT": "{shot}",
"BOTTOM_LEFT": "TC: {timecode}",
"BOTTOM_RIGHT": "{frame_start}{current_frame}"
}
}
For this preset we'll need at least this data:
data = {
"frame_start": 1001,
"shot": "sh0010"
}
When Timecode should start from 1 then data needs:
data = {
"frame_start": 1001,
"frame_start_tc": 1,
"shot": "sh0010"
} | pype/scripts/otio_burnin.py | burnins_from_data | tokejepsen/pype | 0 | python | def burnins_from_data(input_path, output_path, data, codec_data=None, overwrite=True):
'\n This method adds burnins to video/image file based on presets setting.\n Extension of output MUST be same as input. (mov -> mov, avi -> avi,...)\n\n :param input_path: full path to input file where burnins should be add\n :type input_path: str\n :param codec_data: all codec related arguments in list\n :param codec_data: list\n :param output_path: full path to output file where output will be rendered\n :type output_path: str\n :param data: data required for burnin settings (more info below)\n :type data: dict\n :param overwrite: output will be overriden if already exists, defaults to True\n :type overwrite: bool\n\n Presets must be set separately. Should be dict with 2 keys:\n - "options" - sets look of burnins - colors, opacity,...(more info: ModifiedBurnins doc)\n - *OPTIONAL* default values are used when not included\n - "burnins" - contains dictionary with burnins settings\n - *OPTIONAL* burnins won\'t be added (easier is not to use this)\n - each key of "burnins" represents Alignment, there are 6 possibilities:\n TOP_LEFT TOP_CENTERED TOP_RIGHT\n BOTTOM_LEFT BOTTOM_CENTERED BOTTOM_RIGHT\n - value must be string with text you want to burn-in\n - text may contain specific formatting keys (exmplained below)\n\n Requirement of *data* keys is based on presets.\n - "frame_start" - is required when "timecode" or "current_frame" ins keys\n - "frame_start_tc" - when "timecode" should start with different frame\n - *keys for static text*\n\n EXAMPLE:\n preset = {\n "options": {*OPTIONS FOR LOOK*},\n "burnins": {\n "TOP_LEFT": "static_text",\n "TOP_RIGHT": "{shot}",\n "BOTTOM_LEFT": "TC: {timecode}",\n "BOTTOM_RIGHT": "{frame_start}{current_frame}"\n }\n }\n\n For this preset we\'ll need at least this data:\n data = {\n "frame_start": 1001,\n "shot": "sh0010"\n }\n\n When Timecode should start from 1 then data need:\n data = {\n "frame_start": 1001,\n "frame_start_tc": 1,\n "shot": "sh0010"\n }\n '
presets = config.get_presets().get('tools', {}).get('burnins', {})
options_init = presets.get('options')
burnin = ModifiedBurnins(input_path, options_init=options_init)
frame_start = data.get('frame_start')
frame_end = data.get('frame_end')
frame_start_tc = data.get('frame_start_tc', frame_start)
stream = burnin._streams[0]
if ('resolution_width' not in data):
data['resolution_width'] = stream.get('width', MISSING_KEY_VALUE)
if ('resolution_height' not in data):
data['resolution_height'] = stream.get('height', MISSING_KEY_VALUE)
if ('fps' not in data):
data['fps'] = get_fps(stream.get('r_frame_rate', '0/0'))
if (frame_start is not None):
data[CURRENT_FRAME_KEY[1:(- 1)]] = CURRENT_FRAME_SPLITTER
if (frame_start_tc is not None):
data[TIMECODE_KEY[1:(- 1)]] = TIMECODE_KEY
source_timecode = stream.get('timecode')
if (source_timecode is None):
source_timecode = stream.get('tags', {}).get('timecode')
if (source_timecode is not None):
data[SOURCE_TIMECODE_KEY[1:(- 1)]] = SOURCE_TIMECODE_KEY
for (align_text, value) in presets.get('burnins', {}).items():
if (not value):
continue
if isinstance(value, (dict, list, tuple)):
raise TypeError('Expected string or number type. Got: {} - "{}" (Make sure you have new burnin presets).'.format(str(type(value)), str(value)))
align = None
align_text = align_text.strip().lower()
if (align_text == 'top_left'):
align = ModifiedBurnins.TOP_LEFT
elif (align_text == 'top_centered'):
align = ModifiedBurnins.TOP_CENTERED
elif (align_text == 'top_right'):
align = ModifiedBurnins.TOP_RIGHT
elif (align_text == 'bottom_left'):
align = ModifiedBurnins.BOTTOM_LEFT
elif (align_text == 'bottom_centered'):
align = ModifiedBurnins.BOTTOM_CENTERED
elif (align_text == 'bottom_right'):
align = ModifiedBurnins.BOTTOM_RIGHT
has_timecode = (TIMECODE_KEY in value)
if ((frame_start_tc is None) and has_timecode):
has_timecode = False
log.warning('`frame_start` and `frame_start_tc` are not set in entered data.')
value = value.replace(TIMECODE_KEY, MISSING_KEY_VALUE)
has_source_timecode = (SOURCE_TIMECODE_KEY in value)
if ((source_timecode is None) and has_source_timecode):
has_source_timecode = False
log.warning('Source does not have set timecode value.')
value = value.replace(SOURCE_TIMECODE_KEY, MISSING_KEY_VALUE)
key_pattern = re.compile('(\\{.*?[^{0]*\\})')
missing_keys = []
for group in key_pattern.findall(value):
try:
group.format(**data)
except (TypeError, KeyError):
missing_keys.append(group)
missing_keys = list(set(missing_keys))
for key in missing_keys:
value = value.replace(key, MISSING_KEY_VALUE)
if has_source_timecode:
args = [align, frame_start, frame_end, source_timecode]
if (not value.startswith(SOURCE_TIMECODE_KEY)):
value_items = value.split(SOURCE_TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
burnin.add_timecode(*args)
continue
if has_timecode:
args = [align, frame_start, frame_end, frame_start_tc]
if (not value.startswith(TIMECODE_KEY)):
value_items = value.split(TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
burnin.add_timecode(*args)
continue
text = value.format(**data)
burnin.add_text(text, align, frame_start, frame_end)
codec_args = ''
if codec_data:
codec_args = ' '.join(codec_data)
burnin.render(output_path, args=codec_args, overwrite=overwrite, **data) | def burnins_from_data(input_path, output_path, data, codec_data=None, overwrite=True):
'\n This method adds burnins to video/image file based on presets setting.\n Extension of output MUST be same as input. (mov -> mov, avi -> avi,...)\n\n :param input_path: full path to input file where burnins should be add\n :type input_path: str\n :param codec_data: all codec related arguments in list\n :param codec_data: list\n :param output_path: full path to output file where output will be rendered\n :type output_path: str\n :param data: data required for burnin settings (more info below)\n :type data: dict\n :param overwrite: output will be overriden if already exists, defaults to True\n :type overwrite: bool\n\n Presets must be set separately. Should be dict with 2 keys:\n - "options" - sets look of burnins - colors, opacity,...(more info: ModifiedBurnins doc)\n - *OPTIONAL* default values are used when not included\n - "burnins" - contains dictionary with burnins settings\n - *OPTIONAL* burnins won\'t be added (easier is not to use this)\n - each key of "burnins" represents Alignment, there are 6 possibilities:\n TOP_LEFT TOP_CENTERED TOP_RIGHT\n BOTTOM_LEFT BOTTOM_CENTERED BOTTOM_RIGHT\n - value must be string with text you want to burn-in\n - text may contain specific formatting keys (exmplained below)\n\n Requirement of *data* keys is based on presets.\n - "frame_start" - is required when "timecode" or "current_frame" ins keys\n - "frame_start_tc" - when "timecode" should start with different frame\n - *keys for static text*\n\n EXAMPLE:\n preset = {\n "options": {*OPTIONS FOR LOOK*},\n "burnins": {\n "TOP_LEFT": "static_text",\n "TOP_RIGHT": "{shot}",\n "BOTTOM_LEFT": "TC: {timecode}",\n "BOTTOM_RIGHT": "{frame_start}{current_frame}"\n }\n }\n\n For this preset we\'ll need at least this data:\n data = {\n "frame_start": 1001,\n "shot": "sh0010"\n }\n\n When Timecode should start from 1 then data need:\n data = {\n "frame_start": 1001,\n "frame_start_tc": 1,\n "shot": "sh0010"\n }\n '
presets = config.get_presets().get('tools', {}).get('burnins', {})
options_init = presets.get('options')
burnin = ModifiedBurnins(input_path, options_init=options_init)
frame_start = data.get('frame_start')
frame_end = data.get('frame_end')
frame_start_tc = data.get('frame_start_tc', frame_start)
stream = burnin._streams[0]
if ('resolution_width' not in data):
data['resolution_width'] = stream.get('width', MISSING_KEY_VALUE)
if ('resolution_height' not in data):
data['resolution_height'] = stream.get('height', MISSING_KEY_VALUE)
if ('fps' not in data):
data['fps'] = get_fps(stream.get('r_frame_rate', '0/0'))
if (frame_start is not None):
data[CURRENT_FRAME_KEY[1:(- 1)]] = CURRENT_FRAME_SPLITTER
if (frame_start_tc is not None):
data[TIMECODE_KEY[1:(- 1)]] = TIMECODE_KEY
source_timecode = stream.get('timecode')
if (source_timecode is None):
source_timecode = stream.get('tags', {}).get('timecode')
if (source_timecode is not None):
data[SOURCE_TIMECODE_KEY[1:(- 1)]] = SOURCE_TIMECODE_KEY
for (align_text, value) in presets.get('burnins', {}).items():
if (not value):
continue
if isinstance(value, (dict, list, tuple)):
raise TypeError('Expected string or number type. Got: {} - "{}" (Make sure you have new burnin presets).'.format(str(type(value)), str(value)))
align = None
align_text = align_text.strip().lower()
if (align_text == 'top_left'):
align = ModifiedBurnins.TOP_LEFT
elif (align_text == 'top_centered'):
align = ModifiedBurnins.TOP_CENTERED
elif (align_text == 'top_right'):
align = ModifiedBurnins.TOP_RIGHT
elif (align_text == 'bottom_left'):
align = ModifiedBurnins.BOTTOM_LEFT
elif (align_text == 'bottom_centered'):
align = ModifiedBurnins.BOTTOM_CENTERED
elif (align_text == 'bottom_right'):
align = ModifiedBurnins.BOTTOM_RIGHT
has_timecode = (TIMECODE_KEY in value)
if ((frame_start_tc is None) and has_timecode):
has_timecode = False
log.warning('`frame_start` and `frame_start_tc` are not set in entered data.')
value = value.replace(TIMECODE_KEY, MISSING_KEY_VALUE)
has_source_timecode = (SOURCE_TIMECODE_KEY in value)
if ((source_timecode is None) and has_source_timecode):
has_source_timecode = False
log.warning('Source does not have set timecode value.')
value = value.replace(SOURCE_TIMECODE_KEY, MISSING_KEY_VALUE)
key_pattern = re.compile('(\\{.*?[^{0]*\\})')
missing_keys = []
for group in key_pattern.findall(value):
try:
group.format(**data)
except (TypeError, KeyError):
missing_keys.append(group)
missing_keys = list(set(missing_keys))
for key in missing_keys:
value = value.replace(key, MISSING_KEY_VALUE)
if has_source_timecode:
args = [align, frame_start, frame_end, source_timecode]
if (not value.startswith(SOURCE_TIMECODE_KEY)):
value_items = value.split(SOURCE_TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
burnin.add_timecode(*args)
continue
if has_timecode:
args = [align, frame_start, frame_end, frame_start_tc]
if (not value.startswith(TIMECODE_KEY)):
value_items = value.split(TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
burnin.add_timecode(*args)
continue
text = value.format(**data)
burnin.add_text(text, align, frame_start, frame_end)
codec_args = ''
if codec_data:
codec_args = ' '.join(codec_data)
burnin.render(output_path, args=codec_args, overwrite=overwrite, **data)<|docstring|>This method adds burnins to a video/image file based on preset settings.
Extension of the output MUST be the same as the input. (mov -> mov, avi -> avi, ...)
:param input_path: full path to the input file where burnins should be added
:type input_path: str
:param codec_data: all codec-related arguments in a list
:type codec_data: list
:param output_path: full path to output file where output will be rendered
:type output_path: str
:param data: data required for burnin settings (more info below)
:type data: dict
:param overwrite: output will be overridden if it already exists, defaults to True
:type overwrite: bool
Presets must be set separately. Should be dict with 2 keys:
- "options" - sets look of burnins - colors, opacity,...(more info: ModifiedBurnins doc)
- *OPTIONAL* default values are used when not included
- "burnins" - contains dictionary with burnins settings
- *OPTIONAL* if omitted, burnins won't be added
- each key of "burnins" represents Alignment, there are 6 possibilities:
TOP_LEFT TOP_CENTERED TOP_RIGHT
BOTTOM_LEFT BOTTOM_CENTERED BOTTOM_RIGHT
- value must be string with text you want to burn-in
- text may contain specific formatting keys (explained below)
Requirement of *data* keys is based on presets.
- "frame_start" - is required when "timecode" or "current_frame" ins keys
- "frame_start_tc" - when "timecode" should start with different frame
- *keys for static text*
EXAMPLE:
preset = {
"options": {*OPTIONS FOR LOOK*},
"burnins": {
"TOP_LEFT": "static_text",
"TOP_RIGHT": "{shot}",
"BOTTOM_LEFT": "TC: {timecode}",
"BOTTOM_RIGHT": "{frame_start}{current_frame}"
}
}
For this preset we'll need at least this data:
data = {
"frame_start": 1001,
"shot": "sh0010"
}
When the timecode should start from 1, the data needs:
data = {
"frame_start": 1001,
"frame_start_tc": 1,
"shot": "sh0010"
}<|endoftext|> |
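For orientation, here is a minimal usage sketch of burnins_from_data as documented above. The file paths, shot name, and codec arguments are illustrative, and the sketch assumes the pype presets under tools/burnins are configured and ffmpeg is on the PATH:

# Hypothetical call to burnins_from_data (all paths and values are examples).
data = {
    "frame_start": 1001,
    "frame_end": 1100,
    "shot": "sh0010",
}
# codec_data entries are joined with spaces and passed straight to ffmpeg.
codec_args = ["-codec:v", "libx264", "-crf", "18"]
burnins_from_data(
    "/path/to/input.mov",
    "/path/to/output.mov",  # extension must match the input
    data,
    codec_data=codec_args,
    overwrite=True,
)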
4813d1d9097a3023082afcc33fe5a87de5e96bf408c8f70ced06aba53a1a536d | def add_text(self, text, align, frame_start=None, frame_end=None, options=None):
'\n Adding static text to a filter.\n\n :param str text: text to apply to the drawtext\n :param enum align: alignment, must use provided enum flags\n :param int frame_start: starting frame for burnins current frame\n :param dict options: recommended to use TextOptions\n '
if (not options):
options = ffmpeg_burnins.TextOptions(**self.options_init)
options = options.copy()
if frame_start:
options['frame_offset'] = frame_start
if frame_end:
options['frame_end'] = frame_end
self._add_burnin(text, align, options, DRAWTEXT) | Adds static text to a filter.
:param str text: text to apply to the drawtext
:param enum align: alignment, must use provided enum flags
:param int frame_start: starting frame for burnins current frame
:param dict options: recommended to use TextOptions | pype/scripts/otio_burnin.py | add_text | tokejepsen/pype | 0 | python | def add_text(self, text, align, frame_start=None, frame_end=None, options=None):
'\n Adding static text to a filter.\n\n :param str text: text to apply to the drawtext\n :param enum align: alignment, must use provided enum flags\n :param int frame_start: starting frame for burnins current frame\n :param dict options: recommended to use TextOptions\n '
if (not options):
options = ffmpeg_burnins.TextOptions(**self.options_init)
options = options.copy()
if frame_start:
options['frame_offset'] = frame_start
if frame_end:
options['frame_end'] = frame_end
self._add_burnin(text, align, options, DRAWTEXT) | def add_text(self, text, align, frame_start=None, frame_end=None, options=None):
'\n Adding static text to a filter.\n\n :param str text: text to apply to the drawtext\n :param enum align: alignment, must use provided enum flags\n :param int frame_start: starting frame for burnins current frame\n :param dict options: recommended to use TextOptions\n '
if (not options):
options = ffmpeg_burnins.TextOptions(**self.options_init)
options = options.copy()
if frame_start:
options['frame_offset'] = frame_start
if frame_end:
options['frame_end'] = frame_end
self._add_burnin(text, align, options, DRAWTEXT)<|docstring|>Adds static text to a filter.
:param str text: text to apply to the drawtext
:param enum align: alignment, must use provided enum flags
:param int frame_start: starting frame for burnins current frame
:param dict options: recommended to use TextOptions<|endoftext|> |
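A short sketch of add_text in isolation, assuming a ModifiedBurnins instance built from an illustrative input path; the alignment constant comes from the class-level enum flags referenced in the docstring:

burnin = ModifiedBurnins("/path/to/input.mov")  # hypothetical input file
# Burn a static label into the top-left corner for frames 1001-1100.
burnin.add_text("sh0010", ModifiedBurnins.TOP_LEFT,
                frame_start=1001, frame_end=1100)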
02cca5727a1d9195c62eeeccc7db3dc31ab8f2e27668d373bd88b4f9357ff846 | def add_timecode(self, align, frame_start=None, frame_end=None, frame_start_tc=None, text=None, options=None):
'\n Convenience method to create the frame number expression.\n\n :param enum align: alignment, must use provided enum flags\n :param int frame_start: starting frame for burnins current frame\n :param int frame_start_tc: starting frame for burnins timecode\n :param str text: text that will be before timecode\n :param dict options: recommended to use TimeCodeOptions\n '
if (not options):
options = ffmpeg_burnins.TimeCodeOptions(**self.options_init)
options = options.copy()
if frame_start:
options['frame_offset'] = frame_start
if frame_end:
options['frame_end'] = frame_end
if (not frame_start_tc):
frame_start_tc = options['frame_offset']
if (not text):
text = ''
if (not options.get('fps')):
options['fps'] = self.frame_rate
if isinstance(frame_start_tc, str):
options['timecode'] = frame_start_tc
else:
options['timecode'] = ffmpeg_burnins._frames_to_timecode(frame_start_tc, self.frame_rate)
self._add_burnin(text, align, options, TIMECODE) | Convenience method to create the frame number expression.
:param enum align: alignment, must use provided enum flags
:param int frame_start: starting frame for burnins current frame
:param int frame_start_tc: starting frame for burnins timecode
:param str text: text that will appear before the timecode
:param dict options: recommended to use TimeCodeOptions | pype/scripts/otio_burnin.py | add_timecode | tokejepsen/pype | 0 | python | def add_timecode(self, align, frame_start=None, frame_end=None, frame_start_tc=None, text=None, options=None):
'\n Convenience method to create the frame number expression.\n\n :param enum align: alignment, must use provided enum flags\n :param int frame_start: starting frame for burnins current frame\n :param int frame_start_tc: starting frame for burnins timecode\n :param str text: text that will be before timecode\n :param dict options: recommended to use TimeCodeOptions\n '
if (not options):
options = ffmpeg_burnins.TimeCodeOptions(**self.options_init)
options = options.copy()
if frame_start:
options['frame_offset'] = frame_start
if frame_end:
options['frame_end'] = frame_end
if (not frame_start_tc):
frame_start_tc = options['frame_offset']
if (not text):
text = ''
if (not options.get('fps')):
options['fps'] = self.frame_rate
if isinstance(frame_start_tc, str):
options['timecode'] = frame_start_tc
else:
options['timecode'] = ffmpeg_burnins._frames_to_timecode(frame_start_tc, self.frame_rate)
self._add_burnin(text, align, options, TIMECODE) | def add_timecode(self, align, frame_start=None, frame_end=None, frame_start_tc=None, text=None, options=None):
'\n Convenience method to create the frame number expression.\n\n :param enum align: alignment, must use provided enum flags\n :param int frame_start: starting frame for burnins current frame\n :param int frame_start_tc: starting frame for burnins timecode\n :param str text: text that will be before timecode\n :param dict options: recommended to use TimeCodeOptions\n '
if (not options):
options = ffmpeg_burnins.TimeCodeOptions(**self.options_init)
options = options.copy()
if frame_start:
options['frame_offset'] = frame_start
if frame_end:
options['frame_end'] = frame_end
if (not frame_start_tc):
frame_start_tc = options['frame_offset']
if (not text):
text = ''
if (not options.get('fps')):
options['fps'] = self.frame_rate
if isinstance(frame_start_tc, str):
options['timecode'] = frame_start_tc
else:
options['timecode'] = ffmpeg_burnins._frames_to_timecode(frame_start_tc, self.frame_rate)
self._add_burnin(text, align, options, TIMECODE)<|docstring|>Convenience method to create the frame number expression.
:param enum align: alignment, must use provided enum flags
:param int frame_start: starting frame for burnins current frame
:param int frame_start_tc: starting frame for burnins timecode
:param str text: text that will appear before the timecode
:param dict options: recommended to use TimeCodeOptions<|endoftext|> |
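Continuing the sketch above, add_timecode can either count frames from frame_start_tc or accept a ready-made timecode string (the isinstance branch in the body); both calls here are illustrative:

# Timecode counted from frame 1001, prefixed with a static label.
burnin.add_timecode(ModifiedBurnins.BOTTOM_RIGHT,
                    frame_start=1001, frame_end=1100,
                    frame_start_tc=1001, text="TC: ")
# Alternatively, pass a preformatted timecode string directly.
burnin.add_timecode(ModifiedBurnins.BOTTOM_LEFT,
                    frame_start=1001, frame_end=1100,
                    frame_start_tc="00:00:41:17")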
dff8771c449f454b58e5ceeebe27b88e6e90ae50871d682d504c8c84af7bf53e | def _add_burnin(self, text, align, options, draw):
'\n Generic method for building the filter flags.\n :param str text: text to apply to the drawtext\n :param enum align: alignment, must use provided enum flags\n :param dict options:\n '
final_text = text
text_for_size = text
if (CURRENT_FRAME_SPLITTER in text):
frame_start = options['frame_offset']
frame_end = options.get('frame_end', frame_start)
if (not frame_start):
replacement_final = replacement_size = str(MISSING_KEY_VALUE)
else:
replacement_final = "\\'{}\\'".format(('%%{eif\\:n+%d\\:d}' % frame_start))
replacement_size = str(frame_end)
final_text = final_text.replace(CURRENT_FRAME_SPLITTER, replacement_final)
text_for_size = text_for_size.replace(CURRENT_FRAME_SPLITTER, replacement_size)
resolution = self.resolution
data = {'text': final_text.replace(',', '\\,').replace(':', '\\:'), 'color': options['font_color'], 'size': options['font_size']}
timecode_text = (options.get('timecode') or '')
text_for_size += timecode_text
data.update(options)
data.update(ffmpeg_burnins._drawtext(align, resolution, text_for_size, options))
if (('font' in data) and ffmpeg_burnins._is_windows()):
data['font'] = data['font'].replace(os.sep, ('\\\\' + os.sep))
data['font'] = data['font'].replace(':', '\\:')
self.filters['drawtext'].append((draw % data))
if (options.get('bg_color') is not None):
box = (ffmpeg_burnins.BOX % {'border': options['bg_padding'], 'color': options['bg_color'], 'opacity': options['bg_opacity']})
self.filters['drawtext'][(- 1)] += (':%s' % box) | Generic method for building the filter flags.
:param str text: text to apply to the drawtext
:param enum align: alignment, must use provided enum flags
:param dict options: | pype/scripts/otio_burnin.py | _add_burnin | tokejepsen/pype | 0 | python | def _add_burnin(self, text, align, options, draw):
'\n Generic method for building the filter flags.\n :param str text: text to apply to the drawtext\n :param enum align: alignment, must use provided enum flags\n :param dict options:\n '
final_text = text
text_for_size = text
if (CURRENT_FRAME_SPLITTER in text):
frame_start = options['frame_offset']
frame_end = options.get('frame_end', frame_start)
if (not frame_start):
replacement_final = replacement_size = str(MISSING_KEY_VALUE)
else:
replacement_final = "\\'{}\\'".format(('%%{eif\\:n+%d\\:d}' % frame_start))
replacement_size = str(frame_end)
final_text = final_text.replace(CURRENT_FRAME_SPLITTER, replacement_final)
text_for_size = text_for_size.replace(CURRENT_FRAME_SPLITTER, replacement_size)
resolution = self.resolution
data = {'text': final_text.replace(',', '\\,').replace(':', '\\:'), 'color': options['font_color'], 'size': options['font_size']}
timecode_text = (options.get('timecode') or '')
text_for_size += timecode_text
data.update(options)
data.update(ffmpeg_burnins._drawtext(align, resolution, text_for_size, options))
if (('font' in data) and ffmpeg_burnins._is_windows()):
data['font'] = data['font'].replace(os.sep, ('\\\\' + os.sep))
data['font'] = data['font'].replace(':', '\\:')
self.filters['drawtext'].append((draw % data))
if (options.get('bg_color') is not None):
box = (ffmpeg_burnins.BOX % {'border': options['bg_padding'], 'color': options['bg_color'], 'opacity': options['bg_opacity']})
self.filters['drawtext'][(- 1)] += (':%s' % box) | def _add_burnin(self, text, align, options, draw):
'\n Generic method for building the filter flags.\n :param str text: text to apply to the drawtext\n :param enum align: alignment, must use provided enum flags\n :param dict options:\n '
final_text = text
text_for_size = text
if (CURRENT_FRAME_SPLITTER in text):
frame_start = options['frame_offset']
frame_end = options.get('frame_end', frame_start)
if (not frame_start):
replacement_final = replacement_size = str(MISSING_KEY_VALUE)
else:
replacement_final = "\\'{}\\'".format(('%%{eif\\:n+%d\\:d}' % frame_start))
replacement_size = str(frame_end)
final_text = final_text.replace(CURRENT_FRAME_SPLITTER, replacement_final)
text_for_size = text_for_size.replace(CURRENT_FRAME_SPLITTER, replacement_size)
resolution = self.resolution
data = {'text': final_text.replace(',', '\\,').replace(':', '\\:'), 'color': options['font_color'], 'size': options['font_size']}
timecode_text = (options.get('timecode') or '')
text_for_size += timecode_text
data.update(options)
data.update(ffmpeg_burnins._drawtext(align, resolution, text_for_size, options))
if (('font' in data) and ffmpeg_burnins._is_windows()):
data['font'] = data['font'].replace(os.sep, ('\\\\' + os.sep))
data['font'] = data['font'].replace(':', '\\:')
self.filters['drawtext'].append((draw % data))
if (options.get('bg_color') is not None):
box = (ffmpeg_burnins.BOX % {'border': options['bg_padding'], 'color': options['bg_color'], 'opacity': options['bg_opacity']})
self.filters['drawtext'][(- 1)] += (':%s' % box)<|docstring|>Generic method for building the filter flags.
:param str text: text to apply to the drawtext
:param enum align: alignment, must use provided enum flags
:param dict options:<|endoftext|> |
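The CURRENT_FRAME_SPLITTER replacement in _add_burnin builds an ffmpeg expression expansion for the running frame number; a standalone reproduction of that substitution for frame_start=1001:

frame_start = 1001
replacement = "\\'{}\\'".format('%%{eif\\:n+%d\\:d}' % frame_start)
# Prints \'%{eif\:n+1001\:d}\' - ffmpeg's drawtext expands this to the
# current frame index n offset by the start frame, formatted as an integer.
print(replacement)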
2b8fa660318c689a60f5279273fa76e14c4eaffefc933b6bf7bc414622852163 | def command(self, output=None, args=None, overwrite=False):
'\n Generate the entire FFMPEG command.\n\n :param str output: output file\n :param str args: additional FFMPEG arguments\n :param bool overwrite: overwrite the output if it exists\n :returns: completed command\n :rtype: str\n '
output = (output or '')
if overwrite:
output = '-y {}'.format(output)
filters = ''
if self.filter_string:
filters = '-vf "{}"'.format(self.filter_string)
return (FFMPEG % {'input': self.source, 'output': output, 'args': (('%s ' % args) if args else ''), 'filters': filters}).strip() | Generate the entire FFMPEG command.
:param str output: output file
:param str args: additional FFMPEG arguments
:param bool overwrite: overwrite the output if it exists
:returns: completed command
:rtype: str | pype/scripts/otio_burnin.py | command | tokejepsen/pype | 0 | python | def command(self, output=None, args=None, overwrite=False):
'\n Generate the entire FFMPEG command.\n\n :param str output: output file\n :param str args: additional FFMPEG arguments\n :param bool overwrite: overwrite the output if it exists\n :returns: completed command\n :rtype: str\n '
output = (output or '')
if overwrite:
output = '-y {}'.format(output)
filters = ''
if self.filter_string:
filters = '-vf "{}"'.format(self.filter_string)
return (FFMPEG % {'input': self.source, 'output': output, 'args': (('%s ' % args) if args else ''), 'filters': filters}).strip() | def command(self, output=None, args=None, overwrite=False):
'\n Generate the entire FFMPEG command.\n\n :param str output: output file\n :param str args: additional FFMPEG arguments\n :param bool overwrite: overwrite the output if it exists\n :returns: completed command\n :rtype: str\n '
output = (output or '')
if overwrite:
output = '-y {}'.format(output)
filters = ''
if self.filter_string:
filters = '-vf "{}"'.format(self.filter_string)
return (FFMPEG % {'input': self.source, 'output': output, 'args': (('%s ' % args) if args else ''), 'filters': filters}).strip()<|docstring|>Generate the entire FFMPEG command.
:param str output: output file
:param str args: additional FFMPEG arguments
:param bool overwrite: overwrite the output if it exists
:returns: completed command
:rtype: str<|endoftext|> |
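command() only assembles a string; the FFMPEG template itself is defined elsewhere in the module, so the shape of the output below is an assumption based on the placeholder names used in this method. Reusing the ModifiedBurnins instance from the earlier sketch:

cmd = burnin.command(output="/path/to/out.mov",
                     args="-codec:v libx264", overwrite=True)
# With overwrite=True the output is prefixed with '-y', and any accumulated
# drawtext filters are wrapped in -vf "..."; roughly:
#   ffmpeg -i /path/to/input.mov -codec:v libx264 -vf "drawtext=..." -y /path/to/out.mov
print(cmd)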
8f8ce127e096e5ae9d637915aaafea7df8475c67654c09bb4681ad659df6d51a | def render(self, output, args=None, overwrite=False, **kwargs):
'\n Render the media to a specified destination.\n\n :param str output: output file\n :param str args: additional FFMPEG arguments\n :param bool overwrite: overwrite the output if it exists\n '
if ((not overwrite) and os.path.exists(output)):
raise RuntimeError(("Destination '%s' exists, please use overwrite" % output))
is_sequence = ('%' in output)
command = self.command(output=output, args=args, overwrite=overwrite)
proc = subprocess.Popen(command, shell=True)
proc.communicate()
if (proc.returncode != 0):
raise RuntimeError(("Failed to render '%s': %s'" % (output, command)))
if is_sequence:
output = (output % kwargs.get('duration'))
if (not os.path.exists(output)):
raise RuntimeError(("Failed to generate this fucking file '%s'" % output)) | Render the media to a specified destination.
:param str output: output file
:param str args: additional FFMPEG arguments
:param bool overwrite: overwrite the output if it exists | pype/scripts/otio_burnin.py | render | tokejepsen/pype | 0 | python | def render(self, output, args=None, overwrite=False, **kwargs):
'\n Render the media to a specified destination.\n\n :param str output: output file\n :param str args: additional FFMPEG arguments\n :param bool overwrite: overwrite the output if it exists\n '
if ((not overwrite) and os.path.exists(output)):
raise RuntimeError(("Destination '%s' exists, please use overwrite" % output))
is_sequence = ('%' in output)
command = self.command(output=output, args=args, overwrite=overwrite)
proc = subprocess.Popen(command, shell=True)
proc.communicate()
if (proc.returncode != 0):
raise RuntimeError(("Failed to render '%s': %s'" % (output, command)))
if is_sequence:
output = (output % kwargs.get('duration'))
if (not os.path.exists(output)):
raise RuntimeError(("Failed to generate this fucking file '%s'" % output)) | def render(self, output, args=None, overwrite=False, **kwargs):
'\n Render the media to a specified destination.\n\n :param str output: output file\n :param str args: additional FFMPEG arguments\n :param bool overwrite: overwrite the output if it exists\n '
if ((not overwrite) and os.path.exists(output)):
raise RuntimeError(("Destination '%s' exists, please use overwrite" % output))
is_sequence = ('%' in output)
command = self.command(output=output, args=args, overwrite=overwrite)
proc = subprocess.Popen(command, shell=True)
proc.communicate()
if (proc.returncode != 0):
raise RuntimeError(("Failed to render '%s': %s'" % (output, command)))
if is_sequence:
output = (output % kwargs.get('duration'))
if (not os.path.exists(output)):
raise RuntimeError(("Failed to generate this fucking file '%s'" % output))<|docstring|>Render the media to a specified destination.
:param str output: output file
:param str args: additional FFMPEG arguments
:param bool overwrite: overwrite the output if it exists<|endoftext|> |
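render() shells out via subprocess and, when the output path contains a '%', treats it as an image-sequence pattern whose final frame (formatted with kwargs['duration']) is checked for existence; a sketch with illustrative paths, again reusing the burnin instance from above:

# Single-file render, overwriting any existing output.
burnin.render("/path/to/out.mov", args="-codec:v libx264", overwrite=True)

# Sequence render: the '%04d' pattern marks it as a sequence, and render()
# verifies that the frame formatted with duration=100 exists afterwards.
burnin.render("/path/to/out.%04d.png", overwrite=True, duration=100)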
b0d407ee2e593c3af12352746155cd581768cc0e70763ca146b5ff7bfe67396a | def start(self):
'\n This function starts the Plugin Scheduler. It installs signal handlers, acquire an distributed lock and then\n return a Celery application instance\n\n The flow of the startup process is follows:\n start -> _celery_beat_service_started (starts) -> plugin_scheduler_task_thread\n\n The plugin_scheduler_task_thread runs the plugin_scheduler_task every "[\'plugin_type\'][\'plugin_scan_interval\']"\n seconds, which comes from the system wide configuration file\n\n The reason for this slightly convoluted startup is that because the plugin_scheduler_task needs the Celery Beat\n Service instance object so that it can update the schedule periodically and this only available after the\n _celery_beat_service_started callback function is called by Celery Beat\n\n Returns:\n celery.app: The Celery App instance to be used by the scheduler\n\n '
logger = self._logger
logger.info((u'%s Plugin Scheduler main thread: OS PID: %d' % (self._plugin_type_display_name, get_os_tid())))
logger.info((u'"Tour Of Duty" adjusted values: tasks: %d count, time: %d seconds, memory: %dMB' % (self._tour_of_duty.adjusted_tasks, self._tour_of_duty.adjusted_seconds, self._tour_of_duty.adjusted_memory_growth_mb)))
logger.info(u'Setting up signal handlers')
self._install_signal_handlers()
client_id = get_client_id(str(const.PLUGIN_CLIENT_ID_PREFIX))
lock_path = str(const.LOCK_PATH_DELIMITER.join([_f for _f in [const.PLUGIN_SCHEDULER_LOCK_PATH, self._plugin_type, self._plugin_subtype, str('lock')] if _f]))
logger.info((u'Creating lock object for %s Plugin Scheduler under lock path "%s"' % (self._plugin_type, lock_path)))
try:
self._lock = PanoptesLock(context=self._panoptes_context, path=lock_path, timeout=self._lock_timeout, retries=0, identifier=client_id)
except Exception as e:
sys.exit((u'Failed to create lock object: %s' % repr(e)))
if self._lock.locked:
logger.info(u'Starting Celery Beat Service')
try:
self._celery = PanoptesCeleryInstance(self._panoptes_context, self._celery_config).celery
self._celery.conf.update(CELERYBEAT_MAX_LOOP_INTERVAL=self._config[self._plugin_type][u'celerybeat_max_loop_interval'])
except:
logger.exception(u'Error trying to start Celery Beat Service')
return self._celery | This function starts the Plugin Scheduler. It installs signal handlers, acquires a distributed lock, and then
returns a Celery application instance
The flow of the startup process is as follows:
start -> _celery_beat_service_started (starts) -> plugin_scheduler_task_thread
The plugin_scheduler_task_thread runs the plugin_scheduler_task every "['plugin_type']['plugin_scan_interval']"
seconds, which comes from the system wide configuration file
The reason for this slightly convoluted startup is that the plugin_scheduler_task needs the Celery Beat
Service instance object so that it can update the schedule periodically, and this is only available after the
_celery_beat_service_started callback function is called by Celery Beat
Returns:
celery.app: The Celery App instance to be used by the scheduler | yahoo_panoptes/framework/plugins/scheduler.py | start | ilholmes/panoptes | 86 | python | def start(self):
'\n This function starts the Plugin Scheduler. It installs signal handlers, acquire an distributed lock and then\n return a Celery application instance\n\n The flow of the startup process is follows:\n start -> _celery_beat_service_started (starts) -> plugin_scheduler_task_thread\n\n The plugin_scheduler_task_thread runs the plugin_scheduler_task every "[\'plugin_type\'][\'plugin_scan_interval\']"\n seconds, which comes from the system wide configuration file\n\n The reason for this slightly convoluted startup is that because the plugin_scheduler_task needs the Celery Beat\n Service instance object so that it can update the schedule periodically and this only available after the\n _celery_beat_service_started callback function is called by Celery Beat\n\n Returns:\n celery.app: The Celery App instance to be used by the scheduler\n\n '
logger = self._logger
logger.info((u'%s Plugin Scheduler main thread: OS PID: %d' % (self._plugin_type_display_name, get_os_tid())))
logger.info((u'"Tour Of Duty" adjusted values: tasks: %d count, time: %d seconds, memory: %dMB' % (self._tour_of_duty.adjusted_tasks, self._tour_of_duty.adjusted_seconds, self._tour_of_duty.adjusted_memory_growth_mb)))
logger.info(u'Setting up signal handlers')
self._install_signal_handlers()
client_id = get_client_id(str(const.PLUGIN_CLIENT_ID_PREFIX))
lock_path = str(const.LOCK_PATH_DELIMITER.join([_f for _f in [const.PLUGIN_SCHEDULER_LOCK_PATH, self._plugin_type, self._plugin_subtype, str('lock')] if _f]))
logger.info((u'Creating lock object for %s Plugin Scheduler under lock path "%s"' % (self._plugin_type, lock_path)))
try:
self._lock = PanoptesLock(context=self._panoptes_context, path=lock_path, timeout=self._lock_timeout, retries=0, identifier=client_id)
except Exception as e:
sys.exit((u'Failed to create lock object: %s' % repr(e)))
if self._lock.locked:
logger.info(u'Starting Celery Beat Service')
try:
self._celery = PanoptesCeleryInstance(self._panoptes_context, self._celery_config).celery
self._celery.conf.update(CELERYBEAT_MAX_LOOP_INTERVAL=self._config[self._plugin_type][u'celerybeat_max_loop_interval'])
except:
logger.exception(u'Error trying to start Celery Beat Service')
return self._celery | def start(self):
'\n This function starts the Plugin Scheduler. It installs signal handlers, acquire an distributed lock and then\n return a Celery application instance\n\n The flow of the startup process is follows:\n start -> _celery_beat_service_started (starts) -> plugin_scheduler_task_thread\n\n The plugin_scheduler_task_thread runs the plugin_scheduler_task every "[\'plugin_type\'][\'plugin_scan_interval\']"\n seconds, which comes from the system wide configuration file\n\n The reason for this slightly convoluted startup is that because the plugin_scheduler_task needs the Celery Beat\n Service instance object so that it can update the schedule periodically and this only available after the\n _celery_beat_service_started callback function is called by Celery Beat\n\n Returns:\n celery.app: The Celery App instance to be used by the scheduler\n\n '
logger = self._logger
logger.info((u'%s Plugin Scheduler main thread: OS PID: %d' % (self._plugin_type_display_name, get_os_tid())))
logger.info((u'"Tour Of Duty" adjusted values: tasks: %d count, time: %d seconds, memory: %dMB' % (self._tour_of_duty.adjusted_tasks, self._tour_of_duty.adjusted_seconds, self._tour_of_duty.adjusted_memory_growth_mb)))
logger.info(u'Setting up signal handlers')
self._install_signal_handlers()
client_id = get_client_id(str(const.PLUGIN_CLIENT_ID_PREFIX))
lock_path = str(const.LOCK_PATH_DELIMITER.join([_f for _f in [const.PLUGIN_SCHEDULER_LOCK_PATH, self._plugin_type, self._plugin_subtype, str('lock')] if _f]))
logger.info((u'Creating lock object for %s Plugin Scheduler under lock path "%s"' % (self._plugin_type, lock_path)))
try:
self._lock = PanoptesLock(context=self._panoptes_context, path=lock_path, timeout=self._lock_timeout, retries=0, identifier=client_id)
except Exception as e:
sys.exit((u'Failed to create lock object: %s' % repr(e)))
if self._lock.locked:
logger.info(u'Starting Celery Beat Service')
try:
self._celery = PanoptesCeleryInstance(self._panoptes_context, self._celery_config).celery
self._celery.conf.update(CELERYBEAT_MAX_LOOP_INTERVAL=self._config[self._plugin_type][u'celerybeat_max_loop_interval'])
except:
logger.exception(u'Error trying to start Celery Beat Service')
return self._celery<|docstring|>This function starts the Plugin Scheduler. It installs signal handlers, acquires a distributed lock, and then
returns a Celery application instance
The flow of the startup process is as follows:
start -> _celery_beat_service_started (starts) -> plugin_scheduler_task_thread
The plugin_scheduler_task_thread runs the plugin_scheduler_task every "['plugin_type']['plugin_scan_interval']"
seconds, which comes from the system wide configuration file
The reason for this slightly convoluted startup is that the plugin_scheduler_task needs the Celery Beat
Service instance object so that it can update the schedule periodically, and this is only available after the
_celery_beat_service_started callback function is called by Celery Beat
Returns:
celery.app: The Celery App instance to be used by the scheduler<|endoftext|> |
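The _celery_beat_service_started callback referenced in the docstring is typically connected to Celery's beat_init signal, which delivers the running celery.beat.Service instance; a generic sketch (the actual wiring inside Panoptes may differ):

from celery.signals import beat_init

scheduler = ...  # a PanoptesPluginScheduler built during process startup (hypothetical)

@beat_init.connect
def _celery_beat_service_started(sender=None, **kwargs):
    # 'sender' is the celery.beat.Service instance; hand it to run(), which
    # stores it and starts the plugin scheduler task thread.
    scheduler.run(sender=sender, **kwargs)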
9d49221fb8e41ef4e362751915f8f723ad159ce596bc1f987d5b8b49ec678705 | def run(self, sender=None, args=None, **kwargs):
'\n This function is called after the Celery Beat Service has finished initialization.\n The function (re)installs the signal handlers, since they are overwritten by the Celery Beat Service.\n It stores the reference to the Celery Beat Service instance and starts the Plugin Scheduler thread\n\n Args:\n sender (celery.beat.Service): The Celery Beat Service instance\n args: Variable length argument list\n **kwargs: Arbitrary keyword argument\n\n Returns:\n None\n '
logger = self._logger
logger.info(u'Reinstalling signal handlers after Celery Beat Service setup')
self._install_signal_handlers()
self._plugin_scheduler_celery_beat_service = sender
self._t = threading.Thread(target=self._plugin_scheduler_task_thread)
self._t.start() | This function is called after the Celery Beat Service has finished initialization.
The function (re)installs the signal handlers, since they are overwritten by the Celery Beat Service.
It stores the reference to the Celery Beat Service instance and starts the Plugin Scheduler thread
Args:
sender (celery.beat.Service): The Celery Beat Service instance
args: Variable length argument list
**kwargs: Arbitrary keyword argument
Returns:
None | yahoo_panoptes/framework/plugins/scheduler.py | run | ilholmes/panoptes | 86 | python | def run(self, sender=None, args=None, **kwargs):
'\n This function is called after the Celery Beat Service has finished initialization.\n The function (re)installs the signal handlers, since they are overwritten by the Celery Beat Service.\n It stores the reference to the Celery Beat Service instance and starts the Plugin Scheduler thread\n\n Args:\n sender (celery.beat.Service): The Celery Beat Service instance\n args: Variable length argument list\n **kwargs: Arbitrary keyword argument\n\n Returns:\n None\n '
logger = self._logger
logger.info(u'Reinstalling signal handlers after Celery Beat Service setup')
self._install_signal_handlers()
self._plugin_scheduler_celery_beat_service = sender
self._t = threading.Thread(target=self._plugin_scheduler_task_thread)
self._t.start() | def run(self, sender=None, args=None, **kwargs):
'\n This function is called after the Celery Beat Service has finished initialization.\n The function (re)installs the signal handlers, since they are overwritten by the Celery Beat Service.\n It stores the reference to the Celery Beat Service instance and starts the Plugin Scheduler thread\n\n Args:\n sender (celery.beat.Service): The Celery Beat Service instance\n args: Variable length argument list\n **kwargs: Arbitrary keyword argument\n\n Returns:\n None\n '
logger = self._logger
logger.info(u'Reinstalling signal handlers after Celery Beat Service setup')
self._install_signal_handlers()
self._plugin_scheduler_celery_beat_service = sender
self._t = threading.Thread(target=self._plugin_scheduler_task_thread)
self._t.start()<|docstring|>This function is called after the Celery Beat Service has finished initialization.
The function (re)installs the signal handlers, since they are overwritten by the Celery Beat Service.
It stores the reference to the Celery Beat Service instance and starts the Plugin Scheduler thread
Args:
sender (celery.beat.Service): The Celery Beat Service instance
args: Variable length argument list
**kwargs: Arbitrary keyword argument
Returns:
None<|endoftext|> |
f4d9096aecf09306ffedcbd98642cc4df33a0df7bed7e0854fa33eeada27ff06 | def _plugin_scheduler_task_thread(self):
"\n This function is the entry point of the Plugin Scheduler thread. It checks if the Plugin Scheduler is shutdown\n mode and if not, then calls the plugin_scheduler_task function every 'plugin_scan_interval'\n seconds\n\n Returns:\n None\n\n "
logger = self._logger
logger.info((u'%s Plugin Scheduler Task thread: OS PID: %d' % (self._plugin_type_display_name, get_os_tid())))
while (not self._shutdown_plugin_scheduler.is_set()):
if self._lock.locked:
self._cycles_without_lock = 0
try:
self._plugin_scheduler_task(self._plugin_scheduler_celery_beat_service, self._tour_of_duty.iterations)
self._tour_of_duty.increment_task_count()
except Exception:
logger.exception(u'Error trying to execute plugin scheduler task')
else:
self._cycles_without_lock += 1
if (self._cycles_without_lock < const.PLUGIN_SCHEDULER_MAX_CYCLES_WITHOUT_LOCK):
logger.warn((u'%s Plugin Scheduler lock not held, skipping scheduling cycle' % self._plugin_type_display_name))
else:
logger.warn((u'%s Plugin Scheduler lock not held for %d cycles, shutting down' % (self._plugin_type_display_name, self._cycles_without_lock)))
self._shutdown()
if self._tour_of_duty.completed:
why = []
why += ([u'tasks'] if self._tour_of_duty.tasks_completed else [])
why += ([u'time'] if self._tour_of_duty.time_completed else [])
why += ([u'memory growth'] if self._tour_of_duty.memory_growth_completed else [])
logger.info((u'%s Plugin Scheduler "Tour Of Duty" completed because of %s going to shutdown' % (self._plugin_type_display_name, ', '.join(why))))
self._shutdown()
self._shutdown_plugin_scheduler.wait(self._config[self._plugin_type][u'plugin_scan_interval'])
logger.critical((u'%s Plugin Scheduler Task thread shutdown' % self._plugin_type_display_name)) | This function is the entry point of the Plugin Scheduler thread. It checks if the Plugin Scheduler is in shutdown
mode and, if not, calls the plugin_scheduler_task function every 'plugin_scan_interval'
seconds
Returns:
None | yahoo_panoptes/framework/plugins/scheduler.py | _plugin_scheduler_task_thread | ilholmes/panoptes | 86 | python | def _plugin_scheduler_task_thread(self):
"\n This function is the entry point of the Plugin Scheduler thread. It checks if the Plugin Scheduler is shutdown\n mode and if not, then calls the plugin_scheduler_task function every 'plugin_scan_interval'\n seconds\n\n Returns:\n None\n\n "
logger = self._logger
logger.info((u'%s Plugin Scheduler Task thread: OS PID: %d' % (self._plugin_type_display_name, get_os_tid())))
while (not self._shutdown_plugin_scheduler.is_set()):
if self._lock.locked:
self._cycles_without_lock = 0
try:
self._plugin_scheduler_task(self._plugin_scheduler_celery_beat_service, self._tour_of_duty.iterations)
self._tour_of_duty.increment_task_count()
except Exception:
logger.exception(u'Error trying to execute plugin scheduler task')
else:
self._cycles_without_lock += 1
if (self._cycles_without_lock < const.PLUGIN_SCHEDULER_MAX_CYCLES_WITHOUT_LOCK):
logger.warn((u'%s Plugin Scheduler lock not held, skipping scheduling cycle' % self._plugin_type_display_name))
else:
logger.warn((u'%s Plugin Scheduler lock not held for %d cycles, shutting down' % (self._plugin_type_display_name, self._cycles_without_lock)))
self._shutdown()
if self._tour_of_duty.completed:
why = []
why += ([u'tasks'] if self._tour_of_duty.tasks_completed else [])
why += ([u'time'] if self._tour_of_duty.time_completed else [])
why += ([u'memory growth'] if self._tour_of_duty.memory_growth_completed else [])
logger.info((u'%s Plugin Scheduler "Tour Of Duty" completed because of %s going to shutdown' % (self._plugin_type_display_name, ', '.join(why))))
self._shutdown()
self._shutdown_plugin_scheduler.wait(self._config[self._plugin_type][u'plugin_scan_interval'])
logger.critical((u'%s Plugin Scheduler Task thread shutdown' % self._plugin_type_display_name)) | def _plugin_scheduler_task_thread(self):
"\n This function is the entry point of the Plugin Scheduler thread. It checks if the Plugin Scheduler is shutdown\n mode and if not, then calls the plugin_scheduler_task function every 'plugin_scan_interval'\n seconds\n\n Returns:\n None\n\n "
logger = self._logger
logger.info((u'%s Plugin Scheduler Task thread: OS PID: %d' % (self._plugin_type_display_name, get_os_tid())))
while (not self._shutdown_plugin_scheduler.is_set()):
if self._lock.locked:
self._cycles_without_lock = 0
try:
self._plugin_scheduler_task(self._plugin_scheduler_celery_beat_service, self._tour_of_duty.iterations)
self._tour_of_duty.increment_task_count()
except Exception:
logger.exception(u'Error trying to execute plugin scheduler task')
else:
self._cycles_without_lock += 1
if (self._cycles_without_lock < const.PLUGIN_SCHEDULER_MAX_CYCLES_WITHOUT_LOCK):
logger.warn((u'%s Plugin Scheduler lock not held, skipping scheduling cycle' % self._plugin_type_display_name))
else:
logger.warn((u'%s Plugin Scheduler lock not held for %d cycles, shutting down' % (self._plugin_type_display_name, self._cycles_without_lock)))
self._shutdown()
if self._tour_of_duty.completed:
why = []
why += ([u'tasks'] if self._tour_of_duty.tasks_completed else [])
why += ([u'time'] if self._tour_of_duty.time_completed else [])
why += ([u'memory growth'] if self._tour_of_duty.memory_growth_completed else [])
logger.info((u'%s Plugin Scheduler "Tour Of Duty" completed because of %s going to shutdown' % (self._plugin_type_display_name, ', '.join(why))))
self._shutdown()
self._shutdown_plugin_scheduler.wait(self._config[self._plugin_type][u'plugin_scan_interval'])
logger.critical((u'%s Plugin Scheduler Task thread shutdown' % self._plugin_type_display_name))<|docstring|>This function is the entry point of the Plugin Scheduler thread. It checks if the Plugin Scheduler is in shutdown
mode and, if not, calls the plugin_scheduler_task function every 'plugin_scan_interval'
seconds
Returns:
None<|endoftext|> |
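The pacing of the task thread relies on threading.Event.wait, which sleeps for at most the interval but returns immediately once the shutdown event is set; a minimal standalone illustration of the same loop shape:

import threading

shutdown = threading.Event()

def cycle():
    print("scheduling cycle")  # stand-in for the real per-cycle work

def task_thread(interval=1.0):
    while not shutdown.is_set():
        cycle()
        shutdown.wait(interval)  # interruptible sleep between cycles

t = threading.Thread(target=task_thread)
t.start()
shutdown.set()  # wakes the wait() immediately and ends the loop
t.join()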
0f2508cdad8b6a04da5f6a36150c928fd661462c2fbf0a6655da3b92c69659b5 | def _shutdown(self):
'\n The main shutdown method, which handles two scenarios\n * The Plugin Scheduler thread is alive: sets an event to shutdown the thread\n * The Plugin Scheduler thread is not alive: this can happen if we have not been able to acquire the lock or\n the if the Plugin Scheduler thread quits unexpectedly. In this case, this handler proceeds to call the function\n to shutdown other services (e.g. Celery Beat Service)\n\n Returns:\n None\n '
logger = self._logger
if self._shutdown_plugin_scheduler.is_set():
print(u'%s Plugin Scheduler already in the process of shutdown, ignoring redundant call')
return
shutdown_interval = int((int(self._config[self._plugin_type][u'plugin_scan_interval']) * 2))
logger.info((u'Shutdown/restart requested - may take up to %s seconds' % shutdown_interval))
logger.info((u'Signalling for %s Plugin Scheduler Task Thread to shutdown' % self._plugin_type_display_name))
self._shutdown_plugin_scheduler.set()
if (self._t != threading.currentThread()):
if ((self._t is not None) and self._t.isAlive()):
self._t.join()
if ((self._t is None) or (not self._t.isAlive())):
logger.info((u'%s Plugin Scheduler Task Thread is not active - shutting down other services' % self._plugin_type_display_name))
else:
logger.info((u'%s Plugin Scheduler shutdown called from plugin scheduler task thread' % self._plugin_type_display_name))
if self._plugin_scheduler_celery_beat_service:
logger.info(u'Shutting down Celery Beat Service')
self._plugin_scheduler_celery_beat_service.stop()
if self._lock:
logger.info(u'Releasing lock')
self._lock.release()
logger.info(u'Plugin Scheduler shutdown complete')
sys.exit() | The main shutdown method, which handles two scenarios
* The Plugin Scheduler thread is alive: sets an event to shutdown the thread
* The Plugin Scheduler thread is not alive: this can happen if we have not been able to acquire the lock or
if the Plugin Scheduler thread quits unexpectedly. In this case, this handler proceeds to call the function
to shutdown other services (e.g. Celery Beat Service)
Returns:
None | yahoo_panoptes/framework/plugins/scheduler.py | _shutdown | ilholmes/panoptes | 86 | python | def _shutdown(self):
'\n The main shutdown method, which handles two scenarios\n * The Plugin Scheduler thread is alive: sets an event to shutdown the thread\n * The Plugin Scheduler thread is not alive: this can happen if we have not been able to acquire the lock or\n the if the Plugin Scheduler thread quits unexpectedly. In this case, this handler proceeds to call the function\n to shutdown other services (e.g. Celery Beat Service)\n\n Returns:\n None\n '
logger = self._logger
if self._shutdown_plugin_scheduler.is_set():
print(u'%s Plugin Scheduler already in the process of shutdown, ignoring redundant call')
return
shutdown_interval = int((int(self._config[self._plugin_type][u'plugin_scan_interval']) * 2))
logger.info((u'Shutdown/restart requested - may take up to %s seconds' % shutdown_interval))
logger.info((u'Signalling for %s Plugin Scheduler Task Thread to shutdown' % self._plugin_type_display_name))
self._shutdown_plugin_scheduler.set()
if (self._t != threading.currentThread()):
if ((self._t is not None) and self._t.isAlive()):
self._t.join()
if ((self._t is None) or (not self._t.isAlive())):
logger.info((u'%s Plugin Scheduler Task Thread is not active - shutting down other services' % self._plugin_type_display_name))
else:
logger.info((u'%s Plugin Scheduler shutdown called from plugin scheduler task thread' % self._plugin_type_display_name))
if self._plugin_scheduler_celery_beat_service:
logger.info(u'Shutting down Celery Beat Service')
self._plugin_scheduler_celery_beat_service.stop()
if self._lock:
logger.info(u'Releasing lock')
self._lock.release()
logger.info(u'Plugin Scheduler shutdown complete')
sys.exit() | def _shutdown(self):
'\n The main shutdown method, which handles two scenarios\n * The Plugin Scheduler thread is alive: sets an event to shutdown the thread\n * The Plugin Scheduler thread is not alive: this can happen if we have not been able to acquire the lock or\n the if the Plugin Scheduler thread quits unexpectedly. In this case, this handler proceeds to call the function\n to shutdown other services (e.g. Celery Beat Service)\n\n Returns:\n None\n '
logger = self._logger
if self._shutdown_plugin_scheduler.is_set():
print(u'%s Plugin Scheduler already in the process of shutdown, ignoring redundant call')
return
shutdown_interval = int((int(self._config[self._plugin_type][u'plugin_scan_interval']) * 2))
logger.info((u'Shutdown/restart requested - may take up to %s seconds' % shutdown_interval))
logger.info((u'Signalling for %s Plugin Scheduler Task Thread to shutdown' % self._plugin_type_display_name))
self._shutdown_plugin_scheduler.set()
if (self._t != threading.currentThread()):
if ((self._t is not None) and self._t.isAlive()):
self._t.join()
if ((self._t is None) or (not self._t.isAlive())):
logger.info((u'%s Plugin Scheduler Task Thread is not active - shutting down other services' % self._plugin_type_display_name))
else:
logger.info((u'%s Plugin Scheduler shutdown called from plugin scheduler task thread' % self._plugin_type_display_name))
if self._plugin_scheduler_celery_beat_service:
logger.info(u'Shutting down Celery Beat Service')
self._plugin_scheduler_celery_beat_service.stop()
if self._lock:
logger.info(u'Releasing lock')
self._lock.release()
logger.info(u'Plugin Scheduler shutdown complete')
sys.exit()<|docstring|>The main shutdown method, which handles two scenarios
* The Plugin Scheduler thread is alive: sets an event to shutdown the thread
* The Plugin Scheduler thread is not alive: this can happen if we have not been able to acquire the lock or
if the Plugin Scheduler thread quits unexpectedly. In this case, this handler proceeds to call the function
to shutdown other services (e.g. Celery Beat Service)
Returns:
None<|endoftext|> |
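The thread-identity check in _shutdown (self._t != threading.currentThread()) distinguishes a shutdown requested from the main thread from one initiated inside the scheduler thread itself, so the method never tries to join its own thread; a minimal illustration of that identity test:

import threading

def work():
    # Inside the worker this comparison is True.
    print(threading.current_thread() is worker)

worker = threading.Thread(target=work)
worker.start()
worker.join()
print(threading.current_thread() is worker)  # False from the main thread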
995c9d03c4f58331e63c1e7ad5fda4b4fa3234c39f98e151293f449837118e03 | def _install_signal_handlers(self):
'\n Installs signal handlers for SIGTERM, SIGINT and SIGHUP\n\n Returns:\n None\n '
signal.signal(signal.SIGTERM, self._signal_handler)
signal.signal(signal.SIGINT, self._signal_handler)
signal.signal(signal.SIGHUP, self._signal_handler) | Installs signal handlers for SIGTERM, SIGINT and SIGHUP
Returns:
None | yahoo_panoptes/framework/plugins/scheduler.py | _install_signal_handlers | ilholmes/panoptes | 86 | python | def _install_signal_handlers(self):
'\n Installs signal handlers for SIGTERM, SIGINT and SIGHUP\n\n Returns:\n None\n '
signal.signal(signal.SIGTERM, self._signal_handler)
signal.signal(signal.SIGINT, self._signal_handler)
signal.signal(signal.SIGHUP, self._signal_handler) | def _install_signal_handlers(self):
'\n Installs signal handlers for SIGTERM, SIGINT and SIGHUP\n\n Returns:\n None\n '
signal.signal(signal.SIGTERM, self._signal_handler)
signal.signal(signal.SIGINT, self._signal_handler)
signal.signal(signal.SIGHUP, self._signal_handler)<|docstring|>Installs signal handlers for SIGTERM, SIGINT and SIGHUP
Returns:
None<|endoftext|> |
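Installing one handler for several signals follows the standard-library pattern below; note that SIGHUP is POSIX-only, so this sketch assumes a Unix-like platform, as the scheduler itself does:

import signal

def handler(signum, frame):
    print("caught signal %d" % signum)

for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGHUP):
    signal.signal(sig, handler)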
d5f71b57ae71bd7edac5749799b206fd03eec8d0f36423be3bd59873d4eb566f | def _signal_handler(self, signal_number, _):
'\n Signal handler - wraps the _shutdown method with some checks\n\n Args:\n signal_number (int): The received signal number\n _ (frame): Current stack frame object\n\n Returns:\n None\n '
print((u'Caught %s, shutting down %s Plugin Scheduler' % (const.SIGNALS_TO_NAMES_DICT[signal_number], self._plugin_type_display_name)))
if self._shutdown_plugin_scheduler.is_set():
print((u'%s Plugin Scheduler already in the process of shutdown, ignoring %s' % (self._plugin_type_display_name, const.SIGNALS_TO_NAMES_DICT[signal_number])))
return
self._shutdown() | Signal handler - wraps the _shutdown method with some checks
Args:
signal_number (int): The received signal number
_ (frame): Current stack frame object
Returns:
None | yahoo_panoptes/framework/plugins/scheduler.py | _signal_handler | ilholmes/panoptes | 86 | python | def _signal_handler(self, signal_number, _):
'\n Signal handler - wraps the _shutdown method with some checks\n\n Args:\n signal_number (int): The received signal number\n _ (frame): Current stack frame object\n\n Returns:\n None\n '
print((u'Caught %s, shutting down %s Plugin Scheduler' % (const.SIGNALS_TO_NAMES_DICT[signal_number], self._plugin_type_display_name)))
if self._shutdown_plugin_scheduler.is_set():
print((u'%s Plugin Scheduler already in the process of shutdown, ignoring %s' % (self._plugin_type_display_name, const.SIGNALS_TO_NAMES_DICT[signal_number])))
return
self._shutdown() | def _signal_handler(self, signal_number, _):
'\n Signal handler - wraps the _shutdown method with some checks\n\n Args:\n signal_number (int): The received signal number\n _ (frame): Current stack frame object\n\n Returns:\n None\n '
print((u'Caught %s, shutting down %s Plugin Scheduler' % (const.SIGNALS_TO_NAMES_DICT[signal_number], self._plugin_type_display_name)))
if self._shutdown_plugin_scheduler.is_set():
print((u'%s Plugin Scheduler already in the process of shutdown, ignoring %s' % (self._plugin_type_display_name, const.SIGNALS_TO_NAMES_DICT[signal_number])))
return
self._shutdown()<|docstring|>Signal handler - wraps the _shutdown method with some checks
Args:
signal_number (int): The received signal number
_ (frame): Current stack frame object
Returns:
None<|endoftext|> |
bf00145e00c54ab0f8685c0ac8c904526cb7e9a2e35006fb37a8cb542c533718 | def estimate_max_likelihood_params(copula, data: np.ndarray, x0: Union[(np.ndarray, float)], optim_options: dict, verbose: int, scale: float):
'\n Fits the copula with the Maximum Likelihood Estimator\n\n Parameters\n ----------\n copula\n Copula whose parameters are to be estimated\n data\n Data to fit the copula with\n x0\n Initial parameters for optimization\n optim_options\n optimizer options\n verbose\n Verbosity level for the optimizer\n scale\n Amount to scale the objective function value. This is helpful in achieving higher accuracy\n as it increases the sensitivity of the optimizer. The downside is that the optimizer could\n likely run longer as a result\n '
def calc_log_lik(param: Union[(float, Collection[float])]) -> float:
'\n Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula\n\n Parameters\n ----------\n param: ndarray\n Parameters of the copula\n\n Returns\n -------\n float\n Negative log likelihood of the copula\n\n '
if any(np.isnan(np.ravel(param))):
return np.inf
try:
copula.params = param
return ((- copula.log_lik(data, to_pobs=False)) * scale)
except ValueError:
return np.inf
res: OptimizeResult = minimize(calc_log_lik, x0, **optim_options)
if (not res['success']):
if (verbose >= 1):
warn_no_convergence()
estimate = (np.nan if np.isscalar(x0) else np.repeat(np.nan, len(x0)))
else:
estimate = res['x']
copula.params = estimate
return FitSummary(estimate, 'Maximum likelihood', (- res['fun']), len(data), optim_options, res) | Fits the copula with the Maximum Likelihood Estimator
Parameters
----------
copula
Copula whose parameters are to be estimated
data
Data to fit the copula with
x0
Initial parameters for optimization
optim_options
optimizer options
verbose
Verbosity level for the optimizer
scale
Amount to scale the objective function value. This is helpful in achieving higher accuracy
as it increases the sensitivity of the optimizer. The downside is that the optimizer could
likely run longer as a result | copulae/copula/estimator/max_likelihood.py | estimate_max_likelihood_params | CrisDS81/copulae | 100 | python | def estimate_max_likelihood_params(copula, data: np.ndarray, x0: Union[(np.ndarray, float)], optim_options: dict, verbose: int, scale: float):
'\n Fits the copula with the Maximum Likelihood Estimator\n\n Parameters\n ----------\n copula\n Copula whose parameters are to be estimated\n data\n Data to fit the copula with\n x0\n Initial parameters for optimization\n optim_options\n optimizer options\n verbose\n Verbosity level for the optimizer\n scale\n Amount to scale the objective function value. This is helpful in achieving higher accuracy\n as it increases the sensitivity of the optimizer. The downside is that the optimizer could\n likely run longer as a result\n '
def calc_log_lik(param: Union[(float, Collection[float])]) -> float:
'\n Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula\n\n Parameters\n ----------\n param: ndarray\n Parameters of the copula\n\n Returns\n -------\n float\n Negative log likelihood of the copula\n\n '
if any(np.isnan(np.ravel(param))):
return np.inf
try:
copula.params = param
return ((- copula.log_lik(data, to_pobs=False)) * scale)
except ValueError:
return np.inf
res: OptimizeResult = minimize(calc_log_lik, x0, **optim_options)
if (not res['success']):
if (verbose >= 1):
warn_no_convergence()
estimate = (np.nan if np.isscalar(x0) else np.repeat(np.nan, len(x0)))
else:
estimate = res['x']
copula.params = estimate
return FitSummary(estimate, 'Maximum likelihood', (- res['fun']), len(data), optim_options, res) | def estimate_max_likelihood_params(copula, data: np.ndarray, x0: Union[(np.ndarray, float)], optim_options: dict, verbose: int, scale: float):
'\n Fits the copula with the Maximum Likelihood Estimator\n\n Parameters\n ----------\n copula\n Copula whose parameters are to be estimated\n data\n Data to fit the copula with\n x0\n Initial parameters for optimization\n optim_options\n optimizer options\n verbose\n Verbosity level for the optimizer\n scale\n Amount to scale the objective function value. This is helpful in achieving higher accuracy\n as it increases the sensitivity of the optimizer. The downside is that the optimizer could\n likely run longer as a result\n '
def calc_log_lik(param: Union[(float, Collection[float])]) -> float:
'\n Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula\n\n Parameters\n ----------\n param: ndarray\n Parameters of the copula\n\n Returns\n -------\n float\n Negative log likelihood of the copula\n\n '
if any(np.isnan(np.ravel(param))):
return np.inf
try:
copula.params = param
return ((- copula.log_lik(data, to_pobs=False)) * scale)
except ValueError:
return np.inf
res: OptimizeResult = minimize(calc_log_lik, x0, **optim_options)
if (not res['success']):
if (verbose >= 1):
warn_no_convergence()
estimate = (np.nan if np.isscalar(x0) else np.repeat(np.nan, len(x0)))
else:
estimate = res['x']
copula.params = estimate
return FitSummary(estimate, 'Maximum likelihood', (- res['fun']), len(data), optim_options, res)<|docstring|>Fits the copula with the Maximum Likelihood Estimator
Parameters
----------
copula
Copula whose parameters are to be estimated
data
Data to fit the copula with
x0
Initial parameters for optimization
optim_options
optimizer options
verbose
Verbosity level for the optimizer
scale
Amount to scale the objective function value. This is helpful in achieving higher accuracy
as it increases the sensitivity of the optimizer. The downside is that the optimizer could
likely run longer as a result<|endoftext|> |
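
A hedged usage sketch for the estimator above, going through copulae's public fit method (which dispatches to maximum-likelihood fitting internally); the method='ml' keyword and the GaussianCopula constructor arguments are assumptions about the package's API rather than confirmed signatures.

import numpy as np
from copulae import GaussianCopula

rng = np.random.default_rng(0)
# Correlated two-dimensional sample to fit against
data = rng.standard_normal((500, 2)) @ np.array([[1.0, 0.0], [0.6, 0.8]])
cop = GaussianCopula(dim=2)
cop.fit(data, method='ml')   # assumed to route through estimate_max_likelihood_params
print(cop.params)            # fitted correlation parameter(s)
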
aae2823ad0e253d6ffc334094951b58596f5f649ae63cb3a6eeed00f076e09d4 | def calc_log_lik(param: Union[(float, Collection[float])]) -> float:
'\n Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula\n\n Parameters\n ----------\n param: ndarray\n Parameters of the copula\n\n Returns\n -------\n float\n Negative log likelihood of the copula\n\n '
if any(np.isnan(np.ravel(param))):
return np.inf
try:
copula.params = param
return ((- copula.log_lik(data, to_pobs=False)) * scale)
except ValueError:
return np.inf | Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula
Parameters
----------
param: ndarray
Parameters of the copula
Returns
-------
float
Negative log likelihood of the copula | copulae/copula/estimator/max_likelihood.py | calc_log_lik | CrisDS81/copulae | 100 | python | def calc_log_lik(param: Union[(float, Collection[float])]) -> float:
'\n Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula\n\n Parameters\n ----------\n param: ndarray\n Parameters of the copula\n\n Returns\n -------\n float\n Negative log likelihood of the copula\n\n '
if any(np.isnan(np.ravel(param))):
return np.inf
try:
copula.params = param
return ((- copula.log_lik(data, to_pobs=False)) * scale)
except ValueError:
return np.inf | def calc_log_lik(param: Union[(float, Collection[float])]) -> float:
'\n Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula\n\n Parameters\n ----------\n param: ndarray\n Parameters of the copula\n\n Returns\n -------\n float\n Negative log likelihood of the copula\n\n '
if any(np.isnan(np.ravel(param))):
return np.inf
try:
copula.params = param
return ((- copula.log_lik(data, to_pobs=False)) * scale)
except ValueError:
return np.inf<|docstring|>Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula
Parameters
----------
param: ndarray
Parameters of the copula
Returns
-------
float
Negative log likelihood of the copula<|endoftext|> |
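
The same wrap-the-likelihood pattern in isolation, applied to a plain Gaussian model instead of a copula so the scipy.optimize.minimize call runs end to end; all names here are illustrative.

import numpy as np
from scipy.optimize import minimize

def neg_log_lik(theta, sample):
    # Guard against NaN parameters, exactly as calc_log_lik does
    if np.any(np.isnan(theta)):
        return np.inf
    mu, log_sigma = theta
    sigma = np.exp(log_sigma)            # reparameterize to keep sigma > 0
    z = (sample - mu) / sigma
    return np.sum(0.5 * z ** 2 + np.log(sigma))   # Gaussian NLL up to a constant

sample = np.random.default_rng(1).normal(2.0, 0.5, size=1000)
res = minimize(neg_log_lik, x0=[0.0, 0.0], args=(sample,), method='Nelder-Mead')
print(res.x)   # approximately [2.0, log(0.5)]
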
f2155bef472dfa6ce71cd48875986923ae20fb02ec25581acd55f445044f004e | def invalid_request_error(e):
'Generates a valid ELG "failure" response if the request cannot be parsed'
return ({'failure': {'errors': [{'code': 'elg.request.invalid', 'text': 'Invalid request message'}]}}, 400) | Generates a valid ELG "failure" response if the request cannot be parsed | docker/elg_app.py | invalid_request_error | GateNLP/ToxicClassifier | 0 | python | def invalid_request_error(e):
return ({'failure': {'errors': [{'code': 'elg.request.invalid', 'text': 'Invalid request message'}]}}, 400) | def invalid_request_error(e):
return ({'failure': {'errors': [{'code': 'elg.request.invalid', 'text': 'Invalid request message'}]}}, 400)<|docstring|>Generates a valid ELG "failure" response if the request cannot be parsed<|endoftext|> |
8ba04ea6400d37216807d60417c046511d138c220f93c46535a89a024339e6e4 | @app.errorhandler(RequestTooLarge)
def request_too_large(e):
'Generates a valid ELG "failure" response if the request is too large'
return ({'failure': {'errors': [{'code': 'elg.request.too.large', 'text': 'Request size too large'}]}}, 400) | Generates a valid ELG "failure" response if the request is too large | docker/elg_app.py | request_too_large | GateNLP/ToxicClassifier | 0 | python | @app.errorhandler(RequestTooLarge)
def request_too_large(e):
return ({'failure': {'errors': [{'code': 'elg.request.too.large', 'text': 'Request size too large'}]}}, 400) | @app.errorhandler(RequestTooLarge)
def request_too_large(e):
return ({'failure': {'errors': [{'code': 'elg.request.too.large', 'text': 'Request size too large'}]}}, 400)<|docstring|>Generates a valid ELG "failure" response if the request is too large<|endoftext|> |
346c5ef1cc76a5e26204e15df2ad40ebd37c06d8624cc0961d0895a7837cb183 | @app.route('/process', methods=['POST'])
async def process_request():
'Main request processing logic - accepts a JSON request and returns a JSON response.'
(ctype, type_params) = cgi.parse_header(request.content_type)
if (ctype == 'text/plain'):
content = (await get_text_content(request, type_params.get('charset', 'utf-8')))
elif (ctype == 'application/json'):
data = (await request.get_json())
if ((data.get('type') != 'text') or ('content' not in data)):
raise BadRequest()
content = data['content']
else:
raise BadRequest()
annotations = dict()
(prediction, probabilities) = (await run_sync(classify)(content))
features = dict()
features[('is' + annotationType)] = (prediction == 1)
features['probability'] = probabilities[prediction]
annot = {'start': 0, 'end': len(content), 'features': features}
annotations.setdefault((annotationType + 'Language'), []).append(annot)
return dict(response={'type': 'annotations', 'annotations': annotations}) | Main request processing logic - accepts a JSON request and returns a JSON response. | docker/elg_app.py | process_request | GateNLP/ToxicClassifier | 0 | python | @app.route('/process', methods=['POST'])
async def process_request():
(ctype, type_params) = cgi.parse_header(request.content_type)
if (ctype == 'text/plain'):
content = (await get_text_content(request, type_params.get('charset', 'utf-8')))
elif (ctype == 'application/json'):
data = (await request.get_json())
if ((data.get('type') != 'text') or ('content' not in data)):
raise BadRequest()
content = data['content']
else:
raise BadRequest()
annotations = dict()
(prediction, probabilities) = (await run_sync(classify)(content))
features = dict()
features[('is' + annotationType)] = (prediction == 1)
features['probability'] = probabilities[prediction]
annot = {'start': 0, 'end': len(content), 'features': features}
annotations.setdefault((annotationType + 'Language'), []).append(annot)
return dict(response={'type': 'annotations', 'annotations': annotations}) | @app.route('/process', methods=['POST'])
async def process_request():
(ctype, type_params) = cgi.parse_header(request.content_type)
if (ctype == 'text/plain'):
content = (await get_text_content(request, type_params.get('charset', 'utf-8')))
elif (ctype == 'application/json'):
data = (await request.get_json())
if ((data.get('type') != 'text') or ('content' not in data)):
raise BadRequest()
content = data['content']
else:
raise BadRequest()
annotations = dict()
(prediction, probabilities) = (await run_sync(classify)(content))
features = dict()
features[('is' + annotationType)] = (prediction == 1)
features['probability'] = probabilities[prediction]
annot = {'start': 0, 'end': len(content), 'features': features}
annotations.setdefault((annotationType + 'Language'), []).append(annot)
return dict(response={'type': 'annotations', 'annotations': annotations})<|docstring|>Main request processing logic - accepts a JSON request and returns a JSON response.<|endoftext|> |
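
A hedged client-side sketch of the ELG request this endpoint accepts and the annotations payload it returns; the host, port, and the example annotation key are hypothetical.

import requests

resp = requests.post(
    'http://localhost:8080/process',   # hypothetical service address
    json={'type': 'text', 'content': 'example input text'},
)
body = resp.json()
# e.g. {'response': {'type': 'annotations', 'annotations': {'ToxicLanguage': [...]}}}
print(body['response']['annotations'])
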
54314c4c001cf556746f7450521c28aaf42e51e39e0d0819f7d8a588621cd6ad | def equilibrium():
'This function suggests a probability to bet on heads(0) or tails(1) depending on the historical data. \n The main driver of this analysis is to maintain the equivalent 50/50 - Equilibrium - for each side '
probheads = probtails = 0.0
print(f'The probability to find heads (0) is {probheads:.2f} and tails (1) is {probtails:.2f}') | This function suggests a probability to bet on heads(0) or tails(1) depending on the historical data.
The main driver of this analysis is to maintain the equivalent 50/50 - Equilibrium - for each side | Python Exercises/headsandtailsprediction.py | equilibrium | felipebrigo/Python-projects | 0 | python | def equilibrium():
'This function suggests a probability to bet on heads(0) or tails(1) depending on the historical data. \n The main driver of this analysis is to maintain the equivalent 50/50 - Equilibrium - for each side '
probheads = probtails = 0.0
print(f'The probability to find heads (0) is {probheads:.2f} and tails (1) is {probtails:.2f}') | def equilibrium():
'This function suggests a probability to bet on heads(0) or tails(1) depending on the historical data. \n The main driver of this analysis is to maintain the equivalent 50/50 - Equilibrium - for each side '
probheads = probtails = 0.0
print(f'The probability to find heads (0) is {probheads:.2f} and tails (1) is {probtails:.2f}')<|docstring|>This function suggests a probability to bet on heads(0) or tails(1) depending on the historical data.
The main driver of this analysis is to maintain the equivalent 50/50 - Equilibrium - for each side<|endoftext|>
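
The body above hard-codes both probabilities to 0.0; below is a sketch of the empirical calculation the docstring describes, with history as an assumed list of past flips (0 for heads, 1 for tails).

def equilibrium_from_history(history):
    if not history:
        return 0.5, 0.5   # no data yet: assume a fair coin
    probheads = history.count(0) / len(history)
    probtails = 1.0 - probheads
    print(f'The probability to find heads (0) is {probheads:.2f} and tails (1) is {probtails:.2f}')
    return probheads, probtails

equilibrium_from_history([0, 1, 1, 0, 1])
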
f27a860994c433d83818d9240e20b4be597e93842f1a0ad4c04afbeb564275d1 | def signal_noise(response):
'\n Signal and noise as defined in Borst and Theunissen 1999, Figure 2\n\n Parameters\n ----------\n\n response: nitime TimeSeries object\n The data here are individual responses of a single unit to the same\n stimulus, with repetitions being the first dimension and time as the\n last dimension\n '
signal = np.mean(response.data, 0)
noise = (response.data - signal)
return (ts.TimeSeries(signal, sampling_rate=response.sampling_rate), ts.TimeSeries(noise, sampling_rate=response.sampling_rate)) | Signal and noise as defined in Borst and Theunissen 1999, Figure 2
Parameters
----------
response: nitime TimeSeries object
The data here are individual responses of a single unit to the same
stimulus, with repetitions being the first dimension and time as the
last dimension | nitime/analysis/snr.py | signal_noise | miketrumpis/nitime | 172 | python | def signal_noise(response):
'\n Signal and noise as defined in Borst and Theunissen 1999, Figure 2\n\n Parameters\n ----------\n\n response: nitime TimeSeries object\n The data here are individual responses of a single unit to the same\n stimulus, with repetitions being the first dimension and time as the\n last dimension\n '
signal = np.mean(response.data, 0)
noise = (response.data - signal)
return (ts.TimeSeries(signal, sampling_rate=response.sampling_rate), ts.TimeSeries(noise, sampling_rate=response.sampling_rate)) | def signal_noise(response):
'\n Signal and noise as defined in Borst and Theunissen 1999, Figure 2\n\n Parameters\n ----------\n\n response: nitime TimeSeries object\n The data here are individual responses of a single unit to the same\n stimulus, with repetitions being the first dimension and time as the\n last dimension\n '
signal = np.mean(response.data, 0)
noise = (response.data - signal)
return (ts.TimeSeries(signal, sampling_rate=response.sampling_rate), ts.TimeSeries(noise, sampling_rate=response.sampling_rate))<|docstring|>Signal and noise as defined in Borst and Theunissen 1999, Figure 2
Parameters
----------
response: nitime TimeSeries object
The data here are individual responses of a single unit to the same
stimulus, with repetitions being the first dimension and time as the
last dimension<|endoftext|> |
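
A small worked example of the decomposition above: the signal is the mean across repeated trials and the noise is each trial's residual around that mean. The 5 Hz sinusoid and the noise level are arbitrary choices.

import numpy as np
import nitime.timeseries as ts
from nitime.analysis.snr import signal_noise

n_trials, n_t, fs = 8, 1000, 1000.0
t = np.arange(n_t) / fs
rng = np.random.default_rng(2)
trials = np.sin(2 * np.pi * 5 * t) + 0.3 * rng.standard_normal((n_trials, n_t))
response = ts.TimeSeries(trials, sampling_rate=fs)
sig, noi = signal_noise(response)
print(sig.data.shape, noi.data.shape)   # (1000,) and (8, 1000)
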
9e5a9219f3bb3d76846b167c3994e170beafb2b9a5b14ab2bb5c919461f31cda | def __init__(self, input=None, bandwidth=None, adaptive=False, low_bias=False):
'\n Initializer for the multi_taper_SNR object\n\n Parameters\n ----------\n input: TimeSeries object\n\n bandwidth: float,\n The bandwidth of the windowing function will determine the number\n tapers to use. This parameter represents a trade-off between\n frequency resolution (lower main lobe bandwidth for the taper) and\n variance reduction (higher bandwidth and number of averaged\n estimates). Per default will be set to 4 times the fundamental\n frequency, such that NW=4\n\n adaptive: bool, default to False\n Whether to set the weights for the tapered spectra according to the\n adaptive algorithm (Thompson, 2007).\n\n low_bias : bool, default to False\n Rather than use 2NW tapers, only use the tapers that have better\n than 90% spectral concentration within the bandwidth (still using a\n maximum of 2NW tapers)\n\n Notes\n -----\n\n Thompson, DJ (2007) Jackknifing multitaper spectrum estimates. IEEE\n Signal Processing Magazine. 24: 20-30\n\n '
self.input = input
(self.signal, self.noise) = signal_noise(input)
self.bandwidth = bandwidth
self.adaptive = adaptive
self.low_bias = low_bias | Initializer for the multi_taper_SNR object
Parameters
----------
input: TimeSeries object
bandwidth: float,
The bandwidth of the windowing function will determine the number
tapers to use. This parameter represents a trade-off between
frequency resolution (lower main lobe bandwidth for the taper) and
variance reduction (higher bandwidth and number of averaged
estimates). Per default will be set to 4 times the fundamental
frequency, such that NW=4
adaptive: bool, default to False
Whether to set the weights for the tapered spectra according to the
adaptive algorithm (Thompson, 2007).
low_bias : bool, default to False
Rather than use 2NW tapers, only use the tapers that have better
than 90% spectral concentration within the bandwidth (still using a
maximum of 2NW tapers)
Notes
-----
Thompson, DJ (2007) Jackknifing multitaper spectrum estimates. IEEE
Signal Processing Magazine. 24: 20-30 | nitime/analysis/snr.py | __init__ | miketrumpis/nitime | 172 | python | def __init__(self, input=None, bandwidth=None, adaptive=False, low_bias=False):
'\n Initializer for the multi_taper_SNR object\n\n Parameters\n ----------\n input: TimeSeries object\n\n bandwidth: float,\n The bandwidth of the windowing function will determine the number\n tapers to use. This parameter represents a trade-off between\n frequency resolution (lower main lobe bandwidth for the taper) and\n variance reduction (higher bandwidth and number of averaged\n estimates). Per default will be set to 4 times the fundamental\n frequency, such that NW=4\n\n adaptive: bool, default to False\n Whether to set the weights for the tapered spectra according to the\n adaptive algorithm (Thompson, 2007).\n\n low_bias : bool, default to False\n Rather than use 2NW tapers, only use the tapers that have better\n than 90% spectral concentration within the bandwidth (still using a\n maximum of 2NW tapers)\n\n Notes\n -----\n\n Thompson, DJ (2007) Jackknifing multitaper spectrum estimates. IEEE\n Signal Processing Magazine. 24: 20-30\n\n '
self.input = input
(self.signal, self.noise) = signal_noise(input)
self.bandwidth = bandwidth
self.adaptive = adaptive
self.low_bias = low_bias | def __init__(self, input=None, bandwidth=None, adaptive=False, low_bias=False):
'\n Initializer for the multi_taper_SNR object\n\n Parameters\n ----------\n input: TimeSeries object\n\n bandwidth: float,\n The bandwidth of the windowing function will determine the number\n tapers to use. This parameter represents a trade-off between\n frequency resolution (lower main lobe bandwidth for the taper) and\n variance reduction (higher bandwidth and number of averaged\n estimates). Per default will be set to 4 times the fundamental\n frequency, such that NW=4\n\n adaptive: bool, default to False\n Whether to set the weights for the tapered spectra according to the\n adaptive algorithm (Thompson, 2007).\n\n low_bias : bool, default to False\n Rather than use 2NW tapers, only use the tapers that have better\n than 90% spectral concentration within the bandwidth (still using a\n maximum of 2NW tapers)\n\n Notes\n -----\n\n Thompson, DJ (2007) Jackknifing multitaper spectrum estimates. IEEE\n Signal Processing Magazine. 24: 20-30\n\n '
self.input = input
(self.signal, self.noise) = signal_noise(input)
self.bandwidth = bandwidth
self.adaptive = adaptive
self.low_bias = low_bias<|docstring|>Initializer for the multi_taper_SNR object
Parameters
----------
input: TimeSeries object
bandwidth: float,
The bandwidth of the windowing function will determine the number
tapers to use. This parameter represents a trade-off between
frequency resolution (lower main lobe bandwidth for the taper) and
variance reduction (higher bandwidth and number of averaged
estimates). Per default will be set to 4 times the fundamental
frequency, such that NW=4
adaptive: bool, default to False
Whether to set the weights for the tapered spectra according to the
adaptive algorithm (Thompson, 2007).
low_bias : bool, default to False
Rather than use 2NW tapers, only use the tapers that have better
than 90% spectral concentration within the bandwidth (still using a
maximum of 2NW tapers)
Notes
-----
Thompson, DJ (2007) Jackknifing multitaper spectrum estimates. IEEE
Signal Processing Magazine. 24: 20-30<|endoftext|>
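
A hedged construction sketch for the analyzer this initializer belongs to; SNRAnalyzer is assumed to be the class name exposed by nitime.analysis.snr for this code.

import numpy as np
import nitime.timeseries as ts
from nitime.analysis.snr import SNRAnalyzer

trials = np.random.default_rng(3).standard_normal((10, 512))
response = ts.TimeSeries(trials, sampling_rate=256.0)
snr = SNRAnalyzer(response, bandwidth=None, adaptive=False, low_bias=False)
print(snr.signal.data.shape, snr.noise.data.shape)   # (512,) and (10, 512)
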
76f7048a6091239e1cac563b627c4e7869fed4a941865fd0e4729c3e3f395b15 | @desc.setattr_on_read
def correlation(self):
'\n The correlation between all combinations of trials\n\n Returns\n -------\n (r,e) : tuple\n r is the mean correlation and e is the mean error of the correlation\n (with df = n_trials - 1)\n '
c = np.corrcoef(self.input.data)
c = c[tril_indices_from(c, (- 1))]
return (np.mean(c), stats.sem(c)) | The correlation between all combinations of trials
Returns
-------
(r,e) : tuple
r is the mean correlation and e is the mean error of the correlation
(with df = n_trials - 1) | nitime/analysis/snr.py | correlation | miketrumpis/nitime | 172 | python | @desc.setattr_on_read
def correlation(self):
'\n The correlation between all combinations of trials\n\n Returns\n -------\n (r,e) : tuple\n r is the mean correlation and e is the mean error of the correlation\n (with df = n_trials - 1)\n '
c = np.corrcoef(self.input.data)
c = c[tril_indices_from(c, (- 1))]
return (np.mean(c), stats.sem(c)) | @desc.setattr_on_read
def correlation(self):
'\n The correlation between all combinations of trials\n\n Returns\n -------\n (r,e) : tuple\n r is the mean correlation and e is the mean error of the correlation\n (with df = n_trials - 1)\n '
c = np.corrcoef(self.input.data)
c = c[tril_indices_from(c, (- 1))]
return (np.mean(c), stats.sem(c))<|docstring|>The correlation between all combinations of trials
Returns
-------
(r,e) : tuple
r is the mean correlation and e is the mean error of the correlation
(with df = n_trials - 1)<|endoftext|> |
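
The same pairwise-trial statistic in plain numpy and scipy: correlate every pair of trials, keep the strictly lower triangle so each pair is counted once, and report the mean with its standard error.

import numpy as np
from scipy import stats

trials = np.random.default_rng(4).standard_normal((6, 300))
c = np.corrcoef(trials)
pairs = c[np.tril_indices_from(c, -1)]   # one entry per trial pair
print(pairs.mean(), stats.sem(pairs))
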
b80fc2123f6160d693fd164c885e00f0379ca6f097342689ef4142fe579cd36e | def state_concatenate(self, obs, pose_pre, history_buffer, command, num_buffer=2):
'\n\n :param obs:\n :param history_buffer:\n :param command:\n :return:\n '
velocity_base = ((obs[:6] - pose_pre) / self.dt)
data_tmp = history_buffer.pull().copy()[::(- 1)]
data_size = history_buffer.num_size_per
if (len(data_tmp) == 0):
data_history = np.zeros((data_size * num_buffer))
else:
for i in range(len(data_tmp)):
if (i == 0):
data_history = data_tmp[0]
else:
data_history = np.append(data_history, data_tmp[i])
if (len(data_tmp) < num_buffer):
for i in range((num_buffer - len(data_tmp))):
data_history = np.append(data_history, np.zeros(data_size))
state = obs
if (num_buffer > 0):
state = np.append(state, data_history.reshape((1, (- 1))))
if (self.command_mode == 'command'):
vel = ((np.concatenate((obs[:2], obs[5:6])) - np.concatenate((pose_pre[:2], pose_pre[5:6]))) / self.dt)
v_e = (vel - command)
state = np.append(state, v_e)
elif (self.command_mode == 'delta'):
state = np.append(state, command)
elif (self.command_mode == 'no'):
state = state
else:
raise Exception(' command mode is not correct!')
return state | :param obs:
:param history_buffer:
:param command:
:return: | my_envs/mujoco/cellrobotCPG2.py | state_concatenate | Jerryxiaoyu/my_baselines | 0 | python | def state_concatenate(self, obs, pose_pre, history_buffer, command, num_buffer=2):
'\n\n :param obs:\n :param history_buffer:\n :param command:\n :return:\n '
velocity_base = ((obs[:6] - pose_pre) / self.dt)
data_tmp = history_buffer.pull().copy()[::(- 1)]
data_size = history_buffer.num_size_per
if (len(data_tmp) == 0):
data_history = np.zeros((data_size * num_buffer))
else:
for i in range(len(data_tmp)):
if (i == 0):
data_history = data_tmp[0]
else:
data_history = np.append(data_history, data_tmp[i])
if (len(data_tmp) < num_buffer):
for i in range((num_buffer - len(data_tmp))):
data_history = np.append(data_history, np.zeros(data_size))
state = obs
if (num_buffer > 0):
state = np.append(state, data_history.reshape((1, (- 1))))
if (self.command_mode == 'command'):
vel = ((np.concatenate((obs[:2], obs[5:6])) - np.concatenate((pose_pre[:2], pose_pre[5:6]))) / self.dt)
v_e = (vel - command)
state = np.append(state, v_e)
elif (self.command_mode == 'delta'):
state = np.append(state, command)
elif (self.command_mode == 'no'):
state = state
else:
raise Exception(' command mode is not correct!')
return state | def state_concatenate(self, obs, pose_pre, history_buffer, command, num_buffer=2):
'\n\n :param obs:\n :param history_buffer:\n :param command:\n :return:\n '
velocity_base = ((obs[:6] - pose_pre) / self.dt)
data_tmp = history_buffer.pull().copy()[::(- 1)]
data_size = history_buffer.num_size_per
if (len(data_tmp) == 0):
data_history = np.zeros((data_size * num_buffer))
else:
for i in range(len(data_tmp)):
if (i == 0):
data_history = data_tmp[0]
else:
data_history = np.append(data_history, data_tmp[i])
if (len(data_tmp) < num_buffer):
for i in range((num_buffer - len(data_tmp))):
data_history = np.append(data_history, np.zeros(data_size))
state = obs
if (num_buffer > 0):
state = np.append(state, data_history.reshape((1, (- 1))))
if (self.command_mode == 'command'):
vel = ((np.concatenate((obs[:2], obs[5:6])) - np.concatenate((pose_pre[:2], pose_pre[5:6]))) / self.dt)
v_e = (vel - command)
state = np.append(state, v_e)
elif (self.command_mode == 'delta'):
state = np.append(state, command)
elif (self.command_mode == 'no'):
state = state
else:
raise Exception(' command mode is not correct!')
return state<|docstring|>:param obs:
:param history_buffer:
:param command:
:return:<|endoftext|> |
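
A self-contained sketch of the history-stacking step above: take up to num_buffer past frames, newest first, and zero-pad to a fixed width so the state vector length never changes. The function and argument names are illustrative.

import numpy as np

def stack_history(frames, frame_size, num_buffer):
    out = list(frames[::-1][:num_buffer])                    # newest first
    out += [np.zeros(frame_size)] * (num_buffer - len(out))  # zero-pad the rest
    return np.concatenate(out)

print(stack_history([np.ones(3), 2 * np.ones(3)], frame_size=3, num_buffer=4))
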
63ac203a01892c2d6e642763d6e8daf81b8aa41dc1c364492b220310613ef17a | def reward_fun6(self, velocity_base, v_commdand, action, obs):
'\n add orien\n :param velocity_base:\n :param v_commdand:\n :param action:\n :param obs:\n :return:\n '
v_e = (np.concatenate((velocity_base[:2], velocity_base[(- 1):])) - v_commdand)
vxy = v_commdand[:2]
wyaw = v_commdand[2]
q_vel = obs[19:32]
orien = obs[3:6]
vx = v_commdand[0]
vy = v_commdand[1]
kc = 1
c_w = ((- 2) * self.dt)
c_v1 = ((- 10) * self.dt)
c_v2 = ((- 1) * self.dt)
lin_vel_reward = (c_v1 * np.linalg.norm((velocity_base[0:2] - vxy)))
ang_vel_reward = (c_w * np.linalg.norm((velocity_base[(- 1)] - wyaw)))
c_t = (0.0005 * self.dt)
torque_cost = (((- kc) * c_t) * np.square(action).sum())
c_js = (0.03 * self.dt)
joint_speed_cost = (((- kc) * c_js) * np.square(q_vel).sum())
c_0 = (0.4 * self.dt)
orientation_cost = 0
orientation_cost = ((kc * c_0) * np.square(([0, 0] - orien[:2])).sum())
c_s = (0.5 * self.dt)
smoothness_cost = (((- kc) * c_s) * np.square((self.action_pre - action)).sum())
survive_reward = 0.2
reward = ((((((lin_vel_reward + ang_vel_reward) + torque_cost) + joint_speed_cost) + orientation_cost) + smoothness_cost) + survive_reward)
other_rewards = np.array([reward, lin_vel_reward, ang_vel_reward, torque_cost, joint_speed_cost, orientation_cost, smoothness_cost, survive_reward])
return (reward, other_rewards) | add orien
:param velocity_base:
:param v_commdand:
:param action:
:param obs:
:return: | my_envs/mujoco/cellrobotCPG2.py | reward_fun6 | Jerryxiaoyu/my_baselines | 0 | python | def reward_fun6(self, velocity_base, v_commdand, action, obs):
'\n add orien\n :param velocity_base:\n :param v_commdand:\n :param action:\n :param obs:\n :return:\n '
v_e = (np.concatenate((velocity_base[:2], velocity_base[(- 1):])) - v_commdand)
vxy = v_commdand[:2]
wyaw = v_commdand[2]
q_vel = obs[19:32]
orien = obs[3:6]
vx = v_commdand[0]
vy = v_commdand[1]
kc = 1
c_w = ((- 2) * self.dt)
c_v1 = ((- 10) * self.dt)
c_v2 = ((- 1) * self.dt)
lin_vel_reward = (c_v1 * np.linalg.norm((velocity_base[0:2] - vxy)))
ang_vel_reward = (c_w * np.linalg.norm((velocity_base[(- 1)] - wyaw)))
c_t = (0.0005 * self.dt)
torque_cost = (((- kc) * c_t) * np.square(action).sum())
c_js = (0.03 * self.dt)
joint_speed_cost = (((- kc) * c_js) * np.square(q_vel).sum())
c_0 = (0.4 * self.dt)
orientation_cost = 0
orientation_cost = ((kc * c_0) * np.square(([0, 0] - orien[:2])).sum())
c_s = (0.5 * self.dt)
smoothness_cost = (((- kc) * c_s) * np.square((self.action_pre - action)).sum())
survive_reward = 0.2
reward = ((((((lin_vel_reward + ang_vel_reward) + torque_cost) + joint_speed_cost) + orientation_cost) + smoothness_cost) + survive_reward)
other_rewards = np.array([reward, lin_vel_reward, ang_vel_reward, torque_cost, joint_speed_cost, orientation_cost, smoothness_cost, survive_reward])
return (reward, other_rewards) | def reward_fun6(self, velocity_base, v_commdand, action, obs):
'\n add orien\n :param velocity_base:\n :param v_commdand:\n :param action:\n :param obs:\n :return:\n '
v_e = (np.concatenate((velocity_base[:2], velocity_base[(- 1):])) - v_commdand)
vxy = v_commdand[:2]
wyaw = v_commdand[2]
q_vel = obs[19:32]
orien = obs[3:6]
vx = v_commdand[0]
vy = v_commdand[1]
kc = 1
c_w = ((- 2) * self.dt)
c_v1 = ((- 10) * self.dt)
c_v2 = ((- 1) * self.dt)
lin_vel_reward = (c_v1 * np.linalg.norm((velocity_base[0:2] - vxy)))
ang_vel_reward = (c_w * np.linalg.norm((velocity_base[(- 1)] - wyaw)))
c_t = (0.0005 * self.dt)
torque_cost = (((- kc) * c_t) * np.square(action).sum())
c_js = (0.03 * self.dt)
joint_speed_cost = (((- kc) * c_js) * np.square(q_vel).sum())
c_0 = (0.4 * self.dt)
orientation_cost = 0
orientation_cost = ((kc * c_0) * np.square(([0, 0] - orien[:2])).sum())
c_s = (0.5 * self.dt)
smoothness_cost = (((- kc) * c_s) * np.square((self.action_pre - action)).sum())
survive_reward = 0.2
reward = ((((((lin_vel_reward + ang_vel_reward) + torque_cost) + joint_speed_cost) + orientation_cost) + smoothness_cost) + survive_reward)
other_rewards = np.array([reward, lin_vel_reward, ang_vel_reward, torque_cost, joint_speed_cost, orientation_cost, smoothness_cost, survive_reward])
return (reward, other_rewards)<|docstring|>add orien
:param velocity_base:
:param v_commdand:
:param action:
:param obs:
:return:<|endoftext|> |
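
The shaping terms above condensed into one standalone function: each cost is a scaled squared norm (or Euclidean norm for the velocity terms), summed with a constant survive bonus. The dt value is an assumed control timestep, and the positive sign on the tilt term mirrors the source.

import numpy as np

def step_reward(v_xy, w_yaw, cmd, action, action_pre, q_vel, orien, dt=0.01):
    lin = -10 * dt * np.linalg.norm(v_xy - cmd[:2])          # track commanded xy velocity
    ang = -2 * dt * np.linalg.norm(w_yaw - cmd[2])           # track commanded yaw rate
    torque = -0.0005 * dt * np.square(action).sum()          # actuation effort
    joints = -0.03 * dt * np.square(q_vel).sum()             # joint speed penalty
    tilt = 0.4 * dt * np.square(orien[:2]).sum()             # kept positive, as in the source
    smooth = -0.5 * dt * np.square(action_pre - action).sum()  # action smoothness
    return lin + ang + torque + joints + tilt + smooth + 0.2   # constant survive bonus
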
264b59934e47f391cb214e42e95aa372c1fa7782321f9d9cd94331d28eba0579 | def reward_fun7(self, velocity_base, v_commdand, action, obs):
'\n integral orien\n :param velocity_base:\n :param v_commdand:\n :param action:\n :param obs:\n :return:\n '
v_e = (np.concatenate((velocity_base[:2], velocity_base[(- 1):])) - v_commdand)
vxy = v_commdand[:2]
wyaw = v_commdand[2]
q_vel = obs[19:32]
orien = obs[3:6]
vx = v_commdand[0]
vy = v_commdand[1]
kc = 1
c_w = ((- 2) * self.dt)
c_v1 = ((- 10) * self.dt)
c_v2 = ((- 1) * self.dt)
lin_vel_reward = (c_v1 * np.linalg.norm((velocity_base[0:2] - vxy)))
ang_vel_reward = (c_w * np.linalg.norm((velocity_base[(- 1)] - wyaw)))
c_t = (0.0005 * self.dt)
torque_cost = (((- kc) * c_t) * np.square(action).sum())
c_js = (0.03 * self.dt)
joint_speed_cost = (((- kc) * c_js) * np.square(q_vel).sum())
c_0 = (0.4 * self.dt)
orientation_cost = 0
orientation_cost = ((kc * c_0) * np.square(([0, 0] - orien[:2])).sum())
c_s = (0.5 * self.dt)
smoothness_cost = (((- kc) * c_s) * np.square((self.action_pre - action)).sum())
survive_reward = 0.2
c_y = (5 * self.dt)
orien_yaw_cost = ((- c_y) * np.linalg.norm((orien[(- 1)] - self.goal_orien_yaw)))
reward = (((((((lin_vel_reward + ang_vel_reward) + torque_cost) + joint_speed_cost) + orientation_cost) + smoothness_cost) + survive_reward) + orien_yaw_cost)
other_rewards = np.array([reward, lin_vel_reward, ang_vel_reward, torque_cost, joint_speed_cost, orientation_cost, smoothness_cost, survive_reward, orien_yaw_cost])
return (reward, other_rewards) | integral orien
:param velocity_base:
:param v_commdand:
:param action:
:param obs:
:return: | my_envs/mujoco/cellrobotCPG2.py | reward_fun7 | Jerryxiaoyu/my_baselines | 0 | python | def reward_fun7(self, velocity_base, v_commdand, action, obs):
'\n integral orien\n :param velocity_base:\n :param v_commdand:\n :param action:\n :param obs:\n :return:\n '
v_e = (np.concatenate((velocity_base[:2], velocity_base[(- 1):])) - v_commdand)
vxy = v_commdand[:2]
wyaw = v_commdand[2]
q_vel = obs[19:32]
orien = obs[3:6]
vx = v_commdand[0]
vy = v_commdand[1]
kc = 1
c_w = ((- 2) * self.dt)
c_v1 = ((- 10) * self.dt)
c_v2 = ((- 1) * self.dt)
lin_vel_reward = (c_v1 * np.linalg.norm((velocity_base[0:2] - vxy)))
ang_vel_reward = (c_w * np.linalg.norm((velocity_base[(- 1)] - wyaw)))
c_t = (0.0005 * self.dt)
torque_cost = (((- kc) * c_t) * np.square(action).sum())
c_js = (0.03 * self.dt)
joint_speed_cost = (((- kc) * c_js) * np.square(q_vel).sum())
c_0 = (0.4 * self.dt)
orientation_cost = 0
orientation_cost = ((kc * c_0) * np.square(([0, 0] - orien[:2])).sum())
c_s = (0.5 * self.dt)
smoothness_cost = (((- kc) * c_s) * np.square((self.action_pre - action)).sum())
survive_reward = 0.2
c_y = (5 * self.dt)
orien_yaw_cost = ((- c_y) * np.linalg.norm((orien[(- 1)] - self.goal_orien_yaw)))
reward = (((((((lin_vel_reward + ang_vel_reward) + torque_cost) + joint_speed_cost) + orientation_cost) + smoothness_cost) + survive_reward) + orien_yaw_cost)
other_rewards = np.array([reward, lin_vel_reward, ang_vel_reward, torque_cost, joint_speed_cost, orientation_cost, smoothness_cost, survive_reward, orien_yaw_cost])
return (reward, other_rewards) | def reward_fun7(self, velocity_base, v_commdand, action, obs):
'\n integral orien\n :param velocity_base:\n :param v_commdand:\n :param action:\n :param obs:\n :return:\n '
v_e = (np.concatenate((velocity_base[:2], velocity_base[(- 1):])) - v_commdand)
vxy = v_commdand[:2]
wyaw = v_commdand[2]
q_vel = obs[19:32]
orien = obs[3:6]
vx = v_commdand[0]
vy = v_commdand[1]
kc = 1
c_w = ((- 2) * self.dt)
c_v1 = ((- 10) * self.dt)
c_v2 = ((- 1) * self.dt)
lin_vel_reward = (c_v1 * np.linalg.norm((velocity_base[0:2] - vxy)))
ang_vel_reward = (c_w * np.linalg.norm((velocity_base[(- 1)] - wyaw)))
c_t = (0.0005 * self.dt)
torque_cost = (((- kc) * c_t) * np.square(action).sum())
c_js = (0.03 * self.dt)
joint_speed_cost = (((- kc) * c_js) * np.square(q_vel).sum())
c_0 = (0.4 * self.dt)
orientation_cost = 0
orientation_cost = ((kc * c_0) * np.square(([0, 0] - orien[:2])).sum())
c_s = (0.5 * self.dt)
smoothness_cost = (((- kc) * c_s) * np.square((self.action_pre - action)).sum())
survive_reward = 0.2
c_y = (5 * self.dt)
orien_yaw_cost = ((- c_y) * np.linalg.norm((orien[(- 1)] - self.goal_orien_yaw)))
reward = (((((((lin_vel_reward + ang_vel_reward) + torque_cost) + joint_speed_cost) + orientation_cost) + smoothness_cost) + survive_reward) + orien_yaw_cost)
other_rewards = np.array([reward, lin_vel_reward, ang_vel_reward, torque_cost, joint_speed_cost, orientation_cost, smoothness_cost, survive_reward, orien_yaw_cost])
return (reward, other_rewards)<|docstring|>integral orien
:param velocity_base:
:param v_commdand:
:param action:
:param obs:
:return:<|endoftext|> |
58ba394c75bcb55e63012eb2b254469283c2793cdacea24bddc5c16df87dd424 | @staticmethod
def create_key():
'\n Create an admin key.\n\n Returns:\n str: admin key\n '
raw_key = Fernet.generate_key()
decoded = base64.b64encode(raw_key).decode()
return decoded[:((len(decoded) // 4) * 4)] | Create an admin key.
Returns:
str: admin key | kaipred/util/crypto.py | create_key | lisphilar/kaipred | 0 | python | @staticmethod
def create_key():
'\n Create an admin key.\n\n Returns:\n str: admin key\n '
raw_key = Fernet.generate_key()
decoded = base64.b64encode(raw_key).decode()
return decoded[:((len(decoded) // 4) * 4)] | @staticmethod
def create_key():
'\n Create an admin key.\n\n Returns:\n str: admin key\n '
raw_key = Fernet.generate_key()
decoded = base64.b64encode(raw_key).decode()
return decoded[:((len(decoded) // 4) * 4)]<|docstring|>Create an admin key.
Returns:
str: admin key<|endoftext|> |
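
A runnable sketch of the derivation above: Fernet keys are already urlsafe-base64 bytes, and the extra base64 pass plus truncation to a multiple of four keeps the resulting string valid base64.

import base64
from cryptography.fernet import Fernet

raw_key = Fernet.generate_key()
decoded = base64.b64encode(raw_key).decode()
admin_key = decoded[:(len(decoded) // 4) * 4]
print(len(admin_key) % 4 == 0)   # True: length stays a multiple of 4
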
3b8f4c66889b04c220154aff3050bddda25e484a52332b7f8bfedd98681f8b5d | def encrypt(self, raw):
'\n Encrypt the string.\n\n Args:\n raw (str): target string\n\n Returns:\n str: encrypted string\n '
encoded = base64.b64decode(raw.encode())
encoded_encrypted = self._cipher.encrypt(encoded)
return base64.b64encode(encoded_encrypted).decode('utf-8') | Encrypt the string.
Args:
raw (str): target string
Return"
str: encrypted string | kaipred/util/crypto.py | encrypt | lisphilar/kaipred | 0 | python | def encrypt(self, raw):
'\n Encrypt the string.\n\n Args:\n raw (str): target string\n\n Returns:\n str: encrypted string\n '
encoded = base64.b64decode(raw.encode())
encoded_encrypted = self._cipher.encrypt(encoded)
return base64.b64encode(encoded_encrypted).decode('utf-8') | def encrypt(self, raw):
'\n Encrypt the string.\n\n Args:\n raw (str): target string\n\n Returns:\n str: encrypted string\n '
encoded = base64.b64decode(raw.encode())
encoded_encrypted = self._cipher.encrypt(encoded)
return base64.b64encode(encoded_encrypted).decode('utf-8')<|docstring|>Encrypt the string.
Args:
raw (str): target string
Return"
str: encrypted string<|endoftext|> |
19ef18c29f9080e48e1233e189597d710ae7050b109a4717aa98dc2ab0a13759 | def decrypt(self, encrypted):
'\n Decrypt the encrypted string.\n\n Args:\n str: encrypted string\n '
encoded_encrypted = base64.b64decode(encrypted.encode())
encoded = self._cipher.decrypt(encoded_encrypted)
return base64.b64encode(encoded).decode('utf-8') | Decrypt the encrypted string.
Args:
str: encrypted string | kaipred/util/crypto.py | decrypt | lisphilar/kaipred | 0 | python | def decrypt(self, encrypted):
'\n Decrypt the encrypted string.\n\n Args:\n str: encrypted string\n '
encoded_encrypted = base64.b64decode(encrypted.encode())
encoded = self._cipher.decrypt(encoded_encrypted)
return base64.b64encode(encoded).decode('utf-8') | def decrypt(self, encrypted):
'\n Decrypt the encrypted string.\n\n Args:\n str: encrypted string\n '
encoded_encrypted = base64.b64decode(encrypted.encode())
encoded = self._cipher.decrypt(encoded_encrypted)
return base64.b64encode(encoded).decode('utf-8')<|docstring|>Decrypt the encrypted string.
Args:
str: encrypted string<|endoftext|> |
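
A hedged round-trip sketch for the encrypt/decrypt pair above; the class's _cipher attribute is assumed to be a Fernet instance constructed elsewhere.

import base64
from cryptography.fernet import Fernet

cipher = Fernet(Fernet.generate_key())
raw = base64.b64encode(b'secret payload').decode()   # base64 input, as encrypt() expects
token = base64.b64encode(cipher.encrypt(base64.b64decode(raw))).decode('utf-8')
restored = base64.b64encode(cipher.decrypt(base64.b64decode(token))).decode('utf-8')
print(restored == raw)   # True
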
5afeb4235cc7f9ef438a666d7fc942d7c607c4898c9bcf8725a5c244ebb0954b | def test_storing_and_loading_station_snapshot(self):
'\n Stores and writes a station (instrument) snapshot.\n '
self.mock_parabola_2.x(1)
self.mock_parabola_2.y(2.245)
self.mock_parabola_2.array_like(np.linspace(0, 11, 23))
snap = self.station.snapshot(update=True)
data_object = h5d.Data(name='test_object_snap', datadir=self.datadir)
h5d.write_dict_to_hdf5(snap, data_object)
data_object.close()
filepath = data_object.filepath
new_dict = {}
opened_hdf5_file = h5py.File(filepath, 'r')
h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
self.assertEqual(snap.keys(), new_dict.keys())
self.assertEqual(snap['instruments'].keys(), new_dict['instruments'].keys())
mock_parab_pars = snap['instruments']['mock_parabola_2']['parameters']
self.assertEqual(mock_parab_pars['x']['value'], 1)
self.assertEqual(mock_parab_pars['y']['value'], 2.245)
np.testing.assert_array_equal(mock_parab_pars['array_like']['value'], np.linspace(0, 11, 23)) | Stores and writes a station (instrument) snapshot. | pycqed/tests/test_hdf5_datasaving_loading.py | test_storing_and_loading_station_snapshot | nuttamas/PycQED_py3 | 60 | python | def test_storing_and_loading_station_snapshot(self):
'\n \n '
self.mock_parabola_2.x(1)
self.mock_parabola_2.y(2.245)
self.mock_parabola_2.array_like(np.linspace(0, 11, 23))
snap = self.station.snapshot(update=True)
data_object = h5d.Data(name='test_object_snap', datadir=self.datadir)
h5d.write_dict_to_hdf5(snap, data_object)
data_object.close()
filepath = data_object.filepath
new_dict = {}
opened_hdf5_file = h5py.File(filepath, 'r')
h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
self.assertEqual(snap.keys(), new_dict.keys())
self.assertEqual(snap['instruments'].keys(), new_dict['instruments'].keys())
mock_parab_pars = snap['instruments']['mock_parabola_2']['parameters']
self.assertEqual(mock_parab_pars['x']['value'], 1)
self.assertEqual(mock_parab_pars['y']['value'], 2.245)
np.testing.assert_array_equal(mock_parab_pars['array_like']['value'], np.linspace(0, 11, 23)) | def test_storing_and_loading_station_snapshot(self):
'\n \n '
self.mock_parabola_2.x(1)
self.mock_parabola_2.y(2.245)
self.mock_parabola_2.array_like(np.linspace(0, 11, 23))
snap = self.station.snapshot(update=True)
data_object = h5d.Data(name='test_object_snap', datadir=self.datadir)
h5d.write_dict_to_hdf5(snap, data_object)
data_object.close()
filepath = data_object.filepath
new_dict = {}
opened_hdf5_file = h5py.File(filepath, 'r')
h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
self.assertEqual(snap.keys(), new_dict.keys())
self.assertEqual(snap['instruments'].keys(), new_dict['instruments'].keys())
mock_parab_pars = snap['instruments']['mock_parabola_2']['parameters']
self.assertEqual(mock_parab_pars['x']['value'], 1)
self.assertEqual(mock_parab_pars['y']['value'], 2.245)
np.testing.assert_array_equal(mock_parab_pars['array_like']['value'], np.linspace(0, 11, 23))<|docstring|>Stores and writes a station (instrument) snapshot.<|endoftext|> |
9b67dc49cb502b96bb2dbd7035c5756ca9b16b8e198602ecea1549b104064265 | @unittest.skip('FIXME: disabled, see PR #643')
def test_writing_and_reading_dicts_to_hdf5(self):
'\n Tests dumping some random dictionary to hdf5 and reading back the\n stored values. The input dictionary contains:\n - list of ints\n - list of floats\n - nested dict\n - 1D array\n - 2D array\n\n '
test_dict = {'list_of_ints': list(np.arange(5)), 'list_of_floats': list(np.arange(5.1)), 'some_bool': True, 'weird_dict': {'a': 5}, 'dataset1': np.linspace(0, 20, 31), 'dataset2': np.array([[2, 3, 4, 5], [2, 3, 1, 2]]), 'list_of_mixed_type': ['hello', 4, 4.2, {'a': 5}, [4, 3]], 'tuple_of_mixed_type': tuple(['hello', 4, 4.2, {'a': 5}, [4, 3]]), 'a list of strings': ['my ', 'name ', 'is ', 'earl.'], 'some_np_bool': np.bool(True), 'list_of_dicts': [{'a': 5}, {'b': 3}], 'some_int': 3, 'some_float': 3.5, 'some_np_int': np.int(3), 'some_np_float': np.float(3.5)}
data_object = h5d.Data(name='test_object', datadir=self.datadir)
h5d.write_dict_to_hdf5(test_dict, data_object)
data_object.close()
filepath = data_object.filepath
new_dict = {}
opened_hdf5_file = h5py.File(filepath, 'r')
h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
self.assertEqual(test_dict.keys(), new_dict.keys())
self.assertEqual(test_dict['list_of_ints'], new_dict['list_of_ints'])
self.assertEqual(test_dict['list_of_floats'], new_dict['list_of_floats'])
self.assertEqual(test_dict['weird_dict'], new_dict['weird_dict'])
self.assertEqual(test_dict['some_bool'], new_dict['some_bool'])
self.assertEqual(test_dict['list_of_dicts'], new_dict['list_of_dicts'])
self.assertEqual(test_dict['list_of_mixed_type'], new_dict['list_of_mixed_type'])
self.assertEqual(test_dict['list_of_mixed_type'][0], new_dict['list_of_mixed_type'][0])
self.assertEqual(test_dict['list_of_mixed_type'][2], new_dict['list_of_mixed_type'][2])
self.assertEqual(test_dict['tuple_of_mixed_type'], new_dict['tuple_of_mixed_type'])
self.assertEqual(type(test_dict['tuple_of_mixed_type']), type(new_dict['tuple_of_mixed_type']))
self.assertEqual(test_dict['tuple_of_mixed_type'][0], new_dict['tuple_of_mixed_type'][0])
self.assertEqual(test_dict['tuple_of_mixed_type'][2], new_dict['tuple_of_mixed_type'][2])
self.assertEqual(test_dict['some_np_bool'], new_dict['some_np_bool'])
self.assertEqual(test_dict['some_int'], new_dict['some_int'])
self.assertEqual(test_dict['some_np_float'], new_dict['some_np_float'])
self.assertEqual(test_dict['a list of strings'], new_dict['a list of strings'])
self.assertEqual(test_dict['a list of strings'][0], new_dict['a list of strings'][0]) | Tests dumping some random dictionary to hdf5 and reading back the
stored values. The input dictionary contains:
- list of ints
- list of floats
- nested dict
- 1D array
- 2D array | pycqed/tests/test_hdf5_datasaving_loading.py | test_writing_and_reading_dicts_to_hdf5 | nuttamas/PycQED_py3 | 60 | python | @unittest.skip('FIXME: disabled, see PR #643')
def test_writing_and_reading_dicts_to_hdf5(self):
'\n Tests dumping some random dictionary to hdf5 and reading back the\n stored values. The input dictionary contains:\n - list of ints\n - list of floats\n - nested dict\n - 1D array\n - 2D array\n\n '
test_dict = {'list_of_ints': list(np.arange(5)), 'list_of_floats': list(np.arange(5.1)), 'some_bool': True, 'weird_dict': {'a': 5}, 'dataset1': np.linspace(0, 20, 31), 'dataset2': np.array([[2, 3, 4, 5], [2, 3, 1, 2]]), 'list_of_mixed_type': ['hello', 4, 4.2, {'a': 5}, [4, 3]], 'tuple_of_mixed_type': tuple(['hello', 4, 4.2, {'a': 5}, [4, 3]]), 'a list of strings': ['my ', 'name ', 'is ', 'earl.'], 'some_np_bool': np.bool(True), 'list_of_dicts': [{'a': 5}, {'b': 3}], 'some_int': 3, 'some_float': 3.5, 'some_np_int': np.int(3), 'some_np_float': np.float(3.5)}
data_object = h5d.Data(name='test_object', datadir=self.datadir)
h5d.write_dict_to_hdf5(test_dict, data_object)
data_object.close()
filepath = data_object.filepath
new_dict = {}
opened_hdf5_file = h5py.File(filepath, 'r')
h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
self.assertEqual(test_dict.keys(), new_dict.keys())
self.assertEqual(test_dict['list_of_ints'], new_dict['list_of_ints'])
self.assertEqual(test_dict['list_of_floats'], new_dict['list_of_floats'])
self.assertEqual(test_dict['weird_dict'], new_dict['weird_dict'])
self.assertEqual(test_dict['some_bool'], new_dict['some_bool'])
self.assertEqual(test_dict['list_of_dicts'], new_dict['list_of_dicts'])
self.assertEqual(test_dict['list_of_mixed_type'], new_dict['list_of_mixed_type'])
self.assertEqual(test_dict['list_of_mixed_type'][0], new_dict['list_of_mixed_type'][0])
self.assertEqual(test_dict['list_of_mixed_type'][2], new_dict['list_of_mixed_type'][2])
self.assertEqual(test_dict['tuple_of_mixed_type'], new_dict['tuple_of_mixed_type'])
self.assertEqual(type(test_dict['tuple_of_mixed_type']), type(new_dict['tuple_of_mixed_type']))
self.assertEqual(test_dict['tuple_of_mixed_type'][0], new_dict['tuple_of_mixed_type'][0])
self.assertEqual(test_dict['tuple_of_mixed_type'][2], new_dict['tuple_of_mixed_type'][2])
self.assertEqual(test_dict['some_np_bool'], new_dict['some_np_bool'])
self.assertEqual(test_dict['some_int'], new_dict['some_int'])
self.assertEqual(test_dict['some_np_float'], new_dict['some_np_float'])
self.assertEqual(test_dict['a list of strings'], new_dict['a list of strings'])
self.assertEqual(test_dict['a list of strings'][0], new_dict['a list of strings'][0]) | @unittest.skip('FIXME: disabled, see PR #643')
def test_writing_and_reading_dicts_to_hdf5(self):
'\n Tests dumping some random dictionary to hdf5 and reading back the\n stored values. The input dictionary contains:\n - list of ints\n - list of floats\n - nested dict\n - 1D array\n - 2D array\n\n '
test_dict = {'list_of_ints': list(np.arange(5)), 'list_of_floats': list(np.arange(5.1)), 'some_bool': True, 'weird_dict': {'a': 5}, 'dataset1': np.linspace(0, 20, 31), 'dataset2': np.array([[2, 3, 4, 5], [2, 3, 1, 2]]), 'list_of_mixed_type': ['hello', 4, 4.2, {'a': 5}, [4, 3]], 'tuple_of_mixed_type': tuple(['hello', 4, 4.2, {'a': 5}, [4, 3]]), 'a list of strings': ['my ', 'name ', 'is ', 'earl.'], 'some_np_bool': np.bool(True), 'list_of_dicts': [{'a': 5}, {'b': 3}], 'some_int': 3, 'some_float': 3.5, 'some_np_int': np.int(3), 'some_np_float': np.float(3.5)}
data_object = h5d.Data(name='test_object', datadir=self.datadir)
h5d.write_dict_to_hdf5(test_dict, data_object)
data_object.close()
filepath = data_object.filepath
new_dict = {}
opened_hdf5_file = h5py.File(filepath, 'r')
h5d.read_dict_from_hdf5(new_dict, opened_hdf5_file)
self.assertEqual(test_dict.keys(), new_dict.keys())
self.assertEqual(test_dict['list_of_ints'], new_dict['list_of_ints'])
self.assertEqual(test_dict['list_of_floats'], new_dict['list_of_floats'])
self.assertEqual(test_dict['weird_dict'], new_dict['weird_dict'])
self.assertEqual(test_dict['some_bool'], new_dict['some_bool'])
self.assertEqual(test_dict['list_of_dicts'], new_dict['list_of_dicts'])
self.assertEqual(test_dict['list_of_mixed_type'], new_dict['list_of_mixed_type'])
self.assertEqual(test_dict['list_of_mixed_type'][0], new_dict['list_of_mixed_type'][0])
self.assertEqual(test_dict['list_of_mixed_type'][2], new_dict['list_of_mixed_type'][2])
self.assertEqual(test_dict['tuple_of_mixed_type'], new_dict['tuple_of_mixed_type'])
self.assertEqual(type(test_dict['tuple_of_mixed_type']), type(new_dict['tuple_of_mixed_type']))
self.assertEqual(test_dict['tuple_of_mixed_type'][0], new_dict['tuple_of_mixed_type'][0])
self.assertEqual(test_dict['tuple_of_mixed_type'][2], new_dict['tuple_of_mixed_type'][2])
self.assertEqual(test_dict['some_np_bool'], new_dict['some_np_bool'])
self.assertEqual(test_dict['some_int'], new_dict['some_int'])
self.assertEqual(test_dict['some_np_float'], new_dict['some_np_float'])
self.assertEqual(test_dict['a list of strings'], new_dict['a list of strings'])
self.assertEqual(test_dict['a list of strings'][0], new_dict['a list of strings'][0])<|docstring|>Tests dumping some random dictionary to hdf5 and reading back the
stored values. The input dictionary contains:
- list of ints
- list of floats
- nested dict
- 1D array
- 2D array<|endoftext|> |
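
The core idea of both tests in plain h5py terms: nested dicts map to groups, arrays to datasets, and scalars to attributes, with reading walking the hierarchy back out; write_dict_to_hdf5's exact layout may differ.

import h5py
import numpy as np

with h5py.File('roundtrip_demo.h5', 'w') as f:
    f.create_dataset('dataset1', data=np.linspace(0, 20, 31))
    grp = f.create_group('weird_dict')
    grp.attrs['a'] = 5

with h5py.File('roundtrip_demo.h5', 'r') as f:
    assert np.allclose(f['dataset1'][...], np.linspace(0, 20, 31))
    assert f['weird_dict'].attrs['a'] == 5
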