body_hash | body | docstring | path | name | repository_name | repository_stars | lang | body_without_docstring | unified
---|---|---|---|---|---|---|---|---|---|
string, 64 chars | string, 23-109k chars | string, 1-57k chars | string, 4-198 chars | string, 1-115 chars | string, 7-111 chars | float64, 0-191k | 1 class (python) | string, 14-108k chars | string, 45-133k chars

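The rows that follow are one record per function. A minimal loading sketch, assuming the dump has been exported as JSON lines to a hypothetical file named code_rows.jsonl carrying the column names above:

# Hypothetical loading sketch; 'code_rows.jsonl' is an assumed export of the
# records below, one JSON object per row.
import pandas as pd

df = pd.read_json('code_rows.jsonl', lines=True)
starred = df[df['repository_stars'] > 0]              # filter on a numeric column
print(starred[['name', 'path', 'repository_name']])
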
d6ccdd4d0574b230c22bfbe8eb7e364926ee3b10bcd98926db6a523fcaf527a0 | shocktube1dcalc/generator_mesh.py | show_mesh_in_space_time_ipynb | yezhengkai/shocktube1dcalc | 2 | python

def show_mesh_in_space_time_ipynb(self, highlight=False, highlight_along_time=0, delta_t=0.004, bound_x=0.505, bound_t=0.02):
    """
    Show the mesh represented in time and space.
    @ highlight: a flag to highlight the grids or not
    @ highlight_along_time: which grid group should be shown along a specific time
    @ delta_t
    @ bound: |delta_t*N/2| <= bound

    TODO:
        bound_x
        bound_t
        to zoom in and out
    """
    mesh = list(self.mesh)
    mesh_cut = self._get_cut_mesh_by_xbound(mesh, bound_x)
    mesh_x_on_dt_n = mesh_cut
    mesh_for_loop = list(mesh_x_on_dt_n)
    mesh_x_on_dt_2_n = []
    for i in range(len(mesh_for_loop) - 1):
        mesh_x_on_dt_2_n.append(mesh_x_on_dt_n[i] + self.xstep / 2.0)
    N = int(bound_t / delta_t)
    list_all_func = []
    step = delta_t / 2
    for i in range(N):
        if i % 2 == 0:
            list_all_func.append([i * step] * len(mesh_x_on_dt_n))
        else:
            list_all_func.append([i * step] * len(mesh_x_on_dt_2_n))
    number_of_one_side = len(list_all_func) - 1
    list_all_func_for_loop = list(list_all_func)
    for i in range(len(list_all_func_for_loop)):
        if i != 0:
            func_negative = []
            for ele in list_all_func_for_loop[i]:
                func_negative.append(-ele)
            list_all_func.append(func_negative)
    highlight_color = None
    if highlight:
        highlight_color = 'green'
    for i in range(len(list_all_func)):
        color = highlight_color if list_all_func[i][0] == highlight_along_time else None
        if number_of_one_side % 2 == 0:
            if i % 2 == 0:
                plt.scatter(mesh_x_on_dt_n, list_all_func[i], color=color)
            else:
                plt.scatter(mesh_x_on_dt_2_n, list_all_func[i], color=color)
        elif i > number_of_one_side:
            if i % 2 == 0:
                plt.scatter(mesh_x_on_dt_2_n, list_all_func[i], color=color)
            else:
                plt.scatter(mesh_x_on_dt_n, list_all_func[i], color=color)
        elif i % 2 == 0:
            plt.scatter(mesh_x_on_dt_n, list_all_func[i], color=color)
        else:
            plt.scatter(mesh_x_on_dt_2_n, list_all_func[i], color=color)

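The method above draws a grid that is staggered in both space and time: full grid points sit at even multiples of delta_t/2, half-step points at odd multiples, mirrored to negative times. A minimal standalone sketch of the same pattern, with made-up values for the spacing and plotting range:

# Standalone illustration of the staggered space-time grid drawn above;
# xstep, dt, and the ranges are illustrative, not from the repository.
import numpy as np
import matplotlib.pyplot as plt

xstep, dt = 0.1, 0.004
x_full = np.arange(-0.5, 0.5 + xstep, xstep)   # points at integer time levels
x_half = x_full[:-1] + xstep / 2.0             # staggered half-step points

for i in range(-4, 5):                         # time levels i*dt/2, mirrored about t=0
    t = i * dt / 2
    x = x_full if i % 2 == 0 else x_half
    plt.scatter(x, np.full_like(x, t), color='green' if t == 0 else None)

plt.xlabel('x')
plt.ylabel('t')
plt.show()
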
08d758a57b19edb964935e054089ef7ab21e52c3373062967b8f6a5dbb23b469 | billforward/models/alias.py | __init__ | billforward/bf-python | 2 | python

def __init__(self, id=None, organization_id=None, alias=None, deleted=False, created=None, changed_by=None, updated=None):
    """
    Alias - a model defined in Swagger

    :param dict swaggerTypes: The key is attribute name
        and the value is attribute type.
    :param dict attributeMap: The key is attribute name
        and the value is json key in definition.
    """
    self.swagger_types = {'id': 'str', 'organization_id': 'str', 'alias': 'str', 'deleted': 'bool', 'created': 'datetime', 'changed_by': 'str', 'updated': 'datetime'}
    self.attribute_map = {'id': 'id', 'organization_id': 'organizationID', 'alias': 'alias', 'deleted': 'deleted', 'created': 'created', 'changed_by': 'changedBy', 'updated': 'updated'}
    self._id = id
    self._organization_id = organization_id
    self._alias = alias
    self._deleted = deleted
    self._created = created
    self._changed_by = changed_by
    self._updated = updated

7d65e2523e9f54fb371b4d8c294e2a3b1c41071d8794245a217b6501b7fabb59 | billforward/models/alias.py | id | billforward/bf-python | 2 | python

@property
def id(self):
    """
    Gets the id of this Alias.
    { "description" : "ID of the alias.", "verbs":["POST","PUT","GET"] }

    :return: The id of this Alias.
    :rtype: str
    """
    return self._id

6bde5c3ef7f0cd763777e7959e99402f72ad2964918b66f0df029e03b2f93fe2 | billforward/models/alias.py | id | billforward/bf-python | 2 | python

@id.setter
def id(self, id):
    """
    Sets the id of this Alias.
    { "description" : "ID of the alias.", "verbs":["POST","PUT","GET"] }

    :param id: The id of this Alias.
    :type: str
    """
    self._id = id

bf8d8f086c01efba8156397b59b9008f4b256131e4bca6b7fbb4001c0e6f6b67 | billforward/models/alias.py | organization_id | billforward/bf-python | 2 | python

@property
def organization_id(self):
    """
    Gets the organization_id of this Alias.
    { "description" : "ID of the organization associated with the alias.", "verbs":["POST","PUT","GET"] }

    :return: The organization_id of this Alias.
    :rtype: str
    """
    return self._organization_id

d672c62c6e61d6125998e0af7d4e726d877a8cff2c4f8568774349c6a637fc92 | billforward/models/alias.py | organization_id | billforward/bf-python | 2 | python

@organization_id.setter
def organization_id(self, organization_id):
    """
    Sets the organization_id of this Alias.
    { "description" : "ID of the organization associated with the alias.", "verbs":["POST","PUT","GET"] }

    :param organization_id: The organization_id of this Alias.
    :type: str
    """
    self._organization_id = organization_id

1ad576a391d721ece3716dbfedb3c339859074e615e3abf31898c33e44e86f01 | billforward/models/alias.py | alias | billforward/bf-python | 2 | python

@property
def alias(self):
    """
    Gets the alias of this Alias.
    { "description" : "A string representation of the alias.", "verbs":["POST","PUT","GET"] }

    :return: The alias of this Alias.
    :rtype: str
    """
    return self._alias

a1c3db393253f212b157bd9aa03379459d15107319b94fc3d1c8598e16fa5947 | billforward/models/alias.py | alias | billforward/bf-python | 2 | python

@alias.setter
def alias(self, alias):
    """
    Sets the alias of this Alias.
    { "description" : "A string representation of the alias.", "verbs":["POST","PUT","GET"] }

    :param alias: The alias of this Alias.
    :type: str
    """
    self._alias = alias

b20da54c131d4d5f7b538968af0d60fc9017683ab9e2a98a777ca83bd43dbfb6 | billforward/models/alias.py | deleted | billforward/bf-python | 2 | python

@property
def deleted(self):
    """
    Gets the deleted of this Alias.
    { "description" : "Has this alias been deleted?", "verbs":["POST","PUT","GET"] }

    :return: The deleted of this Alias.
    :rtype: bool
    """
    return self._deleted

18de23a4a00f39d08bbab35199b2588f452ecbde2ade64a1f4f978dcc2b65d72 | billforward/models/alias.py | deleted | billforward/bf-python | 2 | python

@deleted.setter
def deleted(self, deleted):
    """
    Sets the deleted of this Alias.
    { "description" : "Has this alias been deleted?", "verbs":["POST","PUT","GET"] }

    :param deleted: The deleted of this Alias.
    :type: bool
    """
    self._deleted = deleted

a25a8508bf614f6a3834a2c394017b974db12a447affbe944d1505f708866502 | billforward/models/alias.py | created | billforward/bf-python | 2 | python

@property
def created(self):
    """
    Gets the created of this Alias.
    { "description" : "The UTC DateTime when the object was created.", "verbs":[] }

    :return: The created of this Alias.
    :rtype: datetime
    """
    return self._created

c416f667f1add7b6412eafb9476d026ef0d1f2b5ea0c69a198c8237b36e62194 | billforward/models/alias.py | created | billforward/bf-python | 2 | python

@created.setter
def created(self, created):
    """
    Sets the created of this Alias.
    { "description" : "The UTC DateTime when the object was created.", "verbs":[] }

    :param created: The created of this Alias.
    :type: datetime
    """
    self._created = created

e76cc0930bc75ea35147e2ff839067b3aa4d725b2911af69b1f67e1e6ad3fb63 | billforward/models/alias.py | changed_by | billforward/bf-python | 2 | python

@property
def changed_by(self):
    """
    Gets the changed_by of this Alias.
    { "description" : "ID of the user who last updated the entity.", "verbs":[] }

    :return: The changed_by of this Alias.
    :rtype: str
    """
    return self._changed_by

c81fc187f73c41b7ce9b6923c440fe3792ab54ad58ff89b3efddc48292cb9732 | billforward/models/alias.py | changed_by | billforward/bf-python | 2 | python

@changed_by.setter
def changed_by(self, changed_by):
    """
    Sets the changed_by of this Alias.
    { "description" : "ID of the user who last updated the entity.", "verbs":[] }

    :param changed_by: The changed_by of this Alias.
    :type: str
    """
    self._changed_by = changed_by

eefb19eb45153835b89aaf79313f5843447c6e9e34907ae8b02580e2d36a8e47 | billforward/models/alias.py | updated | billforward/bf-python | 2 | python

@property
def updated(self):
    """
    Gets the updated of this Alias.
    { "description" : "The UTC DateTime when the object was last updated.", "verbs":[] }

    :return: The updated of this Alias.
    :rtype: datetime
    """
    return self._updated

6c7859fc4d692cc9cf7cf7bea698051926304690eebbfb283e0fd7fc61b027b5 | billforward/models/alias.py | updated | billforward/bf-python | 2 | python

@updated.setter
def updated(self, updated):
    """
    Sets the updated of this Alias.
    { "description" : "The UTC DateTime when the object was last updated.", "verbs":[] }

    :param updated: The updated of this Alias.
    :type: datetime
    """
    self._updated = updated

f92515cd38effc7eee4069f2288d78a0f0836df932fb36a84e3b4f7e14233415 | billforward/models/alias.py | to_dict | billforward/bf-python | 2 | python

def to_dict(self):
    """
    Returns the model properties as a dict
    """
    result = {}
    for attr, _ in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, 'to_dict') else x, value))
        elif hasattr(value, 'to_dict'):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item, value.items()))
        else:
            result[attr] = value
    return result

c373d87dd29c1e96dce460ab571bff86e58edb298ba83c85d8cc7603a6505de4 | billforward/models/alias.py | to_str | billforward/bf-python | 2 | python

def to_str(self):
    """
    Returns the string representation of the model
    """
    return pformat(self.to_dict())

1034ff7dd2eef24d21e3c2fa7409b793ab5cbb8cd75a2eb0ab3e62604b26264d | billforward/models/alias.py | __repr__ | billforward/bf-python | 2 | python

def __repr__(self):
    """
    For `print` and `pprint`
    """
    return self.to_str()

a43b3ce7478646f0122f200e4de04f4f5ed99329a4b75930eecef4ff54a23351 | billforward/models/alias.py | __eq__ | billforward/bf-python | 2 | python

def __eq__(self, other):
    """
    Returns true if both objects are equal
    """
    return self.__dict__ == other.__dict__

e5050f8e1402e3a4c90d6c6e229c4c9e2b8ec61e0be457915ea9d976f7e6b0b4 | billforward/models/alias.py | __ne__ | billforward/bf-python | 2 | python

def __ne__(self, other):
    """
    Returns true if both objects are not equal
    """
    return not (self == other)

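A short usage sketch of the Alias model assembled from the methods above; it is not taken from the repository, and the field values are illustrative:

# Illustrative use of the Alias model; values are made up.
from datetime import datetime

a = Alias(id='ALS-1', organization_id='ORG-1', alias='prod-account')
b = Alias(id='ALS-1', organization_id='ORG-1', alias='prod-account')

assert a == b                     # __eq__ compares the underlying __dict__
a.updated = datetime.utcnow()     # property setter writes self._updated
print(a.to_dict()['alias'])       # 'prod-account'
print(a)                          # __repr__ delegates to pformat(to_dict())
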
73188cc7be5472865b326979e51416e4b27b283f626dcbdbe3fcfbc788ab3bc9 | examples/benchmarks/evolution.py | main | daxkoh/qibo | 81 | python

def main(nqubits, dt, solver, backend, dense=False, accelerators=None, filename=None):
    """Performs adiabatic evolution with critical TFIM as the "hard" Hamiltonian."""
    qibo.set_backend(backend)
    if accelerators is not None:
        dense = False
        solver = 'exp'
    logs = BenchmarkLogger(filename)
    logs.append({'nqubits': nqubits, 'dt': dt, 'solver': solver, 'dense': dense,
                 'backend': qibo.get_backend(), 'precision': qibo.get_precision(),
                 'device': qibo.get_device(), 'threads': qibo.get_threads(),
                 'accelerators': accelerators})
    print(f'Using {solver} solver and dt = {dt}.')
    print(f'Accelerators: {accelerators}')
    print('Backend:', logs[-1]['backend'])
    start_time = time.time()
    h0 = hamiltonians.X(nqubits, dense=dense)
    h1 = hamiltonians.TFIM(nqubits, h=1.0, dense=dense)
    logs[-1]['hamiltonian_creation_time'] = time.time() - start_time
    print(f'\nnqubits = {nqubits}, solver = {solver}')
    print(f'dense = {dense}, accelerators = {accelerators}')
    print('Hamiltonians created in:', logs[-1]['hamiltonian_creation_time'])
    start_time = time.time()
    evolution = models.AdiabaticEvolution(h0, h1, lambda t: t, dt=dt, solver=solver,
                                          accelerators=accelerators)
    logs[-1]['creation_time'] = time.time() - start_time
    print('Evolution model created in:', logs[-1]['creation_time'])
    start_time = time.time()
    final_psi = evolution(final_time=1.0)
    logs[-1]['simulation_time'] = time.time() - start_time
    print('Simulation time:', logs[-1]['simulation_time'])
    logs.dump()

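With the linear schedule passed as lambda t: t, the evolution follows the usual adiabatic interpolation H(s) = (1 - s) H0 + s H1 for s running from 0 to 1 over final_time = 1.0, starting from the easy Hamiltonian X and ending at the critical TFIM. A hypothetical driver for the benchmark; the argument values below are illustrative, not taken from the script:

# Hypothetical invocation; nqubits, dt, and the backend name are examples only.
if __name__ == '__main__':
    main(nqubits=10, dt=0.01, solver='exp', backend='numpy',
         dense=True, accelerators=None, filename=None)
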
6b85435d733078470c17995b7c76ea2a1d0b80fbe1ceeaadbffb6c872a54d7fd | src/python/packages/wgne/bias.py | compute | susburrows/uvcmetrics | 0 | python

def compute(dm, do):
    """Computes bias"""
    return dm - do

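The bias is simply model minus observation, applied elementwise. A quick illustration with made-up values:

# Elementwise bias between a model field and observations; values are made up.
import numpy as np

model = np.array([1.5, 2.0, 3.2])
obs = np.array([1.0, 2.5, 3.0])
print(compute(model, obs))   # [ 0.5 -0.5  0.2]
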
6002620e71a389c81ca712fe2c6ab3c608871f8650b7acc70e6242ce0f1689a5 | src/VAE_GAN.py | models | vineeths96/Variational-Generative-Image-Compression | 1 | python

def models(channels):
    """
    Creates and initializes the models
    :return: Encoder, Generator, Discriminator
    """
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    encoder = Encoder(channels).to(device)
    encoder.apply(init_weights)
    generator = Generator(channels).to(device)
    generator.apply(init_weights)
    discriminator = Discriminator(channels).to(device)
    discriminator.apply(init_weights)
    return encoder, generator, discriminator

dba43a2d8119fdc20a657efbdb8508b7b35fe4ae74b3dc5cbc174592f51307e5 | src/VAE_GAN.py | train | vineeths96/Variational-Generative-Image-Compression | 1 | python

def train(channels):
    """
    Trains the VAE-GAN model
    :return: Encoder, Generator
    """
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    encoder, generator, discriminator = models(channels)
    train_loader = train_dataloader()
    test_loader = test_dataloader()
    test_batch = next(iter(test_loader)).to(device)
    bce_criterion = nn.BCELoss()
    l1_criterion = nn.L1Loss()
    encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=LEARNING_RATE, betas=(BETA1, BETA2))
    generator_optimizer = torch.optim.Adam(generator.parameters(), lr=LEARNING_RATE, betas=(BETA1, BETA2))
    discriminator_optimizer = torch.optim.Adam(discriminator.parameters(), lr=LEARNING_RATE, betas=(BETA1, BETA2))
    losses = {model: [] for model in ['encoder', 'generator', 'discriminator']}
    print(f"Starting Training at {datetime.now().strftime('%Y_%m_%d_%H_%M_%S')}")
    for epoch in range(NUM_EPOCHS):
        for i, images in enumerate(train_loader):
            encoder.train()
            generator.train()
            discriminator.train()
            images = images.to(device)
            fake_images = generator(encoder(images))
            z_real = {'image': images, 'encoded_image': encoder(images)}
            z_fake = {'image': fake_images, 'encoded_image': encoder(images)}
            # Discriminator update: real pairs labeled near 1, fake pairs near 0,
            # with label smoothing controlled by SMOOTH.
            discriminator.zero_grad()
            label = torch.empty(images.size(0), device=device).uniform_(1 - SMOOTH, 1)
            output = discriminator(z_real).view(-1)
            discriminator_loss_real = bce_criterion(output, label)
            label = torch.empty(images.size(0), device=device).uniform_(0, SMOOTH)
            output = discriminator(z_fake).view(-1)
            discriminator_loss_fake = bce_criterion(output, label)
            discriminator_loss = discriminator_loss_real + discriminator_loss_fake
            discriminator_loss.backward(retain_graph=True)
            discriminator_optimizer.step()
            # Generator update: adversarial loss plus an L1 reconstruction term.
            generator.zero_grad()
            label = torch.empty(images.size(0), device=device).uniform_(1 - SMOOTH, 1)
            output = discriminator(z_fake).view(-1)
            generator_loss = bce_criterion(output, label) + 2 * l1_criterion(images, fake_images)
            generator_loss.backward(retain_graph=True)
            # Encoder update: same adversarial plus reconstruction objective.
            encoder.zero_grad()
            label = torch.empty(images.size(0), device=device).uniform_(1 - SMOOTH, 1)
            output = discriminator(z_fake).view(-1)
            encoder_loss = bce_criterion(output, label) + 2 * l1_criterion(images, fake_images)
            encoder_loss.backward(retain_graph=True)
            generator_optimizer.step()
            encoder_optimizer.step()
            if i % LOG_FREQUENCY == 0:
                print(f'Epoch: {epoch}, Iteration: {i}, Discriminator Loss: {discriminator_loss.item()}, Generator Loss: {generator_loss.item()}, Encoder Loss: {encoder_loss.item()}')
                losses['encoder'].append(encoder_loss.item())
                losses['generator'].append(generator_loss.item())
                losses['discriminator'].append(discriminator_loss.item())
            if i % VAL_FREQUENCY == 0:
                encoder.eval()
                generator.eval()
                encoded_image = encoder(test_batch)
                reconstructed_image = generator(encoded_image)
                plot_image(test_batch, reconstructed_image, NUM_IMAGES)
    print(f"Completed Training at {datetime.now().strftime('%Y_%m_%d_%H_%M_%S')}")
    return encoder, generator

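A hypothetical entry point for the training script above; the channel count and checkpoint paths are illustrative, not taken from the repository:

# Hypothetical driver: train on RGB images and save the weights.
if __name__ == '__main__':
    encoder, generator = train(channels=3)
    torch.save(encoder.state_dict(), 'encoder.pt')
    torch.save(generator.state_dict(), 'generator.pt')
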
b71c073438fbbf42ec68c3bf386256248b33185bf2775d5e6d4f8a19b752ecda | test/ops/test_aggregate.py | test_nominal | TomBlock/cate | 0 | python

def test_nominal(self):
    """
    Test nominal execution
    """
    ds = xr.Dataset({
        'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])),
        'second': (['lat', 'lon', 'time'], np.ones([45, 90, 24])),
        'lat': np.linspace(-88, 88, 45),
        'lon': np.linspace(-178, 178, 90),
        'time': pd.date_range('2000-01-01', freq='MS', periods=24)})
    ds = adjust_temporal_attrs(ds)
    m = ConsoleMonitor()
    actual = long_term_average(ds, monitor=m)
    self.assertEqual(m._percentage, 100)
    self.assertEqual(actual['first'].attrs['cell_methods'], 'time: mean over years')
    self.assertEqual(actual.dims, {'time': 12, 'nv': 2, 'lat': 45, 'lon': 90})
    self.assertEqual(actual.time.attrs['climatology'], 'climatology_bounds')
    actual = long_term_average(ds, var='first')
    with self.assertRaises(KeyError):
        actual['second']

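A minimal sketch, independent of cate, of the monthly climatology this test asserts on: grouping a monthly series by calendar month and averaging across years leaves twelve steps, one per month.

# Monthly climatology with plain xarray; 24 monthly steps collapse to 12.
import numpy as np
import pandas as pd
import xarray as xr

ds = xr.Dataset(
    {'first': (['time'], np.arange(24.0))},
    coords={'time': pd.date_range('2000-01-01', freq='MS', periods=24)})
clim = ds.groupby('time.month').mean('time')
print(clim.dims)   # month: 12
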
a9ab8d6dcad75e99f7f875d145a355ed4f99f1e4913cad6e116c00ab725b95e6 | test/ops/test_aggregate.py | test_registered | TomBlock/cate | 0 | python

def test_registered(self):
    """
    Test registered operation execution
    """
    reg_op = OP_REGISTRY.get_op(object_to_qualified_name(long_term_average))
    ds = xr.Dataset({
        'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])),
        'second': (['lat', 'lon', 'time'], np.ones([45, 90, 24])),
        'lat': np.linspace(-88, 88, 45),
        'lon': np.linspace(-178, 178, 90),
        'time': pd.date_range('2000-01-01', freq='MS', periods=24)})
    ds = adjust_temporal_attrs(ds)
    reg_op(ds=ds)

3898a724c2499685235394eb8383acb369b05d849922aa968b0dc6044531694e | test/ops/test_aggregate.py | test_validation | TomBlock/cate | 0 | python

def test_validation(self):
    """
    Test input validation
    """
    ds = xr.Dataset({
        'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])),
        'lat': np.linspace(-88, 88, 45),
        'lon': np.linspace(-178, 178, 90)})
    ds = adjust_temporal_attrs(ds)
    with self.assertRaises(ValueError) as err:
        long_term_average(ds)
    self.assertIn('normalize', str(err.exception))
    ds = xr.Dataset({
        'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])),
        'lat': np.linspace(-88, 88, 45),
        'lon': np.linspace(-178, 178, 90),
        'time': pd.date_range('2000-01-01', periods=24)})
    ds = adjust_temporal_attrs(ds)
    with self.assertRaises(ValueError) as err:
        long_term_average(ds)
    self.assertIn('temporal aggregation', str(err.exception))

a6f69b9bb677184ff65e7cfd674c158d4a315f3f50e6d1e57a8a2499295d3ea7 | test/ops/test_aggregate.py | test_nominal | TomBlock/cate | 0 | python

def test_nominal(self):
    """
    Test nominal execution
    """
    ds = xr.Dataset({
        'first': (['lat', 'lon', 'time'], np.ones([45, 90, 366])),
        'second': (['lat', 'lon', 'time'], np.ones([45, 90, 366])),
        'lat': np.linspace(-88, 88, 45),
        'lon': np.linspace(-178, 178, 90),
        'time': pd.date_range('2000-01-01', '2000-12-31')})
    ds = adjust_temporal_attrs(ds)
    ex = xr.Dataset({
        'first': (['lat', 'lon', 'time'], np.ones([45, 90, 12])),
        'second': (['lat', 'lon', 'time'], np.ones([45, 90, 12])),
        'lat': np.linspace(-88, 88, 45),
        'lon': np.linspace(-178, 178, 90),
        'time': pd.date_range('2000-01-01', freq='MS', periods=12)})
    ex.first.attrs['cell_methods'] = 'time: mean within years'
    ex.second.attrs['cell_methods'] = 'time: mean within years'
    m = ConsoleMonitor()
    actual = temporal_aggregation(ds, monitor=m)
    self.assertTrue(actual.broadcast_equals(ex))

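A minimal sketch, independent of cate, of the daily-to-monthly aggregation the test above expects: resampling 366 daily steps of the year 2000 to month starts yields twelve time steps.

# Daily-to-monthly aggregation with plain xarray.
import numpy as np
import pandas as pd
import xarray as xr

ds = xr.Dataset(
    {'first': (['time'], np.ones(366))},
    coords={'time': pd.date_range('2000-01-01', '2000-12-31')})
monthly = ds.resample(time='MS').mean()
print(monthly.dims)   # time: 12
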
85e1ea1aeec765e8f1c4bc3403e2535a13101ab459470b5aefdd794e0c3720ca | def test_registered(self):
'\n Test registered operation execution\n '
reg_op = OP_REGISTRY.get_op(object_to_qualified_name(temporal_aggregation))
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 366])), 'second': (['lat', 'lon', 'time'], np.ones([45, 90, 366])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', '2000-12-31')})
ds = adjust_temporal_attrs(ds)
ex = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 12])), 'second': (['lat', 'lon', 'time'], np.ones([45, 90, 12])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', freq='MS', periods=12)})
ex.first.attrs['cell_methods'] = 'time: mean within years'
ex.second.attrs['cell_methods'] = 'time: mean within years'
actual = reg_op(ds=ds)
self.assertTrue(actual.broadcast_equals(ex)) | Test registered operation execution | test/ops/test_aggregate.py | test_registered | TomBlock/cate | 0 | python | def test_registered(self):
'\n \n '
reg_op = OP_REGISTRY.get_op(object_to_qualified_name(temporal_aggregation))
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 366])), 'second': (['lat', 'lon', 'time'], np.ones([45, 90, 366])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', '2000-12-31')})
ds = adjust_temporal_attrs(ds)
ex = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 12])), 'second': (['lat', 'lon', 'time'], np.ones([45, 90, 12])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', freq='MS', periods=12)})
ex.first.attrs['cell_methods'] = 'time: mean within years'
ex.second.attrs['cell_methods'] = 'time: mean within years'
actual = reg_op(ds=ds)
self.assertTrue(actual.broadcast_equals(ex)) | def test_registered(self):
'\n \n '
reg_op = OP_REGISTRY.get_op(object_to_qualified_name(temporal_aggregation))
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 366])), 'second': (['lat', 'lon', 'time'], np.ones([45, 90, 366])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', '2000-12-31')})
ds = adjust_temporal_attrs(ds)
ex = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 12])), 'second': (['lat', 'lon', 'time'], np.ones([45, 90, 12])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', freq='MS', periods=12)})
ex.first.attrs['cell_methods'] = 'time: mean within years'
ex.second.attrs['cell_methods'] = 'time: mean within years'
actual = reg_op(ds=ds)
self.assertTrue(actual.broadcast_equals(ex))<|docstring|>Test registered operation execution<|endoftext|> |
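The registry round-trip in test_registered can be sketched on its own; the import paths below are assumptions (the record shows only the bare names OP_REGISTRY and object_to_qualified_name), so treat this as illustrative wiring, not cate's documented API.

```python
# Import paths are assumed; the record shows only the bare names.
from cate.core.op import OP_REGISTRY
from cate.util.misc import object_to_qualified_name
from cate.ops.aggregate import temporal_aggregation

# Resolve the operation by its dotted qualified name, then call it like
# the direct function: reg_op(ds=some_daily_dataset).
reg_op = OP_REGISTRY.get_op(object_to_qualified_name(temporal_aggregation))
```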
fc1d2ab882d57e29f2c4c7f6e2c123b9d277de7a404fcfd6d28d7601f993ab43 | def test_validation(self):
'\n Test input validation\n '
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90)})
ds = adjust_temporal_attrs(ds)
with self.assertRaises(ValueError) as err:
temporal_aggregation(ds)
self.assertIn('normalize', str(err.exception))
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', freq='MS', periods=24)})
ds = adjust_temporal_attrs(ds)
with self.assertRaises(ValueError) as err:
temporal_aggregation(ds)
self.assertIn('daily dataset', str(err.exception)) | Test input validation | test/ops/test_aggregate.py | test_validation | TomBlock/cate | 0 | python | def test_validation(self):
'\n \n '
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90)})
ds = adjust_temporal_attrs(ds)
with self.assertRaises(ValueError) as err:
temporal_aggregation(ds)
self.assertIn('normalize', str(err.exception))
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', freq='MS', periods=24)})
ds = adjust_temporal_attrs(ds)
with self.assertRaises(ValueError) as err:
temporal_aggregation(ds)
self.assertIn('daily dataset', str(err.exception)) | def test_validation(self):
'\n \n '
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90)})
ds = adjust_temporal_attrs(ds)
with self.assertRaises(ValueError) as err:
temporal_aggregation(ds)
self.assertIn('normalize', str(err.exception))
ds = xr.Dataset({'first': (['lat', 'lon', 'time'], np.ones([45, 90, 24])), 'lat': np.linspace((- 88), 88, 45), 'lon': np.linspace((- 178), 178, 90), 'time': pd.date_range('2000-01-01', freq='MS', periods=24)})
ds = adjust_temporal_attrs(ds)
with self.assertRaises(ValueError) as err:
temporal_aggregation(ds)
self.assertIn('daily dataset', str(err.exception))<|docstring|>Test input validation<|endoftext|> |
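The two failure modes test_validation pins down (missing temporal metadata and a non-daily time axis) surface as ValueError at the call site, so a defensive caller can branch on the message text the assertions check for:

```python
try:
    monthly = temporal_aggregation(ds)
except ValueError as exc:
    # 'normalize'     -> dataset lacks adjusted temporal attributes
    # 'daily dataset' -> time axis is not daily (e.g. monthly input)
    print('cannot aggregate: %s' % exc)
```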
9a7b85bed57ee1bd308b361cce792ef1049b042290959af2c66a25c709beacc2 | def extract_tb(tb, limit=None):
"Return list of up to limit pre-processed entries from traceback.\n\n This is useful for alternate formatting of stack traces. If\n 'limit' is omitted or None, all entries are extracted. A\n pre-processed stack trace entry is a 5-tuple (filename, line\n number, function name, text, selfstr) representing the information that is\n usually printed for a stack trace. The text is a string with\n leading and trailing whitespace stripped; if the source is not\n available it is None.\n\n This function is modified to return the name of the 'self' parameter's class as\n the 5th element of the tuple.\n "
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while ((tb is not None) and ((limit is None) or (n < limit))):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
self = f.f_locals.get('self')
try:
selfstr = ((self and '(self is a {0})'.format(self.__class__.__name__)) or ' ')
if hasattr(self, 'name'):
selfstr += ('(named %s)' % self.name)
except:
selfstr = ' '
traceback.linecache.checkcache(filename)
line = traceback.linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
list.append(MCETraceFrame(filename, lineno, name, line, selfstr))
tb = tb.tb_next
n = (n + 1)
return list | Return list of up to limit pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a 5-tuple (filename, line
number, function name, text, selfstr) representing the information that is
usually printed for a stack trace. The text is a string with
leading and trailing whitespace stripped; if the source is not
available it is None.
This function is modified to return the name of the 'self' parameter's class as
the 5th element of the tuple. | src/mcedit2/util/custom_traceback.py | extract_tb | Astro-Johnny/mcedit2 | 673 | python | def extract_tb(tb, limit=None):
"Return list of up to limit pre-processed entries from traceback.\n\n This is useful for alternate formatting of stack traces. If\n 'limit' is omitted or None, all entries are extracted. A\n pre-processed stack trace entry is a 5-tuple (filename, line\n number, function name, text, selfstr) representing the information that is\n usually printed for a stack trace. The text is a string with\n leading and trailing whitespace stripped; if the source is not\n available it is None.\n\n This function is modified to return the name of the 'self' parameter's class as\n the 5th element of the tuple.\n "
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while ((tb is not None) and ((limit is None) or (n < limit))):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
self = f.f_locals.get('self')
try:
selfstr = ((self and '(self is a {0})'.format(self.__class__.__name__)) or ' ')
if hasattr(self, 'name'):
selfstr += ('(named %s)' % self.name)
except:
selfstr = ' '
traceback.linecache.checkcache(filename)
line = traceback.linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
list.append(MCETraceFrame(filename, lineno, name, line, selfstr))
tb = tb.tb_next
n = (n + 1)
return list | def extract_tb(tb, limit=None):
"Return list of up to limit pre-processed entries from traceback.\n\n This is useful for alternate formatting of stack traces. If\n 'limit' is omitted or None, all entries are extracted. A\n pre-processed stack trace entry is a 5-tuple (filename, line\n number, function name, text, selfstr) representing the information that is\n usually printed for a stack trace. The text is a string with\n leading and trailing whitespace stripped; if the source is not\n available it is None.\n\n This function is modified to return the name of the 'self' parameter's class as\n the 5th element of the tuple.\n "
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while ((tb is not None) and ((limit is None) or (n < limit))):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
self = f.f_locals.get('self')
try:
selfstr = ((self and '(self is a {0})'.format(self.__class__.__name__)) or ' ')
if hasattr(self, 'name'):
selfstr += ('(named %s)' % self.name)
except:
selfstr = ' '
traceback.linecache.checkcache(filename)
line = traceback.linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
list.append(MCETraceFrame(filename, lineno, name, line, selfstr))
tb = tb.tb_next
n = (n + 1)
return list<|docstring|>Return list of up to limit pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a 5-tuple (filename, line
number, function name, text, selfstr) representing the information that is
usually printed for a stack trace. The text is a string with
leading and trailing whitespace stripped; if the source is not
available it is None.
This function is modified to return the name of the 'self' parameter's class as
the 5th element of the tuple.<|endoftext|> |
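A sketch of the extra information extract_tb records; the Widget class and the failing call are invented for illustration, and each returned MCETraceFrame is assumed to expose the selfstr attribute the sibling functions below read back.

```python
import sys

class Widget(object):
    name = 'toolbar'
    def explode(self):
        raise RuntimeError('boom')

try:
    Widget().explode()
except RuntimeError:
    tb = sys.exc_info()[2]
    for frame in extract_tb(tb):
        # Alongside the usual (filename, lineno, name, line) fields, each
        # frame carries e.g. "(self is a Widget)(named toolbar)".
        print(frame.selfstr)
```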
a55e5857757b6017a6b28ac6934ce06158479b0b772091b48ba098a44fdd12a3 | def format_list(extracted_list):
"Format a list of traceback entry tuples for printing.\n\n Given a list of tuples as returned by extract_tb() or\n extract_stack(), return a list of strings ready for printing.\n Each string in the resulting list corresponds to the item with the\n same index in the argument list. Each string ends in a newline;\n the strings may contain internal newlines as well, for those items\n whose source text line is not None.\n\n This function is modified to include the 5th item of the tuple as\n the name of the class of the 'self' parameter.\n "
list = []
for frame in extracted_list:
(filename, lineno, name, line) = frame
selfstr = getattr(frame, 'selfstr', None)
item = (' File "%s", line %d, in %s %s\n' % (filename, lineno, name, selfstr[:60]))
if line:
item = (item + (' %s\n' % line.strip()))
list.append(item)
return list | Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
This function is modified to include the 5th item of the tuple as
the name of the class of the 'self' parameter. | src/mcedit2/util/custom_traceback.py | format_list | Astro-Johnny/mcedit2 | 673 | python | def format_list(extracted_list):
"Format a list of traceback entry tuples for printing.\n\n Given a list of tuples as returned by extract_tb() or\n extract_stack(), return a list of strings ready for printing.\n Each string in the resulting list corresponds to the item with the\n same index in the argument list. Each string ends in a newline;\n the strings may contain internal newlines as well, for those items\n whose source text line is not None.\n\n This function is modified to include the 5th item of the tuple as\n the name of the class of the 'self' parameter.\n "
list = []
for frame in extracted_list:
(filename, lineno, name, line) = frame
selfstr = getattr(frame, 'selfstr', None)
item = (' File "%s", line %d, in %s %s\n' % (filename, lineno, name, selfstr[:60]))
if line:
item = (item + (' %s\n' % line.strip()))
list.append(item)
return list | def format_list(extracted_list):
"Format a list of traceback entry tuples for printing.\n\n Given a list of tuples as returned by extract_tb() or\n extract_stack(), return a list of strings ready for printing.\n Each string in the resulting list corresponds to the item with the\n same index in the argument list. Each string ends in a newline;\n the strings may contain internal newlines as well, for those items\n whose source text line is not None.\n\n This function is modified to include the 5th item of the tuple as\n the name of the class of the 'self' parameter.\n "
list = []
for frame in extracted_list:
(filename, lineno, name, line) = frame
selfstr = getattr(frame, 'selfstr', None)
item = (' File "%s", line %d, in %s %s\n' % (filename, lineno, name, selfstr[:60]))
if line:
item = (item + (' %s\n' % line.strip()))
list.append(item)
return list<|docstring|>Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
This function is modified to include the 5th item of the tuple as
the name of the class of the 'self' parameter.<|endoftext|> |
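format_list pairs naturally with the extract_tb above; a minimal sketch, where risky() stands in for any failing call and MCETraceFrame is assumed to unpack like the stdlib's 4-tuples:

```python
import sys

try:
    risky()  # hypothetical failing call
except Exception:
    for line in format_list(extract_tb(sys.exc_info()[2])):
        sys.stderr.write(line)
```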
b09e6cdd37aaa1f6b254e12406b6b0c8d211827220964f33aa1e002acf89c0c8 | def print_list(extracted_list, file=None):
'Print the list of tuples as returned by extract_tb() or\n extract_stack() as a formatted stack trace to the given file.\n\n This function is modified to print the 5th element of the tuple\n returned by the modified functions above.\n '
if (file is None):
file = sys.stderr
for entry in extracted_list:
(filename, lineno, name, line) = entry
selfstr = getattr(entry, 'selfstr', None)
print((' File "%s", line %d, in %s %s' % (filename, lineno, name, selfstr)), file=file)
if line:
print((' %s' % line.strip()), file=file) | Print the list of tuples as returned by extract_tb() or
extract_stack() as a formatted stack trace to the given file.
This function is modified to print the 5th element of the tuple
returned by the modified functions above. | src/mcedit2/util/custom_traceback.py | print_list | Astro-Johnny/mcedit2 | 673 | python | def print_list(extracted_list, file=None):
'Print the list of tuples as returned by extract_tb() or\n extract_stack() as a formatted stack trace to the given file.\n\n This function is modified to print the 5th element of the tuple\n returned by the modified functions above.\n '
if (file is None):
file = sys.stderr
for entry in extracted_list:
(filename, lineno, name, line) = entry
selfstr = getattr(entry, 'selfstr', None)
print((' File "%s", line %d, in %s %s' % (filename, lineno, name, selfstr)), file=file)
if line:
print((' %s' % line.strip()), file=file) | def print_list(extracted_list, file=None):
'Print the list of tuples as returned by extract_tb() or\n extract_stack() as a formatted stack trace to the given file.\n\n This function is modified to print the 5th element of the tuple\n returned by the modified functions above.\n '
if (file is None):
file = sys.stderr
for entry in extracted_list:
(filename, lineno, name, line) = entry
selfstr = getattr(entry, 'selfstr', None)
print((' File "%s", line %d, in %s %s' % (filename, lineno, name, selfstr)), file=file)
if line:
print((' %s' % line.strip()), file=file)<|docstring|>Print the list of tuples as returned by extract_tb() or
extract_stack() as a formatted stack trace to the given file.
This function is modified to print the 5th element of the tuple
returned by the modified functions above.<|endoftext|> |
275948c3717e9f6034c3e87cacde7f0e7b582ede48431694fad85945aef6e0f7 | def print_tb(tb, limit=None, file=None):
"Print up to 'limit' stack trace entries from the traceback 'tb'.\n\n If 'limit' is omitted or None, all entries are printed. If 'file'\n is omitted or None, the output goes to sys.stderr; otherwise\n 'file' should be an open file or file-like object with a write()\n method.\n\n This function is modified to also print the name of the 'self' parameter's class.\n "
if (file is None):
file = sys.stderr
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
n = 0
_print = traceback._print
while ((tb is not None) and ((limit is None) or (n < limit))):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
self = f.f_locals.get('self')
try:
selfstr = ((self and '(self is a {0})'.format(self.__class__.__name__)) or ' ')
except:
selfstr = ' '
_print(file, (' File "%s", line %d, in %s %s' % (filename, lineno, name, selfstr)))
linecache = traceback.linecache
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
_print(file, (' ' + line.strip()))
tb = tb.tb_next
n += 1 | Print up to 'limit' stack trace entries from the traceback 'tb'.
If 'limit' is omitted or None, all entries are printed. If 'file'
is omitted or None, the output goes to sys.stderr; otherwise
'file' should be an open file or file-like object with a write()
method.
This function is modified to also print the name of the 'self' parameter's class. | src/mcedit2/util/custom_traceback.py | print_tb | Astro-Johnny/mcedit2 | 673 | python | def print_tb(tb, limit=None, file=None):
"Print up to 'limit' stack trace entries from the traceback 'tb'.\n\n If 'limit' is omitted or None, all entries are printed. If 'file'\n is omitted or None, the output goes to sys.stderr; otherwise\n 'file' should be an open file or file-like object with a write()\n method.\n\n This function is modified to also print the name of the 'self' parameter's class.\n "
if (file is None):
file = sys.stderr
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
n = 0
_print = traceback._print
while ((tb is not None) and ((limit is None) or (n < limit))):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
self = f.f_locals.get('self')
try:
selfstr = ((self and '(self is a {0})'.format(self.__class__.__name__)) or ' ')
except:
selfstr = ' '
_print(file, (' File "%s", line %d, in %s %s' % (filename, lineno, name, selfstr)))
linecache = traceback.linecache
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
_print(file, (' ' + line.strip()))
tb = tb.tb_next
n += 1 | def print_tb(tb, limit=None, file=None):
"Print up to 'limit' stack trace entries from the traceback 'tb'.\n\n If 'limit' is omitted or None, all entries are printed. If 'file'\n is omitted or None, the output goes to sys.stderr; otherwise\n 'file' should be an open file or file-like object with a write()\n method.\n\n This function is modified to also print the name of the 'self' parameter's class.\n "
if (file is None):
file = sys.stderr
if (limit is None):
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
n = 0
_print = traceback._print
while ((tb is not None) and ((limit is None) or (n < limit))):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
self = f.f_locals.get('self')
try:
selfstr = ((self and '(self is a {0})'.format(self.__class__.__name__)) or ' ')
except:
selfstr = ' '
_print(file, (' File "%s", line %d, in %s %s' % (filename, lineno, name, selfstr)))
linecache = traceback.linecache
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
_print(file, (' ' + line.strip()))
tb = tb.tb_next
n += 1<|docstring|>Print up to 'limit' stack trace entries from the traceback 'tb'.
If 'limit' is omitted or None, all entries are printed. If 'file'
is omitted or None, the output goes to sys.stderr; otherwise
'file' should be an open file or file-like object with a write()
method.
This function is modified to also print the name of the 'self' parameter's class.<|endoftext|> |
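All four helpers mirror the stdlib traceback API, so one plausible way to activate them globally (an assumption, the record does not show how mcedit2 wires this up) is to patch them over the originals:

```python
import traceback

# Every subsequently formatted traceback then gains the "(self is a ...)"
# annotation. A sketch, not mcedit2's own installation code.
traceback.extract_tb = extract_tb
traceback.format_list = format_list
traceback.print_list = print_list
traceback.print_tb = print_tb
```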
fab3bf829c2dd7f00a71d4a3f5961ebb3bdc6dbf3ae22032ca05f09ebfb297a1 | async def post_error(self, context, error_text, add_error_message=''):
'Has the bot post an error message in the bot channel. Quotes original message for context. Reacts to original message with an X emote to notify the message author.'
try:
(await context.message.add_reaction('❌'))
message_minus_forbidden = context.message.content.replace('@', '')
message_minus_forbidden = message_minus_forbidden.replace('`', '')
quote = (((((('`' + context.message.author.name) + ': ') + message_minus_forbidden) + '`') + linesep) + linesep)
(await self.post_message(self.bot_channel, ((((quote + '**[ERROR]** ') + error_text) + ' ') + add_error_message)))
except Exception as e:
log.fatal('EXCEPTION OCCURRED WHILE POSTING ERROR:')
log.exception(e) | Has the bot post an error message in the bot channel. Quotes original message for context. Reacts to original message with an X emote to notify the message author. | bot.py | post_error | Lustidrike/Economy-Bot | 2 | python | async def post_error(self, context, error_text, add_error_message=''):
try:
(await context.message.add_reaction('❌'))
message_minus_forbidden = context.message.content.replace('@', '')
message_minus_forbidden = message_minus_forbidden.replace('`', '')
quote = (((((('`' + context.message.author.name) + ': ') + message_minus_forbidden) + '`') + linesep) + linesep)
(await self.post_message(self.bot_channel, ((((quote + '**[ERROR]** ') + error_text) + ' ') + add_error_message)))
except Exception as e:
log.fatal('EXCEPTION OCCURRED WHILE POSTING ERROR:')
log.exception(e) | async def post_error(self, context, error_text, add_error_message=''):
try:
(await context.message.add_reaction('❌'))
message_minus_forbidden = context.message.content.replace('@', '')
message_minus_forbidden = message_minus_forbidden.replace('`', '')
quote = (((((('`' + context.message.author.name) + ': ') + message_minus_forbidden) + '`') + linesep) + linesep)
(await self.post_message(self.bot_channel, ((((quote + '**[ERROR]** ') + error_text) + ' ') + add_error_message)))
except Exception as e:
log.fatal('EXCEPTION OCCURRED WHILE POSTING ERROR:')
log.exception(e)<|docstring|>Has the bot post an error message in the bot channel. Quotes original message for context. Reacts to original message with an X emote to notify the message author.<|endoftext|> |
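A hedged sketch of calling post_error from a discord.py error handler; the cog layout and the self.bot attribute are assumptions, and only the post_error(context, error_text, add_error_message) signature comes from the record.

```python
from discord.ext import commands

class EconomyCog(commands.Cog):
    def __init__(self, bot):
        self.bot = bot  # assumed to be the object defining post_error above

    @commands.Cog.listener()
    async def on_command_error(self, context, error):
        if isinstance(error, commands.MissingRequiredArgument):
            # Quotes the offending message in the bot channel and reacts
            # to it with an X, as the docstring describes.
            await self.bot.post_error(context, 'Missing argument.',
                                      'See the help command for usage.')
```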
4b0d5c47a10d7e14c950e39de23fa8cc37c299e1a42346c6d27eed76e25cd20c | async def post_message(self, channel, message_text, embed=None):
'Has the bot post a message in the respective channel.'
try:
if (embed is None):
chunk_size = 2000
if (message_text.startswith('```') and message_text.endswith('```')):
message_text = message_text[3:]
message_text = message_text[:(- 3)]
chunk_size = 1994
for i in range(0, len(message_text), chunk_size):
text_chunk = message_text[i:(i + chunk_size)]
if (chunk_size == 1994):
text_chunk = ('```' + text_chunk)
text_chunk += '```'
attempts = 0
while (attempts < config.repost_attempts):
try:
return (await channel.send(text_chunk))
except discord.errors.HTTPException:
attempts += 1
(await asyncio.sleep(2))
else:
attempts = 0
while (attempts < config.repost_attempts):
try:
return (await channel.send(embed=embed))
except discord.errors.HTTPException:
attempts += 1
(await asyncio.sleep(2))
except Exception as e:
(await self.bot_channel.send((('**[ERROR]** A critical error occurred.' + ' ') + config.additional_error_message)))
log.fatal('EXCEPTION OCCURRED WHILE POSTING MESSAGE:')
log.exception(e) | Has the bot post a message in the respective channel. | bot.py | post_message | Lustidrike/Economy-Bot | 2 | python | async def post_message(self, channel, message_text, embed=None):
try:
if (embed is None):
chunk_size = 2000
if (message_text.startswith('```') and message_text.endswith('```')):
message_text = message_text[3:]
message_text = message_text[:(- 3)]
chunk_size = 1994
for i in range(0, len(message_text), chunk_size):
text_chunk = message_text[i:(i + chunk_size)]
if (chunk_size == 1994):
text_chunk = ('```' + text_chunk)
text_chunk += '```'
attempts = 0
while (attempts < config.repost_attempts):
try:
return (await channel.send(text_chunk))
except discord.errors.HTTPException:
attempts += 1
(await asyncio.sleep(2))
else:
attempts = 0
while (attempts < config.repost_attempts):
try:
return (await channel.send(embed=embed))
except discord.errors.HTTPException:
attempts += 1
(await asyncio.sleep(2))
except Exception as e:
(await self.bot_channel.send((('**[ERROR]** A critical error occurred.' + ' ') + config.additional_error_message)))
log.fatal('EXCEPTION OCCURRED WHILE POSTING MESSAGE:')
log.exception(e) | async def post_message(self, channel, message_text, embed=None):
try:
if (embed is None):
chunk_size = 2000
if (message_text.startswith('```') and message_text.endswith('```')):
message_text = message_text[3:]
message_text = message_text[:(- 3)]
chunk_size = 1994
for i in range(0, len(message_text), chunk_size):
text_chunk = message_text[i:(i + chunk_size)]
if (chunk_size == 1994):
text_chunk = ('```' + text_chunk)
text_chunk += '```'
attempts = 0
while (attempts < config.repost_attempts):
try:
return (await channel.send(text_chunk))
except discord.errors.HTTPException:
attempts += 1
(await asyncio.sleep(2))
else:
attempts = 0
while (attempts < config.repost_attempts):
try:
return (await channel.send(embed=embed))
except discord.errors.HTTPException:
attempts += 1
(await asyncio.sleep(2))
except Exception as e:
(await self.bot_channel.send((('**[ERROR]** A critical error occurred.' + ' ') + config.additional_error_message)))
log.fatal('EXCEPTION OCCURRED WHILE POSTING MESSAGE:')
log.exception(e)<|docstring|>Has the bot post a message in the respective channel.<|endoftext|> |
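The chunking arithmetic above can be sanity-checked in isolation: Discord caps messages at 2000 characters, and fenced text loses six of them to the re-added triple backticks (hence chunk_size 1994). A standalone sketch of the same split; chunk_message is a hypothetical helper, not part of the bot.

```python
def chunk_message(message_text, limit=2000):
    # Split text the way post_message does, re-fencing code blocks.
    fenced = message_text.startswith('```') and message_text.endswith('```')
    if fenced:
        message_text = message_text[3:-3]
        limit -= 6  # room for the re-added fences on both ends
    for i in range(0, len(message_text), limit):
        chunk = message_text[i:i + limit]
        yield ('```%s```' % chunk) if fenced else chunk

assert all(len(c) <= 2000 for c in chunk_message('```%s```' % ('x' * 5000)))
```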
fdfa5ca31005a6ffc27088b363548930c6b2c70d28522d85ba8676e5ada7c9e5 | def dimensionality_reduction(df_input, decomposition_method: str=None, k: int=None, explained_variance: float=None, **kwargs: Any):
"\n Runs the chosen method of dimensionality reduction in the input data (df_input)\n and returns the reduced one.\n\n :param df_input: sparse matrix\n :type: Array to compute SVD and PCA on, of shape (M,N)\n :param decomposition_method: Choice of method\n :type: str, default = None\n :param K: Number of singular values(SVD) and principal component analyis(PCA) to compute.\n Must be 1 <= k < min(A.shape)\n :type: int, default = None\n :param explained_variance: 0 < n_components < 1, select the number of components such that the\n amount of variance that needs to be explained is greater than the percentage\n specified by n_components\n :type: float, default = None\n :params kwargs: Extra parameters passed to the selected method's base function\n :type: dict\n :raise ValueError: K and explained_variance must be defined.\n :raise TypeError: explained_variance must be a float.\n :raise ValueError : explained_variance must be in the interval (0..1) and k\n or explained_variance must be defined\n :raise NotImplementedError: Model implemented yet. Available names: 'SVD', 'PCA'\n :return: Input data with reduced dimensions\n :rtype: numpy.ndarray\n\n "
if ((k is None) and (explained_variance is None)):
raise ValueError('k and explained_variance must be defined')
if (decomposition_method == 'SVD'):
df_input = df_input.astype(float)
(u, _, _) = svds(df_input, k=k, **kwargs)
return u
elif (decomposition_method == 'PCA'):
if (k is not None):
u = PCA(k, **kwargs).fit_transform(df_input)
elif (explained_variance is not None):
if (not isinstance(explained_variance, float)):
raise TypeError(f'explained_variance must be a float, but its value passed was {explained_variance}.')
if ((explained_variance <= 0) or (explained_variance >= 1)):
raise ValueError('explained_variance must be in the interval (0..1)')
u = PCA(explained_variance, svd_solver='full', **kwargs).fit_transform(df_input)
return u
else:
raise NotImplementedError("Method not implemented yet. Available names: 'SVD', 'PCA'.") | Runs the chosen method of dimensionality reduction in the input data (df_input)
and returns the reduced one.
:param df_input: sparse matrix
:type: Array to compute SVD and PCA on, of shape (M,N)
:param decomposition_method: Choice of method
:type: str, default = None
:param k: Number of singular values (SVD) or principal components (PCA) to compute.
Must be 1 <= k < min(A.shape)
:type: int, default = None
:param explained_variance: 0 < n_components < 1, select the number of components such that the
amount of variance that needs to be explained is greater than the percentage
specified by n_components
:type: float, default = None
:param kwargs: Extra parameters passed to the selected method's base function
:type: dict
:raise ValueError: k and explained_variance must be defined.
:raise TypeError: explained_variance must be a float.
:raise ValueError: explained_variance must be in the interval (0..1)
:raise NotImplementedError: Method not implemented yet. Available names: 'SVD', 'PCA'
:return: Input data with reduced dimensions
:rtype: numpy.ndarray | gumly/dimensionality_reduction.py | dimensionality_reduction | GAVB-SERVICOS/Gumly | 9 | python | def dimensionality_reduction(df_input, decomposition_method: str=None, k: int=None, explained_variance: float=None, **kwargs: Any):
"\n Runs the chosen method of dimensionality reduction in the input data (df_input)\n and returns the reduced one.\n\n :param df_input: sparse matrix\n :type: Array to compute SVD and PCA on, of shape (M,N)\n :param decomposition_method: Choice of method\n :type: str, default = None\n :param K: Number of singular values(SVD) and principal component analyis(PCA) to compute.\n Must be 1 <= k < min(A.shape)\n :type: int, default = None\n :param explained_variance: 0 < n_components < 1, select the number of components such that the\n amount of variance that needs to be explained is greater than the percentage\n specified by n_components\n :type: float, default = None\n :params kwargs: Extra parameters passed to the selected method's base function\n :type: dict\n :raise ValueError: K and explained_variance must be defined.\n :raise TypeError: explained_variance must be a float.\n :raise ValueError : explained_variance must be in the interval (0..1) and k\n or explained_variance must be defined\n :raise NotImplementedError: Model implemented yet. Available names: 'SVD', 'PCA'\n :return: Input data with reduced dimensions\n :rtype: numpy.ndarray\n\n "
if ((k is None) and (explained_variance is None)):
raise ValueError('k and explained_variance must be defined')
if (decomposition_method == 'SVD'):
df_input = df_input.astype(float)
(u, _, _) = svds(df_input, k=k, **kwargs)
return u
elif (decomposition_method == 'PCA'):
if (k is not None):
u = PCA(k, **kwargs).fit_transform(df_input)
elif (explained_variance is not None):
if (not isinstance(explained_variance, float)):
raise TypeError(f'explained_variance must be a float, but its value passed was {explained_variance}.')
if ((explained_variance <= 0) or (explained_variance >= 1)):
raise ValueError('explained_variance must be in the interval (0..1)')
u = PCA(explained_variance, svd_solver='full', **kwargs).fit_transform(df_input)
return u
else:
raise NotImplementedError("Method not implemented yet. Available names: 'SVD', 'PCA'.") | def dimensionality_reduction(df_input, decomposition_method: str=None, k: int=None, explained_variance: float=None, **kwargs: Any):
"\n Runs the chosen method of dimensionality reduction in the input data (df_input)\n and returns the reduced one.\n\n :param df_input: sparse matrix\n :type: Array to compute SVD and PCA on, of shape (M,N)\n :param decomposition_method: Choice of method\n :type: str, default = None\n :param K: Number of singular values(SVD) and principal component analyis(PCA) to compute.\n Must be 1 <= k < min(A.shape)\n :type: int, default = None\n :param explained_variance: 0 < n_components < 1, select the number of components such that the\n amount of variance that needs to be explained is greater than the percentage\n specified by n_components\n :type: float, default = None\n :params kwargs: Extra parameters passed to the selected method's base function\n :type: dict\n :raise ValueError: K and explained_variance must be defined.\n :raise TypeError: explained_variance must be a float.\n :raise ValueError : explained_variance must be in the interval (0..1) and k\n or explained_variance must be defined\n :raise NotImplementedError: Model implemented yet. Available names: 'SVD', 'PCA'\n :return: Input data with reduced dimensions\n :rtype: numpy.ndarray\n\n "
if ((k is None) and (explained_variance is None)):
raise ValueError('k and explained_variance must be defined')
if (decomposition_method == 'SVD'):
df_input = df_input.astype(float)
(u, _, _) = svds(df_input, k=k, **kwargs)
return u
elif (decomposition_method == 'PCA'):
if (k is not None):
u = PCA(k, **kwargs).fit_transform(df_input)
elif (explained_variance is not None):
if (not isinstance(explained_variance, float)):
raise TypeError(f'explained_variance must be a float, but its value passed was {explained_variance}.')
if ((explained_variance <= 0) or (explained_variance >= 1)):
raise ValueError('explained_variance must be in the interval (0..1)')
u = PCA(explained_variance, svd_solver='full', **kwargs).fit_transform(df_input)
return u
else:
raise NotImplementedError("Method not implemented yet. Available names: 'SVD', 'PCA'.")<|docstring|>Runs the chosen method of dimensionality reduction in the input data (df_input)
and returns the reduced one.
:param df_input: sparse matrix
:type: Array to compute SVD and PCA on, of shape (M,N)
:param decomposition_method: Choice of method
:type: str, default = None
:param k: Number of singular values (SVD) or principal components (PCA) to compute.
Must be 1 <= k < min(A.shape)
:type: int, default = None
:param explained_variance: 0 < n_components < 1, select the number of components such that the
amount of variance that needs to be explained is greater than the percentage
specified by n_components
:type: float, default = None
:param kwargs: Extra parameters passed to the selected method's base function
:type: dict
:raise ValueError: k and explained_variance must be defined.
:raise TypeError: explained_variance must be a float.
:raise ValueError: explained_variance must be in the interval (0..1)
:raise NotImplementedError: Method not implemented yet. Available names: 'SVD', 'PCA'
:return: Input data with reduced dimensions
:rtype: numpy.ndarray<|endoftext|> |
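A usage sketch covering both branches of the record's API; the random matrix is illustrative only.

```python
import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(200, 50))

# Truncated SVD path: keep the 10 leading singular directions.
u = dimensionality_reduction(X, decomposition_method='SVD', k=10)
print(u.shape)  # (200, 10)

# PCA path driven by explained variance: sklearn picks the component
# count that explains at least 95% of the variance.
z = dimensionality_reduction(X, decomposition_method='PCA',
                             explained_variance=0.95)
print(z.shape[0])  # 200 rows; column count chosen by PCA
```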
23f70ab0b5be5fe659e8d1382dad2adbb5bfad278aa68f92bc1f319320d6ad22 | def __init__(self, neural_network, learning_rate, max_error, max_iterations=None):
'\n :type neural_network: synapyse.base.neural_network.NeuralNetwork\n :type learning_rate: float\n :type max_error: float\n :type max_iterations: int\n '
SupervisedLearning.__init__(self, neural_network, learning_rate, max_error, max_iterations) | :type neural_network: synapyse.base.neural_network.NeuralNetwork
:type learning_rate: float
:type max_error: float
:type max_iterations: int | synapyse/impl/learning/least_mean_square.py | __init__ | synapyse/synapyse | 4 | python | def __init__(self, neural_network, learning_rate, max_error, max_iterations=None):
'\n :type neural_network: synapyse.base.neural_network.NeuralNetwork\n :type learning_rate: float\n :type max_error: float\n :type max_iterations: int\n '
SupervisedLearning.__init__(self, neural_network, learning_rate, max_error, max_iterations) | def __init__(self, neural_network, learning_rate, max_error, max_iterations=None):
'\n :type neural_network: synapyse.base.neural_network.NeuralNetwork\n :type learning_rate: float\n :type max_error: float\n :type max_iterations: int\n '
SupervisedLearning.__init__(self, neural_network, learning_rate, max_error, max_iterations)<|docstring|>:type neural_network: synapyse.base.neural_network.NeuralNetwork
:type learning_rate: float
:type max_error: float
:type max_iterations: int<|endoftext|> |
894a8350341a9936c96c8b8101370e524a6dbd5a40e7eb001b2321e488c42891 | def update_neuron_weights(self, neuron, error):
'\n :type neuron: synapyse.base.neuron.Neuron\n :type error: float\n '
for connection in neuron.input_connections.values():
if Logger.is_debug_enabled():
Logger.debug('LeastMeanSquare::update_neuron_weights: weight before=', connection.weight)
connection.weight += ((connection.origin.output * error) * self.learning_rate)
if Logger.is_debug_enabled():
Logger.debug('LeastMeanSquare::update_neuron_weights: weight after=', connection.weight) | :type neuron: synapyse.base.neuron.Neuron
:type error: float | synapyse/impl/learning/least_mean_square.py | update_neuron_weights | synapyse/synapyse | 4 | python | def update_neuron_weights(self, neuron, error):
'\n :type neuron: synapyse.base.neuron.Neuron\n :type error: float\n '
for connection in neuron.input_connections.values():
if Logger.is_debug_enabled():
Logger.debug('LeastMeanSquare::update_neuron_weights: weight before=', connection.weight)
connection.weight += ((connection.origin.output * error) * self.learning_rate)
if Logger.is_debug_enabled():
Logger.debug('LeastMeanSquare::update_neuron_weights: weight after=', connection.weight) | def update_neuron_weights(self, neuron, error):
'\n :type neuron: synapyse.base.neuron.Neuron\n :type error: float\n '
for connection in neuron.input_connections.values():
if Logger.is_debug_enabled():
Logger.debug('LeastMeanSquare::update_neuron_weights: weight before=', connection.weight)
connection.weight += ((connection.origin.output * error) * self.learning_rate)
if Logger.is_debug_enabled():
Logger.debug('LeastMeanSquare::update_neuron_weights: weight after=', connection.weight)<|docstring|>:type neuron: synapyse.base.neuron.Neuron
:type error: float<|endoftext|> |
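The loop above is the classic least-mean-square (delta) rule, delta_w = learning_rate * error * input. One numeric step, independent of synapyse's connection objects:

```python
# One LMS update: eta = 0.5, upstream activation 0.8, output error 0.25.
eta, activation, error = 0.5, 0.8, 0.25
weight = 1.0
# Mirrors: connection.weight += connection.origin.output * error * self.learning_rate
weight += activation * error * eta
print(weight)  # 1.1
```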
cb48914afacf895e328bc60989da14577b1c9c1db623f231462731deb485cce3 | def __init__(self, id=None, buyer_id=None, shipper=None, date_shipped=None, date_delivered=None, tracking_number=None, cost=None, xp=None, account=None, from_address_id=None, to_address_id=None, from_address=None, to_address=None):
'\n PartialShipment - a model defined in Swagger\n\n :param dict swaggerTypes: The key is attribute name\n and the value is attribute type.\n :param dict attributeMap: The key is attribute name\n and the value is json key in definition.\n '
self.swagger_types = {'id': 'str', 'buyer_id': 'str', 'shipper': 'str', 'date_shipped': 'str', 'date_delivered': 'str', 'tracking_number': 'str', 'cost': 'float', 'xp': 'object', 'account': 'str', 'from_address_id': 'str', 'to_address_id': 'str', 'from_address': 'Address', 'to_address': 'Address'}
self.attribute_map = {'id': 'ID', 'buyer_id': 'BuyerID', 'shipper': 'Shipper', 'date_shipped': 'DateShipped', 'date_delivered': 'DateDelivered', 'tracking_number': 'TrackingNumber', 'cost': 'Cost', 'xp': 'xp', 'account': 'Account', 'from_address_id': 'FromAddressID', 'to_address_id': 'ToAddressID', 'from_address': 'FromAddress', 'to_address': 'ToAddress'}
self._id = id
self._buyer_id = buyer_id
self._shipper = shipper
self._date_shipped = date_shipped
self._date_delivered = date_delivered
self._tracking_number = tracking_number
self._cost = cost
self._xp = xp
self._account = account
self._from_address_id = from_address_id
self._to_address_id = to_address_id
self._from_address = from_address
self._to_address = to_address | PartialShipment - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition. | OrderCloud/models/partial_shipment.py | __init__ | klreeher/python-sdk | 0 | python | def __init__(self, id=None, buyer_id=None, shipper=None, date_shipped=None, date_delivered=None, tracking_number=None, cost=None, xp=None, account=None, from_address_id=None, to_address_id=None, from_address=None, to_address=None):
'\n PartialShipment - a model defined in Swagger\n\n :param dict swaggerTypes: The key is attribute name\n and the value is attribute type.\n :param dict attributeMap: The key is attribute name\n and the value is json key in definition.\n '
self.swagger_types = {'id': 'str', 'buyer_id': 'str', 'shipper': 'str', 'date_shipped': 'str', 'date_delivered': 'str', 'tracking_number': 'str', 'cost': 'float', 'xp': 'object', 'account': 'str', 'from_address_id': 'str', 'to_address_id': 'str', 'from_address': 'Address', 'to_address': 'Address'}
self.attribute_map = {'id': 'ID', 'buyer_id': 'BuyerID', 'shipper': 'Shipper', 'date_shipped': 'DateShipped', 'date_delivered': 'DateDelivered', 'tracking_number': 'TrackingNumber', 'cost': 'Cost', 'xp': 'xp', 'account': 'Account', 'from_address_id': 'FromAddressID', 'to_address_id': 'ToAddressID', 'from_address': 'FromAddress', 'to_address': 'ToAddress'}
self._id = id
self._buyer_id = buyer_id
self._shipper = shipper
self._date_shipped = date_shipped
self._date_delivered = date_delivered
self._tracking_number = tracking_number
self._cost = cost
self._xp = xp
self._account = account
self._from_address_id = from_address_id
self._to_address_id = to_address_id
self._from_address = from_address
self._to_address = to_address | def __init__(self, id=None, buyer_id=None, shipper=None, date_shipped=None, date_delivered=None, tracking_number=None, cost=None, xp=None, account=None, from_address_id=None, to_address_id=None, from_address=None, to_address=None):
'\n PartialShipment - a model defined in Swagger\n\n :param dict swaggerTypes: The key is attribute name\n and the value is attribute type.\n :param dict attributeMap: The key is attribute name\n and the value is json key in definition.\n '
self.swagger_types = {'id': 'str', 'buyer_id': 'str', 'shipper': 'str', 'date_shipped': 'str', 'date_delivered': 'str', 'tracking_number': 'str', 'cost': 'float', 'xp': 'object', 'account': 'str', 'from_address_id': 'str', 'to_address_id': 'str', 'from_address': 'Address', 'to_address': 'Address'}
self.attribute_map = {'id': 'ID', 'buyer_id': 'BuyerID', 'shipper': 'Shipper', 'date_shipped': 'DateShipped', 'date_delivered': 'DateDelivered', 'tracking_number': 'TrackingNumber', 'cost': 'Cost', 'xp': 'xp', 'account': 'Account', 'from_address_id': 'FromAddressID', 'to_address_id': 'ToAddressID', 'from_address': 'FromAddress', 'to_address': 'ToAddress'}
self._id = id
self._buyer_id = buyer_id
self._shipper = shipper
self._date_shipped = date_shipped
self._date_delivered = date_delivered
self._tracking_number = tracking_number
self._cost = cost
self._xp = xp
self._account = account
self._from_address_id = from_address_id
self._to_address_id = to_address_id
self._from_address = from_address
self._to_address = to_address<|docstring|>PartialShipment - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.<|endoftext|> |
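Because every field defaults to None, a partial update can carry only the fields being changed, and attribute_map recovers the OrderCloud JSON keys. A sketch; the dict-building snippet is an illustration, not part of the generated SDK shown here.

```python
shipment = PartialShipment(tracking_number='1Z999AA10123456784', cost=7.5)

# Map populated attributes to their wire names via attribute_map.
payload = {
    shipment.attribute_map[attr]: getattr(shipment, '_' + attr)
    for attr in shipment.attribute_map
    if getattr(shipment, '_' + attr) is not None
}
print(payload)  # {'TrackingNumber': '1Z999AA10123456784', 'Cost': 7.5}
```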
995529653201dc02711e6a7ab3feda139187951de39a4feb9a5f00214514723f | @property
def id(self):
'\n Gets the id of this PartialShipment.\n\n\n :return: The id of this PartialShipment.\n :rtype: str\n '
return self._id | Gets the id of this PartialShipment.
:return: The id of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | id | klreeher/python-sdk | 0 | python | @property
def id(self):
'\n Gets the id of this PartialShipment.\n\n\n :return: The id of this PartialShipment.\n :rtype: str\n '
return self._id | @property
def id(self):
'\n Gets the id of this PartialShipment.\n\n\n :return: The id of this PartialShipment.\n :rtype: str\n '
return self._id<|docstring|>Gets the id of this PartialShipment.
:return: The id of this PartialShipment.
:rtype: str<|endoftext|> |
539b90b9ddd2d71aa769d0598cd48f208737fa00a0f5756f11985fdd6929084e | @id.setter
def id(self, id):
'\n Sets the id of this PartialShipment.\n\n\n :param id: The id of this PartialShipment.\n :type: str\n '
self._id = id | Sets the id of this PartialShipment.
:param id: The id of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | id | klreeher/python-sdk | 0 | python | @id.setter
def id(self, id):
'\n Sets the id of this PartialShipment.\n\n\n :param id: The id of this PartialShipment.\n :type: str\n '
self._id = id | @id.setter
def id(self, id):
'\n Sets the id of this PartialShipment.\n\n\n :param id: The id of this PartialShipment.\n :type: str\n '
self._id = id<|docstring|>Sets the id of this PartialShipment.
:param id: The id of this PartialShipment.
:type: str<|endoftext|> |
7d522f368bb8e9840916ad42fdcb3c53c8dfe1eac172b4f70d9850316893cf21 | @property
def buyer_id(self):
'\n Gets the buyer_id of this PartialShipment.\n\n\n :return: The buyer_id of this PartialShipment.\n :rtype: str\n '
return self._buyer_id | Gets the buyer_id of this PartialShipment.
:return: The buyer_id of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | buyer_id | klreeher/python-sdk | 0 | python | @property
def buyer_id(self):
'\n Gets the buyer_id of this PartialShipment.\n\n\n :return: The buyer_id of this PartialShipment.\n :rtype: str\n '
return self._buyer_id | @property
def buyer_id(self):
'\n Gets the buyer_id of this PartialShipment.\n\n\n :return: The buyer_id of this PartialShipment.\n :rtype: str\n '
return self._buyer_id<|docstring|>Gets the buyer_id of this PartialShipment.
:return: The buyer_id of this PartialShipment.
:rtype: str<|endoftext|> |
4fa0312cfdbb7b98f5561c1c0b3fe423edc9dc396a3f02a299f3ece0c097b447 | @buyer_id.setter
def buyer_id(self, buyer_id):
'\n Sets the buyer_id of this PartialShipment.\n\n\n :param buyer_id: The buyer_id of this PartialShipment.\n :type: str\n '
self._buyer_id = buyer_id | Sets the buyer_id of this PartialShipment.
:param buyer_id: The buyer_id of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | buyer_id | klreeher/python-sdk | 0 | python | @buyer_id.setter
def buyer_id(self, buyer_id):
'\n Sets the buyer_id of this PartialShipment.\n\n\n :param buyer_id: The buyer_id of this PartialShipment.\n :type: str\n '
self._buyer_id = buyer_id | @buyer_id.setter
def buyer_id(self, buyer_id):
'\n Sets the buyer_id of this PartialShipment.\n\n\n :param buyer_id: The buyer_id of this PartialShipment.\n :type: str\n '
self._buyer_id = buyer_id<|docstring|>Sets the buyer_id of this PartialShipment.
:param buyer_id: The buyer_id of this PartialShipment.
:type: str<|endoftext|> |
ddeb74243b3e7b263ce960a3ae630b2c4329a2bab2646d1580c09e4fced21789 | @property
def shipper(self):
'\n Gets the shipper of this PartialShipment.\n\n\n :return: The shipper of this PartialShipment.\n :rtype: str\n '
return self._shipper | Gets the shipper of this PartialShipment.
:return: The shipper of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | shipper | klreeher/python-sdk | 0 | python | @property
def shipper(self):
'\n Gets the shipper of this PartialShipment.\n\n\n :return: The shipper of this PartialShipment.\n :rtype: str\n '
return self._shipper | @property
def shipper(self):
'\n Gets the shipper of this PartialShipment.\n\n\n :return: The shipper of this PartialShipment.\n :rtype: str\n '
return self._shipper<|docstring|>Gets the shipper of this PartialShipment.
:return: The shipper of this PartialShipment.
:rtype: str<|endoftext|> |
7772b99b2a1f438793fc20a4528709f6a9e6559fd242870322f38862fa9e1b2a | @shipper.setter
def shipper(self, shipper):
'\n Sets the shipper of this PartialShipment.\n\n\n :param shipper: The shipper of this PartialShipment.\n :type: str\n '
self._shipper = shipper | Sets the shipper of this PartialShipment.
:param shipper: The shipper of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | shipper | klreeher/python-sdk | 0 | python | @shipper.setter
def shipper(self, shipper):
'\n Sets the shipper of this PartialShipment.\n\n\n :param shipper: The shipper of this PartialShipment.\n :type: str\n '
self._shipper = shipper | @shipper.setter
def shipper(self, shipper):
'\n Sets the shipper of this PartialShipment.\n\n\n :param shipper: The shipper of this PartialShipment.\n :type: str\n '
self._shipper = shipper<|docstring|>Sets the shipper of this PartialShipment.
:param shipper: The shipper of this PartialShipment.
:type: str<|endoftext|> |
85d21f368c1dbebdb443dc1b62a554a54c2e3ba6ee3bdd6f08940a76c736e9d9 | @property
def date_shipped(self):
'\n Gets the date_shipped of this PartialShipment.\n\n\n :return: The date_shipped of this PartialShipment.\n :rtype: str\n '
return self._date_shipped | Gets the date_shipped of this PartialShipment.
:return: The date_shipped of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | date_shipped | klreeher/python-sdk | 0 | python | @property
def date_shipped(self):
'\n Gets the date_shipped of this PartialShipment.\n\n\n :return: The date_shipped of this PartialShipment.\n :rtype: str\n '
return self._date_shipped | @property
def date_shipped(self):
'\n Gets the date_shipped of this PartialShipment.\n\n\n :return: The date_shipped of this PartialShipment.\n :rtype: str\n '
return self._date_shipped<|docstring|>Gets the date_shipped of this PartialShipment.
:return: The date_shipped of this PartialShipment.
:rtype: str<|endoftext|> |
243c35649650f0c4838ede59cc67fd52772e571732cda02bb13cff31baff35ce | @date_shipped.setter
def date_shipped(self, date_shipped):
'\n Sets the date_shipped of this PartialShipment.\n\n\n :param date_shipped: The date_shipped of this PartialShipment.\n :type: str\n '
self._date_shipped = date_shipped | Sets the date_shipped of this PartialShipment.
:param date_shipped: The date_shipped of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | date_shipped | klreeher/python-sdk | 0 | python | @date_shipped.setter
def date_shipped(self, date_shipped):
'\n Sets the date_shipped of this PartialShipment.\n\n\n :param date_shipped: The date_shipped of this PartialShipment.\n :type: str\n '
self._date_shipped = date_shipped | @date_shipped.setter
def date_shipped(self, date_shipped):
'\n Sets the date_shipped of this PartialShipment.\n\n\n :param date_shipped: The date_shipped of this PartialShipment.\n :type: str\n '
self._date_shipped = date_shipped<|docstring|>Sets the date_shipped of this PartialShipment.
:param date_shipped: The date_shipped of this PartialShipment.
:type: str<|endoftext|> |
f6566119c739df118c36de5d0f75376c556b12120b6f7b67c4c157a0955e4dfd | @property
def date_delivered(self):
'\n Gets the date_delivered of this PartialShipment.\n\n\n :return: The date_delivered of this PartialShipment.\n :rtype: str\n '
return self._date_delivered | Gets the date_delivered of this PartialShipment.
:return: The date_delivered of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | date_delivered | klreeher/python-sdk | 0 | python | @property
def date_delivered(self):
'\n Gets the date_delivered of this PartialShipment.\n\n\n :return: The date_delivered of this PartialShipment.\n :rtype: str\n '
return self._date_delivered | @property
def date_delivered(self):
'\n Gets the date_delivered of this PartialShipment.\n\n\n :return: The date_delivered of this PartialShipment.\n :rtype: str\n '
return self._date_delivered<|docstring|>Gets the date_delivered of this PartialShipment.
:return: The date_delivered of this PartialShipment.
:rtype: str<|endoftext|> |
9e7b3ad81b7b00aa3d924eed61b620b3d5e9adbe4c2f51e1e87cdc2a630a9ec3 | @date_delivered.setter
def date_delivered(self, date_delivered):
'\n Sets the date_delivered of this PartialShipment.\n\n\n :param date_delivered: The date_delivered of this PartialShipment.\n :type: str\n '
self._date_delivered = date_delivered | Sets the date_delivered of this PartialShipment.
:param date_delivered: The date_delivered of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | date_delivered | klreeher/python-sdk | 0 | python | @date_delivered.setter
def date_delivered(self, date_delivered):
'\n Sets the date_delivered of this PartialShipment.\n\n\n :param date_delivered: The date_delivered of this PartialShipment.\n :type: str\n '
self._date_delivered = date_delivered | @date_delivered.setter
def date_delivered(self, date_delivered):
'\n Sets the date_delivered of this PartialShipment.\n\n\n :param date_delivered: The date_delivered of this PartialShipment.\n :type: str\n '
self._date_delivered = date_delivered<|docstring|>Sets the date_delivered of this PartialShipment.
:param date_delivered: The date_delivered of this PartialShipment.
:type: str<|endoftext|> |
394f6a91204437dbbd0a5170b33e5c49d99e70ef6f836848b23b01f50b28ad45 | @property
def tracking_number(self):
'\n Gets the tracking_number of this PartialShipment.\n\n\n :return: The tracking_number of this PartialShipment.\n :rtype: str\n '
return self._tracking_number | Gets the tracking_number of this PartialShipment.
:return: The tracking_number of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | tracking_number | klreeher/python-sdk | 0 | python | @property
def tracking_number(self):
'\n Gets the tracking_number of this PartialShipment.\n\n\n :return: The tracking_number of this PartialShipment.\n :rtype: str\n '
return self._tracking_number | @property
def tracking_number(self):
'\n Gets the tracking_number of this PartialShipment.\n\n\n :return: The tracking_number of this PartialShipment.\n :rtype: str\n '
return self._tracking_number<|docstring|>Gets the tracking_number of this PartialShipment.
:return: The tracking_number of this PartialShipment.
:rtype: str<|endoftext|> |
f91945603aaedd05e5dcc4c2c9300e8cbc922c59cc9d69deed857911d9011216 | @tracking_number.setter
def tracking_number(self, tracking_number):
'\n Sets the tracking_number of this PartialShipment.\n\n\n :param tracking_number: The tracking_number of this PartialShipment.\n :type: str\n '
self._tracking_number = tracking_number | Sets the tracking_number of this PartialShipment.
:param tracking_number: The tracking_number of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | tracking_number | klreeher/python-sdk | 0 | python | @tracking_number.setter
def tracking_number(self, tracking_number):
'\n Sets the tracking_number of this PartialShipment.\n\n\n :param tracking_number: The tracking_number of this PartialShipment.\n :type: str\n '
self._tracking_number = tracking_number | @tracking_number.setter
def tracking_number(self, tracking_number):
'\n Sets the tracking_number of this PartialShipment.\n\n\n :param tracking_number: The tracking_number of this PartialShipment.\n :type: str\n '
self._tracking_number = tracking_number<|docstring|>Sets the tracking_number of this PartialShipment.
:param tracking_number: The tracking_number of this PartialShipment.
:type: str<|endoftext|> |
f8c235e09cde053fb880f283c9a3624a81056408e13532a684ee660a4e40dfa6 | @property
def cost(self):
'\n Gets the cost of this PartialShipment.\n\n\n :return: The cost of this PartialShipment.\n :rtype: float\n '
return self._cost | Gets the cost of this PartialShipment.
:return: The cost of this PartialShipment.
:rtype: float | OrderCloud/models/partial_shipment.py | cost | klreeher/python-sdk | 0 | python | @property
def cost(self):
'\n Gets the cost of this PartialShipment.\n\n\n :return: The cost of this PartialShipment.\n :rtype: float\n '
return self._cost | @property
def cost(self):
'\n Gets the cost of this PartialShipment.\n\n\n :return: The cost of this PartialShipment.\n :rtype: float\n '
return self._cost<|docstring|>Gets the cost of this PartialShipment.
:return: The cost of this PartialShipment.
:rtype: float<|endoftext|> |
897b0a453fb7c8108258ba1e27fafcbf0a7da63525996e214da56c713f059399 | @cost.setter
def cost(self, cost):
'\n Sets the cost of this PartialShipment.\n\n\n :param cost: The cost of this PartialShipment.\n :type: float\n '
self._cost = cost | Sets the cost of this PartialShipment.
:param cost: The cost of this PartialShipment.
:type: float | OrderCloud/models/partial_shipment.py | cost | klreeher/python-sdk | 0 | python | @cost.setter
def cost(self, cost):
'\n Sets the cost of this PartialShipment.\n\n\n :param cost: The cost of this PartialShipment.\n :type: float\n '
self._cost = cost | @cost.setter
def cost(self, cost):
'\n Sets the cost of this PartialShipment.\n\n\n :param cost: The cost of this PartialShipment.\n :type: float\n '
self._cost = cost<|docstring|>Sets the cost of this PartialShipment.
:param cost: The cost of this PartialShipment.
:type: float<|endoftext|> |
034ca46abf7b299141eccfeb87827b024ce4ee03cbe9eaf4de214ca64e57460d | @property
def xp(self):
'\n Gets the xp of this PartialShipment.\n\n\n :return: The xp of this PartialShipment.\n :rtype: object\n '
return self._xp | Gets the xp of this PartialShipment.
:return: The xp of this PartialShipment.
:rtype: object | OrderCloud/models/partial_shipment.py | xp | klreeher/python-sdk | 0 | python | @property
def xp(self):
'\n Gets the xp of this PartialShipment.\n\n\n :return: The xp of this PartialShipment.\n :rtype: object\n '
return self._xp | @property
def xp(self):
'\n Gets the xp of this PartialShipment.\n\n\n :return: The xp of this PartialShipment.\n :rtype: object\n '
return self._xp<|docstring|>Gets the xp of this PartialShipment.
:return: The xp of this PartialShipment.
:rtype: object<|endoftext|> |
c4767d12fb0f3cd0376788100c825ddd02fd498bea26ef0aeb67aad432ce5cd2 | @xp.setter
def xp(self, xp):
'\n Sets the xp of this PartialShipment.\n\n\n :param xp: The xp of this PartialShipment.\n :type: object\n '
self._xp = xp | Sets the xp of this PartialShipment.
:param xp: The xp of this PartialShipment.
:type: object | OrderCloud/models/partial_shipment.py | xp | klreeher/python-sdk | 0 | python | @xp.setter
def xp(self, xp):
'\n Sets the xp of this PartialShipment.\n\n\n :param xp: The xp of this PartialShipment.\n :type: object\n '
self._xp = xp | @xp.setter
def xp(self, xp):
'\n Sets the xp of this PartialShipment.\n\n\n :param xp: The xp of this PartialShipment.\n :type: object\n '
self._xp = xp<|docstring|>Sets the xp of this PartialShipment.
:param xp: The xp of this PartialShipment.
:type: object<|endoftext|> |
7c8cce5345f73be2f65b8870a03763c0f880bb32ffd1e9691c4df1d35bfe2aa6 | @property
def account(self):
'\n Gets the account of this PartialShipment.\n\n\n :return: The account of this PartialShipment.\n :rtype: str\n '
return self._account | Gets the account of this PartialShipment.
:return: The account of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | account | klreeher/python-sdk | 0 | python | @property
def account(self):
'\n Gets the account of this PartialShipment.\n\n\n :return: The account of this PartialShipment.\n :rtype: str\n '
return self._account | @property
def account(self):
'\n Gets the account of this PartialShipment.\n\n\n :return: The account of this PartialShipment.\n :rtype: str\n '
return self._account<|docstring|>Gets the account of this PartialShipment.
:return: The account of this PartialShipment.
:rtype: str<|endoftext|> |
f4adc7d8e45611bf2a5ff3061c6124704139eb3c95498a1e4d25de52df17fdcf | @account.setter
def account(self, account):
'\n Sets the account of this PartialShipment.\n\n\n :param account: The account of this PartialShipment.\n :type: str\n '
self._account = account | Sets the account of this PartialShipment.
:param account: The account of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | account | klreeher/python-sdk | 0 | python | @account.setter
def account(self, account):
'\n Sets the account of this PartialShipment.\n\n\n :param account: The account of this PartialShipment.\n :type: str\n '
self._account = account | @account.setter
def account(self, account):
'\n Sets the account of this PartialShipment.\n\n\n :param account: The account of this PartialShipment.\n :type: str\n '
self._account = account<|docstring|>Sets the account of this PartialShipment.
:param account: The account of this PartialShipment.
:type: str<|endoftext|> |
2c23cc5feb8e49cb8a5e8ed0d4b42aaea96c1bee98994988b231b6d407325b43 | @property
def from_address_id(self):
'\n Gets the from_address_id of this PartialShipment.\n\n\n :return: The from_address_id of this PartialShipment.\n :rtype: str\n '
return self._from_address_id | Gets the from_address_id of this PartialShipment.
:return: The from_address_id of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | from_address_id | klreeher/python-sdk | 0 | python | @property
def from_address_id(self):
'\n Gets the from_address_id of this PartialShipment.\n\n\n :return: The from_address_id of this PartialShipment.\n :rtype: str\n '
return self._from_address_id | @property
def from_address_id(self):
'\n Gets the from_address_id of this PartialShipment.\n\n\n :return: The from_address_id of this PartialShipment.\n :rtype: str\n '
return self._from_address_id<|docstring|>Gets the from_address_id of this PartialShipment.
:return: The from_address_id of this PartialShipment.
:rtype: str<|endoftext|> |
078244c6a4933f4d1d6e15450944d987194a2f0189ce491f43f976fd74a693ca | @from_address_id.setter
def from_address_id(self, from_address_id):
'\n Sets the from_address_id of this PartialShipment.\n\n\n :param from_address_id: The from_address_id of this PartialShipment.\n :type: str\n '
self._from_address_id = from_address_id | Sets the from_address_id of this PartialShipment.
:param from_address_id: The from_address_id of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | from_address_id | klreeher/python-sdk | 0 | python | @from_address_id.setter
def from_address_id(self, from_address_id):
'\n Sets the from_address_id of this PartialShipment.\n\n\n :param from_address_id: The from_address_id of this PartialShipment.\n :type: str\n '
self._from_address_id = from_address_id | @from_address_id.setter
def from_address_id(self, from_address_id):
'\n Sets the from_address_id of this PartialShipment.\n\n\n :param from_address_id: The from_address_id of this PartialShipment.\n :type: str\n '
self._from_address_id = from_address_id<|docstring|>Sets the from_address_id of this PartialShipment.
:param from_address_id: The from_address_id of this PartialShipment.
:type: str<|endoftext|> |
7cd89d3c533ac48a0a057f0bbe418fe1ed444506fa18442caf48865032a043c9 | @property
def to_address_id(self):
'\n Gets the to_address_id of this PartialShipment.\n\n\n :return: The to_address_id of this PartialShipment.\n :rtype: str\n '
return self._to_address_id | Gets the to_address_id of this PartialShipment.
:return: The to_address_id of this PartialShipment.
:rtype: str | OrderCloud/models/partial_shipment.py | to_address_id | klreeher/python-sdk | 0 | python | @property
def to_address_id(self):
'\n Gets the to_address_id of this PartialShipment.\n\n\n :return: The to_address_id of this PartialShipment.\n :rtype: str\n '
return self._to_address_id | @property
def to_address_id(self):
'\n Gets the to_address_id of this PartialShipment.\n\n\n :return: The to_address_id of this PartialShipment.\n :rtype: str\n '
return self._to_address_id<|docstring|>Gets the to_address_id of this PartialShipment.
:return: The to_address_id of this PartialShipment.
:rtype: str<|endoftext|> |
46a298812973902dd56bb4a44760dbbb8ab4c0964dff63659d9b3fb1996ad0dc | @to_address_id.setter
def to_address_id(self, to_address_id):
'\n Sets the to_address_id of this PartialShipment.\n\n\n :param to_address_id: The to_address_id of this PartialShipment.\n :type: str\n '
self._to_address_id = to_address_id | Sets the to_address_id of this PartialShipment.
:param to_address_id: The to_address_id of this PartialShipment.
:type: str | OrderCloud/models/partial_shipment.py | to_address_id | klreeher/python-sdk | 0 | python | @to_address_id.setter
def to_address_id(self, to_address_id):
'\n Sets the to_address_id of this PartialShipment.\n\n\n :param to_address_id: The to_address_id of this PartialShipment.\n :type: str\n '
self._to_address_id = to_address_id | @to_address_id.setter
def to_address_id(self, to_address_id):
'\n Sets the to_address_id of this PartialShipment.\n\n\n :param to_address_id: The to_address_id of this PartialShipment.\n :type: str\n '
self._to_address_id = to_address_id<|docstring|>Sets the to_address_id of this PartialShipment.
:param to_address_id: The to_address_id of this PartialShipment.
:type: str<|endoftext|> |
746f34fcd9c640065b8e539fb75939974e338e14dea5876a5154a6ef8e440f4e | @property
def from_address(self):
'\n Gets the from_address of this PartialShipment.\n\n\n :return: The from_address of this PartialShipment.\n :rtype: Address\n '
return self._from_address | Gets the from_address of this PartialShipment.
:return: The from_address of this PartialShipment.
:rtype: Address | OrderCloud/models/partial_shipment.py | from_address | klreeher/python-sdk | 0 | python | @property
def from_address(self):
'\n Gets the from_address of this PartialShipment.\n\n\n :return: The from_address of this PartialShipment.\n :rtype: Address\n '
return self._from_address | @property
def from_address(self):
'\n Gets the from_address of this PartialShipment.\n\n\n :return: The from_address of this PartialShipment.\n :rtype: Address\n '
return self._from_address<|docstring|>Gets the from_address of this PartialShipment.
:return: The from_address of this PartialShipment.
:rtype: Address<|endoftext|> |
2c5f7e6e6bf1358770575b5a7df84adf6952e64f09eed10e0c3fd7e8adbaa4a8 | @from_address.setter
def from_address(self, from_address):
'\n Sets the from_address of this PartialShipment.\n\n\n :param from_address: The from_address of this PartialShipment.\n :type: Address\n '
self._from_address = from_address | Sets the from_address of this PartialShipment.
:param from_address: The from_address of this PartialShipment.
:type: Address | OrderCloud/models/partial_shipment.py | from_address | klreeher/python-sdk | 0 | python | @from_address.setter
def from_address(self, from_address):
'\n Sets the from_address of this PartialShipment.\n\n\n :param from_address: The from_address of this PartialShipment.\n :type: Address\n '
self._from_address = from_address | @from_address.setter
def from_address(self, from_address):
'\n Sets the from_address of this PartialShipment.\n\n\n :param from_address: The from_address of this PartialShipment.\n :type: Address\n '
self._from_address = from_address<|docstring|>Sets the from_address of this PartialShipment.
:param from_address: The from_address of this PartialShipment.
:type: Address<|endoftext|> |
9631d0553edea0cc5fe3ae095055ee649849324363ac11a13baccdb1af905963 | @property
def to_address(self):
'\n Gets the to_address of this PartialShipment.\n\n\n :return: The to_address of this PartialShipment.\n :rtype: Address\n '
return self._to_address | Gets the to_address of this PartialShipment.
:return: The to_address of this PartialShipment.
:rtype: Address | OrderCloud/models/partial_shipment.py | to_address | klreeher/python-sdk | 0 | python | @property
def to_address(self):
'\n Gets the to_address of this PartialShipment.\n\n\n :return: The to_address of this PartialShipment.\n :rtype: Address\n '
return self._to_address | @property
def to_address(self):
'\n Gets the to_address of this PartialShipment.\n\n\n :return: The to_address of this PartialShipment.\n :rtype: Address\n '
return self._to_address<|docstring|>Gets the to_address of this PartialShipment.
:return: The to_address of this PartialShipment.
:rtype: Address<|endoftext|> |
1faf2e541ac43184ebed57a8622b93ef24569d150a423561432a1b620b92e1bb | @to_address.setter
def to_address(self, to_address):
'\n Sets the to_address of this PartialShipment.\n\n\n :param to_address: The to_address of this PartialShipment.\n :type: Address\n '
self._to_address = to_address | Sets the to_address of this PartialShipment.
:param to_address: The to_address of this PartialShipment.
:type: Address | OrderCloud/models/partial_shipment.py | to_address | klreeher/python-sdk | 0 | python | @to_address.setter
def to_address(self, to_address):
'\n Sets the to_address of this PartialShipment.\n\n\n :param to_address: The to_address of this PartialShipment.\n :type: Address\n '
self._to_address = to_address | @to_address.setter
def to_address(self, to_address):
'\n Sets the to_address of this PartialShipment.\n\n\n :param to_address: The to_address of this PartialShipment.\n :type: Address\n '
self._to_address = to_address<|docstring|>Sets the to_address of this PartialShipment.
:param to_address: The to_address of this PartialShipment.
:type: Address<|endoftext|> |
f92515cd38effc7eee4069f2288d78a0f0836df932fb36a84e3b4f7e14233415 | def to_dict(self):
'\n Returns the model properties as a dict\n '
result = {}
for (attr, _) in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result | Returns the model properties as a dict | OrderCloud/models/partial_shipment.py | to_dict | klreeher/python-sdk | 0 | python | def to_dict(self):
'\n \n '
result = {}
for (attr, _) in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result | def to_dict(self):
'\n \n '
result = {}
for (attr, _) in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result<|docstring|>Returns the model properties as a dict<|endoftext|> |
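A hedged usage sketch for the to_dict record above, assuming the generated PartialShipment class accepts a no-argument constructor:

shipment = PartialShipment()
shipment.tracking_number = '1Z999AA10123456784'  # hypothetical tracking code
shipment.cost = 12.5
print(shipment.to_dict())
# nested models and lists are converted recursively through their own to_dict()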
c373d87dd29c1e96dce460ab571bff86e58edb298ba83c85d8cc7603a6505de4 | def to_str(self):
'\n Returns the string representation of the model\n '
return pformat(self.to_dict()) | Returns the string representation of the model | OrderCloud/models/partial_shipment.py | to_str | klreeher/python-sdk | 0 | python | def to_str(self):
'\n \n '
return pformat(self.to_dict()) | def to_str(self):
'\n \n '
return pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|> |
1034ff7dd2eef24d21e3c2fa7409b793ab5cbb8cd75a2eb0ab3e62604b26264d | def __repr__(self):
'\n For `print` and `pprint`\n '
return self.to_str() | For `print` and `pprint` | OrderCloud/models/partial_shipment.py | __repr__ | klreeher/python-sdk | 0 | python | def __repr__(self):
'\n \n '
return self.to_str() | def __repr__(self):
'\n \n '
return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|> |
a43b3ce7478646f0122f200e4de04f4f5ed99329a4b75930eecef4ff54a23351 | def __eq__(self, other):
'\n Returns true if both objects are equal\n '
return (self.__dict__ == other.__dict__) | Returns true if both objects are equal | OrderCloud/models/partial_shipment.py | __eq__ | klreeher/python-sdk | 0 | python | def __eq__(self, other):
'\n \n '
return (self.__dict__ == other.__dict__) | def __eq__(self, other):
'\n \n '
return (self.__dict__ == other.__dict__)<|docstring|>Returns true if both objects are equal<|endoftext|> |
e5050f8e1402e3a4c90d6c6e229c4c9e2b8ec61e0be457915ea9d976f7e6b0b4 | def __ne__(self, other):
'\n Returns true if both objects are not equal\n '
return (not (self == other)) | Returns true if both objects are not equal | OrderCloud/models/partial_shipment.py | __ne__ | klreeher/python-sdk | 0 | python | def __ne__(self, other):
'\n \n '
return (not (self == other)) | def __ne__(self, other):
'\n \n '
return (not (self == other))<|docstring|>Returns true if both objects are not equal<|endoftext|> |
cc8fc0de2eab79b24b61b766650a2b2c0c6a1ddbaab0bf32abd9718affdc5d46 | @classmethod
def interpolate(cls, collect, defaultx=None, defaulty=None, defaultz=None, table_data=None, parent=None):
'\n Class method to create data interpolated onto a 3D model.\n \n The arguments are the same as __init__.\n '
self = cls(collect, parent=parent, defaultx=defaultx, defaulty=defaulty, defaultz=defaultz, table_data=table_data)
value = self.exec_()
if (value == QtWidgets.QDialog.Accepted):
self._apply() | Class method to create data interpolated onto a 3D model.
The arguments are the same as __init__. | glue/dialogs/interpolate_onto_3d/qt/interpolate_onto_3d.py | interpolate | gluesolutions/glue | 0 | python | @classmethod
def interpolate(cls, collect, defaultx=None, defaulty=None, defaultz=None, table_data=None, parent=None):
'\n Class method to create data interpolated onto a 3D model.\n \n The arguments are the same as __init__.\n '
self = cls(collect, parent=parent, defaultx=defaultx, defaulty=defaulty, defaultz=defaultz, table_data=table_data)
value = self.exec_()
if (value == QtWidgets.QDialog.Accepted):
self._apply() | @classmethod
def interpolate(cls, collect, defaultx=None, defaulty=None, defaultz=None, table_data=None, parent=None):
'\n Class method to create data interpolated onto a 3D model.\n \n The arguments are the same as __init__.\n '
self = cls(collect, parent=parent, defaultx=defaultx, defaulty=defaulty, defaultz=defaultz, table_data=table_data)
value = self.exec_()
if (value == QtWidgets.QDialog.Accepted):
self._apply()<|docstring|>Class method to create data interpolated onto a 3D model.
The arguments are the same as __init__.<|endoftext|> |
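A hedged sketch of the classmethod-dialog pattern above; the dialog class name InterpolateOnto3D is an assumption taken from the module path:

dialog = InterpolateOnto3D(collect, defaultx='x', defaulty='y', defaultz='z')  # hypothetical class name
if dialog.exec_() == QtWidgets.QDialog.Accepted:  # run modally, apply only on accept
    dialog._apply()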
e2e672a8360f6ef8b8bafac26b17c050ef8052192952a14db1d04caf463769af | def compl_item(self, tokens, index):
'Completion for pattern item commands.\n\n Complete using the information defined in the inherited class.\n        '
candidates = []
while (index < len(tokens)):
if (tokens[(index - 1)] == '/'):
candidates = []
break
elif (tokens[(index - 1)] in self.DATA_FIELDS):
candidates = self.MATCHING_PATTERN
elif (tokens[(index - 1)] in self.MATCHING_PATTERN):
if (tokens[(index - 1)] == 'prefix'):
candidates = ['Prefix']
else:
tmp_token = self.DATA_FIELDS_VALUES.get(tokens[(index - 2)])
if (tmp_token is not None):
candidates = [tmp_token]
else:
candidates = []
else:
candidates = copy.deepcopy(self.DATA_FIELDS)
candidates.append('/')
index += 1
return (candidates, index) | Completion for pattern item commands.
Complete using the information defined in the inherited class. | src/cli/commands/pri_flow_compl_pattern.py | compl_item | Hideyuki-Yamashita/change_component_name_git | 0 | python | def compl_item(self, tokens, index):
'Completion for pattern item commands.\n\n Complete using the information defined in the inherited class.\n        '
candidates = []
while (index < len(tokens)):
if (tokens[(index - 1)] == '/'):
candidates = []
break
elif (tokens[(index - 1)] in self.DATA_FIELDS):
candidates = self.MATCHING_PATTERN
elif (tokens[(index - 1)] in self.MATCHING_PATTERN):
if (tokens[(index - 1)] == 'prefix'):
candidates = ['Prefix']
else:
tmp_token = self.DATA_FIELDS_VALUES.get(tokens[(index - 2)])
if (tmp_token is not None):
candidates = [tmp_token]
else:
candidates = []
else:
candidates = copy.deepcopy(self.DATA_FIELDS)
candidates.append('/')
index += 1
return (candidates, index) | def compl_item(self, tokens, index):
'Completion for pattern item commands.\n\n Complete using the information defined in the inherited class.\n        '
candidates = []
while (index < len(tokens)):
if (tokens[(index - 1)] == '/'):
candidates = []
break
elif (tokens[(index - 1)] in self.DATA_FIELDS):
candidates = self.MATCHING_PATTERN
elif (tokens[(index - 1)] in self.MATCHING_PATTERN):
if (tokens[(index - 1)] == 'prefix'):
candidates = ['Prefix']
else:
tmp_token = self.DATA_FIELDS_VALUES.get(tokens[(index - 2)])
if (tmp_token is not None):
candidates = [tmp_token]
else:
candidates = []
else:
candidates = copy.deepcopy(self.DATA_FIELDS)
candidates.append('/')
index += 1
return (candidates, index)<|docstring|>Completion for pattern item commands.
Complete using the information defined in the inherited class.<|endoftext|>
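A hedged walk-through of the completion loop with toy class attributes (the real DATA_FIELDS/MATCHING_PATTERN values live in the inheriting class):

class ToyCompl:
    DATA_FIELDS = ['dst_mac', 'src_mac']                 # hypothetical field names
    MATCHING_PATTERN = ['is', 'spec', 'mask', 'prefix']
    DATA_FIELDS_VALUES = {'dst_mac': 'MAC address'}
    compl_item = compl_item                              # reuse the method above for illustration

cands, idx = ToyCompl().compl_item(['dst_mac', 'is', ''], 2)
# cands == ['MAC address'] and idx == 3 under these toy definitions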
7104f13f7968c9097fb73834d4d0d447eaa29ed3249660e7fb0490c760f61bbc | @pytest.mark.parametrize('template', [template_simple, template_nested_while1, template_nested_while2, template_nested_if, template_function, template_other])
@pytest.mark.parametrize('keyword', ['break', 'raise Some', 'raise Some()', 'raise'])
def test_correct_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
'Testing while loops with correct code.'
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | Testing while loops with correct code. | tests/test_visitors/test_ast/test_loops/test_loops/test_infinite_while_loops.py | test_correct_while_loops | pawelarybak/wemake-python-styleguide | 1 | python | @pytest.mark.parametrize('template', [template_simple, template_nested_while1, template_nested_while2, template_nested_if, template_function, template_other])
@pytest.mark.parametrize('keyword', ['break', 'raise Some', 'raise Some()', 'raise'])
def test_correct_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | @pytest.mark.parametrize('template', [template_simple, template_nested_while1, template_nested_while2, template_nested_if, template_function, template_other])
@pytest.mark.parametrize('keyword', ['break', 'raise Some', 'raise Some()', 'raise'])
def test_correct_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [])<|docstring|>Testing while loops with correct code.<|endoftext|> |
53cd361f90fc6ad1d74a91940cc76f7e81638db0c4e73ad12aacb96e6026bdf9 | @pytest.mark.parametrize('template', [template_function])
@pytest.mark.parametrize('keyword', ['return', 'return some'])
def test_correct_while_loops_function(assert_errors, parse_ast_tree, keyword, template, default_options, mode):
'Testing while loops with ``return`` statements.'
tree = parse_ast_tree(mode(template.format(keyword)))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | Testing while loops with ``return`` statements. | tests/test_visitors/test_ast/test_loops/test_loops/test_infinite_while_loops.py | test_correct_while_loops_function | pawelarybak/wemake-python-styleguide | 1 | python | @pytest.mark.parametrize('template', [template_function])
@pytest.mark.parametrize('keyword', ['return', 'return some'])
def test_correct_while_loops_function(assert_errors, parse_ast_tree, keyword, template, default_options, mode):
tree = parse_ast_tree(mode(template.format(keyword)))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | @pytest.mark.parametrize('template', [template_function])
@pytest.mark.parametrize('keyword', ['return', 'return some'])
def test_correct_while_loops_function(assert_errors, parse_ast_tree, keyword, template, default_options, mode):
tree = parse_ast_tree(mode(template.format(keyword)))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [])<|docstring|>Testing while loops with ``return`` statements.<|endoftext|> |
f1bc764b34ac95ae64907b73d48c28bc19ee873deaa93a1391144f5fa9beb3b4 | @pytest.mark.parametrize('template', [template_other])
@pytest.mark.parametrize('keyword', ['print(some)', 'attr.method()', 'a = 1'])
def test_other_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
'Testing other while loops with regular code.'
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | Testing other while loops with regular code. | tests/test_visitors/test_ast/test_loops/test_loops/test_infinite_while_loops.py | test_other_while_loops | pawelarybak/wemake-python-styleguide | 1 | python | @pytest.mark.parametrize('template', [template_other])
@pytest.mark.parametrize('keyword', ['print(some)', 'attr.method()', 'a = 1'])
def test_other_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | @pytest.mark.parametrize('template', [template_other])
@pytest.mark.parametrize('keyword', ['print(some)', 'attr.method()', 'a = 1'])
def test_other_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [])<|docstring|>Testing other while loops with regular code.<|endoftext|> |
435fe7d9781f070a30f9ae40997bf3858afa8fe1e1c094d6e427ca914e6b5127 | @pytest.mark.parametrize('template', [template_simple, template_nested_while1, template_nested_while2, template_nested_if, template_function])
@pytest.mark.parametrize('keyword', ['print(some)', 'attr.method()', 'a = 1'])
def test_wrong_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
'Testing while loops with wrong code.'
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [InfiniteWhileLoopViolation]) | Testing while loops with wrong code. | tests/test_visitors/test_ast/test_loops/test_loops/test_infinite_while_loops.py | test_wrong_while_loops | pawelarybak/wemake-python-styleguide | 1 | python | @pytest.mark.parametrize('template', [template_simple, template_nested_while1, template_nested_while2, template_nested_if, template_function])
@pytest.mark.parametrize('keyword', ['print(some)', 'attr.method()', 'a = 1'])
def test_wrong_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [InfiniteWhileLoopViolation]) | @pytest.mark.parametrize('template', [template_simple, template_nested_while1, template_nested_while2, template_nested_if, template_function])
@pytest.mark.parametrize('keyword', ['print(some)', 'attr.method()', 'a = 1'])
def test_wrong_while_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [InfiniteWhileLoopViolation])<|docstring|>Testing while loops with wrong code.<|endoftext|> |
910a88007ed572c8350833890832a76d80ec4d933731ea5a9ec550c177b1f2fd | @pytest.mark.parametrize('template', [template_double_while])
@pytest.mark.parametrize('keyword', ['break', 'raise ValueError'])
def test_double_while_correct_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
'Testing while loops with correct code.'
tree = parse_ast_tree(template.format(keyword, keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | Testing while loops with correct code. | tests/test_visitors/test_ast/test_loops/test_loops/test_infinite_while_loops.py | test_double_while_correct_loops | pawelarybak/wemake-python-styleguide | 1 | python | @pytest.mark.parametrize('template', [template_double_while])
@pytest.mark.parametrize('keyword', ['break', 'raise ValueError'])
def test_double_while_correct_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword, keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, []) | @pytest.mark.parametrize('template', [template_double_while])
@pytest.mark.parametrize('keyword', ['break', 'raise ValueError'])
def test_double_while_correct_loops(assert_errors, parse_ast_tree, keyword, template, default_options):
tree = parse_ast_tree(template.format(keyword, keyword))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [])<|docstring|>Testing while loops with correct code.<|endoftext|>
73bee27c8c6fdf358cfcae52e2873b103c30fb31105083572e0a43e721096a64 | @pytest.mark.parametrize('template', [template_double_while])
@pytest.mark.parametrize(('keyword1', 'keyword2'), [('print()', 'break'), ('break', 'other.attr = 1')])
def test_double_while_wrong_loops(assert_errors, parse_ast_tree, keyword1, keyword2, template, default_options):
'Testing while loops with wrong code.'
tree = parse_ast_tree(template.format(keyword1, keyword2))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [InfiniteWhileLoopViolation]) | Testing while loops with wrong code. | tests/test_visitors/test_ast/test_loops/test_loops/test_infinite_while_loops.py | test_double_while_wrong_loops | pawelarybak/wemake-python-styleguide | 1 | python | @pytest.mark.parametrize('template', [template_double_while])
@pytest.mark.parametrize(('keyword1', 'keyword2'), [('print()', 'break'), ('break', 'other.attr = 1')])
def test_double_while_wrong_loops(assert_errors, parse_ast_tree, keyword1, keyword2, template, default_options):
tree = parse_ast_tree(template.format(keyword1, keyword2))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [InfiniteWhileLoopViolation]) | @pytest.mark.parametrize('template', [template_double_while])
@pytest.mark.parametrize(('keyword1', 'keyword2'), [('print()', 'break'), ('break', 'other.attr = 1')])
def test_double_while_wrong_loops(assert_errors, parse_ast_tree, keyword1, keyword2, template, default_options):
tree = parse_ast_tree(template.format(keyword1, keyword2))
visitor = WrongLoopVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [InfiniteWhileLoopViolation])<|docstring|>Testing while loops with wrong code.<|endoftext|> |
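A sketch of how the parametrized templates used by these tests expand; template_simple below is an assumed stand-in for the module-level fixture:

template_simple = '''
while True:
    {0}
'''
source = template_simple.format('break')  # or 'print(some)' for the failing cases
# parse_ast_tree(source) then feeds the resulting AST to WrongLoopVisitor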
199c83403d5d19a7ac3a0c804efd41722bd471f12bf7fd2a10bd73ae7f19ce23 | def finalize_options(self):
'Set options for the command line.'
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True | Set options for the command line. | setup.py | finalize_options | vklab/pytest-localstack | 0 | python | def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True | def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True<|docstring|>Set options for the command line.<|endoftext|> |
cfe873f6a32ee507b927c401523590d8b54a70f148f2853b4014951846cbc62b | def run_tests(self):
'Execute the test runner command.'
import pytest
sys.exit(pytest.main(self.test_args)) | Execute the test runner command. | setup.py | run_tests | vklab/pytest-localstack | 0 | python | def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args)) | def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))<|docstring|>Execute the test runner command.<|endoftext|> |
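A hedged sketch of how such a test command is wired into setup(); the class name PyTest is an assumption, since only the method bodies appear above:

from setuptools import setup

setup(
    name='pytest-localstack',
    cmdclass={'test': PyTest},  # PyTest = the TestCommand subclass holding the methods above
)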
e8c363bf7ee73ed0c02fc74b39e773e846f2fe91e635168f14764b46ba0ecc64 | @property
def extra_state_attributes(self):
'Return the state attributes.'
attrs: dict[(str, Any)] = {}
attrs['ventilationRemainingTime'] = self._system.ventilationRemainingTime
attrs['ventilatingUntilTime'] = self._system.ventilatingUntilTime
attrs['diagVentilationRuntime'] = self._system.diagVentilationRuntime
return attrs | Return the state attributes. | custom_components/lennoxs30/switch.py | extra_state_attributes | hufman/lennoxs30 | 0 | python | @property
def extra_state_attributes(self):
attrs: dict[(str, Any)] = {}
attrs['ventilationRemainingTime'] = self._system.ventilationRemainingTime
attrs['ventilatingUntilTime'] = self._system.ventilatingUntilTime
attrs['diagVentilationRuntime'] = self._system.diagVentilationRuntime
return attrs | @property
def extra_state_attributes(self):
attrs: dict[(str, Any)] = {}
attrs['ventilationRemainingTime'] = self._system.ventilationRemainingTime
attrs['ventilatingUntilTime'] = self._system.ventilatingUntilTime
attrs['diagVentilationRuntime'] = self._system.diagVentilationRuntime
return attrs<|docstring|>Return the state attributes.<|endoftext|> |
559d224dcf80542c943006ddcdfb92093ced3a78063fee87f87d4db15299e161 | def update(self):
'Update data from the thermostat API.'
return True | Update data from the thermostat API. | custom_components/lennoxs30/switch.py | update | hufman/lennoxs30 | 0 | python | def update(self):
return True | def update(self):
return True<|docstring|>Update data from the thermostat API.<|endoftext|> |
53669033a44cc2b7f0c0eb1c203b1e7a7c81e72e96769d5c38bc62208b72137f | @property
def should_poll(self):
'No polling needed.'
return False | No polling needed. | custom_components/lennoxs30/switch.py | should_poll | hufman/lennoxs30 | 0 | python | @property
def should_poll(self):
return False | @property
def should_poll(self):
return False<|docstring|>No polling needed.<|endoftext|> |
522c5920f27cb57463dbb54dd1db14467160c2954e6da5a9323d1e934763bb45 | @property
def extra_state_attributes(self):
'Return the state attributes.'
return {} | Return the state attributes. | custom_components/lennoxs30/switch.py | extra_state_attributes | hufman/lennoxs30 | 0 | python | @property
def extra_state_attributes(self):
return {} | @property
def extra_state_attributes(self):
return {}<|docstring|>Return the state attributes.<|endoftext|> |
559d224dcf80542c943006ddcdfb92093ced3a78063fee87f87d4db15299e161 | def update(self):
'Update data from the thermostat API.'
return True | Update data from the thermostat API. | custom_components/lennoxs30/switch.py | update | hufman/lennoxs30 | 0 | python | def update(self):
return True | def update(self):
return True<|docstring|>Update data from the thermostat API.<|endoftext|> |
53669033a44cc2b7f0c0eb1c203b1e7a7c81e72e96769d5c38bc62208b72137f | @property
def should_poll(self):
'No polling needed.'
return False | No polling needed. | custom_components/lennoxs30/switch.py | should_poll | hufman/lennoxs30 | 0 | python | @property
def should_poll(self):
return False | @property
def should_poll(self):
return False<|docstring|>No polling needed.<|endoftext|> |
ef02a2bf00b8356747d0c648a7402f9323b9e7bf5133c6b6911efb6ceeaa1629 | def follow_user(username):
'Adds the current user as follower of the given user.'
if (not g.user):
abort(401)
whom_id = functions.get_user_id(username)
if (whom_id is None):
abort(404)
db = functions.get_db()
db.execute('insert into follower (who_id, whom_id) values (?, ?)', [session['user_id'], whom_id])
db.commit()
flash(('You are now following "%s"' % username))
return redirect(functions.url_for('/%(username)s', {'username': username})) | Adds the current user as follower of the given user. | controllers/follow.py | follow_user | limciana/updevcamp-2015a-set1b | 0 | python | def follow_user(username):
if (not g.user):
abort(401)
whom_id = functions.get_user_id(username)
if (whom_id is None):
abort(404)
db = functions.get_db()
db.execute('insert into follower (who_id, whom_id) values (?, ?)', [session['user_id'], whom_id])
db.commit()
flash(('You are now following "%s"' % username))
return redirect(functions.url_for('/%(username)s', {'username': username})) | def follow_user(username):
if (not g.user):
abort(401)
whom_id = functions.get_user_id(username)
if (whom_id is None):
abort(404)
db = functions.get_db()
db.execute('insert into follower (who_id, whom_id) values (?, ?)', [session['user_id'], whom_id])
db.commit()
flash(('You are now following "%s"' % username))
return redirect(functions.url_for('/%(username)s', {'username': username}))<|docstring|>Adds the current user as follower of the given user.<|endoftext|> |
97f836c19dbe988cb0ef180ebd9ef366c2d99eb3b63296c94bd74119ee41a4a6 | def unfollow_user(username):
'Removes the current user as follower of the given user.'
if (not g.user):
abort(401)
whom_id = functions.get_user_id(username)
if (whom_id is None):
abort(404)
db = functions.get_db()
db.execute('delete from follower where who_id=? and whom_id=?', [session['user_id'], whom_id])
db.commit()
flash(('You are no longer following "%s"' % username))
return redirect(functions.url_for('/%(username)s', {'username': username})) | Removes the current user as follower of the given user. | controllers/follow.py | unfollow_user | limciana/updevcamp-2015a-set1b | 0 | python | def unfollow_user(username):
if (not g.user):
abort(401)
whom_id = functions.get_user_id(username)
if (whom_id is None):
abort(404)
db = functions.get_db()
db.execute('delete from follower where who_id=? and whom_id=?', [session['user_id'], whom_id])
db.commit()
flash(('You are no longer following "%s"' % username))
return redirect(functions.url_for('/%(username)s', {'username': username})) | def unfollow_user(username):
if (not g.user):
abort(401)
whom_id = functions.get_user_id(username)
if (whom_id is None):
abort(404)
db = functions.get_db()
db.execute('delete from follower where who_id=? and whom_id=?', [session['user_id'], whom_id])
db.commit()
flash(('You are no longer following "%s"' % username))
return redirect(functions.url_for('/%(username)s', {'username': username}))<|docstring|>Removes the current user as follower of the given user.<|endoftext|> |
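A short note on the '?' placeholders both handlers use: values are bound by sqlite3 rather than formatted into the SQL string. A minimal sketch with hypothetical ids:

who_id, whom_id = 7, 42  # hypothetical user ids
db.execute('insert into follower (who_id, whom_id) values (?, ?)', [who_id, whom_id])
# string-formatted SQL (e.g. via % or f-strings) would be open to injection and is avoided here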
e12efa8949de8b848da3855a7d53707d7a034d0e6897383affecdae7d9b79721 | def calCovariance(self, x, y):
'\n calculate the covariance of two matrices\n x: left matrix\n y: right matrix\n \n if the input for x and y are both 1-D vectors, they will be reshaped to (len(vector),1)\n '
if isinstance(x, np.ndarray):
x = self.cp.asarray(x)
if isinstance(y, np.ndarray):
y = self.cp.asarray(y)
if self.DEBUG:
mempool = self.memPool
pinned_mempool = self.cp.get_default_pinned_memory_pool()
print(mempool.get_limit())
print(mempool.used_bytes())
print(mempool.total_bytes())
print(pinned_mempool.n_free_blocks())
temp = self.cp.matmul(x.T, y)
self.cp.cuda.Stream.null.synchronize()
if (self.cp.cuda.runtime.getDeviceCount() == 1):
out = self.cp.asnumpy(temp)
else:
with self.cp.cuda.Device(1):
out = self.cp.array(temp)
del x
del y
del temp
self.memPool.free_all_blocks()
return out | calculate the covariance of two matrices
x: left matrix
y: right matrix
if the input for x and y are both 1-D vectors, they will be reshaped to (len(vector),1) | mTRFpy/CudaCore.py | calCovariance | powerfulbean/mTRFpy | 0 | python | def calCovariance(self, x, y):
'\n calculate the covariance of two matrices\n x: left matrix\n y: right matrix\n \n if the input for x and y are both 1-D vectors, they will be reshaped to (len(vector),1)\n '
if isinstance(x, np.ndarray):
x = self.cp.asarray(x)
if isinstance(y, np.ndarray):
y = self.cp.asarray(y)
if self.DEBUG:
mempool = self.memPool
pinned_mempool = self.cp.get_default_pinned_memory_pool()
print(mempool.get_limit())
print(mempool.used_bytes())
print(mempool.total_bytes())
print(pinned_mempool.n_free_blocks())
temp = self.cp.matmul(x.T, y)
self.cp.cuda.Stream.null.synchronize()
if (self.cp.cuda.runtime.getDeviceCount() == 1):
out = self.cp.asnumpy(temp)
else:
with self.cp.cuda.Device(1):
out = self.cp.array(temp)
del x
del y
del temp
self.memPool.free_all_blocks()
return out | def calCovariance(self, x, y):
'\n calculate the covariance of two matrices\n x: left matrix\n y: right matrix\n \n if the input for x and y are both 1-D vectors, they will be reshaped to (len(vector),1)\n '
if isinstance(x, np.ndarray):
x = self.cp.asarray(x)
if isinstance(y, np.ndarray):
y = self.cp.asarray(y)
if self.DEBUG:
mempool = self.memPool
pinned_mempool = self.cp.get_default_pinned_memory_pool()
print(mempool.get_limit())
print(mempool.used_bytes())
print(mempool.total_bytes())
print(pinned_mempool.n_free_blocks())
temp = self.cp.matmul(x.T, y)
self.cp.cuda.Stream.null.synchronize()
if (self.cp.cuda.runtime.getDeviceCount() == 1):
out = self.cp.asnumpy(temp)
else:
with self.cp.cuda.Device(1):
out = self.cp.array(temp)
del x
del y
del temp
self.memPool.free_all_blocks()
return out<|docstring|>calculate the covariance of two matrices
x: left matrix
y: right matrix
if the input for x and y are both 1-D vectors, they will be reshaped to (len(vector),1)<|endoftext|> |
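A CPU-only sanity check of what calCovariance computes (X.T @ Y), using NumPy alone; the shapes are illustrative:

import numpy as np

X = np.random.rand(1000, 3)  # (samples, features)
Y = np.random.rand(1000, 2)
cov_xy = X.T @ Y             # the product calCovariance returns as a NumPy array
assert cov_xy.shape == (3, 2)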
0545f2368c11409d250ac4b274250561b725a04be365ba899846d0348d64d0ee | def calSelfCovariance(self, x):
'\n calculate the covariance of a matrix with itself\n x: input matrix\n \n if the input x is a 1-D vector, it will be reshaped to (len(vector),1)\n '
if isinstance(x, np.ndarray):
x = self.cp.asarray(x)
if self.DEBUG:
mempool = self.memPool
pinned_mempool = self.cp.get_default_pinned_memory_pool()
print(mempool.get_limit())
print(mempool.used_bytes())
print(mempool.total_bytes())
print(pinned_mempool.n_free_blocks())
temp = self.cp.matmul(x.T, x)
self.cp.cuda.Stream.null.synchronize()
out = self.cp.asnumpy(temp)
del x
del temp
self.memPool.free_all_blocks()
return out | calculate the covariance of a matrix with itself
x: input matrix
if the input x is a 1-D vector, it will be reshaped to (len(vector),1) | mTRFpy/CudaCore.py | calSelfCovariance | powerfulbean/mTRFpy | 0 | python | def calSelfCovariance(self, x):
'\n calculate the covariance of a matrix with itself\n x: input matrix\n \n if the input x is a 1-D vector, it will be reshaped to (len(vector),1)\n '
if isinstance(x, np.ndarray):
x = self.cp.asarray(x)
if self.DEBUG:
mempool = self.memPool
pinned_mempool = self.cp.get_default_pinned_memory_pool()
print(mempool.get_limit())
print(mempool.used_bytes())
print(mempool.total_bytes())
print(pinned_mempool.n_free_blocks())
temp = self.cp.matmul(x.T, x)
self.cp.cuda.Stream.null.synchronize()
out = self.cp.asnumpy(temp)
del x
del temp
self.memPool.free_all_blocks()
return out | def calSelfCovariance(self, x):
'\n calculate the covariance of a matrix with itself\n x: input matrix\n \n if the input x is a 1-D vector, it will be reshaped to (len(vector),1)\n '
if isinstance(x, np.ndarray):
x = self.cp.asarray(x)
if self.DEBUG:
mempool = self.memPool
pinned_mempool = self.cp.get_default_pinned_memory_pool()
print(mempool.get_limit())
print(mempool.used_bytes())
print(mempool.total_bytes())
print(pinned_mempool.n_free_blocks())
temp = self.cp.matmul(x.T, x)
self.cp.cuda.Stream.null.synchronize()
out = self.cp.asnumpy(temp)
del x
del temp
self.memPool.free_all_blocks()
return out<|docstring|>calculate the covariance of a matrix with itself
x: input matrix
if the input x is a 1-D vector, it will be reshaped to (len(vector),1)<|endoftext|>
c453a0e42f1fa0576bf905f8b84de1dc53c96e5fd27d8ef7f298336d50e1a31c | def construct_channel(self, *args, **kwargs):
'\n Creates ChannelNode and builds topic tree\n Args:\n - args: arguments passed in during upload_channel (currently None)\n - kwargs: extra arguments and options not handled by `uploadchannel`.\n For example, add the command line option lang="fr" and the string\n "fr" will be passed along to `construct_channel` as kwargs[\'lang\'].\n Returns: ChannelNode\n '
channel = self.get_channel(*args, **kwargs)
raise NotImplementedError('construct_channel method not implemented yet...')
raise_for_invalid_channel(channel)
return channel | Creates ChannelNode and builds topic tree
Args:
- args: arguments passed in during upload_channel (currently None)
- kwargs: extra arguments and options not handled by `uploadchannel`.
For example, add the command line option lang="fr" and the string
"fr" will be passed along to `construct_channel` as kwargs['lang'].
Returns: ChannelNode | sushichef.py | construct_channel | learningequality/sushi-chef-proyecto-biosfera | 0 | python | def construct_channel(self, *args, **kwargs):
'\n Creates ChannelNode and builds topic tree\n Args:\n - args: arguments passed in during upload_channel (currently None)\n - kwargs: extra arguments and options not handled by `uploadchannel`.\n For example, add the command line option lang="fr" and the string\n "fr" will be passed along to `construct_channel` as kwargs[\'lang\'].\n Returns: ChannelNode\n '
channel = self.get_channel(*args, **kwargs)
raise NotImplementedError('construct_channel method not implemented yet...')
raise_for_invalid_channel(channel)
return channel | def construct_channel(self, *args, **kwargs):
'\n Creates ChannelNode and builds topic tree\n Args:\n - args: arguments passed in during upload_channel (currently None)\n - kwargs: extra arguments and options not handled by `uploadchannel`.\n For example, add the command line option lang="fr" and the string\n "fr" will be passed along to `construct_channel` as kwargs[\'lang\'].\n Returns: ChannelNode\n '
channel = self.get_channel(*args, **kwargs)
raise NotImplementedError('construct_channel method not implemented yet...')
raise_for_invalid_channel(channel)
return channel<|docstring|>Creates ChannelNode and builds topic tree
Args:
- args: arguments passed in during upload_channel (currently None)
- kwargs: extra arguments and options not handled by `uploadchannel`.
For example, add the command line option lang="fr" and the string
"fr" will be passed along to `construct_channel` as kwargs['lang'].
Returns: ChannelNode<|endoftext|> |
6020fa7bba748a2dbc68a20972980e5293c4949951f7666d1037a4a741e26cdc | def _benchmark(self, gradient_type, num_gpus, mode, loss_scaling):
'Benchmarks loss scaling.\n\n We run a simple model with several scalar variables. The loss is the sum of\n all variables. The model is simple because we want to measure only the\n performance of loss scaling, not the performance of the model itself.\n\n Args:\n gradient_type: "optimizer" or "gradient_tape". How gradients are computed.\n "optimizer" uses Optimizer.minimize. "gradient_tape" uses\n GradientTape.gradient.\n num_gpus: The number of GPUs to use. Must be at least 1.\n mode: "eager", "tf_function", or "graph". "eager" means to use eager mode.\n "tf_function" means to use eager mode where all computations are wrapped\n in a tf.function. "graph" means to use TensorFlow 1\'s graph mode with a\n tf.compat.v1.Session. "graph" is unsupported with a\n LossScaleGradientTape.\n loss_scaling: "fixed", "dynamic", or None. The type of loss scaling to\n use. None means use no loss scaling, which is useful as a baseline to\n see how much slower loss scaling is in comparison.\n '
if (mode == 'graph'):
graph = ops.Graph()
ctx_mgr = graph.as_default()
elif (mode == 'eager'):
ctx_mgr = context.eager_mode()
else:
assert (mode == 'tf_function')
ctx_mgr = context.eager_mode()
ls_str = (loss_scaling or 'no_loss_scaling')
name = ('%s_%d_GPU_%s_%s' % (gradient_type, num_gpus, mode, ls_str))
with ctx_mgr, _get_strategy(num_gpus).scope() as strategy:
opt = adam.Adam()
if (loss_scaling == 'fixed'):
loss_scale = loss_scale_module.FixedLossScale(2.0)
elif (loss_scaling == 'dynamic'):
increment_period = 1000000
loss_scale = loss_scale_module.DynamicLossScale(initial_loss_scale=2.0, increment_period=increment_period)
else:
assert (loss_scaling is None)
loss_scale = None
num_vars = 200
num_warmup_iters = 1
num_iters = 20
var_list = [variables.Variable(i, dtype='float32') for i in range(num_vars)]
def get_loss():
return math_ops.add_n(var_list)
if (gradient_type == 'gradient_tape'):
tape_cls = ((lambda : lsgt_module.LossScaleGradientTape(loss_scale)) if loss_scale else backprop.GradientTape)
def minimize_fn():
with tape_cls() as tape:
loss = get_loss()
grads = tape.gradient(loss, var_list)
return opt.apply_gradients(zip(grads, var_list))
else:
assert (gradient_type == 'optimizer')
if loss_scale:
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
def minimize_fn():
return opt.minimize(get_loss, var_list)
if (mode == 'graph'):
run_op = strategy.run(minimize_fn)
init_op = variables.global_variables_initializer()
with session_module.Session() as sess:
sess.run(init_op)
self.run_op_benchmark(sess, run_op, min_iters=num_iters, burn_iters=num_warmup_iters, name=name)
return
def run_fn():
strategy.run(minimize_fn)
if (mode == 'tf_function'):
run_fn = def_function.function(run_fn)
for _ in range(num_warmup_iters):
run_fn()
start = time.time()
for _ in range(num_iters):
run_fn()
end = time.time()
self.report_benchmark(iters=num_iters, wall_time=((end - start) / num_iters), name=name) | Benchmarks loss scaling.
We run a simple model with several scalar variables. The loss is the sum of
all variables. The model is simple because we want to measure only the
performance of loss scaling, not the performance of the model itself.
Args:
gradient_type: "optimizer" or "gradient_tape". How gradients are computed.
"optimizer" uses Optimizer.minimize. "gradient_tape" uses
GradientTape.gradient.
num_gpus: The number of GPUs to use. Must be at least 1.
mode: "eager", "tf_function", or "graph". "eager" means to use eager mode.
"tf_function" means to use eager mode where all computations are wrapped
in a tf.function. "graph" means to use TensorFlow 1's graph mode with a
tf.compat.v1.Session. "graph" is unsupported with a
LossScaleGradientTape.
loss_scaling: "fixed", "dynamic", or None. The type of loss scaling to
use. None means use no loss scaling, which is useful as a baseline to
see how much slower loss scaling is in comparison. | tensorflow/python/keras/mixed_precision/experimental/loss_scale_benchmark.py | _benchmark | ErikGro/tensorflow | 78 | python | def _benchmark(self, gradient_type, num_gpus, mode, loss_scaling):
'Benchmarks loss scaling.\n\n We run a simple model with several scalar variables. The loss is the sum of\n all variables. The model is simple because we want to measure only the\n performance of loss scaling, not the performance of the model itself.\n\n Args:\n gradient_type: "optimizer" or "gradient_tape". How gradients are computed.\n "optimizer" uses Optimizer.minimize. "gradient_tape" uses\n GradientTape.gradient.\n num_gpus: The number of GPUs to use. Must be at least 1.\n mode: "eager", "tf_function", or "graph". "eager" means to use eager mode.\n "tf_function" means to use eager mode where all computations are wrapped\n in a tf.function. "graph" means to use TensorFlow 1\'s graph mode with a\n tf.compat.v1.Session. "graph" is unsupported with a\n LossScaleGradientTape.\n loss_scaling: "fixed", "dynamic", or None. The type of loss scaling to\n use. None means use no loss scaling, which is useful as a baseline to\n see how much slower loss scaling is in comparison.\n '
if (mode == 'graph'):
graph = ops.Graph()
ctx_mgr = graph.as_default()
elif (mode == 'eager'):
ctx_mgr = context.eager_mode()
else:
assert (mode == 'tf_function')
ctx_mgr = context.eager_mode()
ls_str = (loss_scaling or 'no_loss_scaling')
name = ('%s_%d_GPU_%s_%s' % (gradient_type, num_gpus, mode, ls_str))
with ctx_mgr, _get_strategy(num_gpus).scope() as strategy:
opt = adam.Adam()
if (loss_scaling == 'fixed'):
loss_scale = loss_scale_module.FixedLossScale(2.0)
elif (loss_scaling == 'dynamic'):
increment_period = 1000000
loss_scale = loss_scale_module.DynamicLossScale(initial_loss_scale=2.0, increment_period=increment_period)
else:
assert (loss_scaling is None)
loss_scale = None
num_vars = 200
num_warmup_iters = 1
num_iters = 20
var_list = [variables.Variable(i, dtype='float32') for i in range(num_vars)]
def get_loss():
return math_ops.add_n(var_list)
if (gradient_type == 'gradient_tape'):
tape_cls = ((lambda : lsgt_module.LossScaleGradientTape(loss_scale)) if loss_scale else backprop.GradientTape)
def minimize_fn():
with tape_cls() as tape:
loss = get_loss()
grads = tape.gradient(loss, var_list)
return opt.apply_gradients(zip(grads, var_list))
else:
assert (gradient_type == 'optimizer')
if loss_scale:
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
def minimize_fn():
return opt.minimize(get_loss, var_list)
if (mode == 'graph'):
run_op = strategy.run(minimize_fn)
init_op = variables.global_variables_initializer()
with session_module.Session() as sess:
sess.run(init_op)
self.run_op_benchmark(sess, run_op, min_iters=num_iters, burn_iters=num_warmup_iters, name=name)
return
def run_fn():
strategy.run(minimize_fn)
if (mode == 'tf_function'):
run_fn = def_function.function(run_fn)
for _ in range(num_warmup_iters):
run_fn()
start = time.time()
for _ in range(num_iters):
run_fn()
end = time.time()
self.report_benchmark(iters=num_iters, wall_time=((end - start) / num_iters), name=name) | def _benchmark(self, gradient_type, num_gpus, mode, loss_scaling):
'Benchmarks loss scaling.\n\n We run a simple model with several scalar variables. The loss is the sum of\n all variables. The model is simple because we want to measure only the\n performance of loss scaling, not the performance of the model itself.\n\n Args:\n gradient_type: "optimizer" or "gradient_tape". How gradients are computed.\n "optimizer" uses Optimizer.minimize. "gradient_tape" uses\n GradientTape.gradient.\n num_gpus: The number of GPUs to use. Must be at least 1.\n mode: "eager", "tf_function", or "graph". "eager" means to use eager mode.\n "tf_function" means to use eager mode where all computations are wrapped\n in a tf.function. "graph" means to use TensorFlow 1\'s graph mode with a\n tf.compat.v1.Session. "graph" is unsupported with a\n LossScaleGradientTape.\n loss_scaling: "fixed", "dynamic", or None. The type of loss scaling to\n use. None means use no loss scaling, which is useful as a baseline to\n see how much slower loss scaling is in comparison.\n '
if (mode == 'graph'):
graph = ops.Graph()
ctx_mgr = graph.as_default()
elif (mode == 'eager'):
ctx_mgr = context.eager_mode()
else:
assert (mode == 'tf_function')
ctx_mgr = context.eager_mode()
ls_str = (loss_scaling or 'no_loss_scaling')
name = ('%s_%d_GPU_%s_%s' % (gradient_type, num_gpus, mode, ls_str))
with ctx_mgr, _get_strategy(num_gpus).scope() as strategy:
opt = adam.Adam()
if (loss_scaling == 'fixed'):
loss_scale = loss_scale_module.FixedLossScale(2.0)
elif (loss_scaling == 'dynamic'):
increment_period = 1000000
loss_scale = loss_scale_module.DynamicLossScale(initial_loss_scale=2.0, increment_period=increment_period)
else:
assert (loss_scaling is None)
loss_scale = None
num_vars = 200
num_warmup_iters = 1
num_iters = 20
var_list = [variables.Variable(i, dtype='float32') for i in range(num_vars)]
def get_loss():
return math_ops.add_n(var_list)
if (gradient_type == 'gradient_tape'):
tape_cls = ((lambda : lsgt_module.LossScaleGradientTape(loss_scale)) if loss_scale else backprop.GradientTape)
def minimize_fn():
with tape_cls() as tape:
loss = get_loss()
grads = tape.gradient(loss, var_list)
return opt.apply_gradients(zip(grads, var_list))
else:
assert (gradient_type == 'optimizer')
if loss_scale:
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
def minimize_fn():
return opt.minimize(get_loss, var_list)
if (mode == 'graph'):
run_op = strategy.run(minimize_fn)
init_op = variables.global_variables_initializer()
with session_module.Session() as sess:
sess.run(init_op)
self.run_op_benchmark(sess, run_op, min_iters=num_iters, burn_iters=num_warmup_iters, name=name)
return
def run_fn():
strategy.run(minimize_fn)
if (mode == 'tf_function'):
run_fn = def_function.function(run_fn)
for _ in range(num_warmup_iters):
run_fn()
start = time.time()
for _ in range(num_iters):
run_fn()
end = time.time()
self.report_benchmark(iters=num_iters, wall_time=((end - start) / num_iters), name=name)<|docstring|>Benchmarks loss scaling.
We run a simple model with several scalar variables. The loss is the sum of
all variables. The model is simple because we want to measure only the
performance of loss scaling, not the performance of the model itself.
Args:
gradient_type: "optimizer" or "gradient_tape". How gradients are computed.
"optimizer" uses Optimizer.minimize. "gradient_tape" uses
GradientTape.gradient.
num_gpus: The number of GPUs to use. Must be at least 1.
mode: "eager", "tf_function", or "graph". "eager" means to use eager mode.
"tf_function" means to use eager mode where all computations are wrapped
in a tf.function. "graph" means to use TensorFlow 1's graph mode with a
tf.compat.v1.Session. "graph" is unsupported with a
LossScaleGradientTape.
loss_scaling: "fixed", "dynamic", or None. The type of loss scaling to
use. None means use no loss scaling, which is useful as a baseline to
see how much slower loss scaling is in comparison.<|endoftext|> |
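One concrete configuration of the benchmark above; the enclosing benchmark class name is an assumption:

bench = LossScaleBenchmark()  # hypothetical tf.test.Benchmark subclass holding _benchmark
bench._benchmark(gradient_type='optimizer', num_gpus=1,
                 mode='tf_function', loss_scaling='dynamic')
# reports wall time per iteration under the name 'optimizer_1_GPU_tf_function_dynamic'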
9db744615ff4c7f1f43960f58a0517751eeb9feebb531d64beca2cc57a6834e4 | def CheckCommonality(lines, col):
"\n This function will check for the commonality of 1's vs 0's for a list\n of lines (given as lines) for a specific column position (given as col)\n and return which is greater or if they're equal.\n\n Parameters\n ----------\n lines : list\n list of strings of binary numbers.\n col : int\n column position.\n\n Returns\n -------\n '1', '0', or '='.\n\n "
(rows, cols) = (len(lines), len(lines[0]))
storage_array = np.zeros((rows, cols))
for (i, line) in enumerate(lines):
for (j, bit) in enumerate(line):
bit = int(bit)
storage_array[(i, j)] = bit
count = Counter(storage_array[:, col])
zero_count = count[0.0]
one_count = count[1.0]
if (zero_count > one_count):
return '0'
elif (one_count > zero_count):
return '1'
else:
return '=' | This function will check for the commonality of 1's vs 0's for a list
of lines (given as lines) for a specific column position (given as col)
and return which is greater or if they're equal.
Parameters
----------
lines : list
list of strings of binary numbers.
col : int
column position.
Returns
-------
'1', '0', or '='. | 2021/day3.py | CheckCommonality | wapisani/AoC | 0 | python | def CheckCommonality(lines, col):
"\n This function will check for the commonality of 1's vs 0's for a list\n of lines (given as lines) for a specific column position (given as col)\n and return which is greater or if they're equal.\n\n Parameters\n ----------\n lines : list\n list of strings of binary numbers.\n col : int\n column position.\n\n Returns\n -------\n '1', '0', or '='.\n\n "
(rows, cols) = (len(lines), len(lines[0]))
storage_array = np.zeros((rows, cols))
for (i, line) in enumerate(lines):
for (j, bit) in enumerate(line):
bit = int(bit)
storage_array[(i, j)] = bit
count = Counter(storage_array[:, col])
zero_count = count[0.0]
one_count = count[1.0]
if (zero_count > one_count):
return '0'
elif (one_count > zero_count):
return '1'
else:
return '=' | def CheckCommonality(lines, col):
"\n This function will check for the commonality of 1's vs 0's for a list\n of lines (given as lines) for a specific column position (given as col)\n and return which is greater or if they're equal.\n\n Parameters\n ----------\n lines : list\n list of strings of binary numbers.\n col : int\n column position.\n\n Returns\n -------\n '1', '0', or '='.\n\n "
(rows, cols) = (len(lines), len(lines[0]))
storage_array = np.zeros((rows, cols))
for (i, line) in enumerate(lines):
for (j, bit) in enumerate(line):
bit = int(bit)
storage_array[(i, j)] = bit
count = Counter(storage_array[:, col])
zero_count = count[0.0]
one_count = count[1.0]
if (zero_count > one_count):
return '0'
elif (one_count > zero_count):
return '1'
else:
return '='<|docstring|>This function will check for the commonality of 1's vs 0's for a list
of lines (given as lines) for a specific column position (given as col)
and return which is greater or if they're equal.
Parameters
----------
lines : list
list of strings of binary numbers.
col : int
column position.
Returns
-------
'1', '0', or '='.<|endoftext|> |
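The function above builds a full NumPy matrix just to count one column. A pure-Python equivalent for the same input format shows the intent more directly; the sample bit strings below are invented, not from the record.

from collections import Counter

def check_commonality_simple(lines, col):
    # Count '0' and '1' characters in a single column of the bit strings.
    count = Counter(line[col] for line in lines)
    if count['0'] > count['1']:
        return '0'
    if count['1'] > count['0']:
        return '1'
    return '='

print(check_commonality_simple(['00100', '11110', '10110', '10111'], 0))  # -> '1'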
b220288e199e26ebb2105ce4e427ae25184436a3f4cb75e0bb979a0e8e98ed4e | def validated(base_model=None):
'\n Decorates a constructor with typed arguments with validation logic which\n delegates to a Pydantic model. If `base_model` is not provided, an implicit\n model is synthesized. If `base_model` is provided, its fields and types\n should be consistent with the constructor arguments and the model should\n extend `ConfigBase`.\n '
def validator(ctor):
ctor_clsnme = dict(inspect.getmembers(ctor))['__qualname__'].split('.')[0]
ctor_params = inspect.signature(ctor).parameters
ctor_fields = {param.name: ((param.annotation if (param.annotation != inspect.Parameter.empty) else Any), (param.default if (param.default != inspect.Parameter.empty) else ...)) for param in ctor_params.values() if ((param.name != 'self') and (param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD))}
if (base_model is None):
CtorModel = create_model(f'{ctor_clsnme}Model', __config__=ConfigBase.Config, **ctor_fields)
else:
CtorModel = create_model(f'{ctor_clsnme}Model', __base__=base_model, **ctor_fields)
@functools.wraps(ctor)
def ctor_wrapper(*args, **kwargs):
(self, *args) = args
nmargs = {name: arg for ((name, param), arg) in zip(list(ctor_params.items()), ([self] + args)) if (name != 'self')}
model = CtorModel(**{**nmargs, **kwargs})
all_args = {**nmargs, **kwargs, **model.__values__}
if (not getattr(self, '__init_args__', {})):
self.__init_args__ = OrderedDict({name: arg for (name, arg) in sorted(all_args.items()) if (type(arg) != mx.gluon.ParameterDict)})
self.__class__.__getnewargs_ex__ = validated_getnewargs_ex
self.__class__.__repr__ = validated_repr
return ctor(self, **all_args)
setattr(ctor_wrapper, 'Model', CtorModel)
return ctor_wrapper
return validator | Decorates a constructor with typed arguments with validation logic which
delegates to a Pydantic model. If `base_model` is not provided, an implicit
model is synthesized. If `base_model` is provided, its fields and types
should be consistent with the constructor arguments and the model should
extend `ConfigBase`. | gluonts/core/component.py | validated | lostella/gluon-ts | 0 | python | def validated(base_model=None):
'\n Decorates a constructor with typed arguments with validation logic which\n delegates to a Pydantic model. If `base_model` is not provided, an implicit\n model is synthesized. If `base_model` is provided, its fields and types\n should be consistent with the constructor arguments and the model should\n extend `ConfigBase`.\n '
def validator(ctor):
ctor_clsnme = dict(inspect.getmembers(ctor))['__qualname__'].split('.')[0]
ctor_params = inspect.signature(ctor).parameters
ctor_fields = {param.name: ((param.annotation if (param.annotation != inspect.Parameter.empty) else Any), (param.default if (param.default != inspect.Parameter.empty) else ...)) for param in ctor_params.values() if ((param.name != 'self') and (param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD))}
if (base_model is None):
CtorModel = create_model(f'{ctor_clsnme}Model', __config__=ConfigBase.Config, **ctor_fields)
else:
CtorModel = create_model(f'{ctor_clsnme}Model', __base__=base_model, **ctor_fields)
@functools.wraps(ctor)
def ctor_wrapper(*args, **kwargs):
(self, *args) = args
nmargs = {name: arg for ((name, param), arg) in zip(list(ctor_params.items()), ([self] + args)) if (name != 'self')}
model = CtorModel(**{**nmargs, **kwargs})
all_args = {**nmargs, **kwargs, **model.__values__}
if (not getattr(self, '__init_args__', {})):
self.__init_args__ = OrderedDict({name: arg for (name, arg) in sorted(all_args.items()) if (type(arg) != mx.gluon.ParameterDict)})
self.__class__.__getnewargs_ex__ = validated_getnewargs_ex
self.__class__.__repr__ = validated_repr
return ctor(self, **all_args)
setattr(ctor_wrapper, 'Model', CtorModel)
return ctor_wrapper
return validator | def validated(base_model=None):
'\n Decorates a constructor with typed arguments with validation logic which\n delegates to a Pydantic model. If `base_model` is not provided, an implicit\n model is synthesized. If `base_model` is provided, its fields and types\n should be consistent with the constructor arguments and the model should\n extend `ConfigBase`.\n '
def validator(ctor):
ctor_clsnme = dict(inspect.getmembers(ctor))['__qualname__'].split('.')[0]
ctor_params = inspect.signature(ctor).parameters
ctor_fields = {param.name: ((param.annotation if (param.annotation != inspect.Parameter.empty) else Any), (param.default if (param.default != inspect.Parameter.empty) else ...)) for param in ctor_params.values() if ((param.name != 'self') and (param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD))}
if (base_model is None):
CtorModel = create_model(f'{ctor_clsnme}Model', __config__=ConfigBase.Config, **ctor_fields)
else:
CtorModel = create_model(f'{ctor_clsnme}Model', __base__=base_model, **ctor_fields)
@functools.wraps(ctor)
def ctor_wrapper(*args, **kwargs):
(self, *args) = args
nmargs = {name: arg for ((name, param), arg) in zip(list(ctor_params.items()), ([self] + args)) if (name != 'self')}
model = CtorModel(**{**nmargs, **kwargs})
all_args = {**nmargs, **kwargs, **model.__values__}
if (not getattr(self, '__init_args__', {})):
self.__init_args__ = OrderedDict({name: arg for (name, arg) in sorted(all_args.items()) if (type(arg) != mx.gluon.ParameterDict)})
self.__class__.__getnewargs_ex__ = validated_getnewargs_ex
self.__class__.__repr__ = validated_repr
return ctor(self, **all_args)
setattr(ctor_wrapper, 'Model', CtorModel)
return ctor_wrapper
return validator<|docstring|>Decorates a constructor with typed arguments with validation logic which
delegates to a Pydantic model. If `base_model` is not provided, an implicit
model is synthesized. If `base_model` is provided, its fields and types
should be consistent with the constructor arguments and the model should
extend `ConfigBase`.<|endoftext|> |
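A hedged usage sketch of the decorator above: MyEstimator and its fields are invented for illustration, and the import path is the one given in the record. Per the wrapper's logic, pydantic coerces and validates the constructor arguments and records them in __init_args__.

from gluonts.core.component import validated

class MyEstimator:
    @validated()
    def __init__(self, num_layers: int, dropout: float = 0.1) -> None:
        self.num_layers = num_layers
        self.dropout = dropout

est = MyEstimator(num_layers='3')   # pydantic coerces '3' to the int 3
print(est.num_layers, est.dropout)  # 3 0.1
print(est.__init_args__)            # arguments recorded by ctor_wrapper
# MyEstimator(num_layers='three')   # would raise a pydantic ValidationError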
1118fbf5601c0a0eb9d2ce700e0ff0abd62a02f9a03812953623e08a38f424cb | def __init__(self, text: str, color: Optional[List[RGBColor]], flashing: Optional[FlashingType]=None, flash_mask: Optional[str]=None, transition: Optional[dict]=None, transition_out: Optional[dict]=None, priority: int=0, key: str=None):
'Class initializer.'
self.text = text
self.colors = color
self.flashing = flashing
self.flash_mask = flash_mask
self.transition = transition
self.transition_out = transition_out
self.priority = priority
self.key = key | Class initializer. | mpf/devices/segment_display/text_stack_entry.py | __init__ | rochuck/mpf | 163 | python | def __init__(self, text: str, color: Optional[List[RGBColor]], flashing: Optional[FlashingType]=None, flash_mask: Optional[str]=None, transition: Optional[dict]=None, transition_out: Optional[dict]=None, priority: int=0, key: str=None):
self.text = text
self.colors = color
self.flashing = flashing
self.flash_mask = flash_mask
self.transition = transition
self.transition_out = transition_out
self.priority = priority
self.key = key | def __init__(self, text: str, color: Optional[List[RGBColor]], flashing: Optional[FlashingType]=None, flash_mask: Optional[str]=None, transition: Optional[dict]=None, transition_out: Optional[dict]=None, priority: int=0, key: str=None):
self.text = text
self.colors = color
self.flashing = flashing
self.flash_mask = flash_mask
self.transition = transition
self.transition_out = transition_out
self.priority = priority
self.key = key<|docstring|>Class initializer.<|endoftext|> |
63ce90a832bfa7ffd75055c5f5be9a31969d676caee9cc79a57f7736ce86e785 | def __repr__(self):
'Return str representation.'
return '<TextStackEntry: {} (priority: {}, key: {} colors: {}) >'.format(self.text, self.priority, self.key, self.colors) | Return str representation. | mpf/devices/segment_display/text_stack_entry.py | __repr__ | rochuck/mpf | 163 | python | def __repr__(self):
return '<TextStackEntry: {} (priority: {}, key: {} colors: {}) >'.format(self.text, self.priority, self.key, self.colors) | def __repr__(self):
return '<TextStackEntry: {} (priority: {}, key: {} colors: {}) >'.format(self.text, self.priority, self.key, self.colors)<|docstring|>Return str representation.<|endoftext|> |
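A small usage sketch for the two records above; the import path comes from the records, while the text, priority, and key values are invented. Note the design quirk that the parameter is named color but is stored as self.colors, which is what __repr__ prints.

from mpf.devices.segment_display.text_stack_entry import TextStackEntry

entry = TextStackEntry('PLAYER 1', color=None, priority=10, key='score')
print(repr(entry))
# -> <TextStackEntry: PLAYER 1 (priority: 10, key: score colors: None) >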
fd2b50d675a256ac49193cd6e47a0c8166bf7a33e70c5dbba322a7cb293f8637 | @Sakuya.interactions(guild=MY_GUILD)
async def join(client, event):
'Joins to voice channel.'
voice_state = event.voice_state
if (voice_state is None):
return 'You are not at a voice channel!'
try:
(await client.join_voice(voice_state.channel))
except TimeoutError:
return 'Timed out while trying to connect.'
except RuntimeError:
return 'The client cannot play voice, some libraries are not loaded.'
return f'Joined to {voice_state.channel.name}' | Joins to voice channel. | docs/examples/e11_basic_voice/main.py | join | monoidic/hata | 173 | python | @Sakuya.interactions(guild=MY_GUILD)
async def join(client, event):
voice_state = event.voice_state
if (voice_state is None):
return 'You are not at a voice channel!'
try:
(await client.join_voice(voice_state.channel))
except TimeoutError:
return 'Timed out while trying to connect.'
except RuntimeError:
return 'The client cannot play voice, some libraries are not loaded.'
return f'Joined to {voice_state.channel.name}' | @Sakuya.interactions(guild=MY_GUILD)
async def join(client, event):
voice_state = event.voice_state
if (voice_state is None):
return 'You are not at a voice channel!'
try:
(await client.join_voice(voice_state.channel))
except TimeoutError:
return 'Timed out while trying to connect.'
except RuntimeError:
return 'The client cannot play voice, some libraries are not loaded.'
return f'Joined to {voice_state.channel.name}'<|docstring|>Joins to voice channel.<|endoftext|> |
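These slash commands assume a client and guild defined elsewhere in the example file. Below is a sketch of that setup following hata's documented pattern; the token and guild id are placeholders, and extensions='slash' is assumed to be what enables the @Sakuya.interactions decorator used above.

from hata import Client, Guild

MY_GUILD = Guild.precreate(123456789012345678)  # placeholder guild id
Sakuya = Client('BOT_TOKEN_HERE', extensions='slash')

# ... the join/yt commands shown in these records are registered here ...

Sakuya.start()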
34752cd21f1eb1eaa92ed7e5a9edae65b6099073da7d7f0b85fcbbdde46bede5 | @Sakuya.interactions(guild=MY_GUILD)
async def yt(event, url: ('str', 'The name or the url of a track')=None):
'Plays from youtube.'
if (YTAudio is None):
return 'This option is unavailable :c'
if (url is None):
return 'Please define what to play.'
voice_client = event.voice_client
if (voice_client is None):
return 'There is no voice client at your guild.'
try:
source = (await YTAudio(url, stream=True))
except DownloadError:
return 'Error while downloading'
if voice_client.append(source):
content = 'Now playing'
else:
content = 'Added to queue'
return f'{content} {source.title}!' | Plays from youtube. | docs/examples/e11_basic_voice/main.py | yt | monoidic/hata | 173 | python | @Sakuya.interactions(guild=MY_GUILD)
async def yt(event, url: ('str', 'The name or the url of a track')=None):
if (YTAudio is None):
return 'This option is unavailable :c'
if (url is None):
return 'Please define what to play.'
voice_client = event.voice_client
if (voice_client is None):
return 'There is no voice client at your guild.'
try:
source = (await YTAudio(url, stream=True))
except DownloadError:
return 'Error while downloading'
if voice_client.append(source):
content = 'Now playing'
else:
content = 'Added to queue'
return f'{content} {source.title}!' | @Sakuya.interactions(guild=MY_GUILD)
async def yt(event, url: ('str', 'The name or the url of a track')=None):
if (YTAudio is None):
return 'This option is unavailable :c'
if (url is None):
return 'Please define what to play.'
voice_client = event.voice_client
if (voice_client is None):
return 'There is no voice client at your guild.'
try:
source = (await YTAudio(url, stream=True))
except DownloadError:
return 'Error while downloading'
if voice_client.append(source):
content = 'Now playing'
else:
content = 'Added to queue'
return f'{content} {source.title}!'<|docstring|>Plays from youtube.<|endoftext|> |
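The 'Now playing' / 'Added to queue' branch hinges on voice_client.append returning a truthy value when nothing was playing (the source starts immediately) and a falsy one when it was queued. That contract is inferred from the record; the stub below is invented to make the decision testable without Discord.

class StubVoiceClient:
    def __init__(self):
        self.playing = None
        self.queue = []

    def append(self, source):
        if self.playing is None:
            self.playing = source  # nothing playing: start this source now
            return True
        self.queue.append(source)  # otherwise queue it behind the current track
        return False

player = StubVoiceClient()
print(player.append('track-1'))  # True  -> "Now playing"
print(player.append('track-2'))  # False -> "Added to queue"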