Dataset schema: one record per extracted Python function. Per-column statistics:

  Column              Type    Values
  ------------------  ------  ---------------------------
  nwo                 string  lengths 5 to 106
  sha                 string  lengths 40 (fixed)
  path                string  lengths 4 to 174
  language            string  1 class ("python")
  identifier          string  lengths 1 to 140
  parameters          string  lengths 0 to 87.7k
  argument_list       string  1 class
  return_statement    string  lengths 0 to 426k
  docstring           string  lengths 0 to 64.3k
  docstring_summary   string  lengths 0 to 26.3k
  docstring_tokens    list    tokenized docstring
  function            string  lengths 18 to 4.83M
  function_tokens     list    tokenized function
  url                 string  lengths 83 to 304

The sample records below list these fields in the same order. Empty string
fields are omitted from a record; empty token lists appear as [].
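Since every record carries the same fourteen fields, rows in this shape are straightforward to scan programmatically. A minimal sketch, assuming the records are stored as JSON Lines with one object per function (the filename functions.jsonl and the storage format are assumptions, not something the dump specifies):

    import json

    # Assumed layout: one JSON object per line, with the fields
    # tabulated in the schema above.
    with open("functions.jsonl", encoding="utf-8") as handle:
        for line in handle:
            record = json.loads(line)
            # Keep only functions that actually document themselves.
            if record["docstring_summary"]:
                print(record["nwo"], record["identifier"], record["url"])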
tahoe-lafs/tahoe-lafs
766a53b5208c03c45ca0a98e97eee76870276aa1
src/allmydata/client.py
python
_make_secret
()
return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"
Returns a base32-encoded random secret of hashutil.CRYPTO_VAL_SIZE bytes.
Returns a base32-encoded random secret of hashutil.CRYPTO_VAL_SIZE bytes.
[ "Returns", "a", "base32", "-", "encoded", "random", "secret", "of", "hashutil", ".", "CRYPTO_VAL_SIZE", "bytes", "." ]
def _make_secret():
    """
    Returns a base32-encoded random secret of hashutil.CRYPTO_VAL_SIZE bytes.
    """
    return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"
[ "def", "_make_secret", "(", ")", ":", "return", "base32", ".", "b2a", "(", "os", ".", "urandom", "(", "hashutil", ".", "CRYPTO_VAL_SIZE", ")", ")", "+", "b\"\\n\"" ]
https://github.com/tahoe-lafs/tahoe-lafs/blob/766a53b5208c03c45ca0a98e97eee76870276aa1/src/allmydata/client.py#L147-L152
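The record above is small enough to restate with only the standard library. A rough, self-contained sketch: base64.b32encode stands in for allmydata's base32.b2a and 32 for hashutil.CRYPTO_VAL_SIZE (both substitutions are assumptions; the stdlib encoder yields uppercase, padded output, so this mirrors the shape of the operation rather than the exact bytes):

    import base64
    import os

    def make_secret_sketch(size=32):
        # Base32-encode `size` random bytes and append a newline,
        # as in tahoe-lafs' _make_secret above.
        return base64.b32encode(os.urandom(size)) + b"\n"

    print(make_secret_sketch())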
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
source/addons/stock/stock.py
python
stock_package._complete_name
(self, cr, uid, ids, name, args, context=None)
return res
Forms complete name of location from parent location to child location. @return: Dictionary of values
Forms complete name of location from parent location to child location.
[ "Forms", "complete", "name", "of", "location", "from", "parent", "location", "to", "child", "location", "." ]
def _complete_name(self, cr, uid, ids, name, args, context=None):
    """ Forms complete name of location from parent location to child location.
    @return: Dictionary of values
    """
    res = {}
    for m in self.browse(cr, uid, ids, context=context):
        res[m.id] = m.name
        parent = m.parent_id
        while parent:
            res[m.id] = parent.name + ' / ' + res[m.id]
            parent = parent.parent_id
    return res
[ "def", "_complete_name", "(", "self", ",", "cr", ",", "uid", ",", "ids", ",", "name", ",", "args", ",", "context", "=", "None", ")", ":", "res", "=", "{", "}", "for", "m", "in", "self", ".", "browse", "(", "cr", ",", "uid", ",", "ids", ",", "context", "=", "context", ")", ":", "res", "[", "m", ".", "id", "]", "=", "m", ".", "name", "parent", "=", "m", ".", "parent_id", "while", "parent", ":", "res", "[", "m", ".", "id", "]", "=", "parent", ".", "name", "+", "' / '", "+", "res", "[", "m", ".", "id", "]", "parent", "=", "parent", ".", "parent_id", "return", "res" ]
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/source/addons/stock/stock.py#L3907-L3918
openstack/nova
b49b7663e1c3073917d5844b81d38db8e86d05c4
nova/db/main/api.py
python
console_auth_token_destroy_all_by_instance
(context, instance_uuid)
Delete all console authorizations belonging to the instance.
Delete all console authorizations belonging to the instance.
[ "Delete", "all", "console", "authorizations", "belonging", "to", "the", "instance", "." ]
def console_auth_token_destroy_all_by_instance(context, instance_uuid):
    """Delete all console authorizations belonging to the instance."""
    context.session.query(models.ConsoleAuthToken).\
        filter_by(instance_uuid=instance_uuid).delete()
[ "def", "console_auth_token_destroy_all_by_instance", "(", "context", ",", "instance_uuid", ")", ":", "context", ".", "session", ".", "query", "(", "models", ".", "ConsoleAuthToken", ")", ".", "filter_by", "(", "instance_uuid", "=", "instance_uuid", ")", ".", "delete", "(", ")" ]
https://github.com/openstack/nova/blob/b49b7663e1c3073917d5844b81d38db8e86d05c4/nova/db/main/api.py#L4701-L4704
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/scatterpolargl/selected/_textfont.py
python
Textfont.color
(self)
return self["color"]
Sets the text font color of selected points. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str
Sets the text font color of selected points. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen
[ "Sets", "the", "text", "font", "color", "of", "selected", "points", ".", "The", "color", "property", "is", "a", "color", "and", "may", "be", "specified", "as", ":", "-", "A", "hex", "string", "(", "e", ".", "g", ".", "#ff0000", ")", "-", "An", "rgb", "/", "rgba", "string", "(", "e", ".", "g", ".", "rgb", "(", "255", "0", "0", ")", ")", "-", "An", "hsl", "/", "hsla", "string", "(", "e", ".", "g", ".", "hsl", "(", "0", "100%", "50%", ")", ")", "-", "An", "hsv", "/", "hsva", "string", "(", "e", ".", "g", ".", "hsv", "(", "0", "100%", "100%", ")", ")", "-", "A", "named", "CSS", "color", ":", "aliceblue", "antiquewhite", "aqua", "aquamarine", "azure", "beige", "bisque", "black", "blanchedalmond", "blue", "blueviolet", "brown", "burlywood", "cadetblue", "chartreuse", "chocolate", "coral", "cornflowerblue", "cornsilk", "crimson", "cyan", "darkblue", "darkcyan", "darkgoldenrod", "darkgray", "darkgrey", "darkgreen", "darkkhaki", "darkmagenta", "darkolivegreen", "darkorange", "darkorchid", "darkred", "darksalmon", "darkseagreen", "darkslateblue", "darkslategray", "darkslategrey", "darkturquoise", "darkviolet", "deeppink", "deepskyblue", "dimgray", "dimgrey", "dodgerblue", "firebrick", "floralwhite", "forestgreen", "fuchsia", "gainsboro", "ghostwhite", "gold", "goldenrod", "gray", "grey", "green", "greenyellow", "honeydew", "hotpink", "indianred", "indigo", "ivory", "khaki", "lavender", "lavenderblush", "lawngreen", "lemonchiffon", "lightblue", "lightcoral", "lightcyan", "lightgoldenrodyellow", "lightgray", "lightgrey", "lightgreen", "lightpink", "lightsalmon", "lightseagreen", "lightskyblue", "lightslategray", "lightslategrey", "lightsteelblue", "lightyellow", "lime", "limegreen", "linen", "magenta", "maroon", "mediumaquamarine", "mediumblue", "mediumorchid", "mediumpurple", "mediumseagreen", "mediumslateblue", "mediumspringgreen", "mediumturquoise", "mediumvioletred", "midnightblue", "mintcream", "mistyrose", "moccasin", "navajowhite", "navy", "oldlace", "olive", "olivedrab", "orange", "orangered", "orchid", "palegoldenrod", "palegreen", "paleturquoise", "palevioletred", "papayawhip", "peachpuff", "peru", "pink", "plum", "powderblue", "purple", "red", "rosybrown", "royalblue", "rebeccapurple", "saddlebrown", "salmon", "sandybrown", "seagreen", "seashell", "sienna", "silver", "skyblue", "slateblue", "slategray", "slategrey", "snow", "springgreen", "steelblue", "tan", "teal", "thistle", "tomato", "turquoise", "violet", "wheat", "white", "whitesmoke", "yellow", "yellowgreen" ]
def color(self):
    """
    Sets the text font color of selected points.

    The 'color' property is a color and may be specified as:
      - A hex string (e.g. '#ff0000')
      - An rgb/rgba string (e.g. 'rgb(255,0,0)')
      - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
      - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
      - A named CSS color:
            aliceblue, antiquewhite, aqua, aquamarine, azure,
            beige, bisque, black, blanchedalmond, blue,
            blueviolet, brown, burlywood, cadetblue,
            chartreuse, chocolate, coral, cornflowerblue,
            cornsilk, crimson, cyan, darkblue, darkcyan,
            darkgoldenrod, darkgray, darkgrey, darkgreen,
            darkkhaki, darkmagenta, darkolivegreen, darkorange,
            darkorchid, darkred, darksalmon, darkseagreen,
            darkslateblue, darkslategray, darkslategrey,
            darkturquoise, darkviolet, deeppink, deepskyblue,
            dimgray, dimgrey, dodgerblue, firebrick,
            floralwhite, forestgreen, fuchsia, gainsboro,
            ghostwhite, gold, goldenrod, gray, grey, green,
            greenyellow, honeydew, hotpink, indianred, indigo,
            ivory, khaki, lavender, lavenderblush, lawngreen,
            lemonchiffon, lightblue, lightcoral, lightcyan,
            lightgoldenrodyellow, lightgray, lightgrey,
            lightgreen, lightpink, lightsalmon, lightseagreen,
            lightskyblue, lightslategray, lightslategrey,
            lightsteelblue, lightyellow, lime, limegreen,
            linen, magenta, maroon, mediumaquamarine,
            mediumblue, mediumorchid, mediumpurple,
            mediumseagreen, mediumslateblue, mediumspringgreen,
            mediumturquoise, mediumvioletred, midnightblue,
            mintcream, mistyrose, moccasin, navajowhite, navy,
            oldlace, olive, olivedrab, orange, orangered,
            orchid, palegoldenrod, palegreen, paleturquoise,
            palevioletred, papayawhip, peachpuff, peru, pink,
            plum, powderblue, purple, red, rosybrown,
            royalblue, rebeccapurple, saddlebrown, salmon,
            sandybrown, seagreen, seashell, sienna, silver,
            skyblue, slateblue, slategray, slategrey, snow,
            springgreen, steelblue, tan, teal, thistle, tomato,
            turquoise, violet, wheat, white, whitesmoke,
            yellow, yellowgreen

    Returns
    -------
    str
    """
    return self["color"]
[ "def", "color", "(", "self", ")", ":", "return", "self", "[", "\"color\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/scatterpolargl/selected/_textfont.py#L16-L66
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/datetime.py
python
timedelta.__reduce__
(self)
return (self.__class__, self._getstate())
[]
def __reduce__(self):
    return (self.__class__, self._getstate())
[ "def", "__reduce__", "(", "self", ")", ":", "return", "(", "self", ".", "__class__", ",", "self", ".", "_getstate", "(", ")", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/datetime.py#L628-L629
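__reduce__ is the hook the pickle module calls to learn how to rebuild an object: it returns a callable (here the class) and the arguments to pass it (here _getstate()). A short round-trip demonstrating the effect for timedelta:

    import pickle
    from datetime import timedelta

    delta = timedelta(days=1, seconds=2, microseconds=3)
    # pickle serializes via __reduce__ (class plus _getstate() args),
    # so the round-trip reconstructs an equal object.
    assert pickle.loads(pickle.dumps(delta)) == delta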
thomasweng15/E.V.E.
e3bea3e45d0c549eccc6824c9cadbcc6980545f6
actions/wolfram.py
python
Wolfram.open
(self, wolfram, text, controller)
Open webpage of visual WolframAlpha result.
Open webpage of visual WolframAlpha result.
[ "Open", "webpage", "of", "visual", "WolframAlpha", "result", "." ]
def open(self, wolfram, text, controller):
    """Open webpage of visual WolframAlpha result."""
    wolfram_url = "http://www.wolframalpha.com/input/?i=" + text.replace(" ", "+")
    controller.open(wolfram_url)
[ "def", "open", "(", "self", ",", "wolfram", ",", "text", ",", "controller", ")", ":", "wolfram_url", "=", "\"http://www.wolframalpha.com/input/?i=\"", "+", "text", ".", "replace", "(", "\" \"", ",", "\"+\"", ")", "controller", ".", "open", "(", "wolfram_url", ")" ]
https://github.com/thomasweng15/E.V.E./blob/e3bea3e45d0c549eccc6824c9cadbcc6980545f6/actions/wolfram.py#L71-L74
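One caveat worth noting: text.replace(" ", "+") handles spaces only, so a query containing characters such as '&', '+', or '?' would corrupt the URL. For the general case the standard library's quote_plus covers all reserved characters:

    from urllib.parse import quote_plus

    query = "what is 1+1 & 2?"
    # quote_plus turns spaces into '+' and percent-encodes '&', '+', '?', etc.
    wolfram_url = "http://www.wolframalpha.com/input/?i=" + quote_plus(query)
    print(wolfram_url)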
jelmer/xandikos
3149a633c388a6f1dffbc6686763fca00f72e3bc
xandikos/store/git.py
python
GitStore.set_displayname
(self, displayname)
Set the display name. :param displayname: New display name
Set the display name.
[ "Set", "the", "display", "name", "." ]
def set_displayname(self, displayname):
    """Set the display name.

    :param displayname: New display name
    """
    self.config.set_displayname(displayname)
[ "def", "set_displayname", "(", "self", ",", "displayname", ")", ":", "self", ".", "config", ".", "set_displayname", "(", "displayname", ")" ]
https://github.com/jelmer/xandikos/blob/3149a633c388a6f1dffbc6686763fca00f72e3bc/xandikos/store/git.py#L481-L486
robinhood/faust
01b4c0ad8390221db71751d80001b0fd879291e2
faust/types/settings/settings.py
python
Settings.value_serializer
(self)
Default value serializer. Serializer used for values by default when no serializer is specified, or a model is not being used. This can be string, the name of a serializer/codec, or an actual :class:`faust.serializers.codecs.Codec` instance. .. seealso:: - The :ref:`codecs` section in the model guide -- for more information about codecs.
Default value serializer.
[ "Default", "value", "serializer", "." ]
def value_serializer(self) -> CodecArg:
    """Default value serializer.

    Serializer used for values by default
    when no serializer is specified, or a model is not being used.

    This can be string, the name of a serializer/codec, or an actual
    :class:`faust.serializers.codecs.Codec` instance.

    .. seealso::
        - The :ref:`codecs` section in the model guide -- for
          more information about codecs.
    """
[ "def", "value_serializer", "(", "self", ")", "->", "CodecArg", ":" ]
https://github.com/robinhood/faust/blob/01b4c0ad8390221db71751d80001b0fd879291e2/faust/types/settings/settings.py#L1143-L1156
pantsbuild/pex
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
pex/vendor/_vendored/setuptools/setuptools/command/develop.py
python
VersionlessRequirement.__getattr__
(self, name)
return getattr(self.__dist, name)
[]
def __getattr__(self, name):
    return getattr(self.__dist, name)
[ "def", "__getattr__", "(", "self", ",", "name", ")", ":", "return", "getattr", "(", "self", ".", "__dist", ",", "name", ")" ]
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/vendor/_vendored/setuptools/setuptools/command/develop.py#L237-L238
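__getattr__ is invoked only when normal attribute lookup fails, which is what makes this two-line method a complete delegation proxy: anything not defined on the wrapper is forwarded to the wrapped object (here the name-mangled self.__dist). A generic sketch of the same pattern:

    class Proxy:
        def __init__(self, wrapped):
            self._wrapped = wrapped

        def __getattr__(self, name):
            # Reached only when `name` is not found on Proxy itself.
            return getattr(self._wrapped, name)

    p = Proxy([1, 2, 3])
    print(p.count(2))  # -> 1, resolved on the wrapped list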
freedombox/FreedomBox
335a7f92cc08f27981f838a7cddfc67740598e54
plinth/modules/tor/__init__.py
python
TorApp.__init__
(self)
Create components for the app.
Create components for the app.
[ "Create", "components", "for", "the", "app", "." ]
def __init__(self):
    """Create components for the app."""
    super().__init__()

    info = app_module.Info(app_id=self.app_id, version=self._version,
                           depends=['names'], name=_('Tor'),
                           icon_filename='tor',
                           short_description=_('Anonymity Network'),
                           description=_description, manual_page='Tor',
                           clients=manifest.clients,
                           donation_url='https://donate.torproject.org/')
    self.add(info)

    menu_item = menu.Menu('menu-tor', info.name, info.short_description,
                          info.icon_filename, 'tor:index',
                          parent_url_name='apps')
    self.add(menu_item)

    packages = Packages('packages-tor', [
        'tor', 'tor-geoipdb', 'torsocks', 'obfs4proxy', 'apt-transport-tor'
    ])
    self.add(packages)

    domain_type = DomainType('domain-type-tor', _('Tor Onion Service'),
                             'tor:index', can_have_certificate=False)
    self.add(domain_type)

    firewall = Firewall('firewall-tor-socks', _('Tor Socks Proxy'),
                        ports=['tor-socks'], is_external=False)
    self.add(firewall)

    firewall = Firewall('firewall-tor-relay', _('Tor Bridge Relay'),
                        ports=['tor-orport', 'tor-obfs3', 'tor-obfs4'],
                        is_external=True)
    self.add(firewall)

    daemon = Daemon(
        'daemon-tor', 'tor@plinth', strict_check=True,
        listen_ports=[(9050, 'tcp4'), (9050, 'tcp6'), (9040, 'tcp4'),
                      (9040, 'tcp6'), (9053, 'udp4'), (9053, 'udp6')])
    self.add(daemon)

    users_and_groups = UsersAndGroups('users-and-groups-tor',
                                      reserved_usernames=['debian-tor'])
    self.add(users_and_groups)

    backup_restore = BackupRestore('backup-restore-tor', **manifest.backup)
    self.add(backup_restore)
[ "def", "__init__", "(", "self", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "info", "=", "app_module", ".", "Info", "(", "app_id", "=", "self", ".", "app_id", ",", "version", "=", "self", ".", "_version", ",", "depends", "=", "[", "'names'", "]", ",", "name", "=", "_", "(", "'Tor'", ")", ",", "icon_filename", "=", "'tor'", ",", "short_description", "=", "_", "(", "'Anonymity Network'", ")", ",", "description", "=", "_description", ",", "manual_page", "=", "'Tor'", ",", "clients", "=", "manifest", ".", "clients", ",", "donation_url", "=", "'https://donate.torproject.org/'", ")", "self", ".", "add", "(", "info", ")", "menu_item", "=", "menu", ".", "Menu", "(", "'menu-tor'", ",", "info", ".", "name", ",", "info", ".", "short_description", ",", "info", ".", "icon_filename", ",", "'tor:index'", ",", "parent_url_name", "=", "'apps'", ")", "self", ".", "add", "(", "menu_item", ")", "packages", "=", "Packages", "(", "'packages-tor'", ",", "[", "'tor'", ",", "'tor-geoipdb'", ",", "'torsocks'", ",", "'obfs4proxy'", ",", "'apt-transport-tor'", "]", ")", "self", ".", "add", "(", "packages", ")", "domain_type", "=", "DomainType", "(", "'domain-type-tor'", ",", "_", "(", "'Tor Onion Service'", ")", ",", "'tor:index'", ",", "can_have_certificate", "=", "False", ")", "self", ".", "add", "(", "domain_type", ")", "firewall", "=", "Firewall", "(", "'firewall-tor-socks'", ",", "_", "(", "'Tor Socks Proxy'", ")", ",", "ports", "=", "[", "'tor-socks'", "]", ",", "is_external", "=", "False", ")", "self", ".", "add", "(", "firewall", ")", "firewall", "=", "Firewall", "(", "'firewall-tor-relay'", ",", "_", "(", "'Tor Bridge Relay'", ")", ",", "ports", "=", "[", "'tor-orport'", ",", "'tor-obfs3'", ",", "'tor-obfs4'", "]", ",", "is_external", "=", "True", ")", "self", ".", "add", "(", "firewall", ")", "daemon", "=", "Daemon", "(", "'daemon-tor'", ",", "'tor@plinth'", ",", "strict_check", "=", "True", ",", "listen_ports", "=", "[", "(", "9050", ",", "'tcp4'", ")", ",", "(", "9050", ",", "'tcp6'", ")", ",", "(", "9040", ",", "'tcp4'", ")", ",", "(", "9040", ",", "'tcp6'", ")", ",", "(", "9053", ",", "'udp4'", ")", ",", "(", "9053", ",", "'udp6'", ")", "]", ")", "self", ".", "add", "(", "daemon", ")", "users_and_groups", "=", "UsersAndGroups", "(", "'users-and-groups-tor'", ",", "reserved_usernames", "=", "[", "'debian-tor'", "]", ")", "self", ".", "add", "(", "users_and_groups", ")", "backup_restore", "=", "BackupRestore", "(", "'backup-restore-tor'", ",", "*", "*", "manifest", ".", "backup", ")", "self", ".", "add", "(", "backup_restore", ")" ]
https://github.com/freedombox/FreedomBox/blob/335a7f92cc08f27981f838a7cddfc67740598e54/plinth/modules/tor/__init__.py#L44-L91
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/cdn/v20180606/cdn_client.py
python
CdnClient.CreateEdgePackTask
(self, request)
动态打包任务提交接口 (English: interface for submitting a dynamic packaging task) :param request: Request instance for CreateEdgePackTask. :type request: :class:`tencentcloud.cdn.v20180606.models.CreateEdgePackTaskRequest` :rtype: :class:`tencentcloud.cdn.v20180606.models.CreateEdgePackTaskResponse`
动态打包任务提交接口 (English: interface for submitting a dynamic packaging task)
[ "动态打包任务提交接口" ]
def CreateEdgePackTask(self, request):
    """动态打包任务提交接口

    :param request: Request instance for CreateEdgePackTask.
    :type request: :class:`tencentcloud.cdn.v20180606.models.CreateEdgePackTaskRequest`
    :rtype: :class:`tencentcloud.cdn.v20180606.models.CreateEdgePackTaskResponse`

    """
    try:
        params = request._serialize()
        body = self.call("CreateEdgePackTask", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateEdgePackTaskResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
[ "def", "CreateEdgePackTask", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"CreateEdgePackTask\"", ",", "params", ")", "response", "=", "json", ".", "loads", "(", "body", ")", "if", "\"Error\"", "not", "in", "response", "[", "\"Response\"", "]", ":", "model", "=", "models", ".", "CreateEdgePackTaskResponse", "(", ")", "model", ".", "_deserialize", "(", "response", "[", "\"Response\"", "]", ")", "return", "model", "else", ":", "code", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Code\"", "]", "message", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Message\"", "]", "reqid", "=", "response", "[", "\"Response\"", "]", "[", "\"RequestId\"", "]", "raise", "TencentCloudSDKException", "(", "code", ",", "message", ",", "reqid", ")", "except", "Exception", "as", "e", ":", "if", "isinstance", "(", "e", ",", "TencentCloudSDKException", ")", ":", "raise", "else", ":", "raise", "TencentCloudSDKException", "(", "e", ".", "message", ",", "e", ".", "message", ")" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/cdn/v20180606/cdn_client.py#L113-L138
nlloyd/SubliminalCollaborator
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
libs/twisted/internet/pollreactor.py
python
PollReactor.removeAll
(self)
return self._removeAll( [self._selectables[fd] for fd in self._reads], [self._selectables[fd] for fd in self._writes])
Remove all selectables, and return a list of them.
Remove all selectables, and return a list of them.
[ "Remove", "all", "selectables", "and", "return", "a", "list", "of", "them", "." ]
def removeAll(self):
    """
    Remove all selectables, and return a list of them.
    """
    return self._removeAll(
        [self._selectables[fd] for fd in self._reads],
        [self._selectables[fd] for fd in self._writes])
[ "def", "removeAll", "(", "self", ")", ":", "return", "self", ".", "_removeAll", "(", "[", "self", ".", "_selectables", "[", "fd", "]", "for", "fd", "in", "self", ".", "_reads", "]", ",", "[", "self", ".", "_selectables", "[", "fd", "]", "for", "fd", "in", "self", ".", "_writes", "]", ")" ]
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/internet/pollreactor.py#L138-L144
Minyus/causallift
c808b15de7e7912c4c08e9ad4152e4bd33f008c9
kedro_cli.py
python
install
()
Install project dependencies from both requirements.txt and environment.yml (optional).
Install project dependencies from both requirements.txt and environment.yml (optional).
[ "Install", "project", "dependencies", "from", "both", "requirements", ".", "txt", "and", "environment", ".", "yml", "(", "optional", ")", "." ]
def install():
    """Install project dependencies from both requirements.txt and environment.yml (optional)."""
    if (Path.cwd() / "src" / "environment.yml").is_file():
        call(["conda", "install", "--file", "src/environment.yml", "--yes"])

    python_call("pip", ["install", "-U", "-r", "src/requirements.txt"])
[ "def", "install", "(", ")", ":", "if", "(", "Path", ".", "cwd", "(", ")", "/", "\"src\"", "/", "\"environment.yml\"", ")", ".", "is_file", "(", ")", ":", "call", "(", "[", "\"conda\"", ",", "\"install\"", ",", "\"--file\"", ",", "\"src/environment.yml\"", ",", "\"--yes\"", "]", ")", "python_call", "(", "\"pip\"", ",", "[", "\"install\"", ",", "\"-U\"", ",", "\"-r\"", ",", "\"src/requirements.txt\"", "]", ")" ]
https://github.com/Minyus/causallift/blob/c808b15de7e7912c4c08e9ad4152e4bd33f008c9/kedro_cli.py#L163-L169
1040003585/WebScrapingWithPython
a770fa5b03894076c8c9539b1ffff34424ffc016
portia_examle/lib/python2.7/site-packages/wheel/signatures/ed25519py.py
python
crypto_sign_keypair
(seed=None)
return Keypair(vkbytes, skbytes+vkbytes)
Return (verifying, secret) key from a given seed, or os.urandom(32)
Return (verifying, secret) key from a given seed, or os.urandom(32)
[ "Return", "(", "verifying", "secret", ")", "key", "from", "a", "given", "seed", "or", "os", ".", "urandom", "(", "32", ")" ]
def crypto_sign_keypair(seed=None):
    """Return (verifying, secret) key from a given seed, or os.urandom(32)"""
    if seed is None:
        seed = os.urandom(PUBLICKEYBYTES)
    else:
        warnings.warn("ed25519ll should choose random seed.",
                      RuntimeWarning)
    if len(seed) != 32:
        raise ValueError("seed must be 32 random bytes or None.")
    skbytes = seed
    vkbytes = djbec.publickey(skbytes)
    return Keypair(vkbytes, skbytes+vkbytes)
[ "def", "crypto_sign_keypair", "(", "seed", "=", "None", ")", ":", "if", "seed", "is", "None", ":", "seed", "=", "os", ".", "urandom", "(", "PUBLICKEYBYTES", ")", "else", ":", "warnings", ".", "warn", "(", "\"ed25519ll should choose random seed.\"", ",", "RuntimeWarning", ")", "if", "len", "(", "seed", ")", "!=", "32", ":", "raise", "ValueError", "(", "\"seed must be 32 random bytes or None.\"", ")", "skbytes", "=", "seed", "vkbytes", "=", "djbec", ".", "publickey", "(", "skbytes", ")", "return", "Keypair", "(", "vkbytes", ",", "skbytes", "+", "vkbytes", ")" ]
https://github.com/1040003585/WebScrapingWithPython/blob/a770fa5b03894076c8c9539b1ffff34424ffc016/portia_examle/lib/python2.7/site-packages/wheel/signatures/ed25519py.py#L18-L29
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
src/transformers/utils/dummy_tf_objects.py
python
TFXLNetLMHeadModel.from_pretrained
(cls, *args, **kwargs)
[]
def from_pretrained(cls, *args, **kwargs):
    requires_backends(cls, ["tf"])
[ "def", "from_pretrained", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "requires_backends", "(", "cls", ",", "[", "\"tf\"", "]", ")" ]
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/utils/dummy_tf_objects.py#L2965-L2966
emesene/emesene
4548a4098310e21b16437bb36223a7f632a4f7bc
emesene/e3/base/Logger.py
python
Account.__init__
(self, id_, id_account, account, status, nick='', message='', path='', cid=None)
constructor
constructor
[ "constructor" ]
def __init__(self, id_, id_account, account, status, nick='', message='',
             path='', cid=None):
    '''constructor'''
    self.id = id_
    self.id_account = id_account
    self.account = account
    self.status = status
    self.nick = nick
    self.message = message
    self.path = path
    self.cid = cid
    self.groups = []
[ "def", "__init__", "(", "self", ",", "id_", ",", "id_account", ",", "account", ",", "status", ",", "nick", "=", "''", ",", "message", "=", "''", ",", "path", "=", "''", ",", "cid", "=", "None", ")", ":", "self", ".", "id", "=", "id_", "self", ".", "id_account", "=", "id_account", "self", ".", "account", "=", "account", "self", ".", "status", "=", "status", "self", ".", "nick", "=", "nick", "self", ".", "message", "=", "message", "self", ".", "path", "=", "path", "self", ".", "cid", "=", "cid", "self", ".", "groups", "=", "[", "]" ]
https://github.com/emesene/emesene/blob/4548a4098310e21b16437bb36223a7f632a4f7bc/emesene/e3/base/Logger.py#L38-L49
Franck-Dernoncourt/NeuroNER
3817feaf290c1f6e03ae23ea964e68c88d0e7a88
neuroner/prepare_pretrained_model.py
python
prepare_pretrained_model_for_restoring
(output_folder_name, epoch_number, model_name, delete_token_mappings=False)
Copy the dataset.pickle, parameters.ini, and model checkpoint files after removing the data used for training. The dataset and labels are deleted from dataset.pickle by default. The only information about the dataset that remain in the pretrained model is the list of tokens that appears in the dataset and the corresponding token embeddings learned from the dataset. If delete_token_mappings is set to True, index_to_token and token_to_index mappings are deleted from dataset.pickle additionally, and the corresponding token embeddings are deleted from the model checkpoint files. In this case, the pretrained model would not contain any information about the dataset used for training the model. If you wish to share a pretrained model with delete_token_mappings = True, it is highly recommended to use some external pre-trained token embeddings and freeze them while training the model to obtain high performance. This can be done by specifying the token_pretrained_embedding_filepath and setting freeze_token_embeddings = True in parameters.ini for training.
Copy the dataset.pickle, parameters.ini, and model checkpoint files after removing the data used for training. The dataset and labels are deleted from dataset.pickle by default. The only information about the dataset that remain in the pretrained model is the list of tokens that appears in the dataset and the corresponding token embeddings learned from the dataset. If delete_token_mappings is set to True, index_to_token and token_to_index mappings are deleted from dataset.pickle additionally, and the corresponding token embeddings are deleted from the model checkpoint files. In this case, the pretrained model would not contain any information about the dataset used for training the model. If you wish to share a pretrained model with delete_token_mappings = True, it is highly recommended to use some external pre-trained token embeddings and freeze them while training the model to obtain high performance. This can be done by specifying the token_pretrained_embedding_filepath and setting freeze_token_embeddings = True in parameters.ini for training.
[ "Copy", "the", "dataset", ".", "pickle", "parameters", ".", "ini", "and", "model", "checkpoint", "files", "after", "removing", "the", "data", "used", "for", "training", ".", "The", "dataset", "and", "labels", "are", "deleted", "from", "dataset", ".", "pickle", "by", "default", ".", "The", "only", "information", "about", "the", "dataset", "that", "remain", "in", "the", "pretrained", "model", "is", "the", "list", "of", "tokens", "that", "appears", "in", "the", "dataset", "and", "the", "corresponding", "token", "embeddings", "learned", "from", "the", "dataset", ".", "If", "delete_token_mappings", "is", "set", "to", "True", "index_to_token", "and", "token_to_index", "mappings", "are", "deleted", "from", "dataset", ".", "pickle", "additionally", "and", "the", "corresponding", "token", "embeddings", "are", "deleted", "from", "the", "model", "checkpoint", "files", ".", "In", "this", "case", "the", "pretrained", "model", "would", "not", "contain", "any", "information", "about", "the", "dataset", "used", "for", "training", "the", "model", ".", "If", "you", "wish", "to", "share", "a", "pretrained", "model", "with", "delete_token_mappings", "=", "True", "it", "is", "highly", "recommended", "to", "use", "some", "external", "pre", "-", "trained", "token", "embeddings", "and", "freeze", "them", "while", "training", "the", "model", "to", "obtain", "high", "performance", ".", "This", "can", "be", "done", "by", "specifying", "the", "token_pretrained_embedding_filepath", "and", "setting", "freeze_token_embeddings", "=", "True", "in", "parameters", ".", "ini", "for", "training", "." ]
def prepare_pretrained_model_for_restoring(output_folder_name, epoch_number,
                                           model_name,
                                           delete_token_mappings=False):
    '''
    Copy the dataset.pickle, parameters.ini, and model checkpoint files after
    removing the data used for training.

    The dataset and labels are deleted from dataset.pickle by default. The
    only information about the dataset that remain in the pretrained model is
    the list of tokens that appears in the dataset and the corresponding
    token embeddings learned from the dataset.

    If delete_token_mappings is set to True, index_to_token and
    token_to_index mappings are deleted from dataset.pickle additionally, and
    the corresponding token embeddings are deleted from the model checkpoint
    files. In this case, the pretrained model would not contain any
    information about the dataset used for training the model.

    If you wish to share a pretrained model with delete_token_mappings =
    True, it is highly recommended to use some external pre-trained token
    embeddings and freeze them while training the model to obtain high
    performance. This can be done by specifying the
    token_pretrained_embedding_filepath and setting freeze_token_embeddings =
    True in parameters.ini for training.
    '''
    input_model_folder = os.path.join('.', 'output', output_folder_name,
                                      'model')
    output_model_folder = os.path.join('.', 'trained_models', model_name)
    utils.create_folder_if_not_exists(output_model_folder)

    # trim and copy dataset.pickle
    input_dataset_filepath = os.path.join(input_model_folder,
                                          'dataset.pickle')
    output_dataset_filepath = os.path.join(output_model_folder,
                                           'dataset.pickle')
    trim_dataset_pickle(input_dataset_filepath, output_dataset_filepath,
                        delete_token_mappings=delete_token_mappings)

    # copy parameters.ini
    parameters_filepath = os.path.join(input_model_folder, 'parameters.ini')
    shutil.copy(parameters_filepath, output_model_folder)

    # (trim and) copy checkpoint files
    epoch_number_string = str(epoch_number).zfill(5)
    if delete_token_mappings:
        input_checkpoint_filepath = os.path.join(
            input_model_folder, 'model_{0}.ckpt'.format(epoch_number_string))
        output_checkpoint_filepath = os.path.join(output_model_folder,
                                                  'model.ckpt')
        trim_model_checkpoint(parameters_filepath, output_dataset_filepath,
                              input_checkpoint_filepath,
                              output_checkpoint_filepath)
    else:
        for filepath in glob.glob(os.path.join(
                input_model_folder,
                'model_{0}.ckpt*'.format(epoch_number_string))):
            shutil.copyfile(
                filepath,
                os.path.join(
                    output_model_folder,
                    os.path.basename(filepath).replace(
                        '_' + epoch_number_string, '')))
[ "def", "prepare_pretrained_model_for_restoring", "(", "output_folder_name", ",", "epoch_number", ",", "model_name", ",", "delete_token_mappings", "=", "False", ")", ":", "input_model_folder", "=", "os", ".", "path", ".", "join", "(", "'.'", ",", "'output'", ",", "output_folder_name", ",", "'model'", ")", "output_model_folder", "=", "os", ".", "path", ".", "join", "(", "'.'", ",", "'trained_models'", ",", "model_name", ")", "utils", ".", "create_folder_if_not_exists", "(", "output_model_folder", ")", "# trim and copy dataset.pickle", "input_dataset_filepath", "=", "os", ".", "path", ".", "join", "(", "input_model_folder", ",", "'dataset.pickle'", ")", "output_dataset_filepath", "=", "os", ".", "path", ".", "join", "(", "output_model_folder", ",", "'dataset.pickle'", ")", "trim_dataset_pickle", "(", "input_dataset_filepath", ",", "output_dataset_filepath", ",", "delete_token_mappings", "=", "delete_token_mappings", ")", "# copy parameters.ini", "parameters_filepath", "=", "os", ".", "path", ".", "join", "(", "input_model_folder", ",", "'parameters.ini'", ")", "shutil", ".", "copy", "(", "parameters_filepath", ",", "output_model_folder", ")", "# (trim and) copy checkpoint files", "epoch_number_string", "=", "str", "(", "epoch_number", ")", ".", "zfill", "(", "5", ")", "if", "delete_token_mappings", ":", "input_checkpoint_filepath", "=", "os", ".", "path", ".", "join", "(", "input_model_folder", ",", "'model_{0}.ckpt'", ".", "format", "(", "epoch_number_string", ")", ")", "output_checkpoint_filepath", "=", "os", ".", "path", ".", "join", "(", "output_model_folder", ",", "'model.ckpt'", ")", "trim_model_checkpoint", "(", "parameters_filepath", ",", "output_dataset_filepath", ",", "input_checkpoint_filepath", ",", "output_checkpoint_filepath", ")", "else", ":", "for", "filepath", "in", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "input_model_folder", ",", "'model_{0}.ckpt*'", ".", "format", "(", "epoch_number_string", ")", ")", ")", ":", "shutil", ".", "copyfile", "(", "filepath", ",", "os", ".", "path", ".", "join", "(", "output_model_folder", ",", "os", ".", "path", ".", "basename", "(", "filepath", ")", ".", "replace", "(", "'_'", "+", "epoch_number_string", ",", "''", ")", ")", ")" ]
https://github.com/Franck-Dernoncourt/NeuroNER/blob/3817feaf290c1f6e03ae23ea964e68c88d0e7a88/neuroner/prepare_pretrained_model.py#L82-L131
whyliam/whyliam.workflows.youdao
2dfa7f1de56419dab1c2e70c1a27e5e13ba25a5c
workflow/util.py
python
LockFile.locked
(self)
return self._lock.is_set()
``True`` if file is locked by this instance.
``True`` if file is locked by this instance.
[ "True", "if", "file", "is", "locked", "by", "this", "instance", "." ]
def locked(self):
    """``True`` if file is locked by this instance."""
    return self._lock.is_set()
[ "def", "locked", "(", "self", ")", ":", "return", "self", ".", "_lock", ".", "is_set", "(", ")" ]
https://github.com/whyliam/whyliam.workflows.youdao/blob/2dfa7f1de56419dab1c2e70c1a27e5e13ba25a5c/workflow/util.py#L415-L417
fergusq/tampio
b0017d2557a21c47fddc20cfceeb92f4e0376401
voikko/libvoikko.py
python
Voikko.setLibrarySearchPath
(cls, searchPath)
Set the path to a directory that should be used to search for the native library before trying to load it from the default (OS specific) lookup path.
Set the path to a directory that should be used to search for the native library before trying to load it from the default (OS specific) lookup path.
[ "Set", "the", "path", "to", "a", "directory", "that", "should", "be", "used", "to", "search", "for", "the", "native", "library", "before", "trying", "to", "load", "it", "from", "the", "default", "(", "OS", "specific", ")", "lookup", "path", "." ]
def setLibrarySearchPath(cls, searchPath):
    """Set the path to a directory that should be used to search for the
    native library before trying to load it from the default (OS specific)
    lookup path.
    """
    cls._sharedLibrarySearchPath = searchPath
[ "def", "setLibrarySearchPath", "(", "cls", ",", "searchPath", ")", ":", "cls", ".", "_sharedLibrarySearchPath", "=", "searchPath" ]
https://github.com/fergusq/tampio/blob/b0017d2557a21c47fddc20cfceeb92f4e0376401/voikko/libvoikko.py#L419-L423
colour-science/colour
38782ac059e8ddd91939f3432bf06811c16667f0
colour/models/hdr_ipt.py
python
XYZ_to_hdr_IPT
(XYZ, Y_s=0.2, Y_abs=100, method='Fairchild 2011')
return from_range_100(IPT_hdr)
Converts from *CIE XYZ* tristimulus values to *hdr-IPT* colourspace.

Parameters
----------
XYZ : array_like
    *CIE XYZ* tristimulus values.
Y_s : numeric or array_like
    Relative luminance :math:`Y_s` of the surround.
Y_abs : numeric or array_like
    Absolute luminance :math:`Y_{abs}` of the scene diffuse white in
    :math:`cd/m^2`.
method : str, optional
    **{'Fairchild 2011', 'Fairchild 2010'}**,
    Computation method.

Returns
-------
ndarray
    *hdr-IPT* colourspace array.

Notes
-----
+-------------+-------------------------+---------------------+
| **Domain**  | **Scale - Reference**   | **Scale - 1**       |
+=============+=========================+=====================+
| ``XYZ``     | [0, 1]                  | [0, 1]              |
+-------------+-------------------------+---------------------+
| ``Y_s``     | [0, 1]                  | [0, 1]              |
+-------------+-------------------------+---------------------+

+-------------+-------------------------+---------------------+
| **Range**   | **Scale - Reference**   | **Scale - 1**       |
+=============+=========================+=====================+
| ``IPT_hdr`` | ``I_hdr`` : [0, 100]    | ``I_hdr`` : [0, 1]  |
|             |                         |                     |
|             | ``P_hdr`` : [-100, 100] | ``P_hdr`` : [-1, 1] |
|             |                         |                     |
|             | ``T_hdr`` : [-100, 100] | ``T_hdr`` : [-1, 1] |
+-------------+-------------------------+---------------------+

-   Input *CIE XYZ* tristimulus values must be adapted to
    *CIE Standard Illuminant D Series* *D65*.

References
----------
:cite:`Fairchild2010`, :cite:`Fairchild2011`

Examples
--------
>>> XYZ = np.array([0.20654008, 0.12197225, 0.05136952])
>>> XYZ_to_hdr_IPT(XYZ)  # doctest: +ELLIPSIS
array([ 48.3937634...,  42.4499020...,  22.0195403...])
>>> XYZ_to_hdr_IPT(XYZ, method='Fairchild 2010')  # doctest: +ELLIPSIS
array([ 30.0287314...,  83.9384506...,  34.9028738...])
Converts from *CIE XYZ* tristimulus values to *hdr-IPT* colourspace.
[ "Converts", "from", "*", "CIE", "XYZ", "*", "tristimulus", "values", "to", "*", "hdr", "-", "IPT", "*", "colourspace", "." ]
def XYZ_to_hdr_IPT(XYZ, Y_s=0.2, Y_abs=100, method='Fairchild 2011'):
    """
    Converts from *CIE XYZ* tristimulus values to *hdr-IPT* colourspace.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values.
    Y_s : numeric or array_like
        Relative luminance :math:`Y_s` of the surround.
    Y_abs : numeric or array_like
        Absolute luminance :math:`Y_{abs}` of the scene diffuse white in
        :math:`cd/m^2`.
    method : str, optional
        **{'Fairchild 2011', 'Fairchild 2010'}**,
        Computation method.

    Returns
    -------
    ndarray
        *hdr-IPT* colourspace array.

    Notes
    -----
    +-------------+-------------------------+---------------------+
    | **Domain**  | **Scale - Reference**   | **Scale - 1**       |
    +=============+=========================+=====================+
    | ``XYZ``     | [0, 1]                  | [0, 1]              |
    +-------------+-------------------------+---------------------+
    | ``Y_s``     | [0, 1]                  | [0, 1]              |
    +-------------+-------------------------+---------------------+

    +-------------+-------------------------+---------------------+
    | **Range**   | **Scale - Reference**   | **Scale - 1**       |
    +=============+=========================+=====================+
    | ``IPT_hdr`` | ``I_hdr`` : [0, 100]    | ``I_hdr`` : [0, 1]  |
    |             |                         |                     |
    |             | ``P_hdr`` : [-100, 100] | ``P_hdr`` : [-1, 1] |
    |             |                         |                     |
    |             | ``T_hdr`` : [-100, 100] | ``T_hdr`` : [-1, 1] |
    +-------------+-------------------------+---------------------+

    -   Input *CIE XYZ* tristimulus values must be adapted to
        *CIE Standard Illuminant D Series* *D65*.

    References
    ----------
    :cite:`Fairchild2010`, :cite:`Fairchild2011`

    Examples
    --------
    >>> XYZ = np.array([0.20654008, 0.12197225, 0.05136952])
    >>> XYZ_to_hdr_IPT(XYZ)  # doctest: +ELLIPSIS
    array([ 48.3937634...,  42.4499020...,  22.0195403...])
    >>> XYZ_to_hdr_IPT(XYZ, method='Fairchild 2010')  # doctest: +ELLIPSIS
    array([ 30.0287314...,  83.9384506...,  34.9028738...])
    """

    XYZ = to_domain_1(XYZ)
    method = validate_method(method, HDR_IPT_METHODS)

    if method == 'fairchild 2010':
        lightness_callable = lightness_Fairchild2010
    else:
        lightness_callable = lightness_Fairchild2011

    e = exponent_hdr_IPT(Y_s, Y_abs, method)[..., np.newaxis]

    LMS = vector_dot(MATRIX_IPT_XYZ_TO_LMS, XYZ)

    # Domain and range scaling has already be handled.
    with domain_range_scale('ignore'):
        LMS_prime = np.sign(LMS) * np.abs(lightness_callable(LMS, e))

    IPT_hdr = vector_dot(MATRIX_IPT_LMS_P_TO_IPT, LMS_prime)

    return from_range_100(IPT_hdr)
[ "def", "XYZ_to_hdr_IPT", "(", "XYZ", ",", "Y_s", "=", "0.2", ",", "Y_abs", "=", "100", ",", "method", "=", "'Fairchild 2011'", ")", ":", "XYZ", "=", "to_domain_1", "(", "XYZ", ")", "method", "=", "validate_method", "(", "method", ",", "HDR_IPT_METHODS", ")", "if", "method", "==", "'fairchild 2010'", ":", "lightness_callable", "=", "lightness_Fairchild2010", "else", ":", "lightness_callable", "=", "lightness_Fairchild2011", "e", "=", "exponent_hdr_IPT", "(", "Y_s", ",", "Y_abs", ",", "method", ")", "[", "...", ",", "np", ".", "newaxis", "]", "LMS", "=", "vector_dot", "(", "MATRIX_IPT_XYZ_TO_LMS", ",", "XYZ", ")", "# Domain and range scaling has already be handled.", "with", "domain_range_scale", "(", "'ignore'", ")", ":", "LMS_prime", "=", "np", ".", "sign", "(", "LMS", ")", "*", "np", ".", "abs", "(", "lightness_callable", "(", "LMS", ",", "e", ")", ")", "IPT_hdr", "=", "vector_dot", "(", "MATRIX_IPT_LMS_P_TO_IPT", ",", "LMS_prime", ")", "return", "from_range_100", "(", "IPT_hdr", ")" ]
https://github.com/colour-science/colour/blob/38782ac059e8ddd91939f3432bf06811c16667f0/colour/models/hdr_ipt.py#L141-L218
pyqteval/evlal_win
ce7fc77ac5cdf864cd6aa9b04b5329501f8f4c92
pyinstaller-2.0/PyInstaller/lib/altgraph/GraphAlgo.py
python
_priorityDictionary.smallest
(self)
return heap[0][1]
Find smallest item after removing deleted items from front of heap.
Find smallest item after removing deleted items from front of heap.
[ "Find", "smallest", "item", "after", "removing", "deleted", "items", "from", "front", "of", "heap", "." ]
def smallest(self):
    '''
    Find smallest item after removing deleted items from front of heap.
    '''
    if len(self) == 0:
        raise IndexError, "smallest of empty priorityDictionary"
    heap = self.__heap
    while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
        lastItem = heap.pop()
        insertionPoint = 0
        while 1:
            smallChild = 2*insertionPoint+1
            if smallChild+1 < len(heap) and heap[smallChild] > heap[smallChild+1]:
                smallChild += 1
            if smallChild >= len(heap) or lastItem <= heap[smallChild]:
                heap[insertionPoint] = lastItem
                break
            heap[insertionPoint] = heap[smallChild]
            insertionPoint = smallChild
    return heap[0][1]
[ "def", "smallest", "(", "self", ")", ":", "if", "len", "(", "self", ")", "==", "0", ":", "raise", "IndexError", ",", "\"smallest of empty priorityDictionary\"", "heap", "=", "self", ".", "__heap", "while", "heap", "[", "0", "]", "[", "1", "]", "not", "in", "self", "or", "self", "[", "heap", "[", "0", "]", "[", "1", "]", "]", "!=", "heap", "[", "0", "]", "[", "0", "]", ":", "lastItem", "=", "heap", ".", "pop", "(", ")", "insertionPoint", "=", "0", "while", "1", ":", "smallChild", "=", "2", "*", "insertionPoint", "+", "1", "if", "smallChild", "+", "1", "<", "len", "(", "heap", ")", "and", "heap", "[", "smallChild", "]", ">", "heap", "[", "smallChild", "+", "1", "]", ":", "smallChild", "+=", "1", "if", "smallChild", ">=", "len", "(", "heap", ")", "or", "lastItem", "<=", "heap", "[", "smallChild", "]", ":", "heap", "[", "insertionPoint", "]", "=", "lastItem", "break", "heap", "[", "insertionPoint", "]", "=", "heap", "[", "smallChild", "]", "insertionPoint", "=", "smallChild", "return", "heap", "[", "0", "]", "[", "1", "]" ]
https://github.com/pyqteval/evlal_win/blob/ce7fc77ac5cdf864cd6aa9b04b5329501f8f4c92/pyinstaller-2.0/PyInstaller/lib/altgraph/GraphAlgo.py#L90-L109
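The while loop above implements lazy deletion: stale heap entries are tolerated and are only cleaned up when they reach the front. The same idea is commonly written with the standard library's heapq; a sketch (the dict of current priorities is an assumed interface, chosen to mirror the record's self[...] lookups):

    import heapq

    def smallest_valid(heap, priorities):
        # Pop entries whose stored priority no longer matches the
        # authoritative mapping; the first match at the front is current.
        while heap:
            priority, item = heap[0]
            if priorities.get(item) == priority:
                return item
            heapq.heappop(heap)
        raise IndexError("smallest of empty priority queue")

    priorities = {"a": 3, "b": 1}
    heap = [(3, "a"), (2, "b"), (1, "b")]  # (2, "b") is a stale entry
    heapq.heapify(heap)
    print(smallest_valid(heap, priorities))  # -> 'b'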
openstack/keystone
771c943ad2116193e7bb118c74993c829d93bd71
keystone/resource/core.py
python
Manager.assert_domain_not_federated
(self, domain_id, domain)
Assert the Domain's name and id do not match the reserved keyword. Note that the reserved keyword is defined in the configuration file, by default, it is 'Federated', it is also case insensitive. If config's option is empty the default hardcoded value 'Federated' will be used. :raise AssertionError: if domain named match the value in the config.
Assert the Domain's name and id do not match the reserved keyword.
[ "Assert", "the", "Domain", "s", "name", "and", "id", "do", "not", "match", "the", "reserved", "keyword", "." ]
def assert_domain_not_federated(self, domain_id, domain):
    """Assert the Domain's name and id do not match the reserved keyword.

    Note that the reserved keyword is defined in the configuration file,
    by default, it is 'Federated', it is also case insensitive.
    If config's option is empty the default hardcoded value 'Federated'
    will be used.

    :raise AssertionError: if domain named match the value in the config.

    """
    # NOTE(marek-denis): We cannot create this attribute in the __init__ as
    # config values are always initialized to default value.
    federated_domain = CONF.federation.federated_domain_name.lower()

    if (domain.get('name') and domain['name'].lower() == federated_domain):
        raise AssertionError(_('Domain cannot be named %s')
                             % domain['name'])
    if (domain_id.lower() == federated_domain):
        raise AssertionError(_('Domain cannot have ID %s') % domain_id)
[ "def", "assert_domain_not_federated", "(", "self", ",", "domain_id", ",", "domain", ")", ":", "# NOTE(marek-denis): We cannot create this attribute in the __init__ as", "# config values are always initialized to default value.", "federated_domain", "=", "CONF", ".", "federation", ".", "federated_domain_name", ".", "lower", "(", ")", "if", "(", "domain", ".", "get", "(", "'name'", ")", "and", "domain", "[", "'name'", "]", ".", "lower", "(", ")", "==", "federated_domain", ")", ":", "raise", "AssertionError", "(", "_", "(", "'Domain cannot be named %s'", ")", "%", "domain", "[", "'name'", "]", ")", "if", "(", "domain_id", ".", "lower", "(", ")", "==", "federated_domain", ")", ":", "raise", "AssertionError", "(", "_", "(", "'Domain cannot have ID %s'", ")", "%", "domain_id", ")" ]
https://github.com/openstack/keystone/blob/771c943ad2116193e7bb118c74993c829d93bd71/keystone/resource/core.py#L245-L264
ebranca/owasp-pysec
163e10a146db04f40648979e8d7c0c10e7737781
pysec/alg.py
python
knp_first
(source, pattern, start=0, stop=None)
Return the index of the first occurrence of pattern in source[start:stop]
Return the index of the first occurrence of pattern in source[start:stop]
[ "Return", "the", "index", "of", "the", "first", "occurrence", "of", "pattern", "in", "source", "[", "start", ":", "stop", "]" ]
def knp_first(source, pattern, start=0, stop=None):
    """Return the index of the first occurrence of pattern in
    source[start:stop]"""
    try:
        return knp(source, pattern, start, stop).next()
    except StopIteration:
        return -1
[ "def", "knp_first", "(", "source", ",", "pattern", ",", "start", "=", "0", ",", "stop", "=", "None", ")", ":", "try", ":", "return", "knp", "(", "source", ",", "pattern", ",", "start", ",", "stop", ")", ".", "next", "(", ")", "except", "StopIteration", ":", "return", "-", "1" ]
https://github.com/ebranca/owasp-pysec/blob/163e10a146db04f40648979e8d7c0c10e7737781/pysec/alg.py#L50-L56
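The knp helper presumably runs a Knuth-Morris-Pratt scan over general sequences (that generality is an inference from context, not stated in the record). For plain str or bytes inputs, the built-in find already provides the identical contract: index of the first occurrence, or -1 when absent.

    source = "abracadabra"
    print(source.find("cad", 0, len(source)))  # -> 4
    print(source.find("xyz"))                  # -> -1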
google/grr
8ad8a4d2c5a93c92729206b7771af19d92d4f915
grr/server/grr_response_server/flows/general/collectors.py
python
ArtifactCollectorFlow._AreArtifactsKnowledgeBaseArtifacts
(self)
return True
[]
def _AreArtifactsKnowledgeBaseArtifacts(self):
    knowledgebase_list = config.CONFIG["Artifacts.knowledge_base"]
    for artifact_name in self.args.artifact_list:
        if artifact_name not in knowledgebase_list:
            return False
    return True
[ "def", "_AreArtifactsKnowledgeBaseArtifacts", "(", "self", ")", ":", "knowledgebase_list", "=", "config", ".", "CONFIG", "[", "\"Artifacts.knowledge_base\"", "]", "for", "artifact_name", "in", "self", ".", "args", ".", "artifact_list", ":", "if", "artifact_name", "not", "in", "knowledgebase_list", ":", "return", "False", "return", "True" ]
https://github.com/google/grr/blob/8ad8a4d2c5a93c92729206b7771af19d92d4f915/grr/server/grr_response_server/flows/general/collectors.py#L250-L255
maas/maas
db2f89970c640758a51247c59bf1ec6f60cf4ab5
src/maasserver/api/ssl_keys.py
python
SSLKeyHandler.delete
(self, request, id)
return rc.DELETED
@description-title Delete an SSL key @description Deletes the SSL key with the given ID. @param (int) "id" [required=true] An SSH key ID. @success (http-status-code) "204" 204 @error (http-status-code) "404" 404 @error (content) "not-found" The requested SSH key is not found. @error-example "not-found" Not Found @error (http-status-code) "403" 403 @error (content) "no-perms" The requesting user does not own the key. @error-example "no-perms" Can't delete a key you don't own.
@description-title Delete an SSL key @description Deletes the SSL key with the given ID.
[ "@description", "-", "title", "Delete", "an", "SSL", "key", "@description", "Deletes", "the", "SSL", "key", "with", "the", "given", "ID", "." ]
def delete(self, request, id):
    """@description-title Delete an SSL key
    @description Deletes the SSL key with the given ID.

    @param (int) "id" [required=true] An SSH key ID.

    @success (http-status-code) "204" 204

    @error (http-status-code) "404" 404
    @error (content) "not-found" The requested SSH key is not found.
    @error-example "not-found"
        Not Found

    @error (http-status-code) "403" 403
    @error (content) "no-perms" The requesting user does not own the key.
    @error-example "no-perms"
        Can't delete a key you don't own.
    """
    key = get_object_or_404(SSLKey, id=id)
    if key.user != request.user:
        return HttpResponseForbidden(
            "Can't delete a key you don't own.",
            content_type=(
                "text/plain; charset=%s" % settings.DEFAULT_CHARSET
            ),
        )
    key.delete()
    create_audit_event(
        EVENT_TYPES.AUTHORISATION,
        ENDPOINT.API,
        request,
        None,
        description="Deleted SSL key id='%s'." % id,
    )
    return rc.DELETED
[ "def", "delete", "(", "self", ",", "request", ",", "id", ")", ":", "key", "=", "get_object_or_404", "(", "SSLKey", ",", "id", "=", "id", ")", "if", "key", ".", "user", "!=", "request", ".", "user", ":", "return", "HttpResponseForbidden", "(", "\"Can't delete a key you don't own.\"", ",", "content_type", "=", "(", "\"text/plain; charset=%s\"", "%", "settings", ".", "DEFAULT_CHARSET", ")", ",", ")", "key", ".", "delete", "(", ")", "create_audit_event", "(", "EVENT_TYPES", ".", "AUTHORISATION", ",", "ENDPOINT", ".", "API", ",", "request", ",", "None", ",", "description", "=", "\"Deleted SSL key id='%s'.\"", "%", "id", ",", ")", "return", "rc", ".", "DELETED" ]
https://github.com/maas/maas/blob/db2f89970c640758a51247c59bf1ec6f60cf4ab5/src/maasserver/api/ssl_keys.py#L122-L156
wucng/TensorExpand
4ea58f64f5c5082b278229b799c9f679536510b7
TensorExpand/Object detection/faster rcnn/CharlesShang-TFFRCNN-master/experiments/profiling/gprof2dot.py
python
Profile.__init__
(self)
[]
def __init__(self):
    Object.__init__(self)
    self.functions = {}
    self.cycles = []
[ "def", "__init__", "(", "self", ")", ":", "Object", ".", "__init__", "(", "self", ")", "self", ".", "functions", "=", "{", "}", "self", ".", "cycles", "=", "[", "]" ]
https://github.com/wucng/TensorExpand/blob/4ea58f64f5c5082b278229b799c9f679536510b7/TensorExpand/Object detection/faster rcnn/CharlesShang-TFFRCNN-master/experiments/profiling/gprof2dot.py#L291-L294
PythonTurtle/PythonTurtle
929f10892e62ece8d5f230d3e995d380d6493cf3
pythonturtle/shelltoprocess/forkedpyshell.py
python
Shell.setLocalShell
(self)
Add 'shell' to locals as reference to ShellFacade instance.
Add 'shell' to locals as reference to ShellFacade instance.
[ "Add", "shell", "to", "locals", "as", "reference", "to", "ShellFacade", "instance", "." ]
def setLocalShell(self):
    """Add 'shell' to locals as reference to ShellFacade instance."""
    self.interp.locals['shell'] = ShellFacade(other=self)
[ "def", "setLocalShell", "(", "self", ")", ":", "self", ".", "interp", ".", "locals", "[", "'shell'", "]", "=", "ShellFacade", "(", "other", "=", "self", ")" ]
https://github.com/PythonTurtle/PythonTurtle/blob/929f10892e62ece8d5f230d3e995d380d6493cf3/pythonturtle/shelltoprocess/forkedpyshell.py#L439-L441
blinktrade/bitex
a4896e7faef9c4aa0ca5325f18b77db67003764e
libs/websocket.py
python
_SSLSocketWrapper.recv
(self, bufsize)
return self.ssl.read(bufsize)
[]
def recv(self, bufsize):
    return self.ssl.read(bufsize)
[ "def", "recv", "(", "self", ",", "bufsize", ")", ":", "return", "self", ".", "ssl", ".", "read", "(", "bufsize", ")" ]
https://github.com/blinktrade/bitex/blob/a4896e7faef9c4aa0ca5325f18b77db67003764e/libs/websocket.py#L202-L203
rajarshd/Multi-Step-Reasoning
3218d626839f7217554f38d82e00e4f460b508e4
msr/reader/model.py
python
Model.parallelize
(self)
Use data parallel to copy the model across several gpus. This will take all gpus visible with CUDA_VISIBLE_DEVICES.
Use data parallel to copy the model across several gpus. This will take all gpus visible with CUDA_VISIBLE_DEVICES.
[ "Use", "data", "parallel", "to", "copy", "the", "model", "across", "several", "gpus", ".", "This", "will", "take", "all", "gpus", "visible", "with", "CUDA_VISIBLE_DEVICES", "." ]
def parallelize(self):
    """Use data parallel to copy the model across several gpus.
    This will take all gpus visible with CUDA_VISIBLE_DEVICES.
    """
    self.parallel = True
    self.network = torch.nn.DataParallel(self.network)
[ "def", "parallelize", "(", "self", ")", ":", "self", ".", "parallel", "=", "True", "self", ".", "network", "=", "torch", ".", "nn", ".", "DataParallel", "(", "self", ".", "network", ")" ]
https://github.com/rajarshd/Multi-Step-Reasoning/blob/3218d626839f7217554f38d82e00e4f460b508e4/msr/reader/model.py#L965-L970
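A minimal stand-alone sketch of the same `DataParallel` wrapping, with a toy two-layer net standing in for `self.network`.

import torch

net = torch.nn.Sequential(torch.nn.Linear(8, 16), torch.nn.ReLU(), torch.nn.Linear(16, 2))
if torch.cuda.device_count() > 1:
    net = torch.nn.DataParallel(net).cuda()  # replicas on every GPU visible via CUDA_VISIBLE_DEVICES
x = torch.randn(4, 8).to(next(net.parameters()).device)
out = net(x)                                 # the batch dimension is split across replicas
print(out.shape)                             # torch.Size([4, 2])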
ifwe/digsby
f5fe00244744aa131e07f09348d10563f3d8fa99
digsby/devplugins/irc/irccmds.py
python
IRCCommander.raw
(self, text)
Sends raw text to the server.
Sends raw text to the server.
[ "Sends", "raw", "text", "to", "the", "server", "." ]
def raw(self, text): 'Sends raw text to the server.' self._irc.sendraw(text)
[ "def", "raw", "(", "self", ",", "text", ")", ":", "self", ".", "_irc", ".", "sendraw", "(", "text", ")" ]
https://github.com/ifwe/digsby/blob/f5fe00244744aa131e07f09348d10563f3d8fa99/digsby/devplugins/irc/irccmds.py#L64-L67
annoviko/pyclustering
bf4f51a472622292627ec8c294eb205585e50f52
pyclustering/nnet/__init__.py
python
network.set_connection
(self, i, j)
! @brief Couples two specified oscillators in the network with dynamic connections. @param[in] i (uint): index of an oscillator that should be coupled with oscillator 'j' in the network. @param[in] j (uint): index of an oscillator that should be coupled with oscillator 'i' in the network. @note This method can be used only in case of DYNAMIC connections, otherwise it throws an exception.
!
[ "!" ]
def set_connection(self, i, j): """! @brief Couples two specified oscillators in the network with dynamic connections. @param[in] i (uint): index of an oscillator that should be coupled with oscillator 'j' in the network. @param[in] j (uint): index of an oscillator that should be coupled with oscillator 'i' in the network. @note This method can be used only in case of DYNAMIC connections, otherwise it throws an exception. """ if (self.structure != conn_type.DYNAMIC): raise NameError("Connection between oscillators can be changed only in case of dynamic type."); if (self._conn_represent == conn_represent.MATRIX): self._osc_conn[i][j] = True; self._osc_conn[j][i] = True; else: self._osc_conn[i].append(j); self._osc_conn[j].append(i);
[ "def", "set_connection", "(", "self", ",", "i", ",", "j", ")", ":", "if", "(", "self", ".", "structure", "!=", "conn_type", ".", "DYNAMIC", ")", ":", "raise", "NameError", "(", "\"Connection between oscillators can be changed only in case of dynamic type.\"", ")", "if", "(", "self", ".", "_conn_represent", "==", "conn_represent", ".", "MATRIX", ")", ":", "self", ".", "_osc_conn", "[", "i", "]", "[", "j", "]", "=", "True", "self", ".", "_osc_conn", "[", "j", "]", "[", "i", "]", "=", "True", "else", ":", "self", ".", "_osc_conn", "[", "i", "]", ".", "append", "(", "j", ")", "self", ".", "_osc_conn", "[", "j", "]", ".", "append", "(", "i", ")" ]
https://github.com/annoviko/pyclustering/blob/bf4f51a472622292627ec8c294eb205585e50f52/pyclustering/nnet/__init__.py#L372-L391
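A small sketch of the two storage schemes `set_connection` updates, a boolean matrix versus per-oscillator adjacency lists, both kept symmetric.

n = 4
matrix = [[False] * n for _ in range(n)]   # O(1) lookup, O(n^2) memory
lists = [[] for _ in range(n)]             # memory proportional to the edge count

def connect(i, j):
    matrix[i][j] = matrix[j][i] = True
    lists[i].append(j)
    lists[j].append(i)

connect(0, 2)
assert matrix[2][0] and 0 in lists[2]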
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/scattergl/_unselected.py
python
Unselected.marker
(self)
return self["marker"]
The 'marker' property is an instance of Marker that may be specified as: - An instance of :class:`plotly.graph_objs.scattergl.unselected.Marker` - A dict of string/value properties that will be passed to the Marker constructor Supported dict properties: color Sets the marker color of unselected points, applied only when a selection exists. opacity Sets the marker opacity of unselected points, applied only when a selection exists. size Sets the marker size of unselected points, applied only when a selection exists. Returns ------- plotly.graph_objs.scattergl.unselected.Marker
The 'marker' property is an instance of Marker that may be specified as: - An instance of :class:`plotly.graph_objs.scattergl.unselected.Marker` - A dict of string/value properties that will be passed to the Marker constructor Supported dict properties: color Sets the marker color of unselected points, applied only when a selection exists. opacity Sets the marker opacity of unselected points, applied only when a selection exists. size Sets the marker size of unselected points, applied only when a selection exists.
[ "The", "marker", "property", "is", "an", "instance", "of", "Marker", "that", "may", "be", "specified", "as", ":", "-", "An", "instance", "of", ":", "class", ":", "plotly", ".", "graph_objs", ".", "scattergl", ".", "unselected", ".", "Marker", "-", "A", "dict", "of", "string", "/", "value", "properties", "that", "will", "be", "passed", "to", "the", "Marker", "constructor", "Supported", "dict", "properties", ":", "color", "Sets", "the", "marker", "color", "of", "unselected", "points", "applied", "only", "when", "a", "selection", "exists", ".", "opacity", "Sets", "the", "marker", "opacity", "of", "unselected", "points", "applied", "only", "when", "a", "selection", "exists", ".", "size", "Sets", "the", "marker", "size", "of", "unselected", "points", "applied", "only", "when", "a", "selection", "exists", "." ]
def marker(self): """ The 'marker' property is an instance of Marker that may be specified as: - An instance of :class:`plotly.graph_objs.scattergl.unselected.Marker` - A dict of string/value properties that will be passed to the Marker constructor Supported dict properties: color Sets the marker color of unselected points, applied only when a selection exists. opacity Sets the marker opacity of unselected points, applied only when a selection exists. size Sets the marker size of unselected points, applied only when a selection exists. Returns ------- plotly.graph_objs.scattergl.unselected.Marker """ return self["marker"]
[ "def", "marker", "(", "self", ")", ":", "return", "self", "[", "\"marker\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/scattergl/_unselected.py#L16-L40
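A hedged usage sketch for the property documented above: dim the points outside a selection on a scattergl trace; the data values are arbitrary.

import plotly.graph_objects as go

fig = go.Figure(go.Scattergl(
    x=[1, 2, 3, 4], y=[2, 1, 3, 2], mode="markers",
    selectedpoints=[1, 2],  # indices treated as the active selection
    unselected=dict(marker=dict(opacity=0.2, color="gray", size=6)),
))
fig.write_html("unselected_demo.html")  # or fig.show()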
YingZhangDUT/Deep-Mutual-Learning
34a20583debe4e0dab1d9856db69bed278c5c011
datasets/convert_to_tfrecords.py
python
run
(image_dir, output_dir, split_name)
Convert images to tfrecords. Args: image_dir: The image directory where the raw images are stored. output_dir: The directory where the lists and tfrecords are stored. split_name: The split name of dataset.
Convert images to tfrecords. Args: image_dir: The image directory where the raw images are stored. output_dir: The directory where the lists and tfrecords are stored. split_name: The split name of dataset.
[ "Convert", "images", "to", "tfrecords", ".", "Args", ":", "image_dir", ":", "The", "image", "directory", "where", "the", "raw", "images", "are", "stored", ".", "output_dir", ":", "The", "directory", "where", "the", "lists", "and", "tfrecords", "are", "stored", ".", "split_name", ":", "The", "split", "name", "of", "dataset", "." ]
def run(image_dir, output_dir, split_name): """Convert images to tfrecords. Args: image_dir: The image directory where the raw images are stored. output_dir: The directory where the lists and tfrecords are stored. split_name: The split name of dataset. """ list_filename = os.path.join(output_dir, '%s.txt' % split_name) tf_filename = os.path.join(output_dir, '%s.tfrecord' % split_name) if tf.gfile.Exists(tf_filename): print('Dataset files already exist. Exiting without re-creating them.') return with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer: _add_to_tfrecord(image_dir, list_filename, tfrecord_writer, split_name) print(" Done! \n")
[ "def", "run", "(", "image_dir", ",", "output_dir", ",", "split_name", ")", ":", "list_filename", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'%s.txt'", "%", "split_name", ")", "tf_filename", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'%s.tfrecord'", "%", "split_name", ")", "if", "tf", ".", "gfile", ".", "Exists", "(", "tf_filename", ")", ":", "print", "(", "'Dataset files already exist. Exiting without re-creating them.'", ")", "return", "with", "tf", ".", "python_io", ".", "TFRecordWriter", "(", "tf_filename", ")", "as", "tfrecord_writer", ":", "_add_to_tfrecord", "(", "image_dir", ",", "list_filename", ",", "tfrecord_writer", ",", "split_name", ")", "print", "(", "\" Done! \\n\"", ")" ]
https://github.com/YingZhangDUT/Deep-Mutual-Learning/blob/34a20583debe4e0dab1d9856db69bed278c5c011/datasets/convert_to_tfrecords.py#L50-L67
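A minimal sketch of the same idempotent-write pattern, assuming TF2, where `tf.io.TFRecordWriter` replaces the `tf.python_io` API used above.

import os
import tensorflow as tf

def write_split(output_dir, split_name, serialized_examples):
    tf_filename = os.path.join(output_dir, "%s.tfrecord" % split_name)
    if os.path.exists(tf_filename):
        print("Dataset file already exists. Exiting without re-creating it.")
        return
    with tf.io.TFRecordWriter(tf_filename) as writer:
        for example in serialized_examples:   # each item is a serialized tf.train.Example
            writer.write(example)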
nate-parrott/Flashlight
c3a7c7278a1cccf8918e7543faffc68e863ff5ab
flashlightplugins/cloudstorage/common.py
python
posix_time_to_http
(posix_time)
Convert posix time to HTTP header time format. Args: posix_time: unix time. Returns: A datetime str in RFC 2616 format.
Convert posix time to HTTP header time format.
[ "Convert", "posix", "time", "to", "HTTP", "header", "time", "format", "." ]
def posix_time_to_http(posix_time): """Convert posix time to HTTP header time format. Args: posix_time: unix time. Returns: A datetime str in RFC 2616 format. """ if posix_time: return email_utils.formatdate(posix_time, usegmt=True)
[ "def", "posix_time_to_http", "(", "posix_time", ")", ":", "if", "posix_time", ":", "return", "email_utils", ".", "formatdate", "(", "posix_time", ",", "usegmt", "=", "True", ")" ]
https://github.com/nate-parrott/Flashlight/blob/c3a7c7278a1cccf8918e7543faffc68e863ff5ab/flashlightplugins/cloudstorage/common.py#L331-L341
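A stdlib round-trip sketch for the conversion above; `formatdate(..., usegmt=True)` yields the RFC 2616 date form, and `parsedate_to_datetime` reads it back.

from email.utils import formatdate, parsedate_to_datetime

stamp = 1234567890.0
http_date = formatdate(stamp, usegmt=True)   # 'Fri, 13 Feb 2009 23:31:30 GMT'
assert parsedate_to_datetime(http_date).timestamp() == stamp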
sagemath/sagecell
f654176956af8ebd83a0769b916116d600785398
namespace.py
python
InstrumentedNamespace.__init__
(self, *args, **kwargs)
Set up a namespace id
Set up a namespace id
[ "Set", "up", "a", "namespace", "id" ]
def __init__(self, *args, **kwargs): """ Set up a namespace id """ dict.__init__(self,*args,**kwargs) self.events = defaultdict(lambda: defaultdict(list))
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "dict", ".", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "events", "=", "defaultdict", "(", "lambda", ":", "defaultdict", "(", "list", ")", ")" ]
https://github.com/sagemath/sagecell/blob/f654176956af8ebd83a0769b916116d600785398/namespace.py#L5-L10
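A small sketch of the nested-defaultdict shape the constructor builds: `events[name][key]` is always an appendable list, with no prior setup.

from collections import defaultdict

events = defaultdict(lambda: defaultdict(list))
events["get"]["x"].append(1)   # both dictionary levels spring into existence
events["get"]["x"].append(2)
assert events["get"]["x"] == [1, 2]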
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/core/numbers.py
python
NaN.__mul__
(self, other)
return self
[]
def __mul__(self, other): return self
[ "def", "__mul__", "(", "self", ",", "other", ")", ":", "return", "self" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/core/numbers.py#L2524-L2525
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/whoosh/util/varints.py
python
varint_to_int
(vi)
return i
[]
def varint_to_int(vi): b = ord(vi[0]) p = 1 i = b & 0x7f shift = 7 while b & 0x80 != 0: b = ord(vi[p]) p += 1 i |= (b & 0x7F) << shift shift += 7 return i
[ "def", "varint_to_int", "(", "vi", ")", ":", "b", "=", "ord", "(", "vi", "[", "0", "]", ")", "p", "=", "1", "i", "=", "b", "&", "0x7f", "shift", "=", "7", "while", "b", "&", "0x80", "!=", "0", ":", "b", "=", "ord", "(", "vi", "[", "p", "]", ")", "p", "+=", "1", "i", "|=", "(", "b", "&", "0x7F", ")", "<<", "shift", "shift", "+=", "7", "return", "i" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/whoosh/util/varints.py#L63-L73
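A Python 3 companion sketch for the decoder above (whose `ord()` calls assume Python 2 strings): a matching encoder and a bytes-based decoder, little-endian base-128 with 0x80 as the continuation bit.

def int_to_varint(i):
    out = bytearray()
    while True:
        b = i & 0x7F
        i >>= 7
        if i:
            out.append(b | 0x80)   # more bytes follow
        else:
            out.append(b)
            return bytes(out)

def varint_to_int(vi):             # same algorithm as above, on Python 3 bytes
    i, shift, p = vi[0] & 0x7F, 7, 1
    while vi[p - 1] & 0x80:
        i |= (vi[p] & 0x7F) << shift
        shift += 7
        p += 1
    return i

assert int_to_varint(300) == b'\xac\x02' and varint_to_int(b'\xac\x02') == 300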
oracle/oci-python-sdk
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
src/oci/core/virtual_network_client.py
python
VirtualNetworkClient.remove_drg_route_distribution_statements
(self, drg_route_distribution_id, remove_drg_route_distribution_statements_details, **kwargs)
Removes one or more route distribution statements from the specified route distribution's map. :param str drg_route_distribution_id: (required) The `OCID`__ of the route distribution. __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm :param oci.core.models.RemoveDrgRouteDistributionStatementsDetails remove_drg_route_distribution_statements_details: (required) Request with one or more route distribution statements to remove from the route distribution. :param obj retry_strategy: (optional) A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level. This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__. To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`. :return: A :class:`~oci.response.Response` object with data of type None :rtype: :class:`~oci.response.Response` :example: Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/core/remove_drg_route_distribution_statements.py.html>`__ to see an example of how to use remove_drg_route_distribution_statements API.
Removes one or more route distribution statements from the specified route distribution's map.
[ "Removes", "one", "or", "more", "route", "distribution", "statements", "from", "the", "specified", "route", "distribution", "s", "map", "." ]
def remove_drg_route_distribution_statements(self, drg_route_distribution_id, remove_drg_route_distribution_statements_details, **kwargs): """ Removes one or more route distribution statements from the specified route distribution's map. :param str drg_route_distribution_id: (required) The `OCID`__ of the route distribution. __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm :param oci.core.models.RemoveDrgRouteDistributionStatementsDetails remove_drg_route_distribution_statements_details: (required) Request with one or more route distribution statements to remove from the route distribution. :param obj retry_strategy: (optional) A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level. This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__. To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`. :return: A :class:`~oci.response.Response` object with data of type None :rtype: :class:`~oci.response.Response` :example: Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/core/remove_drg_route_distribution_statements.py.html>`__ to see an example of how to use remove_drg_route_distribution_statements API. """ resource_path = "/drgRouteDistributions/{drgRouteDistributionId}/actions/removeDrgRouteDistributionStatements" method = "POST" expected_kwargs = ["retry_strategy"] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "remove_drg_route_distribution_statements got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "drgRouteDistributionId": drg_route_distribution_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.base_client.get_preferred_retry_strategy( operation_retry_strategy=kwargs.get('retry_strategy'), client_retry_strategy=self.retry_strategy ) if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_client_retries_header(header_params) retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=remove_drg_route_distribution_statements_details) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=remove_drg_route_distribution_statements_details)
[ "def", "remove_drg_route_distribution_statements", "(", "self", ",", "drg_route_distribution_id", ",", "remove_drg_route_distribution_statements_details", ",", "*", "*", "kwargs", ")", ":", "resource_path", "=", "\"/drgRouteDistributions/{drgRouteDistributionId}/actions/removeDrgRouteDistributionStatements\"", "method", "=", "\"POST\"", "expected_kwargs", "=", "[", "\"retry_strategy\"", "]", "extra_kwargs", "=", "[", "_key", "for", "_key", "in", "six", ".", "iterkeys", "(", "kwargs", ")", "if", "_key", "not", "in", "expected_kwargs", "]", "if", "extra_kwargs", ":", "raise", "ValueError", "(", "\"remove_drg_route_distribution_statements got unknown kwargs: {!r}\"", ".", "format", "(", "extra_kwargs", ")", ")", "path_params", "=", "{", "\"drgRouteDistributionId\"", ":", "drg_route_distribution_id", "}", "path_params", "=", "{", "k", ":", "v", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "path_params", ")", "if", "v", "is", "not", "missing", "}", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "path_params", ")", ":", "if", "v", "is", "None", "or", "(", "isinstance", "(", "v", ",", "six", ".", "string_types", ")", "and", "len", "(", "v", ".", "strip", "(", ")", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "'Parameter {} cannot be None, whitespace or empty string'", ".", "format", "(", "k", ")", ")", "header_params", "=", "{", "\"accept\"", ":", "\"application/json\"", ",", "\"content-type\"", ":", "\"application/json\"", "}", "retry_strategy", "=", "self", ".", "base_client", ".", "get_preferred_retry_strategy", "(", "operation_retry_strategy", "=", "kwargs", ".", "get", "(", "'retry_strategy'", ")", ",", "client_retry_strategy", "=", "self", ".", "retry_strategy", ")", "if", "retry_strategy", ":", "if", "not", "isinstance", "(", "retry_strategy", ",", "retry", ".", "NoneRetryStrategy", ")", ":", "self", ".", "base_client", ".", "add_opc_client_retries_header", "(", "header_params", ")", "retry_strategy", ".", "add_circuit_breaker_callback", "(", "self", ".", "circuit_breaker_callback", ")", "return", "retry_strategy", ".", "make_retrying_call", "(", "self", ".", "base_client", ".", "call_api", ",", "resource_path", "=", "resource_path", ",", "method", "=", "method", ",", "path_params", "=", "path_params", ",", "header_params", "=", "header_params", ",", "body", "=", "remove_drg_route_distribution_statements_details", ")", "else", ":", "return", "self", ".", "base_client", ".", "call_api", "(", "resource_path", "=", "resource_path", ",", "method", "=", "method", ",", "path_params", "=", "path_params", ",", "header_params", "=", "header_params", ",", "body", "=", "remove_drg_route_distribution_statements_details", ")" ]
https://github.com/oracle/oci-python-sdk/blob/3c1604e4e212008fb6718e2f68cdb5ef71fd5793/src/oci/core/virtual_network_client.py#L17750-L17823
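A hedged call sketch for the operation above; the config-file default, the OCIDs, and the `statement_ids` field name follow the usual OCI SDK pattern and are assumptions, not taken from the record.

import oci

config = oci.config.from_file()                # reads ~/.oci/config by default
vnc = oci.core.VirtualNetworkClient(config)
details = oci.core.models.RemoveDrgRouteDistributionStatementsDetails(
    statement_ids=["<statement-ocid>"])        # assumed field name
vnc.remove_drg_route_distribution_statements(
    "<drg-route-distribution-ocid>", details)  # Response with data=None on success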
yzhao062/combo
229d578de498b47ae03cf2580472aceebf8c2766
combo/models/base.py
python
BaseAggregator._detector_predict_proba
(self, X, proba_method='linear')
Predict the probability of a sample being outlier. Two approaches are possible: 1. simply use Min-max conversion to linearly transform the outlier scores into the range of [0,1]. The model must be fitted first. 2. use unifying scores, see :cite:`kriegel2011interpreting`. Parameters ---------- X : numpy array of shape (n_samples, n_features) The input samples. proba_method : str, optional (default='linear') Probability conversion method. It must be one of 'linear' or 'unify'. Returns ------- outlier_probability : numpy array of shape (n_samples, n_classes) For each observation, the probability that it is an outlier according to the fitted model, ranging in [0,1].
Predict the probability of a sample being outlier. Two approaches are possible:
[ "Predict", "the", "probability", "of", "a", "sample", "being", "outlier", ".", "Two", "approaches", "are", "possible", ":" ]
def _detector_predict_proba(self, X, proba_method='linear'): """Predict the probability of a sample being outlier. Two approaches are possible: 1. simply use Min-max conversion to linearly transform the outlier scores into the range of [0,1]. The model must be fitted first. 2. use unifying scores, see :cite:`kriegel2011interpreting`. Parameters ---------- X : numpy array of shape (n_samples, n_features) The input samples. proba_method : str, optional (default='linear') Probability conversion method. It must be one of 'linear' or 'unify'. Returns ------- outlier_probability : numpy array of shape (n_samples, n_classes) For each observation, the probability that it is an outlier according to the fitted model, ranging in [0,1]. """ check_is_fitted(self, ['decision_scores_', 'threshold_', 'labels_']) train_scores = self.decision_scores_ test_scores = self.decision_function(X) probs = np.zeros([X.shape[0], int(self._classes)]) if proba_method == 'linear': scaler = MinMaxScaler().fit(train_scores.reshape(-1, 1)) probs[:, 1] = scaler.transform( test_scores.reshape(-1, 1)).ravel().clip(0, 1) probs[:, 0] = 1 - probs[:, 1] return probs elif proba_method == 'unify': # turn output into probability pre_erf_score = (test_scores - self._mu) / ( self._sigma * np.sqrt(2)) erf_score = erf(pre_erf_score) probs[:, 1] = erf_score.clip(0, 1).ravel() probs[:, 0] = 1 - probs[:, 1] return probs else: raise ValueError(proba_method, 'is not a valid probability conversion method')
[ "def", "_detector_predict_proba", "(", "self", ",", "X", ",", "proba_method", "=", "'linear'", ")", ":", "check_is_fitted", "(", "self", ",", "[", "'decision_scores_'", ",", "'threshold_'", ",", "'labels_'", "]", ")", "train_scores", "=", "self", ".", "decision_scores_", "test_scores", "=", "self", ".", "decision_function", "(", "X", ")", "probs", "=", "np", ".", "zeros", "(", "[", "X", ".", "shape", "[", "0", "]", ",", "int", "(", "self", ".", "_classes", ")", "]", ")", "if", "proba_method", "==", "'linear'", ":", "scaler", "=", "MinMaxScaler", "(", ")", ".", "fit", "(", "train_scores", ".", "reshape", "(", "-", "1", ",", "1", ")", ")", "probs", "[", ":", ",", "1", "]", "=", "scaler", ".", "transform", "(", "test_scores", ".", "reshape", "(", "-", "1", ",", "1", ")", ")", ".", "ravel", "(", ")", ".", "clip", "(", "0", ",", "1", ")", "probs", "[", ":", ",", "0", "]", "=", "1", "-", "probs", "[", ":", ",", "1", "]", "return", "probs", "elif", "proba_method", "==", "'unify'", ":", "# turn output into probability", "pre_erf_score", "=", "(", "test_scores", "-", "self", ".", "_mu", ")", "/", "(", "self", ".", "_sigma", "*", "np", ".", "sqrt", "(", "2", ")", ")", "erf_score", "=", "erf", "(", "pre_erf_score", ")", "probs", "[", ":", ",", "1", "]", "=", "erf_score", ".", "clip", "(", "0", ",", "1", ")", ".", "ravel", "(", ")", "probs", "[", ":", ",", "0", "]", "=", "1", "-", "probs", "[", ":", ",", "1", "]", "return", "probs", "else", ":", "raise", "ValueError", "(", "proba_method", ",", "'is not a valid probability conversion method'", ")" ]
https://github.com/yzhao062/combo/blob/229d578de498b47ae03cf2580472aceebf8c2766/combo/models/base.py#L165-L215
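A standalone numpy/scipy sketch of the two score-to-probability conversions described above; it mirrors the logic rather than calling the library.

import numpy as np
from scipy.special import erf

train = np.array([0.1, 0.4, 0.8, 1.2])   # decision_scores_ from fitting
test = np.array([0.0, 0.6, 1.5])         # decision_function(X) on new data

# 'linear': min-max fitted on the training scores, clipped to [0, 1]
linear = np.clip((test - train.min()) / (train.max() - train.min()), 0, 1)

# 'unify': Gaussian scaling then the error function (Kriegel et al., 2011)
unify = np.clip(erf((test - train.mean()) / (train.std() * np.sqrt(2))), 0, 1)

probs = np.column_stack([1 - linear, linear])   # column 1 = outlier probability
print(probs, unify)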
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/min/site.py
python
setcopyright
()
Set 'copyright' and 'credits' in builtins
Set 'copyright' and 'credits' in builtins
[ "Set", "copyright", "and", "credits", "in", "builtins" ]
def setcopyright(): """Set 'copyright' and 'credits' in builtins""" builtins.copyright = _sitebuiltins._Printer("copyright", sys.copyright) if sys.platform[:4] == 'java': builtins.credits = _sitebuiltins._Printer( "credits", "Jython is maintained by the Jython developers (www.jython.org).") else: builtins.credits = _sitebuiltins._Printer("credits", """\ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands for supporting Python development. See www.python.org for more information.""") files, dirs = [], [] # Not all modules are required to have a __file__ attribute. See # PEP 420 for more details. if hasattr(os, '__file__'): here = os.path.dirname(os.__file__) files.extend(["LICENSE.txt", "LICENSE"]) dirs.extend([os.path.join(here, os.pardir), here, os.curdir]) builtins.license = _sitebuiltins._Printer( "license", "See https://www.python.org/psf/license/", files, dirs)
[ "def", "setcopyright", "(", ")", ":", "builtins", ".", "copyright", "=", "_sitebuiltins", ".", "_Printer", "(", "\"copyright\"", ",", "sys", ".", "copyright", ")", "if", "sys", ".", "platform", "[", ":", "4", "]", "==", "'java'", ":", "builtins", ".", "credits", "=", "_sitebuiltins", ".", "_Printer", "(", "\"credits\"", ",", "\"Jython is maintained by the Jython developers (www.jython.org).\"", ")", "else", ":", "builtins", ".", "credits", "=", "_sitebuiltins", ".", "_Printer", "(", "\"credits\"", ",", "\"\"\"\\\n Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\n for supporting Python development. See www.python.org for more information.\"\"\"", ")", "files", ",", "dirs", "=", "[", "]", ",", "[", "]", "# Not all modules are required to have a __file__ attribute. See", "# PEP 420 for more details.", "if", "hasattr", "(", "os", ",", "'__file__'", ")", ":", "here", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "__file__", ")", "files", ".", "extend", "(", "[", "\"LICENSE.txt\"", ",", "\"LICENSE\"", "]", ")", "dirs", ".", "extend", "(", "[", "os", ".", "path", ".", "join", "(", "here", ",", "os", ".", "pardir", ")", ",", "here", ",", "os", ".", "curdir", "]", ")", "builtins", ".", "license", "=", "_sitebuiltins", ".", "_Printer", "(", "\"license\"", ",", "\"See https://www.python.org/psf/license/\"", ",", "files", ",", "dirs", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/min/site.py#L407-L428
benoitc/couchdbkit
6be148640c00b54ee87a2f2d502e9d67fa5b45a8
couchdbkit/external.py
python
External.send_response
(self, code=200, body="", headers={})
[]
def send_response(self, code=200, body="", headers={}): resp = { 'code': code, 'body': body, 'headers': headers } self.write(json.dumps(resp))
[ "def", "send_response", "(", "self", ",", "code", "=", "200", ",", "body", "=", "\"\"", ",", "headers", "=", "{", "}", ")", ":", "resp", "=", "{", "'code'", ":", "code", ",", "'body'", ":", "body", ",", "'headers'", ":", "headers", "}", "self", ".", "write", "(", "json", ".", "dumps", "(", "resp", ")", ")" ]
https://github.com/benoitc/couchdbkit/blob/6be148640c00b54ee87a2f2d502e9d67fa5b45a8/couchdbkit/external.py#L52-L58
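The externals response shape the method above emits, sketched with the stdlib only: one JSON object per response on stdout.

import json
import sys

resp = {"code": 200, "body": "ok", "headers": {"Content-Type": "text/plain"}}
sys.stdout.write(json.dumps(resp) + "\n")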
cuthbertLab/music21
bd30d4663e52955ed922c10fdf541419d8c67671
music21/abcFormat/__init__.py
python
ABCMetadata.getDefaultQuarterLength
(self)
r''' If there is a quarter length representation available, return it as a floating point value >>> am = abcFormat.ABCMetadata('L:1/2') >>> am.preParse() >>> am.getDefaultQuarterLength() 2.0 >>> am = abcFormat.ABCMetadata('L:1/8') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 >>> am = abcFormat.ABCMetadata('M:C|') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 If taking from meter, find the "fraction" and if < 0.75 use sixteenth notes. If >= 0.75 use eighth notes. >>> am = abcFormat.ABCMetadata('M:2/4') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.25 >>> am = abcFormat.ABCMetadata('M:3/4') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 >>> am = abcFormat.ABCMetadata('M:6/8') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 Meter is only used for default length if there is no L: >>> x = 'L:1/4\nM:3/4\n\nf' >>> sc = converter.parse(x, format='abc') >>> sc.recurse().notes.first().duration.type 'quarter'
r''' If there is a quarter length representation available, return it as a floating point value
[ "r", "If", "there", "is", "a", "quarter", "length", "representation", "available", "return", "it", "as", "a", "floating", "point", "value" ]
def getDefaultQuarterLength(self) -> float: r''' If there is a quarter length representation available, return it as a floating point value >>> am = abcFormat.ABCMetadata('L:1/2') >>> am.preParse() >>> am.getDefaultQuarterLength() 2.0 >>> am = abcFormat.ABCMetadata('L:1/8') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 >>> am = abcFormat.ABCMetadata('M:C|') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 If taking from meter, find the "fraction" and if < 0.75 use sixteenth notes. If >= 0.75 use eighth notes. >>> am = abcFormat.ABCMetadata('M:2/4') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.25 >>> am = abcFormat.ABCMetadata('M:3/4') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 >>> am = abcFormat.ABCMetadata('M:6/8') >>> am.preParse() >>> am.getDefaultQuarterLength() 0.5 Meter is only used for default length if there is no L: >>> x = 'L:1/4\nM:3/4\n\nf' >>> sc = converter.parse(x, format='abc') >>> sc.recurse().notes.first().duration.type 'quarter' ''' # environLocal.printDebug(['getDefaultQuarterLength', self.data]) if self.isDefaultNoteLength() and '/' in self.data: # should be in L:1/4 form n, d = self.data.split('/') n = int(n.strip()) # the notation L: 1/G is found in some essen files # this is extremely uncommon and might be an error if d == 'G': d = 4 # assume a default else: d = int(d.strip()) # 1/4 is 1, 1/8 is 0.5 return n * 4 / d elif self.isMeter(): # if meter auto-set a default note length parameters = self.getTimeSignatureParameters() if parameters is None: return 0.5 # TODO: assume default, need to configure n, d, unused_symbol = parameters if n / d < 0.75: return 0.25 # less than 0.75 the default is a sixteenth note else: return 0.5 # otherwise it is an eighth note else: # pragma: no cover raise ABCTokenException( f'no quarter length associated with this metadata: {self.data}')
[ "def", "getDefaultQuarterLength", "(", "self", ")", "->", "float", ":", "# environLocal.printDebug(['getDefaultQuarterLength', self.data])", "if", "self", ".", "isDefaultNoteLength", "(", ")", "and", "'/'", "in", "self", ".", "data", ":", "# should be in L:1/4 form", "n", ",", "d", "=", "self", ".", "data", ".", "split", "(", "'/'", ")", "n", "=", "int", "(", "n", ".", "strip", "(", ")", ")", "# the notation L: 1/G is found in some essen files", "# this is extremely uncommon and might be an error", "if", "d", "==", "'G'", ":", "d", "=", "4", "# assume a default", "else", ":", "d", "=", "int", "(", "d", ".", "strip", "(", ")", ")", "# 1/4 is 1, 1/8 is 0.5", "return", "n", "*", "4", "/", "d", "elif", "self", ".", "isMeter", "(", ")", ":", "# if meter auto-set a default not length", "parameters", "=", "self", ".", "getTimeSignatureParameters", "(", ")", "if", "parameters", "is", "None", ":", "return", "0.5", "# TODO: assume default, need to configure", "n", ",", "d", ",", "unused_symbol", "=", "parameters", "if", "n", "/", "d", "<", "0.75", ":", "return", "0.25", "# less than 0.75 the default is a sixteenth note", "else", ":", "return", "0.5", "# otherwise it is an eighth note", "else", ":", "# pragma: no cover", "raise", "ABCTokenException", "(", "f'no quarter length associated with this metadata: {self.data}'", ")" ]
https://github.com/cuthbertLab/music21/blob/bd30d4663e52955ed922c10fdf541419d8c67671/music21/abcFormat/__init__.py#L677-L750
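The arithmetic above in isolation, as a sketch that handles only plain `L:n/d` and `M:n/d` fields (no `C|` or `1/G` special cases).

def default_quarter_length(field):
    tag, value = field.split(":")
    n, d = (int(x) for x in value.split("/"))
    if tag == "L":
        return n * 4 / d                     # 1/4 -> 1.0, 1/8 -> 0.5
    return 0.25 if n / d < 0.75 else 0.5     # meter heuristic

assert default_quarter_length("L:1/8") == 0.5
assert default_quarter_length("M:2/4") == 0.25   # 0.5 < 0.75 -> sixteenths
assert default_quarter_length("M:6/8") == 0.5    # 0.75 -> eighths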
great-expectations/great_expectations
45224cb890aeae725af25905923d0dbbab2d969d
great_expectations/render/renderer/notebook_renderer.py
python
BaseNotebookRenderer.render_to_disk
( self, notebook_file_path: str, )
Render a notebook to disk from arguments
Render a notebook to disk from arguments
[ "Render", "a", "notebook", "to", "disk", "from", "arguments" ]
def render_to_disk( self, notebook_file_path: str, ) -> None: """ Render a notebook to disk from arguments """ raise NotImplementedError
[ "def", "render_to_disk", "(", "self", ",", "notebook_file_path", ":", "str", ",", ")", "->", "None", ":", "raise", "NotImplementedError" ]
https://github.com/great-expectations/great_expectations/blob/45224cb890aeae725af25905923d0dbbab2d969d/great_expectations/render/renderer/notebook_renderer.py#L77-L84
Net-ng/kansha
85b5816da126b1c7098707c98f217d8b2e524ff2
kansha/board/comp.py
python
Board.get_available_user_ids
(self)
return set(dbm.user.id for dbm in self.data.board_members)
Return list of members Return: - list of members
Return list of members
[ "Return", "list", "of", "members" ]
def get_available_user_ids(self): """Return list of members Return: - list of members """ return set(dbm.user.id for dbm in self.data.board_members)
[ "def", "get_available_user_ids", "(", "self", ")", ":", "return", "set", "(", "dbm", ".", "user", ".", "id", "for", "dbm", "in", "self", ".", "data", ".", "board_members", ")" ]
https://github.com/Net-ng/kansha/blob/85b5816da126b1c7098707c98f217d8b2e524ff2/kansha/board/comp.py#L748-L754
F8LEFT/DecLLVM
d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c
python/idaapi.py
python
lvar_t.is_floating_var
(self, *args)
return _idaapi.lvar_t_is_floating_var(self, *args)
is_floating_var(self) -> bool
is_floating_var(self) -> bool
[ "is_floating_var", "(", "self", ")", "-", ">", "bool" ]
def is_floating_var(self, *args): """ is_floating_var(self) -> bool """ return _idaapi.lvar_t_is_floating_var(self, *args)
[ "def", "is_floating_var", "(", "self", ",", "*", "args", ")", ":", "return", "_idaapi", ".", "lvar_t_is_floating_var", "(", "self", ",", "*", "args", ")" ]
https://github.com/F8LEFT/DecLLVM/blob/d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c/python/idaapi.py#L35928-L35932
Arelle/Arelle
20f3d8a8afd41668e1520799acd333349ce0ba17
arelle/TkTableWrapper.py
python
Table.spans
(self, index=None, **kwargs)
Manipulate row/col spans. When called with no arguments, all known spans are returned as a dict. When called with only the index, the span for that index only is returned, if any. Otherwise kwargs is assumed to contain key/value pairs used to set spans. A span starts at the row,col defined by a key and continues for the number of rows,cols specified by its value. A span of 0,0 unsets any span on that cell.
Manipulate row/col spans.
[ "Manipulate", "row", "/", "col", "spans", "." ]
def spans(self, index=None, **kwargs): """Manipulate row/col spans. When called with no arguments, all known spans are returned as a dict. When called with only the index, the span for that index only is returned, if any. Otherwise kwargs is assumed to contain key/value pairs used to set spans. A span starts at the row,col defined by a key and continues for the number of rows,cols specified by its value. A span of 0,0 unsets any span on that cell.""" if kwargs: args = tkinter._flatten(list(kwargs.items())) self.tk.call(self._w, 'spans', *args) else: return self.tk.call(self._w, 'spans', index)
[ "def", "spans", "(", "self", ",", "index", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ":", "args", "=", "tkinter", ".", "_flatten", "(", "list", "(", "kwargs", ".", "items", "(", ")", ")", ")", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'spans'", ",", "*", "args", ")", "else", ":", "return", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'spans'", ",", "index", ")" ]
https://github.com/Arelle/Arelle/blob/20f3d8a8afd41668e1520799acd333349ce0ba17/arelle/TkTableWrapper.py#L374-L387
nopernik/mpDNS
b17dc39e7068406df82cb3431b3042e74e520cf9
dnslib/dns.py
python
DNSQuestion.__ne__
(self,other)
return not(self.__eq__(other))
[]
def __ne__(self,other): return not(self.__eq__(other))
[ "def", "__ne__", "(", "self", ",", "other", ")", ":", "return", "not", "(", "self", ".", "__eq__", "(", "other", ")", ")" ]
https://github.com/nopernik/mpDNS/blob/b17dc39e7068406df82cb3431b3042e74e520cf9/dnslib/dns.py#L678-L679
zachwill/flask-engine
7c8ad4bfe36382a8c9286d873ec7b785715832a4
libs/pkg_resources.py
python
Environment.add
(self,dist)
Add `dist` if we ``can_add()`` it and it isn't already added
Add `dist` if we ``can_add()`` it and it isn't already added
[ "Add", "dist", "if", "we", "can_add", "()", "it", "and", "it", "isn", "t", "already", "added" ]
def add(self,dist): """Add `dist` if we ``can_add()`` it and it isn't already added""" if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key,[]) if dist not in dists: dists.append(dist) if dist.key in self._cache: _sort_dists(self._cache[dist.key])
[ "def", "add", "(", "self", ",", "dist", ")", ":", "if", "self", ".", "can_add", "(", "dist", ")", "and", "dist", ".", "has_version", "(", ")", ":", "dists", "=", "self", ".", "_distmap", ".", "setdefault", "(", "dist", ".", "key", ",", "[", "]", ")", "if", "dist", "not", "in", "dists", ":", "dists", ".", "append", "(", "dist", ")", "if", "dist", ".", "key", "in", "self", ".", "_cache", ":", "_sort_dists", "(", "self", ".", "_cache", "[", "dist", ".", "key", "]", ")" ]
https://github.com/zachwill/flask-engine/blob/7c8ad4bfe36382a8c9286d873ec7b785715832a4/libs/pkg_resources.py#L751-L758
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/admissionregistration_v1_webhook_client_config.py
python
AdmissionregistrationV1WebhookClientConfig.service
(self, service)
Sets the service of this AdmissionregistrationV1WebhookClientConfig. :param service: The service of this AdmissionregistrationV1WebhookClientConfig. # noqa: E501 :type: AdmissionregistrationV1ServiceReference
Sets the service of this AdmissionregistrationV1WebhookClientConfig.
[ "Sets", "the", "service", "of", "this", "AdmissionregistrationV1WebhookClientConfig", "." ]
def service(self, service): """Sets the service of this AdmissionregistrationV1WebhookClientConfig. :param service: The service of this AdmissionregistrationV1WebhookClientConfig. # noqa: E501 :type: AdmissionregistrationV1ServiceReference """ self._service = service
[ "def", "service", "(", "self", ",", "service", ")", ":", "self", ".", "_service", "=", "service" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/admissionregistration_v1_webhook_client_config.py#L102-L110
nltk/nltk
3f74ac55681667d7ef78b664557487145f51eb02
nltk/corpus/reader/propbank.py
python
PropbankInstance.predid
(self)
return "rel"
Identifier of the predicate.
Identifier of the predicate.
[ "Identifier", "of", "the", "predicate", "." ]
def predid(self): """Identifier of the predicate.""" return "rel"
[ "def", "predid", "(", "self", ")", ":", "return", "\"rel\"" ]
https://github.com/nltk/nltk/blob/3f74ac55681667d7ef78b664557487145f51eb02/nltk/corpus/reader/propbank.py#L232-L234
bisohns/search-engine-parser
ede1355a1f63398d9217b8e502fbd6c52b53bf09
search_engine_parser/core/engines/stackoverflow.py
python
Search.parse_single_result
(self, single_result, return_type=ReturnType.FULL, **kwargs)
return rdict
Parses the source code to return :param single_result: single result found in <div class="summary"> :type single_result: `bs4.element.ResultSet` :return: parsed title, link and description of single result :rtype: dict
Parses the source code to return
[ "Parses", "the", "source", "code", "to", "return" ]
def parse_single_result(self, single_result, return_type=ReturnType.FULL, **kwargs): """ Parses the source code to return :param single_result: single result found in <div class="summary"> :type single_result: `bs4.element.ResultSet` :return: parsed title, link and description of single result :rtype: dict """ rdict = SearchItem() h3 = single_result.find('h3') # pylint: disable=invalid-name link_tag = h3.find('a') if return_type in (ReturnType.FULL, return_type.TITLE): # Get the text and link rdict["titles"] = link_tag.text if return_type in (ReturnType.FULL, return_type.LINK): ref_link = link_tag.get('href') link = self.base_url + ref_link rdict["links"] = link if return_type in (ReturnType.FULL, return_type.DESCRIPTION): caption = single_result.find('div', class_='excerpt') rdict["descriptions"] = caption.text return rdict
[ "def", "parse_single_result", "(", "self", ",", "single_result", ",", "return_type", "=", "ReturnType", ".", "FULL", ",", "*", "*", "kwargs", ")", ":", "rdict", "=", "SearchItem", "(", ")", "h3", "=", "single_result", ".", "find", "(", "'h3'", ")", "# pylint: disable=invalid-name", "link_tag", "=", "h3", ".", "find", "(", "'a'", ")", "if", "return_type", "in", "(", "ReturnType", ".", "FULL", ",", "return_type", ".", "TITLE", ")", ":", "# Get the text and link", "rdict", "[", "\"titles\"", "]", "=", "link_tag", ".", "text", "if", "return_type", "in", "(", "ReturnType", ".", "FULL", ",", "return_type", ".", "LINK", ")", ":", "ref_link", "=", "link_tag", ".", "get", "(", "'href'", ")", "link", "=", "self", ".", "base_url", "+", "ref_link", "rdict", "[", "\"links\"", "]", "=", "link", "if", "return_type", "in", "(", "ReturnType", ".", "FULL", ",", "return_type", ".", "DESCRIPTION", ")", ":", "caption", "=", "single_result", ".", "find", "(", "'div'", ",", "class_", "=", "'excerpt'", ")", "rdict", "[", "\"descriptions\"", "]", "=", "caption", ".", "text", "return", "rdict" ]
https://github.com/bisohns/search-engine-parser/blob/ede1355a1f63398d9217b8e502fbd6c52b53bf09/search_engine_parser/core/engines/stackoverflow.py#L35-L59
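A minimal bs4 sketch of the extraction above; the markup is invented for illustration and only mimics the structure the parser expects.

from bs4 import BeautifulSoup

html = ('<div class="summary"><h3><a href="/q/1">Title</a></h3>'
        '<div class="excerpt">Snippet text</div></div>')
result = BeautifulSoup(html, "html.parser").find("div", class_="summary")
link_tag = result.find("h3").find("a")
print(link_tag.text, "https://stackoverflow.com" + link_tag.get("href"))
print(result.find("div", class_="excerpt").text)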
apache/tvm
6eb4ed813ebcdcd9558f0906a1870db8302ff1e0
python/tvm/contrib/miopen.py
python
conv2d_forward
( x, w, stride_h=1, stride_w=1, pad_h=0, pad_w=0, dilation_h=1, dilation_w=1, conv_mode=0, data_type=1, group_count=1, )
return te.extern( list(oshape), [x, w], lambda ins, outs: tvm.tir.call_packed( "tvm.contrib.miopen.conv2d.forward", conv_mode, data_type, pad_h, pad_w, stride_h, stride_w, dilation_h, dilation_w, algo, ins[0], ins[1], outs[0], ), name="y", )
Create an extern op that compute 2D convolution with MIOpen Parameters ---------- x: Tensor input feature map w: Tensor convolution weight stride_h: int height stride stride_w: int width stride pad_h: int height pad pad_w: int weight pad dilation_h: int height dilation dilation_w: int width dilation conv_mode: int 0: miopenConvolution 1: miopenTranspose data_type: int 0: miopenHalf (fp16) 1: miopenFloat (fp32) group_count: int number of groups Returns ------- y: Tensor The result tensor
Create an extern op that compute 2D convolution with MIOpen
[ "Create", "an", "extern", "op", "that", "compute", "2D", "convolution", "with", "MIOpen" ]
def conv2d_forward( x, w, stride_h=1, stride_w=1, pad_h=0, pad_w=0, dilation_h=1, dilation_w=1, conv_mode=0, data_type=1, group_count=1, ): """Create an extern op that compute 2D convolution with MIOpen Parameters ---------- x: Tensor input feature map w: Tensor convolution weight stride_h: int height stride stride_w: int width stride pad_h: int height pad pad_w: int weight pad dilation_h: int height dilation dilation_w: int width dilation conv_mode: int 0: miopenConvolution 1: miopenTranspose data_type: int 0: miopenHalf (fp16) 1: miopenFloat (fp32) group_count: int number of groups Returns ------- y: Tensor The result tensor """ assert 0 <= conv_mode <= 2, "0: miopenConvolution / 1: miopenTranspose / 2: miopenGroupConv" if group_count > 1: conv_mode = 2 oshape = np.zeros((len(x.shape)), dtype=np.int32) xshape = x.shape wshape = w.shape setup_func = tvm._ffi.get_global_func("tvm.contrib.miopen.conv2d.setup") algo = setup_func( conv_mode, data_type, pad_h, pad_w, stride_h, stride_w, dilation_h, dilation_w, xshape[0].value, xshape[1].value, xshape[2].value, xshape[3].value, wshape[0].value, wshape[1].value, wshape[2].value, wshape[3].value, group_count, _get_np_int32_array_handle(oshape), ) return te.extern( list(oshape), [x, w], lambda ins, outs: tvm.tir.call_packed( "tvm.contrib.miopen.conv2d.forward", conv_mode, data_type, pad_h, pad_w, stride_h, stride_w, dilation_h, dilation_w, algo, ins[0], ins[1], outs[0], ), name="y", )
[ "def", "conv2d_forward", "(", "x", ",", "w", ",", "stride_h", "=", "1", ",", "stride_w", "=", "1", ",", "pad_h", "=", "0", ",", "pad_w", "=", "0", ",", "dilation_h", "=", "1", ",", "dilation_w", "=", "1", ",", "conv_mode", "=", "0", ",", "data_type", "=", "1", ",", "group_count", "=", "1", ",", ")", ":", "assert", "0", "<=", "conv_mode", "<=", "2", ",", "\"0: miopenConvolution / 1: miopenTranspose / 2: miopenGroupConv\"", "if", "group_count", ">", "1", ":", "conv_mode", "=", "2", "oshape", "=", "np", ".", "zeros", "(", "(", "len", "(", "x", ".", "shape", ")", ")", ",", "dtype", "=", "np", ".", "int32", ")", "xshape", "=", "x", ".", "shape", "wshape", "=", "w", ".", "shape", "setup_func", "=", "tvm", ".", "_ffi", ".", "get_global_func", "(", "\"tvm.contrib.miopen.conv2d.setup\"", ")", "algo", "=", "setup_func", "(", "conv_mode", ",", "data_type", ",", "pad_h", ",", "pad_w", ",", "stride_h", ",", "stride_w", ",", "dilation_h", ",", "dilation_w", ",", "xshape", "[", "0", "]", ".", "value", ",", "xshape", "[", "1", "]", ".", "value", ",", "xshape", "[", "2", "]", ".", "value", ",", "xshape", "[", "3", "]", ".", "value", ",", "wshape", "[", "0", "]", ".", "value", ",", "wshape", "[", "1", "]", ".", "value", ",", "wshape", "[", "2", "]", ".", "value", ",", "wshape", "[", "3", "]", ".", "value", ",", "group_count", ",", "_get_np_int32_array_handle", "(", "oshape", ")", ",", ")", "return", "te", ".", "extern", "(", "list", "(", "oshape", ")", ",", "[", "x", ",", "w", "]", ",", "lambda", "ins", ",", "outs", ":", "tvm", ".", "tir", ".", "call_packed", "(", "\"tvm.contrib.miopen.conv2d.forward\"", ",", "conv_mode", ",", "data_type", ",", "pad_h", ",", "pad_w", ",", "stride_h", ",", "stride_w", ",", "dilation_h", ",", "dilation_w", ",", "algo", ",", "ins", "[", "0", "]", ",", "ins", "[", "1", "]", ",", "outs", "[", "0", "]", ",", ")", ",", "name", "=", "\"y\"", ",", ")" ]
https://github.com/apache/tvm/blob/6eb4ed813ebcdcd9558f0906a1870db8302ff1e0/python/tvm/contrib/miopen.py#L45-L138
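A hedged call sketch for `conv2d_forward`; the shapes are illustrative, and actually running it needs a ROCm build of TVM with MIOpen enabled.

import tvm
from tvm import te
from tvm.contrib import miopen

x = te.placeholder((1, 3, 224, 224), name="x")   # NCHW input
w = te.placeholder((64, 3, 7, 7), name="w")      # OIHW weights
y = miopen.conv2d_forward(x, w, stride_h=2, stride_w=2, pad_h=3, pad_w=3)
s = te.create_schedule(y.op)
f = tvm.build(s, [x, w, y], target="rocm")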
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/dts/v20180330/dts_client.py
python
DtsClient.DescribeMigrateCheckJob
(self, request)
This API is used to get the result of a migration check after the check has been created. It reports the current status and progress of the check. If the check passes, 'StartMigrateJob' can be called to start the migration. If the check fails, the reason can be queried; follow the error message and use 'ModifyMigrateJob' to modify the migration configuration or adjust the relevant parameters of the source/target instances. :param request: Request instance for DescribeMigrateCheckJob. :type request: :class:`tencentcloud.dts.v20180330.models.DescribeMigrateCheckJobRequest` :rtype: :class:`tencentcloud.dts.v20180330.models.DescribeMigrateCheckJobResponse`
This API is used to get the result of a migration check after the check has been created. It reports the current status and progress of the check. If the check passes, 'StartMigrateJob' can be called to start the migration. If the check fails, the reason can be queried; follow the error message and use 'ModifyMigrateJob' to modify the migration configuration or adjust the relevant parameters of the source/target instances.
[ "This", "API", "is", "used", "to", "get", "the", "result", "of", "a", "migration", "check", "after", "the", "check", "has", "been", "created", ".", "It", "reports", "the", "current", "status", "and", "progress", "of", "the", "check", ".", "If", "the", "check", "passes", ",", "StartMigrateJob", "can", "be", "called", "to", "start", "the", "migration", ".", "If", "the", "check", "fails", ",", "the", "reason", "can", "be", "queried", ";", "follow", "the", "error", "message", "and", "use", "ModifyMigrateJob", "to", "modify", "the", "migration", "configuration", "or", "adjust", "the", "relevant", "parameters", "of", "the", "source", "/", "target", "instances", "." ]
def DescribeMigrateCheckJob(self, request): """This API is used to get the result of a migration check after the check has been created. It reports the current status and progress of the check. If the check passes, 'StartMigrateJob' can be called to start the migration. If the check fails, the reason can be queried; follow the error message and use 'ModifyMigrateJob' to modify the migration configuration or adjust the relevant parameters of the source/target instances. :param request: Request instance for DescribeMigrateCheckJob. :type request: :class:`tencentcloud.dts.v20180330.models.DescribeMigrateCheckJobRequest` :rtype: :class:`tencentcloud.dts.v20180330.models.DescribeMigrateCheckJobResponse` """ try: params = request._serialize() body = self.call("DescribeMigrateCheckJob", params) response = json.loads(body) if "Error" not in response["Response"]: model = models.DescribeMigrateCheckJobResponse() model._deserialize(response["Response"]) return model else: code = response["Response"]["Error"]["Code"] message = response["Response"]["Error"]["Message"] reqid = response["Response"]["RequestId"] raise TencentCloudSDKException(code, message, reqid) except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(e.message, e.message)
[ "def", "DescribeMigrateCheckJob", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"DescribeMigrateCheckJob\"", ",", "params", ")", "response", "=", "json", ".", "loads", "(", "body", ")", "if", "\"Error\"", "not", "in", "response", "[", "\"Response\"", "]", ":", "model", "=", "models", ".", "DescribeMigrateCheckJobResponse", "(", ")", "model", ".", "_deserialize", "(", "response", "[", "\"Response\"", "]", ")", "return", "model", "else", ":", "code", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Code\"", "]", "message", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Message\"", "]", "reqid", "=", "response", "[", "\"Response\"", "]", "[", "\"RequestId\"", "]", "raise", "TencentCloudSDKException", "(", "code", ",", "message", ",", "reqid", ")", "except", "Exception", "as", "e", ":", "if", "isinstance", "(", "e", ",", "TencentCloudSDKException", ")", ":", "raise", "else", ":", "raise", "TencentCloudSDKException", "(", "e", ".", "message", ",", "e", ".", "message", ")" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/dts/v20180330/dts_client.py#L231-L258
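A hedged usage sketch following the usual pattern for this SDK; the region and the `JobId` request field are assumptions, not taken from the record above.

from tencentcloud.common import credential
from tencentcloud.dts.v20180330 import dts_client, models

cred = credential.Credential("SECRET_ID", "SECRET_KEY")
client = dts_client.DtsClient(cred, "ap-guangzhou")   # assumed region
req = models.DescribeMigrateCheckJobRequest()
req.JobId = "dts-xxxxxxxx"                            # assumed field name
resp = client.DescribeMigrateCheckJob(req)
print(resp.to_json_string())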
rll/rllab
ba78e4c16dc492982e648f117875b22af3965579
rllab/envs/box2d/box2d_viewer.py
python
Box2DViewer.setCenter
(self, value)
Updates the view offset based on the center of the screen. Tells the debug draw to update its values also.
Updates the view offset based on the center of the screen.
[ "Updates", "the", "view", "offset", "based", "on", "the", "center", "of", "the", "screen", "." ]
def setCenter(self, value): """ Updates the view offset based on the center of the screen. Tells the debug draw to update its values also. """ self._viewCenter = b2Vec2(*value) self._viewCenter *= self._viewZoom self._viewOffset = self._viewCenter - self.screenSize / 2
[ "def", "setCenter", "(", "self", ",", "value", ")", ":", "self", ".", "_viewCenter", "=", "b2Vec2", "(", "*", "value", ")", "self", ".", "_viewCenter", "*=", "self", ".", "_viewZoom", "self", ".", "_viewOffset", "=", "self", ".", "_viewCenter", "-", "self", ".", "screenSize", "/", "2" ]
https://github.com/rll/rllab/blob/ba78e4c16dc492982e648f117875b22af3965579/rllab/envs/box2d/box2d_viewer.py#L182-L190
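The offset arithmetic above in plain numpy: the view centre is scaled by the zoom, then half the screen size is subtracted.

import numpy as np

view_zoom, screen = 2.0, np.array([640.0, 480.0])
center = np.array([10.0, 5.0]) * view_zoom
offset = center - screen / 2     # [-300., -230.]
print(offset)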
liquidctl/liquidctl
73962574632f94050c2a75f517e929a29797b5e2
liquidctl/cli.py
python
_device_set_color
(dev, args, **opts)
[]
def _device_set_color(dev, args, **opts): color = map(color_from_str, args['<color>']) dev.set_color(args['<channel>'].lower(), args['<mode>'].lower(), color, **opts)
[ "def", "_device_set_color", "(", "dev", ",", "args", ",", "*", "*", "opts", ")", ":", "color", "=", "map", "(", "color_from_str", ",", "args", "[", "'<color>'", "]", ")", "dev", ".", "set_color", "(", "args", "[", "'<channel>'", "]", ".", "lower", "(", ")", ",", "args", "[", "'<mode>'", "]", ".", "lower", "(", ")", ",", "color", ",", "*", "*", "opts", ")" ]
https://github.com/liquidctl/liquidctl/blob/73962574632f94050c2a75f517e929a29797b5e2/liquidctl/cli.py#L274-L276
facebookresearch/neuralvolumes
8c5fad49b2b05b4b2e79917ee87299e7c1676d59
data/dryice1.py
python
load_krt
(path)
return cameras
Load KRT file containing intrinsic and extrinsic parameters.
Load KRT file containing intrinsic and extrinsic parameters.
[ "Load", "KRT", "file", "containing", "intrinsic", "and", "extrinsic", "parameters", "." ]
def load_krt(path): """Load KRT file containing intrinsic and extrinsic parameters.""" cameras = {} with open(path, "r") as f: while True: name = f.readline() if name == "": break intrin = [[float(x) for x in f.readline().split()] for i in range(3)] dist = [float(x) for x in f.readline().split()] extrin = [[float(x) for x in f.readline().split()] for i in range(3)] f.readline() cameras[name[:-1]] = { "intrin": np.array(intrin), "dist": np.array(dist), "extrin": np.array(extrin)} return cameras
[ "def", "load_krt", "(", "path", ")", ":", "cameras", "=", "{", "}", "with", "open", "(", "path", ",", "\"r\"", ")", "as", "f", ":", "while", "True", ":", "name", "=", "f", ".", "readline", "(", ")", "if", "name", "==", "\"\"", ":", "break", "intrin", "=", "[", "[", "float", "(", "x", ")", "for", "x", "in", "f", ".", "readline", "(", ")", ".", "split", "(", ")", "]", "for", "i", "in", "range", "(", "3", ")", "]", "dist", "=", "[", "float", "(", "x", ")", "for", "x", "in", "f", ".", "readline", "(", ")", ".", "split", "(", ")", "]", "extrin", "=", "[", "[", "float", "(", "x", ")", "for", "x", "in", "f", ".", "readline", "(", ")", ".", "split", "(", ")", "]", "for", "i", "in", "range", "(", "3", ")", "]", "f", ".", "readline", "(", ")", "cameras", "[", "name", "[", ":", "-", "1", "]", "]", "=", "{", "\"intrin\"", ":", "np", ".", "array", "(", "intrin", ")", ",", "\"dist\"", ":", "np", ".", "array", "(", "dist", ")", ",", "\"extrin\"", ":", "np", ".", "array", "(", "extrin", ")", "}", "return", "cameras" ]
https://github.com/facebookresearch/neuralvolumes/blob/8c5fad49b2b05b4b2e79917ee87299e7c1676d59/data/dryice1.py#L13-L33
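A sketch of the file layout the reader above implies (inferred from the parsing order, not a published spec): a camera name, three intrinsic rows, one distortion row, three extrinsic rows, then a blank separator.

import numpy as np

sample = ("cam0\n"
          "100 0 50\n0 100 50\n0 0 1\n"    # 3x3 intrinsics
          "0 0 0 0 0\n"                    # distortion coefficients
          "1 0 0 0\n0 1 0 0\n0 0 1 0\n"    # 3x4 extrinsics
          "\n")
lines = sample.splitlines()
intrin = np.array([[float(x) for x in lines[i].split()] for i in (1, 2, 3)])
assert lines[0] == "cam0" and intrin.shape == (3, 3)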
ask/mode
a104009f0c96790b9f6140179b4968da07a38c81
mode/worker.py
python
Worker.carp
(self, msg: str)
Write warning to standard err.
Write warning to standard err.
[ "Write", "warning", "to", "standard", "err", "." ]
def carp(self, msg: str) -> None: """Write warning to standard err.""" self._say(msg, file=self.stderr)
[ "def", "carp", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "self", ".", "_say", "(", "msg", ",", "file", "=", "self", ".", "stderr", ")" ]
https://github.com/ask/mode/blob/a104009f0c96790b9f6140179b4968da07a38c81/mode/worker.py#L153-L155
facebookresearch/FaderNetworks
cdd9e50659b635a6e04311e1cf4b9a6e6683319b
src/training.py
python
Trainer.save_best_periodic
(self, to_log)
Save the best models / periodically save the models.
Save the best models / periodically save the models.
[ "Save", "the", "best", "models", "/", "periodically", "save", "the", "models", "." ]
def save_best_periodic(self, to_log): """ Save the best models / periodically save the models. """ if to_log['ae_loss'] < self.best_loss: self.best_loss = to_log['ae_loss'] logger.info('Best reconstruction loss: %.5f' % self.best_loss) self.save_model('best_rec') if self.params.eval_clf and np.mean(to_log['clf_accu']) > self.best_accu: self.best_accu = np.mean(to_log['clf_accu']) logger.info('Best evaluation accuracy: %.5f' % self.best_accu) self.save_model('best_accu') if to_log['n_epoch'] % 5 == 0 and to_log['n_epoch'] > 0: self.save_model('periodic-%i' % to_log['n_epoch'])
[ "def", "save_best_periodic", "(", "self", ",", "to_log", ")", ":", "if", "to_log", "[", "'ae_loss'", "]", "<", "self", ".", "best_loss", ":", "self", ".", "best_loss", "=", "to_log", "[", "'ae_loss'", "]", "logger", ".", "info", "(", "'Best reconstruction loss: %.5f'", "%", "self", ".", "best_loss", ")", "self", ".", "save_model", "(", "'best_rec'", ")", "if", "self", ".", "params", ".", "eval_clf", "and", "np", ".", "mean", "(", "to_log", "[", "'clf_accu'", "]", ")", ">", "self", ".", "best_accu", ":", "self", ".", "best_accu", "=", "np", ".", "mean", "(", "to_log", "[", "'clf_accu'", "]", ")", "logger", ".", "info", "(", "'Best evaluation accuracy: %.5f'", "%", "self", ".", "best_accu", ")", "self", ".", "save_model", "(", "'best_accu'", ")", "if", "to_log", "[", "'n_epoch'", "]", "%", "5", "==", "0", "and", "to_log", "[", "'n_epoch'", "]", ">", "0", ":", "self", ".", "save_model", "(", "'periodic-%i'", "%", "to_log", "[", "'n_epoch'", "]", ")" ]
https://github.com/facebookresearch/FaderNetworks/blob/cdd9e50659b635a6e04311e1cf4b9a6e6683319b/src/training.py#L245-L258
abhik/pebl
5e7d694eb1e4f90e0f1410000b958ba62698a268
src/pebl/config.py
python
write
(filename, include_defaults=False)
Writes parameters to config file. If include_defaults is True, writes all parameters. Else, only writes parameters that were specifically set (via config file, command line, etc).
Writes parameters to config file.
[ "Writes", "parameters", "to", "config", "file", "." ]
def write(filename, include_defaults=False): """Writes parameters to config file. If include_defaults is True, writes all parameters. Else, only writes parameters that were specifically set (via config file, command line, etc). """ config = ConfigParser() params = _parameters.values() if include_defaults \ else [p for p in _parameters.values() if p.source] for param in params: config.set(param.section, param.option, param.value) with file(filename, 'w') as f: config.write(f)
[ "def", "write", "(", "filename", ",", "include_defaults", "=", "False", ")", ":", "config", "=", "ConfigParser", "(", ")", "params", "=", "_parameters", ".", "values", "(", ")", "if", "include_defaults", "else", "[", "p", "for", "p", "in", "_parameters", ".", "values", "(", ")", "if", "p", ".", "source", "]", "for", "param", "in", "params", ":", "config", ".", "set", "(", "param", ".", "section", ",", "param", ".", "option", ",", "param", ".", "value", ")", "with", "file", "(", "filename", ",", "'w'", ")", "as", "f", ":", "config", ".", "write", "(", "f", ")" ]
https://github.com/abhik/pebl/blob/5e7d694eb1e4f90e0f1410000b958ba62698a268/src/pebl/config.py#L183-L199
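A minimal sketch of the same write-to-config pattern in current Python (standard-library configparser only, independent of pebl; the section, option, and output path are illustrative, and ConfigParser.set requires the section to exist first):

from configparser import ConfigParser

config = ConfigParser()
config.add_section("data")                     # set() raises NoSectionError otherwise
config.set("data", "filename", "input.txt")
with open("example.config", "w") as f:         # illustrative output path
    config.write(f)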
kuri65536/python-for-android
26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
python-build/python-libs/gdata/src/gdata/service.py
python
GDataService.ClientLogin
(self, username, password, account_type=None, service=None, auth_service_url=None, source=None, captcha_token=None, captcha_response=None)
Convenience method for authenticating using ProgrammaticLogin. Sets values for email, password, and other optional members. Args: username: password: account_type: string (optional) service: string (optional) auth_service_url: string (optional) captcha_token: string (optional) captcha_response: string (optional)
Convenience method for authenticating using ProgrammaticLogin. Sets values for email, password, and other optional members.
[ "Convenience", "method", "for", "authenticating", "using", "ProgrammaticLogin", ".", "Sets", "values", "for", "email", "password", "and", "other", "optional", "members", "." ]
def ClientLogin(self, username, password, account_type=None, service=None,
    auth_service_url=None, source=None, captcha_token=None,
    captcha_response=None):
  """Convenience method for authenticating using ProgrammaticLogin.

  Sets values for email, password, and other optional members.

  Args:
    username:
    password:
    account_type: string (optional)
    service: string (optional)
    auth_service_url: string (optional)
    captcha_token: string (optional)
    captcha_response: string (optional)
  """
  self.email = username
  self.password = password

  if account_type:
    self.account_type = account_type
  if service:
    self.service = service
  if source:
    self.source = source
  if auth_service_url:
    self.auth_service_url = auth_service_url

  self.ProgrammaticLogin(captcha_token, captcha_response)
[ "def", "ClientLogin", "(", "self", ",", "username", ",", "password", ",", "account_type", "=", "None", ",", "service", "=", "None", ",", "auth_service_url", "=", "None", ",", "source", "=", "None", ",", "captcha_token", "=", "None", ",", "captcha_response", "=", "None", ")", ":", "self", ".", "email", "=", "username", "self", ".", "password", "=", "password", "if", "account_type", ":", "self", ".", "account_type", "=", "account_type", "if", "service", ":", "self", ".", "service", "=", "service", "if", "source", ":", "self", ".", "source", "=", "source", "if", "auth_service_url", ":", "self", ".", "auth_service_url", "=", "auth_service_url", "self", ".", "ProgrammaticLogin", "(", "captcha_token", ",", "captcha_response", ")" ]
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-build/python-libs/gdata/src/gdata/service.py#L815-L843
spectacles/CodeComplice
8ca8ee4236f72b58caa4209d2fbd5fa56bd31d62
libs/codeintel2/css_linter.py
python
_CSSLexerClassifier.is_tag
(self, tok)
return (tok['style'] == ScintillaConstants.SCE_CSS_TAG or self.is_operator(tok, "*"))
[]
def is_tag(self, tok):
    return (tok['style'] == ScintillaConstants.SCE_CSS_TAG
            or self.is_operator(tok, "*"))
[ "def", "is_tag", "(", "self", ",", "tok", ")", ":", "return", "(", "tok", "[", "'style'", "]", "==", "ScintillaConstants", ".", "SCE_CSS_TAG", "or", "self", ".", "is_operator", "(", "tok", ",", "\"*\"", ")", ")" ]
https://github.com/spectacles/CodeComplice/blob/8ca8ee4236f72b58caa4209d2fbd5fa56bd31d62/libs/codeintel2/css_linter.py#L122-L124
knownsec/Pocsuite
877d1b1604629b8dcd6e53b167c3c98249e5e94f
pocsuite/thirdparty/pyparsing/pyparsing.py
python
upcaseTokens
(s,l,t)
return [ tt.upper() for tt in map(_ustr,t) ]
Helper parse action to convert tokens to upper case.
Helper parse action to convert tokens to upper case.
[ "Helper", "parse", "action", "to", "convert", "tokens", "to", "upper", "case", "." ]
def upcaseTokens(s,l,t):
    """Helper parse action to convert tokens to upper case."""
    return [ tt.upper() for tt in map(_ustr,t) ]
[ "def", "upcaseTokens", "(", "s", ",", "l", ",", "t", ")", ":", "return", "[", "tt", ".", "upper", "(", ")", "for", "tt", "in", "map", "(", "_ustr", ",", "t", ")", "]" ]
https://github.com/knownsec/Pocsuite/blob/877d1b1604629b8dcd6e53b167c3c98249e5e94f/pocsuite/thirdparty/pyparsing/pyparsing.py#L3406-L3408
google/trax
d6cae2067dedd0490b78d831033607357e975015
trax/data/inputs.py
python
generate_sequential_chunks
(max_length=None)
return _f
Returns a function that generates chunks of at most max_length length.
Returns a function that generates chunks of at most max_length length.
[ "Returns", "a", "function", "that", "generates", "chunks", "of", "at", "most", "max_length", "length", "." ]
def generate_sequential_chunks(max_length=None):
  """Returns a function that generates chunks of at most max_length length."""
  def _f(generator):
    for example in generator:
      n_tokens = len(example)
      if n_tokens <= max_length:
        yield example
      else:
        n_segments = int(math.ceil(float(n_tokens) / float(max_length)))
        for i in range(n_segments):
          start = max_length * i
          end = min(start + max_length, n_tokens)
          yield example[start:end]
  return _f
[ "def", "generate_sequential_chunks", "(", "max_length", "=", "None", ")", ":", "def", "_f", "(", "generator", ")", ":", "for", "example", "in", "generator", ":", "n_tokens", "=", "len", "(", "example", ")", "if", "n_tokens", "<=", "max_length", ":", "yield", "example", "else", ":", "n_segments", "=", "int", "(", "math", ".", "ceil", "(", "float", "(", "n_tokens", ")", "/", "float", "(", "max_length", ")", ")", ")", "for", "i", "in", "range", "(", "n_segments", ")", ":", "start", "=", "max_length", "*", "i", "end", "=", "min", "(", "start", "+", "max_length", ",", "n_tokens", ")", "yield", "example", "[", "start", ":", "end", "]", "return", "_f" ]
https://github.com/google/trax/blob/d6cae2067dedd0490b78d831033607357e975015/trax/data/inputs.py#L918-L931
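A small usage sketch of the chunking helper above (standalone, not from the trax repo; the helper body is restated so the demo runs on its own, and the toy token list is illustrative):

import math

def generate_sequential_chunks(max_length=None):
    # Restates the trax helper above for a self-contained demo.
    def _f(generator):
        for example in generator:
            n_tokens = len(example)
            if n_tokens <= max_length:
                yield example
            else:
                n_segments = int(math.ceil(float(n_tokens) / float(max_length)))
                for i in range(n_segments):
                    start = max_length * i
                    end = min(start + max_length, n_tokens)
                    yield example[start:end]
    return _f

chunker = generate_sequential_chunks(max_length=4)
print(list(chunker(iter([[1, 2, 3, 4, 5, 6, 7, 8, 9]]))))
# -> [[1, 2, 3, 4], [5, 6, 7, 8], [9]]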
tomplus/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
kubernetes_asyncio/client/api/apps_v1beta2_api.py
python
AppsV1beta2Api.delete_namespaced_deployment_with_http_info
(self, name, namespace, **kwargs)
return self.api_client.call_api( '/apis/apps/v1beta2/namespaces/{namespace}/deployments/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
delete_namespaced_deployment # noqa: E501 delete a Deployment # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_deployment_with_http_info(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str name: name of the Deployment (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :param V1DeleteOptions body: :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(V1Status, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
delete_namespaced_deployment # noqa: E501
[ "delete_namespaced_deployment", "#", "noqa", ":", "E501" ]
def delete_namespaced_deployment_with_http_info(self, name, namespace, **kwargs):  # noqa: E501
    """delete_namespaced_deployment  # noqa: E501

    delete a Deployment  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_namespaced_deployment_with_http_info(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
    :param V1DeleteOptions body:
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(V1Status, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'name',
        'namespace',
        'pretty',
        'dry_run',
        'grace_period_seconds',
        'orphan_dependents',
        'propagation_policy',
        'body'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_namespaced_deployment" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'name' is set
    if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                   local_var_params['name'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `name` when calling `delete_namespaced_deployment`")  # noqa: E501
    # verify the required parameter 'namespace' is set
    if self.api_client.client_side_validation and ('namespace' not in local_var_params or  # noqa: E501
                                                   local_var_params['namespace'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `namespace` when calling `delete_namespaced_deployment`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in local_var_params:
        path_params['name'] = local_var_params['name']  # noqa: E501
    if 'namespace' in local_var_params:
        path_params['namespace'] = local_var_params['namespace']  # noqa: E501

    query_params = []
    if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
        query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
    if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
        query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
    if 'grace_period_seconds' in local_var_params and local_var_params['grace_period_seconds'] is not None:  # noqa: E501
        query_params.append(('gracePeriodSeconds', local_var_params['grace_period_seconds']))  # noqa: E501
    if 'orphan_dependents' in local_var_params and local_var_params['orphan_dependents'] is not None:  # noqa: E501
        query_params.append(('orphanDependents', local_var_params['orphan_dependents']))  # noqa: E501
    if 'propagation_policy' in local_var_params and local_var_params['propagation_policy'] is not None:  # noqa: E501
        query_params.append(('propagationPolicy', local_var_params['propagation_policy']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])  # noqa: E501

    # Authentication setting
    auth_settings = ['BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/apis/apps/v1beta2/namespaces/{namespace}/deployments/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Status',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
[ "def", "delete_namespaced_deployment_with_http_info", "(", "self", ",", "name", ",", "namespace", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "local_var_params", "=", "locals", "(", ")", "all_params", "=", "[", "'name'", ",", "'namespace'", ",", "'pretty'", ",", "'dry_run'", ",", "'grace_period_seconds'", ",", "'orphan_dependents'", ",", "'propagation_policy'", ",", "'body'", "]", "all_params", ".", "extend", "(", "[", "'async_req'", ",", "'_return_http_data_only'", ",", "'_preload_content'", ",", "'_request_timeout'", "]", ")", "for", "key", ",", "val", "in", "six", ".", "iteritems", "(", "local_var_params", "[", "'kwargs'", "]", ")", ":", "if", "key", "not", "in", "all_params", ":", "raise", "ApiTypeError", "(", "\"Got an unexpected keyword argument '%s'\"", "\" to method delete_namespaced_deployment\"", "%", "key", ")", "local_var_params", "[", "key", "]", "=", "val", "del", "local_var_params", "[", "'kwargs'", "]", "# verify the required parameter 'name' is set", "if", "self", ".", "api_client", ".", "client_side_validation", "and", "(", "'name'", "not", "in", "local_var_params", "or", "# noqa: E501", "local_var_params", "[", "'name'", "]", "is", "None", ")", ":", "# noqa: E501", "raise", "ApiValueError", "(", "\"Missing the required parameter `name` when calling `delete_namespaced_deployment`\"", ")", "# noqa: E501", "# verify the required parameter 'namespace' is set", "if", "self", ".", "api_client", ".", "client_side_validation", "and", "(", "'namespace'", "not", "in", "local_var_params", "or", "# noqa: E501", "local_var_params", "[", "'namespace'", "]", "is", "None", ")", ":", "# noqa: E501", "raise", "ApiValueError", "(", "\"Missing the required parameter `namespace` when calling `delete_namespaced_deployment`\"", ")", "# noqa: E501", "collection_formats", "=", "{", "}", "path_params", "=", "{", "}", "if", "'name'", "in", "local_var_params", ":", "path_params", "[", "'name'", "]", "=", "local_var_params", "[", "'name'", "]", "# noqa: E501", "if", "'namespace'", "in", "local_var_params", ":", "path_params", "[", "'namespace'", "]", "=", "local_var_params", "[", "'namespace'", "]", "# noqa: E501", "query_params", "=", "[", "]", "if", "'pretty'", "in", "local_var_params", "and", "local_var_params", "[", "'pretty'", "]", "is", "not", "None", ":", "# noqa: E501", "query_params", ".", "append", "(", "(", "'pretty'", ",", "local_var_params", "[", "'pretty'", "]", ")", ")", "# noqa: E501", "if", "'dry_run'", "in", "local_var_params", "and", "local_var_params", "[", "'dry_run'", "]", "is", "not", "None", ":", "# noqa: E501", "query_params", ".", "append", "(", "(", "'dryRun'", ",", "local_var_params", "[", "'dry_run'", "]", ")", ")", "# noqa: E501", "if", "'grace_period_seconds'", "in", "local_var_params", "and", "local_var_params", "[", "'grace_period_seconds'", "]", "is", "not", "None", ":", "# noqa: E501", "query_params", ".", "append", "(", "(", "'gracePeriodSeconds'", ",", "local_var_params", "[", "'grace_period_seconds'", "]", ")", ")", "# noqa: E501", "if", "'orphan_dependents'", "in", "local_var_params", "and", "local_var_params", "[", "'orphan_dependents'", "]", "is", "not", "None", ":", "# noqa: E501", "query_params", ".", "append", "(", "(", "'orphanDependents'", ",", "local_var_params", "[", "'orphan_dependents'", "]", ")", ")", "# noqa: E501", "if", "'propagation_policy'", "in", "local_var_params", "and", "local_var_params", "[", "'propagation_policy'", "]", "is", "not", "None", ":", "# noqa: E501", "query_params", ".", "append", "(", "(", "'propagationPolicy'", ",", 
"local_var_params", "[", "'propagation_policy'", "]", ")", ")", "# noqa: E501", "header_params", "=", "{", "}", "form_params", "=", "[", "]", "local_var_files", "=", "{", "}", "body_params", "=", "None", "if", "'body'", "in", "local_var_params", ":", "body_params", "=", "local_var_params", "[", "'body'", "]", "# HTTP header `Accept`", "header_params", "[", "'Accept'", "]", "=", "self", ".", "api_client", ".", "select_header_accept", "(", "[", "'application/json'", ",", "'application/yaml'", ",", "'application/vnd.kubernetes.protobuf'", "]", ")", "# noqa: E501", "# Authentication setting", "auth_settings", "=", "[", "'BearerToken'", "]", "# noqa: E501", "return", "self", ".", "api_client", ".", "call_api", "(", "'/apis/apps/v1beta2/namespaces/{namespace}/deployments/{name}'", ",", "'DELETE'", ",", "path_params", ",", "query_params", ",", "header_params", ",", "body", "=", "body_params", ",", "post_params", "=", "form_params", ",", "files", "=", "local_var_files", ",", "response_type", "=", "'V1Status'", ",", "# noqa: E501", "auth_settings", "=", "auth_settings", ",", "async_req", "=", "local_var_params", ".", "get", "(", "'async_req'", ")", ",", "_return_http_data_only", "=", "local_var_params", ".", "get", "(", "'_return_http_data_only'", ")", ",", "# noqa: E501", "_preload_content", "=", "local_var_params", ".", "get", "(", "'_preload_content'", ",", "True", ")", ",", "_request_timeout", "=", "local_var_params", ".", "get", "(", "'_request_timeout'", ")", ",", "collection_formats", "=", "collection_formats", ")" ]
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/api/apps_v1beta2_api.py#L1937-L2056
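A stripped-down sketch of the kwargs-validation idiom the generated client uses above (standalone and hypothetical; the method and parameter names are illustrative and no Kubernetes cluster is involved):

def delete_resource(name, **kwargs):
    # Reject any keyword argument not in the whitelist, as the generated client does.
    all_params = ['pretty', 'dry_run', 'grace_period_seconds']
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_resource" % key
            )
    return name, kwargs

print(delete_resource("web", dry_run="All"))   # -> ('web', {'dry_run': 'All'})
try:
    delete_resource("web", bogus=1)
except TypeError as exc:
    print(exc)                                 # the unexpected keyword is rejected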
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/sqlalchemy/orm/identity.py
python
StrongInstanceDict.add
(self, state)
[]
def add(self, state):
    if state.key in self:
        if attributes.instance_state(self._dict[state.key]) is not state:
            raise sa_exc.InvalidRequestError(
                "Can't attach instance "
                "%s; another instance with key %s is already "
                "present in this session." % (
                    orm_util.state_str(state), state.key))
        return False
    else:
        self._dict[state.key] = state.obj()
        self._manage_incoming_state(state)
        return True
[ "def", "add", "(", "self", ",", "state", ")", ":", "if", "state", ".", "key", "in", "self", ":", "if", "attributes", ".", "instance_state", "(", "self", ".", "_dict", "[", "state", ".", "key", "]", ")", "is", "not", "state", ":", "raise", "sa_exc", ".", "InvalidRequestError", "(", "\"Can't attach instance \"", "\"%s; another instance with key %s is already \"", "\"present in this session.\"", "%", "(", "orm_util", ".", "state_str", "(", "state", ")", ",", "state", ".", "key", ")", ")", "return", "False", "else", ":", "self", ".", "_dict", "[", "state", ".", "key", "]", "=", "state", ".", "obj", "(", ")", "self", ".", "_manage_incoming_state", "(", "state", ")", "return", "True" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/sqlalchemy/orm/identity.py#L300-L312
hyperledger/aries-cloudagent-python
2f36776e99f6053ae92eed8123b5b1b2e891c02a
aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py
python
DIFPresExchHandler.process_constraint_holders
( self, subject_ids: Sequence[str], )
Check if holder or subject of claim still controls the identifier.
Check if holder or subject of claim still controls the identifier.
[ "Check", "if", "holder", "or", "subject", "of", "claim", "still", "controls", "the", "identifier", "." ]
async def process_constraint_holders(
    self,
    subject_ids: Sequence[str],
) -> bool:
    """Check if holder or subject of claim still controls the identifier."""
    async with self.profile.session() as session:
        wallet = session.inject(BaseWallet)
        try:
            for subject_id in subject_ids:
                await wallet.get_local_did(subject_id.replace("did:sov:", ""))
            self.is_holder = True
            return True
        except (WalletError, WalletNotFoundError):
            return False
[ "async", "def", "process_constraint_holders", "(", "self", ",", "subject_ids", ":", "Sequence", "[", "str", "]", ",", ")", "->", "bool", ":", "async", "with", "self", ".", "profile", ".", "session", "(", ")", "as", "session", ":", "wallet", "=", "session", ".", "inject", "(", "BaseWallet", ")", "try", ":", "for", "subject_id", "in", "subject_ids", ":", "await", "wallet", ".", "get_local_did", "(", "subject_id", ".", "replace", "(", "\"did:sov:\"", ",", "\"\"", ")", ")", "self", ".", "is_holder", "=", "True", "return", "True", "except", "(", "WalletError", ",", "WalletNotFoundError", ")", ":", "return", "False" ]
https://github.com/hyperledger/aries-cloudagent-python/blob/2f36776e99f6053ae92eed8123b5b1b2e891c02a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py#L437-L450
albertz/music-player
d23586f5bf657cbaea8147223be7814d117ae73d
mac/pyobjc-framework-Quartz/Examples/Programming with Quartz/BasicDrawing/ImageMasking.py
python
getMaskData2
()
return _data2
[]
def getMaskData2():
    return _data2
[ "def", "getMaskData2", "(", ")", ":", "return", "_data2" ]
https://github.com/albertz/music-player/blob/d23586f5bf657cbaea8147223be7814d117ae73d/mac/pyobjc-framework-Quartz/Examples/Programming with Quartz/BasicDrawing/ImageMasking.py#L230-L231
stanfordnlp/cocoa
26bfea048f00241eb4dbff861b2d9260bde52636
mutualfriends/web/third_party_backend.py
python
BackendConnection.create_user_if_necessary
(self, userid)
Adds a new user to the database if necessary :param : :return:
Adds a new user to the database if necessary :param : :return:
[ "Adds", "a", "new", "user", "to", "the", "database", "if", "necessary", ":", "param", ":", ":", "return", ":" ]
def create_user_if_necessary(self, userid):
    """
    Adds a new user to the database if necessary
    :param :
    :return:
    """
    with self.conn:
        cursor = self.conn.cursor()
        now = get_timestamp()
        cursor.execute("INSERT OR IGNORE INTO ActiveUsers VALUES (?,?,?,?,?)",
                       (userid, json.dumps([]), json.dumps([]), 0, now))
[ "def", "create_user_if_necessary", "(", "self", ",", "userid", ")", ":", "with", "self", ".", "conn", ":", "cursor", "=", "self", ".", "conn", ".", "cursor", "(", ")", "now", "=", "get_timestamp", "(", ")", "cursor", ".", "execute", "(", "\"INSERT OR IGNORE INTO ActiveUsers VALUES (?,?,?,?,?)\"", ",", "(", "userid", ",", "json", ".", "dumps", "(", "[", "]", ")", ",", "json", ".", "dumps", "(", "[", "]", ")", ",", "0", ",", "now", ")", ")" ]
https://github.com/stanfordnlp/cocoa/blob/26bfea048f00241eb4dbff861b2d9260bde52636/mutualfriends/web/third_party_backend.py#L31-L41
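A standalone illustration of the SQLite INSERT OR IGNORE idiom the method above relies on (the in-memory database and table layout are illustrative, not the cocoa schema): a second insert with the same primary key is silently skipped.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE ActiveUsers (userid TEXT PRIMARY KEY, status INTEGER)")
with conn:
    conn.execute("INSERT OR IGNORE INTO ActiveUsers VALUES (?,?)", ("alice", 0))
    conn.execute("INSERT OR IGNORE INTO ActiveUsers VALUES (?,?)", ("alice", 1))  # ignored: key exists
print(conn.execute("SELECT * FROM ActiveUsers").fetchall())
# -> [('alice', 0)]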
pymc-devs/pymc
38867dd19e96afb0ceccc8ccd74a9795f118dfe3
pymc/distributions/continuous.py
python
Gumbel.logcdf
( value: Union[float, np.ndarray, TensorVariable], mu: Union[float, np.ndarray, TensorVariable], beta: Union[float, np.ndarray, TensorVariable], )
return check_parameters(res, 0 < beta, msg="beta > 0")
Compute the log of the cumulative distribution function for Gumbel distribution at the specified value. Parameters ---------- value: numeric or np.ndarray or aesara.tensor Value(s) for which log CDF is calculated. If the log CDF for multiple values are desired the values must be provided in a numpy array or Aesara tensor. Returns ------- TensorVariable
Compute the log of the cumulative distribution function for Gumbel distribution at the specified value.
[ "Compute", "the", "log", "of", "the", "cumulative", "distribution", "function", "for", "Gumbel", "distribution", "at", "the", "specified", "value", "." ]
def logcdf(
    value: Union[float, np.ndarray, TensorVariable],
    mu: Union[float, np.ndarray, TensorVariable],
    beta: Union[float, np.ndarray, TensorVariable],
) -> TensorVariable:
    """
    Compute the log of the cumulative distribution function for Gumbel
    distribution at the specified value.

    Parameters
    ----------
    value: numeric or np.ndarray or aesara.tensor
        Value(s) for which log CDF is calculated. If the log CDF for multiple
        values are desired the values must be provided in a numpy array or
        Aesara tensor.

    Returns
    -------
    TensorVariable
    """
    res = -at.exp(-(value - mu) / beta)

    return check_parameters(res, 0 < beta, msg="beta > 0")
[ "def", "logcdf", "(", "value", ":", "Union", "[", "float", ",", "np", ".", "ndarray", ",", "TensorVariable", "]", ",", "mu", ":", "Union", "[", "float", ",", "np", ".", "ndarray", ",", "TensorVariable", "]", ",", "beta", ":", "Union", "[", "float", ",", "np", ".", "ndarray", ",", "TensorVariable", "]", ",", ")", "->", "TensorVariable", ":", "res", "=", "-", "at", ".", "exp", "(", "-", "(", "value", "-", "mu", ")", "/", "beta", ")", "return", "check_parameters", "(", "res", ",", "0", "<", "beta", ",", "msg", "=", "\"beta > 0\"", ")" ]
https://github.com/pymc-devs/pymc/blob/38867dd19e96afb0ceccc8ccd74a9795f118dfe3/pymc/distributions/continuous.py#L3311-L3332
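A small numerical sanity check (not from the pymc test suite; scipy's gumbel_r with loc=mu, scale=beta is the assumed reference parameterization) that the closed form -exp(-(value - mu) / beta) used above matches the Gumbel log-CDF:

import numpy as np
from scipy import stats

mu, beta = 1.5, 2.0
value = np.array([-1.0, 0.5, 3.0])

closed_form = -np.exp(-(value - mu) / beta)                  # expression from logcdf above
reference = stats.gumbel_r.logcdf(value, loc=mu, scale=beta)
print(np.allclose(closed_form, reference))                   # -> True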
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/filecmp.py
python
_filter
(flist, skip)
return list(filterfalse(skip.__contains__, flist))
[]
def _filter(flist, skip): return list(filterfalse(skip.__contains__, flist))
[ "def", "_filter", "(", "flist", ",", "skip", ")", ":", "return", "list", "(", "filterfalse", "(", "skip", ".", "__contains__", ",", "flist", ")", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/filecmp.py#L294-L295
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
apps/beeswax/gen-py/hive_metastore/ThriftHiveMetastore.py
python
Client.delete_partition_column_statistics
(self, db_name, tbl_name, part_name, col_name)
return self.recv_delete_partition_column_statistics()
Parameters: - db_name - tbl_name - part_name - col_name
Parameters: - db_name - tbl_name - part_name - col_name
[ "Parameters", ":", "-", "db_name", "-", "tbl_name", "-", "part_name", "-", "col_name" ]
def delete_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
    """
    Parameters:
     - db_name
     - tbl_name
     - part_name
     - col_name

    """
    self.send_delete_partition_column_statistics(db_name, tbl_name, part_name, col_name)
    return self.recv_delete_partition_column_statistics()
[ "def", "delete_partition_column_statistics", "(", "self", ",", "db_name", ",", "tbl_name", ",", "part_name", ",", "col_name", ")", ":", "self", ".", "send_delete_partition_column_statistics", "(", "db_name", ",", "tbl_name", ",", "part_name", ",", "col_name", ")", "return", "self", ".", "recv_delete_partition_column_statistics", "(", ")" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/apps/beeswax/gen-py/hive_metastore/ThriftHiveMetastore.py#L4946-L4956
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/PIL/ImageSequence.py
python
Iterator.next
(self)
return self.__next__()
[]
def next(self):
    return self.__next__()
[ "def", "next", "(", "self", ")", ":", "return", "self", ".", "__next__", "(", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/PIL/ImageSequence.py#L55-L56
SilenceDut/nbaplus-server
368067f7a1de958c12fb35462afe2713760b52ae
beautifulsoup4-4.3.2/bs4/dammit.py
python
UnicodeDammit._to_unicode
(self, data, encoding, errors="strict")
return unicode(data, encoding, errors)
Given a string and its encoding, decodes the string into Unicode. %encoding is a string recognized by encodings.aliases
Given a string and its encoding, decodes the string into Unicode. %encoding is a string recognized by encodings.aliases
[ "Given", "a", "string", "and", "its", "encoding", "decodes", "the", "string", "into", "Unicode", ".", "%encoding", "is", "a", "string", "recognized", "by", "encodings", ".", "aliases" ]
def _to_unicode(self, data, encoding, errors="strict"):
    '''Given a string and its encoding, decodes the string into Unicode.
    %encoding is a string recognized by encodings.aliases'''
    return unicode(data, encoding, errors)
[ "def", "_to_unicode", "(", "self", ",", "data", ",", "encoding", ",", "errors", "=", "\"strict\"", ")", ":", "return", "unicode", "(", "data", ",", "encoding", ",", "errors", ")" ]
https://github.com/SilenceDut/nbaplus-server/blob/368067f7a1de958c12fb35462afe2713760b52ae/beautifulsoup4-4.3.2/bs4/dammit.py#L425-L428
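The helper above is Python 2 (the unicode() built-in); for contrast, the Python 3 equivalent is bytes.decode, shown here with an illustrative byte string:

data = "Caf\u00e9".encode("utf-8")       # illustrative UTF-8 bytes
print(data.decode("utf-8", "strict"))    # -> Café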
freedombox/FreedomBox
335a7f92cc08f27981f838a7cddfc67740598e54
plinth/modules/networks/__init__.py
python
set_internet_connection_type
(internet_connection_type)
return kvstore.set('networks_internet_type', internet_connection_type)
Store the internet connection type.
Store the internet connection type.
[ "Store", "the", "internet", "connection", "type", "." ]
def set_internet_connection_type(internet_connection_type):
    """Store the internet connection type."""
    return kvstore.set('networks_internet_type', internet_connection_type)
[ "def", "set_internet_connection_type", "(", "internet_connection_type", ")", ":", "return", "kvstore", ".", "set", "(", "'networks_internet_type'", ",", "internet_connection_type", ")" ]
https://github.com/freedombox/FreedomBox/blob/335a7f92cc08f27981f838a7cddfc67740598e54/plinth/modules/networks/__init__.py#L110-L112
FederatedAI/FATE
32540492623568ecd1afcb367360133616e02fa3
python/fate_arch/federation/pulsar/_pulsar_manager.py
python
PulsarManager.delete_namespace
(self, tenant: str, namespace: str, force: bool = False)
return response
[]
def delete_namespace(self, tenant: str, namespace: str, force: bool = False):
    session = self._create_session()
    response = session.delete(
        self.service_url +
        'namespace/{}/{}?force={}'.format(tenant, namespace, str(force).lower())
    )

    return response
[ "def", "delete_namespace", "(", "self", ",", "tenant", ":", "str", ",", "namespace", ":", "str", ",", "force", ":", "bool", "=", "False", ")", ":", "session", "=", "self", ".", "_create_session", "(", ")", "response", "=", "session", ".", "delete", "(", "self", ".", "service_url", "+", "'namespace/{}/{}?force={}'", ".", "format", "(", "tenant", ",", "namespace", ",", "str", "(", "force", ")", ".", "lower", "(", ")", ")", ")", "return", "response" ]
https://github.com/FederatedAI/FATE/blob/32540492623568ecd1afcb367360133616e02fa3/python/fate_arch/federation/pulsar/_pulsar_manager.py#L161-L168
albertz/music-player
d23586f5bf657cbaea8147223be7814d117ae73d
mac/pyobjc-framework-Quartz/Examples/Programming with Quartz/BasicDrawing/Utilities.py
python
getRGBOpaqueRedColor
()
return _rgbRed
[]
def getRGBOpaqueRedColor():
    global _rgbRed
    if _rgbRed is None:
        opaqueRed = (0.663, 0, 0.031, 1)
        _rgbRed = CGColorCreate(
                getTheCalibratedRGBColorSpace(), opaqueRed)
    return _rgbRed
[ "def", "getRGBOpaqueRedColor", "(", ")", ":", "global", "_rgbRed", "if", "_rgbRed", "is", "None", ":", "opaqueRed", "=", "(", "0.663", ",", "0", ",", "0.031", ",", "1", ")", "_rgbRed", "=", "CGColorCreate", "(", "getTheCalibratedRGBColorSpace", "(", ")", ",", "opaqueRed", ")", "return", "_rgbRed" ]
https://github.com/albertz/music-player/blob/d23586f5bf657cbaea8147223be7814d117ae73d/mac/pyobjc-framework-Quartz/Examples/Programming with Quartz/BasicDrawing/Utilities.py#L120-L126
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/modules/bigip.py
python
delete_pool
(hostname, username, password, name)
A function to connect to a bigip device and delete a specific pool. hostname The host/address of the bigip device username The iControl REST username password The iControl REST password name The name of the pool which will be deleted CLI Example .. code-block:: bash salt '*' bigip.delete_node bigip admin admin my-pool
A function to connect to a bigip device and delete a specific pool.
[ "A", "function", "to", "connect", "to", "a", "bigip", "device", "and", "delete", "a", "specific", "pool", "." ]
def delete_pool(hostname, username, password, name):
    """
    A function to connect to a bigip device and delete a specific pool.

    hostname
        The host/address of the bigip device
    username
        The iControl REST username
    password
        The iControl REST password
    name
        The name of the pool which will be deleted

    CLI Example

    .. code-block:: bash

        salt '*' bigip.delete_node bigip admin admin my-pool
    """
    # build session
    bigip_session = _build_session(username, password)

    # delete to REST
    try:
        response = bigip_session.delete(
            BIG_IP_URL_BASE.format(host=hostname) + "/ltm/pool/{name}".format(name=name)
        )
    except requests.exceptions.ConnectionError as e:
        return _load_connection_error(hostname, e)

    if _load_response(response) == "":
        return True
    else:
        return _load_response(response)
[ "def", "delete_pool", "(", "hostname", ",", "username", ",", "password", ",", "name", ")", ":", "# build session", "bigip_session", "=", "_build_session", "(", "username", ",", "password", ")", "# delete to REST", "try", ":", "response", "=", "bigip_session", ".", "delete", "(", "BIG_IP_URL_BASE", ".", "format", "(", "host", "=", "hostname", ")", "+", "\"/ltm/pool/{name}\"", ".", "format", "(", "name", "=", "name", ")", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", "as", "e", ":", "return", "_load_connection_error", "(", "hostname", ",", "e", ")", "if", "_load_response", "(", "response", ")", "==", "\"\"", ":", "return", "True", "else", ":", "return", "_load_response", "(", "response", ")" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/modules/bigip.py#L1004-L1038
pyansys/pymapdl
c07291fc062b359abf0e92b95a92d753a95ef3d7
ansys/mapdl/core/_commands/preproc/meshing.py
python
Meshing.desize
( self, minl="", minh="", mxel="", angl="", angh="", edgmn="", edgmx="", adjf="", adjm="", **kwargs, )
return self.run(command, **kwargs)
Controls default element sizes. APDL Command: DESIZE Parameters ---------- minl Minimum number of elements that will be attached to a line when using lower-order elements (defaults to 3 elements per line). If MINL = DEFA, all arguments will be set back to default values. If MINL = STAT, list status of command (Including on/off status). If MINL = OFF, deactivate default element sizing. If MINL = ON, reactivate default element sizing. minh Minimum number of elements that will be attached to a line when using higher-order elements. Defaults to 2 elements per line. mxel Maximum number of elements that will be attached to a single line (lower or higher-order elements). Defaults to 15 elements per line for h-elements. To deactivate this limit, specify a large number (such as 9999). angl Maximum spanned angle per lower-order element for curved lines. Defaults to 15 degrees per element. angh Maximum spanned angle per higher-order element for curved lines. Defaults to 28 degrees per element. edgmn Minimum element edge length. Defaults to no minimum edge length. The MINL or MINH argument can override this value. edgmx Maximum element edge length. Defaults to no maximum edge length. The MXEL argument can override this value. adjf Target aspect ratio for adjacent line. Used only when free meshing. Defaults to 1.0, which attempts to create equal-sided h-elements. adjm Target aspect ratio for adjacent line. Used only when map meshing. Defaults to 4.0, which attempts to create rectangular h-elements. Notes ----- DESIZE settings are usually used for mapped meshing. They are also used for free meshing if SmartSizing is turned off [SMRTSIZE,OFF], which is the default. Even when SmartSizing is on, some DESIZE settings (such as maximum and minimum element edge length) can affect free mesh density. The default settings of the DESIZE command are used only when no other element size specifications [KESIZE, LESIZE, ESIZE] exist for a certain line. This command is also valid for rezoning.
Controls default element sizes.
[ "Controls", "default", "element", "sizes", "." ]
def desize(
    self,
    minl="",
    minh="",
    mxel="",
    angl="",
    angh="",
    edgmn="",
    edgmx="",
    adjf="",
    adjm="",
    **kwargs,
):
    """Controls default element sizes.

    APDL Command: DESIZE

    Parameters
    ----------
    minl
        Minimum number of elements that will be attached to a line when
        using lower-order elements (defaults to 3 elements per line). If
        MINL = DEFA, all arguments will be set back to default values. If
        MINL = STAT, list status of command (Including on/off status). If
        MINL = OFF, deactivate default element sizing. If MINL = ON,
        reactivate default element sizing.
    minh
        Minimum number of elements that will be attached to a line when
        using higher-order elements. Defaults to 2 elements per line.
    mxel
        Maximum number of elements that will be attached to a single line
        (lower or higher-order elements). Defaults to 15 elements per line
        for h-elements. To deactivate this limit, specify a large number
        (such as 9999).
    angl
        Maximum spanned angle per lower-order element for curved lines.
        Defaults to 15 degrees per element.
    angh
        Maximum spanned angle per higher-order element for curved lines.
        Defaults to 28 degrees per element.
    edgmn
        Minimum element edge length. Defaults to no minimum edge length.
        The MINL or MINH argument can override this value.
    edgmx
        Maximum element edge length. Defaults to no maximum edge length.
        The MXEL argument can override this value.
    adjf
        Target aspect ratio for adjacent line. Used only when free
        meshing. Defaults to 1.0, which attempts to create equal-sided
        h-elements.
    adjm
        Target aspect ratio for adjacent line. Used only when map meshing.
        Defaults to 4.0, which attempts to create rectangular h-elements.

    Notes
    -----
    DESIZE settings are usually used for mapped meshing. They are also
    used for free meshing if SmartSizing is turned off [SMRTSIZE,OFF],
    which is the default. Even when SmartSizing is on, some DESIZE
    settings (such as maximum and minimum element edge length) can affect
    free mesh density.

    The default settings of the DESIZE command are used only when no other
    element size specifications [KESIZE, LESIZE, ESIZE] exist for a
    certain line.

    This command is also valid for rezoning.
    """
    command = (
        f"DESIZE,{minl},{minh},{mxel},{angl},{angh},{edgmn},{edgmx},{adjf},{adjm}"
    )
    return self.run(command, **kwargs)
[ "def", "desize", "(", "self", ",", "minl", "=", "\"\"", ",", "minh", "=", "\"\"", ",", "mxel", "=", "\"\"", ",", "angl", "=", "\"\"", ",", "angh", "=", "\"\"", ",", "edgmn", "=", "\"\"", ",", "edgmx", "=", "\"\"", ",", "adjf", "=", "\"\"", ",", "adjm", "=", "\"\"", ",", "*", "*", "kwargs", ",", ")", ":", "command", "=", "(", "f\"DESIZE,{minl},{minh},{mxel},{angl},{angh},{edgmn},{edgmx},{adjf},{adjm}\"", ")", "return", "self", ".", "run", "(", "command", ",", "*", "*", "kwargs", ")" ]
https://github.com/pyansys/pymapdl/blob/c07291fc062b359abf0e92b95a92d753a95ef3d7/ansys/mapdl/core/_commands/preproc/meshing.py#L456-L533
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v7/services/services/google_ads_service/client.py
python
GoogleAdsServiceClient.hotel_performance_view_path
(customer_id: str,)
return "customers/{customer_id}/hotelPerformanceView".format( customer_id=customer_id, )
Return a fully-qualified hotel_performance_view string.
Return a fully-qualified hotel_performance_view string.
[ "Return", "a", "fully", "-", "qualified", "hotel_performance_view", "string", "." ]
def hotel_performance_view_path(customer_id: str,) -> str:
    """Return a fully-qualified hotel_performance_view string."""
    return "customers/{customer_id}/hotelPerformanceView".format(
        customer_id=customer_id,
    )
[ "def", "hotel_performance_view_path", "(", "customer_id", ":", "str", ",", ")", "->", "str", ":", "return", "\"customers/{customer_id}/hotelPerformanceView\"", ".", "format", "(", "customer_id", "=", "customer_id", ",", ")" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v7/services/services/google_ads_service/client.py#L1720-L1724
JaniceWuo/MovieRecommend
4c86db64ca45598917d304f535413df3bc9fea65
movierecommend/venv1/Lib/site-packages/django/core/serializers/__init__.py
python
unregister_serializer
(format)
Unregister a given serializer. This is not a thread-safe operation.
Unregister a given serializer. This is not a thread-safe operation.
[ "Unregister", "a", "given", "serializer", ".", "This", "is", "not", "a", "thread", "-", "safe", "operation", "." ]
def unregister_serializer(format):
    "Unregister a given serializer. This is not a thread-safe operation."
    if not _serializers:
        _load_serializers()
    if format not in _serializers:
        raise SerializerDoesNotExist(format)
    del _serializers[format]
[ "def", "unregister_serializer", "(", "format", ")", ":", "if", "not", "_serializers", ":", "_load_serializers", "(", ")", "if", "format", "not", "in", "_serializers", ":", "raise", "SerializerDoesNotExist", "(", "format", ")", "del", "_serializers", "[", "format", "]" ]
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/django/core/serializers/__init__.py#L86-L92
python/cpython
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
Lib/pipes.py
python
Template.__repr__
(self)
return '<Template instance, steps=%r>' % (self.steps,)
t.__repr__() implements repr(t).
t.__repr__() implements repr(t).
[ "t", ".", "__repr__", "()", "implements", "repr", "(", "t", ")", "." ]
def __repr__(self):
    """t.__repr__() implements repr(t)."""
    return '<Template instance, steps=%r>' % (self.steps,)
[ "def", "__repr__", "(", "self", ")", ":", "return", "'<Template instance, steps=%r>'", "%", "(", "self", ".", "steps", ",", ")" ]
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/pipes.py#L90-L92
Project-MONAI/MONAI
83f8b06372a3803ebe9281300cb794a1f3395018
monai/transforms/utility/array.py
python
AsChannelLast.__call__
(self, img: NdarrayOrTensor)
return moveaxis(img, self.channel_dim, -1)
Apply the transform to `img`.
Apply the transform to `img`.
[ "Apply", "the", "transform", "to", "img", "." ]
def __call__(self, img: NdarrayOrTensor) -> NdarrayOrTensor:
    """
    Apply the transform to `img`.
    """
    return moveaxis(img, self.channel_dim, -1)
[ "def", "__call__", "(", "self", ",", "img", ":", "NdarrayOrTensor", ")", "->", "NdarrayOrTensor", ":", "return", "moveaxis", "(", "img", ",", "self", ".", "channel_dim", ",", "-", "1", ")" ]
https://github.com/Project-MONAI/MONAI/blob/83f8b06372a3803ebe9281300cb794a1f3395018/monai/transforms/utility/array.py#L159-L163
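A quick standalone illustration (plain NumPy; np.moveaxis stands in for MONAI's backend-dispatching moveaxis) of what moving the channel dimension to the last axis does to an array's shape:

import numpy as np

img = np.zeros((3, 64, 64))          # channel-first: (C, H, W)
out = np.moveaxis(img, 0, -1)        # channel_dim=0 moved to the end
print(img.shape, "->", out.shape)    # (3, 64, 64) -> (64, 64, 3)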
terrifyzhao/bert-utils
1d5f3eb649b4ee8a059f7050da483d0cd6d7fff4
modeling.py
python
attention_layer
(from_tensor, to_tensor, attention_mask=None, num_attention_heads=1, size_per_head=512, query_act=None, key_act=None, value_act=None, attention_probs_dropout_prob=0.0, initializer_range=0.02, do_return_2d_tensor=False, batch_size=None, from_seq_length=None, to_seq_length=None)
return context_layer
Performs multi-headed attention from `from_tensor` to `to_tensor`. This is an implementation of multi-headed attention based on "Attention is all you Need". If `from_tensor` and `to_tensor` are the same, then this is self-attention. Each timestep in `from_tensor` attends to the corresponding sequence in `to_tensor`, and returns a fixed-width vector. This function first projects `from_tensor` into a "query" tensor and `to_tensor` into "key" and "value" tensors. These are (effectively) a list of tensors of length `num_attention_heads`, where each tensor is of shape [batch_size, seq_length, size_per_head]. Then, the query and key tensors are dot-producted and scaled. These are softmaxed to obtain attention probabilities. The value tensors are then interpolated by these probabilities, then concatenated back to a single tensor and returned. In practice, the multi-headed attention is done with transposes and reshapes rather than actual separate tensors. Args: from_tensor: float Tensor of shape [batch_size, from_seq_length, from_width]. to_tensor: float Tensor of shape [batch_size, to_seq_length, to_width]. attention_mask: (optional) int32 Tensor of shape [batch_size, from_seq_length, to_seq_length]. The values should be 1 or 0. The attention scores will effectively be set to -infinity for any positions in the mask that are 0, and will be unchanged for positions that are 1. num_attention_heads: int. Number of attention heads. size_per_head: int. Size of each attention head. query_act: (optional) Activation function for the query transform. key_act: (optional) Activation function for the key transform. value_act: (optional) Activation function for the value transform. attention_probs_dropout_prob: (optional) float. Dropout probability of the attention probabilities. initializer_range: float. Range of the weight initializer. do_return_2d_tensor: bool. If True, the output will be of shape [batch_size * from_seq_length, num_attention_heads * size_per_head]. If False, the output will be of shape [batch_size, from_seq_length, num_attention_heads * size_per_head]. batch_size: (Optional) int. If the input is 2D, this might be the batch size of the 3D version of the `from_tensor` and `to_tensor`. from_seq_length: (Optional) If the input is 2D, this might be the seq length of the 3D version of the `from_tensor`. to_seq_length: (Optional) If the input is 2D, this might be the seq length of the 3D version of the `to_tensor`. Returns: float Tensor of shape [batch_size, from_seq_length, num_attention_heads * size_per_head]. (If `do_return_2d_tensor` is true, this will be of shape [batch_size * from_seq_length, num_attention_heads * size_per_head]). Raises: ValueError: Any of the arguments or tensor shapes are invalid.
Performs multi-headed attention from `from_tensor` to `to_tensor`.
[ "Performs", "multi", "-", "headed", "attention", "from", "from_tensor", "to", "to_tensor", "." ]
def attention_layer(from_tensor,
                    to_tensor,
                    attention_mask=None,
                    num_attention_heads=1,
                    size_per_head=512,
                    query_act=None,
                    key_act=None,
                    value_act=None,
                    attention_probs_dropout_prob=0.0,
                    initializer_range=0.02,
                    do_return_2d_tensor=False,
                    batch_size=None,
                    from_seq_length=None,
                    to_seq_length=None):
  """Performs multi-headed attention from `from_tensor` to `to_tensor`.

  This is an implementation of multi-headed attention based on "Attention
  is all you Need". If `from_tensor` and `to_tensor` are the same, then
  this is self-attention. Each timestep in `from_tensor` attends to the
  corresponding sequence in `to_tensor`, and returns a fixed-width vector.

  This function first projects `from_tensor` into a "query" tensor and
  `to_tensor` into "key" and "value" tensors. These are (effectively) a list
  of tensors of length `num_attention_heads`, where each tensor is of shape
  [batch_size, seq_length, size_per_head].

  Then, the query and key tensors are dot-producted and scaled. These are
  softmaxed to obtain attention probabilities. The value tensors are then
  interpolated by these probabilities, then concatenated back to a single
  tensor and returned.

  In practice, the multi-headed attention is done with transposes and
  reshapes rather than actual separate tensors.

  Args:
    from_tensor: float Tensor of shape [batch_size, from_seq_length,
      from_width].
    to_tensor: float Tensor of shape [batch_size, to_seq_length, to_width].
    attention_mask: (optional) int32 Tensor of shape [batch_size,
      from_seq_length, to_seq_length]. The values should be 1 or 0. The
      attention scores will effectively be set to -infinity for any positions
      in the mask that are 0, and will be unchanged for positions that are 1.
    num_attention_heads: int. Number of attention heads.
    size_per_head: int. Size of each attention head.
    query_act: (optional) Activation function for the query transform.
    key_act: (optional) Activation function for the key transform.
    value_act: (optional) Activation function for the value transform.
    attention_probs_dropout_prob: (optional) float. Dropout probability of the
      attention probabilities.
    initializer_range: float. Range of the weight initializer.
    do_return_2d_tensor: bool. If True, the output will be of shape
      [batch_size * from_seq_length, num_attention_heads * size_per_head].
      If False, the output will be of shape [batch_size, from_seq_length,
      num_attention_heads * size_per_head].
    batch_size: (Optional) int. If the input is 2D, this might be the batch
      size of the 3D version of the `from_tensor` and `to_tensor`.
    from_seq_length: (Optional) If the input is 2D, this might be the seq
      length of the 3D version of the `from_tensor`.
    to_seq_length: (Optional) If the input is 2D, this might be the seq
      length of the 3D version of the `to_tensor`.

  Returns:
    float Tensor of shape [batch_size, from_seq_length,
      num_attention_heads * size_per_head]. (If `do_return_2d_tensor` is
      true, this will be of shape [batch_size * from_seq_length,
      num_attention_heads * size_per_head]).

  Raises:
    ValueError: Any of the arguments or tensor shapes are invalid.
  """

  def transpose_for_scores(input_tensor, batch_size, num_attention_heads,
                           seq_length, width):
    output_tensor = tf.reshape(
        input_tensor, [batch_size, seq_length, num_attention_heads, width])

    output_tensor = tf.transpose(output_tensor, [0, 2, 1, 3])
    return output_tensor

  from_shape = get_shape_list(from_tensor, expected_rank=[2, 3])
  to_shape = get_shape_list(to_tensor, expected_rank=[2, 3])

  if len(from_shape) != len(to_shape):
    raise ValueError(
        "The rank of `from_tensor` must match the rank of `to_tensor`.")

  if len(from_shape) == 3:
    batch_size = from_shape[0]
    from_seq_length = from_shape[1]
    to_seq_length = to_shape[1]
  elif len(from_shape) == 2:
    if (batch_size is None or from_seq_length is None or to_seq_length is None):
      raise ValueError(
          "When passing in rank 2 tensors to attention_layer, the values "
          "for `batch_size`, `from_seq_length`, and `to_seq_length` "
          "must all be specified.")

  # Scalar dimensions referenced here:
  #   B = batch size (number of sequences)
  #   F = `from_tensor` sequence length
  #   T = `to_tensor` sequence length
  #   N = `num_attention_heads`
  #   H = `size_per_head`

  from_tensor_2d = reshape_to_matrix(from_tensor)
  to_tensor_2d = reshape_to_matrix(to_tensor)

  # `query_layer` = [B*F, N*H]
  query_layer = tf.layers.dense(
      from_tensor_2d,
      num_attention_heads * size_per_head,
      activation=query_act,
      name="query",
      kernel_initializer=create_initializer(initializer_range))

  # `key_layer` = [B*T, N*H]
  key_layer = tf.layers.dense(
      to_tensor_2d,
      num_attention_heads * size_per_head,
      activation=key_act,
      name="key",
      kernel_initializer=create_initializer(initializer_range))

  # `value_layer` = [B*T, N*H]
  value_layer = tf.layers.dense(
      to_tensor_2d,
      num_attention_heads * size_per_head,
      activation=value_act,
      name="value",
      kernel_initializer=create_initializer(initializer_range))

  # `query_layer` = [B, N, F, H]
  query_layer = transpose_for_scores(query_layer, batch_size,
                                     num_attention_heads, from_seq_length,
                                     size_per_head)

  # `key_layer` = [B, N, T, H]
  key_layer = transpose_for_scores(key_layer, batch_size, num_attention_heads,
                                   to_seq_length, size_per_head)

  # Take the dot product between "query" and "key" to get the raw
  # attention scores.
  # `attention_scores` = [B, N, F, T]
  attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True)
  attention_scores = tf.multiply(attention_scores,
                                 1.0 / math.sqrt(float(size_per_head)))

  if attention_mask is not None:
    # `attention_mask` = [B, 1, F, T]
    attention_mask = tf.expand_dims(attention_mask, axis=[1])

    # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
    # masked positions, this operation will create a tensor which is 0.0 for
    # positions we want to attend and -10000.0 for masked positions.
    adder = (1.0 - tf.cast(attention_mask, tf.float32)) * -10000.0

    # Since we are adding it to the raw scores before the softmax, this is
    # effectively the same as removing these entirely.
    attention_scores += adder

  # Normalize the attention scores to probabilities.
  # `attention_probs` = [B, N, F, T]
  attention_probs = tf.nn.softmax(attention_scores)

  # This is actually dropping out entire tokens to attend to, which might
  # seem a bit unusual, but is taken from the original Transformer paper.
  attention_probs = dropout(attention_probs, attention_probs_dropout_prob)

  # `value_layer` = [B, T, N, H]
  value_layer = tf.reshape(
      value_layer,
      [batch_size, to_seq_length, num_attention_heads, size_per_head])

  # `value_layer` = [B, N, T, H]
  value_layer = tf.transpose(value_layer, [0, 2, 1, 3])

  # `context_layer` = [B, N, F, H]
  context_layer = tf.matmul(attention_probs, value_layer)

  # `context_layer` = [B, F, N, H]
  context_layer = tf.transpose(context_layer, [0, 2, 1, 3])

  if do_return_2d_tensor:
    # `context_layer` = [B*F, N*V]
    context_layer = tf.reshape(
        context_layer,
        [batch_size * from_seq_length, num_attention_heads * size_per_head])
  else:
    # `context_layer` = [B, F, N*V]
    context_layer = tf.reshape(
        context_layer,
        [batch_size, from_seq_length, num_attention_heads * size_per_head])

  return context_layer
[ "def", "attention_layer", "(", "from_tensor", ",", "to_tensor", ",", "attention_mask", "=", "None", ",", "num_attention_heads", "=", "1", ",", "size_per_head", "=", "512", ",", "query_act", "=", "None", ",", "key_act", "=", "None", ",", "value_act", "=", "None", ",", "attention_probs_dropout_prob", "=", "0.0", ",", "initializer_range", "=", "0.02", ",", "do_return_2d_tensor", "=", "False", ",", "batch_size", "=", "None", ",", "from_seq_length", "=", "None", ",", "to_seq_length", "=", "None", ")", ":", "def", "transpose_for_scores", "(", "input_tensor", ",", "batch_size", ",", "num_attention_heads", ",", "seq_length", ",", "width", ")", ":", "output_tensor", "=", "tf", ".", "reshape", "(", "input_tensor", ",", "[", "batch_size", ",", "seq_length", ",", "num_attention_heads", ",", "width", "]", ")", "output_tensor", "=", "tf", ".", "transpose", "(", "output_tensor", ",", "[", "0", ",", "2", ",", "1", ",", "3", "]", ")", "return", "output_tensor", "from_shape", "=", "get_shape_list", "(", "from_tensor", ",", "expected_rank", "=", "[", "2", ",", "3", "]", ")", "to_shape", "=", "get_shape_list", "(", "to_tensor", ",", "expected_rank", "=", "[", "2", ",", "3", "]", ")", "if", "len", "(", "from_shape", ")", "!=", "len", "(", "to_shape", ")", ":", "raise", "ValueError", "(", "\"The rank of `from_tensor` must match the rank of `to_tensor`.\"", ")", "if", "len", "(", "from_shape", ")", "==", "3", ":", "batch_size", "=", "from_shape", "[", "0", "]", "from_seq_length", "=", "from_shape", "[", "1", "]", "to_seq_length", "=", "to_shape", "[", "1", "]", "elif", "len", "(", "from_shape", ")", "==", "2", ":", "if", "(", "batch_size", "is", "None", "or", "from_seq_length", "is", "None", "or", "to_seq_length", "is", "None", ")", ":", "raise", "ValueError", "(", "\"When passing in rank 2 tensors to attention_layer, the values \"", "\"for `batch_size`, `from_seq_length`, and `to_seq_length` \"", "\"must all be specified.\"", ")", "# Scalar dimensions referenced here:", "# B = batch size (number of sequences)", "# F = `from_tensor` sequence length", "# T = `to_tensor` sequence length", "# N = `num_attention_heads`", "# H = `size_per_head`", "from_tensor_2d", "=", "reshape_to_matrix", "(", "from_tensor", ")", "to_tensor_2d", "=", "reshape_to_matrix", "(", "to_tensor", ")", "# `query_layer` = [B*F, N*H]", "query_layer", "=", "tf", ".", "layers", ".", "dense", "(", "from_tensor_2d", ",", "num_attention_heads", "*", "size_per_head", ",", "activation", "=", "query_act", ",", "name", "=", "\"query\"", ",", "kernel_initializer", "=", "create_initializer", "(", "initializer_range", ")", ")", "# `key_layer` = [B*T, N*H]", "key_layer", "=", "tf", ".", "layers", ".", "dense", "(", "to_tensor_2d", ",", "num_attention_heads", "*", "size_per_head", ",", "activation", "=", "key_act", ",", "name", "=", "\"key\"", ",", "kernel_initializer", "=", "create_initializer", "(", "initializer_range", ")", ")", "# `value_layer` = [B*T, N*H]", "value_layer", "=", "tf", ".", "layers", ".", "dense", "(", "to_tensor_2d", ",", "num_attention_heads", "*", "size_per_head", ",", "activation", "=", "value_act", ",", "name", "=", "\"value\"", ",", "kernel_initializer", "=", "create_initializer", "(", "initializer_range", ")", ")", "# `query_layer` = [B, N, F, H]", "query_layer", "=", "transpose_for_scores", "(", "query_layer", ",", "batch_size", ",", "num_attention_heads", ",", "from_seq_length", ",", "size_per_head", ")", "# `key_layer` = [B, N, T, H]", "key_layer", "=", "transpose_for_scores", "(", "key_layer", ",", "batch_size", ",", 
"num_attention_heads", ",", "to_seq_length", ",", "size_per_head", ")", "# Take the dot product between \"query\" and \"key\" to get the raw", "# attention scores.", "# `attention_scores` = [B, N, F, T]", "attention_scores", "=", "tf", ".", "matmul", "(", "query_layer", ",", "key_layer", ",", "transpose_b", "=", "True", ")", "attention_scores", "=", "tf", ".", "multiply", "(", "attention_scores", ",", "1.0", "/", "math", ".", "sqrt", "(", "float", "(", "size_per_head", ")", ")", ")", "if", "attention_mask", "is", "not", "None", ":", "# `attention_mask` = [B, 1, F, T]", "attention_mask", "=", "tf", ".", "expand_dims", "(", "attention_mask", ",", "axis", "=", "[", "1", "]", ")", "# Since attention_mask is 1.0 for positions we want to attend and 0.0 for", "# masked positions, this operation will create a tensor which is 0.0 for", "# positions we want to attend and -10000.0 for masked positions.", "adder", "=", "(", "1.0", "-", "tf", ".", "cast", "(", "attention_mask", ",", "tf", ".", "float32", ")", ")", "*", "-", "10000.0", "# Since we are adding it to the raw scores before the softmax, this is", "# effectively the same as removing these entirely.", "attention_scores", "+=", "adder", "# Normalize the attention scores to probabilities.", "# `attention_probs` = [B, N, F, T]", "attention_probs", "=", "tf", ".", "nn", ".", "softmax", "(", "attention_scores", ")", "# This is actually dropping out entire tokens to attend to, which might", "# seem a bit unusual, but is taken from the original Transformer paper.", "attention_probs", "=", "dropout", "(", "attention_probs", ",", "attention_probs_dropout_prob", ")", "# `value_layer` = [B, T, N, H]", "value_layer", "=", "tf", ".", "reshape", "(", "value_layer", ",", "[", "batch_size", ",", "to_seq_length", ",", "num_attention_heads", ",", "size_per_head", "]", ")", "# `value_layer` = [B, N, T, H]", "value_layer", "=", "tf", ".", "transpose", "(", "value_layer", ",", "[", "0", ",", "2", ",", "1", ",", "3", "]", ")", "# `context_layer` = [B, N, F, H]", "context_layer", "=", "tf", ".", "matmul", "(", "attention_probs", ",", "value_layer", ")", "# `context_layer` = [B, F, N, H]", "context_layer", "=", "tf", ".", "transpose", "(", "context_layer", ",", "[", "0", ",", "2", ",", "1", ",", "3", "]", ")", "if", "do_return_2d_tensor", ":", "# `context_layer` = [B*F, N*V]", "context_layer", "=", "tf", ".", "reshape", "(", "context_layer", ",", "[", "batch_size", "*", "from_seq_length", ",", "num_attention_heads", "*", "size_per_head", "]", ")", "else", ":", "# `context_layer` = [B, F, N*V]", "context_layer", "=", "tf", ".", "reshape", "(", "context_layer", ",", "[", "batch_size", ",", "from_seq_length", ",", "num_attention_heads", "*", "size_per_head", "]", ")", "return", "context_layer" ]
https://github.com/terrifyzhao/bert-utils/blob/1d5f3eb649b4ee8a059f7050da483d0cd6d7fff4/modeling.py#L560-L753
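A minimal call-site sketch for the attention_layer above, assuming TensorFlow 1.x (the tf.layers API this file uses) and that modeling.py is importable; shapes and names are illustrative, not from the source:

import tensorflow as tf
import modeling  # assumes this file is on the path

# B=8 sequences, F=T=128 tokens, hidden width 768 = 12 heads * 64
from_tensor = tf.placeholder(tf.float32, [8, 128, 768])
attention_mask = tf.placeholder(tf.int32, [8, 128, 128])  # [B, F, T]

context = modeling.attention_layer(
    from_tensor=from_tensor,
    to_tensor=from_tensor,          # self-attention: queries and keys share a source
    attention_mask=attention_mask,
    num_attention_heads=12,
    size_per_head=64)
# context has shape [8, 128, 12 * 64] since do_return_2d_tensor=False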
jhpyle/docassemble
b90c84e57af59aa88b3404d44d0b125c70f832cc
docassemble_base/docassemble/base/util.py
python
DADict._trigger_gather
(self)
Triggers the gathering process.
Triggers the gathering process.
[ "Triggers", "the", "gathering", "process", "." ]
def _trigger_gather(self): """Triggers the gathering process.""" if docassemble.base.functions.get_gathering_mode(self.instanceName) is False: if self.auto_gather: self.gather() else: self.gathered
[ "def", "_trigger_gather", "(", "self", ")", ":", "if", "docassemble", ".", "base", ".", "functions", ".", "get_gathering_mode", "(", "self", ".", "instanceName", ")", "is", "False", ":", "if", "self", ".", "auto_gather", ":", "self", ".", "gather", "(", ")", "else", ":", "self", ".", "gathered" ]
https://github.com/jhpyle/docassemble/blob/b90c84e57af59aa88b3404d44d0b125c70f832cc/docassemble_base/docassemble/base/util.py#L2250-L2256
truenas/middleware
b11ec47d6340324f5a32287ffb4012e5d709b934
src/middlewared/middlewared/plugins/chart_releases_linux/upgrade.py
python
ChartReleaseService.upgrade_summary
(self, release_name, options)
return { 'container_images_to_update': { k: v for k, v in release['resources']['container_images'].items() if v['update_available'] }, 'changelog': changelog, 'available_versions_for_upgrade': all_newer_versions, 'item_update_available': release['update_available'], 'image_update_available': release['container_images_update_available'], **version_info, }
Retrieve upgrade summary for `release_name` which will include which container images will be updated and changelog for `options.item_version` chart version specified if applicable. If only container images need to be updated, changelog will be `null`. If chart release `release_name` does not require an upgrade, an error will be raised.
Retrieve upgrade summary for `release_name` which will include which container images will be updated and changelog for `options.item_version` chart version specified if applicable. If only container images need to be updated, changelog will be `null`.
[ "Retrieve", "upgrade", "summary", "for", "release_name", "which", "will", "include", "which", "container", "images", "will", "be", "updated", "and", "changelog", "for", "options", ".", "item_version", "chart", "version", "specified", "if", "applicable", ".", "If", "only", "container", "images", "need", "to", "be", "updated", "changelog", "will", "be", "null", "." ]
async def upgrade_summary(self, release_name, options): """ Retrieve upgrade summary for `release_name` which will include which container images will be updated and changelog for `options.item_version` chart version specified if applicable. If only container images need to be updated, changelog will be `null`. If chart release `release_name` does not require an upgrade, an error will be raised. """ release = await self.middleware.call( 'chart.release.query', [['id', '=', release_name]], {'extra': {'retrieve_resources': True}, 'get': True} ) if not release['update_available'] and not release['container_images_update_available']: raise CallError('No update is available for chart release', errno=errno.ENOENT) version_info = { 'latest_version': release['chart_metadata']['version'], 'upgrade_version': release['chart_metadata']['version'], 'latest_human_version': release['human_version'], 'upgrade_human_version': release['human_version'], } changelog = None all_newer_versions = [] if release['update_available']: available_items = await self.get_versions(release, options) latest_item = available_items['latest_version'] upgrade_version = available_items['specified_version'] version_info.update({ 'latest_version': latest_item['version'], 'latest_human_version': latest_item['human_version'], 'upgrade_version': upgrade_version['version'], 'upgrade_human_version': upgrade_version['human_version'], }) changelog = upgrade_version['changelog'] all_newer_versions = [ { 'version': v['version'], 'human_version': v['human_version'], } for v in available_items['versions'].values() if parse_version(v['version']) > parse_version(release['chart_metadata']['version']) ] return { 'container_images_to_update': { k: v for k, v in release['resources']['container_images'].items() if v['update_available'] }, 'changelog': changelog, 'available_versions_for_upgrade': all_newer_versions, 'item_update_available': release['update_available'], 'image_update_available': release['container_images_update_available'], **version_info, }
[ "async", "def", "upgrade_summary", "(", "self", ",", "release_name", ",", "options", ")", ":", "release", "=", "await", "self", ".", "middleware", ".", "call", "(", "'chart.release.query'", ",", "[", "[", "'id'", ",", "'='", ",", "release_name", "]", "]", ",", "{", "'extra'", ":", "{", "'retrieve_resources'", ":", "True", "}", ",", "'get'", ":", "True", "}", ")", "if", "not", "release", "[", "'update_available'", "]", "and", "not", "release", "[", "'container_images_update_available'", "]", ":", "raise", "CallError", "(", "'No update is available for chart release'", ",", "errno", "=", "errno", ".", "ENOENT", ")", "version_info", "=", "{", "'latest_version'", ":", "release", "[", "'chart_metadata'", "]", "[", "'version'", "]", ",", "'upgrade_version'", ":", "release", "[", "'chart_metadata'", "]", "[", "'version'", "]", ",", "'latest_human_version'", ":", "release", "[", "'human_version'", "]", ",", "'upgrade_human_version'", ":", "release", "[", "'human_version'", "]", ",", "}", "changelog", "=", "None", "all_newer_versions", "=", "[", "]", "if", "release", "[", "'update_available'", "]", ":", "available_items", "=", "await", "self", ".", "get_versions", "(", "release", ",", "options", ")", "latest_item", "=", "available_items", "[", "'latest_version'", "]", "upgrade_version", "=", "available_items", "[", "'specified_version'", "]", "version_info", ".", "update", "(", "{", "'latest_version'", ":", "latest_item", "[", "'version'", "]", ",", "'latest_human_version'", ":", "latest_item", "[", "'human_version'", "]", ",", "'upgrade_version'", ":", "upgrade_version", "[", "'version'", "]", ",", "'upgrade_human_version'", ":", "upgrade_version", "[", "'human_version'", "]", ",", "}", ")", "changelog", "=", "upgrade_version", "[", "'changelog'", "]", "all_newer_versions", "=", "[", "{", "'version'", ":", "v", "[", "'version'", "]", ",", "'human_version'", ":", "v", "[", "'human_version'", "]", ",", "}", "for", "v", "in", "available_items", "[", "'versions'", "]", ".", "values", "(", ")", "if", "parse_version", "(", "v", "[", "'version'", "]", ")", ">", "parse_version", "(", "release", "[", "'chart_metadata'", "]", "[", "'version'", "]", ")", "]", "return", "{", "'container_images_to_update'", ":", "{", "k", ":", "v", "for", "k", ",", "v", "in", "release", "[", "'resources'", "]", "[", "'container_images'", "]", ".", "items", "(", ")", "if", "v", "[", "'update_available'", "]", "}", ",", "'changelog'", ":", "changelog", ",", "'available_versions_for_upgrade'", ":", "all_newer_versions", ",", "'item_update_available'", ":", "release", "[", "'update_available'", "]", ",", "'image_update_available'", ":", "release", "[", "'container_images_update_available'", "]", ",", "*", "*", "version_info", ",", "}" ]
https://github.com/truenas/middleware/blob/b11ec47d6340324f5a32287ffb4012e5d709b934/src/middlewared/middlewared/plugins/chart_releases_linux/upgrade.py#L154-L204
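A hypothetical call-site sketch for upgrade_summary from another middlewared plugin; the method path, release name, and option value are made up, while 'item_version' and the result keys come from the docstring and return dict above:

async def check_plex_upgrade(self):
    # 'chart.release.upgrade_summary' is the assumed service path for this method
    summary = await self.middleware.call(
        'chart.release.upgrade_summary',
        'plex',                      # hypothetical release name
        {'item_version': 'latest'},  # option named in the docstring
    )
    if summary['item_update_available']:
        return summary['upgrade_human_version']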
sfepy/sfepy
02ec7bb2ab39ee1dfe1eb4cd509f0ffb7dcc8b25
sfepy/discrete/problem.py
python
Problem.set_mesh_coors
(self, coors, update_fields=False, actual=False, clear_all=True, extra_dofs=False)
Set mesh coordinates. Parameters ---------- coors : array The new coordinates. update_fields : bool If True, update also coordinates of fields. actual : bool If True, update the actual configuration coordinates, otherwise the undeformed configuration ones.
Set mesh coordinates.
[ "Set", "mesh", "coordinates", "." ]
def set_mesh_coors(self, coors, update_fields=False, actual=False, clear_all=True, extra_dofs=False): """ Set mesh coordinates. Parameters ---------- coors : array The new coordinates. update_fields : bool If True, update also coordinates of fields. actual : bool If True, update the actual configuration coordinates, otherwise the undeformed configuration ones. """ set_mesh_coors(self.domain, self.fields, coors, update_fields=update_fields, actual=actual, clear_all=clear_all, extra_dofs=extra_dofs)
[ "def", "set_mesh_coors", "(", "self", ",", "coors", ",", "update_fields", "=", "False", ",", "actual", "=", "False", ",", "clear_all", "=", "True", ",", "extra_dofs", "=", "False", ")", ":", "set_mesh_coors", "(", "self", ".", "domain", ",", "self", ".", "fields", ",", "coors", ",", "update_fields", "=", "update_fields", ",", "actual", "=", "actual", ",", "clear_all", "=", "clear_all", ",", "extra_dofs", "=", "extra_dofs", ")" ]
https://github.com/sfepy/sfepy/blob/02ec7bb2ab39ee1dfe1eb4cd509f0ffb7dcc8b25/sfepy/discrete/problem.py#L739-L756
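A minimal usage sketch for set_mesh_coors; both 'problem' (a fully set-up sfepy Problem) and the get_mesh_coors() accessor returning the current coordinate array are assumptions, not shown above:

coors = problem.get_mesh_coors().copy()  # assumed accessor for current coordinates
coors[:, 0] += 0.1                       # shift every node along x
problem.set_mesh_coors(coors, update_fields=True)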
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/_table.py
python
Table.customdatasrc
(self)
return self["customdatasrc"]
Sets the source reference on Chart Studio Cloud for `customdata`. The 'customdatasrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
Sets the source reference on Chart Studio Cloud for `customdata`. The 'customdatasrc' property must be specified as a string or as a plotly.grid_objs.Column object
[ "Sets", "the", "source", "reference", "on", "Chart", "Studio", "Cloud", "for", "customdata", ".", "The", "customdatasrc", "property", "must", "be", "specified", "as", "a", "string", "or", "as", "a", "plotly", ".", "grid_objs", ".", "Column", "object" ]
def customdatasrc(self): """ Sets the source reference on Chart Studio Cloud for `customdata`. The 'customdatasrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["customdatasrc"]
[ "def", "customdatasrc", "(", "self", ")", ":", "return", "self", "[", "\"customdatasrc\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/_table.py#L224-L236
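A short sketch of setting the property, per the docstring's contract that a plain string (a Chart Studio grid-column reference) is accepted; the column reference is hypothetical:

import plotly.graph_objects as go

table = go.Table(header=dict(values=["a", "b"]))
table.customdatasrc = "username:1234:col1"  # hypothetical grid-column reference
print(table.customdatasrc)                  # -> "username:1234:col1"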
jython/jython3
def4f8ec47cb7a9c799ea4c745f12badf92c5769
lib-python/3.5.1/importlib/abc.py
python
PathEntryFinder.invalidate_caches
(self)
An optional method for clearing the finder's cache, if any. This method is used by PathFinder.invalidate_caches().
An optional method for clearing the finder's cache, if any. This method is used by PathFinder.invalidate_caches().
[ "An", "optional", "method", "for", "clearing", "the", "finder", "s", "cache", "if", "any", ".", "This", "method", "is", "used", "by", "PathFinder", ".", "invalidate_caches", "()", "." ]
def invalidate_caches(self): """An optional method for clearing the finder's cache, if any. This method is used by PathFinder.invalidate_caches(). """
[ "def", "invalidate_caches", "(", "self", ")", ":" ]
https://github.com/jython/jython3/blob/def4f8ec47cb7a9c799ea4c745f12badf92c5769/lib-python/3.5.1/importlib/abc.py#L116-L119
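The concrete entry point that fans out to this hook is importlib.invalidate_caches(); a standard usage sketch (the module name is made up):

import importlib

# Create a module file at runtime...
with open("generated_mod.py", "w") as f:
    f.write("VALUE = 42\n")

# ...then clear finder caches so the next import statement can see it.
importlib.invalidate_caches()
import generated_mod
print(generated_mod.VALUE)  # 42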
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-35/fabmetheus_utilities/euclidean.py
python
XIntersectionIndex.__cmp__
(self, other)
return int( self.x > other.x )
Get comparison in order to sort x intersections in ascending order of x.
Get comparison in order to sort x intersections in ascending order of x.
[ "Get", "comparison", "in", "order", "to", "sort", "x", "intersections", "in", "ascending", "order", "of", "x", "." ]
def __cmp__(self, other): 'Get comparison in order to sort x intersections in ascending order of x.' if self.x < other.x: return - 1 return int( self.x > other.x )
[ "def", "__cmp__", "(", "self", ",", "other", ")", ":", "if", "self", ".", "x", "<", "other", ".", "x", ":", "return", "-", "1", "return", "int", "(", "self", ".", "x", ">", "other", ".", "x", ")" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-35/fabmetheus_utilities/euclidean.py#L2423-L2427
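__cmp__ only exists on Python 2; a sketch of the same ascending-x ordering expressed for Python 3 with functools.total_ordering (the class name is hypothetical):

from functools import total_ordering

@total_ordering
class XIntersectionIndexPy3:
    def __init__(self, x):
        self.x = x

    def __eq__(self, other):
        return self.x == other.x

    def __lt__(self, other):
        # total_ordering derives <=, >, >= from __eq__ and __lt__
        return self.x < other.x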
dropbox/dropbox-sdk-python
015437429be224732990041164a21a0501235db1
dropbox/team_log.py
python
EventDetails.get_sso_error_details
(self)
return self._value
Only call this if :meth:`is_sso_error_details` is true. :rtype: SsoErrorDetails
Only call this if :meth:`is_sso_error_details` is true.
[ "Only", "call", "this", "if", ":", "meth", ":", "is_sso_error_details", "is", "true", "." ]
def get_sso_error_details(self): """ Only call this if :meth:`is_sso_error_details` is true. :rtype: SsoErrorDetails """ if not self.is_sso_error_details(): raise AttributeError("tag 'sso_error_details' not set") return self._value
[ "def", "get_sso_error_details", "(", "self", ")", ":", "if", "not", "self", ".", "is_sso_error_details", "(", ")", ":", "raise", "AttributeError", "(", "\"tag 'sso_error_details' not set\"", ")", "return", "self", ".", "_value" ]
https://github.com/dropbox/dropbox-sdk-python/blob/015437429be224732990041164a21a0501235db1/dropbox/team_log.py#L18489-L18497
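The intended access pattern for this tagged union, using only the guard method referenced in the code above (the 'details' variable is hypothetical):

# details is assumed to be an EventDetails instance obtained elsewhere
if details.is_sso_error_details():
    sso_details = details.get_sso_error_details()  # safe: tag checked first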
aws-quickstart/quickstart-redhat-openshift
2b87dd38b72e7e4c439a606c5a9ea458d72da612
functions/source/DeleteBucketContents/requests/utils.py
python
guess_filename
(obj)
Tries to guess the filename of the given object.
Tries to guess the filename of the given object.
[ "Tries", "to", "guess", "the", "filename", "of", "the", "given", "object", "." ]
def guess_filename(obj): """Tries to guess the filename of the given object.""" name = getattr(obj, 'name', None) if (name and isinstance(name, basestring) and name[0] != '<' and name[-1] != '>'): return os.path.basename(name)
[ "def", "guess_filename", "(", "obj", ")", ":", "name", "=", "getattr", "(", "obj", ",", "'name'", ",", "None", ")", "if", "(", "name", "and", "isinstance", "(", "name", ",", "basestring", ")", "and", "name", "[", "0", "]", "!=", "'<'", "and", "name", "[", "-", "1", "]", "!=", "'>'", ")", ":", "return", "os", ".", "path", ".", "basename", "(", "name", ")" ]
https://github.com/aws-quickstart/quickstart-redhat-openshift/blob/2b87dd38b72e7e4c439a606c5a9ea458d72da612/functions/source/DeleteBucketContents/requests/utils.py#L219-L224
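A quick behavioral sketch: file objects opened from disk expose a usable .name, while in-memory streams do not, so the function falls through and returns None implicitly:

import io

with open("report.csv", "rb") as f:      # assumes the file exists
    print(guess_filename(f))             # 'report.csv'

print(guess_filename(io.BytesIO(b"x")))  # None: no usable .name attribute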
PaddlePaddle/ERNIE
15eddb022ce1beb281777e9ab8807a1bdfa7a76e
propeller/paddle/train/metrics.py
python
Mrr.__init__
(self, qid, label, pred)
doc
doc
[ "doc" ]
def __init__(self, qid, label, pred): """doc""" if label.shape != pred.shape: raise ValueError( 'expect label shape == pred shape, got: label.shape=%s, pred.shape = %s' % (repr(label), repr(pred))) self.qid = qid self.label = label self.pred = pred self.reset()
[ "def", "__init__", "(", "self", ",", "qid", ",", "label", ",", "pred", ")", ":", "if", "label", ".", "shape", "!=", "pred", ".", "shape", ":", "raise", "ValueError", "(", "'expect label shape == pred shape, got: label.shape=%s, pred.shape = %s'", "%", "(", "repr", "(", "label", ")", ",", "repr", "(", "pred", ")", ")", ")", "self", ".", "qid", "=", "qid", "self", ".", "label", "=", "label", "self", ".", "pred", "=", "pred", "self", ".", "reset", "(", ")" ]
https://github.com/PaddlePaddle/ERNIE/blob/15eddb022ce1beb281777e9ab8807a1bdfa7a76e/propeller/paddle/train/metrics.py#L447-L457
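A construction sketch, assuming the three inputs are NumPy arrays as the .shape checks imply; the values are made up:

import numpy as np

qid = np.array([0, 0, 1, 1])
label = np.array([1, 0, 0, 1])
pred = np.array([0.9, 0.2, 0.3, 0.8])
mrr = Mrr(qid, label, pred)  # label.shape == pred.shape, so no ValueError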
dipu-bd/lightnovel-crawler
eca7a71f217ce7a6b0a54d2e2afb349571871880
sources/en/n/novelmtl.py
python
NovelMTLCrawler.read_novel_info
(self)
Get novel title, author, cover etc
Get novel title, author, cover etc
[ "Get", "novel", "title", "author", "cover", "etc" ]
def read_novel_info(self): '''Get novel title, autor, cover etc''' logger.debug('Visiting %s', self.novel_url) soup = self.get_soup(self.novel_url) self.novel_title = soup.select_one('.novel-info .novel-title').text.strip() logger.info('Novel title: %s', self.novel_title) try: self.novel_cover = self.absolute_url( soup.select_one('#novel figure.cover img')['data-src']) logger.info('Novel cover: %s', self.novel_cover) except Exception as e: logger.debug('Failed to get novel cover: %s', e) try: self.novel_author = soup.select_one( '.novel-info .author span[itemprop="author"]').text.strip() logger.info('%s', self.novel_author) except Exception as e: logger.debug('Failed to parse novel author. Error: %s', e) last_page = soup.select('#chapters .pagination li a')[-1]['href'] logger.debug('Last page: %s', last_page) last_page_qs = parse_qs(urlparse(last_page).query) max_page = int(last_page_qs['page'][0]) wjm = last_page_qs['wjm'][0] logger.debug('Max page: %d, wjm = %s', max_page, wjm) futures = [] for i in range(max_page + 1): payload = { 'page': i, 'wjm': wjm, '_': self.cur_time, 'X-Requested-With': 'XMLHttpRequest', } url = chapter_list_url + '?' + '&'.join([ '%s=%s' % (k, v) for k, v in payload.items() ]) logger.debug('Fetching chapters from %s', url) futures.append(self.executor.submit(self.get_soup, url)) # end for for page, f in enumerate(futures): soup = f.result() vol_id = page + 1 self.volumes.append({'id': vol_id}) for a in soup.select('ul.chapter-list li a'): chap_id = len(self.chapters) + 1 self.chapters.append({ 'id': chap_id, 'volume': vol_id, 'url': self.absolute_url(a['href']), 'title': a.select_one('.chapter-title').text.strip(), })
[ "def", "read_novel_info", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Visiting %s'", ",", "self", ".", "novel_url", ")", "soup", "=", "self", ".", "get_soup", "(", "self", ".", "novel_url", ")", "self", ".", "novel_title", "=", "soup", ".", "select_one", "(", "'.novel-info .novel-title'", ")", ".", "text", ".", "strip", "(", ")", "logger", ".", "info", "(", "'Novel title: %s'", ",", "self", ".", "novel_title", ")", "try", ":", "self", ".", "novel_cover", "=", "self", ".", "absolute_url", "(", "soup", ".", "select_one", "(", "'#novel figure.cover img'", ")", "[", "'data-src'", "]", ")", "logger", ".", "info", "(", "'Novel cover: %s'", ",", "self", ".", "novel_cover", ")", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "'Failed to get novel cover: %s'", ",", "e", ")", "try", ":", "self", ".", "novel_author", "=", "soup", ".", "select_one", "(", "'.novel-info .author span[itemprop=\"author\"]'", ")", ".", "text", ".", "strip", "(", ")", "logger", ".", "info", "(", "'%s'", ",", "self", ".", "novel_author", ")", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "'Failed to parse novel author. Error: %s'", ",", "e", ")", "last_page", "=", "soup", ".", "select", "(", "'#chapters .pagination li a'", ")", "[", "-", "1", "]", "[", "'href'", "]", "logger", ".", "debug", "(", "'Last page: %s'", ",", "last_page", ")", "last_page_qs", "=", "parse_qs", "(", "urlparse", "(", "last_page", ")", ".", "query", ")", "max_page", "=", "int", "(", "last_page_qs", "[", "'page'", "]", "[", "0", "]", ")", "wjm", "=", "last_page_qs", "[", "'wjm'", "]", "[", "0", "]", "logger", ".", "debug", "(", "'Max page: %d, wjm = %s'", ",", "max_page", ",", "wjm", ")", "futures", "=", "[", "]", "for", "i", "in", "range", "(", "max_page", "+", "1", ")", ":", "payload", "=", "{", "'page'", ":", "i", ",", "'wjm'", ":", "wjm", ",", "'_'", ":", "self", ".", "cur_time", ",", "'X-Requested-With'", ":", "'XMLHttpRequest'", ",", "}", "url", "=", "chapter_list_url", "+", "'?'", "+", "'&'", ".", "join", "(", "[", "'%s=%s'", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "payload", ".", "items", "(", ")", "]", ")", "logger", ".", "debug", "(", "'Fetching chapters from %s'", ",", "url", ")", "futures", ".", "append", "(", "self", ".", "executor", ".", "submit", "(", "self", ".", "get_soup", ",", "url", ")", ")", "# end for", "for", "page", ",", "f", "in", "enumerate", "(", "futures", ")", ":", "soup", "=", "f", ".", "result", "(", ")", "vol_id", "=", "page", "+", "1", "self", ".", "volumes", ".", "append", "(", "{", "'id'", ":", "vol_id", "}", ")", "for", "a", "in", "soup", ".", "select", "(", "'ul.chapter-list li a'", ")", ":", "chap_id", "=", "len", "(", "self", ".", "chapters", ")", "+", "1", "self", ".", "chapters", ".", "append", "(", "{", "'id'", ":", "chap_id", ",", "'volume'", ":", "vol_id", ",", "'url'", ":", "self", ".", "absolute_url", "(", "a", "[", "'href'", "]", ")", ",", "'title'", ":", "a", ".", "select_one", "(", "'.chapter-title'", ")", ".", "text", ".", "strip", "(", ")", ",", "}", ")" ]
https://github.com/dipu-bd/lightnovel-crawler/blob/eca7a71f217ce7a6b0a54d2e2afb349571871880/sources/en/n/novelmtl.py#L55-L111
ricequant/rqalpha-mod-ctp
bfd40801f9a182226a911cac74660f62993eb6db
rqalpha_mod_ctp/ctp/pyctp/linux64_35/__init__.py
python
TraderApi.OnRspQryCombAction
(self, pCombAction, pRspInfo, nRequestID, bIsLast)
Response to a request to query combination actions
Response to a request to query combination actions
[ "Response", "to", "a", "request", "to", "query", "combination", "actions" ]
def OnRspQryCombAction(self, pCombAction, pRspInfo, nRequestID, bIsLast): """Response to a request to query combination actions"""
[ "def", "OnRspQryCombAction", "(", "self", ",", "pCombAction", ",", "pRspInfo", ",", "nRequestID", ",", "bIsLast", ")", ":" ]
https://github.com/ricequant/rqalpha-mod-ctp/blob/bfd40801f9a182226a911cac74660f62993eb6db/rqalpha_mod_ctp/ctp/pyctp/linux64_35/__init__.py#L660-L661
chrysn/aiocoap
1f03d4ceb969b2b443c288c312d44c3b7c3e2031
aiocoap/cli/common.py
python
add_server_arguments
(parser)
Add the --bind option to an argparse parser
Add the --bind option to an argparse parser
[ "Add", "the", "--", "bind", "option", "to", "an", "argparse", "parser" ]
def add_server_arguments(parser): """Add the --bind option to an argparse parser""" def hostportsplit_helper(arg): """Wrapper around hostportsplit that gives better error messages than 'invalid hostportsplit value'""" try: return hostportsplit(arg) except ValueError: raise parser.error("Invalid argument to --bind." + " Did you mean --bind '[%s]'?" % arg if arg.count(':') >= 2 and '[' not in arg else " See --help-bind for details.") parser.add_argument('--bind', help="Host and/or port to bind to (see --help-bind for details)", type=hostportsplit_helper, default=None) parser.add_argument('--credentials', help="JSON file pointing to credentials for the server's identity/ies.", type=Path) # These are to be eventually migrated into credentials parser.add_argument('--tls-server-certificate', help="TLS certificate (chain) to present to connecting clients (in PEM format)", metavar="CRT") parser.add_argument('--tls-server-key', help="TLS key to load that supports the server certificate", metavar="KEY") parser.add_argument('--help-bind', help=argparse.SUPPRESS, action=_HelpBind)
[ "def", "add_server_arguments", "(", "parser", ")", ":", "def", "hostportsplit_helper", "(", "arg", ")", ":", "\"\"\"Wrapper around hostportsplit that gives better error messages than\n 'invalid hostportsplit value'\"\"\"", "try", ":", "return", "hostportsplit", "(", "arg", ")", "except", "ValueError", ":", "raise", "parser", ".", "error", "(", "\"Invalid argument to --bind.\"", "+", "\" Did you mean --bind '[%s]'?\"", "%", "arg", "if", "arg", ".", "count", "(", "':'", ")", ">=", "2", "and", "'['", "not", "in", "arg", "else", "\" See --help-bind for details.\"", ")", "parser", ".", "add_argument", "(", "'--bind'", ",", "help", "=", "\"Host and/or port to bind to (see --help-bind for details)\"", ",", "type", "=", "hostportsplit_helper", ",", "default", "=", "None", ")", "parser", ".", "add_argument", "(", "'--credentials'", ",", "help", "=", "\"JSON file pointing to credentials for the server's identity/ies.\"", ",", "type", "=", "Path", ")", "# These are to be eventually migrated into credentials", "parser", ".", "add_argument", "(", "'--tls-server-certificate'", ",", "help", "=", "\"TLS certificate (chain) to present to connecting clients (in PEM format)\"", ",", "metavar", "=", "\"CRT\"", ")", "parser", ".", "add_argument", "(", "'--tls-server-key'", ",", "help", "=", "\"TLS key to load that supports the server certificate\"", ",", "metavar", "=", "\"KEY\"", ")", "parser", ".", "add_argument", "(", "'--help-bind'", ",", "help", "=", "argparse", ".", "SUPPRESS", ",", "action", "=", "_HelpBind", ")" ]
https://github.com/chrysn/aiocoap/blob/1f03d4ceb969b2b443c288c312d44c3b7c3e2031/aiocoap/cli/common.py#L59-L82
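A minimal wiring sketch for the helper; the flag names come from the code above, while the exact shape of hostportsplit's return value is an assumption:

import argparse

parser = argparse.ArgumentParser()
add_server_arguments(parser)
args = parser.parse_args(["--bind", "[::1]:5683"])
# args.bind now holds whatever hostportsplit produced for host '::1', port 5683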
omz/PythonistaAppTemplate
f560f93f8876d82a21d108977f90583df08d55af
PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/parsedatetime/__init__.py
python
_initSymbols
(ptc)
Initialize symbols and single character constants.
Initialize symbols and single character constants.
[ "Initialize", "symbols", "and", "single", "character", "constants", "." ]
def _initSymbols(ptc): """ Initialize symbols and single character constants. """ # build am and pm lists to contain # original case, lowercase and first-char # versions of the meridian text if len(ptc.locale.meridian) > 0: am = ptc.locale.meridian[0] ptc.am = [ am ] if len(am) > 0: ptc.am.append(am[0]) am = am.lower() ptc.am.append(am) ptc.am.append(am[0]) else: am = '' ptc.am = [ '', '' ] if len(ptc.locale.meridian) > 1: pm = ptc.locale.meridian[1] ptc.pm = [ pm ] if len(pm) > 0: ptc.pm.append(pm[0]) pm = pm.lower() ptc.pm.append(pm) ptc.pm.append(pm[0]) else: pm = '' ptc.pm = [ '', '' ]
[ "def", "_initSymbols", "(", "ptc", ")", ":", "# build am and pm lists to contain", "# original case, lowercase and first-char", "# versions of the meridian text", "if", "len", "(", "ptc", ".", "locale", ".", "meridian", ")", ">", "0", ":", "am", "=", "ptc", ".", "locale", ".", "meridian", "[", "0", "]", "ptc", ".", "am", "=", "[", "am", "]", "if", "len", "(", "am", ")", ">", "0", ":", "ptc", ".", "am", ".", "append", "(", "am", "[", "0", "]", ")", "am", "=", "am", ".", "lower", "(", ")", "ptc", ".", "am", ".", "append", "(", "am", ")", "ptc", ".", "am", ".", "append", "(", "am", "[", "0", "]", ")", "else", ":", "am", "=", "''", "ptc", ".", "am", "=", "[", "''", ",", "''", "]", "if", "len", "(", "ptc", ".", "locale", ".", "meridian", ")", ">", "1", ":", "pm", "=", "ptc", ".", "locale", ".", "meridian", "[", "1", "]", "ptc", ".", "pm", "=", "[", "pm", "]", "if", "len", "(", "pm", ")", ">", "0", ":", "ptc", ".", "pm", ".", "append", "(", "pm", "[", "0", "]", ")", "pm", "=", "pm", ".", "lower", "(", ")", "ptc", ".", "pm", ".", "append", "(", "pm", ")", "ptc", ".", "pm", ".", "append", "(", "pm", "[", "0", "]", ")", "else", ":", "pm", "=", "''", "ptc", ".", "pm", "=", "[", "''", ",", "''", "]" ]
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/parsedatetime/__init__.py#L1930-L1962
kanzure/nanoengineer
874e4c9f8a9190f093625b267f9767e19f82e6c4
cad/src/command_support/EditCommand.py
python
EditCommand._removeStructure
(self)
Remove this structure. @see: L{self.cancelStructure}
Remove this structure.
[ "Remove", "this", "structure", "." ]
def _removeStructure(self): """ Remove this structure. @see: L{self.cancelStructure} """ if self.struct is not None: self.struct.kill_with_contents() self.struct = None self._revertNumber() self.win.win_update()
[ "def", "_removeStructure", "(", "self", ")", ":", "if", "self", ".", "struct", "is", "not", "None", ":", "self", ".", "struct", ".", "kill_with_contents", "(", ")", "self", ".", "struct", "=", "None", "self", ".", "_revertNumber", "(", ")", "self", ".", "win", ".", "win_update", "(", ")" ]
https://github.com/kanzure/nanoengineer/blob/874e4c9f8a9190f093625b267f9767e19f82e6c4/cad/src/command_support/EditCommand.py#L358-L367
diegma/graph-2-text
56e19d3066116c26464acde46c1b52ec46f319e6
onmt/Models.py
python
RNNEncoder.forward
(self, src, lengths=None, encoder_state=None)
return encoder_final, memory_bank
See :obj:`EncoderBase.forward()`
See :obj:`EncoderBase.forward()`
[ "See", ":", "obj", ":", "EncoderBase", ".", "forward", "()" ]
def forward(self, src, lengths=None, encoder_state=None): "See :obj:`EncoderBase.forward()`" self._check_args(src, lengths, encoder_state) emb = self.embeddings(src) s_len, batch, emb_dim = emb.size() packed_emb = emb if lengths is not None and not self.no_pack_padded_seq: # Lengths data is wrapped inside a Variable. lengths = lengths.view(-1).tolist() packed_emb = pack(emb, lengths) memory_bank, encoder_final = self.rnn(packed_emb, encoder_state) if lengths is not None and not self.no_pack_padded_seq: memory_bank = unpack(memory_bank)[0] if self.use_bridge: encoder_final = self._bridge(encoder_final) return encoder_final, memory_bank
[ "def", "forward", "(", "self", ",", "src", ",", "lengths", "=", "None", ",", "encoder_state", "=", "None", ")", ":", "self", ".", "_check_args", "(", "src", ",", "lengths", ",", "encoder_state", ")", "emb", "=", "self", ".", "embeddings", "(", "src", ")", "s_len", ",", "batch", ",", "emb_dim", "=", "emb", ".", "size", "(", ")", "packed_emb", "=", "emb", "if", "lengths", "is", "not", "None", "and", "not", "self", ".", "no_pack_padded_seq", ":", "# Lengths data is wrapped inside a Variable.", "lengths", "=", "lengths", ".", "view", "(", "-", "1", ")", ".", "tolist", "(", ")", "packed_emb", "=", "pack", "(", "emb", ",", "lengths", ")", "memory_bank", ",", "encoder_final", "=", "self", ".", "rnn", "(", "packed_emb", ",", "encoder_state", ")", "if", "lengths", "is", "not", "None", "and", "not", "self", ".", "no_pack_padded_seq", ":", "memory_bank", "=", "unpack", "(", "memory_bank", ")", "[", "0", "]", "if", "self", ".", "use_bridge", ":", "encoder_final", "=", "self", ".", "_bridge", "(", "encoder_final", ")", "return", "encoder_final", ",", "memory_bank" ]
https://github.com/diegma/graph-2-text/blob/56e19d3066116c26464acde46c1b52ec46f319e6/onmt/Models.py#L134-L154
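A call-site sketch, assuming a constructed RNNEncoder named 'encoder' and the Variable-era PyTorch conventions this file uses (src is [seq_len, batch, nfeat] of token ids, lengths sorted in descending order for packing):

import torch
from torch.autograd import Variable

src = Variable(torch.ones(20, 4, 1).long())  # hypothetical token ids
lengths = torch.LongTensor([20, 18, 15, 9])  # descending, as pack() expects
encoder_final, memory_bank = encoder(src, lengths)
# memory_bank: [20, 4, rnn_size]; encoder_final seeds the decoder's initial state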
firedm/FireDM
d27d3d27c869625f75520ca2bacfa9ebd11caf2f
firedm/controller.py
python
Controller._update_playlist_menu
(self, pl_menu)
update playlist menu and send notification to view
update playlist menu and send notification to view
[ "update", "playlist", "menu", "and", "send", "notification", "to", "view" ]
def _update_playlist_menu(self, pl_menu): """update playlist menu and send notification to view""" self.playlist_menu = pl_menu self._update_view(command='playlist_menu', playlist_menu=pl_menu)
[ "def", "_update_playlist_menu", "(", "self", ",", "pl_menu", ")", ":", "self", ".", "playlist_menu", "=", "pl_menu", "self", ".", "_update_view", "(", "command", "=", "'playlist_menu'", ",", "playlist_menu", "=", "pl_menu", ")" ]
https://github.com/firedm/FireDM/blob/d27d3d27c869625f75520ca2bacfa9ebd11caf2f/firedm/controller.py#L586-L589
coderedcorp/coderedcms
14bd43ffa418279986658dde93005bd6dfbd763c
coderedcms/models/page_models.py
python
CoderedEventIndexPage.fullcalendar_view
(self)
return { self.CalendarViews.NONE: '', self.CalendarViews.MONTH: 'dayGridMonth', self.CalendarViews.AGENDA_WEEK: 'timeGridWeek', self.CalendarViews.AGENDA_DAY: 'timeGridDay', self.CalendarViews.LIST_MONTH: 'listMonth', }[self.default_calendar_view]
Translate calendar views to fullcalendar.js identifiers.
Translate calendar views to fullcalendar.js identifiers.
[ "Translate", "calendar", "views", "to", "fullcalendar", ".", "js", "identifiers", "." ]
def fullcalendar_view(self) -> str: """ Translate calendar views to fullcalendar.js identifiers. """ return { self.CalendarViews.NONE: '', self.CalendarViews.MONTH: 'dayGridMonth', self.CalendarViews.AGENDA_WEEK: 'timeGridWeek', self.CalendarViews.AGENDA_DAY: 'timeGridDay', self.CalendarViews.LIST_MONTH: 'listMonth', }[self.default_calendar_view]
[ "def", "fullcalendar_view", "(", "self", ")", "->", "str", ":", "return", "{", "self", ".", "CalendarViews", ".", "NONE", ":", "''", ",", "self", ".", "CalendarViews", ".", "MONTH", ":", "'dayGridMonth'", ",", "self", ".", "CalendarViews", ".", "AGENDA_WEEK", ":", "'timeGridWeek'", ",", "self", ".", "CalendarViews", ".", "AGENDA_DAY", ":", "'timeGridDay'", ",", "self", ".", "CalendarViews", ".", "LIST_MONTH", ":", "'listMonth'", ",", "}", "[", "self", ".", "default_calendar_view", "]" ]
https://github.com/coderedcorp/coderedcms/blob/14bd43ffa418279986658dde93005bd6dfbd763c/coderedcms/models/page_models.py#L972-L982
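The property is a plain dict dispatch, so each enum member maps to exactly one fullcalendar.js view id; a tiny illustration with a hypothetical page instance:

# page is assumed to be a CoderedEventIndexPage with
# default_calendar_view == CalendarViews.AGENDA_WEEK
assert page.fullcalendar_view == 'timeGridWeek'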