column              type            min      max
nwo                 stringlengths   5        106
sha                 stringlengths   40       40
path                stringlengths   4        174
language            stringclasses   1 value
identifier          stringlengths   1        140
parameters          stringlengths   0        87.7k
argument_list       stringclasses   1 value
return_statement    stringlengths   0        426k
docstring           stringlengths   0        64.3k
docstring_summary   stringlengths   0        26.3k
docstring_tokens    list
function            stringlengths   18       4.83M
function_tokens     list
url                 stringlengths   83       304
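Each record below lists these fields in the same order (empty fields are omitted). As a minimal sketch of how such records could be inspected, assuming they were exported as JSON Lines with exactly these field names (the file name functions.jsonl is hypothetical):

import json

# Hypothetical JSON Lines export of the records shown below,
# one object per line with the columns from the schema above.
with open("functions.jsonl", "r", encoding="utf-8") as fh:
    for line in fh:
        record = json.loads(line)
        # e.g. "AppScale/gts", ".../webapp2_extras/i18n.py", "format_time"
        print(record["nwo"], record["path"], record["identifier"])
        print(record["docstring_summary"])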
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/webapp2-2.5.1/webapp2_extras/i18n.py
python
format_time
(time=None, format=None, rebase=True)
return get_i18n().format_time(time, format, rebase)
See :meth:`I18n.format_time`.
See :meth:`I18n.format_time`.
[ "See", ":", "meth", ":", "I18n", ".", "format_time", "." ]
def format_time(time=None, format=None, rebase=True):
    """See :meth:`I18n.format_time`."""
    return get_i18n().format_time(time, format, rebase)
[ "def", "format_time", "(", "time", "=", "None", ",", "format", "=", "None", ",", "rebase", "=", "True", ")", ":", "return", "get_i18n", "(", ")", ".", "format_time", "(", "time", ",", "format", ",", "rebase", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/webapp2-2.5.1/webapp2_extras/i18n.py#L741-L743
peering-manager/peering-manager
62c870fb9caa6dfc056feb77c595d45bc3c4988a
messaging/filters.py
python
EmailFilterSet.search
(self, queryset, name, value)
return queryset.filter( Q(name__icontains=value) | Q(subject__icontains=value) | Q(template__icontains=value) )
[]
def search(self, queryset, name, value):
    if not value.strip():
        return queryset
    return queryset.filter(
        Q(name__icontains=value)
        | Q(subject__icontains=value)
        | Q(template__icontains=value)
    )
[ "def", "search", "(", "self", ",", "queryset", ",", "name", ",", "value", ")", ":", "if", "not", "value", ".", "strip", "(", ")", ":", "return", "queryset", "return", "queryset", ".", "filter", "(", "Q", "(", "name__icontains", "=", "value", ")", "|", "Q", "(", "subject__icontains", "=", "value", ")", "|", "Q", "(", "template__icontains", "=", "value", ")", ")" ]
https://github.com/peering-manager/peering-manager/blob/62c870fb9caa6dfc056feb77c595d45bc3c4988a/messaging/filters.py#L73-L80
boto/boto
b2a6f08122b2f1b89888d2848e730893595cd001
boto/kms/layer1.py
python
KMSConnection.update_key_description
(self, key_id, description)
return self.make_request(action='UpdateKeyDescription', body=json.dumps(params))
:type key_id: string :param key_id: :type description: string :param description:
[]
def update_key_description(self, key_id, description):
    """
    :type key_id: string
    :param key_id:

    :type description: string
    :param description:

    """
    params = {'KeyId': key_id, 'Description': description, }
    return self.make_request(action='UpdateKeyDescription',
                             body=json.dumps(params))
[ "def", "update_key_description", "(", "self", ",", "key_id", ",", "description", ")", ":", "params", "=", "{", "'KeyId'", ":", "key_id", ",", "'Description'", ":", "description", ",", "}", "return", "self", ".", "make_request", "(", "action", "=", "'UpdateKeyDescription'", ",", "body", "=", "json", ".", "dumps", "(", "params", ")", ")" ]
https://github.com/boto/boto/blob/b2a6f08122b2f1b89888d2848e730893595cd001/boto/kms/layer1.py#L783-L796
interpretml/interpret-community
84d86b7514fd9812f1497329bf1c4c9fc864370e
python/interpret_community/common/warnings_suppressor.py
python
shap_warnings_suppressor.__enter__
(self)
return log
Begins suppressing shap warnings.
Begins suppressing shap warnings.
[ "Begins", "suppressing", "shap", "warnings", "." ]
def __enter__(self):
    """Begins suppressing shap warnings."""
    if self._entered:
        raise RuntimeError("Cannot enter %r twice" % self)
    self._entered = True
    self._tf_warnings_suppressor.__enter__()
    log = self._catch_warnings.__enter__()
    warnings.filterwarnings('ignore', 'Starting from version 2.2.1', UserWarning)
    return log
[ "def", "__enter__", "(", "self", ")", ":", "if", "self", ".", "_entered", ":", "raise", "RuntimeError", "(", "\"Cannot enter %r twice\"", "%", "self", ")", "self", ".", "_entered", "=", "True", "self", ".", "_tf_warnings_suppressor", ".", "__enter__", "(", ")", "log", "=", "self", ".", "_catch_warnings", ".", "__enter__", "(", ")", "warnings", ".", "filterwarnings", "(", "'ignore'", ",", "'Starting from version 2.2.1'", ",", "UserWarning", ")", "return", "log" ]
https://github.com/interpretml/interpret-community/blob/84d86b7514fd9812f1497329bf1c4c9fc864370e/python/interpret_community/common/warnings_suppressor.py#L51-L59
rspivak/lsbasi
07e1a14516156a21ebe2d82e0bae4bba5ad73dd6
part9/python/spi.py
python
Parser.term
(self)
return node
term : factor ((MUL | DIV) factor)*
term : factor ((MUL | DIV) factor)*
[ "term", ":", "factor", "((", "MUL", "|", "DIV", ")", "factor", ")", "*" ]
def term(self):
    """term : factor ((MUL | DIV) factor)*"""
    node = self.factor()

    while self.current_token.type in (MUL, DIV):
        token = self.current_token
        if token.type == MUL:
            self.eat(MUL)
        elif token.type == DIV:
            self.eat(DIV)

        node = BinOp(left=node, op=token, right=self.factor())

    return node
[ "def", "term", "(", "self", ")", ":", "node", "=", "self", ".", "factor", "(", ")", "while", "self", ".", "current_token", ".", "type", "in", "(", "MUL", ",", "DIV", ")", ":", "token", "=", "self", ".", "current_token", "if", "token", ".", "type", "==", "MUL", ":", "self", ".", "eat", "(", "MUL", ")", "elif", "token", ".", "type", "==", "DIV", ":", "self", ".", "eat", "(", "DIV", ")", "node", "=", "BinOp", "(", "left", "=", "node", ",", "op", "=", "token", ",", "right", "=", "self", ".", "factor", "(", ")", ")", "return", "node" ]
https://github.com/rspivak/lsbasi/blob/07e1a14516156a21ebe2d82e0bae4bba5ad73dd6/part9/python/spi.py#L320-L333
pyexcel/pyexcel
c1c99d4724e5c2adc6b714116a050287e07e1835
pyexcel/internal/source_plugin.py
python
SourcePluginManager.get_book_source
(self, **keywords)
return self.get_a_plugin( target=constants.BOOK, action=constants.READ_ACTION, **keywords )
obtain a book read source plugin for signature functions
obtain a book read source plugin for signature functions
[ "obtain", "a", "book", "read", "source", "plugin", "for", "signature", "functions" ]
def get_book_source(self, **keywords):
    """obtain a book read source plugin for signature functions"""
    return self.get_a_plugin(
        target=constants.BOOK, action=constants.READ_ACTION, **keywords
    )
[ "def", "get_book_source", "(", "self", ",", "*", "*", "keywords", ")", ":", "return", "self", ".", "get_a_plugin", "(", "target", "=", "constants", ".", "BOOK", ",", "action", "=", "constants", ".", "READ_ACTION", ",", "*", "*", "keywords", ")" ]
https://github.com/pyexcel/pyexcel/blob/c1c99d4724e5c2adc6b714116a050287e07e1835/pyexcel/internal/source_plugin.py#L83-L87
PacktPublishing/Expert-Python-Programming_Second-Edition
2ccdbd302dea96aecc3aef04aaf08b0cb937f30a
chapter2/lists.py
python
evens_using_for_loop
(count)
return evens
Calculate evens using for loop
Calculate evens using for loop
[ "Calculate", "evens", "using", "for", "loop" ]
def evens_using_for_loop(count):
    """ Calculate evens using for loop """
    evens = []
    for i in range(count):
        if i % 2 == 0:
            evens.append(i)
    return evens
[ "def", "evens_using_for_loop", "(", "count", ")", ":", "evens", "=", "[", "]", "for", "i", "in", "range", "(", "count", ")", ":", "if", "i", "%", "2", "==", "0", ":", "evens", ".", "append", "(", "i", ")", "return", "evens" ]
https://github.com/PacktPublishing/Expert-Python-Programming_Second-Edition/blob/2ccdbd302dea96aecc3aef04aaf08b0cb937f30a/chapter2/lists.py#L7-L13
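As an aside (not part of the record), the loop above has a one-line list-comprehension equivalent; the helper name here is ours:

def evens_using_comprehension(count):
    # Produces the same list as evens_using_for_loop above.
    return [i for i in range(count) if i % 2 == 0]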
FSecureLABS/Jandroid
e31d0dab58a2bfd6ed8e0a387172b8bd7c893436
libs/platform-tools/platform-tools_windows/systrace/catapult/devil/devil/android/apk_helper.py
python
ApkHelper.GetAbis
(self)
Returns a list of ABIs in the apk (empty list if no native code).
Returns a list of ABIs in the apk (empty list if no native code).
[ "Returns", "a", "list", "of", "ABIs", "in", "the", "apk", "(", "empty", "list", "if", "no", "native", "code", ")", "." ]
def GetAbis(self):
    """Returns a list of ABIs in the apk (empty list if no native code)."""
    # Use lib/* to determine the compatible ABIs.
    libs = set()
    for path in self._ListApkPaths():
        path_tokens = path.split('/')
        if len(path_tokens) >= 2 and path_tokens[0] == 'lib':
            libs.add(path_tokens[1])
    lib_to_abi = {
        'armeabi-v7a': ['armeabi-v7a', 'arm64-v8a'],
        'arm64-v8a': ['arm64-v8a'],
        'x86': ['x86', 'x64'],
        'x64': ['x64']
    }
    try:
        output = set()
        for lib in libs:
            for abi in lib_to_abi[lib]:
                output.add(abi)
        return sorted(output)
    except KeyError:
        raise base_error.BaseError('Unexpected ABI in lib/* folder.')
[ "def", "GetAbis", "(", "self", ")", ":", "# Use lib/* to determine the compatible ABIs.", "libs", "=", "set", "(", ")", "for", "path", "in", "self", ".", "_ListApkPaths", "(", ")", ":", "path_tokens", "=", "path", ".", "split", "(", "'/'", ")", "if", "len", "(", "path_tokens", ")", ">=", "2", "and", "path_tokens", "[", "0", "]", "==", "'lib'", ":", "libs", ".", "add", "(", "path_tokens", "[", "1", "]", ")", "lib_to_abi", "=", "{", "'armeabi-v7a'", ":", "[", "'armeabi-v7a'", ",", "'arm64-v8a'", "]", ",", "'arm64-v8a'", ":", "[", "'arm64-v8a'", "]", ",", "'x86'", ":", "[", "'x86'", ",", "'x64'", "]", ",", "'x64'", ":", "[", "'x64'", "]", "}", "try", ":", "output", "=", "set", "(", ")", "for", "lib", "in", "libs", ":", "for", "abi", "in", "lib_to_abi", "[", "lib", "]", ":", "output", ".", "add", "(", "abi", ")", "return", "sorted", "(", "output", ")", "except", "KeyError", ":", "raise", "base_error", ".", "BaseError", "(", "'Unexpected ABI in lib/* folder.'", ")" ]
https://github.com/FSecureLABS/Jandroid/blob/e31d0dab58a2bfd6ed8e0a387172b8bd7c893436/libs/platform-tools/platform-tools_windows/systrace/catapult/devil/devil/android/apk_helper.py#L251-L272
ultrabug/py3status
80ec45a9db0712b0de55f83291c321f6fceb6a6d
py3status/py3.py
python
Py3.get_color_names_list
(self, format_string, matches=None)
return list(found)
Returns a list of color names in ``format_string``. :param format_string: Accepts a format string. :param matches: Filter results with a string or a list of strings. If ``matches`` is provided then it is used to filter the result using fnmatch so the following patterns can be used: .. code-block:: none * matches everything ? matches any single character [seq] matches any character in seq [!seq] matches any character not in seq
Returns a list of color names in ``format_string``.
[ "Returns", "a", "list", "of", "color", "names", "in", "format_string", "." ]
def get_color_names_list(self, format_string, matches=None):
    """
    Returns a list of color names in ``format_string``.

    :param format_string: Accepts a format string.
    :param matches: Filter results with a string or a list of strings.
        If ``matches`` is provided then it is used to filter the result
        using fnmatch so the following patterns can be used:

        .. code-block:: none

            *       matches everything
            ?       matches any single character
            [seq]   matches any character in seq
            [!seq]  matches any character not in seq
    """
    if not getattr(self._py3status_module, "thresholds", None):
        return []
    elif not format_string:
        return []

    if format_string not in self._format_color_names:
        names = self._formatter.get_color_names(format_string)
        self._format_color_names[format_string] = names
    else:
        names = self._format_color_names[format_string]

    if not matches:
        return list(names)
    elif isinstance(matches, str):
        matches = [matches]

    # filter matches
    found = set()
    for match in matches:
        for name in names:
            if fnmatch(name, match):
                found.add(name)
    return list(found)
[ "def", "get_color_names_list", "(", "self", ",", "format_string", ",", "matches", "=", "None", ")", ":", "if", "not", "getattr", "(", "self", ".", "_py3status_module", ",", "\"thresholds\"", ",", "None", ")", ":", "return", "[", "]", "elif", "not", "format_string", ":", "return", "[", "]", "if", "format_string", "not", "in", "self", ".", "_format_color_names", ":", "names", "=", "self", ".", "_formatter", ".", "get_color_names", "(", "format_string", ")", "self", ".", "_format_color_names", "[", "format_string", "]", "=", "names", "else", ":", "names", "=", "self", ".", "_format_color_names", "[", "format_string", "]", "if", "not", "matches", ":", "return", "list", "(", "names", ")", "elif", "isinstance", "(", "matches", ",", "str", ")", ":", "matches", "=", "[", "matches", "]", "# filter matches", "found", "=", "set", "(", ")", "for", "match", "in", "matches", ":", "for", "name", "in", "names", ":", "if", "fnmatch", "(", "name", ",", "match", ")", ":", "found", ".", "add", "(", "name", ")", "return", "list", "(", "found", ")" ]
https://github.com/ultrabug/py3status/blob/80ec45a9db0712b0de55f83291c321f6fceb6a6d/py3status/py3.py#L685-L723
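The docstring above relies on fnmatch-style patterns; a standalone illustration of how those patterns match (the names are made up, independent of py3status):

from fnmatch import fnmatch

# '*' matches everything, '?' a single character, '[seq]' characters in seq.
print(fnmatch("color_good", "color_*"))   # True
print(fnmatch("color_bad", "color_ba?"))  # True
print(fnmatch("threshold_0", "color_*"))  # False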
inpanel/inpanel
be53d86a72e30dd5476780ed5ba334315a23004b
lib/tornado/web.py
python
RequestHandler._execute
(self, transforms, *args, **kwargs)
Executes this request with the given output transforms.
Executes this request with the given output transforms.
[ "Executes", "this", "request", "with", "the", "given", "output", "transforms", "." ]
def _execute(self, transforms, *args, **kwargs):
    """Executes this request with the given output transforms."""
    self._transforms = transforms
    try:
        if self.request.method not in self.SUPPORTED_METHODS:
            raise HTTPError(405)
        # If XSRF cookies are turned on, reject form submissions without
        # the proper cookie
        if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
           self.application.settings.get("xsrf_cookies"):
            self.check_xsrf_cookie()
        self.prepare()
        if not self._finished:
            args = [self.decode_argument(arg) for arg in args]
            kwargs = dict((k, self.decode_argument(v, name=k))
                          for (k, v) in kwargs.iteritems())
            getattr(self, self.request.method.lower())(*args, **kwargs)
            if self._auto_finish and not self._finished:
                self.finish()
    except Exception as e:
        self._handle_request_exception(e)
[ "def", "_execute", "(", "self", ",", "transforms", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_transforms", "=", "transforms", "try", ":", "if", "self", ".", "request", ".", "method", "not", "in", "self", ".", "SUPPORTED_METHODS", ":", "raise", "HTTPError", "(", "405", ")", "# If XSRF cookies are turned on, reject form submissions without", "# the proper cookie", "if", "self", ".", "request", ".", "method", "not", "in", "(", "\"GET\"", ",", "\"HEAD\"", ",", "\"OPTIONS\"", ")", "and", "self", ".", "application", ".", "settings", ".", "get", "(", "\"xsrf_cookies\"", ")", ":", "self", ".", "check_xsrf_cookie", "(", ")", "self", ".", "prepare", "(", ")", "if", "not", "self", ".", "_finished", ":", "args", "=", "[", "self", ".", "decode_argument", "(", "arg", ")", "for", "arg", "in", "args", "]", "kwargs", "=", "dict", "(", "(", "k", ",", "self", ".", "decode_argument", "(", "v", ",", "name", "=", "k", ")", ")", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "iteritems", "(", ")", ")", "getattr", "(", "self", ",", "self", ".", "request", ".", "method", ".", "lower", "(", ")", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "self", ".", "_auto_finish", "and", "not", "self", ".", "_finished", ":", "self", ".", "finish", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "_handle_request_exception", "(", "e", ")" ]
https://github.com/inpanel/inpanel/blob/be53d86a72e30dd5476780ed5ba334315a23004b/lib/tornado/web.py#L1005-L1025
rlpy/rlpy
af25d2011fff1d61cb7c5cc8992549808f0c6103
rlpy/Domains/HelicopterHover.py
python
HelicopterHoverExtended._in_body_coord
(self, p, q)
return q_p[1:]
q is the inverse quaternion of the rotation of the helicopter in world coordinates
q is the inverse quaternion of the rotation of the helicopter in world coordinates
[ "q", "is", "the", "inverse", "quaternion", "of", "the", "rotation", "of", "the", "helicopter", "in", "world", "coordinates" ]
def _in_body_coord(self, p, q):
    """
    q is the inverse quaternion of the rotation of the helicopter in world coordinates
    """
    q_pos = np.zeros((4))
    q_pos[1:] = p
    q_p = trans.quaternion_multiply(trans.quaternion_multiply(q, q_pos),
                                    trans.quaternion_conjugate(q))
    return q_p[1:]
[ "def", "_in_body_coord", "(", "self", ",", "p", ",", "q", ")", ":", "q_pos", "=", "np", ".", "zeros", "(", "(", "4", ")", ")", "q_pos", "[", "1", ":", "]", "=", "p", "q_p", "=", "trans", ".", "quaternion_multiply", "(", "trans", ".", "quaternion_multiply", "(", "q", ",", "q_pos", ")", ",", "trans", ".", "quaternion_conjugate", "(", "q", ")", ")", "return", "q_p", "[", "1", ":", "]" ]
https://github.com/rlpy/rlpy/blob/af25d2011fff1d61cb7c5cc8992549808f0c6103/rlpy/Domains/HelicopterHover.py#L226-L234
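For context (not part of the record): this is the usual quaternion change of coordinates. The point p is embedded as the pure quaternion (0, p), conjugated as q * (0, p) * conj(q) with q the inverse of the world-frame rotation, and the vector part of the result is returned as the body-frame position.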
Mindwerks/worldengine
64dff8eb7824ce46b5b6cb8006bcef21822ef144
worldengine/basic_map_operations.py
python
distance
(pa, pb)
return math.sqrt((ax - bx) ** 2 + (ay - by) ** 2)
[]
def distance(pa, pb):
    ax, ay = pa
    bx, by = pb
    return math.sqrt((ax - bx) ** 2 + (ay - by) ** 2)
[ "def", "distance", "(", "pa", ",", "pb", ")", ":", "ax", ",", "ay", "=", "pa", "bx", ",", "by", "=", "pb", "return", "math", ".", "sqrt", "(", "(", "ax", "-", "bx", ")", "**", "2", "+", "(", "ay", "-", "by", ")", "**", "2", ")" ]
https://github.com/Mindwerks/worldengine/blob/64dff8eb7824ce46b5b6cb8006bcef21822ef144/worldengine/basic_map_operations.py#L4-L7
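Since this is plain Euclidean distance, a quick self-contained check with the 3-4-5 triangle (the sample points are ours):

import math

def distance(pa, pb):
    # Copied from the record above.
    ax, ay = pa
    bx, by = pb
    return math.sqrt((ax - bx) ** 2 + (ay - by) ** 2)

print(distance((0, 0), (3, 4)))  # 5.0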
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v9/services/services/google_ads_service/client.py
python
GoogleAdsServiceClient.parse_campaign_criterion_path
(path: str)
return m.groupdict() if m else {}
Parse a campaign_criterion path into its component segments.
Parse a campaign_criterion path into its component segments.
[ "Parse", "a", "campaign_criterion", "path", "into", "its", "component", "segments", "." ]
def parse_campaign_criterion_path(path: str) -> Dict[str, str]:
    """Parse a campaign_criterion path into its component segments."""
    m = re.match(
        r"^customers/(?P<customer_id>.+?)/campaignCriteria/(?P<campaign_id>.+?)~(?P<criterion_id>.+?)$",
        path,
    )
    return m.groupdict() if m else {}
[ "def", "parse_campaign_criterion_path", "(", "path", ":", "str", ")", "->", "Dict", "[", "str", ",", "str", "]", ":", "m", "=", "re", ".", "match", "(", "r\"^customers/(?P<customer_id>.+?)/campaignCriteria/(?P<campaign_id>.+?)~(?P<criterion_id>.+?)$\"", ",", "path", ",", ")", "return", "m", ".", "groupdict", "(", ")", "if", "m", "else", "{", "}" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v9/services/services/google_ads_service/client.py#L1087-L1093
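For illustration, the regular expression above splits a resource name like this (the IDs are made up):

import re
from typing import Dict

def parse_campaign_criterion_path(path: str) -> Dict[str, str]:
    # Same pattern as in the record above.
    m = re.match(
        r"^customers/(?P<customer_id>.+?)/campaignCriteria/(?P<campaign_id>.+?)~(?P<criterion_id>.+?)$",
        path,
    )
    return m.groupdict() if m else {}

print(parse_campaign_criterion_path("customers/123/campaignCriteria/456~789"))
# {'customer_id': '123', 'campaign_id': '456', 'criterion_id': '789'}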
maas/maas
db2f89970c640758a51247c59bf1ec6f60cf4ab5
src/maasserver/models/signals/staticipaddress.py
python
post_delete_remake_sip_for_bmc
(sender, instance, **kwargs)
Now that the StaticIPAddress instance is gone, ask each BMC that was using it to make a new one. When a StaticIPAddress is deleted, any BMC models sharing it will automatically set their ip_address links to None. They are then recreated here in post_delete.
Now that the StaticIPAddress instance is gone, ask each BMC that was using it to make a new one.
[ "Now", "that", "the", "StaticIPAddress", "instance", "is", "gone", "ask", "each", "BMC", "that", "was", "using", "it", "to", "make", "a", "new", "one", "." ]
def post_delete_remake_sip_for_bmc(sender, instance, **kwargs):
    """Now that the StaticIPAddress instance is gone, ask each BMC that was
    using it to make a new one.

    When a StaticIPAddress is deleted, any BMC models sharing it will
    automatically set their ip_address links to None. They are then recreated
    here in post_delete.
    """
    for bmc in instance.__previous_bmcs:
        # This BMC model instance was created in pre_delete and hasn't been
        # updated to reflect the just executed deletion. Set the ip_address to
        # None to replicate this. We can avoid the DB hit as we always want a
        # new StaticIPAddress instance to be created by save().
        bmc.ip_address = None
        # BMC.save() will extract and create a new IP from power_parameters.
        bmc.save()
[ "def", "post_delete_remake_sip_for_bmc", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "for", "bmc", "in", "instance", ".", "__previous_bmcs", ":", "# This BMC model instance was created in pre_delete and hasn't been", "# updated to reflect the just executed deletion. Set the ip_address to", "# None to replicate this. We can avoid the DB hit as we always want a", "# new StaticIPAddress instance to be created by save().", "bmc", ".", "ip_address", "=", "None", "# BMC.save() will extract and create a new IP from power_parameters.", "bmc", ".", "save", "(", ")" ]
https://github.com/maas/maas/blob/db2f89970c640758a51247c59bf1ec6f60cf4ab5/src/maasserver/models/signals/staticipaddress.py#L44-L59
CPJKU/madmom
3bc8334099feb310acfce884ebdb76a28e01670d
madmom/evaluation/chords.py
python
reduce_to_triads
(chords, keep_bass=False)
return reduced_chords
Reduce chords to triads. The function follows the reduction rules implemented in [1]_. If a chord chord does not contain a third, major second or fourth, it is reduced to a power chord. If it does not contain neither a third nor a fifth, it is reduced to a single note "chord". Parameters ---------- chords : numpy structured array Chords to be reduced. keep_bass : bool Indicates whether to keep the bass note or set it to 0. Returns ------- reduced_chords : numpy structured array Chords reduced to triads. References ---------- .. [1] Johan Pauwels and Geoffroy Peeters. "Evaluating Automatically Estimated Chord Sequences." In Proceedings of ICASSP 2013, Vancouver, Canada, 2013.
Reduce chords to triads.
[ "Reduce", "chords", "to", "triads", "." ]
def reduce_to_triads(chords, keep_bass=False):
    """
    Reduce chords to triads.

    The function follows the reduction rules implemented in [1]_. If a chord
    chord does not contain a third, major second or fourth, it is reduced to
    a power chord. If it does not contain neither a third nor a fifth, it is
    reduced to a single note "chord".

    Parameters
    ----------
    chords : numpy structured array
        Chords to be reduced.
    keep_bass : bool
        Indicates whether to keep the bass note or set it to 0.

    Returns
    -------
    reduced_chords : numpy structured array
        Chords reduced to triads.

    References
    ----------
    .. [1] Johan Pauwels and Geoffroy Peeters.
           "Evaluating Automatically Estimated Chord Sequences."
           In Proceedings of ICASSP 2013, Vancouver, Canada, 2013.

    """
    unison = chords['intervals'][:, 0].astype(bool)
    maj_sec = chords['intervals'][:, 2].astype(bool)
    min_third = chords['intervals'][:, 3].astype(bool)
    maj_third = chords['intervals'][:, 4].astype(bool)
    perf_fourth = chords['intervals'][:, 5].astype(bool)
    dim_fifth = chords['intervals'][:, 6].astype(bool)
    perf_fifth = chords['intervals'][:, 7].astype(bool)
    aug_fifth = chords['intervals'][:, 8].astype(bool)
    no_chord = (chords['intervals'] == NO_CHORD[-1]).all(axis=1)

    reduced_chords = chords.copy()
    ivs = reduced_chords['intervals']

    ivs[~no_chord] = interval_list('(1)')
    ivs[unison & perf_fifth] = interval_list('(1,5)')
    ivs[~perf_fourth & maj_sec] = _shorthands['sus2']
    ivs[perf_fourth & ~maj_sec] = _shorthands['sus4']

    ivs[min_third] = _shorthands['min']
    ivs[min_third & aug_fifth & ~perf_fifth] = interval_list('(1,b3,#5)')
    ivs[min_third & dim_fifth & ~perf_fifth] = _shorthands['dim']

    ivs[maj_third] = _shorthands['maj']
    ivs[maj_third & dim_fifth & ~perf_fifth] = interval_list('(1,3,b5)')
    ivs[maj_third & aug_fifth & ~perf_fifth] = _shorthands['aug']

    if not keep_bass:
        reduced_chords['bass'] = 0
    else:
        # remove bass notes if they are not part of the intervals anymore
        reduced_chords['bass'] *= ivs[range(len(reduced_chords)),
                                      reduced_chords['bass']]
    # keep -1 in bass for no chords
    reduced_chords['bass'][no_chord] = -1

    return reduced_chords
[ "def", "reduce_to_triads", "(", "chords", ",", "keep_bass", "=", "False", ")", ":", "unison", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "0", "]", ".", "astype", "(", "bool", ")", "maj_sec", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "2", "]", ".", "astype", "(", "bool", ")", "min_third", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "3", "]", ".", "astype", "(", "bool", ")", "maj_third", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "4", "]", ".", "astype", "(", "bool", ")", "perf_fourth", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "5", "]", ".", "astype", "(", "bool", ")", "dim_fifth", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "6", "]", ".", "astype", "(", "bool", ")", "perf_fifth", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "7", "]", ".", "astype", "(", "bool", ")", "aug_fifth", "=", "chords", "[", "'intervals'", "]", "[", ":", ",", "8", "]", ".", "astype", "(", "bool", ")", "no_chord", "=", "(", "chords", "[", "'intervals'", "]", "==", "NO_CHORD", "[", "-", "1", "]", ")", ".", "all", "(", "axis", "=", "1", ")", "reduced_chords", "=", "chords", ".", "copy", "(", ")", "ivs", "=", "reduced_chords", "[", "'intervals'", "]", "ivs", "[", "~", "no_chord", "]", "=", "interval_list", "(", "'(1)'", ")", "ivs", "[", "unison", "&", "perf_fifth", "]", "=", "interval_list", "(", "'(1,5)'", ")", "ivs", "[", "~", "perf_fourth", "&", "maj_sec", "]", "=", "_shorthands", "[", "'sus2'", "]", "ivs", "[", "perf_fourth", "&", "~", "maj_sec", "]", "=", "_shorthands", "[", "'sus4'", "]", "ivs", "[", "min_third", "]", "=", "_shorthands", "[", "'min'", "]", "ivs", "[", "min_third", "&", "aug_fifth", "&", "~", "perf_fifth", "]", "=", "interval_list", "(", "'(1,b3,#5)'", ")", "ivs", "[", "min_third", "&", "dim_fifth", "&", "~", "perf_fifth", "]", "=", "_shorthands", "[", "'dim'", "]", "ivs", "[", "maj_third", "]", "=", "_shorthands", "[", "'maj'", "]", "ivs", "[", "maj_third", "&", "dim_fifth", "&", "~", "perf_fifth", "]", "=", "interval_list", "(", "'(1,3,b5)'", ")", "ivs", "[", "maj_third", "&", "aug_fifth", "&", "~", "perf_fifth", "]", "=", "_shorthands", "[", "'aug'", "]", "if", "not", "keep_bass", ":", "reduced_chords", "[", "'bass'", "]", "=", "0", "else", ":", "# remove bass notes if they are not part of the intervals anymore", "reduced_chords", "[", "'bass'", "]", "*=", "ivs", "[", "range", "(", "len", "(", "reduced_chords", ")", ")", ",", "reduced_chords", "[", "'bass'", "]", "]", "# keep -1 in bass for no chords", "reduced_chords", "[", "'bass'", "]", "[", "no_chord", "]", "=", "-", "1", "return", "reduced_chords" ]
https://github.com/CPJKU/madmom/blob/3bc8334099feb310acfce884ebdb76a28e01670d/madmom/evaluation/chords.py#L428-L491
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/robotparser.py
python
RobotFileParser.can_fetch
(self, useragent, url)
return True
using the parsed robots.txt decide if useragent can fetch url
using the parsed robots.txt decide if useragent can fetch url
[ "using", "the", "parsed", "robots", ".", "txt", "decide", "if", "useragent", "can", "fetch", "url" ]
def can_fetch(self, useragent, url):
    """using the parsed robots.txt decide if useragent can fetch url"""
    if self.disallow_all:
        return False
    if self.allow_all:
        return True

    # Until the robots.txt file has been read or found not
    # to exist, we must assume that no url is allowable.
    # This prevents false positives when a user erroneously
    # calls can_fetch() before calling read().
    if not self.last_checked:
        return False

    # search for given user agent matches
    # the first match counts
    parsed_url = urlparse.urlparse(urllib.unquote(url))
    url = urlparse.urlunparse(('', '', parsed_url.path,
                               parsed_url.params, parsed_url.query,
                               parsed_url.fragment))
    url = urllib.quote(url)
    if not url:
        url = "/"
    for entry in self.entries:
        if entry.applies_to(useragent):
            return entry.allowance(url)
    # try the default entry last
    if self.default_entry:
        return self.default_entry.allowance(url)
    # agent not found ==> access granted
    return True
[ "def", "can_fetch", "(", "self", ",", "useragent", ",", "url", ")", ":", "if", "self", ".", "disallow_all", ":", "return", "False", "if", "self", ".", "allow_all", ":", "return", "True", "# Until the robots.txt file has been read or found not", "# to exist, we must assume that no url is allowable.", "# This prevents false positives when a user erroneously", "# calls can_fetch() before calling read().", "if", "not", "self", ".", "last_checked", ":", "return", "False", "# search for given user agent matches", "# the first match counts", "parsed_url", "=", "urlparse", ".", "urlparse", "(", "urllib", ".", "unquote", "(", "url", ")", ")", "url", "=", "urlparse", ".", "urlunparse", "(", "(", "''", ",", "''", ",", "parsed_url", ".", "path", ",", "parsed_url", ".", "params", ",", "parsed_url", ".", "query", ",", "parsed_url", ".", "fragment", ")", ")", "url", "=", "urllib", ".", "quote", "(", "url", ")", "if", "not", "url", ":", "url", "=", "\"/\"", "for", "entry", "in", "self", ".", "entries", ":", "if", "entry", ".", "applies_to", "(", "useragent", ")", ":", "return", "entry", ".", "allowance", "(", "url", ")", "# try the default entry last", "if", "self", ".", "default_entry", ":", "return", "self", ".", "default_entry", ".", "allowance", "(", "url", ")", "# agent not found ==> access granted", "return", "True" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/robotparser.py#L130-L159
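The record above is the Python 2 standard-library implementation; its Python 3 counterpart, urllib.robotparser, exposes the same can_fetch behaviour and can be exercised like this (the rules are made up):

from urllib import robotparser

rp = robotparser.RobotFileParser()
rp.parse([
    "User-agent: *",
    "Disallow: /private/",
])
print(rp.can_fetch("MyBot", "https://example.com/private/page"))  # False
print(rp.can_fetch("MyBot", "https://example.com/public/page"))   # True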
1012598167/flask_mongodb_game
60c7e0351586656ec38f851592886338e50b4110
python_flask/venv/Lib/site-packages/jinja2/utils.py
python
LRUCache.clear
(self)
Clear the cache.
Clear the cache.
[ "Clear", "the", "cache", "." ]
def clear(self):
    """Clear the cache."""
    self._wlock.acquire()
    try:
        self._mapping.clear()
        self._queue.clear()
    finally:
        self._wlock.release()
[ "def", "clear", "(", "self", ")", ":", "self", ".", "_wlock", ".", "acquire", "(", ")", "try", ":", "self", ".", "_mapping", ".", "clear", "(", ")", "self", ".", "_queue", ".", "clear", "(", ")", "finally", ":", "self", ".", "_wlock", ".", "release", "(", ")" ]
https://github.com/1012598167/flask_mongodb_game/blob/60c7e0351586656ec38f851592886338e50b4110/python_flask/venv/Lib/site-packages/jinja2/utils.py#L369-L376
apple/coremltools
141a83af482fcbdd5179807c9eaff9a7999c2c49
deps/protobuf/python/google/protobuf/internal/python_message.py
python
_ExtensionDict.__getitem__
(self, extension_handle)
return result
Returns the current value of the given extension handle.
Returns the current value of the given extension handle.
[ "Returns", "the", "current", "value", "of", "the", "given", "extension", "handle", "." ]
def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    result = self._extended_message._fields.get(extension_handle)
    if result is not None:
        return result

    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
        result = extension_handle._default_constructor(self._extended_message)
    elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        result = extension_handle.message_type._concrete_class()
        try:
            result._SetListener(self._extended_message._listener_for_children)
        except ReferenceError:
            pass
    else:
        # Singular scalar -- just return the default without inserting into the
        # dict.
        return extension_handle.default_value

    # Atomically check if another thread has preempted us and, if not, swap
    # in the new object we just created. If someone has preempted us, we
    # take that object and discard ours.
    # WARNING: We are relying on setdefault() being atomic. This is true
    # in CPython but we haven't investigated others. This warning appears
    # in several other locations in this file.
    result = self._extended_message._fields.setdefault(
        extension_handle, result)

    return result
[ "def", "__getitem__", "(", "self", ",", "extension_handle", ")", ":", "_VerifyExtensionHandle", "(", "self", ".", "_extended_message", ",", "extension_handle", ")", "result", "=", "self", ".", "_extended_message", ".", "_fields", ".", "get", "(", "extension_handle", ")", "if", "result", "is", "not", "None", ":", "return", "result", "if", "extension_handle", ".", "label", "==", "_FieldDescriptor", ".", "LABEL_REPEATED", ":", "result", "=", "extension_handle", ".", "_default_constructor", "(", "self", ".", "_extended_message", ")", "elif", "extension_handle", ".", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "result", "=", "extension_handle", ".", "message_type", ".", "_concrete_class", "(", ")", "try", ":", "result", ".", "_SetListener", "(", "self", ".", "_extended_message", ".", "_listener_for_children", ")", "except", "ReferenceError", ":", "pass", "else", ":", "# Singular scalar -- just return the default without inserting into the", "# dict.", "return", "extension_handle", ".", "default_value", "# Atomically check if another thread has preempted us and, if not, swap", "# in the new object we just created. If someone has preempted us, we", "# take that object and discard ours.", "# WARNING: We are relying on setdefault() being atomic. This is true", "# in CPython but we haven't investigated others. This warning appears", "# in several other locations in this file.", "result", "=", "self", ".", "_extended_message", ".", "_fields", ".", "setdefault", "(", "extension_handle", ",", "result", ")", "return", "result" ]
https://github.com/apple/coremltools/blob/141a83af482fcbdd5179807c9eaff9a7999c2c49/deps/protobuf/python/google/protobuf/internal/python_message.py#L1441-L1472
vcheckzen/FODI
3bb23644938a33c3fdfb9611a622e35ed4ce6532
back-end-py/main/3rd/Crypto/Cipher/_mode_ccm.py
python
CcmMode.encrypt
(self, plaintext, output=None)
return self._cipher.encrypt(plaintext, output=output)
Encrypt data with the key set at initialization. A cipher object is stateful: once you have encrypted a message you cannot encrypt (or decrypt) another message using the same object. This method can be called only **once** if ``msg_len`` was not passed at initialization. If ``msg_len`` was given, the data to encrypt can be broken up in two or more pieces and `encrypt` can be called multiple times. That is, the statement: >>> c.encrypt(a) + c.encrypt(b) is equivalent to: >>> c.encrypt(a+b) This function does not add any padding to the plaintext. :Parameters: plaintext : bytes/bytearray/memoryview The piece of data to encrypt. It can be of any length. :Keywords: output : bytearray/memoryview The location where the ciphertext must be written to. If ``None``, the ciphertext is returned. :Return: If ``output`` is ``None``, the ciphertext as ``bytes``. Otherwise, ``None``.
Encrypt data with the key set at initialization.
[ "Encrypt", "data", "with", "the", "key", "set", "at", "initialization", "." ]
def encrypt(self, plaintext, output=None):
    """Encrypt data with the key set at initialization.

    A cipher object is stateful: once you have encrypted a message
    you cannot encrypt (or decrypt) another message using the same
    object.

    This method can be called only **once** if ``msg_len`` was
    not passed at initialization.

    If ``msg_len`` was given, the data to encrypt can be broken
    up in two or more pieces and `encrypt` can be called
    multiple times.

    That is, the statement:

        >>> c.encrypt(a) + c.encrypt(b)

    is equivalent to:

        >>> c.encrypt(a+b)

    This function does not add any padding to the plaintext.

    :Parameters:
      plaintext : bytes/bytearray/memoryview
        The piece of data to encrypt.
        It can be of any length.
    :Keywords:
      output : bytearray/memoryview
        The location where the ciphertext must be written to.
        If ``None``, the ciphertext is returned.
    :Return:
      If ``output`` is ``None``, the ciphertext as ``bytes``.
      Otherwise, ``None``.
    """

    if self.encrypt not in self._next:
        raise TypeError("encrypt() can only be called after"
                        " initialization or an update()")
    self._next = [self.encrypt, self.digest]

    # No more associated data allowed from now
    if self._assoc_len is None:
        assert(isinstance(self._cache, list))
        self._assoc_len = sum([len(x) for x in self._cache])
        if self._msg_len is not None:
            self._start_mac()
    else:
        if self._cumul_assoc_len < self._assoc_len:
            raise ValueError("Associated data is too short")

    # Only once piece of plaintext accepted if message length was
    # not declared in advance
    if self._msg_len is None:
        self._msg_len = len(plaintext)
        self._start_mac()
        self._next = [self.digest]

    self._cumul_msg_len += len(plaintext)
    if self._cumul_msg_len > self._msg_len:
        raise ValueError("Message is too long")

    if self._mac_status == MacStatus.PROCESSING_AUTH_DATA:
        # Associated data is concatenated with the least number
        # of zero bytes (possibly none) to reach alignment to
        # the 16 byte boundary (A.2.3)
        self._pad_cache_and_update()
        self._mac_status = MacStatus.PROCESSING_PLAINTEXT

    self._update(plaintext)
    return self._cipher.encrypt(plaintext, output=output)
[ "def", "encrypt", "(", "self", ",", "plaintext", ",", "output", "=", "None", ")", ":", "if", "self", ".", "encrypt", "not", "in", "self", ".", "_next", ":", "raise", "TypeError", "(", "\"encrypt() can only be called after\"", "\" initialization or an update()\"", ")", "self", ".", "_next", "=", "[", "self", ".", "encrypt", ",", "self", ".", "digest", "]", "# No more associated data allowed from now", "if", "self", ".", "_assoc_len", "is", "None", ":", "assert", "(", "isinstance", "(", "self", ".", "_cache", ",", "list", ")", ")", "self", ".", "_assoc_len", "=", "sum", "(", "[", "len", "(", "x", ")", "for", "x", "in", "self", ".", "_cache", "]", ")", "if", "self", ".", "_msg_len", "is", "not", "None", ":", "self", ".", "_start_mac", "(", ")", "else", ":", "if", "self", ".", "_cumul_assoc_len", "<", "self", ".", "_assoc_len", ":", "raise", "ValueError", "(", "\"Associated data is too short\"", ")", "# Only once piece of plaintext accepted if message length was", "# not declared in advance", "if", "self", ".", "_msg_len", "is", "None", ":", "self", ".", "_msg_len", "=", "len", "(", "plaintext", ")", "self", ".", "_start_mac", "(", ")", "self", ".", "_next", "=", "[", "self", ".", "digest", "]", "self", ".", "_cumul_msg_len", "+=", "len", "(", "plaintext", ")", "if", "self", ".", "_cumul_msg_len", ">", "self", ".", "_msg_len", ":", "raise", "ValueError", "(", "\"Message is too long\"", ")", "if", "self", ".", "_mac_status", "==", "MacStatus", ".", "PROCESSING_AUTH_DATA", ":", "# Associated data is concatenated with the least number", "# of zero bytes (possibly none) to reach alignment to", "# the 16 byte boundary (A.2.3)", "self", ".", "_pad_cache_and_update", "(", ")", "self", ".", "_mac_status", "=", "MacStatus", ".", "PROCESSING_PLAINTEXT", "self", ".", "_update", "(", "plaintext", ")", "return", "self", ".", "_cipher", ".", "encrypt", "(", "plaintext", ",", "output", "=", "output", ")" ]
https://github.com/vcheckzen/FODI/blob/3bb23644938a33c3fdfb9611a622e35ed4ce6532/back-end-py/main/3rd/Crypto/Cipher/_mode_ccm.py#L302-L373
brightmart/multi-label_classification
b5febe17eaf9d937d71cabab56c5da48ee68f7b5
bert/modeling.py
python
BertModel.get_sequence_output
(self)
return self.sequence_output
Gets final hidden layer of encoder. Returns: float Tensor of shape [batch_size, seq_length, hidden_size] corresponding to the final hidden of the transformer encoder.
Gets final hidden layer of encoder.
[ "Gets", "final", "hidden", "layer", "of", "encoder", "." ]
def get_sequence_output(self):
    """Gets final hidden layer of encoder.

    Returns:
      float Tensor of shape [batch_size, seq_length, hidden_size] corresponding
      to the final hidden of the transformer encoder.
    """
    return self.sequence_output
[ "def", "get_sequence_output", "(", "self", ")", ":", "return", "self", ".", "sequence_output" ]
https://github.com/brightmart/multi-label_classification/blob/b5febe17eaf9d937d71cabab56c5da48ee68f7b5/bert/modeling.py#L237-L244
City-Bureau/city-scrapers
b295d0aa612e3979a9fccab7c5f55ecea9ed074c
city_scrapers/spiders/il_governors_state_university.py
python
IlGovernorsStateUniversitySpider._parse_links
(self, item, response)
return links
Parse or generate links.
Parse or generate links.
[ "Parse", "or", "generate", "links", "." ]
def _parse_links(self, item, response):
    """Parse or generate links."""
    links = []
    # the links to the agenda, if present, are in the third and fourth columns
    for col in [2, 3]:
        for link_parent in item[col].xpath("a"):
            link_ext = link_parent.css("::attr(href)").get()
            if link_ext is not None:
                link = response.urljoin(link_ext)
                title = link_parent.xpath("text()").get()
                links.append({"href": link, "title": title})
    return links
[ "def", "_parse_links", "(", "self", ",", "item", ",", "response", ")", ":", "links", "=", "[", "]", "# the links to the agenda, if present, are in the third and fourth columns", "for", "col", "in", "[", "2", ",", "3", "]", ":", "for", "link_parent", "in", "item", "[", "col", "]", ".", "xpath", "(", "\"a\"", ")", ":", "link_ext", "=", "link_parent", ".", "css", "(", "\"::attr(href)\"", ")", ".", "get", "(", ")", "if", "link_ext", "is", "not", "None", ":", "link", "=", "response", ".", "urljoin", "(", "link_ext", ")", "title", "=", "link_parent", ".", "xpath", "(", "\"text()\"", ")", ".", "get", "(", ")", "links", ".", "append", "(", "{", "\"href\"", ":", "link", ",", "\"title\"", ":", "title", "}", ")", "return", "links" ]
https://github.com/City-Bureau/city-scrapers/blob/b295d0aa612e3979a9fccab7c5f55ecea9ed074c/city_scrapers/spiders/il_governors_state_university.py#L227-L238
geex-arts/django-jet
06ab6436d8add9aafcf771df40358409564e6bcb
jet/dashboard/dashboard.py
python
DashboardUrls.get_urls
(self)
return self._urls
[]
def get_urls(self):
    return self._urls
[ "def", "get_urls", "(", "self", ")", ":", "return", "self", ".", "_urls" ]
https://github.com/geex-arts/django-jet/blob/06ab6436d8add9aafcf771df40358409564e6bcb/jet/dashboard/dashboard.py#L308-L309
kerlomz/captcha_platform
f7d719bd1239a987996e266bd7fe35c96003b378
sdk/onnx/sdk.py
python
ModelConfig.category_extract
(param)
[]
def category_extract(param):
    if isinstance(param, list):
        return param
    if isinstance(param, str):
        if param in SIMPLE_CATEGORY_MODEL.keys():
            return SIMPLE_CATEGORY_MODEL.get(param)
        raise ValueError(
            "Category set configuration error, customized category set should be list type"
        )
[ "def", "category_extract", "(", "param", ")", ":", "if", "isinstance", "(", "param", ",", "list", ")", ":", "return", "param", "if", "isinstance", "(", "param", ",", "str", ")", ":", "if", "param", "in", "SIMPLE_CATEGORY_MODEL", ".", "keys", "(", ")", ":", "return", "SIMPLE_CATEGORY_MODEL", ".", "get", "(", "param", ")", "raise", "ValueError", "(", "\"Category set configuration error, customized category set should be list type\"", ")" ]
https://github.com/kerlomz/captcha_platform/blob/f7d719bd1239a987996e266bd7fe35c96003b378/sdk/onnx/sdk.py#L249-L257
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pyparsing-2.3.1-py3.7.egg/pyparsing.py
python
ParseResults.copy
( self )
return ret
Returns a new copy of a :class:`ParseResults` object.
Returns a new copy of a :class:`ParseResults` object.
[ "Returns", "a", "new", "copy", "of", "a", ":", "class", ":", "ParseResults", "object", "." ]
def copy( self ):
    """
    Returns a new copy of a :class:`ParseResults` object.
    """
    ret = ParseResults( self.__toklist )
    ret.__tokdict = dict(self.__tokdict.items())
    ret.__parent = self.__parent
    ret.__accumNames.update( self.__accumNames )
    ret.__name = self.__name
    return ret
[ "def", "copy", "(", "self", ")", ":", "ret", "=", "ParseResults", "(", "self", ".", "__toklist", ")", "ret", ".", "__tokdict", "=", "dict", "(", "self", ".", "__tokdict", ".", "items", "(", ")", ")", "ret", ".", "__parent", "=", "self", ".", "__parent", "ret", ".", "__accumNames", ".", "update", "(", "self", ".", "__accumNames", ")", "ret", ".", "__name", "=", "self", ".", "__name", "return", "ret" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pyparsing-2.3.1-py3.7.egg/pyparsing.py#L875-L884
sqlalchemy/sqlalchemy
eb716884a4abcabae84a6aaba105568e925b7d27
lib/sqlalchemy/dialects/mysql/types.py
python
TINYINT.__init__
(self, display_width=None, **kw)
Construct a TINYINT. :param display_width: Optional, maximum display width for this number. :param unsigned: a boolean, optional. :param zerofill: Optional. If true, values will be stored as strings left-padded with zeros. Note that this does not effect the values returned by the underlying database API, which continue to be numeric.
Construct a TINYINT.
[ "Construct", "a", "TINYINT", "." ]
def __init__(self, display_width=None, **kw):
    """Construct a TINYINT.

    :param display_width: Optional, maximum display width for this number.

    :param unsigned: a boolean, optional.

    :param zerofill: Optional. If true, values will be stored as strings
      left-padded with zeros. Note that this does not effect the values
      returned by the underlying database API, which continue to be
      numeric.

    """
    super(TINYINT, self).__init__(display_width=display_width, **kw)
[ "def", "__init__", "(", "self", ",", "display_width", "=", "None", ",", "*", "*", "kw", ")", ":", "super", "(", "TINYINT", ",", "self", ")", ".", "__init__", "(", "display_width", "=", "display_width", ",", "*", "*", "kw", ")" ]
https://github.com/sqlalchemy/sqlalchemy/blob/eb716884a4abcabae84a6aaba105568e925b7d27/lib/sqlalchemy/dialects/mysql/types.py#L320-L333
moonnejs/uiKLine
08646956bd1d729c88d5d2617bf0599eb3efb3d1
uiKLine.py
python
KLineWidget.updateSig
(self,sig)
刷新买卖信号
刷新买卖信号
[ "刷新买卖信号" ]
def updateSig(self,sig):
    """刷新买卖信号"""
    self.listSig = sig
    self.plotMark()
[ "def", "updateSig", "(", "self", ",", "sig", ")", ":", "self", ".", "listSig", "=", "sig", "self", ".", "plotMark", "(", ")" ]
https://github.com/moonnejs/uiKLine/blob/08646956bd1d729c88d5d2617bf0599eb3efb3d1/uiKLine.py#L728-L731
google/coursebuilder-core
08f809db3226d9269e30d5edd0edd33bd22041f4
coursebuilder/modules/data_pump/data_pump.py
python
DataPumpJob._maybe_create_course_dataset
(self, service, bigquery_settings)
Create dataset within BigQuery if it's not already there.
Create dataset within BigQuery if it's not already there.
[ "Create", "dataset", "within", "BigQuery", "if", "it", "s", "not", "already", "there", "." ]
def _maybe_create_course_dataset(self, service, bigquery_settings):
    """Create dataset within BigQuery if it's not already there."""
    datasets = service.datasets()
    try:
        datasets.get(projectId=bigquery_settings.project_id,
                     datasetId=bigquery_settings.dataset_id).execute()
    except apiclient.errors.HttpError, ex:
        if ex.resp.status != 404:
            raise
        datasets.insert(projectId=bigquery_settings.project_id,
                        body={
                            'datasetReference': {
                                'projectId': bigquery_settings.project_id,
                                'datasetId': bigquery_settings.dataset_id
                            }}).execute()
[ "def", "_maybe_create_course_dataset", "(", "self", ",", "service", ",", "bigquery_settings", ")", ":", "datasets", "=", "service", ".", "datasets", "(", ")", "try", ":", "datasets", ".", "get", "(", "projectId", "=", "bigquery_settings", ".", "project_id", ",", "datasetId", "=", "bigquery_settings", ".", "dataset_id", ")", ".", "execute", "(", ")", "except", "apiclient", ".", "errors", ".", "HttpError", ",", "ex", ":", "if", "ex", ".", "resp", ".", "status", "!=", "404", ":", "raise", "datasets", ".", "insert", "(", "projectId", "=", "bigquery_settings", ".", "project_id", ",", "body", "=", "{", "'datasetReference'", ":", "{", "'projectId'", ":", "bigquery_settings", ".", "project_id", ",", "'datasetId'", ":", "bigquery_settings", ".", "dataset_id", "}", "}", ")", ".", "execute", "(", ")" ]
https://github.com/google/coursebuilder-core/blob/08f809db3226d9269e30d5edd0edd33bd22041f4/coursebuilder/modules/data_pump/data_pump.py#L445-L459
omz/PythonistaAppTemplate
f560f93f8876d82a21d108977f90583df08d55af
PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/combinatorics/generators.py
python
rubik
(n)
return g
Return permutations for an nxn Rubik's cube. Permutations returned are for rotation of each of the slice from the face up to the last face for each of the 3 sides (in this order): front, right and bottom. Hence, the first n - 1 permutations are for the slices from the front.
Return permutations for an nxn Rubik's cube.
[ "Return", "permutations", "for", "an", "nxn", "Rubik", "s", "cube", "." ]
def rubik(n): """Return permutations for an nxn Rubik's cube. Permutations returned are for rotation of each of the slice from the face up to the last face for each of the 3 sides (in this order): front, right and bottom. Hence, the first n - 1 permutations are for the slices from the front. """ if n < 2: raise ValueError('dimension of cube must be > 1') # 1-based reference to rows and columns in Matrix def getr(f, i): return faces[f].col(n - i) def getl(f, i): return faces[f].col(i - 1) def getu(f, i): return faces[f].row(i - 1) def getd(f, i): return faces[f].row(n - i) def setr(f, i, s): faces[f][:, n - i] = Matrix(n, 1, s) def setl(f, i, s): faces[f][:, i - 1] = Matrix(n, 1, s) def setu(f, i, s): faces[f][i - 1, :] = Matrix(1, n, s) def setd(f, i, s): faces[f][n - i, :] = Matrix(1, n, s) # motion of a single face def cw(F, r=1): for _ in range(r): face = faces[F] rv = [] for c in range(n): for r in range(n - 1, -1, -1): rv.append(face[r, c]) faces[F] = Matrix(n, n, rv) def ccw(F): cw(F, 3) # motion of plane i from the F side; # fcw(0) moves the F face, fcw(1) moves the plane # just behind the front face, etc... def fcw(i, r=1): for _ in range(r): if i == 0: cw(F) i += 1 temp = getr(L, i) setr(L, i, list((getu(D, i)))) setu(D, i, list(reversed(getl(R, i)))) setl(R, i, list((getd(U, i)))) setd(U, i, list(reversed(temp))) i -= 1 def fccw(i): fcw(i, 3) # motion of the entire cube from the F side def FCW(r=1): for _ in range(r): cw(F) ccw(B) cw(U) t = faces[U] cw(L) faces[U] = faces[L] cw(D) faces[L] = faces[D] cw(R) faces[D] = faces[R] faces[R] = t def FCCW(): FCW(3) # motion of the entire cube from the U side def UCW(r=1): for _ in range(r): cw(U) ccw(D) t = faces[F] faces[F] = faces[R] faces[R] = faces[B] faces[B] = faces[L] faces[L] = t def UCCW(): UCW(3) # defining the permutations for the cube U, F, R, B, L, D = names = symbols('U, F, R, B, L, D') # the faces are represented by nxn matrices faces = {} count = 0 for fi in range(6): f = [] for a in range(n**2): f.append(count) count += 1 faces[names[fi]] = Matrix(n, n, f) # this will either return the value of the current permutation # (show != 1) or else append the permutation to the group, g def perm(show=0): # add perm to the list of perms p = [] for f in names: p.extend(faces[f]) if show: return p g.append(Permutation(p)) g = [] # container for the group's permutations I = list(range(6*n**2)) # the identity permutation used for checking # define permutations corresonding to cw rotations of the planes # up TO the last plane from that direction; by not including the # last plane, the orientation of the cube is maintained. # F slices for i in range(n - 1): fcw(i) perm() fccw(i) # restore assert perm(1) == I # R slices # bring R to front UCW() for i in range(n - 1): fcw(i) # put it back in place UCCW() # record perm() # restore # bring face to fron UCW() fccw(i) # restore UCCW() assert perm(1) == I # D slices # bring up bottom FCW() UCCW() FCCW() for i in range(n - 1): # turn strip fcw(i) # put bottom back on the bottom FCW() UCW() FCCW() # record perm() # restore # bring up bottom FCW() UCCW() FCCW() # turn strip fccw(i) # put bottom back on the bottom FCW() UCW() FCCW() assert perm(1) == I return g
[ "def", "rubik", "(", "n", ")", ":", "if", "n", "<", "2", ":", "raise", "ValueError", "(", "'dimension of cube must be > 1'", ")", "# 1-based reference to rows and columns in Matrix", "def", "getr", "(", "f", ",", "i", ")", ":", "return", "faces", "[", "f", "]", ".", "col", "(", "n", "-", "i", ")", "def", "getl", "(", "f", ",", "i", ")", ":", "return", "faces", "[", "f", "]", ".", "col", "(", "i", "-", "1", ")", "def", "getu", "(", "f", ",", "i", ")", ":", "return", "faces", "[", "f", "]", ".", "row", "(", "i", "-", "1", ")", "def", "getd", "(", "f", ",", "i", ")", ":", "return", "faces", "[", "f", "]", ".", "row", "(", "n", "-", "i", ")", "def", "setr", "(", "f", ",", "i", ",", "s", ")", ":", "faces", "[", "f", "]", "[", ":", ",", "n", "-", "i", "]", "=", "Matrix", "(", "n", ",", "1", ",", "s", ")", "def", "setl", "(", "f", ",", "i", ",", "s", ")", ":", "faces", "[", "f", "]", "[", ":", ",", "i", "-", "1", "]", "=", "Matrix", "(", "n", ",", "1", ",", "s", ")", "def", "setu", "(", "f", ",", "i", ",", "s", ")", ":", "faces", "[", "f", "]", "[", "i", "-", "1", ",", ":", "]", "=", "Matrix", "(", "1", ",", "n", ",", "s", ")", "def", "setd", "(", "f", ",", "i", ",", "s", ")", ":", "faces", "[", "f", "]", "[", "n", "-", "i", ",", ":", "]", "=", "Matrix", "(", "1", ",", "n", ",", "s", ")", "# motion of a single face", "def", "cw", "(", "F", ",", "r", "=", "1", ")", ":", "for", "_", "in", "range", "(", "r", ")", ":", "face", "=", "faces", "[", "F", "]", "rv", "=", "[", "]", "for", "c", "in", "range", "(", "n", ")", ":", "for", "r", "in", "range", "(", "n", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "rv", ".", "append", "(", "face", "[", "r", ",", "c", "]", ")", "faces", "[", "F", "]", "=", "Matrix", "(", "n", ",", "n", ",", "rv", ")", "def", "ccw", "(", "F", ")", ":", "cw", "(", "F", ",", "3", ")", "# motion of plane i from the F side;", "# fcw(0) moves the F face, fcw(1) moves the plane", "# just behind the front face, etc...", "def", "fcw", "(", "i", ",", "r", "=", "1", ")", ":", "for", "_", "in", "range", "(", "r", ")", ":", "if", "i", "==", "0", ":", "cw", "(", "F", ")", "i", "+=", "1", "temp", "=", "getr", "(", "L", ",", "i", ")", "setr", "(", "L", ",", "i", ",", "list", "(", "(", "getu", "(", "D", ",", "i", ")", ")", ")", ")", "setu", "(", "D", ",", "i", ",", "list", "(", "reversed", "(", "getl", "(", "R", ",", "i", ")", ")", ")", ")", "setl", "(", "R", ",", "i", ",", "list", "(", "(", "getd", "(", "U", ",", "i", ")", ")", ")", ")", "setd", "(", "U", ",", "i", ",", "list", "(", "reversed", "(", "temp", ")", ")", ")", "i", "-=", "1", "def", "fccw", "(", "i", ")", ":", "fcw", "(", "i", ",", "3", ")", "# motion of the entire cube from the F side", "def", "FCW", "(", "r", "=", "1", ")", ":", "for", "_", "in", "range", "(", "r", ")", ":", "cw", "(", "F", ")", "ccw", "(", "B", ")", "cw", "(", "U", ")", "t", "=", "faces", "[", "U", "]", "cw", "(", "L", ")", "faces", "[", "U", "]", "=", "faces", "[", "L", "]", "cw", "(", "D", ")", "faces", "[", "L", "]", "=", "faces", "[", "D", "]", "cw", "(", "R", ")", "faces", "[", "D", "]", "=", "faces", "[", "R", "]", "faces", "[", "R", "]", "=", "t", "def", "FCCW", "(", ")", ":", "FCW", "(", "3", ")", "# motion of the entire cube from the U side", "def", "UCW", "(", "r", "=", "1", ")", ":", "for", "_", "in", "range", "(", "r", ")", ":", "cw", "(", "U", ")", "ccw", "(", "D", ")", "t", "=", "faces", "[", "F", "]", "faces", "[", "F", "]", "=", "faces", "[", "R", "]", "faces", "[", "R", "]", "=", "faces", "[", "B", "]", "faces", "[", "B", "]", "=", "faces", 
"[", "L", "]", "faces", "[", "L", "]", "=", "t", "def", "UCCW", "(", ")", ":", "UCW", "(", "3", ")", "# defining the permutations for the cube", "U", ",", "F", ",", "R", ",", "B", ",", "L", ",", "D", "=", "names", "=", "symbols", "(", "'U, F, R, B, L, D'", ")", "# the faces are represented by nxn matrices", "faces", "=", "{", "}", "count", "=", "0", "for", "fi", "in", "range", "(", "6", ")", ":", "f", "=", "[", "]", "for", "a", "in", "range", "(", "n", "**", "2", ")", ":", "f", ".", "append", "(", "count", ")", "count", "+=", "1", "faces", "[", "names", "[", "fi", "]", "]", "=", "Matrix", "(", "n", ",", "n", ",", "f", ")", "# this will either return the value of the current permutation", "# (show != 1) or else append the permutation to the group, g", "def", "perm", "(", "show", "=", "0", ")", ":", "# add perm to the list of perms", "p", "=", "[", "]", "for", "f", "in", "names", ":", "p", ".", "extend", "(", "faces", "[", "f", "]", ")", "if", "show", ":", "return", "p", "g", ".", "append", "(", "Permutation", "(", "p", ")", ")", "g", "=", "[", "]", "# container for the group's permutations", "I", "=", "list", "(", "range", "(", "6", "*", "n", "**", "2", ")", ")", "# the identity permutation used for checking", "# define permutations corresonding to cw rotations of the planes", "# up TO the last plane from that direction; by not including the", "# last plane, the orientation of the cube is maintained.", "# F slices", "for", "i", "in", "range", "(", "n", "-", "1", ")", ":", "fcw", "(", "i", ")", "perm", "(", ")", "fccw", "(", "i", ")", "# restore", "assert", "perm", "(", "1", ")", "==", "I", "# R slices", "# bring R to front", "UCW", "(", ")", "for", "i", "in", "range", "(", "n", "-", "1", ")", ":", "fcw", "(", "i", ")", "# put it back in place", "UCCW", "(", ")", "# record", "perm", "(", ")", "# restore", "# bring face to fron", "UCW", "(", ")", "fccw", "(", "i", ")", "# restore", "UCCW", "(", ")", "assert", "perm", "(", "1", ")", "==", "I", "# D slices", "# bring up bottom", "FCW", "(", ")", "UCCW", "(", ")", "FCCW", "(", ")", "for", "i", "in", "range", "(", "n", "-", "1", ")", ":", "# turn strip", "fcw", "(", "i", ")", "# put bottom back on the bottom", "FCW", "(", ")", "UCW", "(", ")", "FCCW", "(", ")", "# record", "perm", "(", ")", "# restore", "# bring up bottom", "FCW", "(", ")", "UCCW", "(", ")", "FCCW", "(", ")", "# turn strip", "fccw", "(", "i", ")", "# put bottom back on the bottom", "FCW", "(", ")", "UCW", "(", ")", "FCCW", "(", ")", "assert", "perm", "(", "1", ")", "==", "I", "return", "g" ]
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/combinatorics/generators.py#L130-L313
Blockstream/satellite
ceb46a00e176c43a6b4170359f6948663a0616bb
blocksatcli/api/msg.py
python
ApiMsg.serialize
(self, target='original')
Serialize data to stdout Args: target : Target bytes array to print (original, encapsulated or encrypted).
Serialize data to stdout
[ "Serialize", "data", "to", "stdout" ]
def serialize(self, target='original'): """Serialize data to stdout Args: target : Target bytes array to print (original, encapsulated or encrypted). """ data = self.get_data(target) assert (isinstance(data, bytes)) sys.stdout.buffer.write(data) sys.stdout.buffer.flush()
[ "def", "serialize", "(", "self", ",", "target", "=", "'original'", ")", ":", "data", "=", "self", ".", "get_data", "(", "target", ")", "assert", "(", "isinstance", "(", "data", ",", "bytes", ")", ")", "sys", ".", "stdout", ".", "buffer", ".", "write", "(", "data", ")", "sys", ".", "stdout", ".", "buffer", ".", "flush", "(", ")" ]
https://github.com/Blockstream/satellite/blob/ceb46a00e176c43a6b4170359f6948663a0616bb/blocksatcli/api/msg.py#L491-L502
photonlines/Python-Prolog-Interpreter
1e208bf20b332ab92b50e1926afe365f95f66811
prologpy/interpreter.py
python
Variable.substitute_variable_bindings
(self, variable_bindings)
return self
Fetch the currently bound variable value for our variable and return the substituted bindings if our variable is mapped. If our variable isn't mapped, we simply return the variable as the substitute.
Fetch the currently bound variable value for our variable and return the substituted bindings if our variable is mapped. If our variable isn't mapped, we simply return the variable as the substitute.
[ "Fetch", "the", "currently", "bound", "variable", "value", "for", "our", "variable", "and", "return", "the", "substituted", "bindings", "if", "our", "variable", "is", "mapped", ".", "If", "our", "variable", "isn", "t", "mapped", "we", "simply", "return", "the", "variable", "as", "the", "substitute", "." ]
def substitute_variable_bindings(self, variable_bindings): """Fetch the currently bound variable value for our variable and return the substituted bindings if our variable is mapped. If our variable isn't mapped, we simply return the variable as the substitute. """ bound_variable_value = variable_bindings.get(self) if bound_variable_value: return bound_variable_value.substitute_variable_bindings( variable_bindings ) return self
[ "def", "substitute_variable_bindings", "(", "self", ",", "variable_bindings", ")", ":", "bound_variable_value", "=", "variable_bindings", ".", "get", "(", "self", ")", "if", "bound_variable_value", ":", "return", "bound_variable_value", ".", "substitute_variable_bindings", "(", "variable_bindings", ")", "return", "self" ]
https://github.com/photonlines/Python-Prolog-Interpreter/blob/1e208bf20b332ab92b50e1926afe365f95f66811/prologpy/interpreter.py#L125-L136
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/logging/__init__.py
python
Filterer.__init__
(self)
Initialize the list of filters to be an empty list.
Initialize the list of filters to be an empty list.
[ "Initialize", "the", "list", "of", "filters", "to", "be", "an", "empty", "list", "." ]
def __init__(self): """ Initialize the list of filters to be an empty list. """ self.filters = []
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "filters", "=", "[", "]" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/logging/__init__.py#L673-L677
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v8/services/services/customer_user_access_service/client.py
python
CustomerUserAccessServiceClient.from_service_account_file
(cls, filename: str, *args, **kwargs)
return cls(*args, **kwargs)
Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: CustomerUserAccessServiceClient: The constructed client.
Creates an instance of this client using the provided credentials file.
[ "Creates", "an", "instance", "of", "this", "client", "using", "the", "provided", "credentials", "file", "." ]
def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: CustomerUserAccessServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file( filename ) kwargs["credentials"] = credentials return cls(*args, **kwargs)
[ "def", "from_service_account_file", "(", "cls", ",", "filename", ":", "str", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "credentials", "=", "service_account", ".", "Credentials", ".", "from_service_account_file", "(", "filename", ")", "kwargs", "[", "\"credentials\"", "]", "=", "credentials", "return", "cls", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v8/services/services/customer_user_access_service/client.py#L135-L152
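A brief, hedged usage sketch for the client record above: building the client straight from a service-account key file. The key path is a hypothetical placeholder, and a working google-ads v8 install plus valid credentials are assumed.

# Usage sketch only; the key file name below is made up.
from google.ads.googleads.v8.services.services.customer_user_access_service.client import (
    CustomerUserAccessServiceClient,
)

client = CustomerUserAccessServiceClient.from_service_account_file(
    "service-account-key.json"  # placeholder path to the service account private key JSON
)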
MichaelGrupp/evo
c65af3b69188aaadbbd7b5f99ac7973d74343d65
evo/core/transformations.py
python
quaternion_from_euler
(ai, aj, ak, axes='sxyz')
return q
Return quaternion from Euler angles and axis sequence. ai, aj, ak : Euler's roll, pitch and yaw angles axes : One of 24 axis sequences as string or encoded tuple >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) True
Return quaternion from Euler angles and axis sequence.
[ "Return", "quaternion", "from", "Euler", "angles", "and", "axis", "sequence", "." ]
def quaternion_from_euler(ai, aj, ak, axes='sxyz'): """Return quaternion from Euler angles and axis sequence. ai, aj, ak : Euler's roll, pitch and yaw angles axes : One of 24 axis sequences as string or encoded tuple >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) True """ try: firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()] except (AttributeError, KeyError): _TUPLE2AXES[axes] # validation firstaxis, parity, repetition, frame = axes i = firstaxis + 1 j = _NEXT_AXIS[i+parity-1] + 1 k = _NEXT_AXIS[i-parity] + 1 if frame: ai, ak = ak, ai if parity: aj = -aj ai /= 2.0 aj /= 2.0 ak /= 2.0 ci = math.cos(ai) si = math.sin(ai) cj = math.cos(aj) sj = math.sin(aj) ck = math.cos(ak) sk = math.sin(ak) cc = ci*ck cs = ci*sk sc = si*ck ss = si*sk q = numpy.empty((4, )) if repetition: q[0] = cj*(cc - ss) q[i] = cj*(cs + sc) q[j] = sj*(cc + ss) q[k] = sj*(cs - sc) else: q[0] = cj*cc + sj*ss q[i] = cj*sc - sj*cs q[j] = cj*ss + sj*cc q[k] = cj*cs - sj*sc if parity: q[j] *= -1.0 return q
[ "def", "quaternion_from_euler", "(", "ai", ",", "aj", ",", "ak", ",", "axes", "=", "'sxyz'", ")", ":", "try", ":", "firstaxis", ",", "parity", ",", "repetition", ",", "frame", "=", "_AXES2TUPLE", "[", "axes", ".", "lower", "(", ")", "]", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "_TUPLE2AXES", "[", "axes", "]", "# validation", "firstaxis", ",", "parity", ",", "repetition", ",", "frame", "=", "axes", "i", "=", "firstaxis", "+", "1", "j", "=", "_NEXT_AXIS", "[", "i", "+", "parity", "-", "1", "]", "+", "1", "k", "=", "_NEXT_AXIS", "[", "i", "-", "parity", "]", "+", "1", "if", "frame", ":", "ai", ",", "ak", "=", "ak", ",", "ai", "if", "parity", ":", "aj", "=", "-", "aj", "ai", "/=", "2.0", "aj", "/=", "2.0", "ak", "/=", "2.0", "ci", "=", "math", ".", "cos", "(", "ai", ")", "si", "=", "math", ".", "sin", "(", "ai", ")", "cj", "=", "math", ".", "cos", "(", "aj", ")", "sj", "=", "math", ".", "sin", "(", "aj", ")", "ck", "=", "math", ".", "cos", "(", "ak", ")", "sk", "=", "math", ".", "sin", "(", "ak", ")", "cc", "=", "ci", "*", "ck", "cs", "=", "ci", "*", "sk", "sc", "=", "si", "*", "ck", "ss", "=", "si", "*", "sk", "q", "=", "numpy", ".", "empty", "(", "(", "4", ",", ")", ")", "if", "repetition", ":", "q", "[", "0", "]", "=", "cj", "*", "(", "cc", "-", "ss", ")", "q", "[", "i", "]", "=", "cj", "*", "(", "cs", "+", "sc", ")", "q", "[", "j", "]", "=", "sj", "*", "(", "cc", "+", "ss", ")", "q", "[", "k", "]", "=", "sj", "*", "(", "cs", "-", "sc", ")", "else", ":", "q", "[", "0", "]", "=", "cj", "*", "cc", "+", "sj", "*", "ss", "q", "[", "i", "]", "=", "cj", "*", "sc", "-", "sj", "*", "cs", "q", "[", "j", "]", "=", "cj", "*", "ss", "+", "sj", "*", "cc", "q", "[", "k", "]", "=", "cj", "*", "cs", "-", "sj", "*", "sc", "if", "parity", ":", "q", "[", "j", "]", "*=", "-", "1.0", "return", "q" ]
https://github.com/MichaelGrupp/evo/blob/c65af3b69188aaadbbd7b5f99ac7973d74343d65/evo/core/transformations.py#L1185-L1239
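A minimal usage sketch for the quaternion_from_euler record above, assuming the evo package is importable; it re-runs the doctest from the docstring and checks that the result is a unit quaternion.

import numpy
from evo.core.transformations import quaternion_from_euler  # module path taken from the record above

q = quaternion_from_euler(1, 2, 3, 'ryxz')  # roll, pitch, yaw with axis sequence 'ryxz'
print(numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]))  # True, as in the docstring
print(numpy.isclose(numpy.linalg.norm(q), 1.0))                      # the quaternion is unit length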
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/sympy/combinatorics/perm_groups.py
python
_stabilizer
(degree, generators, alpha)
return [_af_new(x) for x in stab_gens]
r"""Return the stabilizer subgroup of ``alpha``. The stabilizer of `\alpha` is the group `G_\alpha = \{g \in G | g(\alpha) = \alpha\}`. For a proof of correctness, see [1], p.79. degree : degree of G generators : generators of G Examples ======== >>> from sympy.combinatorics import Permutation >>> Permutation.print_cyclic = True >>> from sympy.combinatorics.perm_groups import _stabilizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> G = DihedralGroup(6) >>> _stabilizer(G.degree, G.generators, 5) [(5)(0 4)(1 3), (5)] See Also ======== orbit
r"""Return the stabilizer subgroup of ``alpha``.
[ "r", "Return", "the", "stabilizer", "subgroup", "of", "alpha", "." ]
def _stabilizer(degree, generators, alpha): r"""Return the stabilizer subgroup of ``alpha``. The stabilizer of `\alpha` is the group `G_\alpha = \{g \in G | g(\alpha) = \alpha\}`. For a proof of correctness, see [1], p.79. degree : degree of G generators : generators of G Examples ======== >>> from sympy.combinatorics import Permutation >>> Permutation.print_cyclic = True >>> from sympy.combinatorics.perm_groups import _stabilizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> G = DihedralGroup(6) >>> _stabilizer(G.degree, G.generators, 5) [(5)(0 4)(1 3), (5)] See Also ======== orbit """ orb = [alpha] table = {alpha: list(range(degree))} table_inv = {alpha: list(range(degree))} used = [False]*degree used[alpha] = True gens = [x._array_form for x in generators] stab_gens = [] for b in orb: for gen in gens: temp = gen[b] if used[temp] is False: gen_temp = _af_rmul(gen, table[b]) orb.append(temp) table[temp] = gen_temp table_inv[temp] = _af_invert(gen_temp) used[temp] = True else: schreier_gen = _af_rmuln(table_inv[temp], gen, table[b]) if schreier_gen not in stab_gens: stab_gens.append(schreier_gen) return [_af_new(x) for x in stab_gens]
[ "def", "_stabilizer", "(", "degree", ",", "generators", ",", "alpha", ")", ":", "orb", "=", "[", "alpha", "]", "table", "=", "{", "alpha", ":", "list", "(", "range", "(", "degree", ")", ")", "}", "table_inv", "=", "{", "alpha", ":", "list", "(", "range", "(", "degree", ")", ")", "}", "used", "=", "[", "False", "]", "*", "degree", "used", "[", "alpha", "]", "=", "True", "gens", "=", "[", "x", ".", "_array_form", "for", "x", "in", "generators", "]", "stab_gens", "=", "[", "]", "for", "b", "in", "orb", ":", "for", "gen", "in", "gens", ":", "temp", "=", "gen", "[", "b", "]", "if", "used", "[", "temp", "]", "is", "False", ":", "gen_temp", "=", "_af_rmul", "(", "gen", ",", "table", "[", "b", "]", ")", "orb", ".", "append", "(", "temp", ")", "table", "[", "temp", "]", "=", "gen_temp", "table_inv", "[", "temp", "]", "=", "_af_invert", "(", "gen_temp", ")", "used", "[", "temp", "]", "=", "True", "else", ":", "schreier_gen", "=", "_af_rmuln", "(", "table_inv", "[", "temp", "]", ",", "gen", ",", "table", "[", "b", "]", ")", "if", "schreier_gen", "not", "in", "stab_gens", ":", "stab_gens", ".", "append", "(", "schreier_gen", ")", "return", "[", "_af_new", "(", "x", ")", "for", "x", "in", "stab_gens", "]" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/combinatorics/perm_groups.py#L4961-L5008
Ericsson/codechecker
c4e43f62dc3acbf71d3109b337db7c97f7852f43
analyzer/codechecker_analyzer/analyzer_context.py
python
Context.__init_env
(self)
Set environment variables.
Set environment variables.
[ "Set", "environment", "variables", "." ]
def __init_env(self): """ Set environment variables. """ # Get generic package specific environment variables. self.logger_bin = os.environ.get(self.env_vars['cc_logger_bin']) self.logger_file = os.environ.get(self.env_vars['cc_logger_file']) self.logger_compilers = os.environ.get( self.env_vars['cc_logger_compiles']) self.ld_preload = os.environ.get(self.env_vars['ld_preload']) self.ld_lib_path = self.env_vars['env_ld_lib_path']
[ "def", "__init_env", "(", "self", ")", ":", "# Get generic package specific environment variables.", "self", ".", "logger_bin", "=", "os", ".", "environ", ".", "get", "(", "self", ".", "env_vars", "[", "'cc_logger_bin'", "]", ")", "self", ".", "logger_file", "=", "os", ".", "environ", ".", "get", "(", "self", ".", "env_vars", "[", "'cc_logger_file'", "]", ")", "self", ".", "logger_compilers", "=", "os", ".", "environ", ".", "get", "(", "self", ".", "env_vars", "[", "'cc_logger_compiles'", "]", ")", "self", ".", "ld_preload", "=", "os", ".", "environ", ".", "get", "(", "self", ".", "env_vars", "[", "'ld_preload'", "]", ")", "self", ".", "ld_lib_path", "=", "self", ".", "env_vars", "[", "'env_ld_lib_path'", "]" ]
https://github.com/Ericsson/codechecker/blob/c4e43f62dc3acbf71d3109b337db7c97f7852f43/analyzer/codechecker_analyzer/analyzer_context.py#L106-L114
mikedh/trimesh
6b1e05616b44e6dd708d9bc748b211656ebb27ec
trimesh/base.py
python
Trimesh.face_adjacency_edges
(self)
return self._cache['face_adjacency_edges']
Returns the edges that are shared by the adjacent faces. Returns -------- edges : (n, 2) int Vertex indices which correspond to face_adjacency
Returns the edges that are shared by the adjacent faces.
[ "Returns", "the", "edges", "that", "are", "shared", "by", "the", "adjacent", "faces", "." ]
def face_adjacency_edges(self): """ Returns the edges that are shared by the adjacent faces. Returns -------- edges : (n, 2) int Vertex indices which correspond to face_adjacency """ # this value is calculated as a byproduct of the face adjacency populate = self.face_adjacency return self._cache['face_adjacency_edges']
[ "def", "face_adjacency_edges", "(", "self", ")", ":", "# this value is calculated as a byproduct of the face adjacency", "populate", "=", "self", ".", "face_adjacency", "return", "self", ".", "_cache", "[", "'face_adjacency_edges'", "]" ]
https://github.com/mikedh/trimesh/blob/6b1e05616b44e6dd708d9bc748b211656ebb27ec/trimesh/base.py#L1333-L1344
SeldonIO/alibi
ce961caf995d22648a8338857822c90428af4765
alibi/explainers/backends/pytorch/cfrl_base.py
python
PtCounterfactualRLDataset.__init__
(self, X: np.ndarray, preprocessor: Callable, predictor: Callable, conditional_func: Callable, batch_size: int)
Constructor. Parameters ---------- X Array of input instances. The input should NOT be preprocessed as it will be preprocessed when calling the `preprocessor` function. preprocessor Preprocessor function. This function corresponds to the preprocessing steps applied to the autoencoder model. predictor Prediction function. The classifier function should expect the input in the original format and preprocess it internally in the `predictor` if necessary. conditional_func Conditional function generator. Given a preprocessed input array, the function generates a conditional array. batch_size Dimension of the batch used during training. The same batch size is used to infer the classification labels of the input dataset.
Constructor.
[ "Constructor", "." ]
def __init__(self, X: np.ndarray, preprocessor: Callable, predictor: Callable, conditional_func: Callable, batch_size: int) -> None: """ Constructor. Parameters ---------- X Array of input instances. The input should NOT be preprocessed as it will be preprocessed when calling the `preprocessor` function. preprocessor Preprocessor function. This function corresponds to the preprocessing steps applied to the autoencoder model. predictor Prediction function. The classifier function should expect the input in the original format and preprocess it internally in the `predictor` if necessary. conditional_func Conditional function generator. Given a preprocessed input array, the function generates a conditional array. batch_size Dimension of the batch used during training. The same batch size is used to infer the classification labels of the input dataset. """ super().__init__() self.X = X self.preprocessor = preprocessor self.predictor = predictor self.conditional_func = conditional_func self.batch_size = batch_size # Infer the labels of the input dataset. This is performed in batches. self.Y_m = self.predict_batches(X=self.X, predictor=self.predictor, batch_size=self.batch_size) # Define number of classes for classification & minimum and maximum labels for regression if self.Y_m.shape[1] > 1: self.num_classes = self.Y_m.shape[1] else: self.min_m = np.min(self.Y_m) self.max_m = np.max(self.Y_m) # Preprocess the input data. self.X = self.preprocessor(self.X)
[ "def", "__init__", "(", "self", ",", "X", ":", "np", ".", "ndarray", ",", "preprocessor", ":", "Callable", ",", "predictor", ":", "Callable", ",", "conditional_func", ":", "Callable", ",", "batch_size", ":", "int", ")", "->", "None", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "X", "=", "X", "self", ".", "preprocessor", "=", "preprocessor", "self", ".", "predictor", "=", "predictor", "self", ".", "conditional_func", "=", "conditional_func", "self", ".", "batch_size", "=", "batch_size", "# Infer the labels of the input dataset. This is performed in batches.", "self", ".", "Y_m", "=", "self", ".", "predict_batches", "(", "X", "=", "self", ".", "X", ",", "predictor", "=", "self", ".", "predictor", ",", "batch_size", "=", "self", ".", "batch_size", ")", "# Define number of classes for classification & minimum and maximum labels for regression", "if", "self", ".", "Y_m", ".", "shape", "[", "1", "]", ">", "1", ":", "self", ".", "num_classes", "=", "self", ".", "Y_m", ".", "shape", "[", "1", "]", "else", ":", "self", ".", "min_m", "=", "np", ".", "min", "(", "self", ".", "Y_m", ")", "self", ".", "max_m", "=", "np", ".", "max", "(", "self", ".", "Y_m", ")", "# Preprocess the input data.", "self", ".", "X", "=", "self", ".", "preprocessor", "(", "self", ".", "X", ")" ]
https://github.com/SeldonIO/alibi/blob/ce961caf995d22648a8338857822c90428af4765/alibi/explainers/backends/pytorch/cfrl_base.py#L26-L73
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/Xenotix Python Scripting Engine/Lib/mailbox.py
python
_mboxMMDFMessage.get_flags
(self)
return self.get('Status', '') + self.get('X-Status', '')
Return as a string the flags that are set.
Return as a string the flags that are set.
[ "Return", "as", "a", "string", "the", "flags", "that", "are", "set", "." ]
def get_flags(self): """Return as a string the flags that are set.""" return self.get('Status', '') + self.get('X-Status', '')
[ "def", "get_flags", "(", "self", ")", ":", "return", "self", ".", "get", "(", "'Status'", ",", "''", ")", "+", "self", ".", "get", "(", "'X-Status'", ",", "''", ")" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/Lib/mailbox.py#L1552-L1554
EricSteinberger/PokerRL
e02ea667061b96912e424231da071b6f20a262f7
PokerRL/game/_/look_up_table.py
python
_LutGetterHoldem.get_idx_2_hole_card_LUT
(self)
return self.cpp_backend.get_idx_2_hole_card_lut()
[]
def get_idx_2_hole_card_LUT(self): return self.cpp_backend.get_idx_2_hole_card_lut()
[ "def", "get_idx_2_hole_card_LUT", "(", "self", ")", ":", "return", "self", ".", "cpp_backend", ".", "get_idx_2_hole_card_lut", "(", ")" ]
https://github.com/EricSteinberger/PokerRL/blob/e02ea667061b96912e424231da071b6f20a262f7/PokerRL/game/_/look_up_table.py#L115-L116
JinpengLI/deep_ocr
450148c0c51b3565a96ac2f3c94ee33022e55307
deep_ocr/ocrolib/morph.py
python
spread_labels
(labels,maxdist=9999999)
return spread
Spread the given labels to the background
Spread the given labels to the background
[ "Spread", "the", "given", "labels", "to", "the", "background" ]
def spread_labels(labels,maxdist=9999999): """Spread the given labels to the background""" distances,features = morphology.distance_transform_edt(labels==0,return_distances=1,return_indices=1) indexes = features[0]*labels.shape[1]+features[1] spread = labels.ravel()[indexes.ravel()].reshape(*labels.shape) spread *= (distances<maxdist) return spread
[ "def", "spread_labels", "(", "labels", ",", "maxdist", "=", "9999999", ")", ":", "distances", ",", "features", "=", "morphology", ".", "distance_transform_edt", "(", "labels", "==", "0", ",", "return_distances", "=", "1", ",", "return_indices", "=", "1", ")", "indexes", "=", "features", "[", "0", "]", "*", "labels", ".", "shape", "[", "1", "]", "+", "features", "[", "1", "]", "spread", "=", "labels", ".", "ravel", "(", ")", "[", "indexes", ".", "ravel", "(", ")", "]", ".", "reshape", "(", "*", "labels", ".", "shape", ")", "spread", "*=", "(", "distances", "<", "maxdist", ")", "return", "spread" ]
https://github.com/JinpengLI/deep_ocr/blob/450148c0c51b3565a96ac2f3c94ee33022e55307/deep_ocr/ocrolib/morph.py#L127-L133
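A small self-contained sketch of what the spread_labels record above computes, written against scipy.ndimage directly rather than the repo's own morphology import (an assumption on my part): every background pixel takes the label of its nearest seed, capped at maxdist.

import numpy as np
from scipy.ndimage import distance_transform_edt  # assumed stand-in for the repo's morphology module

labels = np.zeros((5, 5), dtype=int)
labels[0, 0] = 1  # seed labelled 1 in the top-left corner
labels[4, 4] = 2  # seed labelled 2 in the bottom-right corner

# Same two steps as the record: EDT of the background, then index back into the label image.
distances, (rows, cols) = distance_transform_edt(labels == 0, return_distances=True, return_indices=True)
spread = labels[rows, cols] * (distances < 3)  # cap the spread at maxdist=3
print(spread)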
O365/python-o365
7f77005c3cee8177d0141e79b8eda8a7b60c5124
O365/excel.py
python
Range.insert_range
(self, shift)
return self._get_range('insert_range', method='POST', shift=shift.capitalize())
Inserts a cell or a range of cells into the worksheet in place of this range, and shifts the other cells to make space. :param str shift: Specifies which way to shift the cells. The possible values are: down, right. :return: new Range instance at the now blank space
Inserts a cell or a range of cells into the worksheet in place of this range, and shifts the other cells to make space. :param str shift: Specifies which way to shift the cells. The possible values are: down, right. :return: new Range instance at the now blank space
[ "Inserts", "a", "cell", "or", "a", "range", "of", "cells", "into", "the", "worksheet", "in", "place", "of", "this", "range", "and", "shifts", "the", "other", "cells", "to", "make", "space", ".", ":", "param", "str", "shift", ":", "Specifies", "which", "way", "to", "shift", "the", "cells", ".", "The", "possible", "values", "are", ":", "down", "right", ".", ":", "return", ":", "new", "Range", "instance", "at", "the", "now", "blank", "space" ]
def insert_range(self, shift): """ Inserts a cell or a range of cells into the worksheet in place of this range, and shifts the other cells to make space. :param str shift: Specifies which way to shift the cells. The possible values are: down, right. :return: new Range instance at the now blank space """ return self._get_range('insert_range', method='POST', shift=shift.capitalize())
[ "def", "insert_range", "(", "self", ",", "shift", ")", ":", "return", "self", ".", "_get_range", "(", "'insert_range'", ",", "method", "=", "'POST'", ",", "shift", "=", "shift", ".", "capitalize", "(", ")", ")" ]
https://github.com/O365/python-o365/blob/7f77005c3cee8177d0141e79b8eda8a7b60c5124/O365/excel.py#L788-L795
learningequality/ka-lite
571918ea668013dcf022286ea85eff1c5333fb8b
kalite/packages/bundled/django/core/serializers/xml_serializer.py
python
Serializer.end_object
(self, obj)
Called after handling all fields for an object.
Called after handling all fields for an object.
[ "Called", "after", "handling", "all", "fields", "for", "an", "object", "." ]
def end_object(self, obj): """ Called after handling all fields for an object. """ self.indent(1) self.xml.endElement("object")
[ "def", "end_object", "(", "self", ",", "obj", ")", ":", "self", ".", "indent", "(", "1", ")", "self", ".", "xml", ".", "endElement", "(", "\"object\"", ")" ]
https://github.com/learningequality/ka-lite/blob/571918ea668013dcf022286ea85eff1c5333fb8b/kalite/packages/bundled/django/core/serializers/xml_serializer.py#L60-L65
vaexio/vaex
6c1571f4f1ac030eb7128c1b35b2ccbb5dd29cac
packages/vaex-core/vaex/logging.py
python
set_log_level_info
(loggers=["vaex"])
set log level to info
set log level to info
[ "set", "log", "level", "to", "info" ]
def set_log_level_info(loggers=["vaex"]): """set log level to info""" set_log_level(loggers, logging.INFO)
[ "def", "set_log_level_info", "(", "loggers", "=", "[", "\"vaex\"", "]", ")", ":", "set_log_level", "(", "loggers", ",", "logging", ".", "INFO", ")" ]
https://github.com/vaexio/vaex/blob/6c1571f4f1ac030eb7128c1b35b2ccbb5dd29cac/packages/vaex-core/vaex/logging.py#L25-L27
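A one-line usage sketch for the vaex logging record above, assuming vaex is installed; the sub-logger name in the second call is hypothetical.

from vaex.logging import set_log_level_info  # module path taken from the record above

set_log_level_info()                     # default: raise the 'vaex' logger to INFO
set_log_level_info(["vaex.dataframe"])   # or target a specific sub-logger (name assumed, not verified)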
pantsbuild/pex
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
pex/pex_builder.py
python
PEXBuilder.freeze
(self, bytecode_compile=True)
Freeze the PEX. :param bytecode_compile: If True, precompile .py files into .pyc files when freezing code. Freezing the PEX writes all the necessary metadata and environment bootstrapping code. It may only be called once and renders the PEXBuilder immutable.
Freeze the PEX.
[ "Freeze", "the", "PEX", "." ]
def freeze(self, bytecode_compile=True): """Freeze the PEX. :param bytecode_compile: If True, precompile .py files into .pyc files when freezing code. Freezing the PEX writes all the necessary metadata and environment bootstrapping code. It may only be called once and renders the PEXBuilder immutable. """ self._ensure_unfrozen("Freezing the environment") self._prepare_bootstrap() self._prepare_code() if bytecode_compile: self._precompile_source() self._frozen = True
[ "def", "freeze", "(", "self", ",", "bytecode_compile", "=", "True", ")", ":", "self", ".", "_ensure_unfrozen", "(", "\"Freezing the environment\"", ")", "self", ".", "_prepare_bootstrap", "(", ")", "self", ".", "_prepare_code", "(", ")", "if", "bytecode_compile", ":", "self", ".", "_precompile_source", "(", ")", "self", ".", "_frozen", "=", "True" ]
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/pex_builder.py#L598-L611
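A hedged build sketch around PEXBuilder.freeze from the record above. Only freeze itself comes from the record; add_source, set_entry_point and build are recalled from the pex API, and the file names are placeholders.

from pex.pex_builder import PEXBuilder  # module path taken from the record above

builder = PEXBuilder()
builder.add_source("hello.py", "hello.py")  # placeholder module to bundle
builder.set_entry_point("hello")            # placeholder entry point
builder.freeze(bytecode_compile=False)      # freeze without precompiling .pyc files
builder.build("hello.pex")                  # write the frozen PEX to disk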
misterch0c/shadowbroker
e3a069bea47a2c1009697941ac214adc6f90aa8d
windows/Resources/Python/Core/Lib/idlelib/rpc.py
python
RPCServer.handle_error
(self, request, client_address)
Override TCPServer method Error message goes to __stderr__. No error message if exiting normally or socket raised EOF. Other exceptions not handled in server code will cause os._exit.
Override TCPServer method Error message goes to __stderr__. No error message if exiting normally or socket raised EOF. Other exceptions not handled in server code will cause os._exit.
[ "Override", "TCPServer", "method", "Error", "message", "goes", "to", "__stderr__", ".", "No", "error", "message", "if", "exiting", "normally", "or", "socket", "raised", "EOF", ".", "Other", "exceptions", "not", "handled", "in", "server", "code", "will", "cause", "os", ".", "_exit", "." ]
def handle_error(self, request, client_address): """Override TCPServer method Error message goes to __stderr__. No error message if exiting normally or socket raised EOF. Other exceptions not handled in server code will cause os._exit. """ try: raise except SystemExit: raise except: erf = sys.__stderr__ print >> erf, '\n' + '-' * 40 print >> erf, 'Unhandled server exception!' print >> erf, 'Thread: %s' % threading.currentThread().getName() print >> erf, 'Client Address: ', client_address print >> erf, 'Request: ', repr(request) traceback.print_exc(file=erf) print >> erf, '\n*** Unrecoverable, server exiting!' print >> erf, '-' * 40 os._exit(0)
[ "def", "handle_error", "(", "self", ",", "request", ",", "client_address", ")", ":", "try", ":", "raise", "except", "SystemExit", ":", "raise", "except", ":", "erf", "=", "sys", ".", "__stderr__", "print", ">>", "erf", ",", "'\\n'", "+", "'-'", "*", "40", "print", ">>", "erf", ",", "'Unhandled server exception!'", "print", ">>", "erf", ",", "'Thread: %s'", "%", "threading", ".", "currentThread", "(", ")", ".", "getName", "(", ")", "print", ">>", "erf", ",", "'Client Address: '", ",", "client_address", "print", ">>", "erf", ",", "'Request: '", ",", "repr", "(", "request", ")", "traceback", ".", "print_exc", "(", "file", "=", "erf", ")", "print", ">>", "erf", ",", "'\\n*** Unrecoverable, server exiting!'", "print", ">>", "erf", ",", "'-'", "*", "40", "os", ".", "_exit", "(", "0", ")" ]
https://github.com/misterch0c/shadowbroker/blob/e3a069bea47a2c1009697941ac214adc6f90aa8d/windows/Resources/Python/Core/Lib/idlelib/rpc.py#L89-L111
openembedded/bitbake
98407efc8c670abd71d3fa88ec3776ee9b5c38f3
lib/layerindexlib/__init__.py
python
LayerIndex._parse_params
(self, params)
return param_dict
Take a parameter list, return a dictionary of parameters. Expected to be called from the data of urllib.parse.urlparse(url).params If there are two conflicting parameters, last in wins...
Take a parameter list, return a dictionary of parameters.
[ "Take", "a", "parameter", "list", "return", "a", "dictionary", "of", "parameters", "." ]
def _parse_params(self, params): '''Take a parameter list, return a dictionary of parameters. Expected to be called from the data of urllib.parse.urlparse(url).params If there are two conflicting parameters, last in wins... ''' param_dict = {} for param in params.split(';'): if not param: continue item = param.split('=', 1) logger.debug(item) param_dict[item[0]] = item[1] return param_dict
[ "def", "_parse_params", "(", "self", ",", "params", ")", ":", "param_dict", "=", "{", "}", "for", "param", "in", "params", ".", "split", "(", "';'", ")", ":", "if", "not", "param", ":", "continue", "item", "=", "param", ".", "split", "(", "'='", ",", "1", ")", "logger", ".", "debug", "(", "item", ")", "param_dict", "[", "item", "[", "0", "]", "]", "=", "item", "[", "1", "]", "return", "param_dict" ]
https://github.com/openembedded/bitbake/blob/98407efc8c670abd71d3fa88ec3776ee9b5c38f3/lib/layerindexlib/__init__.py#L83-L99
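A standalone sketch of the behaviour documented in the _parse_params record above, rewritten as a plain function because the original is a LayerIndex method; the example parameter string is made up. The last duplicate key wins, as the docstring says.

def parse_params(params):
    # Mirror the record: split on ';', then split each item on the first '='.
    param_dict = {}
    for param in params.split(';'):
        if not param:
            continue
        key, value = param.split('=', 1)
        param_dict[key] = value
    return param_dict

print(parse_params("branch=master;type=git;branch=kirkstone"))
# -> {'branch': 'kirkstone', 'type': 'git'}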
Azure/azure-iot-sdk-python
51fa810907373fd2134af49bd03d3977ca7a9a8d
azure-iot-hub/azure/iot/hub/iothub_configuration_manager.py
python
IoTHubConfigurationManager.create_configuration
(self, configuration)
return self.protocol.configuration.create_or_update(configuration.id, configuration)
Creates a configuration for devices or modules of an IoTHub. :param str configuration_id: The id of the configuration. :param Configuration configuration: The configuration to create. :raises: `HttpOperationError<msrest.exceptions.HttpOperationError>` if the HTTP response status is not in [200]. :returns: Configuration object containing the created configuration.
Creates a configuration for devices or modules of an IoTHub.
[ "Creates", "a", "configuration", "for", "devices", "or", "modules", "of", "an", "IoTHub", "." ]
def create_configuration(self, configuration): """Creates a configuration for devices or modules of an IoTHub. :param str configuration_id: The id of the configuration. :param Configuration configuration: The configuration to create. :raises: `HttpOperationError<msrest.exceptions.HttpOperationError>` if the HTTP response status is not in [200]. :returns: Configuration object containing the created configuration. """ return self.protocol.configuration.create_or_update(configuration.id, configuration)
[ "def", "create_configuration", "(", "self", ",", "configuration", ")", ":", "return", "self", ".", "protocol", ".", "configuration", ".", "create_or_update", "(", "configuration", ".", "id", ",", "configuration", ")" ]
https://github.com/Azure/azure-iot-sdk-python/blob/51fa810907373fd2134af49bd03d3977ca7a9a8d/azure-iot-hub/azure/iot/hub/iothub_configuration_manager.py#L85-L96
theislab/anndata
664e32b0aa6625fe593370d37174384c05abfd4e
anndata/_core/sparse_dataset.py
python
get_backed_class
(format_str: str)
[]
def get_backed_class(format_str: str) -> Type[BackedSparseMatrix]: for fmt, backed_class, _ in FORMATS: if format_str == fmt: return backed_class raise ValueError(f"Format string {format_str} is not supported.")
[ "def", "get_backed_class", "(", "format_str", ":", "str", ")", "->", "Type", "[", "BackedSparseMatrix", "]", ":", "for", "fmt", ",", "backed_class", ",", "_", "in", "FORMATS", ":", "if", "format_str", "==", "fmt", ":", "return", "backed_class", "raise", "ValueError", "(", "f\"Format string {format_str} is not supported.\"", ")" ]
https://github.com/theislab/anndata/blob/664e32b0aa6625fe593370d37174384c05abfd4e/anndata/_core/sparse_dataset.py#L223-L227
GNS3/gns3-gui
da8adbaa18ab60e053af2a619efd468f4c8950f3
gns3/modules/vmware/__init__.py
python
VMware.preferencePages
()
return [VMwarePreferencesPage, VMwareVMPreferencesPage]
Returns the preference pages for this module. :returns: QWidget object list
Returns the preference pages for this module.
[ "Returns", "the", "preference", "pages", "for", "this", "module", "." ]
def preferencePages(): """ Returns the preference pages for this module. :returns: QWidget object list """ from .pages.vmware_preferences_page import VMwarePreferencesPage from .pages.vmware_vm_preferences_page import VMwareVMPreferencesPage return [VMwarePreferencesPage, VMwareVMPreferencesPage]
[ "def", "preferencePages", "(", ")", ":", "from", ".", "pages", ".", "vmware_preferences_page", "import", "VMwarePreferencesPage", "from", ".", "pages", ".", "vmware_vm_preferences_page", "import", "VMwareVMPreferencesPage", "return", "[", "VMwarePreferencesPage", ",", "VMwareVMPreferencesPage", "]" ]
https://github.com/GNS3/gns3-gui/blob/da8adbaa18ab60e053af2a619efd468f4c8950f3/gns3/modules/vmware/__init__.py#L270-L279
Walleclipse/ChineseAddress_OCR
ca7929c72cbac09c71501f06bf16c387f42f00cf
ctpn/lib/roi_data_layer/minibatch.py
python
_get_bbox_regression_labels
(bbox_target_data, num_classes)
return bbox_targets, bbox_inside_weights
Bounding-box regression targets are stored in a compact form in the roidb. This function expands those targets into the 4-of-4*K representation used by the network (i.e. only one class has non-zero targets). The loss weights are similarly expanded. Returns: bbox_target_data (ndarray): N x 4K blob of regression targets bbox_inside_weights (ndarray): N x 4K blob of loss weights
Bounding-box regression targets are stored in a compact form in the roidb.
[ "Bounding", "-", "box", "regression", "targets", "are", "stored", "in", "a", "compact", "form", "in", "the", "roidb", "." ]
def _get_bbox_regression_labels(bbox_target_data, num_classes): """Bounding-box regression targets are stored in a compact form in the roidb. This function expands those targets into the 4-of-4*K representation used by the network (i.e. only one class has non-zero targets). The loss weights are similarly expanded. Returns: bbox_target_data (ndarray): N x 4K blob of regression targets bbox_inside_weights (ndarray): N x 4K blob of loss weights """ clss = bbox_target_data[:, 0] bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32) bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32) inds = np.where(clss > 0)[0] for ind in inds: cls = clss[ind] start = 4 * cls end = start + 4 bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS return bbox_targets, bbox_inside_weights
[ "def", "_get_bbox_regression_labels", "(", "bbox_target_data", ",", "num_classes", ")", ":", "clss", "=", "bbox_target_data", "[", ":", ",", "0", "]", "bbox_targets", "=", "np", ".", "zeros", "(", "(", "clss", ".", "size", ",", "4", "*", "num_classes", ")", ",", "dtype", "=", "np", ".", "float32", ")", "bbox_inside_weights", "=", "np", ".", "zeros", "(", "bbox_targets", ".", "shape", ",", "dtype", "=", "np", ".", "float32", ")", "inds", "=", "np", ".", "where", "(", "clss", ">", "0", ")", "[", "0", "]", "for", "ind", "in", "inds", ":", "cls", "=", "clss", "[", "ind", "]", "start", "=", "4", "*", "cls", "end", "=", "start", "+", "4", "bbox_targets", "[", "ind", ",", "start", ":", "end", "]", "=", "bbox_target_data", "[", "ind", ",", "1", ":", "]", "bbox_inside_weights", "[", "ind", ",", "start", ":", "end", "]", "=", "cfg", ".", "TRAIN", ".", "BBOX_INSIDE_WEIGHTS", "return", "bbox_targets", ",", "bbox_inside_weights" ]
https://github.com/Walleclipse/ChineseAddress_OCR/blob/ca7929c72cbac09c71501f06bf16c387f42f00cf/ctpn/lib/roi_data_layer/minibatch.py#L156-L178
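A self-contained numpy sketch of the expansion performed in the _get_bbox_regression_labels record above, with cfg.TRAIN.BBOX_INSIDE_WEIGHTS replaced by ones (an assumption, since that value lives in the repo's config): each foreground row scatters its 4 targets into the 4 columns owned by its class.

import numpy as np

num_classes = 3
# Each row: [class_label, dx, dy, dw, dh]; class 0 is background and gets no targets.
bbox_target_data = np.array([[1, 0.1, 0.2, 0.3, 0.4],
                             [0, 0.0, 0.0, 0.0, 0.0],
                             [2, -0.1, 0.5, 0.0, 0.2]], dtype=np.float32)

clss = bbox_target_data[:, 0]
bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32)
bbox_inside_weights = np.zeros_like(bbox_targets)
for ind in np.where(clss > 0)[0]:
    start = int(4 * clss[ind])
    bbox_targets[ind, start:start + 4] = bbox_target_data[ind, 1:]
    bbox_inside_weights[ind, start:start + 4] = 1.0  # stand-in for cfg.TRAIN.BBOX_INSIDE_WEIGHTS

print(bbox_targets)  # row 0 fills columns 4:8 (class 1), row 2 fills columns 8:12 (class 2)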
Jenyay/outwiker
50530cf7b3f71480bb075b2829bc0669773b835b
src/outwiker/gui/controls/ultimatelistctrl.py
python
UltimateListItem.DeleteWindow
(self)
Deletes the window associated to the item (if any).
Deletes the window associated to the item (if any).
[ "Deletes", "the", "window", "associated", "to", "the", "item", "(", "if", "any", ")", "." ]
def DeleteWindow(self): """ Deletes the window associated to the item (if any). """ if self._wnd: listCtrl = self._wnd.GetParent() if self in listCtrl._itemWithWindow: listCtrl._itemWithWindow.remove(self) self._wnd.Destroy() self._wnd = None
[ "def", "DeleteWindow", "(", "self", ")", ":", "if", "self", ".", "_wnd", ":", "listCtrl", "=", "self", ".", "_wnd", ".", "GetParent", "(", ")", "if", "self", "in", "listCtrl", ".", "_itemWithWindow", ":", "listCtrl", ".", "_itemWithWindow", ".", "remove", "(", "self", ")", "self", ".", "_wnd", ".", "Destroy", "(", ")", "self", ".", "_wnd", "=", "None" ]
https://github.com/Jenyay/outwiker/blob/50530cf7b3f71480bb075b2829bc0669773b835b/src/outwiker/gui/controls/ultimatelistctrl.py#L1984-L1992
travisgoodspeed/goodfet
1750cc1e8588af5470385e52fa098ca7364c2863
contrib/reCAN/mainDisplay.py
python
DisplayApp.idInfo
(self)
This method will open an info box for the user to gain information on a known arbID
This method will open an info box for the user to gain information on a known arbID
[ "This", "method", "will", "open", "an", "info", "box", "for", "the", "user", "to", "gain", "information", "on", "a", "known", "arbID" ]
def idInfo(self): """ This method will open an info box for the user to gain information on a known arbID""" infoBox = info(parent=self.root, title="Information Gathered") pass
[ "def", "idInfo", "(", "self", ")", ":", "infoBox", "=", "info", "(", "parent", "=", "self", ".", "root", ",", "title", "=", "\"Information Gathered\"", ")", "pass" ]
https://github.com/travisgoodspeed/goodfet/blob/1750cc1e8588af5470385e52fa098ca7364c2863/contrib/reCAN/mainDisplay.py#L2704-L2708
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/serverless/v1/service/function/function_version/__init__.py
python
FunctionVersionPage.__repr__
(self)
return '<Twilio.Serverless.V1.FunctionVersionPage>'
Provide a friendly representation :returns: Machine friendly representation :rtype: str
Provide a friendly representation
[ "Provide", "a", "friendly", "representation" ]
def __repr__(self): """ Provide a friendly representation :returns: Machine friendly representation :rtype: str """ return '<Twilio.Serverless.V1.FunctionVersionPage>'
[ "def", "__repr__", "(", "self", ")", ":", "return", "'<Twilio.Serverless.V1.FunctionVersionPage>'" ]
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/serverless/v1/service/function/function_version/__init__.py#L195-L202
tensorflow/models
6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3
research/deeplab/core/resnet_v1_beta.py
python
root_block_fn_for_beta_variant
(net, depth_multiplier=1.0)
return net
Gets root_block_fn for beta variant. ResNet-v1 beta variant modifies the first original 7x7 convolution to three 3x3 convolutions. Args: net: A tensor of size [batch, height, width, channels], input to the model. depth_multiplier: Controls the number of convolution output channels for each input channel. The total number of depthwise convolution output channels will be equal to `num_filters_out * depth_multiplier`. Returns: A tensor after three 3x3 convolutions.
Gets root_block_fn for beta variant.
[ "Gets", "root_block_fn", "for", "beta", "variant", "." ]
def root_block_fn_for_beta_variant(net, depth_multiplier=1.0): """Gets root_block_fn for beta variant. ResNet-v1 beta variant modifies the first original 7x7 convolution to three 3x3 convolutions. Args: net: A tensor of size [batch, height, width, channels], input to the model. depth_multiplier: Controls the number of convolution output channels for each input channel. The total number of depthwise convolution output channels will be equal to `num_filters_out * depth_multiplier`. Returns: A tensor after three 3x3 convolutions. """ net = conv2d_ws.conv2d_same( net, int(64 * depth_multiplier), 3, stride=2, scope='conv1_1') net = conv2d_ws.conv2d_same( net, int(64 * depth_multiplier), 3, stride=1, scope='conv1_2') net = conv2d_ws.conv2d_same( net, int(128 * depth_multiplier), 3, stride=1, scope='conv1_3') return net
[ "def", "root_block_fn_for_beta_variant", "(", "net", ",", "depth_multiplier", "=", "1.0", ")", ":", "net", "=", "conv2d_ws", ".", "conv2d_same", "(", "net", ",", "int", "(", "64", "*", "depth_multiplier", ")", ",", "3", ",", "stride", "=", "2", ",", "scope", "=", "'conv1_1'", ")", "net", "=", "conv2d_ws", ".", "conv2d_same", "(", "net", ",", "int", "(", "64", "*", "depth_multiplier", ")", ",", "3", ",", "stride", "=", "1", ",", "scope", "=", "'conv1_2'", ")", "net", "=", "conv2d_ws", ".", "conv2d_same", "(", "net", ",", "int", "(", "128", "*", "depth_multiplier", ")", ",", "3", ",", "stride", "=", "1", ",", "scope", "=", "'conv1_3'", ")", "return", "net" ]
https://github.com/tensorflow/models/blob/6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3/research/deeplab/core/resnet_v1_beta.py#L154-L176
google/clusterfuzz
f358af24f414daa17a3649b143e71ea71871ef59
src/clusterfuzz/_internal/bot/minimizer/minimizer.py
python
Testcase.get_current_testcase_data
(self)
return self.minimizer.token_combiner(self.get_required_tokens())
Return the current test case data.
Return the current test case data.
[ "Return", "the", "current", "test", "case", "data", "." ]
def get_current_testcase_data(self): """Return the current test case data.""" return self.minimizer.token_combiner(self.get_required_tokens())
[ "def", "get_current_testcase_data", "(", "self", ")", ":", "return", "self", ".", "minimizer", ".", "token_combiner", "(", "self", ".", "get_required_tokens", "(", ")", ")" ]
https://github.com/google/clusterfuzz/blob/f358af24f414daa17a3649b143e71ea71871ef59/src/clusterfuzz/_internal/bot/minimizer/minimizer.py#L177-L179
clips/pattern
d25511f9ca7ed9356b801d8663b8b5168464e68f
pattern/vector/__init__.py
python
Cluster.__init__
(self, *args, **kwargs)
A nested list of Cluster and Vector objects, returned from hierarchical() clustering.
A nested list of Cluster and Vector objects, returned from hierarchical() clustering.
[ "A", "nested", "list", "of", "Cluster", "and", "Vector", "objects", "returned", "from", "hierarchical", "()", "clustering", "." ]
def __init__(self, *args, **kwargs): """ A nested list of Cluster and Vector objects, returned from hierarchical() clustering. """ list.__init__(self, *args, **kwargs)
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "list", ".", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/clips/pattern/blob/d25511f9ca7ed9356b801d8663b8b5168464e68f/pattern/vector/__init__.py#L2094-L2098
mesonbuild/meson
a22d0f9a0a787df70ce79b05d0c45de90a970048
docs/refman/loaderbase.py
python
LoaderBase.input_files
(self)
return list(self._input_files)
[]
def input_files(self) -> T.List[Path]: return list(self._input_files)
[ "def", "input_files", "(", "self", ")", "->", "T", ".", "List", "[", "Path", "]", ":", "return", "list", "(", "self", ".", "_input_files", ")" ]
https://github.com/mesonbuild/meson/blob/a22d0f9a0a787df70ce79b05d0c45de90a970048/docs/refman/loaderbase.py#L198-L199
dmis-lab/biobert
036f683797251328893b8f1dd6b0a3f5af29c922
run_re.py
python
BioBERTDDIProcessor.get_train_examples
(self, data_dir)
return self._create_examples( self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
See base class.
See base class.
[ "See", "base", "class", "." ]
def get_train_examples(self, data_dir): """See base class.""" return self._create_examples( self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
[ "def", "get_train_examples", "(", "self", ",", "data_dir", ")", ":", "return", "self", ".", "_create_examples", "(", "self", ".", "_read_tsv", "(", "os", ".", "path", ".", "join", "(", "data_dir", ",", "\"train.tsv\"", ")", ")", ",", "\"train\"", ")" ]
https://github.com/dmis-lab/biobert/blob/036f683797251328893b8f1dd6b0a3f5af29c922/run_re.py#L380-L383
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/html5lib/treebuilders/_base.py
python
TreeBuilder.clearActiveFormattingElements
(self)
[]
def clearActiveFormattingElements(self): entry = self.activeFormattingElements.pop() while self.activeFormattingElements and entry != Marker: entry = self.activeFormattingElements.pop()
[ "def", "clearActiveFormattingElements", "(", "self", ")", ":", "entry", "=", "self", ".", "activeFormattingElements", ".", "pop", "(", ")", "while", "self", ".", "activeFormattingElements", "and", "entry", "!=", "Marker", ":", "entry", "=", "self", ".", "activeFormattingElements", ".", "pop", "(", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/html5lib/treebuilders/_base.py#L227-L230
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit - MAC OSX/scripts/sshbackdoors/backdoors/shell/pupy/pupy/packages/windows/x86/psutil/_pslinux.py
python
Process.status
(self)
[]
def status(self): with open("/proc/%s/status" % self.pid, 'rb') as f: for line in f: if line.startswith(b"State:"): letter = line.split()[1] if PY3: letter = letter.decode() # XXX is '?' legit? (we're not supposed to return # it anyway) return PROC_STATUSES.get(letter, '?')
[ "def", "status", "(", "self", ")", ":", "with", "open", "(", "\"/proc/%s/status\"", "%", "self", ".", "pid", ",", "'rb'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "line", ".", "startswith", "(", "b\"State:\"", ")", ":", "letter", "=", "line", ".", "split", "(", ")", "[", "1", "]", "if", "PY3", ":", "letter", "=", "letter", ".", "decode", "(", ")", "# XXX is '?' legit? (we're not supposed to return", "# it anyway)", "return", "PROC_STATUSES", ".", "get", "(", "letter", ",", "'?'", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/scripts/sshbackdoors/backdoors/shell/pupy/pupy/packages/windows/x86/psutil/_pslinux.py#L1133-L1142
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/waterfall/increasing/_marker.py
python
Marker.color
(self)
return self["color"]
Sets the marker color of all increasing values. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str
Sets the marker color of all increasing values. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen
[ "Sets", "the", "marker", "color", "of", "all", "increasing", "values", ".", "The", "color", "property", "is", "a", "color", "and", "may", "be", "specified", "as", ":", "-", "A", "hex", "string", "(", "e", ".", "g", ".", "#ff0000", ")", "-", "An", "rgb", "/", "rgba", "string", "(", "e", ".", "g", ".", "rgb", "(", "255", "0", "0", ")", ")", "-", "An", "hsl", "/", "hsla", "string", "(", "e", ".", "g", ".", "hsl", "(", "0", "100%", "50%", ")", ")", "-", "An", "hsv", "/", "hsva", "string", "(", "e", ".", "g", ".", "hsv", "(", "0", "100%", "100%", ")", ")", "-", "A", "named", "CSS", "color", ":", "aliceblue", "antiquewhite", "aqua", "aquamarine", "azure", "beige", "bisque", "black", "blanchedalmond", "blue", "blueviolet", "brown", "burlywood", "cadetblue", "chartreuse", "chocolate", "coral", "cornflowerblue", "cornsilk", "crimson", "cyan", "darkblue", "darkcyan", "darkgoldenrod", "darkgray", "darkgrey", "darkgreen", "darkkhaki", "darkmagenta", "darkolivegreen", "darkorange", "darkorchid", "darkred", "darksalmon", "darkseagreen", "darkslateblue", "darkslategray", "darkslategrey", "darkturquoise", "darkviolet", "deeppink", "deepskyblue", "dimgray", "dimgrey", "dodgerblue", "firebrick", "floralwhite", "forestgreen", "fuchsia", "gainsboro", "ghostwhite", "gold", "goldenrod", "gray", "grey", "green", "greenyellow", "honeydew", "hotpink", "indianred", "indigo", "ivory", "khaki", "lavender", "lavenderblush", "lawngreen", "lemonchiffon", "lightblue", "lightcoral", "lightcyan", "lightgoldenrodyellow", "lightgray", "lightgrey", "lightgreen", "lightpink", "lightsalmon", "lightseagreen", "lightskyblue", "lightslategray", "lightslategrey", "lightsteelblue", "lightyellow", "lime", "limegreen", "linen", "magenta", "maroon", "mediumaquamarine", "mediumblue", "mediumorchid", "mediumpurple", "mediumseagreen", "mediumslateblue", "mediumspringgreen", "mediumturquoise", "mediumvioletred", "midnightblue", "mintcream", "mistyrose", "moccasin", "navajowhite", "navy", "oldlace", "olive", "olivedrab", "orange", "orangered", "orchid", "palegoldenrod", "palegreen", "paleturquoise", "palevioletred", "papayawhip", "peachpuff", "peru", "pink", "plum", "powderblue", "purple", "red", "rosybrown", "royalblue", "rebeccapurple", "saddlebrown", "salmon", "sandybrown", "seagreen", "seashell", "sienna", "silver", "skyblue", "slateblue", "slategray", "slategrey", "snow", "springgreen", "steelblue", "tan", "teal", "thistle", "tomato", "turquoise", "violet", "wheat", "white", "whitesmoke", "yellow", "yellowgreen" ]
def color(self): """ Sets the marker color of all increasing values. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str """ return self["color"]
[ "def", "color", "(", "self", ")", ":", "return", "self", "[", "\"color\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/waterfall/increasing/_marker.py#L16-L66
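A short usage sketch for the waterfall increasing-marker color property documented in the record above; the trace data is made up, and plotly is assumed to be installed.

import plotly.graph_objects as go

fig = go.Figure(go.Waterfall(
    x=["Q1", "Q2", "Q3"],                             # placeholder categories
    y=[10, -4, 7],
    increasing=dict(marker=dict(color="seagreen")),   # the property this record documents
    decreasing=dict(marker=dict(color="indianred")),  # its decreasing counterpart
))
fig.write_html("waterfall.html")  # write to a file rather than opening a browser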
tjweir/liftbook
e977a7face13ade1a4558e1909a6951d2f8928dd
elyxer.py
python
HybridFunction.writeparam
(self, pos)
return self.params[name].value
Write a single param of the form $0, $x...
Write a single param of the form $0, $x...
[ "Write", "a", "single", "param", "of", "the", "form", "$0", "$x", "..." ]
def writeparam(self, pos): "Write a single param of the form $0, $x..." name = '$' + pos.skipcurrent() if not name in self.params: Trace.error('Unknown parameter ' + name) return None if not self.params[name]: return None if pos.checkskip('.'): self.params[name].value.type = pos.globalpha() return self.params[name].value
[ "def", "writeparam", "(", "self", ",", "pos", ")", ":", "name", "=", "'$'", "+", "pos", ".", "skipcurrent", "(", ")", "if", "not", "name", "in", "self", ".", "params", ":", "Trace", ".", "error", "(", "'Unknown parameter '", "+", "name", ")", "return", "None", "if", "not", "self", ".", "params", "[", "name", "]", ":", "return", "None", "if", "pos", ".", "checkskip", "(", "'.'", ")", ":", "self", ".", "params", "[", "name", "]", ".", "value", ".", "type", "=", "pos", ".", "globalpha", "(", ")", "return", "self", ".", "params", "[", "name", "]", ".", "value" ]
https://github.com/tjweir/liftbook/blob/e977a7face13ade1a4558e1909a6951d2f8928dd/elyxer.py#L4802-L4812
otsaloma/gaupol
6dec7826654d223c71a8d3279dcd967e95c46714
gaupol/dialogs/preview_error.py
python
PreviewErrorDialog._init_dialog
(self, parent)
Initialize the dialog.
Initialize the dialog.
[ "Initialize", "the", "dialog", "." ]
def _init_dialog(self, parent): """Initialize the dialog.""" self.add_button(_("_Close"), Gtk.ResponseType.CLOSE) self.set_default_response(Gtk.ResponseType.CLOSE) self.set_transient_for(parent) self.set_modal(True)
[ "def", "_init_dialog", "(", "self", ",", "parent", ")", ":", "self", ".", "add_button", "(", "_", "(", "\"_Close\"", ")", ",", "Gtk", ".", "ResponseType", ".", "CLOSE", ")", "self", ".", "set_default_response", "(", "Gtk", ".", "ResponseType", ".", "CLOSE", ")", "self", ".", "set_transient_for", "(", "parent", ")", "self", ".", "set_modal", "(", "True", ")" ]
https://github.com/otsaloma/gaupol/blob/6dec7826654d223c71a8d3279dcd967e95c46714/gaupol/dialogs/preview_error.py#L43-L48
deepgully/me
f7ad65edc2fe435310c6676bc2e322cfe5d4c8f0
libs/sqlalchemy/ext/associationproxy.py
python
AssociationProxy.contains
(self, obj)
Produce a proxied 'contains' expression using EXISTS. This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` , :meth:`.RelationshipProperty.Comparator.has`, and/or :meth:`.RelationshipProperty.Comparator.contains` operators of the underlying proxied attributes.
Produce a proxied 'contains' expression using EXISTS.
[ "Produce", "a", "proxied", "contains", "expression", "using", "EXISTS", "." ]
def contains(self, obj): """Produce a proxied 'contains' expression using EXISTS. This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` , :meth:`.RelationshipProperty.Comparator.has`, and/or :meth:`.RelationshipProperty.Comparator.contains` operators of the underlying proxied attributes. """ if self.scalar and not self._value_is_scalar: return self._comparator.has( getattr(self.target_class, self.value_attr).contains(obj) ) else: return self._comparator.any(**{self.value_attr: obj})
[ "def", "contains", "(", "self", ",", "obj", ")", ":", "if", "self", ".", "scalar", "and", "not", "self", ".", "_value_is_scalar", ":", "return", "self", ".", "_comparator", ".", "has", "(", "getattr", "(", "self", ".", "target_class", ",", "self", ".", "value_attr", ")", ".", "contains", "(", "obj", ")", ")", "else", ":", "return", "self", ".", "_comparator", ".", "any", "(", "*", "*", "{", "self", ".", "value_attr", ":", "obj", "}", ")" ]
https://github.com/deepgully/me/blob/f7ad65edc2fe435310c6676bc2e322cfe5d4c8f0/libs/sqlalchemy/ext/associationproxy.py#L409-L424
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-47/fabmetheus_utilities/fabmetheus_tools/interpret_plugins/xml_plugins/artofillusion.py
python
processElementNode
(elementNode)
Process the xml element.
Process the xml element.
[ "Process", "the", "xml", "element", "." ]
def processElementNode(elementNode): "Process the xml element." evaluate.processArchivable(group.Group, elementNode)
[ "def", "processElementNode", "(", "elementNode", ")", ":", "evaluate", ".", "processArchivable", "(", "group", ".", "Group", ",", "elementNode", ")" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-47/fabmetheus_utilities/fabmetheus_tools/interpret_plugins/xml_plugins/artofillusion.py#L87-L89
netbox-community/netbox
50309d3ab3da2212343e1d9feaf47e497df9c3cb
netbox/dcim/tables/devices.py
python
get_interface_state_attribute
(record)
Get interface enabled state as string to attach to <tr/> DOM element.
Get interface enabled state as string to attach to <tr/> DOM element.
[ "Get", "interface", "enabled", "state", "as", "string", "to", "attach", "to", "<tr", "/", ">", "DOM", "element", "." ]
def get_interface_state_attribute(record): """ Get interface enabled state as string to attach to <tr/> DOM element. """ if record.enabled: return "enabled" else: return "disabled"
[ "def", "get_interface_state_attribute", "(", "record", ")", ":", "if", "record", ".", "enabled", ":", "return", "\"enabled\"", "else", ":", "return", "\"disabled\"" ]
https://github.com/netbox-community/netbox/blob/50309d3ab3da2212343e1d9feaf47e497df9c3cb/netbox/dcim/tables/devices.py#L60-L67
electronut/pp
7cb85df1e4bd68bd7cbc9a961409d8c362f15aeb
conway/conway.py
python
addGlider
(i, j, grid)
adds a glider with top left cell at (i, j)
adds a glider with top left cell at (i, j)
[ "adds", "a", "glider", "with", "top", "left", "cell", "at", "(", "i", "j", ")" ]
def addGlider(i, j, grid): """adds a glider with top left cell at (i, j)""" glider = np.array([[0, 0, 255], [255, 0, 255], [0, 255, 255]]) grid[i:i+3, j:j+3] = glider
[ "def", "addGlider", "(", "i", ",", "j", ",", "grid", ")", ":", "glider", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "255", "]", ",", "[", "255", ",", "0", ",", "255", "]", ",", "[", "0", ",", "255", ",", "255", "]", "]", ")", "grid", "[", "i", ":", "i", "+", "3", ",", "j", ":", "j", "+", "3", "]", "=", "glider" ]
https://github.com/electronut/pp/blob/7cb85df1e4bd68bd7cbc9a961409d8c362f15aeb/conway/conway.py#L22-L27
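A short usage sketch for the record above, assuming the 0/255 cell convention it uses; the grid size and placement are illustrative. The helper is restated verbatim so the snippet runs on its own.

# Sketch: seed a glider into an N x N Game of Life grid (cells are 0 or 255).
import numpy as np

def addGlider(i, j, grid):
    """adds a glider with top left cell at (i, j)"""
    glider = np.array([[0, 0, 255],
                       [255, 0, 255],
                       [0, 255, 255]])
    grid[i:i+3, j:j+3] = glider

N = 16                              # illustrative grid size
grid = np.zeros((N, N), dtype=int)  # all cells start dead (0)
addGlider(1, 1, grid)               # place the glider near the top-left corner
print(int((grid == 255).sum()))     # 5 -- a glider occupies five live cells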
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1_subject_rules_review_status.py
python
V1SubjectRulesReviewStatus.__repr__
(self)
return self.to_str()
For `print` and `pprint`
For `print` and `pprint`
[ "For", "print", "and", "pprint" ]
def __repr__(self): """For `print` and `pprint`""" return self.to_str()
[ "def", "__repr__", "(", "self", ")", ":", "return", "self", ".", "to_str", "(", ")" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_subject_rules_review_status.py#L193-L195
wikimedia/pywikibot
81a01ffaec7271bf5b4b170f85a80388420a4e78
pywikibot/tools/djvu.py
python
DjVuFile._get_page_info
(self, force=False)
return self._page_info
Return a dict of tuples (id, (size, dpi)) for all pages of djvu file. :param force: if True, refresh the cached data :type force: bool
Return a dict of tuples (id, (size, dpi)) for all pages of djvu file.
[ "Return", "a", "dict", "of", "tuples", "(", "id", "(", "size", "dpi", "))", "for", "all", "pages", "of", "djvu", "file", "." ]
def _get_page_info(self, force=False): """ Return a dict of tuples (id, (size, dpi)) for all pages of djvu file. :param force: if True, refresh the cached data :type force: bool """ if not hasattr(self, '_page_info'): self._page_info = {} res, stdoutdata = _call_cmd(['djvudump', self.file]) if not res: return False has_text = False for line in stdoutdata.decode('utf-8').split('\n'): if 'TXTz' in line: has_text = True if 'FORM:DJVU' in line: m = self._pat_form.search(line) if m: key, id = int(m.group('n')), m.group('id') else: # If djvu doc has only one page, # FORM:DJVU line in djvudump has no id key, id = 1, '' if 'INFO' in line: m = self._pat_info.search(line) if m: size, dpi = m.group('size'), int(m.group('dpi')) else: size, dpi = None, None else: continue self._page_info[key] = (id, (size, dpi)) self._has_text = has_text return self._page_info
[ "def", "_get_page_info", "(", "self", ",", "force", "=", "False", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_page_info'", ")", ":", "self", ".", "_page_info", "=", "{", "}", "res", ",", "stdoutdata", "=", "_call_cmd", "(", "[", "'djvudump'", ",", "self", ".", "file", "]", ")", "if", "not", "res", ":", "return", "False", "has_text", "=", "False", "for", "line", "in", "stdoutdata", ".", "decode", "(", "'utf-8'", ")", ".", "split", "(", "'\\n'", ")", ":", "if", "'TXTz'", "in", "line", ":", "has_text", "=", "True", "if", "'FORM:DJVU'", "in", "line", ":", "m", "=", "self", ".", "_pat_form", ".", "search", "(", "line", ")", "if", "m", ":", "key", ",", "id", "=", "int", "(", "m", ".", "group", "(", "'n'", ")", ")", ",", "m", ".", "group", "(", "'id'", ")", "else", ":", "# If djvu doc has only one page,", "# FORM:DJVU line in djvudump has no id", "key", ",", "id", "=", "1", ",", "''", "if", "'INFO'", "in", "line", ":", "m", "=", "self", ".", "_pat_info", ".", "search", "(", "line", ")", "if", "m", ":", "size", ",", "dpi", "=", "m", ".", "group", "(", "'size'", ")", ",", "int", "(", "m", ".", "group", "(", "'dpi'", ")", ")", "else", ":", "size", ",", "dpi", "=", "None", ",", "None", "else", ":", "continue", "self", ".", "_page_info", "[", "key", "]", "=", "(", "id", ",", "(", "size", ",", "dpi", ")", ")", "self", ".", "_has_text", "=", "has_text", "return", "self", ".", "_page_info" ]
https://github.com/wikimedia/pywikibot/blob/81a01ffaec7271bf5b4b170f85a80388420a4e78/pywikibot/tools/djvu.py#L146-L185
aiven/pghoard
1de0d2e33bf087b7ce3b6af556bbf941acfac3a4
pghoard/monitoring/prometheus.py
python
PrometheusClient.timing
(self, metric, value, tags=None)
[]
def timing(self, metric, value, tags=None): self._update(metric, value, tags)
[ "def", "timing", "(", "self", ",", "metric", ",", "value", ",", "tags", "=", "None", ")", ":", "self", ".", "_update", "(", "metric", ",", "value", ",", "tags", ")" ]
https://github.com/aiven/pghoard/blob/1de0d2e33bf087b7ce3b6af556bbf941acfac3a4/pghoard/monitoring/prometheus.py#L20-L21
chrivers/pyjaco
8ad793dce34ab7aed3b973aae729d6a943a2381c
pyjaco/formater.py
python
Formater.write
(self, text, indent=True, newline=True)
Writes the string text to the buffer with indentation and a newline if not specified otherwise.
Writes the string text to the buffer with indentation and a newline if not specified otherwise.
[ "Writes", "the", "string", "text", "to", "the", "buffer", "with", "indentation", "and", "a", "newline", "if", "not", "specified", "otherwise", "." ]
def write(self, text, indent=True, newline=True): """ Writes the string text to the buffer with indentation and a newline if not specified otherwise. """ if indent: self.__buffer.append(self.__indent_temp) self.__buffer.append(text) if newline: self.__buffer.append("\n")
[ "def", "write", "(", "self", ",", "text", ",", "indent", "=", "True", ",", "newline", "=", "True", ")", ":", "if", "indent", ":", "self", ".", "__buffer", ".", "append", "(", "self", ".", "__indent_temp", ")", "self", ".", "__buffer", ".", "append", "(", "text", ")", "if", "newline", ":", "self", ".", "__buffer", ".", "append", "(", "\"\\n\"", ")" ]
https://github.com/chrivers/pyjaco/blob/8ad793dce34ab7aed3b973aae729d6a943a2381c/pyjaco/formater.py#L56-L64
jet-admin/jet-django
9bd4536e02d581d39890d56190e8cc966e2714a4
jet_django/deps/rest_framework/serializers.py
python
HyperlinkedModelSerializer.build_nested_field
(self, field_name, relation_info, nested_depth)
return field_class, field_kwargs
Create nested fields for forward and reverse relationships.
Create nested fields for forward and reverse relationships.
[ "Create", "nested", "fields", "for", "forward", "and", "reverse", "relationships", "." ]
def build_nested_field(self, field_name, relation_info, nested_depth): """ Create nested fields for forward and reverse relationships. """ class NestedSerializer(HyperlinkedModelSerializer): class Meta: model = relation_info.related_model depth = nested_depth - 1 fields = '__all__' field_class = NestedSerializer field_kwargs = get_nested_relation_kwargs(relation_info) return field_class, field_kwargs
[ "def", "build_nested_field", "(", "self", ",", "field_name", ",", "relation_info", ",", "nested_depth", ")", ":", "class", "NestedSerializer", "(", "HyperlinkedModelSerializer", ")", ":", "class", "Meta", ":", "model", "=", "relation_info", ".", "related_model", "depth", "=", "nested_depth", "-", "1", "fields", "=", "'__all__'", "field_class", "=", "NestedSerializer", "field_kwargs", "=", "get_nested_relation_kwargs", "(", "relation_info", ")", "return", "field_class", ",", "field_kwargs" ]
https://github.com/jet-admin/jet-django/blob/9bd4536e02d581d39890d56190e8cc966e2714a4/jet_django/deps/rest_framework/serializers.py#L1600-L1613
mypaint/mypaint
90b36dbc7b8bd2f323383f7edf608a5e0a3a1a33
lib/layer/data.py
python
SimplePaintingLayer.flood_fill
(self, fill_args, dst_layer=None)
return self._surface.flood_fill(fill_args, dst=dst_layer._surface)
Fills a point on the surface with a color :param fill_args: Parameters common to all fill calls :type fill_args: lib.floodfill.FloodFillArguments :param dst_layer: Optional target layer (default is self!) :type dst_layer: StrokemappedPaintingLayer The `tolerance` parameter controls how much pixels are permitted to vary from the starting (target) color. This is calculated based on the rgba channel with the largest difference to the corresponding channel of the starting color, scaled to a number in [0,1] and also determines the alpha of filled pixels. The default target layer is `self`. This method invalidates the filled area of the target layer's surface, queueing a redraw if it is part of a visible document.
Fills a point on the surface with a color
[ "Fills", "a", "point", "on", "the", "surface", "with", "a", "color" ]
def flood_fill(self, fill_args, dst_layer=None): """Fills a point on the surface with a color :param fill_args: Parameters common to all fill calls :type fill_args: lib.floodfill.FloodFillArguments :param dst_layer: Optional target layer (default is self!) :type dst_layer: StrokemappedPaintingLayer The `tolerance` parameter controls how much pixels are permitted to vary from the starting (target) color. This is calculated based on the rgba channel with the largest difference to the corresponding channel of the starting color, scaled to a number in [0,1] and also determines the alpha of filled pixels. The default target layer is `self`. This method invalidates the filled area of the target layer's surface, queueing a redraw if it is part of a visible document. """ if dst_layer is None: dst_layer = self dst_layer.autosave_dirty = True # XXX hmm, not working? return self._surface.flood_fill(fill_args, dst=dst_layer._surface)
[ "def", "flood_fill", "(", "self", ",", "fill_args", ",", "dst_layer", "=", "None", ")", ":", "if", "dst_layer", "is", "None", ":", "dst_layer", "=", "self", "dst_layer", ".", "autosave_dirty", "=", "True", "# XXX hmm, not working?", "return", "self", ".", "_surface", ".", "flood_fill", "(", "fill_args", ",", "dst", "=", "dst_layer", ".", "_surface", ")" ]
https://github.com/mypaint/mypaint/blob/90b36dbc7b8bd2f323383f7edf608a5e0a3a1a33/lib/layer/data.py#L1323-L1344
nteract/scrapbook
3c74e63f7df99cca3148182454797792aede4b9b
scrapbook/encoders.py
python
TextEncoder.encode
(self, scrap, **kwargs)
return scrap
[]
def encode(self, scrap, **kwargs): if not isinstance(scrap.data, six.string_types): # TODO: set encoder information to save as encoding scrap = scrap._replace(data=str(scrap.data)) return scrap
[ "def", "encode", "(", "self", ",", "scrap", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "scrap", ".", "data", ",", "six", ".", "string_types", ")", ":", "# TODO: set encoder information to save as encoding", "scrap", "=", "scrap", ".", "_replace", "(", "data", "=", "str", "(", "scrap", ".", "data", ")", ")", "return", "scrap" ]
https://github.com/nteract/scrapbook/blob/3c74e63f7df99cca3148182454797792aede4b9b/scrapbook/encoders.py#L170-L174
chartbeat-labs/textacy
40cd12fe953ef8be5958cff93ad8762262f3b757
src/textacy/lang_id/lang_identifier.py
python
LangIdentifier.save_model
(self)
Save trained :attr:`LangIdentifier.model` to disk, as bytes.
Save trained :attr:`LangIdentifier.model` to disk, as bytes.
[ "Save", "trained", ":", "attr", ":", "LangIdentifier", ".", "model", "to", "disk", "as", "bytes", "." ]
def save_model(self): """Save trained :attr:`LangIdentifier.model` to disk, as bytes.""" LOGGER.info("saving LangIdentifier model to %s", self.model_fpath) self.model.to_disk(self.model_fpath)
[ "def", "save_model", "(", "self", ")", ":", "LOGGER", ".", "info", "(", "\"saving LangIdentifier model to %s\"", ",", "self", ".", "model_fpath", ")", "self", ".", "model", ".", "to_disk", "(", "self", ".", "model_fpath", ")" ]
https://github.com/chartbeat-labs/textacy/blob/40cd12fe953ef8be5958cff93ad8762262f3b757/src/textacy/lang_id/lang_identifier.py#L112-L115
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/hunterdouglas_powerview/cover.py
python
PowerViewShade.async_set_cover_position
(self, **kwargs)
Move the shade to a specific position.
Move the shade to a specific position.
[ "Move", "the", "shade", "to", "a", "specific", "position", "." ]
async def async_set_cover_position(self, **kwargs): """Move the shade to a specific position.""" if ATTR_POSITION not in kwargs: return await self._async_move(kwargs[ATTR_POSITION])
[ "async", "def", "async_set_cover_position", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "ATTR_POSITION", "not", "in", "kwargs", ":", "return", "await", "self", ".", "_async_move", "(", "kwargs", "[", "ATTR_POSITION", "]", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/hunterdouglas_powerview/cover.py#L174-L178
zedshaw/lamson
8a8ad546ea746b129fa5f069bf9278f87d01473a
examples/librelist/app/handlers/admin.py
python
CONFIRMING_SUBSCRIBE
(message, list_name=None, id_number=None, host=None)
[]
def CONFIRMING_SUBSCRIBE(message, list_name=None, id_number=None, host=None): original = CONFIRM.verify(list_name, message['from'], id_number) if original: mailinglist.add_subscriber(message['from'], list_name) msg = view.respond(locals(), "mail/subscribed.msg", From="noreply@%(host)s", To=message['from'], Subject="Welcome to %(list_name)s list.") relay.deliver(msg) CONFIRM.cancel(list_name, message['from'], id_number) return POSTING else: logging.warning("Invalid confirm from %s", message['from']) return CONFIRMING_SUBSCRIBE
[ "def", "CONFIRMING_SUBSCRIBE", "(", "message", ",", "list_name", "=", "None", ",", "id_number", "=", "None", ",", "host", "=", "None", ")", ":", "original", "=", "CONFIRM", ".", "verify", "(", "list_name", ",", "message", "[", "'from'", "]", ",", "id_number", ")", "if", "original", ":", "mailinglist", ".", "add_subscriber", "(", "message", "[", "'from'", "]", ",", "list_name", ")", "msg", "=", "view", ".", "respond", "(", "locals", "(", ")", ",", "\"mail/subscribed.msg\"", ",", "From", "=", "\"noreply@%(host)s\"", ",", "To", "=", "message", "[", "'from'", "]", ",", "Subject", "=", "\"Welcome to %(list_name)s list.\"", ")", "relay", ".", "deliver", "(", "msg", ")", "CONFIRM", ".", "cancel", "(", "list_name", ",", "message", "[", "'from'", "]", ",", "id_number", ")", "return", "POSTING", "else", ":", "logging", ".", "warning", "(", "\"Invalid confirm from %s\"", ",", "message", "[", "'from'", "]", ")", "return", "CONFIRMING_SUBSCRIBE" ]
https://github.com/zedshaw/lamson/blob/8a8ad546ea746b129fa5f069bf9278f87d01473a/examples/librelist/app/handlers/admin.py#L72-L89
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/tkinter/__init__.py
python
PanedWindow.__init__
(self, master=None, cnf={}, **kw)
Construct a panedwindow widget with the parent MASTER. STANDARD OPTIONS background, borderwidth, cursor, height, orient, relief, width WIDGET-SPECIFIC OPTIONS handlepad, handlesize, opaqueresize, sashcursor, sashpad, sashrelief, sashwidth, showhandle,
Construct a panedwindow widget with the parent MASTER.
[ "Construct", "a", "panedwindow", "widget", "with", "the", "parent", "MASTER", "." ]
def __init__(self, master=None, cnf={}, **kw): """Construct a panedwindow widget with the parent MASTER. STANDARD OPTIONS background, borderwidth, cursor, height, orient, relief, width WIDGET-SPECIFIC OPTIONS handlepad, handlesize, opaqueresize, sashcursor, sashpad, sashrelief, sashwidth, showhandle, """ Widget.__init__(self, master, 'panedwindow', cnf, kw)
[ "def", "__init__", "(", "self", ",", "master", "=", "None", ",", "cnf", "=", "{", "}", ",", "*", "*", "kw", ")", ":", "Widget", ".", "__init__", "(", "self", ",", "master", ",", "'panedwindow'", ",", "cnf", ",", "kw", ")" ]
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/tkinter/__init__.py#L3799-L3813
pwnieexpress/pwn_plug_sources
1a23324f5dc2c3de20f9c810269b6a29b2758cad
src/voiper/sulley/impacket/nmb.py
python
NBResourceRecord.get_unit_id
(self)
return self.unit_id
[]
def get_unit_id(self): return self.unit_id
[ "def", "get_unit_id", "(", "self", ")", ":", "return", "self", ".", "unit_id" ]
https://github.com/pwnieexpress/pwn_plug_sources/blob/1a23324f5dc2c3de20f9c810269b6a29b2758cad/src/voiper/sulley/impacket/nmb.py#L187-L188
Gallopsled/pwntools
1573957cc8b1957399b7cc9bfae0c6f80630d5d4
pwnlib/filesystem/ssh.py
python
SSHPath.absolute
(self)
return self._new(os.path.join(self.ssh.cwd, path))
Return the absolute path to a file, preserving e.g. "../". The current working directory is determined via the :class:`.ssh` member :attr:`.ssh.cwd`. Example: >>> f = SSHPath('absA/../absB/file', ssh=ssh_conn) >>> f.absolute().path # doctest: +ELLIPSIS '/.../absB/file'
Return the absolute path to a file, preserving e.g. "../". The current working directory is determined via the :class:`.ssh` member :attr:`.ssh.cwd`.
[ "Return", "the", "absolute", "path", "to", "a", "file", "preserving", "e", ".", "g", ".", "..", "/", ".", "The", "current", "working", "directory", "is", "determined", "via", "the", ":", "class", ":", ".", "ssh", "member", ":", "attr", ":", ".", "ssh", ".", "cwd", "." ]
def absolute(self): """Return the absolute path to a file, preserving e.g. "../". The current working directory is determined via the :class:`.ssh` member :attr:`.ssh.cwd`. Example: >>> f = SSHPath('absA/../absB/file', ssh=ssh_conn) >>> f.absolute().path # doctest: +ELLIPSIS '/.../absB/file' """ path = os.path.normpath(self.path) if self.is_absolute(): return self._new(path) return self._new(os.path.join(self.ssh.cwd, path))
[ "def", "absolute", "(", "self", ")", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "self", ".", "path", ")", "if", "self", ".", "is_absolute", "(", ")", ":", "return", "self", ".", "_new", "(", "path", ")", "return", "self", ".", "_new", "(", "os", ".", "path", ".", "join", "(", "self", ".", "ssh", ".", "cwd", ",", "path", ")", ")" ]
https://github.com/Gallopsled/pwntools/blob/1573957cc8b1957399b7cc9bfae0c6f80630d5d4/pwnlib/filesystem/ssh.py#L355-L371
clinton-hall/nzbToMedia
27669389216902d1085660167e7bda0bd8527ecf
libs/common/rarfile.py
python
rar3_s2k
(psw, salt)
return key_le, iv
String-to-key hash for RAR3.
String-to-key hash for RAR3.
[ "String", "-", "to", "-", "key", "hash", "for", "RAR3", "." ]
def rar3_s2k(psw, salt): """String-to-key hash for RAR3. """ if not isinstance(psw, unicode): psw = psw.decode('utf8') seed = psw.encode('utf-16le') + salt iv = EMPTY h = sha1() for i in range(16): for j in range(0x4000): cnt = S_LONG.pack(i * 0x4000 + j) h.update(seed + cnt[:3]) if j == 0: iv += h.digest()[19:20] key_be = h.digest()[:16] key_le = pack("<LLLL", *unpack(">LLLL", key_be)) return key_le, iv
[ "def", "rar3_s2k", "(", "psw", ",", "salt", ")", ":", "if", "not", "isinstance", "(", "psw", ",", "unicode", ")", ":", "psw", "=", "psw", ".", "decode", "(", "'utf8'", ")", "seed", "=", "psw", ".", "encode", "(", "'utf-16le'", ")", "+", "salt", "iv", "=", "EMPTY", "h", "=", "sha1", "(", ")", "for", "i", "in", "range", "(", "16", ")", ":", "for", "j", "in", "range", "(", "0x4000", ")", ":", "cnt", "=", "S_LONG", ".", "pack", "(", "i", "*", "0x4000", "+", "j", ")", "h", ".", "update", "(", "seed", "+", "cnt", "[", ":", "3", "]", ")", "if", "j", "==", "0", ":", "iv", "+=", "h", ".", "digest", "(", ")", "[", "19", ":", "20", "]", "key_be", "=", "h", ".", "digest", "(", ")", "[", ":", "16", "]", "key_le", "=", "pack", "(", "\"<LLLL\"", ",", "*", "unpack", "(", "\">LLLL\"", ",", "key_be", ")", ")", "return", "key_le", ",", "iv" ]
https://github.com/clinton-hall/nzbToMedia/blob/27669389216902d1085660167e7bda0bd8527ecf/libs/common/rarfile.py#L2684-L2700
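The record above derives a RAR3 AES key and IV from a password and salt via 2^18 chained SHA-1 updates. The sketch below mirrors that loop as self-contained Python 3 using hashlib and struct; the sample password and all-zero salt are illustrative assumptions, and rarfile's S_LONG is replaced by an explicit '<L' struct format.

# Python 3 sketch of the RAR3 string-to-key derivation mirrored from the record above.
from hashlib import sha1
from struct import pack, unpack

def rar3_s2k_sketch(psw: str, salt: bytes):
    """Return (key_le, iv): 16-byte AES key (byte-swapped per 32-bit word) and 16-byte IV."""
    seed = psw.encode("utf-16le") + salt
    iv = b""
    h = sha1()
    for i in range(16):
        for j in range(0x4000):
            cnt = pack("<L", i * 0x4000 + j)
            h.update(seed + cnt[:3])       # only the low three counter bytes are hashed
            if j == 0:
                iv += h.digest()[19:20]    # one IV byte per outer round
    key_be = h.digest()[:16]
    key_le = pack("<LLLL", *unpack(">LLLL", key_be))  # swap endianness of each 32-bit word
    return key_le, iv

key, iv = rar3_s2k_sketch("password", bytes(8))  # illustrative inputs
print(key.hex(), iv.hex())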
lgalke/vec4ir
69cbacc8eb574563fc1910b9e033826ffd639ca2
vec4ir/bm25.py
python
VectorizerMixin._char_wb_ngrams
(self, text_document)
return ngrams
Whitespace sensitive char-n-gram tokenization. Tokenize text_document into a sequence of character n-grams excluding any whitespace (operating only inside word boundaries)
Whitespace sensitive char-n-gram tokenization.
[ "Whitespace", "sensitive", "char", "-", "n", "-", "gram", "tokenization", "." ]
def _char_wb_ngrams(self, text_document): """Whitespace sensitive char-n-gram tokenization. Tokenize text_document into a sequence of character n-grams excluding any whitespace (operating only inside word boundaries)""" # normalize white spaces text_document = self._white_spaces.sub(" ", text_document) min_n, max_n = self.ngram_range ngrams = [] for w in text_document.split(): w = ' ' + w + ' ' w_len = len(w) for n in xrange(min_n, max_n + 1): offset = 0 ngrams.append(w[offset:offset + n]) while offset + n < w_len: offset += 1 ngrams.append(w[offset:offset + n]) if offset == 0: # count a short word (w_len < n) only once break return ngrams
[ "def", "_char_wb_ngrams", "(", "self", ",", "text_document", ")", ":", "# normalize white spaces", "text_document", "=", "self", ".", "_white_spaces", ".", "sub", "(", "\" \"", ",", "text_document", ")", "min_n", ",", "max_n", "=", "self", ".", "ngram_range", "ngrams", "=", "[", "]", "for", "w", "in", "text_document", ".", "split", "(", ")", ":", "w", "=", "' '", "+", "w", "+", "' '", "w_len", "=", "len", "(", "w", ")", "for", "n", "in", "xrange", "(", "min_n", ",", "max_n", "+", "1", ")", ":", "offset", "=", "0", "ngrams", ".", "append", "(", "w", "[", "offset", ":", "offset", "+", "n", "]", ")", "while", "offset", "+", "n", "<", "w_len", ":", "offset", "+=", "1", "ngrams", ".", "append", "(", "w", "[", "offset", ":", "offset", "+", "n", "]", ")", "if", "offset", "==", "0", ":", "# count a short word (w_len < n) only once", "break", "return", "ngrams" ]
https://github.com/lgalke/vec4ir/blob/69cbacc8eb574563fc1910b9e033826ffd639ca2/vec4ir/bm25.py#L152-L173
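A standalone sketch of the whitespace-bounded character n-gram idea in the record above: each word is padded with spaces and sliced so no n-gram crosses a word boundary. The sample text and ngram_range are illustrative, and the function name here is hypothetical.

# Sketch of whitespace-bounded char n-grams, mirroring the loop in the record above.
import re

def char_wb_ngrams(text, ngram_range=(2, 3)):
    text = re.sub(r"\s\s+", " ", text)     # normalize runs of whitespace
    min_n, max_n = ngram_range
    ngrams = []
    for w in text.split():
        w = " " + w + " "                  # pad so n-grams stay inside word boundaries
        w_len = len(w)
        for n in range(min_n, max_n + 1):
            offset = 0
            ngrams.append(w[offset:offset + n])
            while offset + n < w_len:
                offset += 1
                ngrams.append(w[offset:offset + n])
            if offset == 0:                # short word (w_len < n): count it only once
                break
    return ngrams

print(char_wb_ngrams("ab cd"))  # [' a', 'ab', 'b ', ' ab', 'ab ', ' c', 'cd', 'd ', ' cd', 'cd ']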
jankrepl/deepdow
eb6c85845c45f89e0743b8e8c29ddb69cb78da4f
deepdow/losses.py
python
Quantile.__repr__
(self)
return "{}(returns_channel={})".format(self.__class__.__name__, self.returns_channel)
Generate representation string.
Generate representation string.
[ "Generate", "representation", "string", "." ]
def __repr__(self): """Generate representation string.""" return "{}(returns_channel={})".format(self.__class__.__name__, self.returns_channel)
[ "def", "__repr__", "(", "self", ")", ":", "return", "\"{}(returns_channel={})\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "self", ".", "returns_channel", ")" ]
https://github.com/jankrepl/deepdow/blob/eb6c85845c45f89e0743b8e8c29ddb69cb78da4f/deepdow/losses.py#L648-L650
colour-science/colour
38782ac059e8ddd91939f3432bf06811c16667f0
colour/utilities/array.py
python
as_float
(a, dtype=None)
return dtype(a)
Converts given :math:`a` variable to *numeric* using given type. Parameters ---------- a : object Variable to convert. dtype : object Type to use for conversion, default to the type defined by the :attr:`colour.constant.DEFAULT_INT_DTYPE` attribute. In the event where :math:`a` cannot be converted, it is converted to *ndarray* using the type defined by :attr:`colour.constant.DEFAULT_FLOAT_DTYPE` attribute. Returns ------- ndarray :math:`a` variable converted to *numeric*. Warnings -------- The behaviour of this definition is different than :func:`colour.utilities.as_numeric` definition when it comes to conversion failure: the former will forcibly convert :math:`a` variable to *ndarray* using the type defined by :attr:`colour.constant.DEFAULT_FLOAT_DTYPE` attribute while the later will pass the :math:`a` variable as is. Examples -------- >>> as_float(np.array([1])) 1.0 >>> as_float(np.arange(10)) array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9.])
Converts given :math:`a` variable to *numeric* using given type.
[ "Converts", "given", ":", "math", ":", "a", "variable", "to", "*", "numeric", "*", "using", "given", "type", "." ]
def as_float(a, dtype=None): """ Converts given :math:`a` variable to *numeric* using given type. Parameters ---------- a : object Variable to convert. dtype : object Type to use for conversion, default to the type defined by the :attr:`colour.constant.DEFAULT_INT_DTYPE` attribute. In the event where :math:`a` cannot be converted, it is converted to *ndarray* using the type defined by :attr:`colour.constant.DEFAULT_FLOAT_DTYPE` attribute. Returns ------- ndarray :math:`a` variable converted to *numeric*. Warnings -------- The behaviour of this definition is different than :func:`colour.utilities.as_numeric` definition when it comes to conversion failure: the former will forcibly convert :math:`a` variable to *ndarray* using the type defined by :attr:`colour.constant.DEFAULT_FLOAT_DTYPE` attribute while the later will pass the :math:`a` variable as is. Examples -------- >>> as_float(np.array([1])) 1.0 >>> as_float(np.arange(10)) array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9.]) """ if dtype is None: dtype = DEFAULT_FLOAT_DTYPE attest( dtype in np.sctypes['float'], '"dtype" must be one of the following types: {0}'.format( np.sctypes['float'])) return dtype(a)
[ "def", "as_float", "(", "a", ",", "dtype", "=", "None", ")", ":", "if", "dtype", "is", "None", ":", "dtype", "=", "DEFAULT_FLOAT_DTYPE", "attest", "(", "dtype", "in", "np", ".", "sctypes", "[", "'float'", "]", ",", "'\"dtype\" must be one of the following types: {0}'", ".", "format", "(", "np", ".", "sctypes", "[", "'float'", "]", ")", ")", "return", "dtype", "(", "a", ")" ]
https://github.com/colour-science/colour/blob/38782ac059e8ddd91939f3432bf06811c16667f0/colour/utilities/array.py#L538-L581
geopython/pycsw
43a5c92fa819a3a3fdc8a8e3ef075d784dff73fc
pycsw/core/admin.py
python
get_sysprof
()
return '''pycsw system profile -------------------- Python version: %s os: %s SQLAlchemy: %s Shapely: %s lxml: %s libxml2: %s pyproj: %s OWSLib: %s''' % (sys.version_info, sys.platform, vsqlalchemy, vshapely, etree.__version__, etree.LIBXML_VERSION, vpyproj, vowslib)
Get versions of dependencies
Get versions of dependencies
[ "Get", "versions", "of", "dependencies" ]
def get_sysprof(): """Get versions of dependencies""" none = 'Module not found' try: import sqlalchemy vsqlalchemy = sqlalchemy.__version__ except ImportError: vsqlalchemy = none try: import pyproj vpyproj = pyproj.__version__ except ImportError: vpyproj = none try: import shapely try: vshapely = shapely.__version__ except AttributeError: import shapely.geos vshapely = shapely.geos.geos_capi_version except ImportError: vshapely = none try: import owslib try: vowslib = owslib.__version__ except AttributeError: vowslib = 'Module found, version not specified' except ImportError: vowslib = none return '''pycsw system profile -------------------- Python version: %s os: %s SQLAlchemy: %s Shapely: %s lxml: %s libxml2: %s pyproj: %s OWSLib: %s''' % (sys.version_info, sys.platform, vsqlalchemy, vshapely, etree.__version__, etree.LIBXML_VERSION, vpyproj, vowslib)
[ "def", "get_sysprof", "(", ")", ":", "none", "=", "'Module not found'", "try", ":", "import", "sqlalchemy", "vsqlalchemy", "=", "sqlalchemy", ".", "__version__", "except", "ImportError", ":", "vsqlalchemy", "=", "none", "try", ":", "import", "pyproj", "vpyproj", "=", "pyproj", ".", "__version__", "except", "ImportError", ":", "vpyproj", "=", "none", "try", ":", "import", "shapely", "try", ":", "vshapely", "=", "shapely", ".", "__version__", "except", "AttributeError", ":", "import", "shapely", ".", "geos", "vshapely", "=", "shapely", ".", "geos", ".", "geos_capi_version", "except", "ImportError", ":", "vshapely", "=", "none", "try", ":", "import", "owslib", "try", ":", "vowslib", "=", "owslib", ".", "__version__", "except", "AttributeError", ":", "vowslib", "=", "'Module found, version not specified'", "except", "ImportError", ":", "vowslib", "=", "none", "return", "'''pycsw system profile\n --------------------\n Python version: %s\n os: %s\n SQLAlchemy: %s\n Shapely: %s\n lxml: %s\n libxml2: %s\n pyproj: %s\n OWSLib: %s'''", "%", "(", "sys", ".", "version_info", ",", "sys", ".", "platform", ",", "vsqlalchemy", ",", "vshapely", ",", "etree", ".", "__version__", ",", "etree", ".", "LIBXML_VERSION", ",", "vpyproj", ",", "vowslib", ")" ]
https://github.com/geopython/pycsw/blob/43a5c92fa819a3a3fdc8a8e3ef075d784dff73fc/pycsw/core/admin.py#L577-L624
moinwiki/moin
568f223231aadecbd3b21a701ec02271f8d8021d
src/moin/converters/html_out.py
python
Converter.visit_moinpage_nowiki
(self, elem)
return self.new_copy(html.div, elem)
Avoid creation of a div used only for its data-lineno attrib.
Avoid creation of a div used only for its data-lineno attrib.
[ "Avoid", "creation", "of", "a", "div", "used", "only", "for", "its", "data", "-", "lineno", "attrib", "." ]
def visit_moinpage_nowiki(self, elem): """ Avoid creation of a div used only for its data-lineno attrib. """ if elem.attrib.get(html.data_lineno, None) and isinstance(elem[0][0], ET.Element): # {{{#!wiki\ntext\n}}} elem[0][0].attrib[html.data_lineno] = elem.attrib[html.data_lineno] elem[0][0].attrib[moin_page.class_] = elem[0][0].attrib.get(moin_page.class_, "") + " moin-nowiki" return self.do_children(elem) if elem.attrib.get(html.data_lineno, None) and isinstance(elem[0][0], str) and isinstance(elem[0], ET.Element): # {{{\ntext\n}}} OR {{{#!highlight python\ndef xx:\n}}} elem[0].attrib[html.data_lineno] = elem.attrib[html.data_lineno] elem[0].attrib[moin_page.class_] = elem[0].attrib.get(moin_page.class_, "") + " moin-nowiki" return self.do_children(elem) # {{{\n{{{{{\ntext\n}}}}}\n}}} # data_lineno not available, parent will have class=moin-nowiki return self.new_copy(html.div, elem)
[ "def", "visit_moinpage_nowiki", "(", "self", ",", "elem", ")", ":", "if", "elem", ".", "attrib", ".", "get", "(", "html", ".", "data_lineno", ",", "None", ")", "and", "isinstance", "(", "elem", "[", "0", "]", "[", "0", "]", ",", "ET", ".", "Element", ")", ":", "# {{{#!wiki\\ntext\\n}}}", "elem", "[", "0", "]", "[", "0", "]", ".", "attrib", "[", "html", ".", "data_lineno", "]", "=", "elem", ".", "attrib", "[", "html", ".", "data_lineno", "]", "elem", "[", "0", "]", "[", "0", "]", ".", "attrib", "[", "moin_page", ".", "class_", "]", "=", "elem", "[", "0", "]", "[", "0", "]", ".", "attrib", ".", "get", "(", "moin_page", ".", "class_", ",", "\"\"", ")", "+", "\" moin-nowiki\"", "return", "self", ".", "do_children", "(", "elem", ")", "if", "elem", ".", "attrib", ".", "get", "(", "html", ".", "data_lineno", ",", "None", ")", "and", "isinstance", "(", "elem", "[", "0", "]", "[", "0", "]", ",", "str", ")", "and", "isinstance", "(", "elem", "[", "0", "]", ",", "ET", ".", "Element", ")", ":", "# {{{\\ntext\\n}}} OR {{{#!highlight python\\ndef xx:\\n}}}", "elem", "[", "0", "]", ".", "attrib", "[", "html", ".", "data_lineno", "]", "=", "elem", ".", "attrib", "[", "html", ".", "data_lineno", "]", "elem", "[", "0", "]", ".", "attrib", "[", "moin_page", ".", "class_", "]", "=", "elem", "[", "0", "]", ".", "attrib", ".", "get", "(", "moin_page", ".", "class_", ",", "\"\"", ")", "+", "\" moin-nowiki\"", "return", "self", ".", "do_children", "(", "elem", ")", "# {{{\\n{{{{{\\ntext\\n}}}}}\\n}}} # data_lineno not available, parent will have class=moin-nowiki", "return", "self", ".", "new_copy", "(", "html", ".", "div", ",", "elem", ")" ]
https://github.com/moinwiki/moin/blob/568f223231aadecbd3b21a701ec02271f8d8021d/src/moin/converters/html_out.py#L247-L262
gnome-terminator/terminator
ca335e45eb1a4ea7c22fe0d515bb270e9a0e12a1
terminatorlib/factory.py
python
Factory.make_terminal
(self, **kwargs)
return(terminal.Terminal())
Make a Terminal
Make a Terminal
[ "Make", "a", "Terminal" ]
def make_terminal(self, **kwargs): """Make a Terminal""" from . import terminal return(terminal.Terminal())
[ "def", "make_terminal", "(", "self", ",", "*", "*", "kwargs", ")", ":", "from", ".", "import", "terminal", "return", "(", "terminal", ".", "Terminal", "(", ")", ")" ]
https://github.com/gnome-terminator/terminator/blob/ca335e45eb1a4ea7c22fe0d515bb270e9a0e12a1/terminatorlib/factory.py#L102-L105
Dman95/SASM
7e3ae6da1c219a68e26d38939338567e5c27151a
Windows/MinGW64/opt/lib/python2.7/sysconfig.py
python
_init_posix
(vars)
Initialize the module as appropriate for POSIX systems.
Initialize the module as appropriate for POSIX systems.
[ "Initialize", "the", "module", "as", "appropriate", "for", "POSIX", "systems", "." ]
def _init_posix(vars): """Initialize the module as appropriate for POSIX systems.""" # _sysconfigdata is generated at build time, see _generate_posix_vars() from _sysconfigdata import build_time_vars vars.update(build_time_vars)
[ "def", "_init_posix", "(", "vars", ")", ":", "# _sysconfigdata is generated at build time, see _generate_posix_vars()", "from", "_sysconfigdata", "import", "build_time_vars", "vars", ".", "update", "(", "build_time_vars", ")" ]
https://github.com/Dman95/SASM/blob/7e3ae6da1c219a68e26d38939338567e5c27151a/Windows/MinGW64/opt/lib/python2.7/sysconfig.py#L354-L358
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-50/fabmetheus_utilities/geometry/creation/heightmap.py
python
addHeightsByBitmap
(heights, textLines)
Add heights by bitmap.
Add heights by bitmap.
[ "Add", "heights", "by", "bitmap", "." ]
def addHeightsByBitmap(heights, textLines): 'Add heights by bitmap.' for line in textLines[3:]: for integerWord in line.split(): heights.append(float(integerWord))
[ "def", "addHeightsByBitmap", "(", "heights", ",", "textLines", ")", ":", "for", "line", "in", "textLines", "[", "3", ":", "]", ":", "for", "integerWord", "in", "line", ".", "split", "(", ")", ":", "heights", ".", "append", "(", "float", "(", "integerWord", ")", ")" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-50/fabmetheus_utilities/geometry/creation/heightmap.py#L32-L36
brython-dev/brython
9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3
www/src/Lib/pdb.py
python
Pdb.do_next
(self, arg)
return 1
n(ext) Continue execution until the next line in the current function is reached or it returns.
n(ext) Continue execution until the next line in the current function is reached or it returns.
[ "n", "(", "ext", ")", "Continue", "execution", "until", "the", "next", "line", "in", "the", "current", "function", "is", "reached", "or", "it", "returns", "." ]
def do_next(self, arg): """n(ext) Continue execution until the next line in the current function is reached or it returns. """ self.set_next(self.curframe) return 1
[ "def", "do_next", "(", "self", ",", "arg", ")", ":", "self", ".", "set_next", "(", "self", ".", "curframe", ")", "return", "1" ]
https://github.com/brython-dev/brython/blob/9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3/www/src/Lib/pdb.py#L1010-L1016
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/distutils/ccompiler.py
python
gen_lib_options
(compiler, library_dirs, runtime_library_dirs, libraries)
return lib_opts
Generate linker options for searching library directories and linking with specific libraries. 'libraries' and 'library_dirs' are, respectively, lists of library names (not filenames!) and search directories. Returns a list of command-line options suitable for use with some compiler (depending on the two format strings passed in).
Generate linker options for searching library directories and linking with specific libraries. 'libraries' and 'library_dirs' are, respectively, lists of library names (not filenames!) and search directories. Returns a list of command-line options suitable for use with some compiler (depending on the two format strings passed in).
[ "Generate", "linker", "options", "for", "searching", "library", "directories", "and", "linking", "with", "specific", "libraries", ".", "libraries", "and", "library_dirs", "are", "respectively", "lists", "of", "library", "names", "(", "not", "filenames!", ")", "and", "search", "directories", ".", "Returns", "a", "list", "of", "command", "-", "line", "options", "suitable", "for", "use", "with", "some", "compiler", "(", "depending", "on", "the", "two", "format", "strings", "passed", "in", ")", "." ]
def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): """Generate linker options for searching library directories and linking with specific libraries. 'libraries' and 'library_dirs' are, respectively, lists of library names (not filenames!) and search directories. Returns a list of command-line options suitable for use with some compiler (depending on the two format strings passed in). """ lib_opts = [] for dir in library_dirs: lib_opts.append(compiler.library_dir_option(dir)) for dir in runtime_library_dirs: opt = compiler.runtime_library_dir_option(dir) if isinstance(opt, list): lib_opts = lib_opts + opt else: lib_opts.append(opt) # XXX it's important that we *not* remove redundant library mentions! # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to # resolve all symbols. I just hope we never have to say "-lfoo obj.o # -lbar" to get things to work -- that's certainly a possibility, but a # pretty nasty way to arrange your C code. for lib in libraries: (lib_dir, lib_name) = os.path.split(lib) if lib_dir: lib_file = compiler.find_library_file([lib_dir], lib_name) if lib_file: lib_opts.append(lib_file) else: compiler.warn("no library file corresponding to " "'%s' found (skipping)" % lib) else: lib_opts.append(compiler.library_option (lib)) return lib_opts
[ "def", "gen_lib_options", "(", "compiler", ",", "library_dirs", ",", "runtime_library_dirs", ",", "libraries", ")", ":", "lib_opts", "=", "[", "]", "for", "dir", "in", "library_dirs", ":", "lib_opts", ".", "append", "(", "compiler", ".", "library_dir_option", "(", "dir", ")", ")", "for", "dir", "in", "runtime_library_dirs", ":", "opt", "=", "compiler", ".", "runtime_library_dir_option", "(", "dir", ")", "if", "isinstance", "(", "opt", ",", "list", ")", ":", "lib_opts", "=", "lib_opts", "+", "opt", "else", ":", "lib_opts", ".", "append", "(", "opt", ")", "# XXX it's important that we *not* remove redundant library mentions!", "# sometimes you really do have to say \"-lfoo -lbar -lfoo\" in order to", "# resolve all symbols. I just hope we never have to say \"-lfoo obj.o", "# -lbar\" to get things to work -- that's certainly a possibility, but a", "# pretty nasty way to arrange your C code.", "for", "lib", "in", "libraries", ":", "(", "lib_dir", ",", "lib_name", ")", "=", "os", ".", "path", ".", "split", "(", "lib", ")", "if", "lib_dir", ":", "lib_file", "=", "compiler", ".", "find_library_file", "(", "[", "lib_dir", "]", ",", "lib_name", ")", "if", "lib_file", ":", "lib_opts", ".", "append", "(", "lib_file", ")", "else", ":", "compiler", ".", "warn", "(", "\"no library file corresponding to \"", "\"'%s' found (skipping)\"", "%", "lib", ")", "else", ":", "lib_opts", ".", "append", "(", "compiler", ".", "library_option", "(", "lib", ")", ")", "return", "lib_opts" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/distutils/ccompiler.py#L1082-L1118
henkelis/sonospy
841f52010fd6e1e932d8f1a8896ad4e5a0667b8a
sonospy/brisa/upnp/soap.py
python
parse_soap_call
(data)
return method_name, args, kwargs, ns
Parses a soap call and returns a 4-tuple. @param data: raw soap XML call data @type data: string @return: 4-tuple (method_name, args, kwargs, namespace) @rtype: tuple
Parses a soap call and returns a 4-tuple.
[ "Parses", "a", "soap", "call", "and", "returns", "a", "4", "-", "tuple", "." ]
def parse_soap_call(data): """ Parses a soap call and returns a 4-tuple. @param data: raw soap XML call data @type data: string @return: 4-tuple (method_name, args, kwargs, namespace) @rtype: tuple """ log.debug(data) tree = parse_xml(data) body = tree.find('{http://schemas.xmlsoap.org/soap/envelope/}Body') method = body.getchildren()[0] method_name = method.tag ns = None if method_name.startswith('{') and method_name.rfind('}') > 1: ns, method_name = method_name[1:].split('}') # print "ns, method" # print ns # print method args = [] kwargs = {} for child in method.getchildren(): # print "child" # print child # print child.tag kwargs[child.tag] = __decode_result(child) args.append(kwargs[child.tag]) return method_name, args, kwargs, ns
[ "def", "parse_soap_call", "(", "data", ")", ":", "log", ".", "debug", "(", "data", ")", "tree", "=", "parse_xml", "(", "data", ")", "body", "=", "tree", ".", "find", "(", "'{http://schemas.xmlsoap.org/soap/envelope/}Body'", ")", "method", "=", "body", ".", "getchildren", "(", ")", "[", "0", "]", "method_name", "=", "method", ".", "tag", "ns", "=", "None", "if", "method_name", ".", "startswith", "(", "'{'", ")", "and", "method_name", ".", "rfind", "(", "'}'", ")", ">", "1", ":", "ns", ",", "method_name", "=", "method_name", "[", "1", ":", "]", ".", "split", "(", "'}'", ")", "# print \"ns, method\"", "# print ns", "# print method", "args", "=", "[", "]", "kwargs", "=", "{", "}", "for", "child", "in", "method", ".", "getchildren", "(", ")", ":", "# print \"child\"", "# print child", "# print child.tag", "kwargs", "[", "child", ".", "tag", "]", "=", "__decode_result", "(", "child", ")", "args", ".", "append", "(", "kwargs", "[", "child", ".", "tag", "]", ")", "return", "method_name", ",", "args", ",", "kwargs", ",", "ns" ]
https://github.com/henkelis/sonospy/blob/841f52010fd6e1e932d8f1a8896ad4e5a0667b8a/sonospy/brisa/upnp/soap.py#L413-L447
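A self-contained sketch of the parsing steps in the record above using xml.etree directly: find the SOAP Body, take its first child as the method element, split the namespace off its tag, and collect the child arguments. The sample envelope is an assumption, and arguments are decoded as plain text rather than through the record's __decode_result helper.

# Sketch: extract method name, namespace and arguments from a SOAP call envelope.
import xml.etree.ElementTree as ET

envelope = """<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">
  <s:Body>
    <u:SetVolume xmlns:u="urn:schemas-upnp-org:service:RenderingControl:1">
      <InstanceID>0</InstanceID>
      <Channel>Master</Channel>
      <DesiredVolume>25</DesiredVolume>
    </u:SetVolume>
  </s:Body>
</s:Envelope>"""

tree = ET.fromstring(envelope)
body = tree.find("{http://schemas.xmlsoap.org/soap/envelope/}Body")
method = list(body)[0]                       # first child of Body is the called method
ns, method_name = method.tag[1:].split("}")  # '{ns}name' -> ('ns', 'name')
kwargs = {child.tag: child.text for child in method}  # simplified: text-only decoding
print(method_name, ns)
print(kwargs)  # {'InstanceID': '0', 'Channel': 'Master', 'DesiredVolume': '25'}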
gramps-project/gramps
04d4651a43eb210192f40a9f8c2bad8ee8fa3753
gramps/gen/filters/_filterlist.py
python
FilterList.fix
(self, line)
return new_line.replace('"', '&quot;')
sanitize the custom filter name, if needed
sanitize the custom filter name, if needed
[ "sanitize", "the", "custom", "filter", "name", "if", "needed" ]
def fix(self, line): """ sanitize the custom filter name, if needed """ new_line = line.strip() new_line = new_line.replace('&', '&amp;') new_line = new_line.replace('>', '&gt;') new_line = new_line.replace('<', '&lt;') return new_line.replace('"', '&quot;')
[ "def", "fix", "(", "self", ",", "line", ")", ":", "new_line", "=", "line", ".", "strip", "(", ")", "new_line", "=", "new_line", ".", "replace", "(", "'&'", ",", "'&amp;'", ")", "new_line", "=", "new_line", ".", "replace", "(", "'>'", ",", "'&gt;'", ")", "new_line", "=", "new_line", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", "return", "new_line", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")" ]
https://github.com/gramps-project/gramps/blob/04d4651a43eb210192f40a9f8c2bad8ee8fa3753/gramps/gen/filters/_filterlist.py#L116-L122
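A small illustration of the sanitizer above, restated so it runs on its own; the sample filter name is an assumption. Escaping '&' first matters, otherwise the entities produced for '<', '>' and the double quote would themselves be re-escaped.

# Sketch of the escaping performed by FilterList.fix (order: & first, then >, <, ").
def fix(line):
    new_line = line.strip()
    new_line = new_line.replace('&', '&amp;')
    new_line = new_line.replace('>', '&gt;')
    new_line = new_line.replace('<', '&lt;')
    return new_line.replace('"', '&quot;')

print(fix(' People named "Smith & Jones" <custom> '))
# -> People named &quot;Smith &amp; Jones&quot; &lt;custom&gt;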
coreylynch/pyFM
0696c980993889a9429e4ab0b6c7dc8be6dac4de
pyfm/pylibfm.py
python
FM._bool_to_int
(self, bool_arg)
Map bool to int for cython
Map bool to int for cython
[ "Map", "bool", "to", "int", "for", "cython" ]
def _bool_to_int(self, bool_arg): """Map bool to int for cython""" if bool_arg == True: return 1 else: return 0
[ "def", "_bool_to_int", "(", "self", ",", "bool_arg", ")", ":", "if", "bool_arg", "==", "True", ":", "return", "1", "else", ":", "return", "0" ]
https://github.com/coreylynch/pyFM/blob/0696c980993889a9429e4ab0b6c7dc8be6dac4de/pyfm/pylibfm.py#L126-L131
slim1017/VaDE
eca52b4b57e32a68f578de7a54eda5d39907b0fe
training.py
python
standardize_input_data
(data, names, shapes=None, check_batch_dim=True, exception_prefix='')
return arrays
Users may pass data as a list of arrays, dictionary of arrays, or as a single array. We normalize this to an ordered list of arrays (same order as `names`), while checking that the provided arrays have shapes that match the network's expectations.
Users may pass data as a list of arrays, dictionary of arrays, or as a single array. We normalize this to an ordered list of arrays (same order as `names`), while checking that the provided arrays have shapes that match the network's expectations.
[ "Users", "may", "pass", "data", "as", "a", "list", "of", "arrays", "dictionary", "of", "arrays", "or", "as", "a", "single", "array", ".", "We", "normalize", "this", "to", "an", "ordered", "list", "of", "arrays", "(", "same", "order", "as", "names", ")", "while", "checking", "that", "the", "provided", "arrays", "have", "shapes", "that", "match", "the", "network", "s", "expectations", "." ]
def standardize_input_data(data, names, shapes=None, check_batch_dim=True, exception_prefix=''): '''Users may pass data as a list of arrays, dictionary of arrays, or as a single array. We normalize this to an ordered list of arrays (same order as `names`), while checking that the provided arrays have shapes that match the network's expectations. ''' if type(data) is dict: arrays = [] for name in names: if name not in data: raise Exception('No data provided for "' + name + '". Need data for each key in: ' + str(data.keys())) arrays.append(data[name]) elif type(data) is list: if len(data) != len(names): if len(data) > 0 and hasattr(data[0], 'shape'): raise Exception('Error when checking ' + exception_prefix + ': the list of Numpy arrays ' 'that you are passing to your model ' 'is not the size the model expected. ' 'Expected to see ' + str(len(names)) + ' arrays but instead got ' 'the following list of ' + str(len(data)) + ' arrays: ' + str(data)[:200] + '...') else: if len(names) == 1: data = [np.asarray(data)] else: raise Exception('Error when checking ' + exception_prefix + ': you are passing a list as ' 'input to your model, ' 'but the model expects ' 'a list of ' + str(len(names)) + ' Numpy arrays instead. ' 'The list you passed was: ' + str(data)[:200]) arrays = data else: if not hasattr(data, 'shape'): raise Exception('Error when checking ' + exception_prefix + ': data should be a Numpy array, ' 'or list/dict of Numpy arrays. ' 'Found: ' + str(data)[:200] + '...') if len(names) != 1: # case: model expects multiple inputs but only received # a single Numpy array raise Exception('The model expects ' + str(len(names)) + ' input arrays, but only received one array. ' 'Found: array with shape ' + str(data.shape)) arrays = [data] # make arrays at least 2D for i in range(len(names)): array = arrays[i] if len(array.shape) == 1: array = np.expand_dims(array, 1) arrays[i] = array # check shapes compatibility if shapes: for i in range(len(names)): if shapes[i] is None: continue array = arrays[i] if len(array.shape) != len(shapes[i]): raise Exception('Error when checking ' + exception_prefix + ': expected ' + names[i] + ' to have ' + str(len(shapes[i])) + ' dimensions, but got array with shape ' + str(array.shape)) for j, (dim, ref_dim) in enumerate(zip(array.shape, shapes[i])): if not j and not check_batch_dim: # skip the first axis continue if ref_dim: if ref_dim != dim: raise Exception('Error when checking ' + exception_prefix + ': expected ' + names[i] + ' to have shape ' + str(shapes[i]) + ' but got array with shape ' + str(array.shape)) return arrays
[ "def", "standardize_input_data", "(", "data", ",", "names", ",", "shapes", "=", "None", ",", "check_batch_dim", "=", "True", ",", "exception_prefix", "=", "''", ")", ":", "if", "type", "(", "data", ")", "is", "dict", ":", "arrays", "=", "[", "]", "for", "name", "in", "names", ":", "if", "name", "not", "in", "data", ":", "raise", "Exception", "(", "'No data provided for \"'", "+", "name", "+", "'\". Need data for each key in: '", "+", "str", "(", "data", ".", "keys", "(", ")", ")", ")", "arrays", ".", "append", "(", "data", "[", "name", "]", ")", "elif", "type", "(", "data", ")", "is", "list", ":", "if", "len", "(", "data", ")", "!=", "len", "(", "names", ")", ":", "if", "len", "(", "data", ")", ">", "0", "and", "hasattr", "(", "data", "[", "0", "]", ",", "'shape'", ")", ":", "raise", "Exception", "(", "'Error when checking '", "+", "exception_prefix", "+", "': the list of Numpy arrays '", "'that you are passing to your model '", "'is not the size the model expected. '", "'Expected to see '", "+", "str", "(", "len", "(", "names", ")", ")", "+", "' arrays but instead got '", "'the following list of '", "+", "str", "(", "len", "(", "data", ")", ")", "+", "' arrays: '", "+", "str", "(", "data", ")", "[", ":", "200", "]", "+", "'...'", ")", "else", ":", "if", "len", "(", "names", ")", "==", "1", ":", "data", "=", "[", "np", ".", "asarray", "(", "data", ")", "]", "else", ":", "raise", "Exception", "(", "'Error when checking '", "+", "exception_prefix", "+", "': you are passing a list as '", "'input to your model, '", "'but the model expects '", "'a list of '", "+", "str", "(", "len", "(", "names", ")", ")", "+", "' Numpy arrays instead. '", "'The list you passed was: '", "+", "str", "(", "data", ")", "[", ":", "200", "]", ")", "arrays", "=", "data", "else", ":", "if", "not", "hasattr", "(", "data", ",", "'shape'", ")", ":", "raise", "Exception", "(", "'Error when checking '", "+", "exception_prefix", "+", "': data should be a Numpy array, '", "'or list/dict of Numpy arrays. '", "'Found: '", "+", "str", "(", "data", ")", "[", ":", "200", "]", "+", "'...'", ")", "if", "len", "(", "names", ")", "!=", "1", ":", "# case: model expects multiple inputs but only received", "# a single Numpy array", "raise", "Exception", "(", "'The model expects '", "+", "str", "(", "len", "(", "names", ")", ")", "+", "' input arrays, but only received one array. 
'", "'Found: array with shape '", "+", "str", "(", "data", ".", "shape", ")", ")", "arrays", "=", "[", "data", "]", "# make arrays at least 2D", "for", "i", "in", "range", "(", "len", "(", "names", ")", ")", ":", "array", "=", "arrays", "[", "i", "]", "if", "len", "(", "array", ".", "shape", ")", "==", "1", ":", "array", "=", "np", ".", "expand_dims", "(", "array", ",", "1", ")", "arrays", "[", "i", "]", "=", "array", "# check shapes compatibility", "if", "shapes", ":", "for", "i", "in", "range", "(", "len", "(", "names", ")", ")", ":", "if", "shapes", "[", "i", "]", "is", "None", ":", "continue", "array", "=", "arrays", "[", "i", "]", "if", "len", "(", "array", ".", "shape", ")", "!=", "len", "(", "shapes", "[", "i", "]", ")", ":", "raise", "Exception", "(", "'Error when checking '", "+", "exception_prefix", "+", "': expected '", "+", "names", "[", "i", "]", "+", "' to have '", "+", "str", "(", "len", "(", "shapes", "[", "i", "]", ")", ")", "+", "' dimensions, but got array with shape '", "+", "str", "(", "array", ".", "shape", ")", ")", "for", "j", ",", "(", "dim", ",", "ref_dim", ")", "in", "enumerate", "(", "zip", "(", "array", ".", "shape", ",", "shapes", "[", "i", "]", ")", ")", ":", "if", "not", "j", "and", "not", "check_batch_dim", ":", "# skip the first axis", "continue", "if", "ref_dim", ":", "if", "ref_dim", "!=", "dim", ":", "raise", "Exception", "(", "'Error when checking '", "+", "exception_prefix", "+", "': expected '", "+", "names", "[", "i", "]", "+", "' to have shape '", "+", "str", "(", "shapes", "[", "i", "]", ")", "+", "' but got array with shape '", "+", "str", "(", "array", ".", "shape", ")", ")", "return", "arrays" ]
https://github.com/slim1017/VaDE/blob/eca52b4b57e32a68f578de7a54eda5d39907b0fe/training.py#L24-L109
Garvit244/Leetcode
a1d31ff0f9f251f3dd0bee5cc8b191b7ebbccc29
200-300q/297.py
python
Codec.deserialize
(self, data)
return buildTree(preorder)
Decodes your encoded data to tree. :type data: str :rtype: TreeNode
Decodes your encoded data to tree. :type data: str :rtype: TreeNode
[ "Decodes", "your", "encoded", "data", "to", "tree", ".", ":", "type", "data", ":", "str", ":", "rtype", ":", "TreeNode" ]
def deserialize(self, data): """Decodes your encoded data to tree. :type data: str :rtype: TreeNode """ def buildTree(preorder): value = preorder.pop(0) if value == '#': return None node = TreeNode(int(value)) node.left = buildTree(preorder) node.right = buildTree(preorder) return node preorder = data.split(',')[:-1] return buildTree(preorder)
[ "def", "deserialize", "(", "self", ",", "data", ")", ":", "def", "buildTree", "(", "preorder", ")", ":", "value", "=", "preorder", ".", "pop", "(", "0", ")", "if", "value", "==", "'#'", ":", "return", "None", "node", "=", "TreeNode", "(", "int", "(", "value", ")", ")", "node", ".", "left", "=", "buildTree", "(", "preorder", ")", "node", ".", "right", "=", "buildTree", "(", "preorder", ")", "return", "node", "preorder", "=", "data", ".", "split", "(", "','", ")", "[", ":", "-", "1", "]", "return", "buildTree", "(", "preorder", ")" ]
https://github.com/Garvit244/Leetcode/blob/a1d31ff0f9f251f3dd0bee5cc8b191b7ebbccc29/200-300q/297.py#L48-L66
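A self-contained sketch of the decoding in the record above: '#' marks a null child and the trailing comma is dropped by split(',')[:-1]. The TreeNode class matches the usual LeetCode definition and the sample serialized string is an assumption.

# Sketch: rebuild a binary tree from '1,2,#,#,3,4,#,#,#,' (preorder, '#' = null).
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

def buildTree(preorder):
    value = preorder.pop(0)
    if value == '#':
        return None
    node = TreeNode(int(value))
    node.left = buildTree(preorder)
    node.right = buildTree(preorder)
    return node

data = "1,2,#,#,3,4,#,#,#,"          # tree: 1 -> (2, 3), 3 -> (4, None)
root = buildTree(data.split(',')[:-1])
print(root.val, root.left.val, root.right.val, root.right.left.val)  # 1 2 3 4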
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/locale.py
python
_grouping_intervals
(grouping)
[]
def _grouping_intervals(grouping): last_interval = None for interval in grouping: # if grouping is -1, we are done if interval == CHAR_MAX: return # 0: re-use last group ad infinitum if interval == 0: if last_interval is None: raise ValueError("invalid grouping") while True: yield last_interval yield interval last_interval = interval
[ "def", "_grouping_intervals", "(", "grouping", ")", ":", "last_interval", "=", "None", "for", "interval", "in", "grouping", ":", "# if grouping is -1, we are done", "if", "interval", "==", "CHAR_MAX", ":", "return", "# 0: re-use last group ad infinitum", "if", "interval", "==", "0", ":", "if", "last_interval", "is", "None", ":", "raise", "ValueError", "(", "\"invalid grouping\"", ")", "while", "True", ":", "yield", "last_interval", "yield", "interval", "last_interval", "=", "interval" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/locale.py#L122-L135
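`_grouping_intervals` in the record above expands a locale "grouping" list into a stream of digit-group sizes: `CHAR_MAX` ends the stream and a trailing 0 repeats the previous interval forever. A small illustrative sketch follows; the helper is re-declared so the snippet runs on its own, and `CHAR_MAX = 127` plus the sample groupings are assumptions matching CPython's convention rather than values read from a real locale.

from itertools import islice

CHAR_MAX = 127  # terminator value used by CPython's locale module

def _grouping_intervals(grouping):
    last_interval = None
    for interval in grouping:
        # CHAR_MAX: no further grouping
        if interval == CHAR_MAX:
            return
        # 0: re-use the previous interval indefinitely
        if interval == 0:
            if last_interval is None:
                raise ValueError("invalid grouping")
            while True:
                yield last_interval
        yield interval
        last_interval = interval

# Western style 1,234,567: groups of three, repeated
print(list(islice(_grouping_intervals([3, 0]), 5)))     # [3, 3, 3, 3, 3]
# Indian style 12,34,567: one group of three, then twos repeated
print(list(islice(_grouping_intervals([3, 2, 0]), 5)))  # [3, 2, 2, 2, 2]
# [3, CHAR_MAX]: a single group of three, then no further separators
print(list(_grouping_intervals([3, CHAR_MAX])))         # [3]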
intel/fMBT
a221c55cd7b6367aa458781b134ae155aa47a71f
utils/fmbtwindows.py
python
_run
(command, expectedExitStatus=None)
return exitStatus, out, err
Execute command in child process, return status, stdout, stderr.
Execute command in child process, return status, stdout, stderr.
[ "Execute", "command", "in", "child", "process", "return", "status", "stdout", "stderr", "." ]
def _run(command, expectedExitStatus=None):
    """
    Execute command in child process, return status, stdout, stderr.
    """
    if type(command) == str:
        shell = True
    else:
        shell = False

    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             close_fds=_g_closeFds)
        if expectedExitStatus != None:
            out, err = p.communicate()
        else:
            out, err = ('', None)
    except Exception, e:
        class fakeProcess(object):
            pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)

    exitStatus = p.returncode

    if (expectedExitStatus != None and
        exitStatus != expectedExitStatus and
        exitStatus not in expectedExitStatus):
        msg = "Executing %s failed. Exit status: %s, expected %s" % (
            command, exitStatus, expectedExitStatus)
        _adapterLog("%s\n stdout: %s\n stderr: %s\n" % (msg, out, err))
        raise FMBTWindowsError(msg)

    return exitStatus, out, err
[ "def", "_run", "(", "command", ",", "expectedExitStatus", "=", "None", ")", ":", "if", "type", "(", "command", ")", "==", "str", ":", "shell", "=", "True", "else", ":", "shell", "=", "False", "try", ":", "p", "=", "subprocess", ".", "Popen", "(", "command", ",", "shell", "=", "shell", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "close_fds", "=", "_g_closeFds", ")", "if", "expectedExitStatus", "!=", "None", ":", "out", ",", "err", "=", "p", ".", "communicate", "(", ")", "else", ":", "out", ",", "err", "=", "(", "''", ",", "None", ")", "except", "Exception", ",", "e", ":", "class", "fakeProcess", "(", "object", ")", ":", "pass", "p", "=", "fakeProcess", "p", ".", "returncode", "=", "127", "out", ",", "err", "=", "(", "''", ",", "e", ")", "exitStatus", "=", "p", ".", "returncode", "if", "(", "expectedExitStatus", "!=", "None", "and", "exitStatus", "!=", "expectedExitStatus", "and", "exitStatus", "not", "in", "expectedExitStatus", ")", ":", "msg", "=", "\"Executing %s failed. Exit status: %s, expected %s\"", "%", "(", "command", ",", "exitStatus", ",", "expectedExitStatus", ")", "_adapterLog", "(", "\"%s\\n stdout: %s\\n stderr: %s\\n\"", "%", "(", "msg", ",", "out", ",", "err", ")", ")", "raise", "FMBTWindowsError", "(", "msg", ")", "return", "exitStatus", ",", "out", ",", "err" ]
https://github.com/intel/fMBT/blob/a221c55cd7b6367aa458781b134ae155aa47a71f/utils/fmbtwindows.py#L73-L107
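`_run` in the record above wraps `subprocess.Popen`: a string command goes through the shell, a list does not, and the exit status is checked against `expectedExitStatus` before the output is returned. Because the module-level pieces it relies on (`_g_closeFds`, `_adapterLog`, `FMBTWindowsError`) are not shown here, the sketch below is a simplified, standalone Python 3 rendering of the same pattern, not a call into fmbtwindows itself.

import subprocess
import sys

class CommandError(Exception):
    """Stand-in for the adapter-specific exception raised on a bad exit status."""

def run(command, expected_exit_status=None):
    """Run a command; string -> shell, list -> direct exec (same rule as _run)."""
    shell = isinstance(command, str)
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        exit_status = p.returncode
    except Exception as e:
        # mirror the fallback status used above when launching fails
        exit_status, out, err = 127, b'', e
    if expected_exit_status is not None:
        allowed = (expected_exit_status
                   if isinstance(expected_exit_status, (list, tuple, set))
                   else (expected_exit_status,))
        if exit_status not in allowed:
            raise CommandError("Executing %s failed. Exit status: %s, expected %s"
                               % (command, exit_status, expected_exit_status))
    return exit_status, out, err

status, out, err = run([sys.executable, "-c", "print('hello')"],
                       expected_exit_status=(0,))
assert status == 0 and out.strip() == b'hello'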
francisck/DanderSpritz_docs
86bb7caca5a957147f120b18bb5c31f299914904
Python/Core/Lib/idlelib/ColorDelegator.py
python
any
(name, alternates)
return '(?P<%s>' % name + '|'.join(alternates) + ')'
Return a named group pattern matching list of alternates.
Return a named group pattern matching list of alternates.
[ "Return", "a", "named", "group", "pattern", "matching", "list", "of", "alternates", "." ]
def any(name, alternates):
    """Return a named group pattern matching list of alternates."""
    return '(?P<%s>' % name + '|'.join(alternates) + ')'
[ "def", "any", "(", "name", ",", "alternates", ")", ":", "return", "'(?P<%s>'", "%", "name", "+", "'|'", ".", "join", "(", "alternates", ")", "+", "')'" ]
https://github.com/francisck/DanderSpritz_docs/blob/86bb7caca5a957147f120b18bb5c31f299914904/Python/Core/Lib/idlelib/ColorDelegator.py#L15-L17
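The `any` helper in the record above only builds a named alternation group; IDLE's colorizer then relies on `match.lastgroup` to see which syntactic category matched. A short usage sketch follows. The keyword list, the `\b` word-boundary wrapping and the mini-colorizer loop are illustrative additions, and the builtin `any` is shadowed here only to mirror the record.

import re

def any(name, alternates):
    """Return a named group pattern matching list of alternates."""
    return '(?P<%s>' % name + '|'.join(alternates) + ')'

# Illustrative mini-colorizer: tag keywords and comments in a line of code.
kw = r"\b" + any("KEYWORD", ["def", "return", "class"]) + r"\b"
comment = any("COMMENT", [r"#[^\n]*"])
prog = re.compile(kw + "|" + comment)

for m in prog.finditer("def f(x): return x  # identity"):
    print(m.lastgroup, repr(m.group()))
# KEYWORD 'def'
# KEYWORD 'return'
# COMMENT '# identity'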
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/docutils-0.14/docutils/utils/math/math2html.py
python
NumberGenerator.getdependentcounter
(self, type, master)
return self.counters[type]
Get (or create) a counter of the given type that depends on another.
Get (or create) a counter of the given type that depends on another.
[ "Get", "(", "or", "create", ")", "a", "counter", "of", "the", "given", "type", "that", "depends", "on", "another", "." ]
def getdependentcounter(self, type, master):
        "Get (or create) a counter of the given type that depends on another."
        if not type in self.counters or not self.counters[type].master:
            self.counters[type] = self.createdependent(type, master)
        return self.counters[type]
[ "def", "getdependentcounter", "(", "self", ",", "type", ",", "master", ")", ":", "if", "not", "type", "in", "self", ".", "counters", "or", "not", "self", ".", "counters", "[", "type", "]", ".", "master", ":", "self", ".", "counters", "[", "type", "]", "=", "self", ".", "createdependent", "(", "type", ",", "master", ")", "return", "self", ".", "counters", "[", "type", "]" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/docutils-0.14/docutils/utils/math/math2html.py#L3376-L3380
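`getdependentcounter` in the record above lazily creates a counter tied to a "master" counter, the scheme math2html uses for hierarchical numbering (for example, a section counter that restarts whenever its chapter counter advances). The sketch below is not the real `NumberGenerator`; it is a minimal reconstruction of the dependent-counter idea with invented class and method names, shown only to make the caching-and-dependency logic concrete.

class Counter(object):
    """A counter that can be chained to a master counter."""
    def __init__(self, name, master=None):
        self.name = name
        self.master = master
        self.value = 0
        self.dependents = []
        if master is not None:
            master.dependents.append(self)

    def increase(self):
        self.value += 1
        for dep in self.dependents:   # advancing a master resets its dependents
            dep.reset()
        return self

    def reset(self):
        self.value = 0
        for dep in self.dependents:
            dep.reset()

class NumberGenerator(object):
    """Minimal stand-in: cache counters by type, creating dependents on demand."""
    def __init__(self):
        self.counters = {}

    def getcounter(self, type):
        if type not in self.counters:
            self.counters[type] = Counter(type)
        return self.counters[type]

    def getdependentcounter(self, type, master):
        "Get (or create) a counter of the given type that depends on another."
        if type not in self.counters or not self.counters[type].master:
            self.counters[type] = Counter(type, master)
        return self.counters[type]

gen = NumberGenerator()
chapter = gen.getcounter('Chapter')
section = gen.getdependentcounter('Section', chapter)
chapter.increase(); section.increase(); section.increase()
print(chapter.value, section.value)   # 1 2
chapter.increase()                    # advancing the master resets the section
print(chapter.value, section.value)   # 2 0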