Dataset columns (one row per extracted Python function):

    column              type            values
    ------              ----            ------
    nwo                 stringlengths   5 to 106
    sha                 stringlengths   40 to 40
    path                stringlengths   4 to 174
    language            stringclasses   1 value (python)
    identifier          stringlengths   1 to 140
    parameters          stringlengths   0 to 87.7k
    argument_list       stringclasses   1 value
    return_statement    stringlengths   0 to 426k
    docstring           stringlengths   0 to 64.3k
    docstring_summary   stringlengths   0 to 26.3k
    docstring_tokens    list
    function            stringlengths   18 to 4.83M
    function_tokens     list
    url                 stringlengths   83 to 304
golismero/golismero
7d605b937e241f51c1ca4f47b20f755eeefb9d76
thirdparty_libs/dns/message.py
python
make_query
(qname, rdtype, rdclass = dns.rdataclass.IN, use_edns=None, want_dnssec=False, ednsflags=0, payload=1280, request_payload=None, options=None)
return m
Make a query message. The query name, type, and class may all be specified either as objects of the appropriate type, or as strings. The query will have a randomly chosen query id, and its DNS flags will be set to dns.flags.RD.

@param qname: The query name.
@type qname: dns.name.Name object or string
@param rdtype: The desired rdata type.
@type rdtype: int
@param rdclass: The desired rdata class; the default is class IN.
@type rdclass: int
@param use_edns: The EDNS level to use; the default is None (no EDNS). See the description of dns.message.Message.use_edns() for the possible values for use_edns and their meanings.
@type use_edns: int or bool or None
@param want_dnssec: Should the query indicate that DNSSEC is desired?
@type want_dnssec: bool
@param ednsflags: EDNS flag values.
@type ednsflags: int
@param payload: The EDNS sender's payload field, which is the maximum size of UDP datagram the sender can handle.
@type payload: int
@param request_payload: The EDNS payload size to use when sending this message. If not specified, defaults to the value of payload.
@type request_payload: int or None
@param options: The EDNS options
@type options: None or list of dns.edns.Option objects
@see: RFC 2671
@rtype: dns.message.Message object
Make a query message.
[ "Make", "a", "query", "message", "." ]
def make_query(qname, rdtype, rdclass=dns.rdataclass.IN, use_edns=None,
               want_dnssec=False, ednsflags=0, payload=1280,
               request_payload=None, options=None):
    """Make a query message.

    The query name, type, and class may all be specified either
    as objects of the appropriate type, or as strings.

    The query will have a randomly chosen query id, and its DNS flags
    will be set to dns.flags.RD.

    @param qname: The query name.
    @type qname: dns.name.Name object or string
    @param rdtype: The desired rdata type.
    @type rdtype: int
    @param rdclass: The desired rdata class; the default is class IN.
    @type rdclass: int
    @param use_edns: The EDNS level to use; the default is None (no EDNS).
    See the description of dns.message.Message.use_edns() for the possible
    values for use_edns and their meanings.
    @type use_edns: int or bool or None
    @param want_dnssec: Should the query indicate that DNSSEC is desired?
    @type want_dnssec: bool
    @param ednsflags: EDNS flag values.
    @type ednsflags: int
    @param payload: The EDNS sender's payload field, which is the maximum
    size of UDP datagram the sender can handle.
    @type payload: int
    @param request_payload: The EDNS payload size to use when sending this
    message.  If not specified, defaults to the value of payload.
    @type request_payload: int or None
    @param options: The EDNS options
    @type options: None or list of dns.edns.Option objects
    @see: RFC 2671
    @rtype: dns.message.Message object"""

    if isinstance(qname, (str, unicode)):
        qname = dns.name.from_text(qname)
    if isinstance(rdtype, (str, unicode)):
        rdtype = dns.rdatatype.from_text(rdtype)
    if isinstance(rdclass, (str, unicode)):
        rdclass = dns.rdataclass.from_text(rdclass)
    m = Message()
    m.flags |= dns.flags.RD
    m.find_rrset(m.question, qname, rdclass, rdtype, create=True,
                 force_unique=True)
    m.use_edns(use_edns, ednsflags, payload, request_payload, options)
    m.want_dnssec(want_dnssec)
    return m
[ "def", "make_query", "(", "qname", ",", "rdtype", ",", "rdclass", "=", "dns", ".", "rdataclass", ".", "IN", ",", "use_edns", "=", "None", ",", "want_dnssec", "=", "False", ",", "ednsflags", "=", "0", ",", "payload", "=", "1280", ",", "request_payload", "=", "None", ",", "options", "=", "None", ")", ":", "if", "isinstance", "(", "qname", ",", "(", "str", ",", "unicode", ")", ")", ":", "qname", "=", "dns", ".", "name", ".", "from_text", "(", "qname", ")", "if", "isinstance", "(", "rdtype", ",", "(", "str", ",", "unicode", ")", ")", ":", "rdtype", "=", "dns", ".", "rdatatype", ".", "from_text", "(", "rdtype", ")", "if", "isinstance", "(", "rdclass", ",", "(", "str", ",", "unicode", ")", ")", ":", "rdclass", "=", "dns", ".", "rdataclass", ".", "from_text", "(", "rdclass", ")", "m", "=", "Message", "(", ")", "m", ".", "flags", "|=", "dns", ".", "flags", ".", "RD", "m", ".", "find_rrset", "(", "m", ".", "question", ",", "qname", ",", "rdclass", ",", "rdtype", ",", "create", "=", "True", ",", "force_unique", "=", "True", ")", "m", ".", "use_edns", "(", "use_edns", ",", "ednsflags", ",", "payload", ",", "request_payload", ",", "options", ")", "m", ".", "want_dnssec", "(", "want_dnssec", ")", "return", "m" ]
https://github.com/golismero/golismero/blob/7d605b937e241f51c1ca4f47b20f755eeefb9d76/thirdparty_libs/dns/message.py#L1024-L1072
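A minimal usage sketch (assuming the upstream dnspython package this module is vendored from, importable as dns):

import dns.message
import dns.rdatatype

# Build a recursive query for the A records of example.com; qname and
# rdtype may be given as strings or as typed objects.
q = dns.message.make_query("example.com", dns.rdatatype.A)
print(q.to_text())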
aiidateam/aiida-core
c743a335480f8bb3a5e4ebd2463a31f9f3b9f9b2
aiida/orm/implementation/django/groups.py
python
DjangoGroup.__init__
(self, backend, label, user, description='', type_string='')
Construct a new Django group
Construct a new Django group
[ "Construct", "a", "new", "Django", "group" ]
def __init__(self, backend, label, user, description='', type_string=''):
    """Construct a new Django group"""
    type_check(user, users.DjangoUser)
    super().__init__(backend)
    self._dbmodel = utils.ModelWrapper(
        models.DbGroup(label=label, description=description,
                       user=user.dbmodel, type_string=type_string)
    )
[ "def", "__init__", "(", "self", ",", "backend", ",", "label", ",", "user", ",", "description", "=", "''", ",", "type_string", "=", "''", ")", ":", "type_check", "(", "user", ",", "users", ".", "DjangoUser", ")", "super", "(", ")", ".", "__init__", "(", "backend", ")", "self", ".", "_dbmodel", "=", "utils", ".", "ModelWrapper", "(", "models", ".", "DbGroup", "(", "label", "=", "label", ",", "description", "=", "description", ",", "user", "=", "user", ".", "dbmodel", ",", "type_string", "=", "type_string", ")", ")" ]
https://github.com/aiidateam/aiida-core/blob/c743a335480f8bb3a5e4ebd2463a31f9f3b9f9b2/aiida/orm/implementation/django/groups.py#L30-L37
1012598167/flask_mongodb_game
60c7e0351586656ec38f851592886338e50b4110
python_flask/venv/Lib/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/distlib/manifest.py
python
Manifest._glob_to_re
(self, pattern)
return pattern_re
Translate a shell-like glob pattern to a regular expression. Return a string containing the regex. Differs from 'fnmatch.translate()' in that '*' does not match "special characters" (which are platform-specific).
Translate a shell-like glob pattern to a regular expression.
[ "Translate", "a", "shell", "-", "like", "glob", "pattern", "to", "a", "regular", "expression", "." ]
def _glob_to_re(self, pattern):
    """Translate a shell-like glob pattern to a regular expression.

    Return a string containing the regex.  Differs from
    'fnmatch.translate()' in that '*' does not match "special characters"
    (which are platform-specific).
    """
    pattern_re = fnmatch.translate(pattern)

    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
    # and by extension they shouldn't match such "special characters" under
    # any OS. So change all non-escaped dots in the RE to match any
    # character except the special characters (currently: just os.sep).
    sep = os.sep
    if os.sep == '\\':
        # we're using a regex to manipulate a regex, so we need
        # to escape the backslash twice
        sep = r'\\\\'
    escaped = r'\1[^%s]' % sep
    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
    return pattern_re
[ "def", "_glob_to_re", "(", "self", ",", "pattern", ")", ":", "pattern_re", "=", "fnmatch", ".", "translate", "(", "pattern", ")", "# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which", "# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,", "# and by extension they shouldn't match such \"special characters\" under", "# any OS. So change all non-escaped dots in the RE to match any", "# character except the special characters (currently: just os.sep).", "sep", "=", "os", ".", "sep", "if", "os", ".", "sep", "==", "'\\\\'", ":", "# we're using a regex to manipulate a regex, so we need", "# to escape the backslash twice", "sep", "=", "r'\\\\\\\\'", "escaped", "=", "r'\\1[^%s]'", "%", "sep", "pattern_re", "=", "re", ".", "sub", "(", "r'((?<!\\\\)(\\\\\\\\)*)\\.'", ",", "escaped", ",", "pattern_re", ")", "return", "pattern_re" ]
https://github.com/1012598167/flask_mongodb_game/blob/60c7e0351586656ec38f851592886338e50b4110/python_flask/venv/Lib/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/distlib/manifest.py#L372-L393
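A short illustration of the effect (assuming a standalone distlib install; _glob_to_re is private, so this is for demonstration only):

from distlib.manifest import Manifest

m = Manifest()
# '*' compiles to a regex atom that excludes os.sep, so the glob stays
# within a single path component, unlike plain fnmatch.translate().
print(m._glob_to_re("src/*.py"))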
picoCTF/picoCTF
ec33d05208b51b56760d8f72f4971ea70712bc3b
picoCTF-shell/hacksport/problem.py
python
Compiled.compiler_setup
(self)
Setup function for compiled challenges
Setup function for compiled challenges
[ "Setup", "function", "for", "compiled", "challenges" ]
def compiler_setup(self):
    """
    Setup function for compiled challenges
    """
    if self.program_name is None:
        raise Exception("Must specify program_name for compiled challenge.")

    if self.makefile is not None:
        execute(["make", "-f", self.makefile])
    elif len(self.compiler_sources) > 0:
        compile_cmd = [self.compiler] + self.compiler_flags + self.compiler_sources
        compile_cmd += ["-o", self.program_name]
        execute(compile_cmd)

    if not isinstance(self, Remote):
        # only add the setgid executable if Remote is not handling it
        self.compiled_files = [ExecutableFile(self.program_name)]
[ "def", "compiler_setup", "(", "self", ")", ":", "if", "self", ".", "program_name", "is", "None", ":", "raise", "Exception", "(", "\"Must specify program_name for compiled challenge.\"", ")", "if", "self", ".", "makefile", "is", "not", "None", ":", "execute", "(", "[", "\"make\"", ",", "\"-f\"", ",", "self", ".", "makefile", "]", ")", "elif", "len", "(", "self", ".", "compiler_sources", ")", ">", "0", ":", "compile_cmd", "=", "[", "self", ".", "compiler", "]", "+", "self", ".", "compiler_flags", "+", "self", ".", "compiler_sources", "compile_cmd", "+=", "[", "\"-o\"", ",", "self", ".", "program_name", "]", "execute", "(", "compile_cmd", ")", "if", "not", "isinstance", "(", "self", ",", "Remote", ")", ":", "# only add the setgid executable if Remote is not handling it", "self", ".", "compiled_files", "=", "[", "ExecutableFile", "(", "self", ".", "program_name", ")", "]" ]
https://github.com/picoCTF/picoCTF/blob/ec33d05208b51b56760d8f72f4971ea70712bc3b/picoCTF-shell/hacksport/problem.py#L176-L193
SheffieldML/GPy
bb1bc5088671f9316bc92a46d356734e34c2d5c0
GPy/util/warping_functions.py
python
IdentityFunction.fgrad_y
(self, y)
return np.ones(y.shape)
def fgrad_y(self, y):
    return np.ones(y.shape)
[ "def", "fgrad_y", "(", "self", ",", "y", ")", ":", "return", "np", ".", "ones", "(", "y", ".", "shape", ")" ]
https://github.com/SheffieldML/GPy/blob/bb1bc5088671f9316bc92a46d356734e34c2d5c0/GPy/util/warping_functions.py#L219-L220
ceph/teuthology
6fc2011361437a9dfe4e45b50de224392eed8abc
teuthology/suite/util.py
python
teuthology_schedule
(args, verbose, dry_run, log_prefix='')
Run teuthology-schedule to schedule individual jobs. If --dry-run has been passed but --verbose has been passed just once, don't actually run the command - only print what would be executed. If --dry-run has been passed and --verbose has been passed multiple times, do both.
Run teuthology-schedule to schedule individual jobs.
[ "Run", "teuthology", "-", "schedule", "to", "schedule", "individual", "jobs", "." ]
def teuthology_schedule(args, verbose, dry_run, log_prefix=''):
    """
    Run teuthology-schedule to schedule individual jobs.

    If --dry-run has been passed but --verbose has been passed just once,
    don't actually run the command - only print what would be executed.

    If --dry-run has been passed and --verbose has been passed multiple
    times, do both.
    """
    exec_path = os.path.join(
        os.path.dirname(sys.argv[0]),
        'teuthology-schedule')
    args.insert(0, exec_path)
    if dry_run:
        # Quote any individual args so that individual commands can be copied
        # and pasted in order to execute them individually.
        printable_args = []
        for item in args:
            if ' ' in item:
                printable_args.append("'%s'" % item)
            else:
                printable_args.append(item)
        log.info('{0}{1}'.format(
            log_prefix,
            ' '.join(printable_args),
        ))
    if not dry_run or (dry_run and verbose > 1):
        subprocess.check_call(args=args)
[ "def", "teuthology_schedule", "(", "args", ",", "verbose", ",", "dry_run", ",", "log_prefix", "=", "''", ")", ":", "exec_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "sys", ".", "argv", "[", "0", "]", ")", ",", "'teuthology-schedule'", ")", "args", ".", "insert", "(", "0", ",", "exec_path", ")", "if", "dry_run", ":", "# Quote any individual args so that individual commands can be copied", "# and pasted in order to execute them individually.", "printable_args", "=", "[", "]", "for", "item", "in", "args", ":", "if", "' '", "in", "item", ":", "printable_args", ".", "append", "(", "\"'%s'\"", "%", "item", ")", "else", ":", "printable_args", ".", "append", "(", "item", ")", "log", ".", "info", "(", "'{0}{1}'", ".", "format", "(", "log_prefix", ",", "' '", ".", "join", "(", "printable_args", ")", ",", ")", ")", "if", "not", "dry_run", "or", "(", "dry_run", "and", "verbose", ">", "1", ")", ":", "subprocess", ".", "check_call", "(", "args", "=", "args", ")" ]
https://github.com/ceph/teuthology/blob/6fc2011361437a9dfe4e45b50de224392eed8abc/teuthology/suite/util.py#L431-L459
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/curses/ascii.py
python
isupper
(c)
return 65 <= _ctoi(c) <= 90
def isupper(c):
    return 65 <= _ctoi(c) <= 90
[ "def", "isupper", "(", "c", ")", ":", "return", "65", "<=", "_ctoi", "(", "c", ")", "<=", "90" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/curses/ascii.py#L65-L65
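As in the stdlib curses.ascii module this file mirrors, the helper accepts either a one-character string or an integer code point; a small sketch:

from curses.ascii import isupper

assert isupper("A") and isupper(65)    # 65 is ord("A")
assert not isupper("a") and not isupper("0")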
tanghaibao/jcvi
5e720870c0928996f8b77a38208106ff0447ccb6
jcvi/utils/range.py
python
range_overlap
(a, b, ratio=False)
return ov
Returns whether two ranges overlap. Set ratio=True to return the overlap ratio over the shorter range of the two.

>>> range_overlap(("1", 30, 45), ("1", 41, 55))
5
>>> range_overlap(("1", 21, 45), ("1", 41, 75), ratio=True)
0.2
>>> range_overlap(("1", 30, 45), ("1", 15, 55))
16
>>> range_overlap(("1", 30, 45), ("1", 15, 55), ratio=True)
1.0
>>> range_overlap(("1", 30, 45), ("1", 57, 68))
0
>>> range_overlap(("1", 30, 45), ("2", 42, 55))
0
>>> range_overlap(("1", 30, 45), ("2", 42, 55), ratio=True)
0.0

Returns whether two ranges overlap. Set ratio=True to return the overlap ratio over the shorter range of the two.
[ "Returns", "whether", "two", "ranges", "overlap", ".", "Set", "percentage", "=", "True", "returns", "overlap", "ratio", "over", "the", "shorter", "range", "of", "the", "two", "." ]
def range_overlap(a, b, ratio=False):
    """
    Returns whether two ranges overlap. Set ratio=True to return the
    overlap ratio over the shorter range of the two.

    >>> range_overlap(("1", 30, 45), ("1", 41, 55))
    5
    >>> range_overlap(("1", 21, 45), ("1", 41, 75), ratio=True)
    0.2
    >>> range_overlap(("1", 30, 45), ("1", 15, 55))
    16
    >>> range_overlap(("1", 30, 45), ("1", 15, 55), ratio=True)
    1.0
    >>> range_overlap(("1", 30, 45), ("1", 57, 68))
    0
    >>> range_overlap(("1", 30, 45), ("2", 42, 55))
    0
    >>> range_overlap(("1", 30, 45), ("2", 42, 55), ratio=True)
    0.0
    """
    a_chr, a_min, a_max = a
    b_chr, b_min, b_max = b
    a_min, a_max = sorted((a_min, a_max))
    b_min, b_max = sorted((b_min, b_max))
    shorter = min((a_max - a_min), (b_max - b_min)) + 1
    # must be on the same chromosome
    if a_chr != b_chr:
        ov = 0
    else:
        ov = min(shorter, (a_max - b_min + 1), (b_max - a_min + 1))
        ov = max(ov, 0)
    if ratio:
        ov /= float(shorter)
    return ov
[ "def", "range_overlap", "(", "a", ",", "b", ",", "ratio", "=", "False", ")", ":", "a_chr", ",", "a_min", ",", "a_max", "=", "a", "b_chr", ",", "b_min", ",", "b_max", "=", "b", "a_min", ",", "a_max", "=", "sorted", "(", "(", "a_min", ",", "a_max", ")", ")", "b_min", ",", "b_max", "=", "sorted", "(", "(", "b_min", ",", "b_max", ")", ")", "shorter", "=", "min", "(", "(", "a_max", "-", "a_min", ")", ",", "(", "b_max", "-", "b_min", ")", ")", "+", "1", "# must be on the same chromosome", "if", "a_chr", "!=", "b_chr", ":", "ov", "=", "0", "else", ":", "ov", "=", "min", "(", "shorter", ",", "(", "a_max", "-", "b_min", "+", "1", ")", ",", "(", "b_max", "-", "a_min", "+", "1", ")", ")", "ov", "=", "max", "(", "ov", ",", "0", ")", "if", "ratio", ":", "ov", "/=", "float", "(", "shorter", ")", "return", "ov" ]
https://github.com/tanghaibao/jcvi/blob/5e720870c0928996f8b77a38208106ff0447ccb6/jcvi/utils/range.py#L79-L112
Azure/azure-linux-extensions
a42ef718c746abab2b3c6a21da87b29e76364558
OmsAgent/omsagent.py
python
install
()
return exit_code, output
Ensure that this VM distro and version are supported. Install the OMSAgent shell bundle, using retries. Note: install operation times out from WAAgent at 15 minutes, so do not wait longer.
Ensure that this VM distro and version are supported. Install the OMSAgent shell bundle, using retries. Note: install operation times out from WAAgent at 15 minutes, so do not wait longer.
[ "Ensure", "that", "this", "VM", "distro", "and", "version", "are", "supported", ".", "Install", "the", "OMSAgent", "shell", "bundle", "using", "retries", ".", "Note", ":", "install", "operation", "times", "out", "from", "WAAgent", "at", "15", "minutes", "so", "do", "not", "wait", "longer", "." ]
def install():
    """
    Ensure that this VM distro and version are supported.
    Install the OMSAgent shell bundle, using retries.
    Note: install operation times out from WAAgent at 15 minutes, so do not
    wait longer.
    """
    exit_if_vm_not_supported('Install')

    public_settings, protected_settings = get_settings()
    if public_settings is None:
        raise ParameterMissingException('Public configuration must be ' \
                                        'provided')
    workspaceId = public_settings.get('workspaceId')
    check_workspace_id(workspaceId)

    # Take the backup of the state for given workspace.
    restore_state(workspaceId)

    # In the case where a SCOM connection is already present, we should not
    # create conflicts by installing the OMSAgent packages
    stopOnMultipleConnections = public_settings.get('stopOnMultipleConnections')
    if (stopOnMultipleConnections is not None
            and stopOnMultipleConnections is True):
        detect_multiple_connections(workspaceId)

    package_directory = os.path.join(os.getcwd(), PackagesDirectory)
    bundle_path = os.path.join(package_directory, BundleFileName)

    os.chmod(bundle_path, 100)
    cmd = InstallCommandTemplate.format(bundle_path)
    hutil_log_info('Running command "{0}"'.format(cmd))

    # Retry, since install can fail due to concurrent package operations
    exit_code, output = run_command_with_retries_output(
        cmd, retries=15,
        retry_check=retry_if_dpkg_locked_or_curl_is_not_found,
        final_check=final_check_if_dpkg_locked)

    return exit_code, output
[ "def", "install", "(", ")", ":", "exit_if_vm_not_supported", "(", "'Install'", ")", "public_settings", ",", "protected_settings", "=", "get_settings", "(", ")", "if", "public_settings", "is", "None", ":", "raise", "ParameterMissingException", "(", "'Public configuration must be '", "'provided'", ")", "workspaceId", "=", "public_settings", ".", "get", "(", "'workspaceId'", ")", "check_workspace_id", "(", "workspaceId", ")", "# Take the backup of the state for given workspace.", "restore_state", "(", "workspaceId", ")", "# In the case where a SCOM connection is already present, we should not", "# create conflicts by installing the OMSAgent packages", "stopOnMultipleConnections", "=", "public_settings", ".", "get", "(", "'stopOnMultipleConnections'", ")", "if", "(", "stopOnMultipleConnections", "is", "not", "None", "and", "stopOnMultipleConnections", "is", "True", ")", ":", "detect_multiple_connections", "(", "workspaceId", ")", "package_directory", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "PackagesDirectory", ")", "bundle_path", "=", "os", ".", "path", ".", "join", "(", "package_directory", ",", "BundleFileName", ")", "os", ".", "chmod", "(", "bundle_path", ",", "100", ")", "cmd", "=", "InstallCommandTemplate", ".", "format", "(", "bundle_path", ")", "hutil_log_info", "(", "'Running command \"{0}\"'", ".", "format", "(", "cmd", ")", ")", "# Retry, since install can fail due to concurrent package operations", "exit_code", ",", "output", "=", "run_command_with_retries_output", "(", "cmd", ",", "retries", "=", "15", ",", "retry_check", "=", "retry_if_dpkg_locked_or_curl_is_not_found", ",", "final_check", "=", "final_check_if_dpkg_locked", ")", "return", "exit_code", ",", "output" ]
https://github.com/Azure/azure-linux-extensions/blob/a42ef718c746abab2b3c6a21da87b29e76364558/OmsAgent/omsagent.py#L445-L483
OpenCobolIDE/OpenCobolIDE
c78d0d335378e5fe0a5e74f53c19b68b55e85388
open_cobol_ide/view/dialogs/preferences.py
python
DlgPreferences._add_rel_lib_path
(self)
def _add_rel_lib_path(self):
    path, status = QtWidgets.QInputDialog.getText(
        self, 'Add relative library path', 'Path:')
    if status:
        self.listWidgetLibPaths.addItem(system.normpath(path))
[ "def", "_add_rel_lib_path", "(", "self", ")", ":", "path", ",", "status", "=", "QtWidgets", ".", "QInputDialog", ".", "getText", "(", "self", ",", "'Add relative library path'", ",", "'Path:'", ")", "if", "status", ":", "self", ".", "listWidgetLibPaths", ".", "addItem", "(", "system", ".", "normpath", "(", "path", ")", ")" ]
https://github.com/OpenCobolIDE/OpenCobolIDE/blob/c78d0d335378e5fe0a5e74f53c19b68b55e85388/open_cobol_ide/view/dialogs/preferences.py#L196-L200
marcosfede/algorithms
1ee7c815f9d556c9cef4d4b0d21ee3a409d21629
graph/tarjan.py
python
DirectedEdge.__eq__
(self, obj)
return False
def __eq__(self, obj):
    if isinstance(obj, DirectedEdge):
        return obj.nf == self.nf and obj.nt == self.nt
    return False
[ "def", "__eq__", "(", "self", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "DirectedEdge", ")", ":", "return", "obj", ".", "nf", "==", "self", ".", "nf", "and", "obj", ".", "nt", "==", "self", ".", "nt", "return", "False" ]
https://github.com/marcosfede/algorithms/blob/1ee7c815f9d556c9cef4d4b0d21ee3a409d21629/graph/tarjan.py#L31-L34
Dentosal/python-sc2
e816cce83772d1aee1291b86b300b69405aa96b4
sc2/bot_ai.py
python
BotAI.can_afford
(self, item_id: Union[UnitTypeId, UpgradeId, AbilityId], check_supply_cost: bool=True)
return CanAffordWrapper(cost.minerals <= self.minerals, cost.vespene <= self.vespene, enough_supply)
Tests if the player has enough resources to build a unit or cast an ability.
Tests if the player has enough resources to build a unit or cast an ability.
[ "Tests", "if", "the", "player", "has", "enough", "resources", "to", "build", "a", "unit", "or", "cast", "an", "ability", "." ]
def can_afford(self, item_id: Union[UnitTypeId, UpgradeId, AbilityId],
               check_supply_cost: bool = True) -> "CanAffordWrapper":
    """Tests if the player has enough resources to build a unit or cast an ability."""
    enough_supply = True
    if isinstance(item_id, UnitTypeId):
        unit = self._game_data.units[item_id.value]
        cost = self._game_data.calculate_ability_cost(unit.creation_ability)
        if check_supply_cost:
            enough_supply = self.can_feed(item_id)
    elif isinstance(item_id, UpgradeId):
        cost = self._game_data.upgrades[item_id.value].cost
    else:
        cost = self._game_data.calculate_ability_cost(item_id)

    return CanAffordWrapper(cost.minerals <= self.minerals,
                            cost.vespene <= self.vespene,
                            enough_supply)
[ "def", "can_afford", "(", "self", ",", "item_id", ":", "Union", "[", "UnitTypeId", ",", "UpgradeId", ",", "AbilityId", "]", ",", "check_supply_cost", ":", "bool", "=", "True", ")", "->", "\"CanAffordWrapper\"", ":", "enough_supply", "=", "True", "if", "isinstance", "(", "item_id", ",", "UnitTypeId", ")", ":", "unit", "=", "self", ".", "_game_data", ".", "units", "[", "item_id", ".", "value", "]", "cost", "=", "self", ".", "_game_data", ".", "calculate_ability_cost", "(", "unit", ".", "creation_ability", ")", "if", "check_supply_cost", ":", "enough_supply", "=", "self", ".", "can_feed", "(", "item_id", ")", "elif", "isinstance", "(", "item_id", ",", "UpgradeId", ")", ":", "cost", "=", "self", ".", "_game_data", ".", "upgrades", "[", "item_id", ".", "value", "]", ".", "cost", "else", ":", "cost", "=", "self", ".", "_game_data", ".", "calculate_ability_cost", "(", "item_id", ")", "return", "CanAffordWrapper", "(", "cost", ".", "minerals", "<=", "self", ".", "minerals", ",", "cost", ".", "vespene", "<=", "self", ".", "vespene", ",", "enough_supply", ")" ]
https://github.com/Dentosal/python-sc2/blob/e816cce83772d1aee1291b86b300b69405aa96b4/sc2/bot_ai.py#L263-L276
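A hypothetical sketch of how a bot might call this from its on_step coroutine; the returned CanAffordWrapper is truthy only when minerals, vespene, and (for units) supply all suffice:

from sc2 import BotAI
from sc2.ids.unit_typeid import UnitTypeId

class MarineBot(BotAI):  # hypothetical bot
    async def on_step(self, iteration):
        # Gate production on all resource checks at once.
        if self.can_afford(UnitTypeId.MARINE):
            pass  # issue a train/build order here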
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/attack/db/sqlmap/lib/core/common.py
python
safeExpandUser
(filepath)
return retVal
Patch for a Python Issue18171 (http://bugs.python.org/issue18171)
Patch for a Python Issue18171 (http://bugs.python.org/issue18171)
[ "Patch", "for", "a", "Python", "Issue18171", "(", "http", ":", "//", "bugs", ".", "python", ".", "org", "/", "issue18171", ")" ]
def safeExpandUser(filepath):
    """
    Patch for a Python Issue18171 (http://bugs.python.org/issue18171)
    """
    retVal = filepath

    try:
        retVal = os.path.expanduser(filepath)
    except UnicodeError:
        _ = locale.getdefaultlocale()
        encoding = _[1] if _ and len(_) > 1 else UNICODE_ENCODING
        retVal = getUnicode(os.path.expanduser(filepath.encode(encoding)),
                            encoding=encoding)

    return retVal
[ "def", "safeExpandUser", "(", "filepath", ")", ":", "retVal", "=", "filepath", "try", ":", "retVal", "=", "os", ".", "path", ".", "expanduser", "(", "filepath", ")", "except", "UnicodeError", ":", "_", "=", "locale", ".", "getdefaultlocale", "(", ")", "encoding", "=", "_", "[", "1", "]", "if", "_", "and", "len", "(", "_", ")", ">", "1", "else", "UNICODE_ENCODING", "retVal", "=", "getUnicode", "(", "os", ".", "path", ".", "expanduser", "(", "filepath", ".", "encode", "(", "encoding", ")", ")", ",", "encoding", "=", "encoding", ")", "return", "retVal" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/attack/db/sqlmap/lib/core/common.py#L1742-L1756
iiau-tracker/SPLT
a196e603798e9be969d9d985c087c11cad1cda43
lib/object_detection/core/preprocessor.py
python
_apply_with_random_selector_tuples
(x, func, num_cases)
return tuple(tuples)
Computes func(x, sel), with sel sampled from [0...num_cases-1].

Args:
  x: A tuple of input tensors.
  func: Python function to apply.
  num_cases: Python int32, number of cases to sample sel from.

Returns:
  The result of func(x, sel), where func receives the value of the selector as a python integer, but sel is sampled dynamically.
Computes func(x, sel), with sel sampled from [0...num_cases-1].
[ "Computes", "func", "(", "x", "sel", ")", "with", "sel", "sampled", "from", "[", "0", "...", "num_cases", "-", "1", "]", "." ]
def _apply_with_random_selector_tuples(x, func, num_cases):
    """Computes func(x, sel), with sel sampled from [0...num_cases-1].

    Args:
      x: A tuple of input tensors.
      func: Python function to apply.
      num_cases: Python int32, number of cases to sample sel from.

    Returns:
      The result of func(x, sel), where func receives the value of the
      selector as a python integer, but sel is sampled dynamically.
    """
    num_inputs = len(x)
    rand_sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32)
    # Pass the real x only to one of the func calls.
    tuples = [list() for t in x]
    for case in range(num_cases):
        new_x = [control_flow_ops.switch(t, tf.equal(rand_sel, case))[1] for t in x]
        output = func(tuple(new_x), case)
        for j in range(num_inputs):
            tuples[j].append(output[j])
    for i in range(num_inputs):
        tuples[i] = control_flow_ops.merge(tuples[i])[0]
    return tuple(tuples)
[ "def", "_apply_with_random_selector_tuples", "(", "x", ",", "func", ",", "num_cases", ")", ":", "num_inputs", "=", "len", "(", "x", ")", "rand_sel", "=", "tf", ".", "random_uniform", "(", "[", "]", ",", "maxval", "=", "num_cases", ",", "dtype", "=", "tf", ".", "int32", ")", "# Pass the real x only to one of the func calls.", "tuples", "=", "[", "list", "(", ")", "for", "t", "in", "x", "]", "for", "case", "in", "range", "(", "num_cases", ")", ":", "new_x", "=", "[", "control_flow_ops", ".", "switch", "(", "t", ",", "tf", ".", "equal", "(", "rand_sel", ",", "case", ")", ")", "[", "1", "]", "for", "t", "in", "x", "]", "output", "=", "func", "(", "tuple", "(", "new_x", ")", ",", "case", ")", "for", "j", "in", "range", "(", "num_inputs", ")", ":", "tuples", "[", "j", "]", ".", "append", "(", "output", "[", "j", "]", ")", "for", "i", "in", "range", "(", "num_inputs", ")", ":", "tuples", "[", "i", "]", "=", "control_flow_ops", ".", "merge", "(", "tuples", "[", "i", "]", ")", "[", "0", "]", "return", "tuple", "(", "tuples", ")" ]
https://github.com/iiau-tracker/SPLT/blob/a196e603798e9be969d9d985c087c11cad1cda43/lib/object_detection/core/preprocessor.py#L76-L101
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/sympy/polys/polyclasses.py
python
DMP.integrate
(f, m=1, j=0)
return f.per(dmp_integrate_in(f.rep, m, j, f.lev, f.dom))
Computes the ``m``-th order indefinite integral of ``f`` in ``x_j``.
Computes the ``m``-th order indefinite integral of ``f`` in ``x_j``.
[ "Computes", "the", "m", "-", "th", "order", "indefinite", "integral", "of", "f", "in", "x_j", "." ]
def integrate(f, m=1, j=0):
    """Computes the ``m``-th order indefinite integral of ``f`` in ``x_j``. """
    if not isinstance(m, int):
        raise TypeError("``int`` expected, got %s" % type(m))

    if not isinstance(j, int):
        raise TypeError("``int`` expected, got %s" % type(j))

    return f.per(dmp_integrate_in(f.rep, m, j, f.lev, f.dom))
[ "def", "integrate", "(", "f", ",", "m", "=", "1", ",", "j", "=", "0", ")", ":", "if", "not", "isinstance", "(", "m", ",", "int", ")", ":", "raise", "TypeError", "(", "\"``int`` expected, got %s\"", "%", "type", "(", "m", ")", ")", "if", "not", "isinstance", "(", "j", ",", "int", ")", ":", "raise", "TypeError", "(", "\"``int`` expected, got %s\"", "%", "type", "(", "j", ")", ")", "return", "f", ".", "per", "(", "dmp_integrate_in", "(", "f", ".", "rep", ",", "m", ",", "j", ",", "f", ".", "lev", ",", "f", ".", "dom", ")", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/polys/polyclasses.py#L592-L600
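Since DMP instances back Poly.rep, the easiest way to exercise this is through a Poly; a sketch over QQ so coefficient division stays exact:

from sympy import Poly
from sympy.abc import x

p = Poly(3*x**2 + 2*x + 1, x, domain='QQ')
print(p.integrate())         # Poly(x**3 + x**2 + x, x, domain='QQ')
print(p.rep.integrate(m=2))  # the underlying DMP, integrated twice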
elbayadm/attn2d
982653439dedc7306e484e00b3dfb90e2cd7c9e1
fairseq/data/concat_dataset.py
python
ConcatDataset.size
(self, idx: int)
return self.datasets[dataset_idx].size(sample_idx)
Return an example's size as a float or tuple.
Return an example's size as a float or tuple.
[ "Return", "an", "example", "s", "size", "as", "a", "float", "or", "tuple", "." ]
def size(self, idx: int):
    """
    Return an example's size as a float or tuple.
    """
    dataset_idx, sample_idx = self._get_dataset_and_sample_index(idx)
    return self.datasets[dataset_idx].size(sample_idx)
[ "def", "size", "(", "self", ",", "idx", ":", "int", ")", ":", "dataset_idx", ",", "sample_idx", "=", "self", ".", "_get_dataset_and_sample_index", "(", "idx", ")", "return", "self", ".", "datasets", "[", "dataset_idx", "]", ".", "size", "(", "sample_idx", ")" ]
https://github.com/elbayadm/attn2d/blob/982653439dedc7306e484e00b3dfb90e2cd7c9e1/fairseq/data/concat_dataset.py#L57-L62
dictation-toolbox/dragonfly
a2b8f8e8ed1182465b831205b9804323beea112a
dragonfly/engines/backend_kaldi/audio.py
python
AudioStore.finalize
(self, text, grammar_name, rule_name, likelihood=None, tag='', has_dictation=None)
Finalizes current utterance, creating its AudioStoreEntry and saving it (if enabled).
Finalizes current utterance, creating its AudioStoreEntry and saving it (if enabled).
[ "Finalizes", "current", "utterance", "creating", "its", "AudioStoreEntry", "and", "saving", "it", "(", "if", "enabled", ")", "." ]
def finalize(self, text, grammar_name, rule_name, likelihood=None, tag='',
             has_dictation=None):
    """ Finalizes current utterance, creating its AudioStoreEntry and saving it (if enabled). """
    entry = AudioStoreEntry(self.current_audio_data, grammar_name, rule_name,
                            text, likelihood, tag, has_dictation)
    if self.deque is not None:
        if len(self.deque) == self.deque.maxlen:
            self.save(-1)  # Save oldest, which is about to be evicted
        self.deque.appendleft(entry)
    self.blocks = []
[ "def", "finalize", "(", "self", ",", "text", ",", "grammar_name", ",", "rule_name", ",", "likelihood", "=", "None", ",", "tag", "=", "''", ",", "has_dictation", "=", "None", ")", ":", "entry", "=", "AudioStoreEntry", "(", "self", ".", "current_audio_data", ",", "grammar_name", ",", "rule_name", ",", "text", ",", "likelihood", ",", "tag", ",", "has_dictation", ")", "if", "self", ".", "deque", "is", "not", "None", ":", "if", "len", "(", "self", ".", "deque", ")", "==", "self", ".", "deque", ".", "maxlen", ":", "self", ".", "save", "(", "-", "1", ")", "# Save oldest, which is about to be evicted", "self", ".", "deque", ".", "appendleft", "(", "entry", ")", "self", ".", "blocks", "=", "[", "]" ]
https://github.com/dictation-toolbox/dragonfly/blob/a2b8f8e8ed1182465b831205b9804323beea112a/dragonfly/engines/backend_kaldi/audio.py#L362-L369
exaile/exaile
a7b58996c5c15b3aa7b9975ac13ee8f784ef4689
xlgui/cover.py
python
CoverWindow.available_image_height
(self)
return self.cover_window.get_size()[1] - tb_natural_height - sb_natural_height
Returns the available vertical space for the image
Returns the available vertical space for the image
[ "Returns", "the", "available", "vertical", "space", "for", "the", "image" ]
def available_image_height(self):
    """Returns the available vertical space for the image"""
    tb_min_height, tb_natural_height = self.toolbar.get_preferred_height()
    sb_min_height, sb_natural_height = self.statusbar.get_preferred_height()
    return self.cover_window.get_size()[1] - tb_natural_height - sb_natural_height
[ "def", "available_image_height", "(", "self", ")", ":", "tb_min_height", ",", "tb_natural_height", "=", "self", ".", "toolbar", ".", "get_preferred_height", "(", ")", "sb_min_height", ",", "sb_natural_height", "=", "self", ".", "statusbar", ".", "get_preferred_height", "(", ")", "return", "self", ".", "cover_window", ".", "get_size", "(", ")", "[", "1", "]", "-", "tb_natural_height", "-", "sb_natural_height" ]
https://github.com/exaile/exaile/blob/a7b58996c5c15b3aa7b9975ac13ee8f784ef4689/xlgui/cover.py#L844-L849
geopython/pywps
7f228ff17594912664073a629b2c2ed9d4f5f615
pywps/validator/complexvalidator.py
python
validateshapefile
(data_input, mode)
return passed
ESRI Shapefile validation example
ESRI Shapefile validation example
[ "ESRI", "Shapefile", "validation", "example" ]
def validateshapefile(data_input, mode):
    """ESRI Shapefile validation example
    """
    LOGGER.info('validating Shapefile; Mode: {}'.format(mode))
    passed = False

    if mode >= MODE.NONE:
        passed = True

    if mode >= MODE.SIMPLE:
        name = data_input.file
        (mtype, encoding) = mimetypes.guess_type(name, strict=False)
        passed = data_input.data_format.mime_type in {mtype, FORMATS.SHP.mime_type}

    if mode >= MODE.STRICT:
        try:
            import fiona
            sf = fiona.open(data_input.file)
            passed = (sf.driver == "ESRI Shapefile")
        except (ModuleNotFoundError, ImportError):
            passed = False

    return passed
[ "def", "validateshapefile", "(", "data_input", ",", "mode", ")", ":", "LOGGER", ".", "info", "(", "'validating Shapefile; Mode: {}'", ".", "format", "(", "mode", ")", ")", "passed", "=", "False", "if", "mode", ">=", "MODE", ".", "NONE", ":", "passed", "=", "True", "if", "mode", ">=", "MODE", ".", "SIMPLE", ":", "name", "=", "data_input", ".", "file", "(", "mtype", ",", "encoding", ")", "=", "mimetypes", ".", "guess_type", "(", "name", ",", "strict", "=", "False", ")", "passed", "=", "data_input", ".", "data_format", ".", "mime_type", "in", "{", "mtype", ",", "FORMATS", ".", "SHP", ".", "mime_type", "}", "if", "mode", ">=", "MODE", ".", "STRICT", ":", "try", ":", "import", "fiona", "sf", "=", "fiona", ".", "open", "(", "data_input", ".", "file", ")", "passed", "=", "(", "sf", ".", "driver", "==", "\"ESRI Shapefile\"", ")", "except", "(", "ModuleNotFoundError", ",", "ImportError", ")", ":", "passed", "=", "False", "return", "passed" ]
https://github.com/geopython/pywps/blob/7f228ff17594912664073a629b2c2ed9d4f5f615/pywps/validator/complexvalidator.py#L301-L327
google/grr
8ad8a4d2c5a93c92729206b7771af19d92d4f915
grr/core/grr_response_core/lib/config_lib.py
python
GrrConfigManager.PrintHelp
(self)
def PrintHelp(self):
    print(self.FormatHelp())
[ "def", "PrintHelp", "(", "self", ")", ":", "print", "(", "self", ".", "FormatHelp", "(", ")", ")" ]
https://github.com/google/grr/blob/8ad8a4d2c5a93c92729206b7771af19d92d4f915/grr/core/grr_response_core/lib/config_lib.py#L797-L798
googledatalab/pydatalab
1c86e26a0d24e3bc8097895ddeab4d0607be4c40
solutionbox/image_classification/mltoolbox/image/classification/_predictor.py
python
_tf_predict
(model_dir, images)
return zip(predictions, labels, scores)
def _tf_predict(model_dir, images):
    session, inputs, outputs = _load_tf_model(model_dir)

    with session:
        feed_dict = collections.defaultdict(list)
        for ii, image in enumerate(images):
            feed_dict[inputs['image_bytes']].append(image)
            feed_dict[inputs['key']].append(str(ii))
        predictions, labels, scores = session.run(
            [outputs['prediction'], outputs['labels'], outputs['scores']],
            feed_dict=feed_dict)
    return zip(predictions, labels, scores)
[ "def", "_tf_predict", "(", "model_dir", ",", "images", ")", ":", "session", ",", "inputs", ",", "outputs", "=", "_load_tf_model", "(", "model_dir", ")", "with", "session", ":", "feed_dict", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "ii", ",", "image", "in", "enumerate", "(", "images", ")", ":", "feed_dict", "[", "inputs", "[", "'image_bytes'", "]", "]", ".", "append", "(", "image", ")", "feed_dict", "[", "inputs", "[", "'key'", "]", "]", ".", "append", "(", "str", "(", "ii", ")", ")", "predictions", ",", "labels", ",", "scores", "=", "session", ".", "run", "(", "[", "outputs", "[", "'prediction'", "]", ",", "outputs", "[", "'labels'", "]", ",", "outputs", "[", "'scores'", "]", "]", ",", "feed_dict", "=", "feed_dict", ")", "return", "zip", "(", "predictions", ",", "labels", ",", "scores", ")" ]
https://github.com/googledatalab/pydatalab/blob/1c86e26a0d24e3bc8097895ddeab4d0607be4c40/solutionbox/image_classification/mltoolbox/image/classification/_predictor.py#L44-L55
selfteaching/selfteaching-python-camp
9982ee964b984595e7d664b07c389cddaf158f1e
19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/requests/cookies.py
python
create_cookie
(name, value, **kwargs)
return cookielib.Cookie(**result)
Make a cookie from underspecified parameters. By default, the pair of `name` and `value` will be set for the domain '' and sent on every request (this is sometimes called a "supercookie").
Make a cookie from underspecified parameters.
[ "Make", "a", "cookie", "from", "underspecified", "parameters", "." ]
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    result = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }

    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))

    result.update(kwargs)
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])

    return cookielib.Cookie(**result)
[ "def", "create_cookie", "(", "name", ",", "value", ",", "*", "*", "kwargs", ")", ":", "result", "=", "{", "'version'", ":", "0", ",", "'name'", ":", "name", ",", "'value'", ":", "value", ",", "'port'", ":", "None", ",", "'domain'", ":", "''", ",", "'path'", ":", "'/'", ",", "'secure'", ":", "False", ",", "'expires'", ":", "None", ",", "'discard'", ":", "True", ",", "'comment'", ":", "None", ",", "'comment_url'", ":", "None", ",", "'rest'", ":", "{", "'HttpOnly'", ":", "None", "}", ",", "'rfc2109'", ":", "False", ",", "}", "badargs", "=", "set", "(", "kwargs", ")", "-", "set", "(", "result", ")", "if", "badargs", ":", "err", "=", "'create_cookie() got unexpected keyword arguments: %s'", "raise", "TypeError", "(", "err", "%", "list", "(", "badargs", ")", ")", "result", ".", "update", "(", "kwargs", ")", "result", "[", "'port_specified'", "]", "=", "bool", "(", "result", "[", "'port'", "]", ")", "result", "[", "'domain_specified'", "]", "=", "bool", "(", "result", "[", "'domain'", "]", ")", "result", "[", "'domain_initial_dot'", "]", "=", "result", "[", "'domain'", "]", ".", "startswith", "(", "'.'", ")", "result", "[", "'path_specified'", "]", "=", "bool", "(", "result", "[", "'path'", "]", ")", "return", "cookielib", ".", "Cookie", "(", "*", "*", "result", ")" ]
https://github.com/selfteaching/selfteaching-python-camp/blob/9982ee964b984595e7d664b07c389cddaf158f1e/19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/requests/cookies.py#L441-L474
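A brief usage sketch (create_cookie is importable from requests.cookies):

from requests.cookies import RequestsCookieJar, create_cookie

jar = RequestsCookieJar()
# Default: a "supercookie" with domain '' that is sent on every request.
jar.set_cookie(create_cookie("session", "abc123"))
# Keyword overrides scope the cookie normally.
jar.set_cookie(create_cookie("pref", "dark", domain="example.com", path="/app"))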
bendmorris/static-python
2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473
Lib/nntplib.py
python
_NNTPBase.over
(self, message_spec, *, file=None)
return resp, _parse_overview(lines, fmt)
Process an OVER command. If the command isn't supported, fall back to XOVER.
Arguments:
- message_spec:
  - either a message id, indicating the article to fetch information about
  - or a (start, end) tuple, indicating a range of article numbers; if end is None, information up to the newest message will be retrieved
  - or None, indicating the current article number must be used
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
NOTE: the "message id" form isn't supported by XOVER
Process an OVER command. If the command isn't supported, fall back to XOVER. Arguments: - message_spec: - either a message id, indicating the article to fetch information about - or a (start, end) tuple, indicating a range of article numbers; if end is None, information up to the newest message will be retrieved - or None, indicating the current article number must be used - file: Filename string or file object to store the result in Returns: - resp: server response if successful - list: list of dicts containing the response fields
[ "Process", "an", "OVER", "command", ".", "If", "the", "command", "isn", "t", "supported", "fall", "back", "to", "XOVER", ".", "Arguments", ":", "-", "message_spec", ":", "-", "either", "a", "message", "id", "indicating", "the", "article", "to", "fetch", "information", "about", "-", "or", "a", "(", "start", "end", ")", "tuple", "indicating", "a", "range", "of", "article", "numbers", ";", "if", "end", "is", "None", "information", "up", "to", "the", "newest", "message", "will", "be", "retrieved", "-", "or", "None", "indicating", "the", "current", "article", "number", "must", "be", "used", "-", "file", ":", "Filename", "string", "or", "file", "object", "to", "store", "the", "result", "in", "Returns", ":", "-", "resp", ":", "server", "response", "if", "successful", "-", "list", ":", "list", "of", "dicts", "containing", "the", "response", "fields" ]
def over(self, message_spec, *, file=None):
    """Process an OVER command. If the command isn't supported, fall
    back to XOVER. Arguments:
    - message_spec:
      - either a message id, indicating the article to fetch
        information about
      - or a (start, end) tuple, indicating a range of article numbers;
        if end is None, information up to the newest message will be
        retrieved
      - or None, indicating the current article number must be used
    - file: Filename string or file object to store the result in
    Returns:
    - resp: server response if successful
    - list: list of dicts containing the response fields

    NOTE: the "message id" form isn't supported by XOVER
    """
    cmd = 'OVER' if 'OVER' in self._caps else 'XOVER'
    if isinstance(message_spec, (tuple, list)):
        start, end = message_spec
        cmd += ' {0}-{1}'.format(start, end or '')
    elif message_spec is not None:
        cmd = cmd + ' ' + message_spec
    resp, lines = self._longcmdstring(cmd, file)
    fmt = self._getoverviewfmt()
    return resp, _parse_overview(lines, fmt)
[ "def", "over", "(", "self", ",", "message_spec", ",", "*", ",", "file", "=", "None", ")", ":", "cmd", "=", "'OVER'", "if", "'OVER'", "in", "self", ".", "_caps", "else", "'XOVER'", "if", "isinstance", "(", "message_spec", ",", "(", "tuple", ",", "list", ")", ")", ":", "start", ",", "end", "=", "message_spec", "cmd", "+=", "' {0}-{1}'", ".", "format", "(", "start", ",", "end", "or", "''", ")", "elif", "message_spec", "is", "not", "None", ":", "cmd", "=", "cmd", "+", "' '", "+", "message_spec", "resp", ",", "lines", "=", "self", ".", "_longcmdstring", "(", "cmd", ",", "file", ")", "fmt", "=", "self", ".", "_getoverviewfmt", "(", ")", "return", "resp", ",", "_parse_overview", "(", "lines", ",", "fmt", ")" ]
https://github.com/bendmorris/static-python/blob/2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473/Lib/nntplib.py#L801-L826
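A hedged sketch against a placeholder server (nntplib is legacy stdlib, removed in Python 3.13):

import nntplib

with nntplib.NNTP("news.example.com") as srv:  # hypothetical host
    resp, count, first, last, name = srv.group("comp.lang.python")
    # Overview entries come back as (article_number, fields_dict) pairs.
    resp, overviews = srv.over((last - 10, last))
    for number, fields in overviews:
        print(number, fields["subject"])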
lovelylain/pyctp
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
example/ctp/futures/ApiStruct.py
python
QryBroker.__init__
(self, BrokerID='')
def __init__(self, BrokerID=''):
    self.BrokerID = BrokerID
[ "def", "__init__", "(", "self", ",", "BrokerID", "=", "''", ")", ":", "self", ".", "BrokerID", "=", "''" ]
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/ctp/futures/ApiStruct.py#L3080-L3081
spectacles/CodeComplice
8ca8ee4236f72b58caa4209d2fbd5fa56bd31d62
libs/codeintel2/lang_javascript.py
python
JavaScriptCiler.addClassFunction
(self, namelist, args=None, doc=None)
def addClassFunction(self, namelist, args=None, doc=None):
    log.debug("AddClassFunction: %s(%s)", namelist, args)
    toScope = self.currentClass
    if not toScope:
        # See if it's a function, we'll convert it into a class then
        if isinstance(self.currentScope, JSFunction):
            toScope = self._convertFunctionToClass(self.currentScope)
    if not toScope or len(namelist) > 1:
        self.addFunction(namelist, args, doc)
    else:
        funcName = namelist[-1]
        log.debug("FUNC: %s(%s) on line %d", funcName, args, self.lineno)
        fn = JSFunction(
            funcName, toScope, args, self.lineno, self.depth, doc=doc)
        toScope.functions[fn.name] = fn
        self.currentScope = fn
[ "def", "addClassFunction", "(", "self", ",", "namelist", ",", "args", "=", "None", ",", "doc", "=", "None", ")", ":", "log", ".", "debug", "(", "\"AddClassFunction: %s(%s)\"", ",", "namelist", ",", "args", ")", "toScope", "=", "self", ".", "currentClass", "if", "not", "toScope", ":", "# See if it's a function, we'll convert it into a class then", "if", "isinstance", "(", "self", ".", "currentScope", ",", "JSFunction", ")", ":", "toScope", "=", "self", ".", "_convertFunctionToClass", "(", "self", ".", "currentScope", ")", "if", "not", "toScope", "or", "len", "(", "namelist", ")", ">", "1", ":", "self", ".", "addFunction", "(", "namelist", ",", "args", ",", "doc", ")", "else", ":", "funcName", "=", "namelist", "[", "-", "1", "]", "log", ".", "debug", "(", "\"FUNC: %s(%s) on line %d\"", ",", "funcName", ",", "args", ",", "self", ".", "lineno", ")", "fn", "=", "JSFunction", "(", "funcName", ",", "toScope", ",", "args", ",", "self", ".", "lineno", ",", "self", ".", "depth", ",", "doc", "=", "doc", ")", "toScope", ".", "functions", "[", "fn", ".", "name", "]", "=", "fn", "self", ".", "currentScope", "=", "fn" ]
https://github.com/spectacles/CodeComplice/blob/8ca8ee4236f72b58caa4209d2fbd5fa56bd31d62/libs/codeintel2/lang_javascript.py#L2260-L2275
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/combinat/sloane_functions.py
python
A000290.__init__
(self)
r""" The squares: `a(n) = n^2`. INPUT: - ``n`` -- non negative integer OUTPUT: - ``integer`` -- function value EXAMPLES:: sage: a = sloane.A000290;a The squares: a(n) = n^2. sage: a(0) 0 sage: a(-1) Traceback (most recent call last): ... ValueError: input n (=-1) must be an integer >= 0 sage: a(16) 256 sage: a.list(17) [0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100, 121, 144, 169, 196, 225, 256] AUTHORS: - Jaap Spies (2007-01-25)
r""" The squares: `a(n) = n^2`.
[ "r", "The", "squares", ":", "a", "(", "n", ")", "=", "n^2", "." ]
def __init__(self):
    r"""
    The squares: `a(n) = n^2`.

    INPUT:

    - ``n`` -- non negative integer

    OUTPUT:

    - ``integer`` -- function value

    EXAMPLES::

        sage: a = sloane.A000290;a
        The squares: a(n) = n^2.
        sage: a(0)
        0
        sage: a(-1)
        Traceback (most recent call last):
        ...
        ValueError: input n (=-1) must be an integer >= 0
        sage: a(16)
        256
        sage: a.list(17)
        [0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100, 121, 144, 169, 196, 225, 256]

    AUTHORS:

    - Jaap Spies (2007-01-25)
    """
    SloaneSequence.__init__(self, offset=0)
[ "def", "__init__", "(", "self", ")", ":", "SloaneSequence", ".", "__init__", "(", "self", ",", "offset", "=", "0", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/combinat/sloane_functions.py#L2163-L2194
huggingface/naacl_transfer_learning_tutorial
dc976775bb11edee24a77e2ce161450089c5e169
utils.py
python
pad_dataset
(dataset, padding=0, to_left=True)
return dataset
Pad a dataset (list of list) to the left or the right.
Pad a dataset (list of list) to the left or the right.
[ "Pad", "a", "dataset", "(", "list", "of", "list", ")", "to", "the", "left", "or", "the", "right", "." ]
def pad_dataset(dataset, padding=0, to_left=True):
    """ Pad a dataset (list of list) to the left or the right. """
    max_l = max(len(x) for x in dataset)
    dataset = [(x if to_left else []) + [padding] * (max_l - len(x)) + ([] if to_left else x)
               for x in dataset]
    return dataset
[ "def", "pad_dataset", "(", "dataset", ",", "padding", "=", "0", ",", "to_left", "=", "True", ")", ":", "max_l", "=", "max", "(", "len", "(", "x", ")", "for", "x", "in", "dataset", ")", "dataset", "=", "[", "(", "x", "if", "to_left", "else", "[", "]", ")", "+", "[", "padding", "]", "*", "(", "max_l", "-", "len", "(", "x", ")", ")", "+", "(", "[", "]", "if", "to_left", "else", "x", ")", "for", "x", "in", "dataset", "]", "return", "dataset" ]
https://github.com/huggingface/naacl_transfer_learning_tutorial/blob/dc976775bb11edee24a77e2ce161450089c5e169/utils.py#L62-L66
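For example, assuming pad_dataset from this utils.py is in scope:

dataset = [[1, 2, 3], [4, 5]]  # uneven token-id lists
print(pad_dataset(dataset))                 # [[1, 2, 3], [4, 5, 0]]
print(pad_dataset(dataset, to_left=False))  # [[1, 2, 3], [0, 4, 5]]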
biopython/biopython
2dd97e71762af7b046d7f7f8a4f1e38db6b06c86
Bio/Phylo/Applications/_Fasttree.py
python
_is_int
(x)
return isinstance(x, int) or str(x).isdigit()
Test whether the argument can be serialized as an integer (PRIVATE).
Test whether the argument can be serialized as an integer (PRIVATE).
[ "Test", "whether", "the", "argument", "can", "be", "serialized", "as", "an", "integer", "(", "PRIVATE", ")", "." ]
def _is_int(x):
    """Test whether the argument can be serialized as an integer (PRIVATE)."""
    return isinstance(x, int) or str(x).isdigit()
[ "def", "_is_int", "(", "x", ")", ":", "return", "isinstance", "(", "x", ",", "int", ")", "or", "str", "(", "x", ")", ".", "isdigit", "(", ")" ]
https://github.com/biopython/biopython/blob/2dd97e71762af7b046d7f7f8a4f1e38db6b06c86/Bio/Phylo/Applications/_Fasttree.py#L15-L17
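The str(...).isdigit() fallback has a quirk worth noting: a signed numeric string fails the check even though the equivalent int passes. Assuming _is_int is imported from the module:

assert _is_int(7) and _is_int(-7)  # int instances always pass
assert _is_int("42")               # all-digit strings pass
assert not _is_int("-42")          # isdigit() rejects the sign
assert not _is_int("4.2")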
out0fmemory/GoAgent-Always-Available
c4254984fea633ce3d1893fe5901debd9f22c2a9
server/lib/google/appengine/api/appinfo.py
python
EnvironmentVariables.Merge
(cls, env_variables_one, env_variables_two)
return (EnvironmentVariables(**result_env_variables) if result_env_variables else None)
Merges two EnvironmentVariables instances.

Args:
  env_variables_one: The first EnvironmentVariables instance or None.
  env_variables_two: The second EnvironmentVariables instance or None.

Returns:
  The merged EnvironmentVariables instance, or None if both input instances are None or empty. If a variable is specified by both instances, the value from env_variables_two is used.

Merges two EnvironmentVariables instances.
[ "Merges", "to", "EnvironmentVariables", "instances", "." ]
def Merge(cls, env_variables_one, env_variables_two):
    """Merges two EnvironmentVariables instances.

    Args:
      env_variables_one: The first EnvironmentVariables instance or None.
      env_variables_two: The second EnvironmentVariables instance or None.

    Returns:
      The merged EnvironmentVariables instance, or None if both input
      instances are None or empty.

    If a variable is specified by both instances, the value from
    env_variables_two is used.
    """
    result_env_variables = (env_variables_one or {}).copy()
    result_env_variables.update(env_variables_two or {})
    return (EnvironmentVariables(**result_env_variables)
            if result_env_variables else None)
[ "def", "Merge", "(", "cls", ",", "env_variables_one", ",", "env_variables_two", ")", ":", "result_env_variables", "=", "(", "env_variables_one", "or", "{", "}", ")", ".", "copy", "(", ")", "result_env_variables", ".", "update", "(", "env_variables_two", "or", "{", "}", ")", "return", "(", "EnvironmentVariables", "(", "*", "*", "result_env_variables", ")", "if", "result_env_variables", "else", "None", ")" ]
https://github.com/out0fmemory/GoAgent-Always-Available/blob/c4254984fea633ce3d1893fe5901debd9f22c2a9/server/lib/google/appengine/api/appinfo.py#L1510-L1528
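Illustratively (a sketch assuming this vendored appinfo module is importable; later values win on key collisions, per the docstring):

from google.appengine.api.appinfo import EnvironmentVariables

one = EnvironmentVariables(LOG_LEVEL='info', REGION='us')
two = EnvironmentVariables(LOG_LEVEL='debug')
merged = EnvironmentVariables.Merge(one, two)
# merged == {'LOG_LEVEL': 'debug', 'REGION': 'us'}; Merge(None, None) is None.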
ContextLab/hypertools
948050a22b345c7dcccf729672c76f49609b1ac8
hypertools/plot/draw.py
python
_draw
(x, legend=None, title=None, labels=False, show=True, kwargs_list=None, fmt=None, animate=False, tail_duration=2, rotations=2, zoom=1, chemtrails=False, precog=False, bullettime=False, frame_rate=50, elev=10, azim=-60, duration=30, explore=False, size=None, ax=None)
return fig, ax, data, line_ani
Draws the plot
Draws the plot
[ "Draws", "the", "plot" ]
def _draw(x, legend=None, title=None, labels=False, show=True, kwargs_list=None,
          fmt=None, animate=False, tail_duration=2, rotations=2, zoom=1,
          chemtrails=False, precog=False, bullettime=False, frame_rate=50,
          elev=10, azim=-60, duration=30, explore=False, size=None, ax=None):
    """
    Draws the plot
    """

    # handle static plots
    def dispatch_static(x, ax=None):
        shape = x[0].shape[1]
        if shape == 3:
            opts = dict(projection='3d')
        else:
            opts = dict()
        if not ax:
            fig = plt.figure()
            ax = fig.add_subplot(111, **opts)
        else:
            fig = ax.figure
        if x[0].ndim == 1 or x[0].shape[-1] == 1:
            return plot1D(x, fig, ax)
        elif x[0].shape[-1] == 2:
            return plot2D(x, fig, ax)
        elif x[0].shape[-1] == 3:
            return plot3D(x, fig, ax)

    # plot data in 1D
    def plot1D(data, fig, ax):
        n = len(data)
        for i in range(n):
            ikwargs = kwargs_list[i]
            if fmt is None:
                ax.plot(data[i][:, 0], **ikwargs)
            else:
                ax.plot(data[i][:, 0], fmt[i], **ikwargs)
        return fig, ax, data

    # plot data in 2D
    def plot2D(data, fig, ax):
        n = len(data)
        for i in range(n):
            ikwargs = kwargs_list[i]
            if fmt is None:
                ax.plot(data[i][:, 0], data[i][:, 1], **ikwargs)
            else:
                ax.plot(data[i][:, 0], data[i][:, 1], fmt[i], **ikwargs)
        return fig, ax, data

    # plot data in 3D
    def plot3D(data, fig, ax):
        n = len(data)
        for i in range(n):
            ikwargs = kwargs_list[i]
            if fmt is None:
                ax.plot(data[i][:, 0], data[i][:, 1], data[i][:, 2], **ikwargs)
            else:
                ax.plot(data[i][:, 0], data[i][:, 1], data[i][:, 2], fmt[i],
                        **ikwargs)
        return fig, ax, data

    def annotate_plot(data, labels):
        """Create labels in 3d chart

        Args:
            X (np.array) - array of points, of shape (numPoints, 3)
            labels (list) - list of labels of shape (numPoints, 1)
        Returns:
            None
        """
        global labels_and_points
        labels_and_points = []
        if data[0].shape[-1] > 2:
            proj = ax.get_proj()
        for idx, x in enumerate(data):
            if labels[idx] is not None:
                if data[0].shape[-1] > 2:
                    x2, y2, _ = proj3d.proj_transform(x[0], x[1], x[2], proj)
                    label = plt.annotate(
                        labels[idx],
                        xy=(x2, y2), xytext=(-20, 20),
                        textcoords='offset points', ha='right', va='bottom',
                        bbox=dict(boxstyle='round,pad=0.5', fc='white', alpha=0.5),
                        arrowprops=dict(arrowstyle='-',
                                        connectionstyle='arc3,rad=0'),
                        family='serif')
                    labels_and_points.append((label, x[0], x[1], x[2]))
                elif data[0].shape[-1] == 2:
                    x2, y2 = x[0], x[1]
                    label = plt.annotate(
                        labels[idx],
                        xy=(x2, y2), xytext=(-20, 20),
                        textcoords='offset points', ha='right', va='bottom',
                        bbox=dict(boxstyle='round,pad=0.5', fc='white', alpha=0.5),
                        arrowprops=dict(arrowstyle='-',
                                        connectionstyle='arc3,rad=0'),
                        family='serif')
                    label.draggable()
                    labels_and_points.append((label, x[0], x[1]))
        fig.canvas.draw()

    def update_position(e):
        """Update label positions in 3d chart

        Args:
            e (mouse event) - event handle to update on
        Returns:
            None
        """
        proj = ax.get_proj()
        for label, x, y, z in labels_and_points:
            x2, y2, _ = proj3d.proj_transform(x, y, z, proj)
            label.xy = x2, y2
            label.update_positions(fig.canvas.renderer)
            label._visible = True
        fig.canvas.draw()

    def hide_labels(e):
        """Hides labels on button press

        Args:
            e (mouse event) - event handle to update on
        Returns:
            None
        """
        for label in labels_and_points:
            label[0]._visible = False

    def add_labels(x, labels, explore=False):
        """Add labels to graph if available

        Args:
            data (np.ndarray) - Array containing the data points
            labels (list) - List containing labels
        Returns:
            None
        """
        # if explore mode is activated, implement the on hover behavior
        if explore:
            X = np.vstack(x)
            if labels is not None:
                if any(isinstance(el, list) for el in labels):
                    labels = list(itertools.chain(*labels))
                fig.canvas.mpl_connect('motion_notify_event',
                                       lambda event: onMouseMotion(event, X, labels))  # on mouse motion
                # fig.canvas.mpl_connect('button_press_event', lambda event: onMouseClick(event, X, labels))  # on mouse click
            else:
                fig.canvas.mpl_connect('motion_notify_event',
                                       lambda event: onMouseMotion(event, X))  # on mouse motion
                # fig.canvas.mpl_connect('button_press_event', lambda event: onMouseClick(event, X, labels))  # on mouse click
        elif labels is not None:
            X = np.vstack(x)
            if any(isinstance(el, list) for el in labels):
                labels = list(itertools.chain(*labels))
            annotate_plot(X, labels)
            fig.canvas.mpl_connect('button_press_event', hide_labels)
            fig.canvas.mpl_connect('button_release_event', update_position)

    ## EXPLORE MODE ##
    def distance(point, event):
        """Return distance between mouse position and given data point

        Args:
            point (np.array) - np.array of shape (3,), with x,y,z in data coords
            event (MouseEvent) - mouse event (which contains mouse position in .x and .xdata)
        Returns:
            distance (np.float64) - distance (in screen coords) between mouse pos and data point
        """
        assert point.shape == (3,), "distance: point.shape is wrong: %s, must be (3,)" % point.shape

        # Project 3d data space to 2d data space
        x2, y2, _ = proj3d.proj_transform(point[0], point[1], point[2],
                                          plt.gca().get_proj())
        # Convert 2d data space to 2d screen space
        x3, y3 = ax.transData.transform((x2, y2))

        return np.sqrt((x3 - event.x) ** 2 + (y3 - event.y) ** 2)

    def calcClosestDatapoint(X, event):
        """Calculate which data point is closest to the mouse position.

        Args:
            X (np.array) - array of points, of shape (numPoints, 3)
            event (MouseEvent) - mouse event (containing mouse position)
        Returns:
            smallestIndex (int) - the index (into the array of points X) of the element closest to the mouse position
        """
        distances = [distance(X[i, 0:3], event) for i in range(X.shape[0])]
        return np.argmin(distances)

    def annotate_plot_explore(X, index, labels=False):
        """Create popover label in 3d chart

        Args:
            X (np.array) - array of points, of shape (numPoints, 3)
            index (int) - index (into points array X) of item which should be printed
            labels (list or False) - list of data point labels (default is False)
        Returns:
            None
        """
        # save clicked points
        if not hasattr(annotate_plot_explore, 'clicked'):
            annotate_plot_explore.clicked = []

        # If we have previously displayed another label, remove it first
        if hasattr(annotate_plot_explore, 'label'):
            if index not in annotate_plot_explore.clicked:
                annotate_plot_explore.label.remove()

        # Get data point from array of points X, at position index
        x2, y2, _ = proj3d.proj_transform(X[index, 0], X[index, 1],
                                          X[index, 2], ax.get_proj())

        if type(labels) is list:
            label = labels[index]
        else:
            label = "Index " + str(index) + ": (" + \
                    "{0:.2f}, ".format(X[index, 0]) + \
                    "{0:.2f}, ".format(X[index, 1]) + \
                    "{0:.2f}".format(X[index, 2]) + ")"

        annotate_plot_explore.label = plt.annotate(
            label,
            xy=(x2, y2), xytext=(-20, 20),
            textcoords='offset points', ha='right', va='bottom',
            bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
            arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=0'))
        fig.canvas.draw()

    def onMouseMotion(event, X, labels=False):
        """Event that is triggered when mouse is moved. Shows text annotation over data point closest to mouse

        Args:
            event (event) - event triggered when the mouse is moved
            X (np.ndarray) - coordinates by datapoints matrix
            labels (list or False) - list of data labels (default is False)
        Returns:
            None
        """
        closestIndex = calcClosestDatapoint(X, event)
        if hasattr(onMouseMotion, 'first'):
            pass
        else:
            onMouseMotion.first = False
            onMouseMotion.closestIndex_prev = calcClosestDatapoint(X, event)
        if closestIndex != onMouseMotion.closestIndex_prev:
            if type(labels) is list:
                annotate_plot_explore(X, closestIndex, labels)
                # remember the last annotated point (the original assigned a
                # local variable here, so the previous index never updated)
                onMouseMotion.closestIndex_prev = closestIndex
            else:
                annotate_plot_explore(X, closestIndex)
                onMouseMotion.closestIndex_prev = closestIndex

    def plot_cube(scale):
        cube = {
            "top": ([[-1, 1], [-1, 1]], [[-1, -1], [1, 1]], [[1, 1], [1, 1]]),
            "bottom": ([[-1, 1], [-1, 1]], [[-1, -1], [1, 1]], [[-1, -1], [-1, -1]]),
            "left": ([[-1, -1], [-1, -1]], [[-1, 1], [-1, 1]], [[-1, -1], [1, 1]]),
            "right": ([[1, 1], [1, 1]], [[-1, 1], [-1, 1]], [[-1, -1], [1, 1]]),
            "front": ([[-1, 1], [-1, 1]], [[-1, -1], [-1, -1]], [[-1, -1], [1, 1]]),
            "back": ([[-1, 1], [-1, 1]], [[1, 1], [1, 1]], [[-1, -1], [1, 1]]),
        }
        plane_list = []
        for side in cube:
            (Xs, Ys, Zs) = (
                np.asarray(cube[side][0]) * scale,
                np.asarray(cube[side][1]) * scale,
                np.asarray(cube[side][2]) * scale,
            )
            plane_list.append(ax.plot_wireframe(Xs, Ys, Zs, rstride=1,
                                                cstride=1, color='black',
                                                linewidth=1))
        return plane_list

    def plot_square(ax, scale=1):
        # the original passed scale*[-1, -1], which repeats the Python list
        # for integer scales > 1 instead of scaling the corner coordinates
        ax.add_patch(patches.Rectangle((-scale, -scale), scale * 2, scale * 2,
                                       fill=False, edgecolor='black',
                                       linewidth=1))

    def update_lines_parallel(num, data_lines, lines, trail_lines, cube_scale,
                              tail_duration=2, rotations=2, zoom=1,
                              chemtrails=False, elev=10):
        if hasattr(update_lines_parallel, 'planes'):
            for plane in update_lines_parallel.planes:
                plane.remove()

        update_lines_parallel.planes = plot_cube(cube_scale)
        # the original hardcoded elev=10 here, silently ignoring the elev argument
        ax.view_init(elev=elev, azim=rotations * (360 * (num / data_lines[0].shape[0])))
        ax.dist = 9 - zoom

        for line, data, trail in zip(lines, data_lines, trail_lines):
            if (precog and chemtrails) or bullettime:
                trail.set_data(data[:, 0:2].T)
                trail.set_3d_properties(data[:, 2])
            elif chemtrails:
                trail.set_data(data[0:num - tail_duration + 1, 0:2].T)
                trail.set_3d_properties(data[0:num - tail_duration + 1, 2])
            elif precog:
                trail.set_data(data[num + 1:, 0:2].T)
                trail.set_3d_properties(data[num + 1:, 2])
            if num <= tail_duration:
                line.set_data(data[0:num + 1, 0:2].T)
                line.set_3d_properties(data[0:num + 1, 2])
            else:
                line.set_data(data[num - tail_duration:num + 1, 0:2].T)
                line.set_3d_properties(data[num - tail_duration:num + 1, 2])
        return lines, trail_lines

    def update_lines_spin(num, data_lines, lines, cube_scale, rotations=2,
                          zoom=1, elev=10):
        if hasattr(update_lines_spin, 'planes'):
            for plane in update_lines_spin.planes:
                plane.remove()

        update_lines_spin.planes = plot_cube(cube_scale)
        ax.view_init(elev=elev, azim=rotations * (360 * (num / (frame_rate * duration))))
        ax.dist = 9 - zoom
        for line, data in zip(lines, data_lines):
            line.set_data(data[:, 0:2].T)
            line.set_3d_properties(data[:, 2])
        return lines

    def dispatch_animate(x, ani_params):
        # use ==, not `is`: identity comparison of ints relies on interning
        if x[0].shape[1] == 3:
            return animate_plot3D(x, **ani_params)

    def animate_plot3D(x, tail_duration=2, rotations=2, zoom=1,
                       chemtrails=False, frame_rate=50, elev=10,
                       style='parallel'):
        # initialize plot
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')

        # create lines
        if fmt is not None:
            lines = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2], fmt[idx],
                             linewidth=1, **kwargs_list[idx])[0]
                     for idx, dat in enumerate(x)]
            if is_line(fmt):
                trail = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2],
                                 fmt[idx], alpha=.3, linewidth=1,
                                 **kwargs_list[idx])[0]
                         for idx, dat in enumerate(x)]
        else:
            lines = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2],
                             linewidth=1, **kwargs_list[idx])[0]
                     for idx, dat in enumerate(x)]
            if is_line(fmt):
                trail = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2],
                                 alpha=.3, linewidth=1, **kwargs_list[idx])[0]
                         for idx, dat in enumerate(x)]

        if tail_duration == 0:
            tail_duration = 1
        else:
            tail_duration = int(frame_rate * tail_duration)

        # get line animation
        if style in ['parallel', True]:
            line_ani = animation.FuncAnimation(fig, update_lines_parallel,
                                               x[0].shape[0],
                                               fargs=(x, lines, trail, 1,
                                                      tail_duration, rotations,
                                                      zoom, chemtrails, elev),
                                               interval=1000 / frame_rate,
                                               blit=False, repeat=False)
        elif style == 'spin':
            line_ani = animation.FuncAnimation(fig, update_lines_spin,
                                               frame_rate * duration,
                                               fargs=(x, lines, 1, rotations,
                                                      zoom, elev),
                                               interval=1000 / frame_rate,
                                               blit=False, repeat=False)

        return fig, ax, x, line_ani

    # if a single point, but formatted as a line, replace with a point
    for i, (xi, fi) in enumerate(zip(x, fmt)):
        if xi.shape[0] == 1 and fi in ('-', ':', '--'):
            fmt[i] = '.'

    if not show:
        # prevents the backend from rendering this plot
        plt.ioff()

    if animate in [True, 'parallel', 'spin']:
        assert x[0].shape[1] == 3, "Animations are currently only supported for 3d plots."

        # animation params
        ani_params = dict(tail_duration=tail_duration,
                          rotations=rotations,
                          zoom=zoom,
                          chemtrails=chemtrails,
                          frame_rate=frame_rate,
                          elev=elev,
                          style=animate)

        # dispatch animation
        fig, ax, data, line_ani = dispatch_animate(x, ani_params)

    else:
        # dispatch static
        fig, ax, data = dispatch_static(x, ax)

        # if 3d, plot the cube
        if x[0].shape[1] == 3:
            # set cube scale
            cube_scale = 1

            # plot cube
            plot_cube(cube_scale)

            # set the axes properties
            ax.set_xlim3d([-cube_scale, cube_scale])
            ax.set_ylim3d([-cube_scale, cube_scale])
            ax.set_zlim3d([-cube_scale, cube_scale])

            # initialize the view
            ax.view_init(elev=elev, azim=azim)

        elif x[0].shape[1] == 2:
            # plot square
            plot_square(ax)

            # set axes
            ax.set_xlim(-1.1, 1.1)
            ax.set_ylim(-1.1, 1.1)

        # set line_ani to empty
        line_ani = None

    # remove axes
    ax.set_axis_off()

    # add labels
    add_labels(x, labels, explore=explore)

    # add title
    if title is not None:
        ax.set_title(title)

    # add legend
    if legend is not None:
        ax.legend()

    if size is not None:
        fig.set_size_inches(size)

    return fig, ax, data, line_ani
[ "def", "_draw", "(", "x", ",", "legend", "=", "None", ",", "title", "=", "None", ",", "labels", "=", "False", ",", "show", "=", "True", ",", "kwargs_list", "=", "None", ",", "fmt", "=", "None", ",", "animate", "=", "False", ",", "tail_duration", "=", "2", ",", "rotations", "=", "2", ",", "zoom", "=", "1", ",", "chemtrails", "=", "False", ",", "precog", "=", "False", ",", "bullettime", "=", "False", ",", "frame_rate", "=", "50", ",", "elev", "=", "10", ",", "azim", "=", "-", "60", ",", "duration", "=", "30", ",", "explore", "=", "False", ",", "size", "=", "None", ",", "ax", "=", "None", ")", ":", "# handle static plots", "def", "dispatch_static", "(", "x", ",", "ax", "=", "None", ")", ":", "shape", "=", "x", "[", "0", "]", ".", "shape", "[", "1", "]", "if", "shape", "==", "3", ":", "opts", "=", "dict", "(", "projection", "=", "'3d'", ")", "else", ":", "opts", "=", "dict", "(", ")", "if", "not", "ax", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ",", "*", "*", "opts", ")", "else", ":", "fig", "=", "ax", ".", "figure", "if", "x", "[", "0", "]", ".", "ndim", "==", "1", "or", "x", "[", "0", "]", ".", "shape", "[", "-", "1", "]", "==", "1", ":", "return", "plot1D", "(", "x", ",", "fig", ",", "ax", ")", "elif", "x", "[", "0", "]", ".", "shape", "[", "-", "1", "]", "==", "2", ":", "return", "plot2D", "(", "x", ",", "fig", ",", "ax", ")", "elif", "x", "[", "0", "]", ".", "shape", "[", "-", "1", "]", "==", "3", ":", "return", "plot3D", "(", "x", ",", "fig", ",", "ax", ")", "# plot data in 1D", "def", "plot1D", "(", "data", ",", "fig", ",", "ax", ")", ":", "n", "=", "len", "(", "data", ")", "for", "i", "in", "range", "(", "n", ")", ":", "ikwargs", "=", "kwargs_list", "[", "i", "]", "if", "fmt", "is", "None", ":", "ax", ".", "plot", "(", "data", "[", "i", "]", "[", ":", ",", "0", "]", ",", "*", "*", "ikwargs", ")", "else", ":", "ax", ".", "plot", "(", "data", "[", "i", "]", "[", ":", ",", "0", "]", ",", "fmt", "[", "i", "]", ",", "*", "*", "ikwargs", ")", "return", "fig", ",", "ax", ",", "data", "# plot data in 2D", "def", "plot2D", "(", "data", ",", "fig", ",", "ax", ")", ":", "n", "=", "len", "(", "data", ")", "for", "i", "in", "range", "(", "n", ")", ":", "ikwargs", "=", "kwargs_list", "[", "i", "]", "if", "fmt", "is", "None", ":", "ax", ".", "plot", "(", "data", "[", "i", "]", "[", ":", ",", "0", "]", ",", "data", "[", "i", "]", "[", ":", ",", "1", "]", ",", "*", "*", "ikwargs", ")", "else", ":", "ax", ".", "plot", "(", "data", "[", "i", "]", "[", ":", ",", "0", "]", ",", "data", "[", "i", "]", "[", ":", ",", "1", "]", ",", "fmt", "[", "i", "]", ",", "*", "*", "ikwargs", ")", "return", "fig", ",", "ax", ",", "data", "# plot data in 3D", "def", "plot3D", "(", "data", ",", "fig", ",", "ax", ")", ":", "n", "=", "len", "(", "data", ")", "for", "i", "in", "range", "(", "n", ")", ":", "ikwargs", "=", "kwargs_list", "[", "i", "]", "if", "fmt", "is", "None", ":", "ax", ".", "plot", "(", "data", "[", "i", "]", "[", ":", ",", "0", "]", ",", "data", "[", "i", "]", "[", ":", ",", "1", "]", ",", "data", "[", "i", "]", "[", ":", ",", "2", "]", ",", "*", "*", "ikwargs", ")", "else", ":", "ax", ".", "plot", "(", "data", "[", "i", "]", "[", ":", ",", "0", "]", ",", "data", "[", "i", "]", "[", ":", ",", "1", "]", ",", "data", "[", "i", "]", "[", ":", ",", "2", "]", ",", "fmt", "[", "i", "]", ",", "*", "*", "ikwargs", ")", "return", "fig", ",", "ax", ",", "data", "def", "annotate_plot", "(", "data", ",", "labels", ")", ":", "\"\"\"Create labels in 3d chart\n 
Args:\n X (np.array) - array of points, of shape (numPoints, 3)\n labels (list) - list of labels of shape (numPoints,1)\n Returns:\n None\n \"\"\"", "global", "labels_and_points", "labels_and_points", "=", "[", "]", "if", "data", "[", "0", "]", ".", "shape", "[", "-", "1", "]", ">", "2", ":", "proj", "=", "ax", ".", "get_proj", "(", ")", "for", "idx", ",", "x", "in", "enumerate", "(", "data", ")", ":", "if", "labels", "[", "idx", "]", "is", "not", "None", ":", "if", "data", "[", "0", "]", ".", "shape", "[", "-", "1", "]", ">", "2", ":", "x2", ",", "y2", ",", "_", "=", "proj3d", ".", "proj_transform", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ",", "x", "[", "2", "]", ",", "proj", ")", "label", "=", "plt", ".", "annotate", "(", "labels", "[", "idx", "]", ",", "xy", "=", "(", "x2", ",", "y2", ")", ",", "xytext", "=", "(", "-", "20", ",", "20", ")", ",", "textcoords", "=", "'offset points'", ",", "ha", "=", "'right'", ",", "va", "=", "'bottom'", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "'round,pad=0.5'", ",", "fc", "=", "'white'", ",", "alpha", "=", "0.5", ")", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "'-'", ",", "connectionstyle", "=", "'arc3,rad=0'", ")", ",", "family", "=", "'serif'", ")", "labels_and_points", ".", "append", "(", "(", "label", ",", "x", "[", "0", "]", ",", "x", "[", "1", "]", ",", "x", "[", "2", "]", ")", ")", "elif", "data", "[", "0", "]", ".", "shape", "[", "-", "1", "]", "==", "2", ":", "x2", ",", "y2", "=", "x", "[", "0", "]", ",", "x", "[", "1", "]", "label", "=", "plt", ".", "annotate", "(", "labels", "[", "idx", "]", ",", "xy", "=", "(", "x2", ",", "y2", ")", ",", "xytext", "=", "(", "-", "20", ",", "20", ")", ",", "textcoords", "=", "'offset points'", ",", "ha", "=", "'right'", ",", "va", "=", "'bottom'", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "'round,pad=0.5'", ",", "fc", "=", "'white'", ",", "alpha", "=", "0.5", ")", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "'-'", ",", "connectionstyle", "=", "'arc3,rad=0'", ")", ",", "family", "=", "'serif'", ")", "label", ".", "draggable", "(", ")", "labels_and_points", ".", "append", "(", "(", "label", ",", "x", "[", "0", "]", ",", "x", "[", "1", "]", ")", ")", "fig", ".", "canvas", ".", "draw", "(", ")", "def", "update_position", "(", "e", ")", ":", "\"\"\"Update label positions in 3d chart\n Args:\n e (mouse event) - event handle to update on\n Returns:\n None\n \"\"\"", "proj", "=", "ax", ".", "get_proj", "(", ")", "for", "label", ",", "x", ",", "y", ",", "z", "in", "labels_and_points", ":", "x2", ",", "y2", ",", "_", "=", "proj3d", ".", "proj_transform", "(", "x", ",", "y", ",", "z", ",", "proj", ")", "label", ".", "xy", "=", "x2", ",", "y2", "label", ".", "update_positions", "(", "fig", ".", "canvas", ".", "renderer", ")", "label", ".", "_visible", "=", "True", "fig", ".", "canvas", ".", "draw", "(", ")", "def", "hide_labels", "(", "e", ")", ":", "\"\"\"Hides labels on button press\n Args:\n e (mouse event) - event handle to update on\n Returns:\n None\n \"\"\"", "for", "label", "in", "labels_and_points", ":", "label", "[", "0", "]", ".", "_visible", "=", "False", "def", "add_labels", "(", "x", ",", "labels", ",", "explore", "=", "False", ")", ":", "\"\"\"Add labels to graph if available\n Args:\n data (np.ndarray) - Array containing the data points\n labels (list) - List containing labels\n Returns:\n None\n \"\"\"", "# if explore mode is activated, implement the on hover behavior", "if", "explore", ":", "X", "=", "np", ".", "vstack", "(", "x", ")", "if", "labels", 
"is", "not", "None", ":", "if", "any", "(", "isinstance", "(", "el", ",", "list", ")", "for", "el", "in", "labels", ")", ":", "labels", "=", "list", "(", "itertools", ".", "chain", "(", "*", "labels", ")", ")", "fig", ".", "canvas", ".", "mpl_connect", "(", "'motion_notify_event'", ",", "lambda", "event", ":", "onMouseMotion", "(", "event", ",", "X", ",", "labels", ")", ")", "# on mouse motion", "# fig.canvas.mpl_connect('button_press_event', lambda event: onMouseClick(event, X, labels)) # on mouse click", "else", ":", "fig", ".", "canvas", ".", "mpl_connect", "(", "'motion_notify_event'", ",", "lambda", "event", ":", "onMouseMotion", "(", "event", ",", "X", ")", ")", "# on mouse motion", "# fig.canvas.mpl_connect('button_press_event', lambda event: onMouseClick(event, X, labels)) # on mouse click", "elif", "labels", "is", "not", "None", ":", "X", "=", "np", ".", "vstack", "(", "x", ")", "if", "any", "(", "isinstance", "(", "el", ",", "list", ")", "for", "el", "in", "labels", ")", ":", "labels", "=", "list", "(", "itertools", ".", "chain", "(", "*", "labels", ")", ")", "annotate_plot", "(", "X", ",", "labels", ")", "fig", ".", "canvas", ".", "mpl_connect", "(", "'button_press_event'", ",", "hide_labels", ")", "fig", ".", "canvas", ".", "mpl_connect", "(", "'button_release_event'", ",", "update_position", ")", "##EXPLORE MODE##", "def", "distance", "(", "point", ",", "event", ")", ":", "\"\"\"Return distance between mouse position and given data point\n\n Args:\n point (np.array) - np.array of shape (3,), with x,y,z in data coords\n event (MouseEvent) - mouse event (which contains mouse position in .x and .xdata)\n Returns:\n distance (np.float64) - distance (in screen coords) between mouse pos and data point\n \"\"\"", "assert", "point", ".", "shape", "==", "(", "3", ",", ")", ",", "\"distance: point.shape is wrong: %s, must be (3,)\"", "%", "point", ".", "shape", "# Project 3d data space to 2d data space", "x2", ",", "y2", ",", "_", "=", "proj3d", ".", "proj_transform", "(", "point", "[", "0", "]", ",", "point", "[", "1", "]", ",", "point", "[", "2", "]", ",", "plt", ".", "gca", "(", ")", ".", "get_proj", "(", ")", ")", "# Convert 2d data space to 2d screen space", "x3", ",", "y3", "=", "ax", ".", "transData", ".", "transform", "(", "(", "x2", ",", "y2", ")", ")", "return", "np", ".", "sqrt", "(", "(", "x3", "-", "event", ".", "x", ")", "**", "2", "+", "(", "y3", "-", "event", ".", "y", ")", "**", "2", ")", "def", "calcClosestDatapoint", "(", "X", ",", "event", ")", ":", "\"\"\"\"Calculate which data point is closest to the mouse position.\n\n Args:\n X (np.array) - array of points, of shape (numPoints, 3)\n event (MouseEvent) - mouse event (containing mouse position)\n Returns:\n smallestIndex (int) - the index (into the array of points X) of the element closest to the mouse position\n \"\"\"", "distances", "=", "[", "distance", "(", "X", "[", "i", ",", "0", ":", "3", "]", ",", "event", ")", "for", "i", "in", "range", "(", "X", ".", "shape", "[", "0", "]", ")", "]", "return", "np", ".", "argmin", "(", "distances", ")", "def", "annotate_plot_explore", "(", "X", ",", "index", ",", "labels", "=", "False", ")", ":", "\"\"\"Create popover label in 3d chart\n\n Args:\n X (np.array) - array of points, of shape (numPoints, 3)\n index (int) - index (into points array X) of item which should be printed\n labels (list or False) - list of data point labels (default is False)\n Returns:\n None\n \"\"\"", "# save clicked points", "if", "not", "hasattr", "(", "annotate_plot_explore", ",", "'clicked'", 
")", ":", "annotate_plot_explore", ".", "clicked", "=", "[", "]", "# If we have previously displayed another label, remove it first", "if", "hasattr", "(", "annotate_plot_explore", ",", "'label'", ")", ":", "if", "index", "not", "in", "annotate_plot_explore", ".", "clicked", ":", "annotate_plot_explore", ".", "label", ".", "remove", "(", ")", "# Get data point from array of points X, at position index", "x2", ",", "y2", ",", "_", "=", "proj3d", ".", "proj_transform", "(", "X", "[", "index", ",", "0", "]", ",", "X", "[", "index", ",", "1", "]", ",", "X", "[", "index", ",", "2", "]", ",", "ax", ".", "get_proj", "(", ")", ")", "if", "type", "(", "labels", ")", "is", "list", ":", "label", "=", "labels", "[", "index", "]", "else", ":", "label", "=", "\"Index \"", "+", "str", "(", "index", ")", "+", "\": (\"", "+", "\"{0:.2f}, \"", ".", "format", "(", "X", "[", "index", ",", "0", "]", ")", "+", "\"{0:.2f}, \"", ".", "format", "(", "X", "[", "index", ",", "1", "]", ")", "+", "\"{0:.2f}\"", ".", "format", "(", "X", "[", "index", ",", "2", "]", ")", "+", "\")\"", "annotate_plot_explore", ".", "label", "=", "plt", ".", "annotate", "(", "label", ",", "xy", "=", "(", "x2", ",", "y2", ")", ",", "xytext", "=", "(", "-", "20", ",", "20", ")", ",", "textcoords", "=", "'offset points'", ",", "ha", "=", "'right'", ",", "va", "=", "'bottom'", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "'round,pad=0.5'", ",", "fc", "=", "'yellow'", ",", "alpha", "=", "0.5", ")", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "'->'", ",", "connectionstyle", "=", "'arc3,rad=0'", ")", ")", "fig", ".", "canvas", ".", "draw", "(", ")", "def", "onMouseMotion", "(", "event", ",", "X", ",", "labels", "=", "False", ")", ":", "\"\"\"Event that is triggered when mouse is moved. Shows text annotation over data point closest to mouse\n Args:\n event (event) - event triggered when the mous is moved\n X (np.ndarray) - coordinates by datapoints matrix\n labels (list or False) - list of data labels (default is False)\n Returns:\n None\n \"\"\"", "closestIndex", "=", "calcClosestDatapoint", "(", "X", ",", "event", ")", "if", "hasattr", "(", "onMouseMotion", ",", "'first'", ")", ":", "pass", "else", ":", "onMouseMotion", ".", "first", "=", "False", "onMouseMotion", ".", "closestIndex_prev", "=", "calcClosestDatapoint", "(", "X", ",", "event", ")", "if", "closestIndex", "!=", "onMouseMotion", ".", "closestIndex_prev", ":", "if", "type", "(", "labels", ")", "is", "list", ":", "annotate_plot_explore", "(", "X", ",", "closestIndex", ",", "labels", ")", "closestIndex_prev", "=", "closestIndex", "else", ":", "annotate_plot_explore", "(", "X", ",", "closestIndex", ")", "closestIndex_prev", "=", "closestIndex", "def", "plot_cube", "(", "scale", ")", ":", "cube", "=", "{", "\"top\"", ":", "(", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "1", ",", "1", "]", "]", ",", "[", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", "]", ")", ",", "\"bottom\"", ":", "(", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "-", "1", ",", "-", "1", "]", "]", ")", ",", "\"left\"", ":", "(", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "-", "1", ",", "-", "1", "]", "]", ",", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "1", 
",", "1", "]", "]", ")", ",", "\"right\"", ":", "(", "[", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "1", ",", "1", "]", "]", ")", ",", "\"front\"", ":", "(", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "-", "1", ",", "-", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "1", ",", "1", "]", "]", ")", ",", "\"back\"", ":", "(", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ",", "[", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", "]", ",", "[", "[", "-", "1", ",", "-", "1", "]", ",", "[", "1", ",", "1", "]", "]", ")", "}", "plane_list", "=", "[", "]", "for", "side", "in", "cube", ":", "(", "Xs", ",", "Ys", ",", "Zs", ")", "=", "(", "np", ".", "asarray", "(", "cube", "[", "side", "]", "[", "0", "]", ")", "*", "scale", ",", "np", ".", "asarray", "(", "cube", "[", "side", "]", "[", "1", "]", ")", "*", "scale", ",", "np", ".", "asarray", "(", "cube", "[", "side", "]", "[", "2", "]", ")", "*", "scale", ")", "plane_list", ".", "append", "(", "ax", ".", "plot_wireframe", "(", "Xs", ",", "Ys", ",", "Zs", ",", "rstride", "=", "1", ",", "cstride", "=", "1", ",", "color", "=", "'black'", ",", "linewidth", "=", "1", ")", ")", "return", "plane_list", "def", "plot_square", "(", "ax", ",", "scale", "=", "1", ")", ":", "ax", ".", "add_patch", "(", "patches", ".", "Rectangle", "(", "scale", "*", "[", "-", "1", ",", "-", "1", "]", ",", "scale", "*", "2", ",", "scale", "*", "2", ",", "fill", "=", "False", ",", "edgecolor", "=", "'black'", ",", "linewidth", "=", "1", ")", ")", "def", "update_lines_parallel", "(", "num", ",", "data_lines", ",", "lines", ",", "trail_lines", ",", "cube_scale", ",", "tail_duration", "=", "2", ",", "rotations", "=", "2", ",", "zoom", "=", "1", ",", "chemtrails", "=", "False", ",", "elev", "=", "10", ")", ":", "if", "hasattr", "(", "update_lines_parallel", ",", "'planes'", ")", ":", "for", "plane", "in", "update_lines_parallel", ".", "planes", ":", "plane", ".", "remove", "(", ")", "update_lines_parallel", ".", "planes", "=", "plot_cube", "(", "cube_scale", ")", "ax", ".", "view_init", "(", "elev", "=", "10", ",", "azim", "=", "rotations", "*", "(", "360", "*", "(", "num", "/", "data_lines", "[", "0", "]", ".", "shape", "[", "0", "]", ")", ")", ")", "ax", ".", "dist", "=", "9", "-", "zoom", "for", "line", ",", "data", ",", "trail", "in", "zip", "(", "lines", ",", "data_lines", ",", "trail_lines", ")", ":", "if", "(", "precog", "and", "chemtrails", ")", "or", "bullettime", ":", "trail", ".", "set_data", "(", "data", "[", ":", ",", "0", ":", "2", "]", ".", "T", ")", "trail", ".", "set_3d_properties", "(", "data", "[", ":", ",", "2", "]", ")", "elif", "chemtrails", ":", "trail", ".", "set_data", "(", "data", "[", "0", ":", "num", "-", "tail_duration", "+", "1", ",", "0", ":", "2", "]", ".", "T", ")", "trail", ".", "set_3d_properties", "(", "data", "[", "0", ":", "num", "-", "tail_duration", "+", "1", ",", "2", "]", ")", "elif", "precog", ":", "trail", ".", "set_data", "(", "data", "[", "num", "+", "1", ":", ",", "0", ":", "2", "]", ".", "T", ")", "trail", ".", "set_3d_properties", "(", "data", "[", "num", "+", "1", ":", ",", "2", "]", ")", "if", "num", "<=", "tail_duration", ":", "line", ".", "set_data", "(", "data", "[", "0", ":", "num", "+", "1", ",", "0", ":", "2", "]", ".", "T", ")", "line", 
".", "set_3d_properties", "(", "data", "[", "0", ":", "num", "+", "1", ",", "2", "]", ")", "else", ":", "line", ".", "set_data", "(", "data", "[", "num", "-", "tail_duration", ":", "num", "+", "1", ",", "0", ":", "2", "]", ".", "T", ")", "line", ".", "set_3d_properties", "(", "data", "[", "num", "-", "tail_duration", ":", "num", "+", "1", ",", "2", "]", ")", "return", "lines", ",", "trail_lines", "def", "update_lines_spin", "(", "num", ",", "data_lines", ",", "lines", ",", "cube_scale", ",", "rotations", "=", "2", ",", "zoom", "=", "1", ",", "elev", "=", "10", ")", ":", "if", "hasattr", "(", "update_lines_spin", ",", "'planes'", ")", ":", "for", "plane", "in", "update_lines_spin", ".", "planes", ":", "plane", ".", "remove", "(", ")", "update_lines_spin", ".", "planes", "=", "plot_cube", "(", "cube_scale", ")", "ax", ".", "view_init", "(", "elev", "=", "elev", ",", "azim", "=", "rotations", "*", "(", "360", "*", "(", "num", "/", "(", "frame_rate", "*", "duration", ")", ")", ")", ")", "ax", ".", "dist", "=", "9", "-", "zoom", "for", "line", ",", "data", "in", "zip", "(", "lines", ",", "data_lines", ")", ":", "line", ".", "set_data", "(", "data", "[", ":", ",", "0", ":", "2", "]", ".", "T", ")", "line", ".", "set_3d_properties", "(", "data", "[", ":", ",", "2", "]", ")", "return", "lines", "def", "dispatch_animate", "(", "x", ",", "ani_params", ")", ":", "if", "x", "[", "0", "]", ".", "shape", "[", "1", "]", "is", "3", ":", "return", "animate_plot3D", "(", "x", ",", "*", "*", "ani_params", ")", "def", "animate_plot3D", "(", "x", ",", "tail_duration", "=", "2", ",", "rotations", "=", "2", ",", "zoom", "=", "1", ",", "chemtrails", "=", "False", ",", "frame_rate", "=", "50", ",", "elev", "=", "10", ",", "style", "=", "'parallel'", ")", ":", "# initialize plot", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ",", "projection", "=", "'3d'", ")", "# create lines", "if", "fmt", "is", "not", "None", ":", "lines", "=", "[", "ax", ".", "plot", "(", "dat", "[", "0", ":", "1", ",", "0", "]", ",", "dat", "[", "0", ":", "1", ",", "1", "]", ",", "dat", "[", "0", ":", "1", ",", "2", "]", ",", "fmt", "[", "idx", "]", ",", "linewidth", "=", "1", ",", "*", "*", "kwargs_list", "[", "idx", "]", ")", "[", "0", "]", "for", "idx", ",", "dat", "in", "enumerate", "(", "x", ")", "]", "if", "is_line", "(", "fmt", ")", ":", "trail", "=", "[", "ax", ".", "plot", "(", "dat", "[", "0", ":", "1", ",", "0", "]", ",", "dat", "[", "0", ":", "1", ",", "1", "]", ",", "dat", "[", "0", ":", "1", ",", "2", "]", ",", "fmt", "[", "idx", "]", ",", "alpha", "=", ".3", ",", "linewidth", "=", "1", ",", "*", "*", "kwargs_list", "[", "idx", "]", ")", "[", "0", "]", "for", "idx", ",", "dat", "in", "enumerate", "(", "x", ")", "]", "else", ":", "lines", "=", "[", "ax", ".", "plot", "(", "dat", "[", "0", ":", "1", ",", "0", "]", ",", "dat", "[", "0", ":", "1", ",", "1", "]", ",", "dat", "[", "0", ":", "1", ",", "2", "]", ",", "linewidth", "=", "1", ",", "*", "*", "kwargs_list", "[", "idx", "]", ")", "[", "0", "]", "for", "idx", ",", "dat", "in", "enumerate", "(", "x", ")", "]", "if", "is_line", "(", "fmt", ")", ":", "trail", "=", "[", "ax", ".", "plot", "(", "dat", "[", "0", ":", "1", ",", "0", "]", ",", "dat", "[", "0", ":", "1", ",", "1", "]", ",", "dat", "[", "0", ":", "1", ",", "2", "]", ",", "alpha", "=", ".3", ",", "linewidth", "=", "1", ",", "*", "*", "kwargs_list", "[", "idx", "]", ")", "[", "0", "]", "for", "idx", ",", "dat", "in", "enumerate", "(", "x", ")", "]", "if", "tail_duration", 
"==", "0", ":", "tail_duration", "=", "1", "else", ":", "tail_duration", "=", "int", "(", "frame_rate", "*", "tail_duration", ")", "# get line animation", "if", "style", "in", "[", "'parallel'", ",", "True", "]", ":", "line_ani", "=", "animation", ".", "FuncAnimation", "(", "fig", ",", "update_lines_parallel", ",", "x", "[", "0", "]", ".", "shape", "[", "0", "]", ",", "fargs", "=", "(", "x", ",", "lines", ",", "trail", ",", "1", ",", "tail_duration", ",", "rotations", ",", "zoom", ",", "chemtrails", ",", "elev", ")", ",", "interval", "=", "1000", "/", "frame_rate", ",", "blit", "=", "False", ",", "repeat", "=", "False", ")", "elif", "style", "==", "'spin'", ":", "line_ani", "=", "animation", ".", "FuncAnimation", "(", "fig", ",", "update_lines_spin", ",", "frame_rate", "*", "duration", ",", "fargs", "=", "(", "x", ",", "lines", ",", "1", ",", "rotations", ",", "zoom", ",", "elev", ")", ",", "interval", "=", "1000", "/", "frame_rate", ",", "blit", "=", "False", ",", "repeat", "=", "False", ")", "return", "fig", ",", "ax", ",", "x", ",", "line_ani", "# if a single point, but formatted as a line, replace with a point", "for", "i", ",", "(", "xi", ",", "fi", ")", "in", "enumerate", "(", "zip", "(", "x", ",", "fmt", ")", ")", ":", "if", "xi", ".", "shape", "[", "0", "]", "==", "1", "and", "fi", "in", "(", "'-'", ",", "':'", ",", "'--'", ")", ":", "fmt", "[", "i", "]", "=", "'.'", "if", "not", "show", ":", "# prevents the backend from rendering this plot", "plt", ".", "ioff", "(", ")", "if", "animate", "in", "[", "True", ",", "'parallel'", ",", "'spin'", "]", ":", "assert", "x", "[", "0", "]", ".", "shape", "[", "1", "]", "is", "3", ",", "\"Animations are currently only supported for 3d plots.\"", "# animation params", "ani_params", "=", "dict", "(", "tail_duration", "=", "tail_duration", ",", "rotations", "=", "rotations", ",", "zoom", "=", "zoom", ",", "chemtrails", "=", "chemtrails", ",", "frame_rate", "=", "frame_rate", ",", "elev", "=", "elev", ",", "style", "=", "animate", ")", "# dispatch animation", "fig", ",", "ax", ",", "data", ",", "line_ani", "=", "dispatch_animate", "(", "x", ",", "ani_params", ")", "else", ":", "# dispatch static", "fig", ",", "ax", ",", "data", "=", "dispatch_static", "(", "x", ",", "ax", ")", "# if 3d, plot the cube", "if", "x", "[", "0", "]", ".", "shape", "[", "1", "]", "is", "3", ":", "# set cube scale", "cube_scale", "=", "1", "# plot cube", "plot_cube", "(", "cube_scale", ")", "# set the axes properties", "ax", ".", "set_xlim3d", "(", "[", "-", "cube_scale", ",", "cube_scale", "]", ")", "ax", ".", "set_ylim3d", "(", "[", "-", "cube_scale", ",", "cube_scale", "]", ")", "ax", ".", "set_zlim3d", "(", "[", "-", "cube_scale", ",", "cube_scale", "]", ")", "# initialize the view", "ax", ".", "view_init", "(", "elev", "=", "elev", ",", "azim", "=", "azim", ")", "elif", "x", "[", "0", "]", ".", "shape", "[", "1", "]", "is", "2", ":", "# plot square", "plot_square", "(", "ax", ")", "# set axes", "ax", ".", "set_xlim", "(", "-", "1.1", ",", "1.1", ")", "ax", ".", "set_ylim", "(", "-", "1.1", ",", "1.1", ")", "# set line_ani to empty", "line_ani", "=", "None", "# remove axes", "ax", ".", "set_axis_off", "(", ")", "# add labels", "add_labels", "(", "x", ",", "labels", ",", "explore", "=", "explore", ")", "# add title", "if", "title", "is", "not", "None", ":", "ax", ".", "set_title", "(", "title", ")", "# add legend", "if", "legend", "is", "not", "None", ":", "ax", ".", "legend", "(", ")", "if", "size", "is", "not", "None", ":", "fig", ".", "set_size_inches", "(", "size", 
")", "return", "fig", ",", "ax", ",", "data", ",", "line_ani" ]
https://github.com/ContextLab/hypertools/blob/948050a22b345c7dcccf729672c76f49609b1ac8/hypertools/plot/draw.py#L16-L448
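A hedged usage sketch for this private helper (normally reached through hypertools' public plot function): the trajectory data, fmt strings, and empty per-trace kwargs below are illustrative assumptions, not values from the source.

import numpy as np
from hypertools.plot.draw import _draw  # private API; subject to change

# two synthetic 3D trajectories with one matplotlib format string per trace
x = [np.random.randn(50, 3), np.random.randn(50, 3)]
fig, ax, data, line_ani = _draw(x,
                                fmt=['-', ':'],
                                kwargs_list=[{}, {}],
                                animate='spin',  # rotate the camera for frame_rate*duration frames
                                duration=10,
                                show=False)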
nathanlopez/Stitch
8e22e91c94237959c02d521aab58dc7e3d994cea
Application/stitch_lnxshell.py
python
st_lnxshell.help_location
(self)
[]
def help_location(self): st_help_location()
[ "def", "help_location", "(", "self", ")", ":", "st_help_location", "(", ")" ]
https://github.com/nathanlopez/Stitch/blob/8e22e91c94237959c02d521aab58dc7e3d994cea/Application/stitch_lnxshell.py#L226-L226
lalor/python_for_linux_system_administration
2a002fcab3e74e82287400556dee7909e1996d10
chapter11/section6/class_person1.py
python
Person.__init__
(self, name, age)
[]
def __init__(self, name, age):
    self.name = name
    self.age = age
[ "def", "__init__", "(", "self", ",", "name", ",", "age", ")", ":", "self", ".", "name", "=", "name", "self", ".", "age", "=", "age" ]
https://github.com/lalor/python_for_linux_system_administration/blob/2a002fcab3e74e82287400556dee7909e1996d10/chapter11/section6/class_person1.py#L2-L4
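A quick usage example for this textbook class (the instance values are illustrative):

p = Person('Alice', 30)
print(p.name, p.age)  # Alice 30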
deanishe/alfred-repos
7f7b3999331808cb58fc33e8793f6be692ed9fe5
src/workflow/update.py
python
Version.__str__
(self)
return vstr
Return semantic version string.
Return semantic version string.
[ "Return", "semantic", "version", "string", "." ]
def __str__(self):
    """Return semantic version string."""
    vstr = '{0}.{1}.{2}'.format(self.major, self.minor, self.patch)
    if self.suffix:
        vstr = '{0}-{1}'.format(vstr, self.suffix)
    if self.build:
        vstr = '{0}+{1}'.format(vstr, self.build)
    return vstr
[ "def", "__str__", "(", "self", ")", ":", "vstr", "=", "'{0}.{1}.{2}'", ".", "format", "(", "self", ".", "major", ",", "self", ".", "minor", ",", "self", ".", "patch", ")", "if", "self", ".", "suffix", ":", "vstr", "=", "'{0}-{1}'", ".", "format", "(", "vstr", ",", "self", ".", "suffix", ")", "if", "self", ".", "build", ":", "vstr", "=", "'{0}+{1}'", ".", "format", "(", "vstr", ",", "self", ".", "build", ")", "return", "vstr" ]
https://github.com/deanishe/alfred-repos/blob/7f7b3999331808cb58fc33e8793f6be692ed9fe5/src/workflow/update.py#L333-L340
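The formatting rule is easy to check with a standalone sketch that mirrors the method body (the attribute values here are made up):

def format_version(major, minor, patch, suffix=None, build=None):
    # MAJOR.MINOR.PATCH, then optional -SUFFIX and +BUILD, as in __str__ above
    vstr = '{0}.{1}.{2}'.format(major, minor, patch)
    if suffix:
        vstr = '{0}-{1}'.format(vstr, suffix)
    if build:
        vstr = '{0}+{1}'.format(vstr, build)
    return vstr

print(format_version(1, 4, 2, 'beta', 'exp.sha.5114f85'))  # 1.4.2-beta+exp.sha.5114f85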
buffer/thug
96ccd5bb1a45375ad665dfb8fb975978bf4659cb
thug/DOM/Window.py
python
Window.btoa
(self, s)
return base64.b64encode(s)
The btoa method encodes a string in base-64
The btoa method encodes a string in base-64
[ "The", "btoa", "method", "encodes", "a", "string", "in", "base", "-", "64" ]
def btoa(self, s):
    """
    The btoa method encodes a string in base-64
    """
    if isinstance(s, str):
        s = s.encode()

    return base64.b64encode(s)
[ "def", "btoa", "(", "self", ",", "s", ")", ":", "if", "isinstance", "(", "s", ",", "str", ")", ":", "s", "=", "s", ".", "encode", "(", ")", "return", "base64", ".", "b64encode", "(", "s", ")" ]
https://github.com/buffer/thug/blob/96ccd5bb1a45375ad665dfb8fb975978bf4659cb/thug/DOM/Window.py#L1034-L1041
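The same str-to-bytes handling can be reproduced with the standard library alone; this mirrors the method body rather than Thug's API:

import base64

def btoa(s):
    # encode str input to bytes first, exactly as Window.btoa does
    if isinstance(s, str):
        s = s.encode()
    return base64.b64encode(s)

print(btoa('hello'))   # b'aGVsbG8='
print(btoa(b'hello'))  # b'aGVsbG8='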
bbfamily/abu
2de85ae57923a720dac99a545b4f856f6b87304b
abupy/UtilBu/ABuKLUtil.py
python
qcut_change_vc
(df, q=10)
return _df_dispatch_concat(df, _qcut_change_vc)
eg:
    tsla = ABuSymbolPd.make_kl_df('usTSLA')
    ABuKLUtil.qcut_change_vc(tsla)

out:
                  change
    0   [-10.45, -3.002]
    1   (-3.002, -1.666]
    2    (-1.666, -0.93]
    3    (-0.93, -0.396]
    4    (-0.396, 0.065]
    5      (0.065, 0.48]
    6      (0.48, 1.102]
    7     (1.102, 1.922]
    8     (1.922, 3.007]
    9     (3.007, 11.17]

:param df: a kl DataFrame formatted by abupy, or a dict or iterable sequence of them
:param q: the q parameter passed through to qcut; default 10 (deciles)
:return: pd.DataFrame
eg: tsla = ABuSymbolPd.make_kl_df('usTSLA') ABuKLUtil.qcut_change_vc(tsla)
[ "eg", ":", "tsla", "=", "ABuSymbolPd", ".", "make_kl_df", "(", "usTSLA", ")", "ABuKLUtil", ".", "qcut_change_vc", "(", "tsla", ")" ]
def qcut_change_vc(df, q=10):
    """
    eg:
        tsla = ABuSymbolPd.make_kl_df('usTSLA')
        ABuKLUtil.qcut_change_vc(tsla)

    out:
                      change
        0   [-10.45, -3.002]
        1   (-3.002, -1.666]
        2    (-1.666, -0.93]
        3    (-0.93, -0.396]
        4    (-0.396, 0.065]
        5      (0.065, 0.48]
        6      (0.48, 1.102]
        7     (1.102, 1.922]
        8     (1.922, 3.007]
        9     (3.007, 11.17]

    :param df: a kl DataFrame formatted by abupy, or a dict or iterable sequence of them
    :param q: the q parameter passed through to qcut; default 10 (deciles)
    :return: pd.DataFrame
    """
    def _qcut_change_vc(p_df, df_name=''):
        dww = pd.qcut(p_df.p_change, q).value_counts().index.values
        # wrap the Categories in a Series so they can go inside a DataFrame
        dww = pd.Series(dww)
        # sort the change bins from negative to positive
        dww.sort_values(inplace=True)
        dww = pd.DataFrame(dww)
        # renumber the index from 0 after sorting
        dww.index = np.arange(0, q)
        dww.columns = ['{}change'.format(df_name)]
        return dww

    return _df_dispatch_concat(df, _qcut_change_vc)
[ "def", "qcut_change_vc", "(", "df", ",", "q", "=", "10", ")", ":", "def", "_qcut_change_vc", "(", "p_df", ",", "df_name", "=", "''", ")", ":", "dww", "=", "pd", ".", "qcut", "(", "p_df", ".", "p_change", ",", "q", ")", ".", "value_counts", "(", ")", ".", "index", ".", "values", "# 构造Categories使用DataFrame套Series", "dww", "=", "pd", ".", "Series", "(", "dww", ")", "# 涨跌从负向正开始排序", "dww", ".", "sort_values", "(", "inplace", "=", "True", ")", "dww", "=", "pd", ".", "DataFrame", "(", "dww", ")", "# 排序后index重新从0开始排列", "dww", ".", "index", "=", "np", ".", "arange", "(", "0", ",", "q", ")", "dww", ".", "columns", "=", "[", "'{}change'", ".", "format", "(", "df_name", ")", "]", "return", "dww", "return", "_df_dispatch_concat", "(", "df", ",", "_qcut_change_vc", ")" ]
https://github.com/bbfamily/abu/blob/2de85ae57923a720dac99a545b4f856f6b87304b/abupy/UtilBu/ABuKLUtil.py#L196-L232
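A self-contained sketch of the inner _qcut_change_vc step on synthetic data, assuming only a DataFrame with a p_change column like abupy's kl frames:

import numpy as np
import pandas as pd

rng = np.random.RandomState(0)
kl = pd.DataFrame({'p_change': rng.normal(0, 2, 500)})  # fake daily % changes

q = 10
dww = pd.Series(pd.qcut(kl.p_change, q).value_counts().index.values)
dww.sort_values(inplace=True)  # order the bins from losses to gains
dww = pd.DataFrame(dww)
dww.index = np.arange(0, q)    # renumber 0..q-1 after sorting
dww.columns = ['change']
print(dww)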
ricequant/rqalpha
d8b345ca3fde299e061c6a89c1f2c362c3584c96
rqalpha/_version.py
python
run_command
(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None)
return stdout, p.returncode
Call the given command(s).
Call the given command(s).
[ "Call", "the", "given", "command", "(", "s", ")", "." ]
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s)."""
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode
[ "def", "run_command", "(", "commands", ",", "args", ",", "cwd", "=", "None", ",", "verbose", "=", "False", ",", "hide_stderr", "=", "False", ",", "env", "=", "None", ")", ":", "assert", "isinstance", "(", "commands", ",", "list", ")", "p", "=", "None", "for", "c", "in", "commands", ":", "try", ":", "dispcmd", "=", "str", "(", "[", "c", "]", "+", "args", ")", "# remember shell=False, so use git.cmd on windows, not just git", "p", "=", "subprocess", ".", "Popen", "(", "[", "c", "]", "+", "args", ",", "cwd", "=", "cwd", ",", "env", "=", "env", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "(", "subprocess", ".", "PIPE", "if", "hide_stderr", "else", "None", ")", ")", "break", "except", "EnvironmentError", ":", "e", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "continue", "if", "verbose", ":", "print", "(", "\"unable to run %s\"", "%", "dispcmd", ")", "print", "(", "e", ")", "return", "None", ",", "None", "else", ":", "if", "verbose", ":", "print", "(", "\"unable to find command, tried %s\"", "%", "(", "commands", ",", ")", ")", "return", "None", ",", "None", "stdout", "=", "p", ".", "communicate", "(", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "stdout", "=", "stdout", ".", "decode", "(", ")", "if", "p", ".", "returncode", "!=", "0", ":", "if", "verbose", ":", "print", "(", "\"unable to run %s (error)\"", "%", "dispcmd", ")", "print", "(", "\"stdout was %s\"", "%", "stdout", ")", "return", "None", ",", "p", ".", "returncode", "return", "stdout", ",", "p", ".", "returncode" ]
https://github.com/ricequant/rqalpha/blob/d8b345ca3fde299e061c6a89c1f2c362c3584c96/rqalpha/_version.py#L70-L104
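A typical versioneer-style call, sketched under the assumption that git is on PATH (the graceful (None, None) fallback is the point of the multi-candidate loop):

stdout, rc = run_command(['git'], ['describe', '--tags', '--always'],
                         cwd='.', verbose=True)
if stdout is not None and rc == 0:
    print('described as', stdout)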
waveform80/picamera
7e4f1d379d698c44501fb84b886fadf3fc164b70
picamera/camera.py
python
PiCamera.stop_recording
(self, splitter_port=1)
Stop recording video from the camera.

After calling this method the video encoder will be shut down and
output will stop being written to the file-like object specified with
:meth:`start_recording`. If an error occurred during recording and
:meth:`wait_recording` has not been called since the error then this
method will raise the exception.

The *splitter_port* parameter specifies which port of the video
splitter the encoder you wish to stop is attached to. This defaults to
``1`` and most users will have no need to specify anything different.
Valid values are between ``0`` and ``3`` inclusive.

.. versionchanged:: 1.3
    The *splitter_port* parameter was added
Stop recording video from the camera.
[ "Stop", "recording", "video", "from", "the", "camera", "." ]
def stop_recording(self, splitter_port=1):
    """
    Stop recording video from the camera.

    After calling this method the video encoder will be shut down and
    output will stop being written to the file-like object specified
    with :meth:`start_recording`. If an error occurred during recording
    and :meth:`wait_recording` has not been called since the error then
    this method will raise the exception.

    The *splitter_port* parameter specifies which port of the video
    splitter the encoder you wish to stop is attached to. This defaults
    to ``1`` and most users will have no need to specify anything
    different. Valid values are between ``0`` and ``3`` inclusive.

    .. versionchanged:: 1.3
        The *splitter_port* parameter was added
    """
    try:
        with self._encoders_lock:
            encoder = self._encoders[splitter_port]
    except KeyError:
        raise PiCameraNotRecording(
            'There is no recording in progress on '
            'port %d' % splitter_port)
    else:
        try:
            self.wait_recording(0, splitter_port)
        finally:
            encoder.close()
            with self._encoders_lock:
                del self._encoders[splitter_port]
[ "def", "stop_recording", "(", "self", ",", "splitter_port", "=", "1", ")", ":", "try", ":", "with", "self", ".", "_encoders_lock", ":", "encoder", "=", "self", ".", "_encoders", "[", "splitter_port", "]", "except", "KeyError", ":", "raise", "PiCameraNotRecording", "(", "'There is no recording in progress on '", "'port %d'", "%", "splitter_port", ")", "else", ":", "try", ":", "self", ".", "wait_recording", "(", "0", ",", "splitter_port", ")", "finally", ":", "encoder", ".", "close", "(", ")", "with", "self", ".", "_encoders_lock", ":", "del", "self", ".", "_encoders", "[", "splitter_port", "]" ]
https://github.com/waveform80/picamera/blob/7e4f1d379d698c44501fb84b886fadf3fc164b70/picamera/camera.py#L1357-L1388
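The start/wait/stop pattern the docstring refers to looks like this on actual Pi hardware (a sketch; start_recording and wait_recording are the documented companion methods):

import picamera

with picamera.PiCamera() as camera:
    camera.start_recording('clip.h264', splitter_port=1)
    camera.wait_recording(10, splitter_port=1)  # re-raises any encoder error
    camera.stop_recording(splitter_port=1)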
smart-mobile-software/gitstack
d9fee8f414f202143eb6e620529e8e5539a2af56
python/Lib/mailbox.py
python
mbox._generate_toc
(self)
Generate key-to-(start, stop) table of contents.
Generate key-to-(start, stop) table of contents.
[ "Generate", "key", "-", "to", "-", "(", "start", "stop", ")", "table", "of", "contents", "." ]
def _generate_toc(self):
    """Generate key-to-(start, stop) table of contents."""
    starts, stops = [], []
    self._file.seek(0)
    while True:
        line_pos = self._file.tell()
        line = self._file.readline()
        if line.startswith('From '):
            if len(stops) < len(starts):
                stops.append(line_pos - len(os.linesep))
            starts.append(line_pos)
        elif line == '':
            stops.append(line_pos)
            break
    self._toc = dict(enumerate(zip(starts, stops)))
    self._next_key = len(self._toc)
    self._file_length = self._file.tell()
[ "def", "_generate_toc", "(", "self", ")", ":", "starts", ",", "stops", "=", "[", "]", ",", "[", "]", "self", ".", "_file", ".", "seek", "(", "0", ")", "while", "True", ":", "line_pos", "=", "self", ".", "_file", ".", "tell", "(", ")", "line", "=", "self", ".", "_file", ".", "readline", "(", ")", "if", "line", ".", "startswith", "(", "'From '", ")", ":", "if", "len", "(", "stops", ")", "<", "len", "(", "starts", ")", ":", "stops", ".", "append", "(", "line_pos", "-", "len", "(", "os", ".", "linesep", ")", ")", "starts", ".", "append", "(", "line_pos", ")", "elif", "line", "==", "''", ":", "stops", ".", "append", "(", "line_pos", ")", "break", "self", ".", "_toc", "=", "dict", "(", "enumerate", "(", "zip", "(", "starts", ",", "stops", ")", ")", ")", "self", ".", "_next_key", "=", "len", "(", "self", ".", "_toc", ")", "self", ".", "_file_length", "=", "self", ".", "_file", ".", "tell", "(", ")" ]
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/mailbox.py#L793-L809
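The scanning logic stands alone: a message starts at each 'From ' line and stops just before the next one (or at EOF). A minimal sketch against an in-memory mailbox:

import io
import os

f = io.StringIO('From a@x\nhello\n\nFrom b@y\nworld\n')
starts, stops = [], []
while True:
    line_pos = f.tell()
    line = f.readline()
    if line.startswith('From '):
        if len(stops) < len(starts):
            stops.append(line_pos - len(os.linesep))
        starts.append(line_pos)
    elif line == '':
        stops.append(line_pos)
        break
print(dict(enumerate(zip(starts, stops))))  # e.g. {0: (0, ...), 1: (...)}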
microsoft/dowhy
8b2cf6a722572a7e0d6a1f7fef22e743dbd3b13a
dowhy/causal_estimator.py
python
CausalEstimate.get_standard_error
(self, method=None, **kwargs)
return std_error
Get standard error of the obtained estimate. By default, this is done
with the help of bootstrapped standard errors but can be overridden if
the specific estimator implements other methods of estimating standard
error.

If the method provided is not bootstrap, this function calls the
implementation of the specific estimator.

:param method: Method for computing the standard error.
:param kwargs: Other optional parameters to be passed to the
    estimating method.
:returns: Standard error of the causal estimate.
Get standard error of the obtained estimate.
[ "Get", "standard", "error", "of", "the", "obtained", "estimate", "." ]
def get_standard_error(self, method=None, **kwargs):
    """
    Get standard error of the obtained estimate. By default, this is
    done with the help of bootstrapped standard errors but can be
    overridden if the specific estimator implements other methods of
    estimating standard error.

    If the method provided is not bootstrap, this function calls the
    implementation of the specific estimator.

    :param method: Method for computing the standard error.
    :param kwargs: Other optional parameters to be passed to the
        estimating method.
    :returns: Standard error of the causal estimate.
    """
    std_error = self.estimator.estimate_std_error(method=method, **kwargs)
    return std_error
[ "def", "get_standard_error", "(", "self", ",", "method", "=", "None", ",", "*", "*", "kwargs", ")", ":", "std_error", "=", "self", ".", "estimator", ".", "estimate_std_error", "(", "method", "=", "method", ",", "*", "*", "kwargs", ")", "return", "std_error" ]
https://github.com/microsoft/dowhy/blob/8b2cf6a722572a7e0d6a1f7fef22e743dbd3b13a/dowhy/causal_estimator.py#L708-L722
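Downstream use is a single call on the estimate object returned by estimate_effect; a sketch, assuming a fitted estimate is already in scope:

se = estimate.get_standard_error()  # bootstrapped by default
print('standard error:', se)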
jesseweisberg/moveo_ros
b9282bdadbf2505a26d3b94b91e60a98d86efa34
object_detector_app/object_detection/models/ssd_inception_v2_feature_extractor.py
python
SSDInceptionV2FeatureExtractor.extract_features
(self, preprocessed_inputs)
return feature_maps.values()
Extract features from preprocessed inputs.

Args:
  preprocessed_inputs: a [batch, height, width, channels] float tensor
    representing a batch of images.

Returns:
  feature_maps: a list of tensors where the ith tensor has shape
    [batch, height_i, width_i, depth_i]
Extract features from preprocessed inputs.
[ "Extract", "features", "from", "preprocessed", "inputs", "." ]
def extract_features(self, preprocessed_inputs):
  """Extract features from preprocessed inputs.

  Args:
    preprocessed_inputs: a [batch, height, width, channels] float tensor
      representing a batch of images.

  Returns:
    feature_maps: a list of tensors where the ith tensor has shape
      [batch, height_i, width_i, depth_i]
  """
  preprocessed_inputs.get_shape().assert_has_rank(4)
  shape_assert = tf.Assert(
      tf.logical_and(tf.greater_equal(tf.shape(preprocessed_inputs)[1], 33),
                     tf.greater_equal(tf.shape(preprocessed_inputs)[2], 33)),
      ['image size must at least be 33 in both height and width.'])

  feature_map_layout = {
      'from_layer': ['Mixed_4c', 'Mixed_5c', '', '', '', ''],
      'layer_depth': [-1, -1, 512, 256, 256, 128],
  }

  with tf.control_dependencies([shape_assert]):
    with slim.arg_scope(self._conv_hyperparams):
      with tf.variable_scope('InceptionV2',
                             reuse=self._reuse_weights) as scope:
        _, image_features = inception_v2.inception_v2_base(
            preprocessed_inputs,
            final_endpoint='Mixed_5c',
            min_depth=self._min_depth,
            depth_multiplier=self._depth_multiplier,
            scope=scope)
        feature_maps = feature_map_generators.multi_resolution_feature_maps(
            feature_map_layout=feature_map_layout,
            depth_multiplier=self._depth_multiplier,
            min_depth=self._min_depth,
            insert_1x1_conv=True,
            image_features=image_features)

  return feature_maps.values()
[ "def", "extract_features", "(", "self", ",", "preprocessed_inputs", ")", ":", "preprocessed_inputs", ".", "get_shape", "(", ")", ".", "assert_has_rank", "(", "4", ")", "shape_assert", "=", "tf", ".", "Assert", "(", "tf", ".", "logical_and", "(", "tf", ".", "greater_equal", "(", "tf", ".", "shape", "(", "preprocessed_inputs", ")", "[", "1", "]", ",", "33", ")", ",", "tf", ".", "greater_equal", "(", "tf", ".", "shape", "(", "preprocessed_inputs", ")", "[", "2", "]", ",", "33", ")", ")", ",", "[", "'image size must at least be 33 in both height and width.'", "]", ")", "feature_map_layout", "=", "{", "'from_layer'", ":", "[", "'Mixed_4c'", ",", "'Mixed_5c'", ",", "''", ",", "''", ",", "''", ",", "''", "]", ",", "'layer_depth'", ":", "[", "-", "1", ",", "-", "1", ",", "512", ",", "256", ",", "256", ",", "128", "]", ",", "}", "with", "tf", ".", "control_dependencies", "(", "[", "shape_assert", "]", ")", ":", "with", "slim", ".", "arg_scope", "(", "self", ".", "_conv_hyperparams", ")", ":", "with", "tf", ".", "variable_scope", "(", "'InceptionV2'", ",", "reuse", "=", "self", ".", "_reuse_weights", ")", "as", "scope", ":", "_", ",", "image_features", "=", "inception_v2", ".", "inception_v2_base", "(", "preprocessed_inputs", ",", "final_endpoint", "=", "'Mixed_5c'", ",", "min_depth", "=", "self", ".", "_min_depth", ",", "depth_multiplier", "=", "self", ".", "_depth_multiplier", ",", "scope", "=", "scope", ")", "feature_maps", "=", "feature_map_generators", ".", "multi_resolution_feature_maps", "(", "feature_map_layout", "=", "feature_map_layout", ",", "depth_multiplier", "=", "self", ".", "_depth_multiplier", ",", "min_depth", "=", "self", ".", "_min_depth", ",", "insert_1x1_conv", "=", "True", ",", "image_features", "=", "image_features", ")", "return", "feature_maps", ".", "values", "(", ")" ]
https://github.com/jesseweisberg/moveo_ros/blob/b9282bdadbf2505a26d3b94b91e60a98d86efa34/object_detector_app/object_detection/models/ssd_inception_v2_feature_extractor.py#L60-L99
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/demo/stt.py
python
DemoProvider.supported_bit_rates
(self)
return [AudioBitRates.BITRATE_16]
Return a list of supported bit rates.
Return a list of supported bit rates.
[ "Return", "a", "list", "of", "supported", "bit", "rates", "." ]
def supported_bit_rates(self) -> list[AudioBitRates]:
    """Return a list of supported bit rates."""
    return [AudioBitRates.BITRATE_16]
[ "def", "supported_bit_rates", "(", "self", ")", "->", "list", "[", "AudioBitRates", "]", ":", "return", "[", "AudioBitRates", ".", "BITRATE_16", "]" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/demo/stt.py#L43-L45
uber/fiber
ad6faf02b8e94dee498990e9fd9c588234666725
fiber/util.py
python
find_listen_address
()
return ip, ifce
Find an IP address for Fiber to use.
Find an IP address for Fiber to use.
[ "Find", "an", "IP", "address", "for", "Fiber", "to", "use", "." ]
def find_listen_address():
    """Find an IP address for Fiber to use."""
    ip = None
    ifce = None
    ifces = psutil.net_if_addrs()
    for ifce, addrs in ifces.items():
        if re.match(r"^eth", ifce) or re.match(r"^en", ifce):
            for snicaddr in addrs:
                # IPv4 only
                if snicaddr.family == socket.AF_INET:
                    ip = snicaddr.address
                    break
    return ip, ifce
[ "def", "find_listen_address", "(", ")", ":", "ip", "=", "None", "ifce", "=", "None", "ifces", "=", "psutil", ".", "net_if_addrs", "(", ")", "for", "ifce", ",", "addrs", "in", "ifces", ".", "items", "(", ")", ":", "if", "re", ".", "match", "(", "r\"^eth\"", ",", "ifce", ")", "or", "re", ".", "match", "(", "r\"^en\"", ",", "ifce", ")", ":", "for", "snicaddr", "in", "addrs", ":", "# IPv4 only", "if", "snicaddr", ".", "family", "==", "socket", ".", "AF_INET", ":", "ip", "=", "snicaddr", ".", "address", "break", "return", "ip", ",", "ifce" ]
https://github.com/uber/fiber/blob/ad6faf02b8e94dee498990e9fd9c588234666725/fiber/util.py#L111-L124
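Calling it is a two-tuple unpack; note that it returns (None, <last interface seen>) when no eth*/en* interface carries an IPv4 address:

ip, ifce = find_listen_address()
if ip is None:
    raise RuntimeError('no suitable eth*/en* IPv4 interface found')
print('Fiber will listen on {} ({})'.format(ip, ifce))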
JoneXiong/YouPBX
e24a8b74814761bc90fd86f4217c92ec7238874b
pbx/conf.py
python
ivr_menus
(fs_conf_path)
u''' ivr_menus
u''' ivr_menus
[ "u", "ivr_menus" ]
def ivr_menus(fs_conf_path):
    u'''
    ivr_menus
    '''
    try:
        from apps.extend.models import IVR
    except:
        return
    menus = IVR.objects.all()
    m_data = jinja2_template('ivr_menus.xml', menus=menus)
    m_file = os.path.join(fs_conf_path, 'ivr_menus', 'oe_ivr.xml')
    f = open(m_file, 'w+')
    f.write(m_data)
    f.close()
[ "def", "ivr_menus", "(", "fs_conf_path", ")", ":", "try", ":", "from", "apps", ".", "extend", ".", "models", "import", "IVR", "except", ":", "return", "menus", "=", "IVR", ".", "objects", ".", "all", "(", ")", "m_data", "=", "jinja2_template", "(", "'ivr_menus.xml'", ",", "menus", "=", "menus", ")", "m_file", "=", "os", ".", "path", ".", "join", "(", "fs_conf_path", ",", "'ivr_menus'", ",", "'oe_ivr.xml'", ")", "f", "=", "open", "(", "m_file", ",", "'w+'", ")", "f", ".", "write", "(", "m_data", ")", "f", ".", "close", "(", ")" ]
https://github.com/JoneXiong/YouPBX/blob/e24a8b74814761bc90fd86f4217c92ec7238874b/pbx/conf.py#L65-L78
py2neo-org/py2neo
2e46bbf4d622f53282e796ffc521fc4bc6d0b60d
py2neo/vendor/bottle.py
python
BaseRequest.body
(self)
return self._body
The HTTP request body as a seek-able file-like object. Depending on :attr:`MEMFILE_MAX`, this is either a temporary file or a :class:`io.BytesIO` instance. Accessing this property for the first time reads and replaces the ``wsgi.input`` environ variable. Subsequent accesses just do a `seek(0)` on the file object.
The HTTP request body as a seek-able file-like object. Depending on :attr:`MEMFILE_MAX`, this is either a temporary file or a :class:`io.BytesIO` instance. Accessing this property for the first time reads and replaces the ``wsgi.input`` environ variable. Subsequent accesses just do a `seek(0)` on the file object.
[ "The", "HTTP", "request", "body", "as", "a", "seek", "-", "able", "file", "-", "like", "object", ".", "Depending", "on", ":", "attr", ":", "MEMFILE_MAX", "this", "is", "either", "a", "temporary", "file", "or", "a", ":", "class", ":", "io", ".", "BytesIO", "instance", ".", "Accessing", "this", "property", "for", "the", "first", "time", "reads", "and", "replaces", "the", "wsgi", ".", "input", "environ", "variable", ".", "Subsequent", "accesses", "just", "do", "a", "seek", "(", "0", ")", "on", "the", "file", "object", "." ]
def body(self):
    """ The HTTP request body as a seek-able file-like object. Depending on
        :attr:`MEMFILE_MAX`, this is either a temporary file or a
        :class:`io.BytesIO` instance. Accessing this property for the first
        time reads and replaces the ``wsgi.input`` environ variable.
        Subsequent accesses just do a `seek(0)` on the file object. """
    self._body.seek(0)
    return self._body
[ "def", "body", "(", "self", ")", ":", "self", ".", "_body", ".", "seek", "(", "0", ")", "return", "self", ".", "_body" ]
https://github.com/py2neo-org/py2neo/blob/2e46bbf4d622f53282e796ffc521fc4bc6d0b60d/py2neo/vendor/bottle.py#L1197-L1204
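The seek(0)-on-repeated-access behavior documented above can be seen with a bare io.BytesIO standing in for the buffered body:

import io

body = io.BytesIO(b"payload")  # stand-in for the buffered request body
print(body.read())             # b'payload'
body.seek(0)                   # what the property does on each access
print(body.read())             # b'payload' again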
astropy/photutils
3caa48e4e4d139976ed7457dc41583fb2c56ba20
photutils/aperture/ellipse.py
python
EllipticalAnnulus._to_patch
(self, origin=(0, 0), **kwargs)
Return a `~matplotlib.patches.patch` for the aperture.

Parameters
----------
origin : array_like, optional
    The ``(x, y)`` position of the origin of the displayed image.

**kwargs : `dict`
    Any keyword arguments accepted by `matplotlib.patches.Patch`.

Returns
-------
patch : `~matplotlib.patches.patch` or list of `~matplotlib.patches.patch`
    A patch for the aperture. If the aperture is scalar then a single
    `~matplotlib.patches.patch` is returned, otherwise a list of
    `~matplotlib.patches.patch` is returned.
Return a `~matplotlib.patches.patch` for the aperture.
[ "Return", "a", "~matplotlib", ".", "patches", ".", "patch", "for", "the", "aperture", "." ]
def _to_patch(self, origin=(0, 0), **kwargs):
    """
    Return a `~matplotlib.patches.patch` for the aperture.

    Parameters
    ----------
    origin : array_like, optional
        The ``(x, y)`` position of the origin of the displayed image.

    **kwargs : `dict`
        Any keyword arguments accepted by `matplotlib.patches.Patch`.

    Returns
    -------
    patch : `~matplotlib.patches.patch` or list of `~matplotlib.patches.patch`
        A patch for the aperture. If the aperture is scalar then a
        single `~matplotlib.patches.patch` is returned, otherwise a
        list of `~matplotlib.patches.patch` is returned.
    """
    import matplotlib.patches as mpatches

    xy_positions, patch_kwargs = self._define_patch_params(origin=origin,
                                                           **kwargs)

    patches = []
    theta_deg = self.theta * 180. / np.pi
    for xy_position in xy_positions:
        patch_inner = mpatches.Ellipse(xy_position, 2.*self.a_in,
                                       2.*self.b_in, theta_deg)
        patch_outer = mpatches.Ellipse(xy_position, 2.*self.a_out,
                                       2.*self.b_out, theta_deg)
        path = self._make_annulus_path(patch_inner, patch_outer)
        patches.append(mpatches.PathPatch(path, **patch_kwargs))

    if self.isscalar:
        return patches[0]
    else:
        return patches
[ "def", "_to_patch", "(", "self", ",", "origin", "=", "(", "0", ",", "0", ")", ",", "*", "*", "kwargs", ")", ":", "import", "matplotlib", ".", "patches", "as", "mpatches", "xy_positions", ",", "patch_kwargs", "=", "self", ".", "_define_patch_params", "(", "origin", "=", "origin", ",", "*", "*", "kwargs", ")", "patches", "=", "[", "]", "theta_deg", "=", "self", ".", "theta", "*", "180.", "/", "np", ".", "pi", "for", "xy_position", "in", "xy_positions", ":", "patch_inner", "=", "mpatches", ".", "Ellipse", "(", "xy_position", ",", "2.", "*", "self", ".", "a_in", ",", "2.", "*", "self", ".", "b_in", ",", "theta_deg", ")", "patch_outer", "=", "mpatches", ".", "Ellipse", "(", "xy_position", ",", "2.", "*", "self", ".", "a_out", ",", "2.", "*", "self", ".", "b_out", ",", "theta_deg", ")", "path", "=", "self", ".", "_make_annulus_path", "(", "patch_inner", ",", "patch_outer", ")", "patches", ".", "append", "(", "mpatches", ".", "PathPatch", "(", "path", ",", "*", "*", "patch_kwargs", ")", ")", "if", "self", ".", "isscalar", ":", "return", "patches", "[", "0", "]", "else", ":", "return", "patches" ]
https://github.com/astropy/photutils/blob/3caa48e4e4d139976ed7457dc41583fb2c56ba20/photutils/aperture/ellipse.py#L357-L396
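_to_patch is private, but photutils apertures expose a public plot() that renders the same inner/outer-ellipse annulus patch; a usage sketch, assuming photutils and matplotlib are installed (keyword names follow current photutils and may differ at the pinned commit):

import matplotlib.pyplot as plt
from photutils.aperture import EllipticalAnnulus

annulus = EllipticalAnnulus((50., 50.), a_in=10., a_out=20.,
                            b_out=10., b_in=5., theta=0.5)
fig, ax = plt.subplots()
ax.set_xlim(0, 100)
ax.set_ylim(0, 100)
annulus.plot(color='red')  # draws the PathPatch built by _to_patch
plt.show()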
Yelp/venv-update
5fb5491bd421fdd8ef3cff3faa5d4846b5985ec8
pip_faster.py
python
fresh_working_set
()
return WorkingSetPlusEditableInstalls()
return a pkg_resources "working set", representing the *currently* installed packages
return a pkg_resources "working set", representing the *currently* installed packages
[ "return", "a", "pkg_resources", "working", "set", "representing", "the", "*", "currently", "*", "installed", "packages" ]
def fresh_working_set():
    """return a pkg_resources "working set", representing the *currently* installed packages"""

    class WorkingSetPlusEditableInstalls(pkg_resources.WorkingSet):
        def __init__(self, *args, **kwargs):
            self._normalized_name_mapping = {}
            super(WorkingSetPlusEditableInstalls, self).__init__(*args, **kwargs)

        def add_entry(self, entry):
            """Same as the original .add_entry, but sets only=False, so that egg-links are honored."""
            logger.debug('working-set entry: %r', entry)
            self.entry_keys.setdefault(entry, [])
            self.entries.append(entry)
            for dist in pkg_resources.find_distributions(entry, False):
                # eggs override anything that's installed normally
                # fun fact: pkg_resources.working_set's results depend on the
                # ordering of os.listdir since the order of os.listdir is
                # entirely arbitrary (an implemenation detail of file system),
                # without calling site.main(), an .egg-link file may or may not
                # be honored, depending on the filesystem
                replace = (dist.precedence == pkg_resources.EGG_DIST)
                self._normalized_name_mapping[normalize_name(dist.key)] = dist.key
                self.add(dist, entry, False, replace=replace)

        def find_normalized(self, req):
            req = _package_req_to_pkg_resources_req(str(req))
            req.key = self._normalized_name_mapping.get(normalize_name(req.key), req.key)
            return self.find(req)

    return WorkingSetPlusEditableInstalls()
[ "def", "fresh_working_set", "(", ")", ":", "class", "WorkingSetPlusEditableInstalls", "(", "pkg_resources", ".", "WorkingSet", ")", ":", "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_normalized_name_mapping", "=", "{", "}", "super", "(", "WorkingSetPlusEditableInstalls", ",", "self", ")", ".", "__init__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "def", "add_entry", "(", "self", ",", "entry", ")", ":", "\"\"\"Same as the original .add_entry, but sets only=False, so that egg-links are honored.\"\"\"", "logger", ".", "debug", "(", "'working-set entry: %r'", ",", "entry", ")", "self", ".", "entry_keys", ".", "setdefault", "(", "entry", ",", "[", "]", ")", "self", ".", "entries", ".", "append", "(", "entry", ")", "for", "dist", "in", "pkg_resources", ".", "find_distributions", "(", "entry", ",", "False", ")", ":", "# eggs override anything that's installed normally", "# fun fact: pkg_resources.working_set's results depend on the", "# ordering of os.listdir since the order of os.listdir is", "# entirely arbitrary (an implemenation detail of file system),", "# without calling site.main(), an .egg-link file may or may not", "# be honored, depending on the filesystem", "replace", "=", "(", "dist", ".", "precedence", "==", "pkg_resources", ".", "EGG_DIST", ")", "self", ".", "_normalized_name_mapping", "[", "normalize_name", "(", "dist", ".", "key", ")", "]", "=", "dist", ".", "key", "self", ".", "add", "(", "dist", ",", "entry", ",", "False", ",", "replace", "=", "replace", ")", "def", "find_normalized", "(", "self", ",", "req", ")", ":", "req", "=", "_package_req_to_pkg_resources_req", "(", "str", "(", "req", ")", ")", "req", ".", "key", "=", "self", ".", "_normalized_name_mapping", ".", "get", "(", "normalize_name", "(", "req", ".", "key", ")", ",", "req", ".", "key", ")", "return", "self", ".", "find", "(", "req", ")", "return", "WorkingSetPlusEditableInstalls", "(", ")" ]
https://github.com/Yelp/venv-update/blob/5fb5491bd421fdd8ef3cff3faa5d4846b5985ec8/pip_faster.py#L250-L280
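The pkg_resources.WorkingSet base class extended above can be exercised on its own; a small sketch (note that pkg_resources is deprecated in recent setuptools releases):

import pkg_resources

# A WorkingSet built with no arguments snapshots the distributions
# currently visible on sys.path, which is what "fresh" refers to above.
ws = pkg_resources.WorkingSet()
for dist in list(ws)[:5]:
    print(dist.project_name, dist.version)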
QCoDeS/Qcodes
3cda2cef44812e2aa4672781f2423bf5f816f9f9
qcodes/instrument_drivers/tektronix/AWGFileParser.py
python
_unpacker
( binaryarray: np.ndarray, dacbitdepth: int = 14 )
return wf, m1, m2
Unpacks an awg-file integer wave into a waveform and two markers
in the same way as the AWG does. This can be useful for checking
how the signals are going to be interpreted by the instrument.

Args:
    binaryarray: A numpy array containing the packed waveform and
        markers.
    dacbitdepth: Specifies the bit depth for the digitisation of
        the waveform. Allowed values: 14, 8. Default: 14.

Returns:
    The waveform scaled to have values from -1 to 1, marker 1,
    marker 2.
Unpacks an awg-file integer wave into a waveform and two markers in the same way as the AWG does. This can be useful for checking how the signals are going to be interpreted by the instrument.
[ "Unpacks", "an", "awg", "-", "file", "integer", "wave", "into", "a", "waveform", "and", "two", "markers", "in", "the", "same", "way", "as", "the", "AWG", "does", ".", "This", "can", "be", "useful", "for", "checking", "how", "the", "signals", "are", "going", "to", "be", "interpreted", "by", "the", "instrument", "." ]
def _unpacker(
    binaryarray: np.ndarray, dacbitdepth: int = 14
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Unpacks an awg-file integer wave into a waveform and two markers
    in the same way as the AWG does. This can be useful for checking
    how the signals are going to be interpreted by the instrument.

    Args:
        binaryarray: A numpy array containing the packed waveform and
            markers.
        dacbitdepth: Specifies the bit depth for the digitisation of
            the waveform. Allowed values: 14, 8. Default: 14.

    Returns:
        The waveform scaled to have values from -1 to 1, marker 1,
        marker 2.
    """
    wflength = len(binaryarray)
    wf = np.zeros(wflength)
    m1 = np.zeros(wflength)
    m2 = np.zeros(wflength)

    for ii, bitnum in enumerate(binaryarray):
        bitstring = bin(bitnum)[2:].zfill(16)
        m2[ii] = int(bitstring[0])
        m1[ii] = int(bitstring[1])
        wf[ii] = (int(bitstring[2:], base=2)-2**13)/2**13
        # print(bitstring, int(bitstring[2:], base=2))

    return wf, m1, m2
[ "def", "_unpacker", "(", "binaryarray", ":", "np", ".", "ndarray", ",", "dacbitdepth", ":", "int", "=", "14", ")", "->", "Tuple", "[", "np", ".", "ndarray", ",", "np", ".", "ndarray", ",", "np", ".", "ndarray", "]", ":", "wflength", "=", "len", "(", "binaryarray", ")", "wf", "=", "np", ".", "zeros", "(", "wflength", ")", "m1", "=", "np", ".", "zeros", "(", "wflength", ")", "m2", "=", "np", ".", "zeros", "(", "wflength", ")", "for", "ii", ",", "bitnum", "in", "enumerate", "(", "binaryarray", ")", ":", "bitstring", "=", "bin", "(", "bitnum", ")", "[", "2", ":", "]", ".", "zfill", "(", "16", ")", "m2", "[", "ii", "]", "=", "int", "(", "bitstring", "[", "0", "]", ")", "m1", "[", "ii", "]", "=", "int", "(", "bitstring", "[", "1", "]", ")", "wf", "[", "ii", "]", "=", "(", "int", "(", "bitstring", "[", "2", ":", "]", ",", "base", "=", "2", ")", "-", "2", "**", "13", ")", "/", "2", "**", "13", "# print(bitstring, int(bitstring[2:], base=2))", "return", "wf", ",", "m1", ",", "m2" ]
https://github.com/QCoDeS/Qcodes/blob/3cda2cef44812e2aa4672781f2423bf5f816f9f9/qcodes/instrument_drivers/tektronix/AWGFileParser.py#L309-L340
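The 16-bit word layout decoded above (bit 15 = marker 2, bit 14 = marker 1, low 14 bits = the sample as an unsigned offset around 2**13) can be checked with a plain integer:

# One packed AWG word: m2=1, m1=0, waveform bits equal to 2**13 (i.e. 0.0).
word = 0b1010000000000000
bitstring = bin(word)[2:].zfill(16)
m2 = int(bitstring[0])
m1 = int(bitstring[1])
wf = (int(bitstring[2:], base=2) - 2**13) / 2**13
print(m2, m1, wf)  # 1 0 0.0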
selfteaching/selfteaching-python-camp
9982ee964b984595e7d664b07c389cddaf158f1e
19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/ipaddress.py
python
_BaseV6._compress_hextets
(cls, hextets)
return hextets
Compresses a list of hextets.

Compresses a list of strings, replacing the longest continuous
sequence of "0" in the list with "" and adding empty strings at
the beginning or at the end of the string such that subsequently
calling ":".join(hextets) will produce the compressed version of
the IPv6 address.

Args:
    hextets: A list of strings, the hextets to compress.

Returns:
    A list of strings.
Compresses a list of hextets.
[ "Compresses", "a", "list", "of", "hextets", "." ]
def _compress_hextets(cls, hextets):
    """Compresses a list of hextets.

    Compresses a list of strings, replacing the longest continuous
    sequence of "0" in the list with "" and adding empty strings at
    the beginning or at the end of the string such that subsequently
    calling ":".join(hextets) will produce the compressed version of
    the IPv6 address.

    Args:
        hextets: A list of strings, the hextets to compress.

    Returns:
        A list of strings.

    """
    best_doublecolon_start = -1
    best_doublecolon_len = 0
    doublecolon_start = -1
    doublecolon_len = 0
    for index, hextet in enumerate(hextets):
        if hextet == '0':
            doublecolon_len += 1
            if doublecolon_start == -1:
                # Start of a sequence of zeros.
                doublecolon_start = index
            if doublecolon_len > best_doublecolon_len:
                # This is the longest sequence of zeros so far.
                best_doublecolon_len = doublecolon_len
                best_doublecolon_start = doublecolon_start
        else:
            doublecolon_len = 0
            doublecolon_start = -1

    if best_doublecolon_len > 1:
        best_doublecolon_end = (best_doublecolon_start +
                                best_doublecolon_len)
        # For zeros at the end of the address.
        if best_doublecolon_end == len(hextets):
            hextets += ['']
        hextets[best_doublecolon_start:best_doublecolon_end] = ['']
        # For zeros at the beginning of the address.
        if best_doublecolon_start == 0:
            hextets = [''] + hextets

    return hextets
[ "def", "_compress_hextets", "(", "cls", ",", "hextets", ")", ":", "best_doublecolon_start", "=", "-", "1", "best_doublecolon_len", "=", "0", "doublecolon_start", "=", "-", "1", "doublecolon_len", "=", "0", "for", "index", ",", "hextet", "in", "enumerate", "(", "hextets", ")", ":", "if", "hextet", "==", "'0'", ":", "doublecolon_len", "+=", "1", "if", "doublecolon_start", "==", "-", "1", ":", "# Start of a sequence of zeros.", "doublecolon_start", "=", "index", "if", "doublecolon_len", ">", "best_doublecolon_len", ":", "# This is the longest sequence of zeros so far.", "best_doublecolon_len", "=", "doublecolon_len", "best_doublecolon_start", "=", "doublecolon_start", "else", ":", "doublecolon_len", "=", "0", "doublecolon_start", "=", "-", "1", "if", "best_doublecolon_len", ">", "1", ":", "best_doublecolon_end", "=", "(", "best_doublecolon_start", "+", "best_doublecolon_len", ")", "# For zeros at the end of the address.", "if", "best_doublecolon_end", "==", "len", "(", "hextets", ")", ":", "hextets", "+=", "[", "''", "]", "hextets", "[", "best_doublecolon_start", ":", "best_doublecolon_end", "]", "=", "[", "''", "]", "# For zeros at the beginning of the address.", "if", "best_doublecolon_start", "==", "0", ":", "hextets", "=", "[", "''", "]", "+", "hextets", "return", "hextets" ]
https://github.com/selfteaching/selfteaching-python-camp/blob/9982ee964b984595e7d664b07c389cddaf158f1e/19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/ipaddress.py#L1881-L1926
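The effect of this compression is the standard "::" shorthand; the stdlib ipaddress module, of which the vendored copy above is a backport, shows it end to end:

import ipaddress

# The longest run of zero hextets collapses to "::", which is exactly
# what the inserted empty strings arrange for ":".join(hextets).
addr = ipaddress.IPv6Address("2001:0db8:0000:0000:0000:0000:0000:0001")
print(addr)           # 2001:db8::1
print(addr.exploded)  # 2001:0db8:0000:0000:0000:0000:0000:0001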
eventable/vobject
498555a553155ea9b26aace93332ae79365ecb31
vobject/icalendar.py
python
TimezoneComponent.__str__
(self)
return "<VTIMEZONE | {0}>".format(getattr(self, 'tzid', 'No TZID'))
[]
def __str__(self):
    return "<VTIMEZONE | {0}>".format(getattr(self, 'tzid', 'No TZID'))
[ "def", "__str__", "(", "self", ")", ":", "return", "\"<VTIMEZONE | {0}>\"", ".", "format", "(", "getattr", "(", "self", ",", "'tzid'", ",", "'No TZID'", ")", ")" ]
https://github.com/eventable/vobject/blob/498555a553155ea9b26aace93332ae79365ecb31/vobject/icalendar.py#L358-L359
reviewboard/reviewboard
7395902e4c181bcd1d633f61105012ffb1d18e1b
reviewboard/scmtools/svn/__init__.py
python
SVNTool.parse_diff_revision
(self, filename, revision, *args, **kwargs)
return filename, revision
Parse and return a filename and revision from a diff.

Args:
    filename (bytes):
        The filename as represented in the diff.

    revision (bytes):
        The revision as represented in the diff.

    *args (tuple, unused):
        Unused positional arguments.

    **kwargs (dict, unused):
        Unused keyword arguments.

Returns:
    tuple:
    A tuple containing two items:

    1. The normalized filename as a byte string.
    2. The normalized revision as a byte string or a
       :py:class:`~reviewboard.scmtools.core.Revision`.
Parse and return a filename and revision from a diff.
[ "Parse", "and", "return", "a", "filename", "and", "revision", "from", "a", "diff", "." ]
def parse_diff_revision(self, filename, revision, *args, **kwargs):
    """Parse and return a filename and revision from a diff.

    Args:
        filename (bytes):
            The filename as represented in the diff.

        revision (bytes):
            The revision as represented in the diff.

        *args (tuple, unused):
            Unused positional arguments.

        **kwargs (dict, unused):
            Unused keyword arguments.

    Returns:
        tuple:
        A tuple containing two items:

        1. The normalized filename as a byte string.
        2. The normalized revision as a byte string or a
           :py:class:`~reviewboard.scmtools.core.Revision`.
    """
    assert isinstance(filename, bytes), (
        'filename must be a byte string, not %s' % type(filename))
    assert isinstance(revision, bytes), (
        'revision must be a byte string, not %s' % type(revision))

    # Some diffs have additional tabs between the parts of the file
    # revisions
    revision = revision.strip()

    if self.working_copy_re.match(revision):
        return filename, HEAD

    # "(revision )" is generated by a few weird tools (like IntelliJ). If
    # in the +++ line of the diff, it means HEAD, and in the --- line, it
    # means PRE_CREATION. Since the more important use case is parsing the
    # source revision, we treat it as a new file. See bugs 1937 and 2632.
    if revision == b'(revision )':
        return filename, PRE_CREATION

    # Binary diffs don't provide revision information, so we set a fake
    # "(unknown)" in the SVNDiffParser. This will never actually appear
    # in SVN diffs.
    if revision == b'(unknown)':
        return filename, UNKNOWN

    m = self.revision_re.match(revision)

    if not m:
        raise SCMError('Unable to parse diff revision header "%s"'
                       % revision.decode('utf-8'))

    relocated_file = m.group(2)
    revision = m.group(4)

    # group(3) holds the revision string in braces, like '(revision 4)'
    # group(4) only matches the revision number, which might by None when
    # 'nonexistent' is given as the revision string
    if revision in (None, b'0'):
        revision = PRE_CREATION

    if relocated_file:
        if not relocated_file.startswith(b'...'):
            raise SCMError('Unable to parse SVN relocated path "%s"'
                           % relocated_file.decode('utf-8'))

        filename = b'%s/%s' % (relocated_file[4:], filename)

    return filename, revision
[ "def", "parse_diff_revision", "(", "self", ",", "filename", ",", "revision", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "assert", "isinstance", "(", "filename", ",", "bytes", ")", ",", "(", "'filename must be a byte string, not %s'", "%", "type", "(", "filename", ")", ")", "assert", "isinstance", "(", "revision", ",", "bytes", ")", ",", "(", "'revision must be a byte string, not %s'", "%", "type", "(", "revision", ")", ")", "# Some diffs have additional tabs between the parts of the file", "# revisions", "revision", "=", "revision", ".", "strip", "(", ")", "if", "self", ".", "working_copy_re", ".", "match", "(", "revision", ")", ":", "return", "filename", ",", "HEAD", "# \"(revision )\" is generated by a few weird tools (like IntelliJ). If", "# in the +++ line of the diff, it means HEAD, and in the --- line, it", "# means PRE_CREATION. Since the more important use case is parsing the", "# source revision, we treat it as a new file. See bugs 1937 and 2632.", "if", "revision", "==", "b'(revision )'", ":", "return", "filename", ",", "PRE_CREATION", "# Binary diffs don't provide revision information, so we set a fake", "# \"(unknown)\" in the SVNDiffParser. This will never actually appear", "# in SVN diffs.", "if", "revision", "==", "b'(unknown)'", ":", "return", "filename", ",", "UNKNOWN", "m", "=", "self", ".", "revision_re", ".", "match", "(", "revision", ")", "if", "not", "m", ":", "raise", "SCMError", "(", "'Unable to parse diff revision header \"%s\"'", "%", "revision", ".", "decode", "(", "'utf-8'", ")", ")", "relocated_file", "=", "m", ".", "group", "(", "2", ")", "revision", "=", "m", ".", "group", "(", "4", ")", "# group(3) holds the revision string in braces, like '(revision 4)'", "# group(4) only matches the revision number, which might by None when", "# 'nonexistent' is given as the revision string", "if", "revision", "in", "(", "None", ",", "b'0'", ")", ":", "revision", "=", "PRE_CREATION", "if", "relocated_file", ":", "if", "not", "relocated_file", ".", "startswith", "(", "b'...'", ")", ":", "raise", "SCMError", "(", "'Unable to parse SVN relocated path \"%s\"'", "%", "relocated_file", ".", "decode", "(", "'utf-8'", ")", ")", "filename", "=", "b'%s/%s'", "%", "(", "relocated_file", "[", "4", ":", "]", ",", "filename", ")", "return", "filename", ",", "revision" ]
https://github.com/reviewboard/reviewboard/blob/7395902e4c181bcd1d633f61105012ffb1d18e1b/reviewboard/scmtools/svn/__init__.py#L277-L348
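The method above relies on class attributes (working_copy_re, revision_re) defined elsewhere in the module; a rough standalone sketch of the numeric case only, using a simplified pattern that is an assumption rather than the real regex:

import re

# Hypothetical, simplified stand-in for SVNTool.revision_re.
revision_re = re.compile(br'\(revision (\d+)\)')

for header in (b'(revision 4)', b'(revision 0)'):
    rev = revision_re.match(header).group(1)
    # In the real method, revision 0 (and None) map to PRE_CREATION.
    print(header, b'PRE_CREATION' if rev == b'0' else rev)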
geopandas/geopandas
8e7133aef9e6c0d2465e07e92d954e95dedd3881
geopandas/sindex.py
python
BaseSpatialIndex.valid_query_predicates
(self)
Returns valid predicates for this spatial index.

Returns
-------
set
    Set of valid predicates for this spatial index.

Examples
--------
>>> from shapely.geometry import Point
>>> s = geopandas.GeoSeries([Point(0, 0), Point(1, 1)])
>>> s.sindex.valid_query_predicates  # doctest: +SKIP
{'contains', 'crosses', 'intersects', 'within', 'touches', \
'overlaps', None, 'covers', 'contains_properly'}
Returns valid predicates for this spatial index.
[ "Returns", "valid", "predicates", "for", "this", "spatial", "index", "." ]
def valid_query_predicates(self):
    """Returns valid predicates for this spatial index.

    Returns
    -------
    set
        Set of valid predicates for this spatial index.

    Examples
    --------
    >>> from shapely.geometry import Point
    >>> s = geopandas.GeoSeries([Point(0, 0), Point(1, 1)])
    >>> s.sindex.valid_query_predicates  # doctest: +SKIP
    {'contains', 'crosses', 'intersects', 'within', 'touches', \
'overlaps', None, 'covers', 'contains_properly'}
    """
    raise NotImplementedError
[ "def", "valid_query_predicates", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/geopandas/geopandas/blob/8e7133aef9e6c0d2465e07e92d954e95dedd3881/geopandas/sindex.py#L29-L45
IBM/watson-online-store
4c8b60883b319f07c3187d9cb433ef9c3ae29aea
python-flask-server/server.py
python
WebSocketSender.send_message
(self, message)
Function to send a message to the web-ui via Flask SocketIO.
Function to send a message to the web-ui via Flask SocketIO.
[ "Function", "to", "send", "a", "message", "to", "the", "web", "-", "ui", "via", "Flask", "SocketIO", "." ]
def send_message(self, message):
    """Function to send a message to the web-ui via Flask SocketIO."""
    lines = message.split('\n')
    for line in lines:
        image = None
        if 'output_format[png]' in line:
            line, http_tail = line.split('http', 1)
            image = 'http' + http_tail
        emit('my_response',
             {'data': line.strip(), 'image': image})
[ "def", "send_message", "(", "self", ",", "message", ")", ":", "lines", "=", "message", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "image", "=", "None", "if", "'output_format[png]'", "in", "line", ":", "line", ",", "http_tail", "=", "line", ".", "split", "(", "'http'", ",", "1", ")", "image", "=", "'http'", "+", "http_tail", "emit", "(", "'my_response'", ",", "{", "'data'", ":", "line", ".", "strip", "(", ")", ",", "'image'", ":", "image", "}", ")" ]
https://github.com/IBM/watson-online-store/blob/4c8b60883b319f07c3187d9cb433ef9c3ae29aea/python-flask-server/server.py#L57-L66
raiden-network/raiden
76c68b426a6f81f173b9a2c09bd88a610502c38b
raiden/raiden_service.py
python
RaidenService._trigger_state_change_effects
( self, new_state: ChainState, state_changes: List[StateChange], events: List[Event], )
Trigger effects that are based on processed state changes. Examples are MS/PFS updates, transport communication channel updates and presence checks.
Trigger effects that are based on processed state changes.
[ "Trigger", "effects", "that", "are", "based", "on", "processed", "state", "changes", "." ]
def _trigger_state_change_effects(
    self,
    new_state: ChainState,
    state_changes: List[StateChange],
    events: List[Event],
) -> List[Greenlet]:
    """Trigger effects that are based on processed state changes.

    Examples are MS/PFS updates, transport communication channel updates
    and presence checks.
    """
    # For safety of the mediation the monitoring service must be updated
    # before the balance proof is sent. Otherwise a timing attack would be
    # possible, where an attacker would mediate a transfer through a node,
    # and try to DoS it, with the expectation that the victim would
    # forward the payment, but wouldn't be able to send a transaction to
    # the blockchain nor update a MS.

    # Since several state_changes in one batch of state_changes can trigger
    # the same PFSCapacityUpdate or MonitoringUpdate we want to iterate over
    # all state changes to produce and send only unique messages. Assumption is
    # that the latest related state_change defines the correct messages.
    # Goal is to reduce messages.
    monitoring_updates: Dict[CanonicalIdentifier, BalanceProofStateChange] = {}
    pfs_fee_updates: Set[CanonicalIdentifier] = set()
    pfs_capacity_updates: Set[CanonicalIdentifier] = set()

    for state_change in state_changes:
        if self.config.services.monitoring_enabled and isinstance(
            state_change, BalanceProofStateChange
        ):
            monitoring_updates[state_change.balance_proof.canonical_identifier] = state_change

        if isinstance(state_change, PFS_UPDATE_CAPACITY_STATE_CHANGES):
            if isinstance(state_change, BalanceProofStateChange):
                canonical_identifier = state_change.balance_proof.canonical_identifier
            else:
                canonical_identifier = state_change.canonical_identifier

            if isinstance(state_change, PFS_UPDATE_FEE_STATE_CHANGES):
                pfs_fee_updates.add(canonical_identifier)
            else:
                pfs_capacity_updates.add(canonical_identifier)

        if isinstance(state_change, Block):
            self.transport.expire_services_addresses(
                self.rpc_client.get_block(state_change.block_hash)["timestamp"],
                state_change.block_number,
            )

    for event in events:
        if isinstance(event, PFS_UPDATE_FEE_EVENTS):
            pfs_fee_updates.add(event.canonical_identifier)
        elif isinstance(event, PFS_UPDATE_CAPACITY_EVENTS):
            pfs_capacity_updates.add(event.canonical_identifier)

    for monitoring_update in monitoring_updates.values():
        update_monitoring_service_from_balance_proof(
            raiden=self,
            chain_state=new_state,
            new_balance_proof=monitoring_update.balance_proof,
            non_closing_participant=self.address,
        )

    for canonical_identifier in pfs_capacity_updates:
        send_pfs_update(raiden=self, canonical_identifier=canonical_identifier)

    for canonical_identifier in pfs_fee_updates:
        send_pfs_update(
            raiden=self, canonical_identifier=canonical_identifier, update_fee_schedule=True
        )

    log.debug(
        "Raiden events",
        node=to_checksum_address(self.address),
        raiden_events=[redact_secret(DictSerializer.serialize(event)) for event in events],
    )

    self.state_change_qty += len(state_changes)
    self._maybe_snapshot()

    if self.ready_to_process_events:
        return self.async_handle_events(chain_state=new_state, raiden_events=events)
    else:
        return []
[ "def", "_trigger_state_change_effects", "(", "self", ",", "new_state", ":", "ChainState", ",", "state_changes", ":", "List", "[", "StateChange", "]", ",", "events", ":", "List", "[", "Event", "]", ",", ")", "->", "List", "[", "Greenlet", "]", ":", "# For safety of the mediation the monitoring service must be updated", "# before the balance proof is sent. Otherwise a timing attack would be", "# possible, where an attacker would mediate a transfer through a node,", "# and try to DoS it, with the expectation that the victim would", "# forward the payment, but wouldn't be able to send a transaction to", "# the blockchain nor update a MS.", "# Since several state_changes in one batch of state_changes can trigger", "# the same PFSCapacityUpdate or MonitoringUpdate we want to iterate over", "# all state changes to produce and send only unique messages. Assumption is", "# that the latest related state_change defines the correct messages.", "# Goal is to reduce messages.", "monitoring_updates", ":", "Dict", "[", "CanonicalIdentifier", ",", "BalanceProofStateChange", "]", "=", "{", "}", "pfs_fee_updates", ":", "Set", "[", "CanonicalIdentifier", "]", "=", "set", "(", ")", "pfs_capacity_updates", ":", "Set", "[", "CanonicalIdentifier", "]", "=", "set", "(", ")", "for", "state_change", "in", "state_changes", ":", "if", "self", ".", "config", ".", "services", ".", "monitoring_enabled", "and", "isinstance", "(", "state_change", ",", "BalanceProofStateChange", ")", ":", "monitoring_updates", "[", "state_change", ".", "balance_proof", ".", "canonical_identifier", "]", "=", "state_change", "if", "isinstance", "(", "state_change", ",", "PFS_UPDATE_CAPACITY_STATE_CHANGES", ")", ":", "if", "isinstance", "(", "state_change", ",", "BalanceProofStateChange", ")", ":", "canonical_identifier", "=", "state_change", ".", "balance_proof", ".", "canonical_identifier", "else", ":", "canonical_identifier", "=", "state_change", ".", "canonical_identifier", "if", "isinstance", "(", "state_change", ",", "PFS_UPDATE_FEE_STATE_CHANGES", ")", ":", "pfs_fee_updates", ".", "add", "(", "canonical_identifier", ")", "else", ":", "pfs_capacity_updates", ".", "add", "(", "canonical_identifier", ")", "if", "isinstance", "(", "state_change", ",", "Block", ")", ":", "self", ".", "transport", ".", "expire_services_addresses", "(", "self", ".", "rpc_client", ".", "get_block", "(", "state_change", ".", "block_hash", ")", "[", "\"timestamp\"", "]", ",", "state_change", ".", "block_number", ",", ")", "for", "event", "in", "events", ":", "if", "isinstance", "(", "event", ",", "PFS_UPDATE_FEE_EVENTS", ")", ":", "pfs_fee_updates", ".", "add", "(", "event", ".", "canonical_identifier", ")", "elif", "isinstance", "(", "event", ",", "PFS_UPDATE_CAPACITY_EVENTS", ")", ":", "pfs_capacity_updates", ".", "add", "(", "event", ".", "canonical_identifier", ")", "for", "monitoring_update", "in", "monitoring_updates", ".", "values", "(", ")", ":", "update_monitoring_service_from_balance_proof", "(", "raiden", "=", "self", ",", "chain_state", "=", "new_state", ",", "new_balance_proof", "=", "monitoring_update", ".", "balance_proof", ",", "non_closing_participant", "=", "self", ".", "address", ",", ")", "for", "canonical_identifier", "in", "pfs_capacity_updates", ":", "send_pfs_update", "(", "raiden", "=", "self", ",", "canonical_identifier", "=", "canonical_identifier", ")", "for", "canonical_identifier", "in", "pfs_fee_updates", ":", "send_pfs_update", "(", "raiden", "=", "self", ",", "canonical_identifier", "=", "canonical_identifier", ",", 
"update_fee_schedule", "=", "True", ")", "log", ".", "debug", "(", "\"Raiden events\"", ",", "node", "=", "to_checksum_address", "(", "self", ".", "address", ")", ",", "raiden_events", "=", "[", "redact_secret", "(", "DictSerializer", ".", "serialize", "(", "event", ")", ")", "for", "event", "in", "events", "]", ",", ")", "self", ".", "state_change_qty", "+=", "len", "(", "state_changes", ")", "self", ".", "_maybe_snapshot", "(", ")", "if", "self", ".", "ready_to_process_events", ":", "return", "self", ".", "async_handle_events", "(", "chain_state", "=", "new_state", ",", "raiden_events", "=", "events", ")", "else", ":", "return", "[", "]" ]
https://github.com/raiden-network/raiden/blob/76c68b426a6f81f173b9a2c09bd88a610502c38b/raiden/raiden_service.py#L912-L995
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/config_models/defaults.py
python
instance_kerberos_auth
(field, value)
return 'disabled'
[]
def instance_kerberos_auth(field, value):
    return 'disabled'
[ "def", "instance_kerberos_auth", "(", "field", ",", "value", ")", ":", "return", "'disabled'" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/config_models/defaults.py#L105-L106
bigaidream-projects/drmad
a4bb6010595d956f29c5a42a095bab76a60b29eb
cpu_ver/experiments/exp1/exact/exact.py
python
run
()
return all_L2_regs, all_tests_rates, all_avg_regs
[]
def run():
    RS = RandomState((seed, "top_rs"))
    all_data = mnist.load_data_as_dict()
    train_data, tests_data = random_partition(all_data, RS, [N_train, N_tests])
    w_parser, pred_fun, loss_fun, frac_err = make_nn_funs(layer_sizes)
    N_weights = w_parser.vect.size

    def transform_weights(z_vect, transform):
        return z_vect * np.exp(transform)

    def regularization(z_vect):
        return np.dot(z_vect, z_vect) * np.exp(log_L2)

    def constrain_reg(t_vect, name):
        all_t = w_parser.new_vect(t_vect)
        for i in range(N_layers):
            all_t[('biases', i)] = 0.0
        if name == 'universal':
            t_mean = np.mean([np.mean(all_t[('weights', i)])
                              for i in range(N_layers)])
            for i in range(N_layers):
                all_t[('weights', i)] = t_mean
        elif name == 'layers':
            for i in range(N_layers):
                all_t[('weights', i)] = np.mean(all_t[('weights', i)])
        elif name == 'units':
            for i in range(N_layers):
                all_t[('weights', i)] = np.mean(all_t[('weights', i)], axis=1, keepdims=True)
        else:
            raise Exception
        return all_t.vect

    def process_transform(t_vect):
        # Remove the redundancy due to sharing transformations within units
        all_t = w_parser.new_vect(t_vect)
        new_t = np.zeros((0,))
        for i in range(N_layers):
            layer = all_t[('weights', i)]
            assert np.all(layer[:, 0] == layer[:, 1])
            cur_t = log_L2 - 2 * layer[:, 0]
            new_t = np.concatenate((new_t, cur_t))
        return new_t

    def train_z(data, z_vect_0, transform):
        N_data = data['X'].shape[0]

        def primal_loss(z_vect, transform, i_primal, record_results=False):
            RS = RandomState((seed, i_primal, "primal"))
            idxs = RS.randint(N_data, size=batch_size)
            minibatch = dictslice(data, idxs)
            w_vect = transform_weights(z_vect, transform)
            loss = loss_fun(w_vect, **minibatch)
            reg = regularization(z_vect)
            if record_results and i_primal % N_thin == 0:
                print "Iter {0}: train: {1}".format(i_primal, getval(loss))
            return loss + reg

        return sgd(grad(primal_loss), transform, z_vect_0, alpha, beta, N_iters)

    all_transforms, all_tests_loss, all_tests_rates, all_avg_regs = [], [], [], []

    def train_reg(reg_0, constraint, N_meta_iter, i_top):
        def hyperloss(transform, i_hyper, cur_train_data, cur_valid_data):
            RS = RandomState((seed, i_top, i_hyper, "hyperloss"))
            z_vect_0 = RS.randn(N_weights) * np.exp(log_init_scale)
            z_vect_final = train_z(cur_train_data, z_vect_0, transform)
            w_vect_final = transform_weights(z_vect_final, transform)
            return loss_fun(w_vect_final, **cur_valid_data)
        hypergrad = grad(hyperloss)

        def error_rate(transform, i_hyper, cur_train_data, cur_valid_data):
            RS = RandomState((seed, i_top, i_hyper, "hyperloss"))
            z_vect_0 = RS.randn(N_weights) * np.exp(log_init_scale)
            z_vect_final = train_z(cur_train_data, z_vect_0, transform)
            w_vect_final = transform_weights(z_vect_final, transform)
            return frac_err(w_vect_final, **cur_valid_data)

        cur_reg = reg_0
        for i_hyper in range(N_meta_iter):
            if i_hyper % N_meta_thin == 0:
                test_rate = error_rate(cur_reg, i_hyper, train_data, tests_data)
                all_tests_rates.append(test_rate)
                all_transforms.append(cur_reg.copy())
                all_avg_regs.append(np.mean(cur_reg))
                print "Hyper iter {0}, error rate {1}".format(i_hyper, all_tests_rates[-1])
                print "Cur_transform", np.mean(cur_reg)
            RS = RandomState((seed, i_top, i_hyper, "hyperloss"))
            cur_split = random_partition(train_data, RS, [N_train - N_valid, N_valid])
            raw_grad = hypergrad(cur_reg, i_hyper, *cur_split)
            constrained_grad = constrain_reg(raw_grad, constraint)
            cur_reg -= np.sign(constrained_grad) * meta_alpha
        return cur_reg

    reg = np.zeros(N_weights) + 0.2
    constraints = ['universal', 'layers', 'units']
    for i_top, (N_meta_iter, constraint) in enumerate(zip(all_N_meta_iter, constraints)):
        print "Top level iter {0}".format(i_top)
        reg = train_reg(reg, constraint, N_meta_iter, i_top)

    all_L2_regs = np.array(zip(*map(process_transform, all_transforms)))
    return all_L2_regs, all_tests_rates, all_avg_regs
[ "def", "run", "(", ")", ":", "RS", "=", "RandomState", "(", "(", "seed", ",", "\"top_rs\"", ")", ")", "all_data", "=", "mnist", ".", "load_data_as_dict", "(", ")", "train_data", ",", "tests_data", "=", "random_partition", "(", "all_data", ",", "RS", ",", "[", "N_train", ",", "N_tests", "]", ")", "w_parser", ",", "pred_fun", ",", "loss_fun", ",", "frac_err", "=", "make_nn_funs", "(", "layer_sizes", ")", "N_weights", "=", "w_parser", ".", "vect", ".", "size", "def", "transform_weights", "(", "z_vect", ",", "transform", ")", ":", "return", "z_vect", "*", "np", ".", "exp", "(", "transform", ")", "def", "regularization", "(", "z_vect", ")", ":", "return", "np", ".", "dot", "(", "z_vect", ",", "z_vect", ")", "*", "np", ".", "exp", "(", "log_L2", ")", "def", "constrain_reg", "(", "t_vect", ",", "name", ")", ":", "all_t", "=", "w_parser", ".", "new_vect", "(", "t_vect", ")", "for", "i", "in", "range", "(", "N_layers", ")", ":", "all_t", "[", "(", "'biases'", ",", "i", ")", "]", "=", "0.0", "if", "name", "==", "'universal'", ":", "t_mean", "=", "np", ".", "mean", "(", "[", "np", ".", "mean", "(", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", ")", "for", "i", "in", "range", "(", "N_layers", ")", "]", ")", "for", "i", "in", "range", "(", "N_layers", ")", ":", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", "=", "t_mean", "elif", "name", "==", "'layers'", ":", "for", "i", "in", "range", "(", "N_layers", ")", ":", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", "=", "np", ".", "mean", "(", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", ")", "elif", "name", "==", "'units'", ":", "for", "i", "in", "range", "(", "N_layers", ")", ":", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", "=", "np", ".", "mean", "(", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", ",", "axis", "=", "1", ",", "keepdims", "=", "True", ")", "else", ":", "raise", "Exception", "return", "all_t", ".", "vect", "def", "process_transform", "(", "t_vect", ")", ":", "# Remove the redundancy due to sharing transformations within units", "all_t", "=", "w_parser", ".", "new_vect", "(", "t_vect", ")", "new_t", "=", "np", ".", "zeros", "(", "(", "0", ",", ")", ")", "for", "i", "in", "range", "(", "N_layers", ")", ":", "layer", "=", "all_t", "[", "(", "'weights'", ",", "i", ")", "]", "assert", "np", ".", "all", "(", "layer", "[", ":", ",", "0", "]", "==", "layer", "[", ":", ",", "1", "]", ")", "cur_t", "=", "log_L2", "-", "2", "*", "layer", "[", ":", ",", "0", "]", "new_t", "=", "np", ".", "concatenate", "(", "(", "new_t", ",", "cur_t", ")", ")", "return", "new_t", "def", "train_z", "(", "data", ",", "z_vect_0", ",", "transform", ")", ":", "N_data", "=", "data", "[", "'X'", "]", ".", "shape", "[", "0", "]", "def", "primal_loss", "(", "z_vect", ",", "transform", ",", "i_primal", ",", "record_results", "=", "False", ")", ":", "RS", "=", "RandomState", "(", "(", "seed", ",", "i_primal", ",", "\"primal\"", ")", ")", "idxs", "=", "RS", ".", "randint", "(", "N_data", ",", "size", "=", "batch_size", ")", "minibatch", "=", "dictslice", "(", "data", ",", "idxs", ")", "w_vect", "=", "transform_weights", "(", "z_vect", ",", "transform", ")", "loss", "=", "loss_fun", "(", "w_vect", ",", "*", "*", "minibatch", ")", "reg", "=", "regularization", "(", "z_vect", ")", "if", "record_results", "and", "i_primal", "%", "N_thin", "==", "0", ":", "print", "\"Iter {0}: train: {1}\"", ".", "format", "(", "i_primal", ",", "getval", "(", "loss", ")", ")", "return", "loss", "+", "reg", "return", "sgd", "(", "grad", "(", "primal_loss", ")", ",", 
"transform", ",", "z_vect_0", ",", "alpha", ",", "beta", ",", "N_iters", ")", "all_transforms", ",", "all_tests_loss", ",", "all_tests_rates", ",", "all_avg_regs", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", "def", "train_reg", "(", "reg_0", ",", "constraint", ",", "N_meta_iter", ",", "i_top", ")", ":", "def", "hyperloss", "(", "transform", ",", "i_hyper", ",", "cur_train_data", ",", "cur_valid_data", ")", ":", "RS", "=", "RandomState", "(", "(", "seed", ",", "i_top", ",", "i_hyper", ",", "\"hyperloss\"", ")", ")", "z_vect_0", "=", "RS", ".", "randn", "(", "N_weights", ")", "*", "np", ".", "exp", "(", "log_init_scale", ")", "z_vect_final", "=", "train_z", "(", "cur_train_data", ",", "z_vect_0", ",", "transform", ")", "w_vect_final", "=", "transform_weights", "(", "z_vect_final", ",", "transform", ")", "return", "loss_fun", "(", "w_vect_final", ",", "*", "*", "cur_valid_data", ")", "hypergrad", "=", "grad", "(", "hyperloss", ")", "def", "error_rate", "(", "transform", ",", "i_hyper", ",", "cur_train_data", ",", "cur_valid_data", ")", ":", "RS", "=", "RandomState", "(", "(", "seed", ",", "i_top", ",", "i_hyper", ",", "\"hyperloss\"", ")", ")", "z_vect_0", "=", "RS", ".", "randn", "(", "N_weights", ")", "*", "np", ".", "exp", "(", "log_init_scale", ")", "z_vect_final", "=", "train_z", "(", "cur_train_data", ",", "z_vect_0", ",", "transform", ")", "w_vect_final", "=", "transform_weights", "(", "z_vect_final", ",", "transform", ")", "return", "frac_err", "(", "w_vect_final", ",", "*", "*", "cur_valid_data", ")", "cur_reg", "=", "reg_0", "for", "i_hyper", "in", "range", "(", "N_meta_iter", ")", ":", "if", "i_hyper", "%", "N_meta_thin", "==", "0", ":", "test_rate", "=", "error_rate", "(", "cur_reg", ",", "i_hyper", ",", "train_data", ",", "tests_data", ")", "all_tests_rates", ".", "append", "(", "test_rate", ")", "all_transforms", ".", "append", "(", "cur_reg", ".", "copy", "(", ")", ")", "all_avg_regs", ".", "append", "(", "np", ".", "mean", "(", "cur_reg", ")", ")", "print", "\"Hyper iter {0}, error rate {1}\"", ".", "format", "(", "i_hyper", ",", "all_tests_rates", "[", "-", "1", "]", ")", "print", "\"Cur_transform\"", ",", "np", ".", "mean", "(", "cur_reg", ")", "RS", "=", "RandomState", "(", "(", "seed", ",", "i_top", ",", "i_hyper", ",", "\"hyperloss\"", ")", ")", "cur_split", "=", "random_partition", "(", "train_data", ",", "RS", ",", "[", "N_train", "-", "N_valid", ",", "N_valid", "]", ")", "raw_grad", "=", "hypergrad", "(", "cur_reg", ",", "i_hyper", ",", "*", "cur_split", ")", "constrained_grad", "=", "constrain_reg", "(", "raw_grad", ",", "constraint", ")", "cur_reg", "-=", "np", ".", "sign", "(", "constrained_grad", ")", "*", "meta_alpha", "return", "cur_reg", "reg", "=", "np", ".", "zeros", "(", "N_weights", ")", "+", "0.2", "constraints", "=", "[", "'universal'", ",", "'layers'", ",", "'units'", "]", "for", "i_top", ",", "(", "N_meta_iter", ",", "constraint", ")", "in", "enumerate", "(", "zip", "(", "all_N_meta_iter", ",", "constraints", ")", ")", ":", "print", "\"Top level iter {0}\"", ".", "format", "(", "i_top", ")", "reg", "=", "train_reg", "(", "reg", ",", "constraint", ",", "N_meta_iter", ",", "i_top", ")", "all_L2_regs", "=", "np", ".", "array", "(", "zip", "(", "*", "map", "(", "process_transform", ",", "all_transforms", ")", ")", ")", "return", "all_L2_regs", ",", "all_tests_rates", ",", "all_avg_regs" ]
https://github.com/bigaidream-projects/drmad/blob/a4bb6010595d956f29c5a42a095bab76a60b29eb/cpu_ver/experiments/exp1/exact/exact.py#L35-L132
hzlzh/AlfredWorkflow.com
7055f14f6922c80ea5943839eb0caff11ae57255
Sources/Workflows/Rotten-Tomatoes/PyAl/Request/requests/packages/oauthlib/oauth2/draft25/__init__.py
python
Client.prepare_request_body
(self, *args, **kwargs)
Abstract method used to create request bodies.
Abstract method used to create request bodies.
[ "Abstract", "method", "used", "to", "create", "request", "bodies", "." ]
def prepare_request_body(self, *args, **kwargs):
    """Abstract method used to create request bodies."""
    raise NotImplementedError("Must be implemented by inheriting classes.")
[ "def", "prepare_request_body", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "\"Must be implemented by inheriting classes.\"", ")" ]
https://github.com/hzlzh/AlfredWorkflow.com/blob/7055f14f6922c80ea5943839eb0caff11ae57255/Sources/Workflows/Rotten-Tomatoes/PyAl/Request/requests/packages/oauthlib/oauth2/draft25/__init__.py#L140-L142
dit/dit
2853cb13110c5a5b2fa7ad792e238e2177013da2
dit/rate_distortion/curves.py
python
IBCurve.plot
(self, downsample=5)
return plotter.plot(downsample)
Construct an IBPlotter and utilize it to plot the information
bottleneck curve.

Parameters
----------
downsample : int
    How frequently to display points along the IB curve.

Returns
-------
fig : plt.figure
    The resulting figure.
Construct an IBPlotter and utilize it to plot the information bottleneck curve.
[ "Construct", "an", "IBPlotter", "and", "utilize", "it", "to", "plot", "the", "information", "bottleneck", "curve", "." ]
def plot(self, downsample=5):  # pragma: no cover
    """
    Construct an IBPlotter and utilize it to plot the information
    bottleneck curve.

    Parameters
    ----------
    downsample : int
        How frequently to display points along the IB curve.

    Returns
    -------
    fig : plt.figure
        The resulting figure.
    """
    from .plotting import IBPlotter
    plotter = IBPlotter(self)
    return plotter.plot(downsample)
[ "def", "plot", "(", "self", ",", "downsample", "=", "5", ")", ":", "# pragma: no cover", "from", ".", "plotting", "import", "IBPlotter", "plotter", "=", "IBPlotter", "(", "self", ")", "return", "plotter", ".", "plot", "(", "downsample", ")" ]
https://github.com/dit/dit/blob/2853cb13110c5a5b2fa7ad792e238e2177013da2/dit/rate_distortion/curves.py#L513-L530
tztztztztz/eql.detectron2
29224acf4ea549c53264e6229da69868bd5470f3
detectron2/structures/instances.py
python
Instances.get_fields
(self)
return self._fields
Returns:
    dict: a dict which maps names (str) to data of the fields

Modifying the returned dict will modify this instance.
Returns: dict: a dict which maps names (str) to data of the fields
[ "Returns", ":", "dict", ":", "a", "dict", "which", "maps", "names", "(", "str", ")", "to", "data", "of", "the", "fields" ]
def get_fields(self) -> Dict[str, Any]:
    """
    Returns:
        dict: a dict which maps names (str) to data of the fields

    Modifying the returned dict will modify this instance.
    """
    return self._fields
[ "def", "get_fields", "(", "self", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "return", "self", ".", "_fields" ]
https://github.com/tztztztztz/eql.detectron2/blob/29224acf4ea549c53264e6229da69868bd5470f3/detectron2/structures/instances.py#L97-L104
StevenLiuWen/ano_pred_cvpr2018
06ad21027f96f06d8cfe7ce47133b82e89631d4c
Codes/flownet2/src/net.py
python
Net.loss
(self, **kwargs)
return
Accepts prediction Tensors from the output of `model`. Returns a single Tensor representing the total loss of the model.
Accepts prediction Tensors from the output of `model`. Returns a single Tensor representing the total loss of the model.
[ "Accepts", "prediction", "Tensors", "from", "the", "output", "of", "model", ".", "Returns", "a", "single", "Tensor", "representing", "the", "total", "loss", "of", "the", "model", "." ]
def loss(self, **kwargs):
    """
    Accepts prediction Tensors from the output of `model`.
    Returns a single Tensor representing the total loss of the model.
    """
    return
[ "def", "loss", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return" ]
https://github.com/StevenLiuWen/ano_pred_cvpr2018/blob/06ad21027f96f06d8cfe7ce47133b82e89631d4c/Codes/flownet2/src/net.py#L38-L43
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/Python3/nntplib.py
python
_NNTPBase.head
(self, message_spec=None, *, file=None)
return self._artcmd(cmd, file)
Process a HEAD command.  Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines)
Process a HEAD command. Argument: - message_spec: article number or message id - file: filename string or file object to store the headers in Returns: - resp: server response if successful - ArticleInfo: (article number, message id, list of header lines)
[ "Process", "a", "HEAD", "command", ".", "Argument", ":", "-", "message_spec", ":", "article", "number", "or", "message", "id", "-", "file", ":", "filename", "string", "or", "file", "object", "to", "store", "the", "headers", "in", "Returns", ":", "-", "resp", ":", "server", "response", "if", "successful", "-", "ArticleInfo", ":", "(", "article", "number", "message", "id", "list", "of", "header", "lines", ")" ]
def head(self, message_spec=None, *, file=None):
    """Process a HEAD command.  Argument:
    - message_spec: article number or message id
    - file: filename string or file object to store the headers in
    Returns:
    - resp: server response if successful
    - ArticleInfo: (article number, message id, list of header lines)
    """
    if message_spec is not None:
        cmd = 'HEAD {0}'.format(message_spec)
    else:
        cmd = 'HEAD'
    return self._artcmd(cmd, file)
[ "def", "head", "(", "self", ",", "message_spec", "=", "None", ",", "*", ",", "file", "=", "None", ")", ":", "if", "message_spec", "is", "not", "None", ":", "cmd", "=", "'HEAD {0}'", ".", "format", "(", "message_spec", ")", "else", ":", "cmd", "=", "'HEAD'", "return", "self", ".", "_artcmd", "(", "cmd", ",", "file", ")" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/Python3/nntplib.py#L730-L742
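A usage sketch for the wrapper above; the host and group are placeholders, so this needs a reachable NNTP server (and note nntplib was removed from the stdlib in Python 3.13):

from nntplib import NNTP

with NNTP("news.example.com") as server:  # placeholder host
    resp, count, first, last, name = server.group("comp.lang.python")
    resp, info = server.head(last)        # HEAD <article number>
    print(info.number, info.message_id)
    for line in info.lines[:3]:           # raw header lines as bytes
        print(line)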
XX-net/XX-Net
a9898cfcf0084195fb7e69b6bc834e59aecdf14f
python3.8.2/Lib/site-packages/pip/_vendor/requests/models.py
python
Response.json
(self, **kwargs)
return complexjson.loads(self.text, **kwargs)
r"""Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. :raises ValueError: If the response body does not contain valid json.
r"""Returns the json-encoded content of a response, if any.
[ "r", "Returns", "the", "json", "-", "encoded", "content", "of", "a", "response", "if", "any", "." ]
def json(self, **kwargs):
    r"""Returns the json-encoded content of a response, if any.

    :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
    :raises ValueError: If the response body does not contain valid json.
    """

    if not self.encoding and self.content and len(self.content) > 3:
        # No encoding set. JSON RFC 4627 section 3 states we should expect
        # UTF-8, -16 or -32. Detect which one to use; If the detection or
        # decoding fails, fall back to `self.text` (using chardet to make
        # a best guess).
        encoding = guess_json_utf(self.content)
        if encoding is not None:
            try:
                return complexjson.loads(
                    self.content.decode(encoding), **kwargs
                )
            except UnicodeDecodeError:
                # Wrong UTF codec detected; usually because it's not UTF-8
                # but some other 8-bit codec. This is an RFC violation,
                # and the server didn't bother to tell us what codec *was*
                # used.
                pass
    return complexjson.loads(self.text, **kwargs)
[ "def", "json", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "encoding", "and", "self", ".", "content", "and", "len", "(", "self", ".", "content", ")", ">", "3", ":", "# No encoding set. JSON RFC 4627 section 3 states we should expect", "# UTF-8, -16 or -32. Detect which one to use; If the detection or", "# decoding fails, fall back to `self.text` (using chardet to make", "# a best guess).", "encoding", "=", "guess_json_utf", "(", "self", ".", "content", ")", "if", "encoding", "is", "not", "None", ":", "try", ":", "return", "complexjson", ".", "loads", "(", "self", ".", "content", ".", "decode", "(", "encoding", ")", ",", "*", "*", "kwargs", ")", "except", "UnicodeDecodeError", ":", "# Wrong UTF codec detected; usually because it's not UTF-8", "# but some other 8-bit codec. This is an RFC violation,", "# and the server didn't bother to tell us what codec *was*", "# used.", "pass", "return", "complexjson", ".", "loads", "(", "self", ".", "text", ",", "*", "*", "kwargs", ")" ]
https://github.com/XX-net/XX-Net/blob/a9898cfcf0084195fb7e69b6bc834e59aecdf14f/python3.8.2/Lib/site-packages/pip/_vendor/requests/models.py#L873-L897
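The typical call pattern for the method above, assuming requests is installed and the placeholder endpoint is reachable:

import requests

resp = requests.get("https://httpbin.org/json")  # placeholder endpoint
data = resp.json()  # runs through the UTF-detection fallback shown above
print(type(data))   # <class 'dict'>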
fxsjy/jiebademo
ba3e5a34cd84b612e13f4dfb9f3ec037928c4339
jiebademo/bottle.py
python
html_escape
(string)
return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\ .replace('"','&quot;').replace("'",'&#039;')
Escape HTML special characters ``&<>`` and quotes ``'"``.
Escape HTML special characters ``&<>`` and quotes ``'"``.
[ "Escape", "HTML", "special", "characters", "&<", ">", "and", "quotes", "." ]
def html_escape(string):
    ''' Escape HTML special characters ``&<>`` and quotes ``'"``. '''
    return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
                 .replace('"','&quot;').replace("'",'&#039;')
[ "def", "html_escape", "(", "string", ")", ":", "return", "string", ".", "replace", "(", "'&'", ",", "'&amp;'", ")", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", ".", "replace", "(", "'>'", ",", "'&gt;'", ")", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", ".", "replace", "(", "\"'\"", ",", "'&#039;'", ")" ]
https://github.com/fxsjy/jiebademo/blob/ba3e5a34cd84b612e13f4dfb9f3ec037928c4339/jiebademo/bottle.py#L1961-L1964
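Running the helper above on a string containing every special character shows why '&' must be replaced first: doing it later would re-escape the entities the other replacements introduce.

def html_escape(string):
    return string.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')\
        .replace('"', '&quot;').replace("'", '&#039;')

print(html_escape('<a href="it\'s">&</a>'))
# &lt;a href=&quot;it&#039;s&quot;&gt;&amp;&lt;/a&gt;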
rigetti/grove
dc6bf6ec63e8c435fe52b1e00f707d5ce4cdb9b3
grove/tomography/operator_utils.py
python
OperatorBasis.__eq__
(self, other)
return (self.labels == other.labels and all( [(my_op - o_op).norm(FROBENIUS) < EPS for (my_op, o_op) in zip(self.ops, other.ops)]))
[]
def __eq__(self, other):
    return (self.labels == other.labels and
            all([(my_op - o_op).norm(FROBENIUS) < EPS
                 for (my_op, o_op) in zip(self.ops, other.ops)]))
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "return", "(", "self", ".", "labels", "==", "other", ".", "labels", "and", "all", "(", "[", "(", "my_op", "-", "o_op", ")", ".", "norm", "(", "FROBENIUS", ")", "<", "EPS", "for", "(", "my_op", ",", "o_op", ")", "in", "zip", "(", "self", ".", "ops", ",", "other", ".", "ops", ")", "]", ")", ")" ]
https://github.com/rigetti/grove/blob/dc6bf6ec63e8c435fe52b1e00f707d5ce4cdb9b3/grove/tomography/operator_utils.py#L351-L353
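The tolerance-based equality above compares operator lists via the Frobenius norm of each difference; the same check with plain numpy arrays:

import numpy as np

EPS = 1e-8             # stand-in for the module-level tolerance
a = np.eye(2)
b = np.eye(2) + 1e-12  # equal up to tiny numerical noise
print(np.linalg.norm(a - b, 'fro') < EPS)  # True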
microsoft/unilm
65f15af2a307ebb64cfb25adf54375b002e6fe8d
xtune/src/transformers/modeling_tf_gpt2.py
python
gelu
(x)
return x * cdf
Gaussian Error Linear Unit.

This is a smoother version of the RELU.
Original paper: https://arxiv.org/abs/1606.08415

Args:
    x: float Tensor to perform activation.

Returns:
    `x` with the GELU activation applied.
Gaussian Error Linear Unit. This is a smoother version of the RELU. Original paper: https://arxiv.org/abs/1606.08415 Args: x: float Tensor to perform activation. Returns: `x` with the GELU activation applied.
[ "Gaussian", "Error", "Linear", "Unit", ".", "This", "is", "a", "smoother", "version", "of", "the", "RELU", ".", "Original", "paper", ":", "https", ":", "//", "arxiv", ".", "org", "/", "abs", "/", "1606", ".", "08415", "Args", ":", "x", ":", "float", "Tensor", "to", "perform", "activation", ".", "Returns", ":", "x", "with", "the", "GELU", "activation", "applied", "." ]
def gelu(x):
    """Gaussian Error Linear Unit.

    This is a smoother version of the RELU.
    Original paper: https://arxiv.org/abs/1606.08415

    Args:
        x: float Tensor to perform activation.

    Returns:
        `x` with the GELU activation applied.
    """
    cdf = 0.5 * (1.0 + tf.tanh((np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
    return x * cdf
[ "def", "gelu", "(", "x", ")", ":", "cdf", "=", "0.5", "*", "(", "1.0", "+", "tf", ".", "tanh", "(", "(", "np", ".", "sqrt", "(", "2", "/", "np", ".", "pi", ")", "*", "(", "x", "+", "0.044715", "*", "tf", ".", "pow", "(", "x", ",", "3", ")", ")", ")", ")", ")", "return", "x", "*", "cdf" ]
https://github.com/microsoft/unilm/blob/65f15af2a307ebb64cfb25adf54375b002e6fe8d/xtune/src/transformers/modeling_tf_gpt2.py#L46-L56
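The tanh expression above approximates the exact GELU, 0.5 * x * (1 + erf(x / sqrt(2))); a quick numeric check, assuming numpy and scipy are available:

import numpy as np
from scipy.special import erf

x = np.linspace(-3.0, 3.0, 601)
approx = 0.5 * x * (1.0 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * x**3)))
exact = 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))
print(np.max(np.abs(approx - exact)))  # on the order of 1e-3 or smaller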
haiwen/seahub
e92fcd44e3e46260597d8faa9347cb8222b8b10d
seahub/tags/models.py
python
FileUUIDMapManager.get_fileuuidmaps_by_parent_path
(self, repo_id, parent_path)
return uuids
[]
def get_fileuuidmaps_by_parent_path(self, repo_id, parent_path):
    repo_id, parent_path = self.model.get_origin_repo_id_and_parent_path(repo_id, parent_path)
    parent_path = FileUUIDMap.normalize_path(parent_path)
    uuids = super(FileUUIDMapManager, self).filter(
        repo_id=repo_id, parent_path=parent_path
    )
    return uuids
[ "def", "get_fileuuidmaps_by_parent_path", "(", "self", ",", "repo_id", ",", "parent_path", ")", ":", "repo_id", ",", "parent_path", "=", "self", ".", "model", ".", "get_origin_repo_id_and_parent_path", "(", "repo_id", ",", "parent_path", ")", "parent_path", "=", "FileUUIDMap", ".", "normalize_path", "(", "parent_path", ")", "uuids", "=", "super", "(", "FileUUIDMapManager", ",", "self", ")", ".", "filter", "(", "repo_id", "=", "repo_id", ",", "parent_path", "=", "parent_path", ")", "return", "uuids" ]
https://github.com/haiwen/seahub/blob/e92fcd44e3e46260597d8faa9347cb8222b8b10d/seahub/tags/models.py#L64-L70
prody/ProDy
b24bbf58aa8fffe463c8548ae50e3955910e5b7f
prody/atomic/dihedral.py
python
Dihedral.setACSIndex
(self, index)
Set the coordinate set at *index* active.
Set the coordinate set at *index* active.
[ "Set", "the", "coordinate", "set", "at", "*", "index", "*", "active", "." ]
def setACSIndex(self, index):
    """Set the coordinate set at *index* active."""

    if self._ag._coords is None:
        raise AttributeError('coordinates are not set')

    if not isinstance(index, Integral):
        raise TypeError('index must be an integer')

    n_csets = self._ag._n_csets
    if n_csets <= index or n_csets < abs(index):
        raise IndexError('coordinate set index is out of range')

    if index < 0:
        index += n_csets

    self._acsi = index
[ "def", "setACSIndex", "(", "self", ",", "index", ")", ":", "if", "self", ".", "_ag", ".", "_coords", "is", "None", ":", "raise", "AttributeError", "(", "'coordinates are not set'", ")", "if", "not", "isinstance", "(", "index", ",", "Integral", ")", ":", "raise", "TypeError", "(", "'index must be an integer'", ")", "n_csets", "=", "self", ".", "_ag", ".", "_n_csets", "if", "n_csets", "<=", "index", "or", "n_csets", "<", "abs", "(", "index", ")", ":", "raise", "IndexError", "(", "'coordinate set index is out of range'", ")", "if", "index", "<", "0", ":", "index", "+=", "n_csets", "self", ".", "_acsi", "=", "index" ]
https://github.com/prody/ProDy/blob/b24bbf58aa8fffe463c8548ae50e3955910e5b7f/prody/atomic/dihedral.py#L120-L136
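The bounds check and negative-index wrap-around above, isolated with plain integers:

n_csets = 10                 # number of coordinate sets
for index in (3, -1, -10):
    assert not (n_csets <= index or n_csets < abs(index))
    if index < 0:
        index += n_csets     # wrap like a Python sequence index
    print(index)             # 3, then 9, then 0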
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/heapq.py
python
heapreplace
(heap, item)
return returnitem
Pop and return the current smallest value, and add the new item.

This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap.  Note that the value
returned may be larger than item!  That constrains reasonable uses of
this routine unless written as part of a conditional replacement:

    if item > heap[0]:
        item = heapreplace(heap, item)
Pop and return the current smallest value, and add the new item.
[ "Pop", "and", "return", "the", "current", "smallest", "value", "and", "add", "the", "new", "item", "." ]
def heapreplace(heap, item):
    """Pop and return the current smallest value, and add the new item.

    This is more efficient than heappop() followed by heappush(), and can be
    more appropriate when using a fixed-size heap.  Note that the value
    returned may be larger than item!  That constrains reasonable uses of
    this routine unless written as part of a conditional replacement:

        if item > heap[0]:
            item = heapreplace(heap, item)
    """
    returnitem = heap[0]    # raises appropriate IndexError if heap is empty
    heap[0] = item
    _siftup(heap, 0)
    return returnitem
[ "def", "heapreplace", "(", "heap", ",", "item", ")", ":", "returnitem", "=", "heap", "[", "0", "]", "# raises appropriate IndexError if heap is empty", "heap", "[", "0", "]", "=", "item", "_siftup", "(", "heap", ",", "0", ")", "return", "returnitem" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/heapq.py#L145-L159
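The conditional-replacement idiom the docstring describes is exactly how a fixed-size top-N heap is maintained. A usage sketch with the standard-library heapq:

import heapq

# Track the 3 largest values of a stream with a fixed-size min-heap:
# replace the root only when the incoming item beats it.
stream = [17, 4, 92, 33, 58, 7, 81, 26]
heap = stream[:3]
heapq.heapify(heap)
for item in stream[3:]:
    if item > heap[0]:
        heapq.heapreplace(heap, item)
print(sorted(heap, reverse=True))  # [92, 81, 58]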
yiranran/Audio-driven-TalkingFace-HeadPose
d062a00a46a5d0ebb4bf66751e7a9af92ee418e8
Audio/code/convolutional_rnn/module.py
python
ConvNdRNNBase.__init__
(self, mode, in_channels, out_channels, kernel_size, num_layers=1, bias=True, batch_first=False, dropout=0., bidirectional=False, convndim=2, stride=1, dilation=1, groups=1)
[]
def __init__(self, mode, in_channels, out_channels, kernel_size, num_layers=1,
             bias=True, batch_first=False, dropout=0., bidirectional=False,
             convndim=2, stride=1, dilation=1, groups=1):
    super(ConvNdRNNBase, self).__init__()
    self.mode = mode
    self.in_channels = in_channels
    self.out_channels = out_channels
    self.num_layers = num_layers
    self.bias = bias
    self.batch_first = batch_first
    self.dropout = dropout
    self.bidirectional = bidirectional
    self.convndim = convndim

    if convndim == 1:
        ntuple = _single
    elif convndim == 2:
        ntuple = _pair
    elif convndim == 3:
        ntuple = _triple
    else:
        raise ValueError('convndim must be 1, 2, or 3, but got {}'.format(convndim))

    self.kernel_size = ntuple(kernel_size)
    self.stride = ntuple(stride)
    self.dilation = ntuple(dilation)
    self.groups = groups

    num_directions = 2 if bidirectional else 1

    if mode in ('LSTM', 'PeepholeLSTM'):
        gate_size = 4 * out_channels
    elif mode == 'GRU':
        gate_size = 3 * out_channels
    else:
        gate_size = out_channels

    self._all_weights = []
    for layer in range(num_layers):
        for direction in range(num_directions):
            layer_input_size = in_channels if layer == 0 else out_channels * num_directions
            w_ih = Parameter(torch.Tensor(gate_size, layer_input_size // groups, *self.kernel_size))
            w_hh = Parameter(torch.Tensor(gate_size, out_channels // groups, *self.kernel_size))
            b_ih = Parameter(torch.Tensor(gate_size))
            b_hh = Parameter(torch.Tensor(gate_size))

            if mode == 'PeepholeLSTM':
                w_pi = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
                w_pf = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
                w_po = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
                layer_params = (w_ih, w_hh, w_pi, w_pf, w_po, b_ih, b_hh)
                param_names = ['weight_ih_l{}{}', 'weight_hh_l{}{}',
                               'weight_pi_l{}{}', 'weight_pf_l{}{}', 'weight_po_l{}{}']
            else:
                layer_params = (w_ih, w_hh, b_ih, b_hh)
                param_names = ['weight_ih_l{}{}', 'weight_hh_l{}{}']
            if bias:
                param_names += ['bias_ih_l{}{}', 'bias_hh_l{}{}']

            suffix = '_reverse' if direction == 1 else ''
            param_names = [x.format(layer, suffix) for x in param_names]

            for name, param in zip(param_names, layer_params):
                setattr(self, name, param)
            self._all_weights.append(param_names)

    self.reset_parameters()
[ "def", "__init__", "(", "self", ",", "mode", ",", "in_channels", ",", "out_channels", ",", "kernel_size", ",", "num_layers", "=", "1", ",", "bias", "=", "True", ",", "batch_first", "=", "False", ",", "dropout", "=", "0.", ",", "bidirectional", "=", "False", ",", "convndim", "=", "2", ",", "stride", "=", "1", ",", "dilation", "=", "1", ",", "groups", "=", "1", ")", ":", "super", "(", "ConvNdRNNBase", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "mode", "=", "mode", "self", ".", "in_channels", "=", "in_channels", "self", ".", "out_channels", "=", "out_channels", "self", ".", "num_layers", "=", "num_layers", "self", ".", "bias", "=", "bias", "self", ".", "batch_first", "=", "batch_first", "self", ".", "dropout", "=", "dropout", "self", ".", "bidirectional", "=", "bidirectional", "self", ".", "convndim", "=", "convndim", "if", "convndim", "==", "1", ":", "ntuple", "=", "_single", "elif", "convndim", "==", "2", ":", "ntuple", "=", "_pair", "elif", "convndim", "==", "3", ":", "ntuple", "=", "_triple", "else", ":", "raise", "ValueError", "(", "'convndim must be 1, 2, or 3, but got {}'", ".", "format", "(", "convndim", ")", ")", "self", ".", "kernel_size", "=", "ntuple", "(", "kernel_size", ")", "self", ".", "stride", "=", "ntuple", "(", "stride", ")", "self", ".", "dilation", "=", "ntuple", "(", "dilation", ")", "self", ".", "groups", "=", "groups", "num_directions", "=", "2", "if", "bidirectional", "else", "1", "if", "mode", "in", "(", "'LSTM'", ",", "'PeepholeLSTM'", ")", ":", "gate_size", "=", "4", "*", "out_channels", "elif", "mode", "==", "'GRU'", ":", "gate_size", "=", "3", "*", "out_channels", "else", ":", "gate_size", "=", "out_channels", "self", ".", "_all_weights", "=", "[", "]", "for", "layer", "in", "range", "(", "num_layers", ")", ":", "for", "direction", "in", "range", "(", "num_directions", ")", ":", "layer_input_size", "=", "in_channels", "if", "layer", "==", "0", "else", "out_channels", "*", "num_directions", "w_ih", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "gate_size", ",", "layer_input_size", "//", "groups", ",", "*", "self", ".", "kernel_size", ")", ")", "w_hh", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "gate_size", ",", "out_channels", "//", "groups", ",", "*", "self", ".", "kernel_size", ")", ")", "b_ih", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "gate_size", ")", ")", "b_hh", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "gate_size", ")", ")", "if", "mode", "==", "'PeepholeLSTM'", ":", "w_pi", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "out_channels", ",", "out_channels", "//", "groups", ",", "*", "self", ".", "kernel_size", ")", ")", "w_pf", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "out_channels", ",", "out_channels", "//", "groups", ",", "*", "self", ".", "kernel_size", ")", ")", "w_po", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "out_channels", ",", "out_channels", "//", "groups", ",", "*", "self", ".", "kernel_size", ")", ")", "layer_params", "=", "(", "w_ih", ",", "w_hh", ",", "w_pi", ",", "w_pf", ",", "w_po", ",", "b_ih", ",", "b_hh", ")", "param_names", "=", "[", "'weight_ih_l{}{}'", ",", "'weight_hh_l{}{}'", ",", "'weight_pi_l{}{}'", ",", "'weight_pf_l{}{}'", ",", "'weight_po_l{}{}'", "]", "else", ":", "layer_params", "=", "(", "w_ih", ",", "w_hh", ",", "b_ih", ",", "b_hh", ")", "param_names", "=", "[", "'weight_ih_l{}{}'", ",", "'weight_hh_l{}{}'", "]", "if", "bias", ":", "param_names", "+=", "[", "'bias_ih_l{}{}'", ",", "'bias_hh_l{}{}'", "]", "suffix", "=", "'_reverse'", "if", 
"direction", "==", "1", "else", "''", "param_names", "=", "[", "x", ".", "format", "(", "layer", ",", "suffix", ")", "for", "x", "in", "param_names", "]", "for", "name", ",", "param", "in", "zip", "(", "param_names", ",", "layer_params", ")", ":", "setattr", "(", "self", ",", "name", ",", "param", ")", "self", ".", "_all_weights", ".", "append", "(", "param_names", ")", "self", ".", "reset_parameters", "(", ")" ]
https://github.com/yiranran/Audio-driven-TalkingFace-HeadPose/blob/d062a00a46a5d0ebb4bf66751e7a9af92ee418e8/Audio/code/convolutional_rnn/module.py#L13-L92
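The parameter-naming scheme built in the loop above (layer index plus a `_reverse` suffix per direction) mirrors torch.nn.RNNBase. A small sketch that reproduces just the name generation:

def rnn_param_names(num_layers, bidirectional, bias=True):
    # Reproduces only the name generation from the loop above.
    names = []
    for layer in range(num_layers):
        for direction in range(2 if bidirectional else 1):
            suffix = '_reverse' if direction == 1 else ''
            base = ['weight_ih_l{}{}', 'weight_hh_l{}{}']
            if bias:
                base += ['bias_ih_l{}{}', 'bias_hh_l{}{}']
            names.extend(name.format(layer, suffix) for name in base)
    return names

print(rnn_param_names(2, bidirectional=True))
# ['weight_ih_l0', 'weight_hh_l0', 'bias_ih_l0', 'bias_hh_l0',
#  'weight_ih_l0_reverse', ..., 'bias_hh_l1_reverse']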
jgagneastro/coffeegrindsize
22661ebd21831dba4cf32bfc6ba59fe3d49f879c
App/venv/lib/python3.7/site-packages/pip/_vendor/distro.py
python
name
(pretty=False)
return _distro.name(pretty)
Return the name of the current OS distribution, as a human-readable
string.

If *pretty* is false, the name is returned without version or codename.
(e.g. "CentOS Linux")

If *pretty* is true, the version and codename are appended.
(e.g. "CentOS Linux 7.1.1503 (Core)")

**Lookup hierarchy:**

The name is obtained from the following sources, in the specified order.
The first available and non-empty value is used:

* If *pretty* is false:

  - the value of the "NAME" attribute of the os-release file,
  - the value of the "Distributor ID" attribute returned by the lsb_release command,
  - the value of the "<name>" field of the distro release file.

* If *pretty* is true:

  - the value of the "PRETTY_NAME" attribute of the os-release file,
  - the value of the "Description" attribute returned by the lsb_release command,
  - the value of the "<name>" field of the distro release file, appended
    with the value of the pretty version ("<version_id>" and "<codename>"
    fields) of the distro release file, if available.
Return the name of the current OS distribution, as a human-readable string.
[ "Return", "the", "name", "of", "the", "current", "OS", "distribution", "as", "a", "human", "-", "readable", "string", "." ]
def name(pretty=False):
    """
    Return the name of the current OS distribution, as a human-readable
    string.

    If *pretty* is false, the name is returned without version or codename.
    (e.g. "CentOS Linux")

    If *pretty* is true, the version and codename are appended.
    (e.g. "CentOS Linux 7.1.1503 (Core)")

    **Lookup hierarchy:**

    The name is obtained from the following sources, in the specified order.
    The first available and non-empty value is used:

    * If *pretty* is false:

      - the value of the "NAME" attribute of the os-release file,
      - the value of the "Distributor ID" attribute returned by the
        lsb_release command,
      - the value of the "<name>" field of the distro release file.

    * If *pretty* is true:

      - the value of the "PRETTY_NAME" attribute of the os-release file,
      - the value of the "Description" attribute returned by the
        lsb_release command,
      - the value of the "<name>" field of the distro release file, appended
        with the value of the pretty version ("<version_id>" and "<codename>"
        fields) of the distro release file, if available.
    """
    return _distro.name(pretty)
[ "def", "name", "(", "pretty", "=", "False", ")", ":", "return", "_distro", ".", "name", "(", "pretty", ")" ]
https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/venv/lib/python3.7/site-packages/pip/_vendor/distro.py#L203-L239
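For reference, this vendored module mirrors the standalone `distro` package, which exposes the same function; typical usage (the printed values depend on the host OS, shown here only as examples):

import distro

print(distro.name())             # e.g. "Ubuntu"
print(distro.name(pretty=True))  # e.g. "Ubuntu 20.04.6 LTS"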
dropbox/PyHive
b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0
TCLIService/ttypes.py
python
TGetPrimaryKeysReq.__ne__
(self, other)
return not (self == other)
[]
def __ne__(self, other):
    return not (self == other)
[ "def", "__ne__", "(", "self", ",", "other", ")", ":", "return", "not", "(", "self", "==", "other", ")" ]
https://github.com/dropbox/PyHive/blob/b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0/TCLIService/ttypes.py#L5354-L5355
cheran-senthil/PyRival
ce94312d429f368b724cdd8d3192935e34b7ba66
pyrival/algebra/factors.py
python
prime_factors
(n)
return Counter([n]) if f == n else prime_factors(f) + prime_factors(n // f)
returns a Counter of the prime factorization of n
returns a Counter of the prime factorization of n
[ "returns", "a", "Counter", "of", "the", "prime", "factorization", "of", "n" ]
def prime_factors(n):
    """returns a Counter of the prime factorization of n"""
    if n <= 1:
        return Counter()
    f = pollard_rho(n)
    return Counter([n]) if f == n else prime_factors(f) + prime_factors(n // f)
[ "def", "prime_factors", "(", "n", ")", ":", "if", "n", "<=", "1", ":", "return", "Counter", "(", ")", "f", "=", "pollard_rho", "(", "n", ")", "return", "Counter", "(", "[", "n", "]", ")", "if", "f", "==", "n", "else", "prime_factors", "(", "f", ")", "+", "prime_factors", "(", "n", "//", "f", ")" ]
https://github.com/cheran-senthil/PyRival/blob/ce94312d429f368b724cdd8d3192935e34b7ba66/pyrival/algebra/factors.py#L55-L60
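To make the return value concrete: for n = 360 the function would return Counter({2: 3, 3: 2, 5: 1}), and multiplying p**k back together recovers n. Since pollard_rho lives in the same module, this sketch only illustrates the Counter semantics:

from collections import Counter

factors = Counter({2: 3, 3: 2, 5: 1})  # what prime_factors(360) would return
n = 1
for p, k in factors.items():
    n *= p ** k
print(n)  # 360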
evennia/evennia
fa79110ba6b219932f22297838e8ac72ebc0be0e
evennia/utils/evtable.py
python
EvCell._split_lines
(self, text)
return text.split("\n")
Simply split by linebreaks

Args:
    text (str): text to split.

Returns:
    split (list): split text.
Simply split by linebreaks
[ "Simply", "split", "by", "linebreaks" ]
def _split_lines(self, text):
    """
    Simply split by linebreaks

    Args:
        text (str): text to split.

    Returns:
        split (list): split text.

    """
    return text.split("\n")
[ "def", "_split_lines", "(", "self", ",", "text", ")", ":", "return", "text", ".", "split", "(", "\"\\n\"", ")" ]
https://github.com/evennia/evennia/blob/fa79110ba6b219932f22297838e8ac72ebc0be0e/evennia/utils/evtable.py#L487-L497
1040003585/WebScrapingWithPython
a770fa5b03894076c8c9539b1ffff34424ffc016
portia_examle/lib/python2.7/site-packages/pip/_vendor/requests/utils.py
python
should_bypass_proxies
(url)
return False
Returns whether we should bypass proxies or not.

:rtype: bool
Returns whether we should bypass proxies or not.
[ "Returns", "whether", "we", "should", "bypass", "proxies", "or", "not", "." ]
def should_bypass_proxies(url):
    """
    Returns whether we should bypass proxies or not.

    :rtype: bool
    """
    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy = get_proxy('no_proxy')
    netloc = urlparse(url).netloc

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the netloc, both with and without the port.
        no_proxy = (
            host for host in no_proxy.replace(' ', '').split(',') if host
        )
        ip = netloc.split(':')[0]
        if is_ipv4_address(ip):
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(ip, proxy_ip):
                        return True
                elif ip == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            for host in no_proxy:
                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    # The proxy_bypass function is incredibly buggy on macOS in early versions
    # of Python 2.6, so allow this call to fail. Only catch the specific
    # exceptions we've seen, though: this call failing in other ways can reveal
    # legitimate problems.
    try:
        bypass = proxy_bypass(netloc)
    except (TypeError, socket.gaierror):
        bypass = False

    if bypass:
        return True

    return False
[ "def", "should_bypass_proxies", "(", "url", ")", ":", "get_proxy", "=", "lambda", "k", ":", "os", ".", "environ", ".", "get", "(", "k", ")", "or", "os", ".", "environ", ".", "get", "(", "k", ".", "upper", "(", ")", ")", "# First check whether no_proxy is defined. If it is, check that the URL", "# we're getting isn't in the no_proxy list.", "no_proxy", "=", "get_proxy", "(", "'no_proxy'", ")", "netloc", "=", "urlparse", "(", "url", ")", ".", "netloc", "if", "no_proxy", ":", "# We need to check whether we match here. We need to see if we match", "# the end of the netloc, both with and without the port.", "no_proxy", "=", "(", "host", "for", "host", "in", "no_proxy", ".", "replace", "(", "' '", ",", "''", ")", ".", "split", "(", "','", ")", "if", "host", ")", "ip", "=", "netloc", ".", "split", "(", "':'", ")", "[", "0", "]", "if", "is_ipv4_address", "(", "ip", ")", ":", "for", "proxy_ip", "in", "no_proxy", ":", "if", "is_valid_cidr", "(", "proxy_ip", ")", ":", "if", "address_in_network", "(", "ip", ",", "proxy_ip", ")", ":", "return", "True", "elif", "ip", "==", "proxy_ip", ":", "# If no_proxy ip was defined in plain IP notation instead of cidr notation &", "# matches the IP of the index", "return", "True", "else", ":", "for", "host", "in", "no_proxy", ":", "if", "netloc", ".", "endswith", "(", "host", ")", "or", "netloc", ".", "split", "(", "':'", ")", "[", "0", "]", ".", "endswith", "(", "host", ")", ":", "# The URL does match something in no_proxy, so we don't want", "# to apply the proxies on this URL.", "return", "True", "# If the system proxy settings indicate that this URL should be bypassed,", "# don't proxy.", "# The proxy_bypass function is incredibly buggy on macOS in early versions", "# of Python 2.6, so allow this call to fail. Only catch the specific", "# exceptions we've seen, though: this call failing in other ways can reveal", "# legitimate problems.", "try", ":", "bypass", "=", "proxy_bypass", "(", "netloc", ")", "except", "(", "TypeError", ",", "socket", ".", "gaierror", ")", ":", "bypass", "=", "False", "if", "bypass", ":", "return", "True", "return", "False" ]
https://github.com/1040003585/WebScrapingWithPython/blob/a770fa5b03894076c8c9539b1ffff34424ffc016/portia_examle/lib/python2.7/site-packages/pip/_vendor/requests/utils.py#L545-L596
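The CIDR membership test delegated to address_in_network() can be reproduced with the stdlib ipaddress module (which this Python 2 vendored copy predates); a minimal sketch:

import ipaddress

def address_in_network_stdlib(ip, net):
    # Equivalent of requests' address_in_network(ip, net) CIDR test.
    return ipaddress.ip_address(ip) in ipaddress.ip_network(net)

print(address_in_network_stdlib('10.0.1.5', '10.0.0.0/16'))     # True
print(address_in_network_stdlib('192.168.1.5', '10.0.0.0/16'))  # False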
nipy/nipy
d16d268938dcd5c15748ca051532c21f57cf8a22
nipy/algorithms/statistics/models/model.py
python
LikelihoodModel.score
(self, theta, Y, nuisance=None)
Gradient of logL with respect to theta.

This is the score function of the model
Gradient of logL with respect to theta.
[ "Gradient", "of", "logL", "with", "respect", "to", "theta", "." ]
def score(self, theta, Y, nuisance=None):
    """
    Gradient of logL with respect to theta.

    This is the score function of the model
    """
    raise NotImplementedError
[ "def", "score", "(", "self", ",", "theta", ",", "Y", ",", "nuisance", "=", "None", ")", ":", "raise", "NotImplementedError" ]
https://github.com/nipy/nipy/blob/d16d268938dcd5c15748ca051532c21f57cf8a22/nipy/algorithms/statistics/models/model.py#L56-L61
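Since this base method only defines the interface, a subclass supplies the actual gradient. A hedged sketch of checking such an implementation with central finite differences (the names below are illustrative, not nipy API):

import numpy as np

def numerical_score(loglike, theta, Y, eps=1e-6):
    """Central finite-difference approximation of the score."""
    theta = np.asarray(theta, dtype=float)
    grad = np.empty_like(theta)
    for i in range(theta.size):
        step = np.zeros_like(theta)
        step[i] = eps
        grad[i] = (loglike(theta + step, Y) - loglike(theta - step, Y)) / (2 * eps)
    return grad

# For a unit-variance Gaussian logL, the score vanishes at the sample mean:
Y = np.array([1.0, 2.0, 3.0])
loglike = lambda theta, Y: -0.5 * np.sum((Y - theta[0]) ** 2)
print(numerical_score(loglike, [Y.mean()], Y))  # approximately [0.]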
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/Xenotix Python Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/lib2to3/fixer_util.py
python
is_list
(node)
return (isinstance(node, Node) and len(node.children) > 1 and isinstance(node.children[0], Leaf) and isinstance(node.children[-1], Leaf) and node.children[0].value == u"[" and node.children[-1].value == u"]")
Does the node represent a list literal?
Does the node represent a list literal?
[ "Does", "the", "node", "represent", "a", "list", "literal?" ]
def is_list(node):
    """Does the node represent a list literal?"""
    return (isinstance(node, Node)
            and len(node.children) > 1
            and isinstance(node.children[0], Leaf)
            and isinstance(node.children[-1], Leaf)
            and node.children[0].value == u"["
            and node.children[-1].value == u"]")
[ "def", "is_list", "(", "node", ")", ":", "return", "(", "isinstance", "(", "node", ",", "Node", ")", "and", "len", "(", "node", ".", "children", ")", ">", "1", "and", "isinstance", "(", "node", ".", "children", "[", "0", "]", ",", "Leaf", ")", "and", "isinstance", "(", "node", ".", "children", "[", "-", "1", "]", ",", "Leaf", ")", "and", "node", ".", "children", "[", "0", "]", ".", "value", "==", "u\"[\"", "and", "node", ".", "children", "[", "-", "1", "]", ".", "value", "==", "u\"]\"", ")" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/lib2to3/fixer_util.py#L149-L156
fluentpython/notebooks
0f6e1e8d1686743dacd9281df7c5b5921812010a
20-descriptor/bulkfood/bulkfood_v4.py
python
LineItem.__init__
(self, description, weight, price)
[]
def __init__(self, description, weight, price):
    self.description = description
    self.weight = weight
    self.price = price
[ "def", "__init__", "(", "self", ",", "description", ",", "weight", ",", "price", ")", ":", "self", ".", "description", "=", "description", "self", ".", "weight", "=", "weight", "self", ".", "price", "=", "price" ]
https://github.com/fluentpython/notebooks/blob/0f6e1e8d1686743dacd9281df7c5b5921812010a/20-descriptor/bulkfood/bulkfood_v4.py#L67-L70
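In bulkfood_v4, weight and price are class-level descriptor instances, so the plain-looking assignments above are routed through __set__ validation. A minimal sketch of such a validating descriptor, simplified from the chapter's Quantity (details here are a sketch, not the book's exact code):

class Quantity:
    """Descriptor that rejects non-positive values."""
    _counter = 0  # used to generate a unique storage name per descriptor

    def __init__(self):
        cls = self.__class__
        self.storage_name = '_{}#{}'.format(cls.__name__, cls._counter)
        cls._counter += 1

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return getattr(instance, self.storage_name)

    def __set__(self, instance, value):
        if value <= 0:
            raise ValueError('value must be > 0')
        setattr(instance, self.storage_name, value)


class LineItem:
    weight = Quantity()
    price = Quantity()

    def __init__(self, description, weight, price):
        self.description = description
        self.weight = weight  # routed through Quantity.__set__
        self.price = price

item = LineItem('golden raisins', 10, 6.95)
print(item.weight)  # 10; LineItem('x', 0, 1) would raise ValueError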
omz/PythonistaAppTemplate
f560f93f8876d82a21d108977f90583df08d55af
PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/polys/euclidtools.py
python
dmp_content
(f, u, K)
Returns GCD of multivariate coefficients.

Examples
========

>>> from sympy.polys import ring, ZZ
>>> R, x,y, = ring("x,y", ZZ)

>>> R.dmp_content(2*x*y + 6*x + 4*y + 12)
2*y + 6
Returns GCD of multivariate coefficients.
[ "Returns", "GCD", "of", "multivariate", "coefficients", "." ]
def dmp_content(f, u, K):
    """
    Returns GCD of multivariate coefficients.

    Examples
    ========

    >>> from sympy.polys import ring, ZZ
    >>> R, x,y, = ring("x,y", ZZ)

    >>> R.dmp_content(2*x*y + 6*x + 4*y + 12)
    2*y + 6

    """
    cont, v = dmp_LC(f, K), u - 1

    if dmp_zero_p(f, u):
        return cont

    for c in f[1:]:
        cont = dmp_gcd(cont, c, v, K)

        if dmp_one_p(cont, v, K):
            break

    if K.is_negative(dmp_ground_LC(cont, v, K)):
        return dmp_neg(cont, v, K)
    else:
        return cont
[ "def", "dmp_content", "(", "f", ",", "u", ",", "K", ")", ":", "cont", ",", "v", "=", "dmp_LC", "(", "f", ",", "K", ")", ",", "u", "-", "1", "if", "dmp_zero_p", "(", "f", ",", "u", ")", ":", "return", "cont", "for", "c", "in", "f", "[", "1", ":", "]", ":", "cont", "=", "dmp_gcd", "(", "cont", ",", "c", ",", "v", ",", "K", ")", "if", "dmp_one_p", "(", "cont", ",", "v", ",", "K", ")", ":", "break", "if", "K", ".", "is_negative", "(", "dmp_ground_LC", "(", "cont", ",", "v", ",", "K", ")", ")", ":", "return", "dmp_neg", "(", "cont", ",", "v", ",", "K", ")", "else", ":", "return", "cont" ]
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/polys/euclidtools.py#L1819-L1847
matrix-org/synapse
8e57584a5859a9002759963eb546d523d2498a01
synapse/federation/transport/client.py
python
TransportLayerClient.get_group_profile
( self, destination: str, group_id: str, requester_user_id: str )
return await self.client.get_json( destination=destination, path=path, args={"requester_user_id": requester_user_id}, ignore_backoff=True, )
Get a group profile
Get a group profile
[ "Get", "a", "group", "profile" ]
async def get_group_profile(
    self, destination: str, group_id: str, requester_user_id: str
) -> JsonDict:
    """Get a group profile"""
    path = _create_v1_path("/groups/%s/profile", group_id)

    return await self.client.get_json(
        destination=destination,
        path=path,
        args={"requester_user_id": requester_user_id},
        ignore_backoff=True,
    )
[ "async", "def", "get_group_profile", "(", "self", ",", "destination", ":", "str", ",", "group_id", ":", "str", ",", "requester_user_id", ":", "str", ")", "->", "JsonDict", ":", "path", "=", "_create_v1_path", "(", "\"/groups/%s/profile\"", ",", "group_id", ")", "return", "await", "self", ".", "client", ".", "get_json", "(", "destination", "=", "destination", ",", "path", "=", "path", ",", "args", "=", "{", "\"requester_user_id\"", ":", "requester_user_id", "}", ",", "ignore_backoff", "=", "True", ",", ")" ]
https://github.com/matrix-org/synapse/blob/8e57584a5859a9002759963eb546d523d2498a01/synapse/federation/transport/client.py#L684-L695
aws/aws-parallelcluster
f1fe5679a01c524e7ea904c329bd6d17318c6cd9
cli/src/pcluster/api/pcluster_api.py
python
PclusterApi.describe_cluster_instances
( cluster_name: str, region: str, node_type: NodeType = None )
List instances for a cluster.
List instances for a cluster.
[ "List", "instances", "for", "a", "cluster", "." ]
def describe_cluster_instances(
    cluster_name: str, region: str, node_type: NodeType = None
) -> Union[List[ClusterInstanceInfo], ApiFailure]:
    """List instances for a cluster."""
    try:
        if region:
            os.environ["AWS_DEFAULT_REGION"] = region
        cluster = Cluster(cluster_name)
        instances = []
        if node_type == NodeType.HEAD_NODE or node_type is None:
            instances.append(cluster.head_node_instance)
        if node_type == NodeType.COMPUTE or node_type is None:
            instances += cluster.compute_instances
        return [ClusterInstanceInfo(instance) for instance in instances]
    except Exception as e:
        return ApiFailure(str(e))
[ "def", "describe_cluster_instances", "(", "cluster_name", ":", "str", ",", "region", ":", "str", ",", "node_type", ":", "NodeType", "=", "None", ")", "->", "Union", "[", "List", "[", "ClusterInstanceInfo", "]", ",", "ApiFailure", "]", ":", "try", ":", "if", "region", ":", "os", ".", "environ", "[", "\"AWS_DEFAULT_REGION\"", "]", "=", "region", "cluster", "=", "Cluster", "(", "cluster_name", ")", "instances", "=", "[", "]", "if", "node_type", "==", "NodeType", ".", "HEAD_NODE", "or", "node_type", "is", "None", ":", "instances", ".", "append", "(", "cluster", ".", "head_node_instance", ")", "if", "node_type", "==", "NodeType", ".", "COMPUTE", "or", "node_type", "is", "None", ":", "instances", "+=", "cluster", ".", "compute_instances", "return", "[", "ClusterInstanceInfo", "(", "instance", ")", "for", "instance", "in", "instances", "]", "except", "Exception", "as", "e", ":", "return", "ApiFailure", "(", "str", "(", "e", ")", ")" ]
https://github.com/aws/aws-parallelcluster/blob/f1fe5679a01c524e7ea904c329bd6d17318c6cd9/cli/src/pcluster/api/pcluster_api.py#L269-L286
megvii-model/SinglePathOneShot
36eed6cf083497ffa9cfe7b8da25bb0b6ba5a452
src/Supernet/network.py
python
ShuffleNetV2_OneShot.forward
(self, x, architecture)
return x
[]
def forward(self, x, architecture):
    assert self.archLen == len(architecture)
    x = self.first_conv(x)
    for archs, arch_id in zip(self.features, architecture):
        x = archs[arch_id](x)
    x = self.conv_last(x)
    x = self.globalpool(x)
    x = self.dropout(x)
    x = x.contiguous().view(-1, self.stage_out_channels[-1])
    x = self.classifier(x)
    return x
[ "def", "forward", "(", "self", ",", "x", ",", "architecture", ")", ":", "assert", "self", ".", "archLen", "==", "len", "(", "architecture", ")", "x", "=", "self", ".", "first_conv", "(", "x", ")", "for", "archs", ",", "arch_id", "in", "zip", "(", "self", ".", "features", ",", "architecture", ")", ":", "x", "=", "archs", "[", "arch_id", "]", "(", "x", ")", "x", "=", "self", ".", "conv_last", "(", "x", ")", "x", "=", "self", ".", "globalpool", "(", "x", ")", "x", "=", "self", ".", "dropout", "(", "x", ")", "x", "=", "x", ".", "contiguous", "(", ")", ".", "view", "(", "-", "1", ",", "self", ".", "stage_out_channels", "[", "-", "1", "]", ")", "x", "=", "self", ".", "classifier", "(", "x", ")", "return", "x" ]
https://github.com/megvii-model/SinglePathOneShot/blob/36eed6cf083497ffa9cfe7b8da25bb0b6ba5a452/src/Supernet/network.py#L77-L92
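The forward pass above indexes one candidate op per layer, which is the single-path sampling idea of the Single Path One-Shot supernet. A toy PyTorch sketch of the same choice-block mechanism (not the repo's actual blocks):

import torch
import torch.nn as nn

class TinySupernet(nn.Module):
    def __init__(self, channels=8, num_layers=3, num_choices=4):
        super().__init__()
        # Each layer holds several candidate convs; `architecture` picks one.
        self.features = nn.ModuleList(
            nn.ModuleList(
                nn.Conv2d(channels, channels, k, padding=k // 2)
                for k in (1, 3, 5, 7)[:num_choices]
            )
            for _ in range(num_layers)
        )

    def forward(self, x, architecture):
        assert len(architecture) == len(self.features)
        for ops, arch_id in zip(self.features, architecture):
            x = ops[arch_id](x)
        return x

net = TinySupernet()
x = torch.randn(1, 8, 16, 16)
out = net(x, architecture=[0, 2, 1])  # one candidate op per layer
print(out.shape)  # torch.Size([1, 8, 16, 16])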
psychopy/psychopy
01b674094f38d0e0bd51c45a6f66f671d7041696
psychopy/visual/movie2.py
python
MovieStim2._createAudioStream
(self)
Create the audio stream player for the video using pyvlc.
Create the audio stream player for the video using pyvlc.
[ "Create", "the", "audio", "stream", "player", "for", "the", "video", "using", "pyvlc", "." ]
def _createAudioStream(self):
    """
    Create the audio stream player for the video using pyvlc.
    """
    if not os.access(self.filename, os.R_OK):
        raise RuntimeError('Error: %s file not readable' % self.filename)
    self._vlc_instance = vlc.Instance('--novideo')
    try:
        self._audio_stream = self._vlc_instance.media_new(self.filename)
    except NameError:
        msg = 'NameError: %s vs LibVLC %s'
        raise ImportError(msg % (vlc.__version__,
                                 vlc.libvlc_get_version()))
    self._audio_stream_player = self._vlc_instance.media_player_new()
    self._audio_stream_player.set_media(self._audio_stream)
    self._audio_stream_event_manager = self._audio_stream_player.event_manager()
    self._audio_stream_event_manager.event_attach(
        vlc.EventType.MediaPlayerTimeChanged, _audioTimeCallback,
        weakref.ref(self), self._audio_stream_player)
    self._audio_stream_event_manager.event_attach(
        vlc.EventType.MediaPlayerEndReached, _audioEndCallback,
        weakref.ref(self))
[ "def", "_createAudioStream", "(", "self", ")", ":", "if", "not", "os", ".", "access", "(", "self", ".", "filename", ",", "os", ".", "R_OK", ")", ":", "raise", "RuntimeError", "(", "'Error: %s file not readable'", "%", "self", ".", "filename", ")", "self", ".", "_vlc_instance", "=", "vlc", ".", "Instance", "(", "'--novideo'", ")", "try", ":", "self", ".", "_audio_stream", "=", "self", ".", "_vlc_instance", ".", "media_new", "(", "self", ".", "filename", ")", "except", "NameError", ":", "msg", "=", "'NameError: %s vs LibVLC %s'", "raise", "ImportError", "(", "msg", "%", "(", "vlc", ".", "__version__", ",", "vlc", ".", "libvlc_get_version", "(", ")", ")", ")", "self", ".", "_audio_stream_player", "=", "self", ".", "_vlc_instance", ".", "media_player_new", "(", ")", "self", ".", "_audio_stream_player", ".", "set_media", "(", "self", ".", "_audio_stream", ")", "self", ".", "_audio_stream_event_manager", "=", "self", ".", "_audio_stream_player", ".", "event_manager", "(", ")", "self", ".", "_audio_stream_event_manager", ".", "event_attach", "(", "vlc", ".", "EventType", ".", "MediaPlayerTimeChanged", ",", "_audioTimeCallback", ",", "weakref", ".", "ref", "(", "self", ")", ",", "self", ".", "_audio_stream_player", ")", "self", ".", "_audio_stream_event_manager", ".", "event_attach", "(", "vlc", ".", "EventType", ".", "MediaPlayerEndReached", ",", "_audioEndCallback", ",", "weakref", ".", "ref", "(", "self", ")", ")" ]
https://github.com/psychopy/psychopy/blob/01b674094f38d0e0bd51c45a6f66f671d7041696/psychopy/visual/movie2.py#L376-L397
alinlab/Confident_classifier
462db01967f8a96374f2ab6a534b7c81fd872d2f
src/calculate_log.py
python
auprOut
(dir_name)
return auprBase
[]
def auprOut(dir_name):
    #calculate the AUPR
    cifar = np.loadtxt('%s/confidence_Base_In.txt'%dir_name, delimiter=',')
    other = np.loadtxt('%s/confidence_Base_Out.txt'%dir_name, delimiter=',')
    Y1 = other
    X1 = cifar
    end = np.max([np.max(X1), np.max(Y1)])
    start = np.min([np.min(X1),np.min(Y1)])
    gap = (end- start)/200000
    auprBase = 0.0
    recallTemp = 1.0
    for delta in np.arange(end, start, -gap):
        fp = np.sum(np.sum(X1 < delta)) / np.float(len(X1))
        tp = np.sum(np.sum(Y1 < delta)) / np.float(len(Y1))
        if tp + fp == 0:
            break
        precision = tp / (tp + fp)
        recall = tp
        auprBase += (recallTemp-recall)*precision
        recallTemp = recall
    auprBase += recall * precision
    return auprBase
[ "def", "auprOut", "(", "dir_name", ")", ":", "#calculate the AUPR", "cifar", "=", "np", ".", "loadtxt", "(", "'%s/confidence_Base_In.txt'", "%", "dir_name", ",", "delimiter", "=", "','", ")", "other", "=", "np", ".", "loadtxt", "(", "'%s/confidence_Base_Out.txt'", "%", "dir_name", ",", "delimiter", "=", "','", ")", "Y1", "=", "other", "X1", "=", "cifar", "end", "=", "np", ".", "max", "(", "[", "np", ".", "max", "(", "X1", ")", ",", "np", ".", "max", "(", "Y1", ")", "]", ")", "start", "=", "np", ".", "min", "(", "[", "np", ".", "min", "(", "X1", ")", ",", "np", ".", "min", "(", "Y1", ")", "]", ")", "gap", "=", "(", "end", "-", "start", ")", "/", "200000", "auprBase", "=", "0.0", "recallTemp", "=", "1.0", "for", "delta", "in", "np", ".", "arange", "(", "end", ",", "start", ",", "-", "gap", ")", ":", "fp", "=", "np", ".", "sum", "(", "np", ".", "sum", "(", "X1", "<", "delta", ")", ")", "/", "np", ".", "float", "(", "len", "(", "X1", ")", ")", "tp", "=", "np", ".", "sum", "(", "np", ".", "sum", "(", "Y1", "<", "delta", ")", ")", "/", "np", ".", "float", "(", "len", "(", "Y1", ")", ")", "if", "tp", "+", "fp", "==", "0", ":", "break", "precision", "=", "tp", "/", "(", "tp", "+", "fp", ")", "recall", "=", "tp", "auprBase", "+=", "(", "recallTemp", "-", "recall", ")", "*", "precision", "recallTemp", "=", "recall", "auprBase", "+=", "recall", "*", "precision", "return", "auprBase" ]
https://github.com/alinlab/Confident_classifier/blob/462db01967f8a96374f2ab6a534b7c81fd872d2f/src/calculate_log.py#L97-L119
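The manual threshold sweep above computes AUPR with out-of-distribution samples treated as positives. Assuming that framing, the same quantity can be cross-checked with scikit-learn on synthetic stand-ins for the two confidence files:

import numpy as np
from sklearn.metrics import average_precision_score

rng = np.random.default_rng(0)
conf_in = rng.normal(0.8, 0.1, 1000)   # stand-in for confidence_Base_In.txt
conf_out = rng.normal(0.4, 0.1, 1000)  # stand-in for confidence_Base_Out.txt

# OOD samples are the positives; negate so lower confidence ranks higher.
y_true = np.concatenate([np.zeros_like(conf_in), np.ones_like(conf_out)])
scores = -np.concatenate([conf_in, conf_out])
print(average_precision_score(y_true, scores))  # close to 1.0 for well-separated data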
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/somfy/api.py
python
ConfigEntrySomfyApi.refresh_tokens
( self, )
return self.session.token
Refresh and return new Somfy tokens using Home Assistant OAuth2 session.
Refresh and return new Somfy tokens using Home Assistant OAuth2 session.
[ "Refresh", "and", "return", "new", "Somfy", "tokens", "using", "Home", "Assistant", "OAuth2", "session", "." ]
def refresh_tokens(
    self,
) -> dict[str, str | int]:
    """Refresh and return new Somfy tokens using Home Assistant OAuth2 session."""
    run_coroutine_threadsafe(
        self.session.async_ensure_token_valid(), self.hass.loop
    ).result()

    return self.session.token
[ "def", "refresh_tokens", "(", "self", ",", ")", "->", "dict", "[", "str", ",", "str", "|", "int", "]", ":", "run_coroutine_threadsafe", "(", "self", ".", "session", ".", "async_ensure_token_valid", "(", ")", ",", "self", ".", "hass", ".", "loop", ")", ".", "result", "(", ")", "return", "self", ".", "session", ".", "token" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/somfy/api.py#L29-L37
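run_coroutine_threadsafe is the standard bridge for calling a coroutine on an event loop owned by another thread and blocking on the result. A self-contained sketch of the pattern outside Home Assistant (the coroutine body is a stand-in for the real refresh I/O):

import asyncio
import threading

loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

async def ensure_token_valid():
    await asyncio.sleep(0.1)  # stand-in for the actual token refresh
    return {"access_token": "abc", "expires_in": 3600}

future = asyncio.run_coroutine_threadsafe(ensure_token_valid(), loop)
print(future.result())  # blocks the calling thread until the coroutine finishes
loop.call_soon_threadsafe(loop.stop)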
awslabs/autogluon
7309118f2ab1c9519f25acf61a283a95af95842b
common/src/autogluon/common/features/feature_metadata.py
python
FeatureMetadata.print_feature_metadata_full
(self, log_prefix='', print_only_one_special=False, log_level=20, max_list_len=5, return_str=False)
[]
def print_feature_metadata_full(self, log_prefix='', print_only_one_special=False, log_level=20, max_list_len=5, return_str=False):
    feature_metadata_dict = self.to_dict(inverse=True)
    if not feature_metadata_dict:
        if return_str:
            return ''
        else:
            return
    keys = list(feature_metadata_dict.keys())
    keys = sorted(keys)
    output = [((key[0], list(key[1])), feature_metadata_dict[key]) for key in keys]
    output_str = ''
    if print_only_one_special:
        for i, ((raw, special), features) in enumerate(output):
            if len(special) == 1:
                output[i] = ((raw, special[0]), features)
            elif len(special) > 1:
                output[i] = ((raw, special[0]), features)
                logger.warning(f'Warning: print_only_one_special=True was set, but features with {len(special)} special types were found. Invalid Types: {output[i]}')
            else:
                output[i] = ((raw, None), features)
    max_key_len = max([len(str(key)) for key, _ in output])
    max_val_len = max([len(str(len(val))) for _, val in output])
    for key, val in output:
        key_len = len(str(key))
        val_len = len(str(len(val)))
        max_key_minus_cur = max(max_key_len - key_len, 0)
        max_val_minus_cur = max(max_val_len - val_len, 0)
        if max_list_len is not None:
            features = str(val[:max_list_len])
            if len(val) > max_list_len:
                features = features[:-1] + ', ...]'
        else:
            features = str(val)
        if val:
            message = f'{log_prefix}{key}{" " * max_key_minus_cur} : {" " * max_val_minus_cur}{len(val)} | {features}'
            if return_str:
                output_str += message + '\n'
            else:
                logger.log(log_level, message)
    if return_str:
        if output_str[-1] == '\n':
            output_str = output_str[:-1]
        return output_str
[ "def", "print_feature_metadata_full", "(", "self", ",", "log_prefix", "=", "''", ",", "print_only_one_special", "=", "False", ",", "log_level", "=", "20", ",", "max_list_len", "=", "5", ",", "return_str", "=", "False", ")", ":", "feature_metadata_dict", "=", "self", ".", "to_dict", "(", "inverse", "=", "True", ")", "if", "not", "feature_metadata_dict", ":", "if", "return_str", ":", "return", "''", "else", ":", "return", "keys", "=", "list", "(", "feature_metadata_dict", ".", "keys", "(", ")", ")", "keys", "=", "sorted", "(", "keys", ")", "output", "=", "[", "(", "(", "key", "[", "0", "]", ",", "list", "(", "key", "[", "1", "]", ")", ")", ",", "feature_metadata_dict", "[", "key", "]", ")", "for", "key", "in", "keys", "]", "output_str", "=", "''", "if", "print_only_one_special", ":", "for", "i", ",", "(", "(", "raw", ",", "special", ")", ",", "features", ")", "in", "enumerate", "(", "output", ")", ":", "if", "len", "(", "special", ")", "==", "1", ":", "output", "[", "i", "]", "=", "(", "(", "raw", ",", "special", "[", "0", "]", ")", ",", "features", ")", "elif", "len", "(", "special", ")", ">", "1", ":", "output", "[", "i", "]", "=", "(", "(", "raw", ",", "special", "[", "0", "]", ")", ",", "features", ")", "logger", ".", "warning", "(", "f'Warning: print_only_one_special=True was set, but features with {len(special)} special types were found. Invalid Types: {output[i]}'", ")", "else", ":", "output", "[", "i", "]", "=", "(", "(", "raw", ",", "None", ")", ",", "features", ")", "max_key_len", "=", "max", "(", "[", "len", "(", "str", "(", "key", ")", ")", "for", "key", ",", "_", "in", "output", "]", ")", "max_val_len", "=", "max", "(", "[", "len", "(", "str", "(", "len", "(", "val", ")", ")", ")", "for", "_", ",", "val", "in", "output", "]", ")", "for", "key", ",", "val", "in", "output", ":", "key_len", "=", "len", "(", "str", "(", "key", ")", ")", "val_len", "=", "len", "(", "str", "(", "len", "(", "val", ")", ")", ")", "max_key_minus_cur", "=", "max", "(", "max_key_len", "-", "key_len", ",", "0", ")", "max_val_minus_cur", "=", "max", "(", "max_val_len", "-", "val_len", ",", "0", ")", "if", "max_list_len", "is", "not", "None", ":", "features", "=", "str", "(", "val", "[", ":", "max_list_len", "]", ")", "if", "len", "(", "val", ")", ">", "max_list_len", ":", "features", "=", "features", "[", ":", "-", "1", "]", "+", "', ...]'", "else", ":", "features", "=", "str", "(", "val", ")", "if", "val", ":", "message", "=", "f'{log_prefix}{key}{\" \" * max_key_minus_cur} : {\" \" * max_val_minus_cur}{len(val)} | {features}'", "if", "return_str", ":", "output_str", "+=", "message", "+", "'\\n'", "else", ":", "logger", ".", "log", "(", "log_level", ",", "message", ")", "if", "return_str", ":", "if", "output_str", "[", "-", "1", "]", "==", "'\\n'", ":", "output_str", "=", "output_str", "[", ":", "-", "1", "]", "return", "output_str" ]
https://github.com/awslabs/autogluon/blob/7309118f2ab1c9519f25acf61a283a95af95842b/common/src/autogluon/common/features/feature_metadata.py#L350-L392
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/Xenotix Python Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/tempfile.py
python
NamedTemporaryFile
(mode='w+b', bufsize=-1, suffix="", prefix=template, dir=None, delete=True)
return _TemporaryFileWrapper(file, name, delete)
Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to os.fdopen (default "w+b").
'bufsize' -- the buffer size argument to os.fdopen (default -1).
'delete' -- whether the file is deleted on close (default True).
The file is created as mkstemp() would do it.

Returns an object with a file-like interface; the name of the file
is accessible as file.name. The file will be automatically deleted
when it is closed unless the 'delete' argument is set to False.
Create and return a temporary file. Arguments: 'prefix', 'suffix', 'dir' -- as for mkstemp. 'mode' -- the mode argument to os.fdopen (default "w+b"). 'bufsize' -- the buffer size argument to os.fdopen (default -1). 'delete' -- whether the file is deleted on close (default True). The file is created as mkstemp() would do it.
[ "Create", "and", "return", "a", "temporary", "file", ".", "Arguments", ":", "prefix", "suffix", "dir", "--", "as", "for", "mkstemp", ".", "mode", "--", "the", "mode", "argument", "to", "os", ".", "fdopen", "(", "default", "w", "+", "b", ")", ".", "bufsize", "--", "the", "buffer", "size", "argument", "to", "os", ".", "fdopen", "(", "default", "-", "1", ")", ".", "delete", "--", "whether", "the", "file", "is", "deleted", "on", "close", "(", "default", "True", ")", ".", "The", "file", "is", "created", "as", "mkstemp", "()", "would", "do", "it", "." ]
def NamedTemporaryFile(mode='w+b', bufsize=-1, suffix="",
                       prefix=template, dir=None, delete=True):
    """Create and return a temporary file.
    Arguments:
    'prefix', 'suffix', 'dir' -- as for mkstemp.
    'mode' -- the mode argument to os.fdopen (default "w+b").
    'bufsize' -- the buffer size argument to os.fdopen (default -1).
    'delete' -- whether the file is deleted on close (default True).
    The file is created as mkstemp() would do it.

    Returns an object with a file-like interface; the name of the file
    is accessible as file.name.  The file will be automatically deleted
    when it is closed unless the 'delete' argument is set to False.
    """

    if dir is None:
        dir = gettempdir()

    if 'b' in mode:
        flags = _bin_openflags
    else:
        flags = _text_openflags

    # Setting O_TEMPORARY in the flags causes the OS to delete
    # the file when it is closed. This is only supported by Windows.
    if _os.name == 'nt' and delete:
        flags |= _os.O_TEMPORARY

    (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
    file = _os.fdopen(fd, mode, bufsize)
    return _TemporaryFileWrapper(file, name, delete)
[ "def", "NamedTemporaryFile", "(", "mode", "=", "'w+b'", ",", "bufsize", "=", "-", "1", ",", "suffix", "=", "\"\"", ",", "prefix", "=", "template", ",", "dir", "=", "None", ",", "delete", "=", "True", ")", ":", "if", "dir", "is", "None", ":", "dir", "=", "gettempdir", "(", ")", "if", "'b'", "in", "mode", ":", "flags", "=", "_bin_openflags", "else", ":", "flags", "=", "_text_openflags", "# Setting O_TEMPORARY in the flags causes the OS to delete", "# the file when it is closed. This is only supported by Windows.", "if", "_os", ".", "name", "==", "'nt'", "and", "delete", ":", "flags", "|=", "_os", ".", "O_TEMPORARY", "(", "fd", ",", "name", ")", "=", "_mkstemp_inner", "(", "dir", ",", "prefix", ",", "suffix", ",", "flags", ")", "file", "=", "_os", ".", "fdopen", "(", "fd", ",", "mode", ",", "bufsize", ")", "return", "_TemporaryFileWrapper", "(", "file", ",", "name", ",", "delete", ")" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/tempfile.py#L419-L449
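Typical usage of the wrapper this returns, shown with the modern tempfile API (Python 3 renamed bufsize to buffering); delete=False keeps the file after close so it can be reopened by name:

import os
import tempfile

with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
    f.write('hello')
    path = f.name  # the file survives the with-block because delete=False

with open(path) as f:
    print(f.read())  # 'hello'
os.remove(path)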
gem/oq-engine
1bdb88f3914e390abcbd285600bfd39477aae47c
openquake/hmtk/seismicity/gcmt_catalogue.py
python
GCMTCatalogue.get_number_tensors
(self)
return len(self.gcmts)
Returns number of CMTs
Returns number of CMTs
[ "Returns", "number", "of", "CMTs" ]
def get_number_tensors(self):
    """
    Returns number of CMTs
    """
    return len(self.gcmts)
[ "def", "get_number_tensors", "(", "self", ")", ":", "return", "len", "(", "self", ".", "gcmts", ")" ]
https://github.com/gem/oq-engine/blob/1bdb88f3914e390abcbd285600bfd39477aae47c/openquake/hmtk/seismicity/gcmt_catalogue.py#L421-L425
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/core/numbers.py
python
NegativeInfinity._eval_power
(self, expt)
``expt`` is symbolic object but not equal to 0 or 1.

================ ======= ==============================
Expression       Result  Notes
================ ======= ==============================
``(-oo) ** nan`` ``nan``
``(-oo) ** oo``  ``nan``
``(-oo) ** -oo`` ``nan``
``(-oo) ** e``   ``oo``  ``e`` is positive even integer
``(-oo) ** o``   ``-oo`` ``o`` is positive odd integer
================ ======= ==============================

See Also
========

Infinity
Pow
NaN
``expt`` is symbolic object but not equal to 0 or 1.
[ "expt", "is", "symbolic", "object", "but", "not", "equal", "to", "0", "or", "1", "." ]
def _eval_power(self, expt):
    """
    ``expt`` is symbolic object but not equal to 0 or 1.

    ================ ======= ==============================
    Expression       Result  Notes
    ================ ======= ==============================
    ``(-oo) ** nan`` ``nan``
    ``(-oo) ** oo``  ``nan``
    ``(-oo) ** -oo`` ``nan``
    ``(-oo) ** e``   ``oo``  ``e`` is positive even integer
    ``(-oo) ** o``   ``-oo`` ``o`` is positive odd integer
    ================ ======= ==============================

    See Also
    ========

    Infinity
    Pow
    NaN

    """
    if isinstance(expt, Number):
        if expt is S.NaN or \
                expt is S.Infinity or \
                expt is S.NegativeInfinity:
            return S.NaN

        if isinstance(expt, Integer) and expt.is_positive:
            if expt.is_odd:
                return S.NegativeInfinity
            else:
                return S.Infinity

        return S.NegativeOne**expt*S.Infinity**expt
[ "def", "_eval_power", "(", "self", ",", "expt", ")", ":", "if", "isinstance", "(", "expt", ",", "Number", ")", ":", "if", "expt", "is", "S", ".", "NaN", "or", "expt", "is", "S", ".", "Infinity", "or", "expt", "is", "S", ".", "NegativeInfinity", ":", "return", "S", ".", "NaN", "if", "isinstance", "(", "expt", ",", "Integer", ")", "and", "expt", ".", "is_positive", ":", "if", "expt", ".", "is_odd", ":", "return", "S", ".", "NegativeInfinity", "else", ":", "return", "S", ".", "Infinity", "return", "S", ".", "NegativeOne", "**", "expt", "*", "S", ".", "Infinity", "**", "expt" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/core/numbers.py#L2384-L2418
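The table in the docstring can be checked directly in SymPy:

from sympy import oo

print((-oo) ** 2)   # oo   (positive even integer exponent)
print((-oo) ** 3)   # -oo  (positive odd integer exponent)
print((-oo) ** oo)  # nan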
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/tke/v20180525/models.py
python
DescribeClusterEndpointStatusRequest.__init__
(self)
r""" :param ClusterId: 集群ID :type ClusterId: str :param IsExtranet: 是否为外网访问(TRUE 外网访问 FALSE 内网访问,默认值: FALSE) :type IsExtranet: bool
r""" :param ClusterId: 集群ID :type ClusterId: str :param IsExtranet: 是否为外网访问(TRUE 外网访问 FALSE 内网访问,默认值: FALSE) :type IsExtranet: bool
[ "r", ":", "param", "ClusterId", ":", "集群ID", ":", "type", "ClusterId", ":", "str", ":", "param", "IsExtranet", ":", "是否为外网访问(TRUE", "外网访问", "FALSE", "内网访问,默认值:", "FALSE)", ":", "type", "IsExtranet", ":", "bool" ]
def __init__(self):
    r"""
    :param ClusterId: Cluster ID
    :type ClusterId: str
    :param IsExtranet: Whether access is over the public network (TRUE: public network access; FALSE: private network access. Default: FALSE)
    :type IsExtranet: bool
    """
    self.ClusterId = None
    self.IsExtranet = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "ClusterId", "=", "None", "self", ".", "IsExtranet", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/tke/v20180525/models.py#L3544-L3552
openstack/manila
142990edc027e14839d5deaf4954dd6fc88de15e
manila/share/drivers/netapp/dataontap/cluster_mode/lib_base.py
python
NetAppCmodeFileStorageLibrary.create_replica
(self, context, replica_list, new_replica, access_rules, share_snapshots, share_server=None)
return model_update
Creates the new replica on this backend and sets up SnapMirror.
Creates the new replica on this backend and sets up SnapMirror.
[ "Creates", "the", "new", "replica", "on", "this", "backend", "and", "sets", "up", "SnapMirror", "." ]
def create_replica(self, context, replica_list, new_replica,
                   access_rules, share_snapshots, share_server=None):
    """Creates the new replica on this backend and sets up SnapMirror."""
    active_replica = self.find_active_replica(replica_list)
    dm_session = data_motion.DataMotionSession()

    # check that the source and new replica reside in the same pool type:
    # either FlexGroup or FlexVol.
    src_share_name, src_vserver, src_backend = (
        dm_session.get_backend_info_for_share(active_replica))
    src_client = data_motion.get_client_for_backend(
        src_backend, vserver_name=src_vserver)
    src_is_flexgroup = self._is_flexgroup_share(src_client, src_share_name)

    pool_name = share_utils.extract_host(new_replica['host'], level='pool')
    dest_is_flexgroup = self._is_flexgroup_pool(pool_name)

    if src_is_flexgroup != dest_is_flexgroup:
        src_type = 'FlexGroup' if src_is_flexgroup else 'FlexVol'
        dest_type = 'FlexGroup' if dest_is_flexgroup else 'FlexVol'
        msg = _('Could not create replica %(replica_id)s from share '
                '%(share_id)s in the destination host %(dest_host)s. The '
                'source share is from %(src_type)s style, while the '
                'destination replica host is %(dest_type)s style.')
        msg_args = {'replica_id': new_replica['id'],
                    'share_id': new_replica['share_id'],
                    'dest_host': new_replica['host'],
                    'src_type': src_type,
                    'dest_type': dest_type}
        raise exception.NetAppException(msg % msg_args)

    # NOTE(felipe_rodrigues): The FlexGroup replication does not support
    # several replicas (fan-out) in some ONTAP versions, while FlexVol is
    # always supported.
    if dest_is_flexgroup:
        fan_out = (src_client.is_flexgroup_fan_out_supported() and
                   self._client.is_flexgroup_fan_out_supported())
        if not fan_out and len(replica_list) > 2:
            msg = _('Could not create replica %(replica_id)s from share '
                    '%(share_id)s in the destination host %(dest_host)s. '
                    'The share does not support more than one replica.')
            msg_args = {'replica_id': new_replica['id'],
                        'share_id': new_replica['share_id'],
                        'dest_host': new_replica['host']}
            raise exception.NetAppException(msg % msg_args)

    # 1. Create the destination share
    dest_backend = share_utils.extract_host(new_replica['host'],
                                            level='backend_name')

    vserver = (dm_session.get_vserver_from_share(new_replica) or
               self.configuration.netapp_vserver)

    vserver_client = data_motion.get_client_for_backend(
        dest_backend, vserver_name=vserver)

    is_readable = self._is_readable_replica(new_replica)
    self._allocate_container(new_replica, vserver, vserver_client,
                             replica=True, create_fpolicy=False,
                             set_qos=is_readable)

    # 2. Setup SnapMirror with mounting replica whether 'readable' type.
    relationship_type = na_utils.get_relationship_type(dest_is_flexgroup)
    dm_session.create_snapmirror(active_replica, new_replica,
                                 relationship_type, mount=is_readable)

    # 3. Create export location
    model_update = {
        'export_locations': [],
        'replica_state': constants.REPLICA_STATE_OUT_OF_SYNC,
        'access_rules_status': constants.STATUS_ACTIVE,
    }
    if is_readable:
        model_update['export_locations'] = self._create_export(
            new_replica, share_server, vserver, vserver_client,
            replica=True)

        if access_rules:
            helper = self._get_helper(new_replica)
            helper.set_client(vserver_client)
            share_name = self._get_backend_share_name(new_replica['id'])
            try:
                helper.update_access(new_replica, share_name, access_rules)
            except Exception:
                model_update['access_rules_status'] = (
                    constants.SHARE_INSTANCE_RULES_ERROR)

    return model_update
[ "def", "create_replica", "(", "self", ",", "context", ",", "replica_list", ",", "new_replica", ",", "access_rules", ",", "share_snapshots", ",", "share_server", "=", "None", ")", ":", "active_replica", "=", "self", ".", "find_active_replica", "(", "replica_list", ")", "dm_session", "=", "data_motion", ".", "DataMotionSession", "(", ")", "# check that the source and new replica reside in the same pool type:", "# either FlexGroup or FlexVol.", "src_share_name", ",", "src_vserver", ",", "src_backend", "=", "(", "dm_session", ".", "get_backend_info_for_share", "(", "active_replica", ")", ")", "src_client", "=", "data_motion", ".", "get_client_for_backend", "(", "src_backend", ",", "vserver_name", "=", "src_vserver", ")", "src_is_flexgroup", "=", "self", ".", "_is_flexgroup_share", "(", "src_client", ",", "src_share_name", ")", "pool_name", "=", "share_utils", ".", "extract_host", "(", "new_replica", "[", "'host'", "]", ",", "level", "=", "'pool'", ")", "dest_is_flexgroup", "=", "self", ".", "_is_flexgroup_pool", "(", "pool_name", ")", "if", "src_is_flexgroup", "!=", "dest_is_flexgroup", ":", "src_type", "=", "'FlexGroup'", "if", "src_is_flexgroup", "else", "'FlexVol'", "dest_type", "=", "'FlexGroup'", "if", "dest_is_flexgroup", "else", "'FlexVol'", "msg", "=", "_", "(", "'Could not create replica %(replica_id)s from share '", "'%(share_id)s in the destination host %(dest_host)s. The '", "'source share is from %(src_type)s style, while the '", "'destination replica host is %(dest_type)s style.'", ")", "msg_args", "=", "{", "'replica_id'", ":", "new_replica", "[", "'id'", "]", ",", "'share_id'", ":", "new_replica", "[", "'share_id'", "]", ",", "'dest_host'", ":", "new_replica", "[", "'host'", "]", ",", "'src_type'", ":", "src_type", ",", "'dest_type'", ":", "dest_type", "}", "raise", "exception", ".", "NetAppException", "(", "msg", "%", "msg_args", ")", "# NOTE(felipe_rodrigues): The FlexGroup replication does not support", "# several replicas (fan-out) in some ONTAP versions, while FlexVol is", "# always supported.", "if", "dest_is_flexgroup", ":", "fan_out", "=", "(", "src_client", ".", "is_flexgroup_fan_out_supported", "(", ")", "and", "self", ".", "_client", ".", "is_flexgroup_fan_out_supported", "(", ")", ")", "if", "not", "fan_out", "and", "len", "(", "replica_list", ")", ">", "2", ":", "msg", "=", "_", "(", "'Could not create replica %(replica_id)s from share '", "'%(share_id)s in the destination host %(dest_host)s. '", "'The share does not support more than one replica.'", ")", "msg_args", "=", "{", "'replica_id'", ":", "new_replica", "[", "'id'", "]", ",", "'share_id'", ":", "new_replica", "[", "'share_id'", "]", ",", "'dest_host'", ":", "new_replica", "[", "'host'", "]", "}", "raise", "exception", ".", "NetAppException", "(", "msg", "%", "msg_args", ")", "# 1. 
Create the destination share", "dest_backend", "=", "share_utils", ".", "extract_host", "(", "new_replica", "[", "'host'", "]", ",", "level", "=", "'backend_name'", ")", "vserver", "=", "(", "dm_session", ".", "get_vserver_from_share", "(", "new_replica", ")", "or", "self", ".", "configuration", ".", "netapp_vserver", ")", "vserver_client", "=", "data_motion", ".", "get_client_for_backend", "(", "dest_backend", ",", "vserver_name", "=", "vserver", ")", "is_readable", "=", "self", ".", "_is_readable_replica", "(", "new_replica", ")", "self", ".", "_allocate_container", "(", "new_replica", ",", "vserver", ",", "vserver_client", ",", "replica", "=", "True", ",", "create_fpolicy", "=", "False", ",", "set_qos", "=", "is_readable", ")", "# 2. Setup SnapMirror with mounting replica whether 'readable' type.", "relationship_type", "=", "na_utils", ".", "get_relationship_type", "(", "dest_is_flexgroup", ")", "dm_session", ".", "create_snapmirror", "(", "active_replica", ",", "new_replica", ",", "relationship_type", ",", "mount", "=", "is_readable", ")", "# 3. Create export location", "model_update", "=", "{", "'export_locations'", ":", "[", "]", ",", "'replica_state'", ":", "constants", ".", "REPLICA_STATE_OUT_OF_SYNC", ",", "'access_rules_status'", ":", "constants", ".", "STATUS_ACTIVE", ",", "}", "if", "is_readable", ":", "model_update", "[", "'export_locations'", "]", "=", "self", ".", "_create_export", "(", "new_replica", ",", "share_server", ",", "vserver", ",", "vserver_client", ",", "replica", "=", "True", ")", "if", "access_rules", ":", "helper", "=", "self", ".", "_get_helper", "(", "new_replica", ")", "helper", ".", "set_client", "(", "vserver_client", ")", "share_name", "=", "self", ".", "_get_backend_share_name", "(", "new_replica", "[", "'id'", "]", ")", "try", ":", "helper", ".", "update_access", "(", "new_replica", ",", "share_name", ",", "access_rules", ")", "except", "Exception", ":", "model_update", "[", "'access_rules_status'", "]", "=", "(", "constants", ".", "SHARE_INSTANCE_RULES_ERROR", ")", "return", "model_update" ]
https://github.com/openstack/manila/blob/142990edc027e14839d5deaf4954dd6fc88de15e/manila/share/drivers/netapp/dataontap/cluster_mode/lib_base.py#L2455-L2541
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pandas-0.24.2-py3.7-macosx-10.9-x86_64.egg/pandas/core/indexes/api.py
python
_sanitize_and_check
(indexes)
Verify the type of indexes and convert lists to Index.

Cases:

- [list, list, ...]: Return ([list, list, ...], 'list')
- [list, Index, ...]: Return _sanitize_and_check([Index, Index, ...])
  Lists are sorted and converted to Index.
- [Index, Index, ...]: Return ([Index, Index, ...], TYPE)
  TYPE = 'special' if at least one special type, 'array' otherwise.

Parameters
----------
indexes : list of Index or list objects

Returns
-------
sanitized_indexes : list of Index or list objects
type : {'list', 'array', 'special'}
Verify the type of indexes and convert lists to Index.
[ "Verify", "the", "type", "of", "indexes", "and", "convert", "lists", "to", "Index", "." ]
def _sanitize_and_check(indexes):
    """
    Verify the type of indexes and convert lists to Index.

    Cases:

    - [list, list, ...]: Return ([list, list, ...], 'list')
    - [list, Index, ...]: Return _sanitize_and_check([Index, Index, ...])
      Lists are sorted and converted to Index.
    - [Index, Index, ...]: Return ([Index, Index, ...], TYPE)
      TYPE = 'special' if at least one special type, 'array' otherwise.

    Parameters
    ----------
    indexes : list of Index or list objects

    Returns
    -------
    sanitized_indexes : list of Index or list objects
    type : {'list', 'array', 'special'}
    """
    kinds = list({type(index) for index in indexes})

    if list in kinds:
        if len(kinds) > 1:
            indexes = [Index(com.try_sort(x))
                       if not isinstance(x, Index) else
                       x for x in indexes]
            kinds.remove(list)
        else:
            return indexes, 'list'

    if len(kinds) > 1 or Index not in kinds:
        return indexes, 'special'
    else:
        return indexes, 'array'
[ "def", "_sanitize_and_check", "(", "indexes", ")", ":", "kinds", "=", "list", "(", "{", "type", "(", "index", ")", "for", "index", "in", "indexes", "}", ")", "if", "list", "in", "kinds", ":", "if", "len", "(", "kinds", ")", ">", "1", ":", "indexes", "=", "[", "Index", "(", "com", ".", "try_sort", "(", "x", ")", ")", "if", "not", "isinstance", "(", "x", ",", "Index", ")", "else", "x", "for", "x", "in", "indexes", "]", "kinds", ".", "remove", "(", "list", ")", "else", ":", "return", "indexes", ",", "'list'", "if", "len", "(", "kinds", ")", ">", "1", "or", "Index", "not", "in", "kinds", ":", "return", "indexes", ",", "'special'", "else", ":", "return", "indexes", ",", "'array'" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pandas-0.24.2-py3.7-macosx-10.9-x86_64.egg/pandas/core/indexes/api.py#L205-L240
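Note: a short, hedged demonstration of the three return kinds, assuming the pandas 0.24.x layout recorded above (the helper is private, so the import path is not stable across versions). The kind check uses exact type(), so even an integer index, whose concrete type is Int64Index in this version, counts as 'special'; only objects whose type is exactly Index can yield 'array'.

import pandas as pd
from pandas.core.indexes.api import _sanitize_and_check  # private, 0.24.x only

# Only plain lists: passed through untouched.
print(_sanitize_and_check([[3, 1], [2, 0]]))
# -> ([[3, 1], [2, 0]], 'list')

# A list mixed with a plain object-dtype Index: the list is sorted and
# wrapped in Index, and the kind is 'array'.
print(_sanitize_and_check([[3, 1], pd.Index(['a', 'b'])]))

# An Index subclass such as DatetimeIndex makes the result 'special'.
print(_sanitize_and_check([pd.Index(['a']), pd.DatetimeIndex(['2019-01-01'])]))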
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/docutils/utils/math/math2html.py
python
StrikeOut.process
(self)
Set the output tag to strike.
Set the output tag to strike.
[ "Set", "the", "output", "tag", "to", "strike", "." ]
def process(self): "Set the output tag to strike." self.output.tag = 'strike'
[ "def", "process", "(", "self", ")", ":", "self", ".", "output", ".", "tag", "=", "'strike'" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/docutils/utils/math/math2html.py#L3740-L3742
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pygments/lexers/rebol.py
python
RebolLexer.analyse_text
(text)
Check if code contains a REBOL header and so is probably not R code
Check if code contains a REBOL header and so is probably not R code
[ "Check", "if", "code", "contains", "a", "REBOL", "header", "and", "so", "is", "probably", "not", "R", "code" ]
def analyse_text(text): """ Check if code contains a REBOL header and so is probably not R code """ if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE): # The code starts with REBOL header return 1.0 elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE): # The code contains REBOL header but also some text before it return 0.5
[ "def", "analyse_text", "(", "text", ")", ":", "if", "re", ".", "match", "(", "r'^\\s*REBOL\\s*\\['", ",", "text", ",", "re", ".", "IGNORECASE", ")", ":", "# The code starts with REBOL header", "return", "1.0", "elif", "re", ".", "search", "(", "r'\\s*REBOL\\s*\\['", ",", "text", ",", "re", ".", "IGNORECASE", ")", ":", "# The code contains REBOL header but also some text before it", "return", "0.5" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pygments/lexers/rebol.py#L235-L244
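Note: with the bracket escape fixed above (the original elif pattern r'\s*REBOL\s*[' would raise re.error on first use), the heuristic can be exercised directly. Pygments wraps analyse_text so that a call on the class returns a float, with None from the body coerced to 0.0; the sample inputs below are made up.

from pygments.lexers.rebol import RebolLexer

print(RebolLexer.analyse_text('REBOL [ Title: "demo" ]'))  # 1.0: header first
print(RebolLexer.analyse_text('; intro\nREBOL [ ]'))       # 0.5: header later on
print(RebolLexer.analyse_text('x <- c(1, 2, 3)'))          # 0.0: probably R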
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_atomic_container.py
python
_uninstall
(module, name)
return rc, out, err, False
uninstall an atomic container by its name.
uninstall an atomic container by its name.
[ "uninstall", "an", "atomic", "container", "by", "its", "name", "." ]
def _uninstall(module, name): ''' uninstall an atomic container by its name. ''' args = ['atomic', 'uninstall', name] rc, out, err = module.run_command(args, check_rc=False) return rc, out, err, False
[ "def", "_uninstall", "(", "module", ",", "name", ")", ":", "args", "=", "[", "'atomic'", ",", "'uninstall'", ",", "name", "]", "rc", ",", "out", ",", "err", "=", "module", ".", "run_command", "(", "args", ",", "check_rc", "=", "False", ")", "return", "rc", ",", "out", ",", "err", ",", "False" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_atomic_container.py#L89-L93
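Note: outside Ansible the helper can be exercised with a minimal stand-in for AnsibleModule. FakeModule below is hypothetical; a real AnsibleModule provides run_command with the same (rc, stdout, stderr) contract. The call shells out, so it needs the atomic CLI on PATH, and the trailing False is the fixed flag this helper always returns.

import subprocess

class FakeModule:
    # Mimics the (rc, stdout, stderr) contract of AnsibleModule.run_command.
    def run_command(self, args, check_rc=False):
        proc = subprocess.run(args, capture_output=True, text=True)
        return proc.returncode, proc.stdout, proc.stderr

# Assumes _uninstall from the record above is in scope.
rc, out, err, flag = _uninstall(FakeModule(), 'etcd')
print(rc, flag)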
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/schemes/elliptic_curves/ell_generic.py
python
EllipticCurve_generic.plot
(self, xmin=None, xmax=None, components='both', **args)
return g
Draw a graph of this elliptic curve. The plot method is only implemented when there is a natural coercion from the base ring of ``self`` to ``RR``. In this case, ``self`` is plotted as if it was defined over ``RR``. INPUT: - ``xmin, xmax`` - (optional) points will be computed at least within this range, but possibly farther. - ``components`` - a string, one of the following: - ``both`` -- (default), scale so that both bounded and unbounded components appear - ``bounded`` -- scale the plot to show the bounded component. Raises an error if there is only one real component. - ``unbounded`` -- scale the plot to show the unbounded component, including the two flex points. - ``plot_points`` -- passed to :func:`sage.plot.generate_plot_points` - ``adaptive_tolerance`` -- passed to :func:`sage.plot.generate_plot_points` - ``adaptive_recursion`` -- passed to :func:`sage.plot.generate_plot_points` - ``randomize`` -- passed to :func:`sage.plot.generate_plot_points` - ``**args`` - all other options are passed to :class:`sage.plot.line.Line` EXAMPLES:: sage: E = EllipticCurve([0,-1]) sage: plot(E, rgbcolor=hue(0.7)) Graphics object consisting of 1 graphics primitive sage: E = EllipticCurve('37a') sage: plot(E) Graphics object consisting of 2 graphics primitives sage: plot(E, xmin=25,xmax=26) Graphics object consisting of 2 graphics primitives With :trac:`12766` we added the components keyword:: sage: E.real_components() 2 sage: E.plot(components='bounded') Graphics object consisting of 1 graphics primitive sage: E.plot(components='unbounded') Graphics object consisting of 1 graphics primitive If there is only one component then specifying components='bounded' raises a ValueError:: sage: E = EllipticCurve('9990be2') sage: E.plot(components='bounded') Traceback (most recent call last): ... ValueError: no bounded component for this curve An elliptic curve defined over the Complex Field can not be plotted:: sage: E = EllipticCurve(CC, [0,0,1,-1,0]) sage: E.plot() Traceback (most recent call last): ... NotImplementedError: plotting of curves over Complex Field with 53 bits of precision is not implemented yet
Draw a graph of this elliptic curve.
[ "Draw", "a", "graph", "of", "this", "elliptic", "curve", "." ]
def plot(self, xmin=None, xmax=None, components='both', **args): """ Draw a graph of this elliptic curve. The plot method is only implemented when there is a natural coercion from the base ring of ``self`` to ``RR``. In this case, ``self`` is plotted as if it was defined over ``RR``. INPUT: - ``xmin, xmax`` - (optional) points will be computed at least within this range, but possibly farther. - ``components`` - a string, one of the following: - ``both`` -- (default), scale so that both bounded and unbounded components appear - ``bounded`` -- scale the plot to show the bounded component. Raises an error if there is only one real component. - ``unbounded`` -- scale the plot to show the unbounded component, including the two flex points. - ``plot_points`` -- passed to :func:`sage.plot.generate_plot_points` - ``adaptive_tolerance`` -- passed to :func:`sage.plot.generate_plot_points` - ``adaptive_recursion`` -- passed to :func:`sage.plot.generate_plot_points` - ``randomize`` -- passed to :func:`sage.plot.generate_plot_points` - ``**args`` - all other options are passed to :class:`sage.plot.line.Line` EXAMPLES:: sage: E = EllipticCurve([0,-1]) sage: plot(E, rgbcolor=hue(0.7)) Graphics object consisting of 1 graphics primitive sage: E = EllipticCurve('37a') sage: plot(E) Graphics object consisting of 2 graphics primitives sage: plot(E, xmin=25,xmax=26) Graphics object consisting of 2 graphics primitives With :trac:`12766` we added the components keyword:: sage: E.real_components() 2 sage: E.plot(components='bounded') Graphics object consisting of 1 graphics primitive sage: E.plot(components='unbounded') Graphics object consisting of 1 graphics primitive If there is only one component then specifying components='bounded' raises a ValueError:: sage: E = EllipticCurve('9990be2') sage: E.plot(components='bounded') Traceback (most recent call last): ... ValueError: no bounded component for this curve An elliptic curve defined over the Complex Field can not be plotted:: sage: E = EllipticCurve(CC, [0,0,1,-1,0]) sage: E.plot() Traceback (most recent call last): ... NotImplementedError: plotting of curves over Complex Field with 53 bits of precision is not implemented yet """ RR = rings.RealField() K = self.base_ring() if not RR.has_coerce_map_from(K): raise NotImplementedError("plotting of curves over %s is not implemented yet" % K) if components not in ['both', 'bounded', 'unbounded']: raise ValueError("component must be one of 'both', 'bounded' or 'unbounded'") a1, a2, a3, a4, a6 = self.ainvs() d = self.division_polynomial(2) def f1(z): # Internal function for plotting first branch of the curve return (-(a1*z + a3) + sqrt(abs(d(z))))/2 def f2(z): # Internal function for plotting second branch of the curve return (-(a1*z + a3) - sqrt(abs(d(z))))/2 r = sorted(d.roots(RR, multiplicities=False)) if components == 'bounded' and len(r) == 1: raise ValueError("no bounded component for this curve") if isinstance(xmin, (tuple, list)): if xmax is not None: raise ValueError("xmax must be None if xmin is a tuple") if len(xmin) != 2: raise ValueError("if xmin is a tuple it must have length 2") xmin, xmax = xmin if xmin is None or xmax is None: xmins = [] xmaxs = [] if components in ['both','bounded'] and len(r) > 1: xmins.append(r[0]) xmaxs.append(r[1]) # The following 3 is an aesthetic choice. It's possible # that we should compute both of the following when # components=='both' and len(r) > 1 and take the maximum # generated xmax. if components == 'unbounded' or components == 'both' and (len(r) == 1 or r[2] - r[1] > 3*(r[1] - r[0])): flex = sorted(self.division_polynomial(3).roots(RR, multiplicities=False)) flex = flex[-1] xmins.append(r[-1]) # The doubling here is an aesthetic choice xmaxs.append(flex + 2*(flex - r[-1])) elif components == 'both': # First the easy part. xmins.append(r[-1]) # There are two components and the unbounded component # is not too far from the bounded one. We scale so # that the unbounded component is twice as tall as the # bounded component. The y values corresponding to # horizontal tangent lines are determined as follows. # We implicitly differentiate the equation for this # curve and get # 2 yy' + a1 y + a1 xy' + a3 y' = 3 x^2 + 2a2 x + a4 R = RR['x'] x = R.gen() if a1 == 0: # a horizontal tangent line can only occur at a root of Ederiv = 3*x**2 + 2*a2*x + a4 else: # y' = 0 ==> y = (3*x^2 + 2*a2*x + a4) / a1 y = (3*x**2 + 2*a2*x + a4) / a1 Ederiv = y**2 + a1*x*y + a3*y - (x**3 + a2*x**2 + a4*x + a6) critx = [a for a in Ederiv.roots(RR, multiplicities=False) if r[0] < a < r[1]] if not critx: raise RuntimeError("No horizontal tangent lines on bounded component") # The 2.5 here is an aesthetic choice ymax = 2.5 * max([f1(a) for a in critx]) ymin = 2.5 * min([f2(a) for a in critx]) top_branch = ymax**2 + a1*x*ymax + a3*ymax - (x**3 + a2*x**2 + a4*x + a6) bottom_branch = ymin**2 + a1*x*ymin + a3*ymin - (x**3 + a2*x**2 + a4*x + a6) xmaxs.append(max(top_branch.roots(RR,multiplicities=False) + bottom_branch.roots(RR,multiplicities=False))) xmins = min(xmins) xmaxs = max(xmaxs) span = xmaxs - xmins if xmin is None: xmin = xmins - .02*span if xmax is None: xmax = xmaxs + .02*span elif xmin >= xmax: raise ValueError("xmin must be less than xmax") I = [] if components in ['unbounded', 'both'] and xmax > r[-1]: # one real root; 1 component if xmin <= r[-1]: I.append((r[-1],xmax,'<')) else: I.append((xmin, xmax,'=')) if components in ['bounded','both'] and len(r) > 1 and (xmin < r[1] or xmax > r[0]): if xmin <= r[0]: if xmax >= r[1]: I.append((r[0],r[1],'o')) else: I.append((r[0],xmax,'<')) elif xmax >= r[1]: I.append((xmin, r[1], '>')) else: I.append((xmin, xmax, '=')) g = plot.Graphics() plot_points = int(args.pop('plot_points',200)) adaptive_tolerance = args.pop('adaptive_tolerance',0.01) adaptive_recursion = args.pop('adaptive_recursion',5) randomize = args.pop('randomize',True) for j in range(len(I)): a,b,shape = I[j] v = generate_plot_points(f1, (a, b), plot_points, adaptive_tolerance, adaptive_recursion, randomize) w = generate_plot_points(f2, (a, b), plot_points, adaptive_tolerance, adaptive_recursion, randomize) if shape == 'o': g += plot.line(v + list(reversed(w)) + [v[0]], **args) elif shape == '<': g += plot.line(list(reversed(v)) + w, **args) elif shape == '>': g += plot.line(v + list(reversed(w)), **args) else: g += plot.line(v, **args) g += plot.line(w, **args) return g
[ "def", "plot", "(", "self", ",", "xmin", "=", "None", ",", "xmax", "=", "None", ",", "components", "=", "'both'", ",", "*", "*", "args", ")", ":", "RR", "=", "rings", ".", "RealField", "(", ")", "K", "=", "self", ".", "base_ring", "(", ")", "if", "not", "RR", ".", "has_coerce_map_from", "(", "K", ")", ":", "raise", "NotImplementedError", "(", "\"plotting of curves over %s is not implemented yet\"", "%", "K", ")", "if", "components", "not", "in", "[", "'both'", ",", "'bounded'", ",", "'unbounded'", "]", ":", "raise", "ValueError", "(", "\"component must be one of 'both', 'bounded' or 'unbounded'\"", ")", "a1", ",", "a2", ",", "a3", ",", "a4", ",", "a6", "=", "self", ".", "ainvs", "(", ")", "d", "=", "self", ".", "division_polynomial", "(", "2", ")", "def", "f1", "(", "z", ")", ":", "# Internal function for plotting first branch of the curve", "return", "(", "-", "(", "a1", "*", "z", "+", "a3", ")", "+", "sqrt", "(", "abs", "(", "d", "(", "z", ")", ")", ")", ")", "/", "2", "def", "f2", "(", "z", ")", ":", "# Internal function for plotting second branch of the curve", "return", "(", "-", "(", "a1", "*", "z", "+", "a3", ")", "-", "sqrt", "(", "abs", "(", "d", "(", "z", ")", ")", ")", ")", "/", "2", "r", "=", "sorted", "(", "d", ".", "roots", "(", "RR", ",", "multiplicities", "=", "False", ")", ")", "if", "components", "==", "'bounded'", "and", "len", "(", "r", ")", "==", "1", ":", "raise", "ValueError", "(", "\"no bounded component for this curve\"", ")", "if", "isinstance", "(", "xmin", ",", "(", "tuple", ",", "list", ")", ")", ":", "if", "xmax", "is", "not", "None", ":", "raise", "ValueError", "(", "\"xmax must be None if xmin is a tuple\"", ")", "if", "len", "(", "xmin", ")", "!=", "2", ":", "raise", "ValueError", "(", "\"if xmin is a tuple it must have length 2\"", ")", "xmin", ",", "xmax", "=", "xmin", "if", "xmin", "is", "None", "or", "xmax", "is", "None", ":", "xmins", "=", "[", "]", "xmaxs", "=", "[", "]", "if", "components", "in", "[", "'both'", ",", "'bounded'", "]", "and", "len", "(", "r", ")", ">", "1", ":", "xmins", ".", "append", "(", "r", "[", "0", "]", ")", "xmaxs", ".", "append", "(", "r", "[", "1", "]", ")", "# The following 3 is an aesthetic choice. It's possible", "# that we should compute both of the following when", "# components=='both' and len(r) > 1 and take the maximum", "# generated xmax.", "if", "components", "==", "'unbounded'", "or", "components", "==", "'both'", "and", "(", "len", "(", "r", ")", "==", "1", "or", "r", "[", "2", "]", "-", "r", "[", "1", "]", ">", "3", "*", "(", "r", "[", "1", "]", "-", "r", "[", "0", "]", ")", ")", ":", "flex", "=", "sorted", "(", "self", ".", "division_polynomial", "(", "3", ")", ".", "roots", "(", "RR", ",", "multiplicities", "=", "False", ")", ")", "flex", "=", "flex", "[", "-", "1", "]", "xmins", ".", "append", "(", "r", "[", "-", "1", "]", ")", "# The doubling here is an aesthetic choice", "xmaxs", ".", "append", "(", "flex", "+", "2", "*", "(", "flex", "-", "r", "[", "-", "1", "]", ")", ")", "elif", "components", "==", "'both'", ":", "# First the easy part.", "xmins", ".", "append", "(", "r", "[", "-", "1", "]", ")", "# There are two components and the unbounded component", "# is not too far from the bounded one. We scale so", "# that the unbounded component is twice as tall as the", "# bounded component. The y values corresponding to", "# horizontal tangent lines are determined as follows.", "# We implicitly differentiate the equation for this", "# curve and get", "# 2 yy' + a1 y + a1 xy' + a3 y' = 3 x^2 + 2a2 x + a4", "R", "=", "RR", "[", "'x'", "]", "x", "=", "R", ".", "gen", "(", ")", "if", "a1", "==", "0", ":", "# a horizontal tangent line can only occur at a root of", "Ederiv", "=", "3", "*", "x", "**", "2", "+", "2", "*", "a2", "*", "x", "+", "a4", "else", ":", "# y' = 0 ==> y = (3*x^2 + 2*a2*x + a4) / a1", "y", "=", "(", "3", "*", "x", "**", "2", "+", "2", "*", "a2", "*", "x", "+", "a4", ")", "/", "a1", "Ederiv", "=", "y", "**", "2", "+", "a1", "*", "x", "*", "y", "+", "a3", "*", "y", "-", "(", "x", "**", "3", "+", "a2", "*", "x", "**", "2", "+", "a4", "*", "x", "+", "a6", ")", "critx", "=", "[", "a", "for", "a", "in", "Ederiv", ".", "roots", "(", "RR", ",", "multiplicities", "=", "False", ")", "if", "r", "[", "0", "]", "<", "a", "<", "r", "[", "1", "]", "]", "if", "not", "critx", ":", "raise", "RuntimeError", "(", "\"No horizontal tangent lines on bounded component\"", ")", "# The 2.5 here is an aesthetic choice", "ymax", "=", "2.5", "*", "max", "(", "[", "f1", "(", "a", ")", "for", "a", "in", "critx", "]", ")", "ymin", "=", "2.5", "*", "min", "(", "[", "f2", "(", "a", ")", "for", "a", "in", "critx", "]", ")", "top_branch", "=", "ymax", "**", "2", "+", "a1", "*", "x", "*", "ymax", "+", "a3", "*", "ymax", "-", "(", "x", "**", "3", "+", "a2", "*", "x", "**", "2", "+", "a4", "*", "x", "+", "a6", ")", "bottom_branch", "=", "ymin", "**", "2", "+", "a1", "*", "x", "*", "ymin", "+", "a3", "*", "ymin", "-", "(", "x", "**", "3", "+", "a2", "*", "x", "**", "2", "+", "a4", "*", "x", "+", "a6", ")", "xmaxs", ".", "append", "(", "max", "(", "top_branch", ".", "roots", "(", "RR", ",", "multiplicities", "=", "False", ")", "+", "bottom_branch", ".", "roots", "(", "RR", ",", "multiplicities", "=", "False", ")", ")", ")", "xmins", "=", "min", "(", "xmins", ")", "xmaxs", "=", "max", "(", "xmaxs", ")", "span", "=", "xmaxs", "-", "xmins", "if", "xmin", "is", "None", ":", "xmin", "=", "xmins", "-", ".02", "*", "span", "if", "xmax", "is", "None", ":", "xmax", "=", "xmaxs", "+", ".02", "*", "span", "elif", "xmin", ">=", "xmax", ":", "raise", "ValueError", "(", "\"xmin must be less than xmax\"", ")", "I", "=", "[", "]", "if", "components", "in", "[", "'unbounded'", ",", "'both'", "]", "and", "xmax", ">", "r", "[", "-", "1", "]", ":", "# one real root; 1 component", "if", "xmin", "<=", "r", "[", "-", "1", "]", ":", "I", ".", "append", "(", "(", "r", "[", "-", "1", "]", ",", "xmax", ",", "'<'", ")", ")", "else", ":", "I", ".", "append", "(", "(", "xmin", ",", "xmax", ",", "'='", ")", ")", "if", "components", "in", "[", "'bounded'", ",", "'both'", "]", "and", "len", "(", "r", ")", ">", "1", "and", "(", "xmin", "<", "r", "[", "1", "]", "or", "xmax", ">", "r", "[", "0", "]", ")", ":", "if", "xmin", "<=", "r", "[", "0", "]", ":", "if", "xmax", ">=", "r", "[", "1", "]", ":", "I", ".", "append", "(", "(", "r", "[", "0", "]", ",", "r", "[", "1", "]", ",", "'o'", ")", ")", "else", ":", "I", ".", "append", "(", "(", "r", "[", "0", "]", ",", "xmax", ",", "'<'", ")", ")", "elif", "xmax", ">=", "r", "[", "1", "]", ":", "I", ".", "append", "(", "(", "xmin", ",", "r", "[", "1", "]", ",", "'>'", ")", ")", "else", ":", "I", ".", "append", "(", "(", "xmin", ",", "xmax", ",", "'='", ")", ")", "g", "=", "plot", ".", "Graphics", "(", ")", "plot_points", "=", "int", "(", "args", ".", "pop", "(", "'plot_points'", ",", "200", ")", ")", "adaptive_tolerance", "=", "args", ".", "pop", "(", "'adaptive_tolerance'", ",", "0.01", ")", "adaptive_recursion", "=", "args", ".", "pop", "(", "'adaptive_recursion'", ",", "5", ")", "randomize", "=", "args", ".", "pop", "(", "'randomize'", ",", "True", ")", "for", "j", "in", "range", "(", "len", "(", "I", ")", ")", ":", "a", ",", "b", ",", "shape", "=", "I", "[", "j", "]", "v", "=", "generate_plot_points", "(", "f1", ",", "(", "a", ",", "b", ")", ",", "plot_points", ",", "adaptive_tolerance", ",", "adaptive_recursion", ",", "randomize", ")", "w", "=", "generate_plot_points", "(", "f2", ",", "(", "a", ",", "b", ")", ",", "plot_points", ",", "adaptive_tolerance", ",", "adaptive_recursion", ",", "randomize", ")", "if", "shape", "==", "'o'", ":", "g", "+=", "plot", ".", "line", "(", "v", "+", "list", "(", "reversed", "(", "w", ")", ")", "+", "[", "v", "[", "0", "]", "]", ",", "*", "*", "args", ")", "elif", "shape", "==", "'<'", ":", "g", "+=", "plot", ".", "line", "(", "list", "(", "reversed", "(", "v", ")", ")", "+", "w", ",", "*", "*", "args", ")", "elif", "shape", "==", "'>'", ":", "g", "+=", "plot", ".", "line", "(", "v", "+", "list", "(", "reversed", "(", "w", ")", ")", ",", "*", "*", "args", ")", "else", ":", "g", "+=", "plot", ".", "line", "(", "v", ",", "*", "*", "args", ")", "g", "+=", "plot", ".", "line", "(", "w", ",", "*", "*", "args", ")", "return", "g" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/schemes/elliptic_curves/ell_generic.py#L2525-L2723
Samsung/cotopaxi
d19178b1235017257fec20d0a41edc918de55574
cotopaxi/active_scanner.py
python
DTLSScanner.__init__
(self, test_params, workers=10)
Construct empty DTLSScanner object.
Construct empty DTLSScanner object.
[ "Construct", "empty", "DTLSScanner", "object", "." ]
def __init__(self, test_params, workers=10): """Construct empty DTLSScanner object.""" self.workers = workers self.capabilities = DTLSInfo(test_params)
[ "def", "__init__", "(", "self", ",", "test_params", ",", "workers", "=", "10", ")", ":", "self", ".", "workers", "=", "workers", "self", ".", "capabilities", "=", "DTLSInfo", "(", "test_params", ")" ]
https://github.com/Samsung/cotopaxi/blob/d19178b1235017257fec20d0a41edc918de55574/cotopaxi/active_scanner.py#L573-L576
cool-RR/python_toolbox
cb9ef64b48f1d03275484d707dc5079b6701ad0c
python_toolbox/wx_tools/widgets/third_party/hypertreelist.py
python
TreeListItem.GetCurrentImage
(self, column=None)
return image
Returns the current item image. :param `column`: if not ``None``, an integer specifying the column index. If it is ``None``, the main column index is used.
Returns the current item image.
[ "Returns", "the", "current", "item", "image", "." ]
def GetCurrentImage(self, column=None): """ Returns the current item image. :param `column`: if not ``None``, an integer specifying the column index. If it is ``None``, the main column index is used. """ column = (column is not None and [column] or [self._owner.GetMainColumn()])[0] if column != self._owner.GetMainColumn(): return self.GetImage(column=column) image = GenericTreeItem.GetCurrentImage(self) return image
[ "def", "GetCurrentImage", "(", "self", ",", "column", "=", "None", ")", ":", "column", "=", "(", "column", "is", "not", "None", "and", "[", "column", "]", "or", "[", "self", ".", "_owner", ".", "GetMainColumn", "(", ")", "]", ")", "[", "0", "]", "if", "column", "!=", "self", ".", "_owner", ".", "GetMainColumn", "(", ")", ":", "return", "self", ".", "GetImage", "(", "column", "=", "column", ")", "image", "=", "GenericTreeItem", ".", "GetCurrentImage", "(", "self", ")", "return", "image" ]
https://github.com/cool-RR/python_toolbox/blob/cb9ef64b48f1d03275484d707dc5079b6701ad0c/python_toolbox/wx_tools/widgets/third_party/hypertreelist.py#L1496-L1510
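Note: the (cond and [a] or [b])[0] construct in the body is the pre-Python-2.5 ternary idiom. The one-element lists keep it correct even when the chosen value is falsy, which a bare and/or chain gets wrong. A self-contained comparison (the column values below are made up):

column = 0        # a falsy but perfectly valid column index
main_column = 3   # hypothetical default from GetMainColumn()

broken = (column is not None) and column or main_column               # -> 3 (wrong)
list_trick = ((column is not None) and [column] or [main_column])[0]  # -> 0
modern = column if column is not None else main_column                # -> 0
assert broken == 3 and list_trick == modern == 0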
baidu/Dialogue
2415430e1f3997806d059455a6e5b16a2d565d06
DGU/dgu/define_paradigm.py
python
Paradigm.create_multi_cls
(self, transformer_inst, params)
return results
create the multi-label classification paradigm network
create the multi-label classification paradigm network
[ "create", "the", "multi", "label", "classification", "paradigm", "network" ]
def create_multi_cls(self, transformer_inst, params): """ create the multi-label classification paradigm network """ cls_feats = transformer_inst.get_pooled_output() cls_feats = fluid.layers.dropout( x=cls_feats, dropout_prob=0.1, dropout_implementation="upscale_in_train") logits = fluid.layers.fc( input=cls_feats, size=params['num_labels'], param_attr=fluid.ParamAttr( name="cls_out_w", initializer=fluid.initializer.TruncatedNormal(scale=0.02)), bias_attr=fluid.ParamAttr( name="cls_out_b", initializer=fluid.initializer.Constant(0.))) labels_onehot = fluid.layers.cast(params["labels"], dtype='float32') ce_loss = fluid.layers.reduce_sum( fluid.layers.sigmoid_cross_entropy_with_logits( x=logits, label=labels_onehot)) loss = fluid.layers.mean(x=ce_loss) probs = fluid.layers.sigmoid(logits) if not params['is_training']: results = {"probs": probs} return results num_seqs = fluid.layers.tensor.fill_constant( shape=[1], dtype='int64', value=1) results = {"loss": loss, "probs": probs, "num_seqs": num_seqs} return results
[ "def", "create_multi_cls", "(", "self", ",", "transformer_inst", ",", "params", ")", ":", "cls_feats", "=", "transformer_inst", ".", "get_pooled_output", "(", ")", "cls_feats", "=", "fluid", ".", "layers", ".", "dropout", "(", "x", "=", "cls_feats", ",", "dropout_prob", "=", "0.1", ",", "dropout_implementation", "=", "\"upscale_in_train\"", ")", "logits", "=", "fluid", ".", "layers", ".", "fc", "(", "input", "=", "cls_feats", ",", "size", "=", "params", "[", "'num_labels'", "]", ",", "param_attr", "=", "fluid", ".", "ParamAttr", "(", "name", "=", "\"cls_out_w\"", ",", "initializer", "=", "fluid", ".", "initializer", ".", "TruncatedNormal", "(", "scale", "=", "0.02", ")", ")", ",", "bias_attr", "=", "fluid", ".", "ParamAttr", "(", "name", "=", "\"cls_out_b\"", ",", "initializer", "=", "fluid", ".", "initializer", ".", "Constant", "(", "0.", ")", ")", ")", "labels_onehot", "=", "fluid", ".", "layers", ".", "cast", "(", "params", "[", "\"labels\"", "]", ",", "dtype", "=", "'float32'", ")", "ce_loss", "=", "fluid", ".", "layers", ".", "reduce_sum", "(", "fluid", ".", "layers", ".", "sigmoid_cross_entropy_with_logits", "(", "x", "=", "logits", ",", "label", "=", "labels_onehot", ")", ")", "loss", "=", "fluid", ".", "layers", ".", "mean", "(", "x", "=", "ce_loss", ")", "probs", "=", "fluid", ".", "layers", ".", "sigmoid", "(", "logits", ")", "if", "not", "params", "[", "'is_training'", "]", ":", "results", "=", "{", "\"probs\"", ":", "probs", "}", "return", "results", "num_seqs", "=", "fluid", ".", "layers", ".", "tensor", ".", "fill_constant", "(", "shape", "=", "[", "1", "]", ",", "dtype", "=", "'int64'", ",", "value", "=", "1", ")", "results", "=", "{", "\"loss\"", ":", "loss", ",", "\"probs\"", ":", "probs", ",", "\"num_seqs\"", ":", "num_seqs", "}", "return", "results" ]
https://github.com/baidu/Dialogue/blob/2415430e1f3997806d059455a6e5b16a2d565d06/DGU/dgu/define_paradigm.py#L72-L105
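Note: a NumPy rendition of the loss wiring, to make the reductions concrete: elementwise sigmoid cross entropy, reduce_sum over every element, and a final mean that is a no-op because the sum is already a scalar. The shapes and values below are invented for illustration.

import numpy as np

logits = np.array([[1.2, -0.7], [0.3, 2.1]])  # [batch, num_labels]
labels = np.array([[1.0, 0.0], [0.0, 1.0]])   # multi-hot targets

# Numerically stable sigmoid_cross_entropy_with_logits, elementwise.
ce = np.maximum(logits, 0) - logits * labels + np.log1p(np.exp(-np.abs(logits)))
loss = ce.sum()                        # reduce_sum over all elements
probs = 1.0 / (1.0 + np.exp(-logits))  # what the network reports at inference
print(loss, probs)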
OpenXenManager/openxenmanager
1cb5c1cb13358ba584856e99a94f9669d17670ff
src/OXM/window_addserver.py
python
AddServer.update_connect_status
(self, server)
Animates the progress bar during connection.
Animates the progress bar during connection.
[ "Animates", "the", "progress", "bar", "during", "connection", "." ]
def update_connect_status(self, server): """ Animates the progress bar during connection. """ while server.connectThread.isAlive(): self.builder.get_object("progressconnect").pulse() server.connectThread.join(1) # TODO: what does this variable do? if self.selected_host is None: self.selected_host = server.host
[ "def", "update_connect_status", "(", "self", ",", "server", ")", ":", "while", "server", ".", "connectThread", ".", "isAlive", "(", ")", ":", "self", ".", "builder", ".", "get_object", "(", "\"progressconnect\"", ")", ".", "pulse", "(", ")", "server", ".", "connectThread", ".", "join", "(", "1", ")", "# TODO: what does this variable do?", "if", "self", ".", "selected_host", "is", "None", ":", "self", ".", "selected_host", "=", "server", ".", "host" ]
https://github.com/OpenXenManager/openxenmanager/blob/1cb5c1cb13358ba584856e99a94f9669d17670ff/src/OXM/window_addserver.py#L175-L184
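Note: the same pulse-while-connecting pattern with the current thread API; Thread.isAlive was removed in Python 3.9 in favour of is_alive. progress_pulse below stands in for the GTK progress-bar pulse call, and connect for the real server handshake.

import threading
import time

def connect():
    time.sleep(3)      # stand-in for the real connection work

def progress_pulse():
    print('pulse')

worker = threading.Thread(target=connect)
worker.start()
while worker.is_alive():
    progress_pulse()
    worker.join(1)     # block at most one second, then refresh the UI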
awslabs/autogluon
7309118f2ab1c9519f25acf61a283a95af95842b
tabular/src/autogluon/tabular/models/tab_transformer/tab_transformer_encoder.py
python
EncBase.fit
(self, data: pd.Series, dtype=None)
return data
If dtype == 'float', clean_data will cast the contents of data to floats
If dtype == 'float', clean_data will cast the contents of data to floats
[ "If", "dtype", "==", "float", "clean_data", "will", "cast", "the", "contents", "of", "data", "to", "floats" ]
def fit(self, data: pd.Series, dtype=None): """ If dtype == 'float', clean_data will cast the contents of data to floats """ if len(pd.unique(data)) == 1: raise WontEncodeError('Column contains only one value') data = self.clean_data(data, dtype) return data
[ "def", "fit", "(", "self", ",", "data", ":", "pd", ".", "Series", ",", "dtype", "=", "None", ")", ":", "if", "len", "(", "pd", ".", "unique", "(", "data", ")", ")", "==", "1", ":", "raise", "WontEncodeError", "(", "'Column contains only one value'", ")", "data", "=", "self", ".", "clean_data", "(", "data", ",", "dtype", ")", "return", "data" ]
https://github.com/awslabs/autogluon/blob/7309118f2ab1c9519f25acf61a283a95af95842b/tabular/src/autogluon/tabular/models/tab_transformer/tab_transformer_encoder.py#L46-L53
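Note: the guard condition in isolation (EncBase and WontEncodeError are internal to the autogluon module recorded above, so only the pandas check itself is shown):

import pandas as pd

varied = pd.Series([1.0, 2.0, 3.0])
constant = pd.Series([7.0, 7.0, 7.0])

print(len(pd.unique(varied)))    # 3 -> fit would proceed to clean_data
print(len(pd.unique(constant)))  # 1 -> fit raises WontEncodeError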
ryanrhymes/panns
86b14d28668ed2ac2d7cb16d2270cc63dbbcb952
panns/index.py
python
PannsIndex.build_sequential
(self, c)
Build the index sequentially, one projection at a time. Parameters: c: the number of binary trees to use in building the index.
Build the index sequentially, one projection at a time.
[ "Build", "the", "index", "sequentially", "one", "projection", "at", "a", "time", "." ]
def build_sequential(self, c): """ Build the index sequentially, one projection at a time. Parameters: c: the number of binary trees to use in building the index. """ for i in xrange(c): logger.info('pass %i ...' % i) tree = NaiveTree() self.btr.append(tree) children = range(len(self.mtx)) self.make_tree(tree.root, children) pass
[ "def", "build_sequential", "(", "self", ",", "c", ")", ":", "for", "i", "in", "xrange", "(", "c", ")", ":", "logger", ".", "info", "(", "'pass %i ...'", "%", "i", ")", "tree", "=", "NaiveTree", "(", ")", "self", ".", "btr", ".", "append", "(", "tree", ")", "children", "=", "range", "(", "len", "(", "self", ".", "mtx", ")", ")", "self", ".", "make_tree", "(", "tree", ".", "root", ",", "children", ")", "pass" ]
https://github.com/ryanrhymes/panns/blob/86b14d28668ed2ac2d7cb16d2270cc63dbbcb952/panns/index.py#L140-L153
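Note: the shape of the pass loop under Python 3 (xrange is Python 2 only): c trees are grown independently, each starting from the full range of point indices. TreeStub below is a structural stand-in for panns' NaiveTree/make_tree machinery, not the real index code.

class TreeStub:
    def __init__(self):
        self.root = object()   # placeholder for a real tree root

def build_forest(num_points, c):
    forest = []
    for i in range(c):                       # one pass per tree
        tree = TreeStub()
        forest.append(tree)
        children = list(range(num_points))   # every tree sees all points
        # panns would now recursively split `children` via make_tree
    return forest

print(len(build_forest(1000, 8)))  # 8 independent trees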