Dataset schema (column name, type, observed value range):

repo: string (length 7 to 55)
path: string (length 4 to 223)
url: string (length 87 to 315)
code: string (length 75 to 104k)
code_tokens: list
docstring: string (length 1 to 46.9k)
docstring_tokens: list
language: string (1 distinct class)
partition: string (3 distinct classes)
avg_line_len: float64 (7.91 to 980)
repo: erikrose/more-itertools
path: more_itertools/more.py
url: https://github.com/erikrose/more-itertools/blob/6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9/more_itertools/more.py#L2390-L2417
code:

def time_limited(limit_seconds, iterable):
    """
    Yield items from *iterable* until *limit_seconds* have passed.

    >>> from time import sleep
    >>> def generator():
    ...     yield 1
    ...     yield 2
    ...     sleep(0.2)
    ...     yield 3
    >>> iterable = generator()
    >>> list(time_limited(0.1, iterable))
    [1, 2]

    Note that the time is checked before each item is yielded, and iteration
    stops if the time elapsed is greater than *limit_seconds*. If your time
    limit is 1 second, but it takes 2 seconds to generate the first item from
    the iterable, the function will run for 2 seconds and not yield anything.
    """
    if limit_seconds < 0:
        raise ValueError('limit_seconds must be positive')

    start_time = monotonic()
    for item in iterable:
        if monotonic() - start_time > limit_seconds:
            break
        yield item
[ "def", "time_limited", "(", "limit_seconds", ",", "iterable", ")", ":", "if", "limit_seconds", "<", "0", ":", "raise", "ValueError", "(", "'limit_seconds must be positive'", ")", "start_time", "=", "monotonic", "(", ")", "for", "item", "in", "iterable", ":", "if", "monotonic", "(", ")", "-", "start_time", ">", "limit_seconds", ":", "break", "yield", "item" ]
Yield items from *iterable* until *limit_seconds* have passed. >>> from time import sleep >>> def generator(): ... yield 1 ... yield 2 ... sleep(0.2) ... yield 3 >>> iterable = generator() >>> list(time_limited(0.1, iterable)) [1, 2] Note that the time is checked before each item is yielded, and iteration stops if the time elapsed is greater than *limit_seconds*. If your time limit is 1 second, but it takes 2 seconds to generate the first item from the iterable, the function will run for 2 seconds and not yield anything.
[ "Yield", "items", "from", "*", "iterable", "*", "until", "*", "limit_seconds", "*", "have", "passed", "." ]
language: python | partition: train | avg_line_len: 30.892857
repo: Anaconda-Platform/anaconda-client
path: binstar_client/mixins/channels.py
url: https://github.com/Anaconda-Platform/anaconda-client/blob/b276f0572744c73c184a8b43a897cfa7fc1dc523/binstar_client/mixins/channels.py#L66-L77
code:

def copy_channel(self, channel, owner, to_channel):
    '''
    Tag all files in channel <channel> also as channel <to_channel>

    :param channel: channel to copy
    :param owner: Perform this operation on all packages of this user
    :param to_channel: Destination name (may be a channel that already exists)
    '''
    url = '%s/channels/%s/%s/copy/%s' % (self.domain, owner, channel, to_channel)
    res = self.session.post(url)
    self._check_response(res, [201])
[ "def", "copy_channel", "(", "self", ",", "channel", ",", "owner", ",", "to_channel", ")", ":", "url", "=", "'%s/channels/%s/%s/copy/%s'", "%", "(", "self", ".", "domain", ",", "owner", ",", "channel", ",", "to_channel", ")", "res", "=", "self", ".", "session", ".", "post", "(", "url", ")", "self", ".", "_check_response", "(", "res", ",", "[", "201", "]", ")" ]
Tag all files in channel <channel> also as channel <to_channel> :param channel: channel to copy :param owner: Perform this operation on all packages of this user :param to_channel: Destination name (may be a channel that already exists)
[ "Tag", "all", "files", "in", "channel", "<channel", ">", "also", "as", "channel", "<to_channel", ">", ":", "param", "channel", ":", "channel", "to", "copy", ":", "param", "owner", ":", "Perform", "this", "operation", "on", "all", "packages", "of", "this", "user", ":", "param", "to_channel", ":", "Destination", "name", "(", "may", "be", "a", "channel", "that", "already", "exists", ")" ]
language: python | partition: train | avg_line_len: 43
repo: rwl/godot
path: godot/mapping.py
url: https://github.com/rwl/godot/blob/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f/godot/mapping.py#L74-L82
code:

def _diagram_canvas_default(self):
    """ Trait initialiser """
    canvas = Canvas()

    for tool in self.tools:
        canvas.tools.append(tool(canvas))

    return canvas
[ "def", "_diagram_canvas_default", "(", "self", ")", ":", "canvas", "=", "Canvas", "(", ")", "for", "tool", "in", "self", ".", "tools", ":", "canvas", ".", "tools", ".", "append", "(", "tool", "(", "canvas", ")", ")", "return", "canvas" ]
Trait initialiser
[ "Trait", "initialiser" ]
language: python | partition: test | avg_line_len: 21
repo: StorjOld/heartbeat
path: heartbeat/Merkle/Merkle.py
url: https://github.com/StorjOld/heartbeat/blob/4d54f2011f1e9f688073d4347bc51bb7bd682718/heartbeat/Merkle/Merkle.py#L369-L386
code:

def gen_challenge(self, state):
    """returns the next challenge and increments the seed and index
    in the state.

    :param state: the state to use for generating the challenge.  will
    verify the integrity of the state object before using it to generate
    a challenge.  it will then modify the state by incrementing the seed
    and index and resign the state for passing back to the server for
    storage
    """
    state.checksig(self.key)

    if (state.index >= state.n):
        raise HeartbeatError("Out of challenges.")

    state.seed = MerkleHelper.get_next_seed(self.key, state.seed)
    chal = Challenge(state.seed, state.index)
    state.index += 1
    state.sign(self.key)
    return chal
[ "def", "gen_challenge", "(", "self", ",", "state", ")", ":", "state", ".", "checksig", "(", "self", ".", "key", ")", "if", "(", "state", ".", "index", ">=", "state", ".", "n", ")", ":", "raise", "HeartbeatError", "(", "\"Out of challenges.\"", ")", "state", ".", "seed", "=", "MerkleHelper", ".", "get_next_seed", "(", "self", ".", "key", ",", "state", ".", "seed", ")", "chal", "=", "Challenge", "(", "state", ".", "seed", ",", "state", ".", "index", ")", "state", ".", "index", "+=", "1", "state", ".", "sign", "(", "self", ".", "key", ")", "return", "chal" ]
returns the next challenge and increments the seed and index in the state. :param state: the state to use for generating the challenge. will verify the integrity of the state object before using it to generate a challenge. it will then modify the state by incrementing the seed and index and resign the state for passing back to the server for storage
[ "returns", "the", "next", "challenge", "and", "increments", "the", "seed", "and", "index", "in", "the", "state", "." ]
language: python | partition: train | avg_line_len: 42.166667
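A small sketch of how a caller might exhaust the challenges held in a state object. Here `beat` (a Merkle heartbeat instance) and `state` are assumed to come from the library's usual setup; only behavior visible in the code above is relied on:

challenges = []
try:
    while True:
        # each call advances state.seed/state.index and re-signs the state
        challenges.append(beat.gen_challenge(state))
except HeartbeatError:
    pass  # raised once state.index reaches state.n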
repo: jobovy/galpy
path: galpy/potential/Potential.py
url: https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/potential/Potential.py#L1894-L1947
code:

def evaluaterforces(Pot, R, z, phi=None, t=0., v=None):
    """
    NAME:
       evaluaterforces
    PURPOSE:
       convenience function to evaluate a possible sum of potentials
    INPUT:
       Pot - a potential or list of potentials
       R - cylindrical Galactocentric distance (can be Quantity)
       z - distance above the plane (can be Quantity)
       phi - azimuth (optional; can be Quantity)
       t - time (optional; can be Quantity)
       v - current velocity in cylindrical coordinates (optional, but
           required when including dissipative forces; can be a Quantity)
    OUTPUT:
       F_r(R,z,phi,t)
    HISTORY:
       2016-06-10 - Written - Bovy (UofT)
    """
    isList = isinstance(Pot, list)
    nonAxi = _isNonAxi(Pot)
    if nonAxi and phi is None:
        raise PotentialError("The (list of) Potential instances is non-axisymmetric, but you did not provide phi")
    dissipative = _isDissipative(Pot)
    if dissipative and v is None:
        raise PotentialError("The (list of) Potential instances includes dissipative, but you did not provide the 3D velocity (required for dissipative forces")
    if isList:
        sum = 0.
        for pot in Pot:
            if isinstance(pot, DissipativeForce):
                sum += pot.rforce(R, z, phi=phi, t=t, v=v, use_physical=False)
            else:
                sum += pot.rforce(R, z, phi=phi, t=t, use_physical=False)
        return sum
    elif isinstance(Pot, Potential):
        return Pot.rforce(R, z, phi=phi, t=t, use_physical=False)
    elif isinstance(Pot, DissipativeForce):
        return Pot.rforce(R, z, phi=phi, t=t, v=v, use_physical=False)
    else:  # pragma: no cover
        raise PotentialError("Input to 'evaluaterforces' is neither a Potential-instance or a list of such instances")
[ "def", "evaluaterforces", "(", "Pot", ",", "R", ",", "z", ",", "phi", "=", "None", ",", "t", "=", "0.", ",", "v", "=", "None", ")", ":", "isList", "=", "isinstance", "(", "Pot", ",", "list", ")", "nonAxi", "=", "_isNonAxi", "(", "Pot", ")", "if", "nonAxi", "and", "phi", "is", "None", ":", "raise", "PotentialError", "(", "\"The (list of) Potential instances is non-axisymmetric, but you did not provide phi\"", ")", "dissipative", "=", "_isDissipative", "(", "Pot", ")", "if", "dissipative", "and", "v", "is", "None", ":", "raise", "PotentialError", "(", "\"The (list of) Potential instances includes dissipative, but you did not provide the 3D velocity (required for dissipative forces\"", ")", "if", "isList", ":", "sum", "=", "0.", "for", "pot", "in", "Pot", ":", "if", "isinstance", "(", "pot", ",", "DissipativeForce", ")", ":", "sum", "+=", "pot", ".", "rforce", "(", "R", ",", "z", ",", "phi", "=", "phi", ",", "t", "=", "t", ",", "v", "=", "v", ",", "use_physical", "=", "False", ")", "else", ":", "sum", "+=", "pot", ".", "rforce", "(", "R", ",", "z", ",", "phi", "=", "phi", ",", "t", "=", "t", ",", "use_physical", "=", "False", ")", "return", "sum", "elif", "isinstance", "(", "Pot", ",", "Potential", ")", ":", "return", "Pot", ".", "rforce", "(", "R", ",", "z", ",", "phi", "=", "phi", ",", "t", "=", "t", ",", "use_physical", "=", "False", ")", "elif", "isinstance", "(", "Pot", ",", "DissipativeForce", ")", ":", "return", "Pot", ".", "rforce", "(", "R", ",", "z", ",", "phi", "=", "phi", ",", "t", "=", "t", ",", "v", "=", "v", ",", "use_physical", "=", "False", ")", "else", ":", "#pragma: no cover ", "raise", "PotentialError", "(", "\"Input to 'evaluaterforces' is neither a Potential-instance or a list of such instances\"", ")" ]
NAME: evaluaterforces PURPOSE: convenience function to evaluate a possible sum of potentials INPUT: Pot - a potential or list of potentials R - cylindrical Galactocentric distance (can be Quantity) z - distance above the plane (can be Quantity) phi - azimuth (optional; can be Quantity) t - time (optional; can be Quantity) v - current velocity in cylindrical coordinates (optional, but required when including dissipative forces; can be a Quantity) OUTPUT: F_r(R,z,phi,t) HISTORY: 2016-06-10 - Written - Bovy (UofT)
[ "NAME", ":" ]
language: python | partition: train | avg_line_len: 31.685185
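A hedged usage sketch, assuming galpy's bundled MWPotential2014 (a list of potentials, which exercises the isList branch above; R and z are in galpy's internal, dimensionless units):

from galpy.potential import MWPotential2014, evaluaterforces

# Radial force of the combined Milky-Way-like potential at R=1., z=0.
print(evaluaterforces(MWPotential2014, 1., 0.))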
repo: vertexproject/synapse
path: synapse/lib/node.py
url: https://github.com/vertexproject/synapse/blob/22e67c5a8f6d7caddbcf34b39ab1bd2d6c4a6e0b/synapse/lib/node.py#L133-L141
code:

async def seen(self, tick, source=None):
    '''
    Update the .seen interval and optionally a source specific seen node.
    '''
    await self.set('.seen', tick)

    if source is not None:
        seen = await self.snap.addNode('meta:seen', (source, self.ndef))
        await seen.set('.seen', tick)
[ "async", "def", "seen", "(", "self", ",", "tick", ",", "source", "=", "None", ")", ":", "await", "self", ".", "set", "(", "'.seen'", ",", "tick", ")", "if", "source", "is", "not", "None", ":", "seen", "=", "await", "self", ".", "snap", ".", "addNode", "(", "'meta:seen'", ",", "(", "source", ",", "self", ".", "ndef", ")", ")", "await", "seen", ".", "set", "(", "'.seen'", ",", "tick", ")" ]
Update the .seen interval and optionally a source specific seen node.
[ "Update", "the", ".", "seen", "interval", "and", "optionally", "a", "source", "specific", "seen", "node", "." ]
language: python | partition: train | avg_line_len: 35.888889
repo: lk-geimfari/mimesis
path: mimesis/providers/code.py
url: https://github.com/lk-geimfari/mimesis/blob/4b16ee7a8dba6281a904654a88dbb4b052869fc5/mimesis/providers/code.py#L54-L69
code:

def isbn(self, fmt: Optional[ISBNFormat] = None, locale: str = 'en') -> str:
    """Generate ISBN for current locale.

    To change ISBN format, pass parameter ``fmt`` with needed value of
    the enum object :class:`~mimesis.enums.ISBNFormat`

    :param fmt: ISBN format.
    :param locale: Locale code.
    :return: ISBN.
    :raises NonEnumerableError: if fmt is not enum ISBNFormat.
    """
    fmt_value = self._validate_enum(item=fmt, enum=ISBNFormat)
    mask = ISBN_MASKS[fmt_value].format(ISBN_GROUPS[locale])
    return self.random.custom_code(mask)
[ "def", "isbn", "(", "self", ",", "fmt", ":", "Optional", "[", "ISBNFormat", "]", "=", "None", ",", "locale", ":", "str", "=", "'en'", ")", "->", "str", ":", "fmt_value", "=", "self", ".", "_validate_enum", "(", "item", "=", "fmt", ",", "enum", "=", "ISBNFormat", ")", "mask", "=", "ISBN_MASKS", "[", "fmt_value", "]", ".", "format", "(", "ISBN_GROUPS", "[", "locale", "]", ")", "return", "self", ".", "random", ".", "custom_code", "(", "mask", ")" ]
Generate ISBN for current locale. To change ISBN format, pass parameter ``fmt`` with needed value of the enum object :class:`~mimesis.enums.ISBNFormat` :param fmt: ISBN format. :param locale: Locale code. :return: ISBN. :raises NonEnumerableError: if fmt is not enum ISBNFormat.
[ "Generate", "ISBN", "for", "current", "locale", "." ]
language: python | partition: train | avg_line_len: 38.5
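A minimal usage sketch for the provider method above, assuming mimesis' public entry points (the Code provider and the ISBNFormat enum):

from mimesis import Code
from mimesis.enums import ISBNFormat

code = Code()
print(code.isbn())                       # random ISBN in the default format
print(code.isbn(fmt=ISBNFormat.ISBN13))  # explicit ISBN-13 mask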
repo: opereto/pyopereto
path: pyopereto/client.py
url: https://github.com/opereto/pyopereto/blob/16ca987738a7e1b82b52b0b099794a74ed557223/pyopereto/client.py#L934-L987
code:

def create_process(self, service, agent=None, title=None, mode=None, service_version=None, **kwargs):
    '''
    create_process(self, service, agent=None, title=None, mode=None, service_version=None, **kwargs)

    Registers a new process or processes

    :Parameters:
    * *service* (`string`) -- Service which process will be started
    * *agent* (`string`) -- The service identifier (e.g shell_command)
    * *title* (`string`) -- Title for the process
    * *mode* (`string`) -- production/development
    * *service_version* (`string`) -- Version of the service to execute

    :Keywords args:
    Json value map containing the process input properties

    :return: process id

    :Example:
    .. code-block:: python

       process_properties = {"my_input_param" : "1"}
       pid = opereto_client.create_process(service='simple_shell_command', title='Test simple shell command service', agent=opereto_client.input['opereto_agent'], **process_properties)

    '''
    if not agent:
        agent = self.input.get('opereto_agent')

    if not mode:
        mode = self.input.get('opereto_execution_mode') or 'production'

    if not service_version:
        service_version = self.input.get('opereto_service_version')

    request_data = {'service_id': service, 'agents': agent, 'mode': mode, 's_version': service_version}
    if title:
        request_data['name'] = title

    if self.input.get('pid'):
        request_data['pflow_id'] = self.input.get('pid')

    request_data.update(**kwargs)

    ret_data = self._call_rest_api('post', '/processes', data=request_data, error='Failed to create a new process')
    if not isinstance(ret_data, list):
        raise OperetoClientError(str(ret_data))

    pid = ret_data[0]
    message = 'New process created for service [%s] [pid = %s] ' % (service, pid)
    if agent:
        message += ' [agent = %s]' % agent
    else:
        message += ' [agent = any ]'
    self.logger.info(message)
    return str(pid)
[ "def", "create_process", "(", "self", ",", "service", ",", "agent", "=", "None", ",", "title", "=", "None", ",", "mode", "=", "None", ",", "service_version", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "agent", ":", "agent", "=", "self", ".", "input", ".", "get", "(", "'opereto_agent'", ")", "if", "not", "mode", ":", "mode", "=", "self", ".", "input", ".", "get", "(", "'opereto_execution_mode'", ")", "or", "'production'", "if", "not", "service_version", ":", "service_version", "=", "self", ".", "input", ".", "get", "(", "'opereto_service_version'", ")", "request_data", "=", "{", "'service_id'", ":", "service", ",", "'agents'", ":", "agent", ",", "'mode'", ":", "mode", ",", "'s_version'", ":", "service_version", "}", "if", "title", ":", "request_data", "[", "'name'", "]", "=", "title", "if", "self", ".", "input", ".", "get", "(", "'pid'", ")", ":", "request_data", "[", "'pflow_id'", "]", "=", "self", ".", "input", ".", "get", "(", "'pid'", ")", "request_data", ".", "update", "(", "*", "*", "kwargs", ")", "ret_data", "=", "self", ".", "_call_rest_api", "(", "'post'", ",", "'/processes'", ",", "data", "=", "request_data", ",", "error", "=", "'Failed to create a new process'", ")", "if", "not", "isinstance", "(", "ret_data", ",", "list", ")", ":", "raise", "OperetoClientError", "(", "str", "(", "ret_data", ")", ")", "pid", "=", "ret_data", "[", "0", "]", "message", "=", "'New process created for service [%s] [pid = %s] '", "%", "(", "service", ",", "pid", ")", "if", "agent", ":", "message", "+=", "' [agent = %s]'", "%", "agent", "else", ":", "message", "+=", "' [agent = any ]'", "self", ".", "logger", ".", "info", "(", "message", ")", "return", "str", "(", "pid", ")" ]
create_process(self, service, agent=None, title=None, mode=None, service_version=None, **kwargs) Registers a new process or processes :Parameters: * *service* (`string`) -- Service which process will be started * *agent* (`string`) -- The service identifier (e.g shell_command) * *title* (`string`) -- Title for the process * *mode* (`string`) -- production/development * *service_version* (`string`) -- Version of the service to execute :Keywords args: Json value map containing the process input properties :return: process id :Example: .. code-block:: python process_properties = {"my_input_param" : "1"} pid = opereto_client.create_process(service='simple_shell_command', title='Test simple shell command service', agent=opereto_client.input['opereto_agent'], **process_properties)
[ "create_process", "(", "self", "service", "agent", "=", "None", "title", "=", "None", "mode", "=", "None", "service_version", "=", "None", "**", "kwargs", ")" ]
language: python | partition: train | avg_line_len: 38.203704
repo: wbond/oscrypto
path: oscrypto/_openssl/tls.py
url: https://github.com/wbond/oscrypto/blob/af778bf1c88bf6c4a7342f5353b130686a5bbe1c/oscrypto/_openssl/tls.py#L986-L1035
code:

def _shutdown(self, manual):
    """
    Shuts down the TLS session and then shuts down the underlying socket

    :param manual:
        A boolean if the connection was manually shutdown
    """
    if self._ssl is None:
        return

    while True:
        result = libssl.SSL_shutdown(self._ssl)

        # Don't be noisy if the socket is already closed
        try:
            self._raw_write()
        except (TLSDisconnectError):
            pass

        if result >= 0:
            break
        if result < 0:
            error = libssl.SSL_get_error(self._ssl, result)
            if error == LibsslConst.SSL_ERROR_WANT_READ:
                if self._raw_read() != b'':
                    continue
                else:
                    break
            elif error == LibsslConst.SSL_ERROR_WANT_WRITE:
                self._raw_write()
                continue
            else:
                handle_openssl_error(0, TLSError)

    if manual:
        self._local_closed = True

    libssl.SSL_free(self._ssl)
    self._ssl = None
    # BIOs are freed by SSL_free()
    self._rbio = None
    self._wbio = None

    try:
        self._socket.shutdown(socket_.SHUT_RDWR)
    except (socket_.error):
        pass
[ "def", "_shutdown", "(", "self", ",", "manual", ")", ":", "if", "self", ".", "_ssl", "is", "None", ":", "return", "while", "True", ":", "result", "=", "libssl", ".", "SSL_shutdown", "(", "self", ".", "_ssl", ")", "# Don't be noisy if the socket is already closed", "try", ":", "self", ".", "_raw_write", "(", ")", "except", "(", "TLSDisconnectError", ")", ":", "pass", "if", "result", ">=", "0", ":", "break", "if", "result", "<", "0", ":", "error", "=", "libssl", ".", "SSL_get_error", "(", "self", ".", "_ssl", ",", "result", ")", "if", "error", "==", "LibsslConst", ".", "SSL_ERROR_WANT_READ", ":", "if", "self", ".", "_raw_read", "(", ")", "!=", "b''", ":", "continue", "else", ":", "break", "elif", "error", "==", "LibsslConst", ".", "SSL_ERROR_WANT_WRITE", ":", "self", ".", "_raw_write", "(", ")", "continue", "else", ":", "handle_openssl_error", "(", "0", ",", "TLSError", ")", "if", "manual", ":", "self", ".", "_local_closed", "=", "True", "libssl", ".", "SSL_free", "(", "self", ".", "_ssl", ")", "self", ".", "_ssl", "=", "None", "# BIOs are freed by SSL_free()", "self", ".", "_rbio", "=", "None", "self", ".", "_wbio", "=", "None", "try", ":", "self", ".", "_socket", ".", "shutdown", "(", "socket_", ".", "SHUT_RDWR", ")", "except", "(", "socket_", ".", "error", ")", ":", "pass" ]
Shuts down the TLS session and then shuts down the underlying socket :param manual: A boolean if the connection was manually shutdown
[ "Shuts", "down", "the", "TLS", "session", "and", "then", "shuts", "down", "the", "underlying", "socket" ]
language: python | partition: valid | avg_line_len: 26.78
repo: Kentzo/Power
path: power/win32.py
url: https://github.com/Kentzo/Power/blob/2c99b156546225e448f7030681af3df5cd345e4b/power/win32.py#L45-L54
code:

def get_providing_power_source_type(self):
    """
    Returns GetSystemPowerStatus().ACLineStatus

    @raise: WindowsError if any underlying error occurs.
    """
    power_status = SYSTEM_POWER_STATUS()
    if not GetSystemPowerStatus(pointer(power_status)):
        raise WinError()
    return POWER_TYPE_MAP[power_status.ACLineStatus]
[ "def", "get_providing_power_source_type", "(", "self", ")", ":", "power_status", "=", "SYSTEM_POWER_STATUS", "(", ")", "if", "not", "GetSystemPowerStatus", "(", "pointer", "(", "power_status", ")", ")", ":", "raise", "WinError", "(", ")", "return", "POWER_TYPE_MAP", "[", "power_status", ".", "ACLineStatus", "]" ]
Returns GetSystemPowerStatus().ACLineStatus @raise: WindowsError if any underlying error occures.
[ "Returns", "GetSystemPowerStatus", "()", ".", "ACLineStatus" ]
language: python | partition: train | avg_line_len: 36.3
repo: nugget/python-insteonplm
path: insteonplm/tools.py
url: https://github.com/nugget/python-insteonplm/blob/65548041f1b0729ae1ae904443dd81b0c6cbf1bf/insteonplm/tools.py#L133-L153
code:

async def start_all_linking(self, linkcode, group, address=None):
    """Start the All-Linking process with the IM and device."""
    _LOGGING.info('Starting the All-Linking process')
    if address:
        linkdevice = self.plm.devices[Address(address).id]
        if not linkdevice:
            linkdevice = create(self.plm, address, None, None)
        _LOGGING.info('Attempting to link the PLM to device %s. ', address)
        self.plm.start_all_linking(linkcode, group)
        # The original called asyncio.sleep() without awaiting it, so the
        # coroutine was never run and the pause never happened.
        await asyncio.sleep(.5, loop=self.loop)
        linkdevice.enter_linking_mode(group=group)
    else:
        _LOGGING.info('Starting All-Linking on PLM. '
                      'Waiting for button press')
        self.plm.start_all_linking(linkcode, group)

    await asyncio.sleep(self.wait_time, loop=self.loop)

    _LOGGING.info('%d devices added to the All-Link Database',
                  len(self.plm.devices))
    await asyncio.sleep(.1, loop=self.loop)
[ "async", "def", "start_all_linking", "(", "self", ",", "linkcode", ",", "group", ",", "address", "=", "None", ")", ":", "_LOGGING", ".", "info", "(", "'Starting the All-Linking process'", ")", "if", "address", ":", "linkdevice", "=", "self", ".", "plm", ".", "devices", "[", "Address", "(", "address", ")", ".", "id", "]", "if", "not", "linkdevice", ":", "linkdevice", "=", "create", "(", "self", ".", "plm", ",", "address", ",", "None", ",", "None", ")", "_LOGGING", ".", "info", "(", "'Attempting to link the PLM to device %s. '", ",", "address", ")", "self", ".", "plm", ".", "start_all_linking", "(", "linkcode", ",", "group", ")", "asyncio", ".", "sleep", "(", ".5", ",", "loop", "=", "self", ".", "loop", ")", "linkdevice", ".", "enter_linking_mode", "(", "group", "=", "group", ")", "else", ":", "_LOGGING", ".", "info", "(", "'Starting All-Linking on PLM. '", "'Waiting for button press'", ")", "self", ".", "plm", ".", "start_all_linking", "(", "linkcode", ",", "group", ")", "await", "asyncio", ".", "sleep", "(", "self", ".", "wait_time", ",", "loop", "=", "self", ".", "loop", ")", "_LOGGING", ".", "info", "(", "'%d devices added to the All-Link Database'", ",", "len", "(", "self", ".", "plm", ".", "devices", ")", ")", "await", "asyncio", ".", "sleep", "(", ".1", ",", "loop", "=", "self", ".", "loop", ")" ]
Start the All-Linking process with the IM and device.
[ "Start", "the", "All", "-", "Linking", "process", "with", "the", "IM", "and", "device", "." ]
language: python | partition: train | avg_line_len: 48.666667
repo: adamziel/python_translate
path: python_translate/operations.py
url: https://github.com/adamziel/python_translate/blob/0aee83f434bd2d1b95767bcd63adb7ac7036c7df/python_translate/operations.py#L56-L68
code:

def get_messages(self, domain):
    """
    Returns all valid messages after operation.

    @type domain: str
    @rtype: dict
    """
    if domain not in self.domains:
        raise ValueError('Invalid domain: {0}'.format(domain))

    if domain not in self.messages or 'all' not in self.messages[domain]:
        self._process_domain(domain)

    return self.messages[domain]['all']
[ "def", "get_messages", "(", "self", ",", "domain", ")", ":", "if", "domain", "not", "in", "self", ".", "domains", ":", "raise", "ValueError", "(", "'Invalid domain: {0}'", ".", "format", "(", "domain", ")", ")", "if", "domain", "not", "in", "self", ".", "messages", "or", "'all'", "not", "in", "self", ".", "messages", "[", "domain", "]", ":", "self", ".", "_process_domain", "(", "domain", ")", "return", "self", ".", "messages", "[", "domain", "]", "[", "'all'", "]" ]
Returns all valid messages after operation. @type domain: str @rtype: dict
[ "Returns", "all", "valid", "messages", "after", "operation", "." ]
language: python | partition: train | avg_line_len: 31.769231
repo: genialis/django-rest-framework-reactive
path: src/rest_framework_reactive/observer.py
url: https://github.com/genialis/django-rest-framework-reactive/blob/ddf3d899685a54b6bd0ae4b3789649a89340c59f/src/rest_framework_reactive/observer.py#L252-L325
code:

async def evaluate(self):
    """Evaluate the query observer.

    :param return_emitted: True if the emitted diffs should be returned
        (testing only)
    """

    @database_sync_to_async
    def remove_subscribers():
        models.Observer.subscribers.through.objects.filter(
            observer_id=self.id
        ).delete()

    @database_sync_to_async
    def get_subscriber_sessions():
        return list(
            models.Observer.subscribers.through.objects.filter(observer_id=self.id)
            .distinct('subscriber_id')
            .values_list('subscriber_id', flat=True)
        )

    try:
        settings = get_queryobserver_settings()

        start = time.time()
        # Evaluate the observer
        added, changed, removed = await database_sync_to_async(self._evaluate)()
        duration = time.time() - start

        # Log slow observers.
        if duration > settings['warnings']['max_processing_time']:
            self._warning("Slow observed viewset", duration=duration)

        # Remove subscribers of really slow observers.
        if duration > settings['errors']['max_processing_time']:
            logger.error(
                "Removing subscribers to extremely slow observed viewset ({})".format(
                    self._get_logging_id()
                ),
                extra=self._get_logging_extra(duration=duration),
            )
            await remove_subscribers()

        if self._meta.change_detection == Options.CHANGE_DETECTION_POLL:
            # Register poller.
            await get_channel_layer().send(
                CHANNEL_MAIN,
                {
                    'type': TYPE_POLL,
                    'observer': self.id,
                    'interval': self._meta.poll_interval,
                },
            )

        message = {
            'type': TYPE_ITEM_UPDATE,
            'observer': self.id,
            'primary_key': self._meta.primary_key,
            'added': added,
            'changed': changed,
            'removed': removed,
        }

        # Only generate notifications in case there were any changes.
        if added or changed or removed:
            for session_id in await get_subscriber_sessions():
                await get_channel_layer().group_send(
                    GROUP_SESSIONS.format(session_id=session_id), message
                )
    except Exception:
        logger.exception(
            "Error while evaluating observer ({})".format(self._get_logging_id()),
            extra=self._get_logging_extra(),
        )
[ "async", "def", "evaluate", "(", "self", ")", ":", "@", "database_sync_to_async", "def", "remove_subscribers", "(", ")", ":", "models", ".", "Observer", ".", "subscribers", ".", "through", ".", "objects", ".", "filter", "(", "observer_id", "=", "self", ".", "id", ")", ".", "delete", "(", ")", "@", "database_sync_to_async", "def", "get_subscriber_sessions", "(", ")", ":", "return", "list", "(", "models", ".", "Observer", ".", "subscribers", ".", "through", ".", "objects", ".", "filter", "(", "observer_id", "=", "self", ".", "id", ")", ".", "distinct", "(", "'subscriber_id'", ")", ".", "values_list", "(", "'subscriber_id'", ",", "flat", "=", "True", ")", ")", "try", ":", "settings", "=", "get_queryobserver_settings", "(", ")", "start", "=", "time", ".", "time", "(", ")", "# Evaluate the observer", "added", ",", "changed", ",", "removed", "=", "await", "database_sync_to_async", "(", "self", ".", "_evaluate", ")", "(", ")", "duration", "=", "time", ".", "time", "(", ")", "-", "start", "# Log slow observers.", "if", "duration", ">", "settings", "[", "'warnings'", "]", "[", "'max_processing_time'", "]", ":", "self", ".", "_warning", "(", "\"Slow observed viewset\"", ",", "duration", "=", "duration", ")", "# Remove subscribers of really slow observers.", "if", "duration", ">", "settings", "[", "'errors'", "]", "[", "'max_processing_time'", "]", ":", "logger", ".", "error", "(", "\"Removing subscribers to extremely slow observed viewset ({})\"", ".", "format", "(", "self", ".", "_get_logging_id", "(", ")", ")", ",", "extra", "=", "self", ".", "_get_logging_extra", "(", "duration", "=", "duration", ")", ",", ")", "await", "remove_subscribers", "(", ")", "if", "self", ".", "_meta", ".", "change_detection", "==", "Options", ".", "CHANGE_DETECTION_POLL", ":", "# Register poller.", "await", "get_channel_layer", "(", ")", ".", "send", "(", "CHANNEL_MAIN", ",", "{", "'type'", ":", "TYPE_POLL", ",", "'observer'", ":", "self", ".", "id", ",", "'interval'", ":", "self", ".", "_meta", ".", "poll_interval", ",", "}", ",", ")", "message", "=", "{", "'type'", ":", "TYPE_ITEM_UPDATE", ",", "'observer'", ":", "self", ".", "id", ",", "'primary_key'", ":", "self", ".", "_meta", ".", "primary_key", ",", "'added'", ":", "added", ",", "'changed'", ":", "changed", ",", "'removed'", ":", "removed", ",", "}", "# Only generate notifications in case there were any changes.", "if", "added", "or", "changed", "or", "removed", ":", "for", "session_id", "in", "await", "get_subscriber_sessions", "(", ")", ":", "await", "get_channel_layer", "(", ")", ".", "group_send", "(", "GROUP_SESSIONS", ".", "format", "(", "session_id", "=", "session_id", ")", ",", "message", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "\"Error while evaluating observer ({})\"", ".", "format", "(", "self", ".", "_get_logging_id", "(", ")", ")", ",", "extra", "=", "self", ".", "_get_logging_extra", "(", ")", ",", ")" ]
Evaluate the query observer. :param return_emitted: True if the emitted diffs should be returned (testing only)
[ "Evaluate", "the", "query", "observer", "." ]
language: python | partition: train | avg_line_len: 36.689189
repo: mitsei/dlkit
path: dlkit/services/repository.py
url: https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/services/repository.py#L1810-L1818
code:

def use_plenary_asset_composition_view(self):
    """Pass through to provider AssetCompositionSession.use_plenary_asset_composition_view"""
    self._object_views['asset_composition'] = PLENARY
    # self._get_provider_session('asset_composition_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_asset_composition_view()
        except AttributeError:
            pass
[ "def", "use_plenary_asset_composition_view", "(", "self", ")", ":", "self", ".", "_object_views", "[", "'asset_composition'", "]", "=", "PLENARY", "# self._get_provider_session('asset_composition_session') # To make sure the session is tracked", "for", "session", "in", "self", ".", "_get_provider_sessions", "(", ")", ":", "try", ":", "session", ".", "use_plenary_asset_composition_view", "(", ")", "except", "AttributeError", ":", "pass" ]
Pass through to provider AssetCompositionSession.use_plenary_asset_composition_view
[ "Pass", "through", "to", "provider", "AssetCompositionSession", ".", "use_plenary_asset_composition_view" ]
language: python | partition: train | avg_line_len: 53.888889
repo: dj-stripe/dj-stripe
path: djstripe/checks.py
url: https://github.com/dj-stripe/dj-stripe/blob/a5308a3808cd6e2baba49482f7a699f3a8992518/djstripe/checks.py#L126-L144
code:

def check_webhook_secret(app_configs=None, **kwargs):
    """
    Check that DJSTRIPE_WEBHOOK_SECRET looks correct
    """
    from . import settings as djstripe_settings

    messages = []
    secret = djstripe_settings.WEBHOOK_SECRET
    if secret and not secret.startswith("whsec_"):
        messages.append(
            checks.Warning(
                "DJSTRIPE_WEBHOOK_SECRET does not look valid",
                hint="It should start with whsec_...",
                id="djstripe.W003",
            )
        )

    return messages
[ "def", "check_webhook_secret", "(", "app_configs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "from", ".", "import", "settings", "as", "djstripe_settings", "messages", "=", "[", "]", "secret", "=", "djstripe_settings", ".", "WEBHOOK_SECRET", "if", "secret", "and", "not", "secret", ".", "startswith", "(", "\"whsec_\"", ")", ":", "messages", ".", "append", "(", "checks", ".", "Warning", "(", "\"DJSTRIPE_WEBHOOK_SECRET does not look valid\"", ",", "hint", "=", "\"It should start with whsec_...\"", ",", "id", "=", "\"djstripe.W003\"", ",", ")", ")", "return", "messages" ]
Check that DJSTRIPE_WEBHOOK_SECRET looks correct
[ "Check", "that", "DJSTRIPE_WEBHOOK_SECRET", "looks", "correct" ]
language: python | partition: train | avg_line_len: 22.684211
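This follows Django's system-check protocol: a function that accepts app_configs and **kwargs and returns a list of CheckMessage objects. A minimal sketch of wiring up a similar check through Django's public API (the function name and tag choice here are illustrative):

from django.core import checks

@checks.register(checks.Tags.compatibility)
def check_example_setting(app_configs=None, **kwargs):
    # Return an empty list when everything looks fine; Warning/Error
    # instances otherwise, as check_webhook_secret does above.
    return []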
repo: romis2012/aiosocksy
path: aiosocksy/connector.py
url: https://github.com/romis2012/aiosocksy/blob/b3c3e5f067341f7b9ef1d3e1a61f9e5bf28a7df1/aiosocksy/connector.py#L55-L63
code:

async def _create_proxy_connection(self, req, *args, **kwargs):
    """
    args, kwargs can contain different elements (traces, timeout,...)
    depending on aiohttp version
    """
    if req.proxy.scheme == 'http':
        return await super()._create_proxy_connection(req, *args, **kwargs)
    else:
        return await self._create_socks_connection(req=req)
[ "async", "def", "_create_proxy_connection", "(", "self", ",", "req", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "req", ".", "proxy", ".", "scheme", "==", "'http'", ":", "return", "await", "super", "(", ")", ".", "_create_proxy_connection", "(", "req", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "await", "self", ".", "_create_socks_connection", "(", "req", "=", "req", ")" ]
args, kwargs can contain different elements (traces, timeout,...) depending on aiohttp version
[ "args", "kwargs", "can", "contain", "different", "elements", "(", "traces", "timeout", "...", ")", "depending", "on", "aiohttp", "version" ]
language: python | partition: train | avg_line_len: 43
repo: shoebot/shoebot
path: lib/web/BeautifulSoup.py
url: https://github.com/shoebot/shoebot/blob/d554c1765c1899fa25727c9fc6805d221585562b/lib/web/BeautifulSoup.py#L457-L462
code:

def _invert(h):
    "Cheap function to invert a hash."
    i = {}
    for k, v in h.items():
        i[v] = k
    return i
[ "def", "_invert", "(", "h", ")", ":", "i", "=", "{", "}", "for", "k", ",", "v", "in", "h", ".", "items", "(", ")", ":", "i", "[", "v", "]", "=", "k", "return", "i" ]
Cheap function to invert a hash.
[ "Cheap", "function", "to", "invert", "a", "hash", "." ]
language: python | partition: valid | avg_line_len: 22.666667
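A quick illustration of _invert's behavior; note that duplicate values collapse, since later keys overwrite earlier ones in the result:

pairs = {'lt': '<', 'gt': '>'}
print(_invert(pairs))  # {'<': 'lt', '>': 'gt'}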
repo: gbiggs/rtctree
path: rtctree/manager.py
url: https://github.com/gbiggs/rtctree/blob/bd725a47ac87c259c8bce06156ccc9ab71111c26/rtctree/manager.py#L390-L400
code:

def slaves(self):
    '''The list of slave managers of this manager, if any.

    This information can also be found by listing the children of this
    node that are of type @ref Manager.
    '''
    with self._mutex:
        if not self._slaves:
            self._slaves = [c for c in self.children if c.is_manager]
        return self._slaves
[ "def", "slaves", "(", "self", ")", ":", "with", "self", ".", "_mutex", ":", "if", "not", "self", ".", "_slaves", ":", "self", ".", "_slaves", "=", "[", "c", "for", "c", "in", "self", ".", "children", "if", "c", ".", "is_manager", "]", "return", "self", ".", "_slaves" ]
The list of slave managers of this manager, if any. This information can also be found by listing the children of this node that are of type @ref Manager.
[ "The", "list", "of", "slave", "managers", "of", "this", "manager", "if", "any", "." ]
language: python | partition: train | avg_line_len: 33.090909
repo: rigetti/quantumflow
path: quantumflow/circuits.py
url: https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/circuits.py#L152-L167
code:

def map_gate(gate: Gate, args: Sequence[Qubits]) -> Circuit:
    """Applies the same gate to all input qubits in the argument list.

    >>> circ = qf.map_gate(qf.H(), [[0], [1], [2]])
    >>> print(circ)
    H(0)
    H(1)
    H(2)
    """
    circ = Circuit()

    for qubits in args:
        circ += gate.relabel(qubits)

    return circ
[ "def", "map_gate", "(", "gate", ":", "Gate", ",", "args", ":", "Sequence", "[", "Qubits", "]", ")", "->", "Circuit", ":", "circ", "=", "Circuit", "(", ")", "for", "qubits", "in", "args", ":", "circ", "+=", "gate", ".", "relabel", "(", "qubits", ")", "return", "circ" ]
Applies the same gate all input qubits in the argument list. >>> circ = qf.map_gate(qf.H(), [[0], [1], [2]]) >>> print(circ) H(0) H(1) H(2)
[ "Applies", "the", "same", "gate", "all", "input", "qubits", "in", "the", "argument", "list", "." ]
language: python | partition: train | avg_line_len: 20.125
repo: opencobra/cobrapy
path: cobra/flux_analysis/gapfilling.py
url: https://github.com/opencobra/cobrapy/blob/9d1987cdb3a395cf4125a3439c3b002ff2be2009/cobra/flux_analysis/gapfilling.py#L256-L313
code:

def gapfill(model, universal=None, lower_bound=0.05,
            penalties=None, demand_reactions=True,
            exchange_reactions=False, iterations=1):
    """Perform gapfilling on a model.

    See documentation for the class GapFiller.

    Parameters
    ----------
    model : cobra.Model
        The model to perform gap filling on.
    universal : cobra.Model, None
        A universal model with reactions that can be used to complete the
        model. Only gapfill considering demand and exchange reactions if
        left missing.
    lower_bound : float
        The minimally accepted flux for the objective in the filled model.
    penalties : dict, None
        A dictionary with keys being 'universal' (all reactions included in
        the universal model), 'exchange' and 'demand' (all additionally
        added exchange and demand reactions) for the three reaction types.
        Can also have reaction identifiers for reaction specific costs.
        Defaults are 1, 100 and 1 respectively.
    iterations : int
        The number of rounds of gapfilling to perform. For every iteration,
        the penalty for every used reaction increases linearly. This way,
        the algorithm is encouraged to search for alternative solutions
        which may include previously used reactions. I.e., with enough
        iterations pathways including 10 steps will eventually be reported
        even if the shortest pathway is a single reaction.
    exchange_reactions : bool
        Consider adding exchange (uptake) reactions for all metabolites
        in the model.
    demand_reactions : bool
        Consider adding demand reactions for all metabolites.

    Returns
    -------
    iterable
        list of lists with one set of reactions that completes the model
        per requested iteration.

    Examples
    --------
    >>> import cobra.test as ct
    >>> from cobra import Model
    >>> from cobra.flux_analysis import gapfill
    >>> model = ct.create_test_model("salmonella")
    >>> universal = Model('universal')
    >>> universal.add_reactions(model.reactions.GF6PTA.copy())
    >>> model.remove_reactions([model.reactions.GF6PTA])
    >>> gapfill(model, universal)
    """
    gapfiller = GapFiller(model, universal=universal,
                          lower_bound=lower_bound, penalties=penalties,
                          demand_reactions=demand_reactions,
                          exchange_reactions=exchange_reactions)
    return gapfiller.fill(iterations=iterations)
[ "def", "gapfill", "(", "model", ",", "universal", "=", "None", ",", "lower_bound", "=", "0.05", ",", "penalties", "=", "None", ",", "demand_reactions", "=", "True", ",", "exchange_reactions", "=", "False", ",", "iterations", "=", "1", ")", ":", "gapfiller", "=", "GapFiller", "(", "model", ",", "universal", "=", "universal", ",", "lower_bound", "=", "lower_bound", ",", "penalties", "=", "penalties", ",", "demand_reactions", "=", "demand_reactions", ",", "exchange_reactions", "=", "exchange_reactions", ")", "return", "gapfiller", ".", "fill", "(", "iterations", "=", "iterations", ")" ]
Perform gapfilling on a model. See documentation for the class GapFiller. Parameters ---------- model : cobra.Model The model to perform gap filling on. universal : cobra.Model, None A universal model with reactions that can be used to complete the model. Only gapfill considering demand and exchange reactions if left missing. lower_bound : float The minimally accepted flux for the objective in the filled model. penalties : dict, None A dictionary with keys being 'universal' (all reactions included in the universal model), 'exchange' and 'demand' (all additionally added exchange and demand reactions) for the three reaction types. Can also have reaction identifiers for reaction specific costs. Defaults are 1, 100 and 1 respectively. iterations : int The number of rounds of gapfilling to perform. For every iteration, the penalty for every used reaction increases linearly. This way, the algorithm is encouraged to search for alternative solutions which may include previously used reactions. I.e., with enough iterations pathways including 10 steps will eventually be reported even if the shortest pathway is a single reaction. exchange_reactions : bool Consider adding exchange (uptake) reactions for all metabolites in the model. demand_reactions : bool Consider adding demand reactions for all metabolites. Returns ------- iterable list of lists with on set of reactions that completes the model per requested iteration. Examples -------- >>> import cobra.test as ct >>> from cobra import Model >>> from cobra.flux_analysis import gapfill >>> model = ct.create_test_model("salmonella") >>> universal = Model('universal') >>> universal.add_reactions(model.reactions.GF6PTA.copy()) >>> model.remove_reactions([model.reactions.GF6PTA]) >>> gapfill(model, universal)
[ "Perform", "gapfilling", "on", "a", "model", "." ]
language: python | partition: valid | avg_line_len: 42.310345
repo: fusepy/fusepy
path: fuse.py
url: https://github.com/fusepy/fusepy/blob/5d997d6706cc0204e1b3ca679651485a7e7dda49/fuse.py#L808-L812
code:

def link(self, source, target):
    'creates a hard link `target -> source` (e.g. ln source target)'

    return self.operations('link', target.decode(self.encoding),
                                   source.decode(self.encoding))
[ "def", "link", "(", "self", ",", "source", ",", "target", ")", ":", "return", "self", ".", "operations", "(", "'link'", ",", "target", ".", "decode", "(", "self", ".", "encoding", ")", ",", "source", ".", "decode", "(", "self", ".", "encoding", ")", ")" ]
creates a hard link `target -> source` (e.g. ln source target)
[ "creates", "a", "hard", "link", "target", "-", ">", "source", "(", "e", ".", "g", ".", "ln", "source", "target", ")" ]
language: python | partition: train | avg_line_len: 47.8
repo: openego/eDisGo
path: edisgo/tools/edisgo_run.py
url: https://github.com/openego/eDisGo/blob/e6245bdaf236f9c49dbda5a18c1c458290f41e2b/edisgo/tools/edisgo_run.py#L218-L272
code:

def run_edisgo_pool(ding0_file_list, run_args_opt,
                    workers=mp.cpu_count(), worker_lifetime=1):
    """
    Use python multiprocessing toolbox for parallelization

    Several grids are analyzed in parallel.

    Parameters
    ----------
    ding0_file_list : list
        Ding0 grid data file names
    run_args_opt : list
        eDisGo options, see :func:`run_edisgo_basic` and
        :func:`run_edisgo_twice`
    workers: int
        Number of parallel process
    worker_lifetime : int
        Bunch of grids sequentially analyzed by a worker

    Returns
    -------
    all_costs_before_geno_import : list
        Grid extension cost before grid connection of new generators
    all_grid_issues_before_geno_import : list
        Remaining overloading or over-voltage issues in grid
    all_costs : list
        Grid extension cost due to grid connection of new generators
    all_grid_issues : list
        Remaining overloading or over-voltage issues in grid
    """

    def collect_pool_results(result):
        results.append(result)

    results = []
    pool = mp.Pool(workers, maxtasksperchild=worker_lifetime)
    for file in ding0_file_list:
        edisgo_args = [file] + run_args_opt
        pool.apply_async(func=run_edisgo_twice,
                         args=(edisgo_args,),
                         callback=collect_pool_results)

    pool.close()
    pool.join()

    # process results data
    all_costs_before_geno_import = [r[0] for r in results]
    all_grid_issues_before_geno_import = [r[1] for r in results]
    all_costs = [r[2] for r in results]
    all_grid_issues = [r[3] for r in results]

    return all_costs_before_geno_import, all_grid_issues_before_geno_import, \
        all_costs, all_grid_issues
[ "def", "run_edisgo_pool", "(", "ding0_file_list", ",", "run_args_opt", ",", "workers", "=", "mp", ".", "cpu_count", "(", ")", ",", "worker_lifetime", "=", "1", ")", ":", "def", "collect_pool_results", "(", "result", ")", ":", "results", ".", "append", "(", "result", ")", "results", "=", "[", "]", "pool", "=", "mp", ".", "Pool", "(", "workers", ",", "maxtasksperchild", "=", "worker_lifetime", ")", "for", "file", "in", "ding0_file_list", ":", "edisgo_args", "=", "[", "file", "]", "+", "run_args_opt", "pool", ".", "apply_async", "(", "func", "=", "run_edisgo_twice", ",", "args", "=", "(", "edisgo_args", ",", ")", ",", "callback", "=", "collect_pool_results", ")", "pool", ".", "close", "(", ")", "pool", ".", "join", "(", ")", "# process results data", "all_costs_before_geno_import", "=", "[", "r", "[", "0", "]", "for", "r", "in", "results", "]", "all_grid_issues_before_geno_import", "=", "[", "r", "[", "1", "]", "for", "r", "in", "results", "]", "all_costs", "=", "[", "r", "[", "2", "]", "for", "r", "in", "results", "]", "all_grid_issues", "=", "[", "r", "[", "3", "]", "for", "r", "in", "results", "]", "return", "all_costs_before_geno_import", ",", "all_grid_issues_before_geno_import", ",", "all_costs", ",", "all_grid_issues" ]
Use python multiprocessing toolbox for parallelization Several grids are analyzed in parallel. Parameters ---------- ding0_file_list : list Ding0 grid data file names run_args_opt : list eDisGo options, see :func:`run_edisgo_basic` and :func:`run_edisgo_twice` workers: int Number of parallel process worker_lifetime : int Bunch of grids sequentially analyzed by a worker Returns ------- all_costs_before_geno_import : list Grid extension cost before grid connection of new generators all_grid_issues_before_geno_import : list Remaining overloading or over-voltage issues in grid all_costs : list Grid extension cost due to grid connection of new generators all_grid_issues : list Remaining overloading or over-voltage issues in grid
[ "Use", "python", "multiprocessing", "toolbox", "for", "parallelization" ]
language: python | partition: train | avg_line_len: 31.418182
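The function above uses the classic apply_async-plus-callback pattern: the parent process appends each result as its worker finishes, so ordering follows completion, not submission. A stripped-down sketch of the same pattern (the worker function and inputs are illustrative):

import multiprocessing as mp

def square(x):
    return x * x

if __name__ == '__main__':
    results = []
    pool = mp.Pool(2, maxtasksperchild=1)
    for x in range(5):
        # callback runs in the parent process when each task completes
        pool.apply_async(func=square, args=(x,), callback=results.append)
    pool.close()
    pool.join()
    print(sorted(results))  # [0, 1, 4, 9, 16]; completion order is not guaranteed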
repo: PyCQA/astroid
path: astroid/node_classes.py
url: https://github.com/PyCQA/astroid/blob/e0a298df55b15abcb77c2a93253f5ab7be52d0fb/astroid/node_classes.py#L3008-L3022
code:

def postinit(self, expr=None, globals=None, locals=None):
    """Do some setup after initialisation.

    :param expr: The expression to be executed.
    :type expr: NodeNG or None

    :param globals: The globals dictionary to execute with.
    :type globals: NodeNG or None

    :param locals: The locals dictionary to execute with.
    :type locals: NodeNG or None
    """
    self.expr = expr
    self.globals = globals
    self.locals = locals
[ "def", "postinit", "(", "self", ",", "expr", "=", "None", ",", "globals", "=", "None", ",", "locals", "=", "None", ")", ":", "self", ".", "expr", "=", "expr", "self", ".", "globals", "=", "globals", "self", ".", "locals", "=", "locals" ]
Do some setup after initialisation. :param expr: The expression to be executed. :type expr: NodeNG or None :param globals:The globals dictionary to execute with. :type globals: NodeNG or None :param locals: The locals dictionary to execute with. :type locals: NodeNG or None
[ "Do", "some", "setup", "after", "initialisation", "." ]
language: python | partition: train | avg_line_len: 31.8
repo: priestc/moneywagon
path: moneywagon/services/blockchain_services.py
url: https://github.com/priestc/moneywagon/blob/00518f1f557dcca8b3031f46d3564c2baa0227a3/moneywagon/services/blockchain_services.py#L339-L344
code:

def push_tx(self, crypto, tx_hex):
    """
    This method is untested.
    """
    url = "%s/pushtx" % self.base_url
    return self.post_url(url, {'hex': tx_hex}).content
[ "def", "push_tx", "(", "self", ",", "crypto", ",", "tx_hex", ")", ":", "url", "=", "\"%s/pushtx\"", "%", "self", ".", "base_url", "return", "self", ".", "post_url", "(", "url", ",", "{", "'hex'", ":", "tx_hex", "}", ")", ".", "content" ]
This method is untested.
[ "This", "method", "is", "untested", "." ]
language: python | partition: train | avg_line_len: 31.166667
repo: autokey/autokey
path: lib/autokey/model.py
url: https://github.com/autokey/autokey/blob/35decb72f286ce68cd2a1f09ace8891a520b58d1/lib/autokey/model.py#L797-L818
code:

def calculate_input(self, buffer):
    """
    Calculate how many keystrokes were used in triggering this phrase.
    """
    # TODO: This function is unused?
    if TriggerMode.ABBREVIATION in self.modes:
        if self._should_trigger_abbreviation(buffer):
            if self.immediate:
                return len(self._get_trigger_abbreviation(buffer))
            else:
                return len(self._get_trigger_abbreviation(buffer)) + 1

    # TODO - re-enable me if restoring predictive functionality
    #if TriggerMode.PREDICTIVE in self.modes:
    #    if self._should_trigger_predictive(buffer):
    #        return ConfigManager.SETTINGS[PREDICTIVE_LENGTH]

    if TriggerMode.HOTKEY in self.modes:
        if buffer == '':
            return len(self.modifiers) + 1

    return self.parent.calculate_input(buffer)
[ "def", "calculate_input", "(", "self", ",", "buffer", ")", ":", "# TODO: This function is unused?", "if", "TriggerMode", ".", "ABBREVIATION", "in", "self", ".", "modes", ":", "if", "self", ".", "_should_trigger_abbreviation", "(", "buffer", ")", ":", "if", "self", ".", "immediate", ":", "return", "len", "(", "self", ".", "_get_trigger_abbreviation", "(", "buffer", ")", ")", "else", ":", "return", "len", "(", "self", ".", "_get_trigger_abbreviation", "(", "buffer", ")", ")", "+", "1", "# TODO - re-enable me if restoring predictive functionality", "#if TriggerMode.PREDICTIVE in self.modes:", "# if self._should_trigger_predictive(buffer):", "# return ConfigManager.SETTINGS[PREDICTIVE_LENGTH]", "if", "TriggerMode", ".", "HOTKEY", "in", "self", ".", "modes", ":", "if", "buffer", "==", "''", ":", "return", "len", "(", "self", ".", "modifiers", ")", "+", "1", "return", "self", ".", "parent", ".", "calculate_input", "(", "buffer", ")" ]
Calculate how many keystrokes were used in triggering this phrase.
[ "Calculate", "how", "many", "keystrokes", "were", "used", "in", "triggering", "this", "phrase", "." ]
language: python | partition: train | avg_line_len: 40.045455
repo: asobrien/randomOrg
path: randomorg/_rand_core.py
url: https://github.com/asobrien/randomOrg/blob/76c3f167c5689992d32cd1f827816254158160f7/randomorg/_rand_core.py#L134-L143
code:

def quota(ip=None):
    """Check your quota."""
    # TODO: Add arbitrary user defined IP check
    url = 'http://www.random.org/quota/?format=plain'
    data = urlopen(url)
    credit = int(data.read().strip())
    if data.code == 200:
        return credit
    else:
        return "ERROR: Server responded with code %s" % data.code
[ "def", "quota", "(", "ip", "=", "None", ")", ":", "# TODO: Add arbitrary user defined IP check", "url", "=", "'http://www.random.org/quota/?format=plain'", "data", "=", "urlopen", "(", "url", ")", "credit", "=", "int", "(", "data", ".", "read", "(", ")", ".", "strip", "(", ")", ")", "if", "data", ".", "code", "==", "200", ":", "return", "credit", "else", ":", "return", "\"ERROR: Server responded with code %s\"", "%", "data", ".", "code" ]
Check your quota.
[ "Check", "your", "quota", "." ]
language: python | partition: train | avg_line_len: 32.5
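Usage is a bare call; per the code above the function returns the remaining allowance as an int on HTTP 200, and an error string otherwise:

remaining = quota()
print(remaining)  # an int on success, or "ERROR: Server responded with code ..."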
repo: cloudsmith-io/cloudsmith-cli
path: setup.py
url: https://github.com/cloudsmith-io/cloudsmith-cli/blob/5bc245ca5d0bfa85380be48e7c206b4c86cc6c8e/setup.py#L27-L37
code:

def get_long_description():
    """Grok the readme, turn it into whine (rst)."""
    root_path = get_root_path()
    readme_path = os.path.join(root_path, "README.md")

    try:
        import pypandoc

        return pypandoc.convert(readme_path, "rst").strip()
    except ImportError:
        return "Cloudsmith CLI"
[ "def", "get_long_description", "(", ")", ":", "root_path", "=", "get_root_path", "(", ")", "readme_path", "=", "os", ".", "path", ".", "join", "(", "root_path", ",", "\"README.md\"", ")", "try", ":", "import", "pypandoc", "return", "pypandoc", ".", "convert", "(", "readme_path", ",", "\"rst\"", ")", ".", "strip", "(", ")", "except", "ImportError", ":", "return", "\"Cloudsmith CLI\"" ]
Grok the readme, turn it into whine (rst).
[ "Grok", "the", "readme", "turn", "it", "into", "whine", "(", "rst", ")", "." ]
language: python | partition: train | avg_line_len: 28
repo: jsvine/spectra
path: spectra/grapefruit.py
url: https://github.com/jsvine/spectra/blob/2269a0ae9b5923154b15bd661fb81179608f7ec2/spectra/grapefruit.py#L495-L533
code:

def HsvToRgb(h, s, v):
    '''Convert the color from HSV coordinates to RGB.

    Parameters:
      :h: The Hue component value [0...1]
      :s: The Saturation component value [0...1]
      :v: The Value component [0...1]

    Returns:
      The color as an (r, g, b) tuple in the range:
      r[0...1], g[0...1], b[0...1]

    >>> Color.HsvToRgb(30.0, 1.0, 0.5)
    (0.5, 0.25, 0.0)
    '''
    if s == 0:
        return (v, v, v)  # achromatic (gray)

    h /= 60.0
    h = h % 6.0

    i = int(h)
    f = h - i
    if not(i & 1):
        f = 1 - f  # if i is even

    m = v * (1.0 - s)
    n = v * (1.0 - (s * f))

    if i == 0:
        return (v, n, m)
    if i == 1:
        return (n, v, m)
    if i == 2:
        return (m, v, n)
    if i == 3:
        return (m, n, v)
    if i == 4:
        return (n, m, v)
    return (v, m, n)
[ "def", "HsvToRgb", "(", "h", ",", "s", ",", "v", ")", ":", "if", "s", "==", "0", ":", "return", "(", "v", ",", "v", ",", "v", ")", "# achromatic (gray)", "h", "/=", "60.0", "h", "=", "h", "%", "6.0", "i", "=", "int", "(", "h", ")", "f", "=", "h", "-", "i", "if", "not", "(", "i", "&", "1", ")", ":", "f", "=", "1", "-", "f", "# if i is even", "m", "=", "v", "*", "(", "1.0", "-", "s", ")", "n", "=", "v", "*", "(", "1.0", "-", "(", "s", "*", "f", ")", ")", "if", "i", "==", "0", ":", "return", "(", "v", ",", "n", ",", "m", ")", "if", "i", "==", "1", ":", "return", "(", "n", ",", "v", ",", "m", ")", "if", "i", "==", "2", ":", "return", "(", "m", ",", "v", ",", "n", ")", "if", "i", "==", "3", ":", "return", "(", "m", ",", "n", ",", "v", ")", "if", "i", "==", "4", ":", "return", "(", "n", ",", "m", ",", "v", ")", "return", "(", "v", ",", "m", ",", "n", ")" ]
Convert the color from RGB coordinates to HSV. Parameters: :h: The Hus component value [0...1] :s: The Saturation component value [0...1] :v: The Value component [0...1] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] >>> Color.HslToRgb(30.0, 1.0, 0.5) (1.0, 0.5, 0.0)
[ "Convert", "the", "color", "from", "RGB", "coordinates", "to", "HSV", "." ]
language: python | partition: train | avg_line_len: 19.74359
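Working the corrected doctest above through by hand: for (h, s, v) = (30.0, 1.0, 0.5), h/60 = 0.5 so i = 0 and f = 0.5; i is even, so f becomes 1 - 0.5 = 0.5. Then m = 0.5 * (1 - 1.0) = 0.0 and n = 0.5 * (1 - 1.0 * 0.5) = 0.25, and the i == 0 branch returns (v, n, m) = (0.5, 0.25, 0.0): a half-intensity orange.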
repo: istresearch/scrapy-cluster
path: kafka-monitor/kafka_monitor.py
url: https://github.com/istresearch/scrapy-cluster/blob/13aaed2349af5d792d6bcbfcadc5563158aeb599/kafka-monitor/kafka_monitor.py#L118-L141
code:

def _setup_stats(self):
    '''
    Sets up the stats collection
    '''
    self.stats_dict = {}

    redis_conn = redis.Redis(host=self.settings['REDIS_HOST'],
                             port=self.settings['REDIS_PORT'],
                             db=self.settings.get('REDIS_DB'))

    try:
        redis_conn.info()
        self.logger.debug("Connected to Redis in StatsCollector Setup")
        self.redis_conn = redis_conn
    except ConnectionError:
        self.logger.warn("Failed to connect to Redis in StatsCollector"
                         " Setup, no stats will be collected")
        return

    if self.settings['STATS_TOTAL']:
        self._setup_stats_total(redis_conn)

    if self.settings['STATS_PLUGINS']:
        self._setup_stats_plugins(redis_conn)
[ "def", "_setup_stats", "(", "self", ")", ":", "self", ".", "stats_dict", "=", "{", "}", "redis_conn", "=", "redis", ".", "Redis", "(", "host", "=", "self", ".", "settings", "[", "'REDIS_HOST'", "]", ",", "port", "=", "self", ".", "settings", "[", "'REDIS_PORT'", "]", ",", "db", "=", "self", ".", "settings", ".", "get", "(", "'REDIS_DB'", ")", ")", "try", ":", "redis_conn", ".", "info", "(", ")", "self", ".", "logger", ".", "debug", "(", "\"Connected to Redis in StatsCollector Setup\"", ")", "self", ".", "redis_conn", "=", "redis_conn", "except", "ConnectionError", ":", "self", ".", "logger", ".", "warn", "(", "\"Failed to connect to Redis in StatsCollector\"", "\" Setup, no stats will be collected\"", ")", "return", "if", "self", ".", "settings", "[", "'STATS_TOTAL'", "]", ":", "self", ".", "_setup_stats_total", "(", "redis_conn", ")", "if", "self", ".", "settings", "[", "'STATS_PLUGINS'", "]", ":", "self", ".", "_setup_stats_plugins", "(", "redis_conn", ")" ]
Sets up the stats collection
[ "Sets", "up", "the", "stats", "collection" ]
language: python | partition: train | avg_line_len: 34.625
repo: blockstack/blockstack-core
path: blockstack/lib/scripts.py
url: https://github.com/blockstack/blockstack-core/blob/1dcfdd39b152d29ce13e736a6a1a0981401a0505/blockstack/lib/scripts.py#L427-L456
code:

def check_namespace(namespace_id):
    """
    Verify that a namespace ID is well-formed

    >>> check_namespace(123)
    False
    >>> check_namespace(None)
    False
    >>> check_namespace('')
    False
    >>> check_namespace('abcd')
    True
    >>> check_namespace('Abcd')
    False
    >>> check_namespace('a+bcd')
    False
    >>> check_namespace('.abcd')
    False
    >>> check_namespace('abcdabcdabcdabcdabcd')
    False
    >>> check_namespace('abcdabcdabcdabcdabc')
    True
    """
    if type(namespace_id) not in [str, unicode]:
        return False

    if not is_namespace_valid(namespace_id):
        return False

    return True
[ "def", "check_namespace", "(", "namespace_id", ")", ":", "if", "type", "(", "namespace_id", ")", "not", "in", "[", "str", ",", "unicode", "]", ":", "return", "False", "if", "not", "is_namespace_valid", "(", "namespace_id", ")", ":", "return", "False", "return", "True" ]
Verify that a namespace ID is well-formed >>> check_namespace(123) False >>> check_namespace(None) False >>> check_namespace('') False >>> check_namespace('abcd') True >>> check_namespace('Abcd') False >>> check_namespace('a+bcd') False >>> check_namespace('.abcd') False >>> check_namespace('abcdabcdabcdabcdabcd') False >>> check_namespace('abcdabcdabcdabcdabc') True
[ "Verify", "that", "a", "namespace", "ID", "is", "well", "-", "formed" ]
language: python | partition: train | avg_line_len: 20.733333
repo: Azure/azure-sdk-for-python
path: azure-mgmt-containerregistry/azure/mgmt/containerregistry/container_registry_management_client.py
url: https://github.com/Azure/azure-sdk-for-python/blob/d7306fde32f60a293a7567678692bdad31e4b667/azure-mgmt-containerregistry/azure/mgmt/containerregistry/container_registry_management_client.py#L144-L154
code:

def builds(self):
    """Instance depends on the API version:

    * 2018-02-01-preview: :class:`BuildsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.operations.BuildsOperations>`
    """
    api_version = self._get_api_version('builds')
    if api_version == '2018-02-01-preview':
        from .v2018_02_01_preview.operations import BuildsOperations as OperationClass
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
[ "def", "builds", "(", "self", ")", ":", "api_version", "=", "self", ".", "_get_api_version", "(", "'builds'", ")", "if", "api_version", "==", "'2018-02-01-preview'", ":", "from", ".", "v2018_02_01_preview", ".", "operations", "import", "BuildsOperations", "as", "OperationClass", "else", ":", "raise", "NotImplementedError", "(", "\"APIVersion {} is not available\"", ".", "format", "(", "api_version", ")", ")", "return", "OperationClass", "(", "self", ".", "_client", ",", "self", ".", "config", ",", "Serializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ",", "Deserializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ")" ]
Instance depends on the API version: * 2018-02-01-preview: :class:`BuildsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.operations.BuildsOperations>`
[ "Instance", "depends", "on", "the", "API", "version", ":" ]
python
test
59.181818
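The `builds` property above is one arm of the multi-API-version dispatch this client uses: look up the configured version for an operation group, import the matching operations class, and fail loudly for versions the package does not ship. A runnable reduction of that shape, with illustrative names rather than Azure's real ones:

# Illustrative registry; the real client imports versioned operations classes.
REGISTRY = {('builds', '2018-02-01-preview'): 'BuildsOperations (2018-02-01-preview)'}

def resolve(group, api_version):
    try:
        return REGISTRY[(group, api_version)]
    except KeyError:
        raise NotImplementedError(
            "APIVersion {} is not available".format(api_version))

print(resolve('builds', '2018-02-01-preview'))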
pandas-dev/pandas
pandas/util/_decorators.py
https://github.com/pandas-dev/pandas/blob/9feb3ad92cc0397a04b665803a49299ee7aa1037/pandas/util/_decorators.py#L77-L190
def deprecate_kwarg(old_arg_name, new_arg_name, mapping=None, stacklevel=2):
    """
    Decorator to deprecate a keyword argument of a function.

    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise warning that
        ``old_arg_name`` keyword is deprecated.
    mapping : dict or callable
        If mapping is present, use it to translate old arguments to
        new arguments. A callable must do its own value checking;
        values not found in a dict will be forwarded unchanged.

    Examples
    --------
    The following deprecates 'cols', using 'columns' instead

    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    ...
    >>> f(columns='should work ok')
    should work ok

    >>> f(cols='should raise warning')
    FutureWarning: cols is deprecated, use columns instead
      warnings.warn(msg, FutureWarning)
    should raise warning

    >>> f(cols='should error', columns="can\'t pass both")
    TypeError: Can only specify 'cols' or 'columns', not both

    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def f(new=False):
    ...     print('yes!' if new else 'no!')
    ...
    >>> f(old='yes')
    FutureWarning: old='yes' is deprecated, use new=True instead
      warnings.warn(msg, FutureWarning)
    yes!

    To raise a warning that a keyword will be removed entirely in the future

    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name=None)
    ... def f(cols='', another_param=''):
    ...     print(cols)
    ...
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version. Please take steps to stop the use of 'cols'
    should raise warning
    >>> f(another_param='should not raise warning')
    should not raise warning
    >>> f(cols='should raise warning', another_param='')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version. Please take steps to stop the use of 'cols'
    should raise warning
    """

    if mapping is not None and not hasattr(mapping, 'get') and \
            not callable(mapping):
        raise TypeError("mapping from old to new argument values "
                        "must be dict or callable!")

    def _deprecate_kwarg(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            old_arg_value = kwargs.pop(old_arg_name, None)

            if new_arg_name is None and old_arg_value is not None:
                msg = (
                    "the '{old_name}' keyword is deprecated and will be "
                    "removed in a future version. "
                    "Please take steps to stop the use of '{old_name}'"
                ).format(old_name=old_arg_name)
                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                kwargs[old_arg_name] = old_arg_value
                return func(*args, **kwargs)

            if old_arg_value is not None:
                if mapping is not None:
                    if hasattr(mapping, 'get'):
                        new_arg_value = mapping.get(old_arg_value,
                                                    old_arg_value)
                    else:
                        new_arg_value = mapping(old_arg_value)
                    msg = ("the {old_name}={old_val!r} keyword is deprecated, "
                           "use {new_name}={new_val!r} instead"
                           ).format(old_name=old_arg_name,
                                    old_val=old_arg_value,
                                    new_name=new_arg_name,
                                    new_val=new_arg_value)
                else:
                    new_arg_value = old_arg_value
                    msg = ("the '{old_name}' keyword is deprecated, "
                           "use '{new_name}' instead"
                           ).format(old_name=old_arg_name,
                                    new_name=new_arg_name)

                warnings.warn(msg, FutureWarning, stacklevel=stacklevel)
                if kwargs.get(new_arg_name, None) is not None:
                    msg = ("Can only specify '{old_name}' or '{new_name}', "
                           "not both").format(old_name=old_arg_name,
                                              new_name=new_arg_name)
                    raise TypeError(msg)
                else:
                    kwargs[new_arg_name] = new_arg_value
            return func(*args, **kwargs)
        return wrapper
    return _deprecate_kwarg
[ "def", "deprecate_kwarg", "(", "old_arg_name", ",", "new_arg_name", ",", "mapping", "=", "None", ",", "stacklevel", "=", "2", ")", ":", "if", "mapping", "is", "not", "None", "and", "not", "hasattr", "(", "mapping", ",", "'get'", ")", "and", "not", "callable", "(", "mapping", ")", ":", "raise", "TypeError", "(", "\"mapping from old to new argument values \"", "\"must be dict or callable!\"", ")", "def", "_deprecate_kwarg", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "old_arg_value", "=", "kwargs", ".", "pop", "(", "old_arg_name", ",", "None", ")", "if", "new_arg_name", "is", "None", "and", "old_arg_value", "is", "not", "None", ":", "msg", "=", "(", "\"the '{old_name}' keyword is deprecated and will be \"", "\"removed in a future version. \"", "\"Please take steps to stop the use of '{old_name}'\"", ")", ".", "format", "(", "old_name", "=", "old_arg_name", ")", "warnings", ".", "warn", "(", "msg", ",", "FutureWarning", ",", "stacklevel", "=", "stacklevel", ")", "kwargs", "[", "old_arg_name", "]", "=", "old_arg_value", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "old_arg_value", "is", "not", "None", ":", "if", "mapping", "is", "not", "None", ":", "if", "hasattr", "(", "mapping", ",", "'get'", ")", ":", "new_arg_value", "=", "mapping", ".", "get", "(", "old_arg_value", ",", "old_arg_value", ")", "else", ":", "new_arg_value", "=", "mapping", "(", "old_arg_value", ")", "msg", "=", "(", "\"the {old_name}={old_val!r} keyword is deprecated, \"", "\"use {new_name}={new_val!r} instead\"", ")", ".", "format", "(", "old_name", "=", "old_arg_name", ",", "old_val", "=", "old_arg_value", ",", "new_name", "=", "new_arg_name", ",", "new_val", "=", "new_arg_value", ")", "else", ":", "new_arg_value", "=", "old_arg_value", "msg", "=", "(", "\"the '{old_name}' keyword is deprecated, \"", "\"use '{new_name}' instead\"", ")", ".", "format", "(", "old_name", "=", "old_arg_name", ",", "new_name", "=", "new_arg_name", ")", "warnings", ".", "warn", "(", "msg", ",", "FutureWarning", ",", "stacklevel", "=", "stacklevel", ")", "if", "kwargs", ".", "get", "(", "new_arg_name", ",", "None", ")", "is", "not", "None", ":", "msg", "=", "(", "\"Can only specify '{old_name}' or '{new_name}', \"", "\"not both\"", ")", ".", "format", "(", "old_name", "=", "old_arg_name", ",", "new_name", "=", "new_arg_name", ")", "raise", "TypeError", "(", "msg", ")", "else", ":", "kwargs", "[", "new_arg_name", "]", "=", "new_arg_value", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "_deprecate_kwarg" ]
Decorator to deprecate a keyword argument of a function.

    Parameters
    ----------
    old_arg_name : str
        Name of argument in function to deprecate
    new_arg_name : str or None
        Name of preferred argument in function. Use None to raise warning that
        ``old_arg_name`` keyword is deprecated.
    mapping : dict or callable
        If mapping is present, use it to translate old arguments to
        new arguments. A callable must do its own value checking;
        values not found in a dict will be forwarded unchanged.

    Examples
    --------
    The following deprecates 'cols', using 'columns' instead

    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name='columns')
    ... def f(columns=''):
    ...     print(columns)
    ...
    >>> f(columns='should work ok')
    should work ok

    >>> f(cols='should raise warning')
    FutureWarning: cols is deprecated, use columns instead
      warnings.warn(msg, FutureWarning)
    should raise warning

    >>> f(cols='should error', columns="can\'t pass both")
    TypeError: Can only specify 'cols' or 'columns', not both

    >>> @deprecate_kwarg('old', 'new', {'yes': True, 'no': False})
    ... def f(new=False):
    ...     print('yes!' if new else 'no!')
    ...
    >>> f(old='yes')
    FutureWarning: old='yes' is deprecated, use new=True instead
      warnings.warn(msg, FutureWarning)
    yes!

    To raise a warning that a keyword will be removed entirely in the future

    >>> @deprecate_kwarg(old_arg_name='cols', new_arg_name=None)
    ... def f(cols='', another_param=''):
    ...     print(cols)
    ...
    >>> f(cols='should raise warning')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version. Please take steps to stop the use of 'cols'
    should raise warning
    >>> f(another_param='should not raise warning')
    should not raise warning
    >>> f(cols='should raise warning', another_param='')
    FutureWarning: the 'cols' keyword is deprecated and will be removed in a
    future version. Please take steps to stop the use of 'cols'
    should raise warning
[ "Decorator", "to", "deprecate", "a", "keyword", "argument", "of", "a", "function", "." ]
python
train
40.263158
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/power.py
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L54-L88
def set_power_state(self, desired_state):
    """Set power state of this node

    :param desired_state: Desired power state to set on the node
    :raises: InvalidParameterValue if required UCS parameters are
        missing.
    :raises: UcsOperationError on an error from UcsHandle Client.
    :returns: Power state of the given node
    """

    rn_array = [self.helper.service_profile,
                ManagedObject(NamingId.LS_POWER).MakeRn()]
    try:
        ls_power = ucs_helper.get_managed_object(self.helper.handle,
                       LsPower.ClassId(),
                       {LsPower.DN: UcsUtils.MakeDn(rn_array)})
        if not ls_power:
            raise exception.UcsOperationError("set_power_state",
                      "Failed to get power MO,"
                      " configure valid service-profile.")
        else:
            ls_power_set = self.helper.handle.SetManagedObject(
                               ls_power,
                               LsPower.ClassId(),
                               {LsPower.STATE: desired_state},
                               dumpXml=YesOrNo.TRUE
                               )
            if ls_power_set:
                power = ls_power_set.pop()
                return power.getattr(LsPower.STATE)
            else:
                return states.ERROR
    except Exception as ex:
        raise exception.UcsOperationError("set_power_state",
                  "Failed to get power MO,"
                  " configure valid service-profile.")
[ "def", "set_power_state", "(", "self", ",", "desired_state", ")", ":", "rn_array", "=", "[", "self", ".", "helper", ".", "service_profile", ",", "ManagedObject", "(", "NamingId", ".", "LS_POWER", ")", ".", "MakeRn", "(", ")", "]", "try", ":", "ls_power", "=", "ucs_helper", ".", "get_managed_object", "(", "self", ".", "helper", ".", "handle", ",", "LsPower", ".", "ClassId", "(", ")", ",", "{", "LsPower", ".", "DN", ":", "UcsUtils", ".", "MakeDn", "(", "rn_array", ")", "}", ")", "if", "not", "ls_power", ":", "raise", "exception", ".", "UcsOperationError", "(", "\"set_power_state\"", ",", "\"Failed to get power MO,\"", "\" configure valid service-profile.\"", ")", "else", ":", "ls_power_set", "=", "self", ".", "helper", ".", "handle", ".", "SetManagedObject", "(", "ls_power", ",", "LsPower", ".", "ClassId", "(", ")", ",", "{", "LsPower", ".", "STATE", ":", "desired_state", "}", ",", "dumpXml", "=", "YesOrNo", ".", "TRUE", ")", "if", "ls_power_set", ":", "power", "=", "ls_power_set", ".", "pop", "(", ")", "return", "power", ".", "getattr", "(", "LsPower", ".", "STATE", ")", "else", ":", "return", "states", ".", "ERROR", "except", "Exception", "as", "ex", ":", "raise", "exception", ".", "UcsOperationError", "(", "\"set_power_state\"", ",", "\"Failed to get power MO,\"", "\"configure valid servie-profile.\"", ")" ]
Set power state of this node

    :param desired_state: Desired power state to set on the node
    :raises: InvalidParameterValue if required UCS parameters are
        missing.
    :raises: UcsOperationError on an error from UcsHandle Client.
    :returns: Power state of the given node
[ "Set", "power", "state", "of", "this", "node" ]
python
train
46.971429
devassistant/devassistant
devassistant/gui/run_window.py
https://github.com/devassistant/devassistant/blob/2dbfeaa666a64127263664d18969c55d19ecc83e/devassistant/gui/run_window.py#L308-L327
def logs_update(self): """ Function updates logs. """ Gdk.threads_enter() if not self.debugging: self.debugging = True self.debug_btn.set_label('Info logs') else: self.debugging = False self.debug_btn.set_label('Debug logs') for record in self.debug_logs['logs']: if self.debugging: # Create a new root tree element if getattr(record, 'event_type', '') != "cmd_retcode": self.store.append([format_entry(record, show_level=True, colorize=True)]) else: if int(record.levelno) > 10: self.store.append([format_entry(record, colorize=True)]) Gdk.threads_leave()
[ "def", "logs_update", "(", "self", ")", ":", "Gdk", ".", "threads_enter", "(", ")", "if", "not", "self", ".", "debugging", ":", "self", ".", "debugging", "=", "True", "self", ".", "debug_btn", ".", "set_label", "(", "'Info logs'", ")", "else", ":", "self", ".", "debugging", "=", "False", "self", ".", "debug_btn", ".", "set_label", "(", "'Debug logs'", ")", "for", "record", "in", "self", ".", "debug_logs", "[", "'logs'", "]", ":", "if", "self", ".", "debugging", ":", "# Create a new root tree element", "if", "getattr", "(", "record", ",", "'event_type'", ",", "''", ")", "!=", "\"cmd_retcode\"", ":", "self", ".", "store", ".", "append", "(", "[", "format_entry", "(", "record", ",", "show_level", "=", "True", ",", "colorize", "=", "True", ")", "]", ")", "else", ":", "if", "int", "(", "record", ".", "levelno", ")", ">", "10", ":", "self", ".", "store", ".", "append", "(", "[", "format_entry", "(", "record", ",", "colorize", "=", "True", ")", "]", ")", "Gdk", ".", "threads_leave", "(", ")" ]
Function updates logs.
[ "Function", "updates", "logs", "." ]
python
train
38.05
last-partizan/pytils
pytils/utils.py
https://github.com/last-partizan/pytils/blob/1c570a32b15e564bc68587b8207e32d464e61d08/pytils/utils.py#L30-L44
def check_positive(value, strict=False): """ Checks if variable is positive @param value: value to check @type value: C{integer types}, C{float} or C{Decimal} @return: None when check successful @raise ValueError: check failed """ if not strict and value < 0: raise ValueError("Value must be positive or zero, not %s" % str(value)) if strict and value <= 0: raise ValueError("Value must be positive, not %s" % str(value))
[ "def", "check_positive", "(", "value", ",", "strict", "=", "False", ")", ":", "if", "not", "strict", "and", "value", "<", "0", ":", "raise", "ValueError", "(", "\"Value must be positive or zero, not %s\"", "%", "str", "(", "value", ")", ")", "if", "strict", "and", "value", "<=", "0", ":", "raise", "ValueError", "(", "\"Value must be positive, not %s\"", "%", "str", "(", "value", ")", ")" ]
Checks if variable is positive @param value: value to check @type value: C{integer types}, C{float} or C{Decimal} @return: None when check successful @raise ValueError: check failed
[ "Checks", "if", "variable", "is", "positive" ]
python
train
30.8
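Unlike check_namespace above, this function ships without doctests; a short usage sketch of both modes (the error message text is taken from the source):

from decimal import Decimal
from pytils.utils import check_positive

check_positive(0)                             # ok: zero is allowed when strict=False
check_positive(Decimal('1.5'), strict=True)   # ok: strictly positive
try:
    check_positive(0, strict=True)
except ValueError as e:
    print(e)                                  # Value must be positive, not 0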
apache/airflow
airflow/bin/cli.py
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/bin/cli.py#L567-L575
def dag_state(args): """ Returns the state of a DagRun at the command line. >>> airflow dag_state tutorial 2015-01-01T00:00:00.000000 running """ dag = get_dag(args) dr = DagRun.find(dag.dag_id, execution_date=args.execution_date) print(dr[0].state if len(dr) > 0 else None)
[ "def", "dag_state", "(", "args", ")", ":", "dag", "=", "get_dag", "(", "args", ")", "dr", "=", "DagRun", ".", "find", "(", "dag", ".", "dag_id", ",", "execution_date", "=", "args", ".", "execution_date", ")", "print", "(", "dr", "[", "0", "]", ".", "state", "if", "len", "(", "dr", ")", ">", "0", "else", "None", ")" ]
Returns the state of a DagRun at the command line. >>> airflow dag_state tutorial 2015-01-01T00:00:00.000000 running
[ "Returns", "the", "state", "of", "a", "DagRun", "at", "the", "command", "line", ".", ">>>", "airflow", "dag_state", "tutorial", "2015", "-", "01", "-", "01T00", ":", "00", ":", "00", ".", "000000", "running" ]
python
test
33.111111
CalebBell/thermo
thermo/interface.py
https://github.com/CalebBell/thermo/blob/3857ed023a3e64fd3039a32d53576c24990ef1c3/thermo/interface.py#L1334-L1371
def calculate(self, T, P, zs, ws, method): r'''Method to calculate surface tension of a liquid mixture at temperature `T`, pressure `P`, mole fractions `zs` and weight fractions `ws` with a given method. This method has no exception handling; see `mixture_property` for that. Parameters ---------- T : float Temperature at which to calculate the property, [K] P : float Pressure at which to calculate the property, [Pa] zs : list[float] Mole fractions of all species in the mixture, [-] ws : list[float] Weight fractions of all species in the mixture, [-] method : str Name of the method to use Returns ------- sigma : float Surface tension of the liquid at given conditions, [N/m] ''' if method == SIMPLE: sigmas = [i(T) for i in self.SurfaceTensions] return mixing_simple(zs, sigmas) elif method == DIGUILIOTEJA: return Diguilio_Teja(T=T, xs=zs, sigmas_Tb=self.sigmas_Tb, Tbs=self.Tbs, Tcs=self.Tcs) elif method == WINTERFELDSCRIVENDAVIS: sigmas = [i(T) for i in self.SurfaceTensions] rhoms = [1./i(T, P) for i in self.VolumeLiquids] return Winterfeld_Scriven_Davis(zs, sigmas, rhoms) else: raise Exception('Method not valid')
[ "def", "calculate", "(", "self", ",", "T", ",", "P", ",", "zs", ",", "ws", ",", "method", ")", ":", "if", "method", "==", "SIMPLE", ":", "sigmas", "=", "[", "i", "(", "T", ")", "for", "i", "in", "self", ".", "SurfaceTensions", "]", "return", "mixing_simple", "(", "zs", ",", "sigmas", ")", "elif", "method", "==", "DIGUILIOTEJA", ":", "return", "Diguilio_Teja", "(", "T", "=", "T", ",", "xs", "=", "zs", ",", "sigmas_Tb", "=", "self", ".", "sigmas_Tb", ",", "Tbs", "=", "self", ".", "Tbs", ",", "Tcs", "=", "self", ".", "Tcs", ")", "elif", "method", "==", "WINTERFELDSCRIVENDAVIS", ":", "sigmas", "=", "[", "i", "(", "T", ")", "for", "i", "in", "self", ".", "SurfaceTensions", "]", "rhoms", "=", "[", "1.", "/", "i", "(", "T", ",", "P", ")", "for", "i", "in", "self", ".", "VolumeLiquids", "]", "return", "Winterfeld_Scriven_Davis", "(", "zs", ",", "sigmas", ",", "rhoms", ")", "else", ":", "raise", "Exception", "(", "'Method not valid'", ")" ]
r'''Method to calculate surface tension of a liquid mixture at temperature `T`, pressure `P`, mole fractions `zs` and weight fractions `ws` with a given method. This method has no exception handling; see `mixture_property` for that. Parameters ---------- T : float Temperature at which to calculate the property, [K] P : float Pressure at which to calculate the property, [Pa] zs : list[float] Mole fractions of all species in the mixture, [-] ws : list[float] Weight fractions of all species in the mixture, [-] method : str Name of the method to use Returns ------- sigma : float Surface tension of the liquid at given conditions, [N/m]
[ "r", "Method", "to", "calculate", "surface", "tension", "of", "a", "liquid", "mixture", "at", "temperature", "T", "pressure", "P", "mole", "fractions", "zs", "and", "weight", "fractions", "ws", "with", "a", "given", "method", "." ]
python
valid
38.026316
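For orientation, the SIMPLE branch of `calculate` is just a mole-fraction weighted average, sigma_mix = sum(z_i * sigma_i). A hand-worked sketch with made-up values (not real component data):

zs = [0.4, 0.6]                      # mole fractions
sigmas = [0.0217, 0.0588]            # hypothetical pure-component surface tensions, N/m
sigma_mix = sum(z * s for z, s in zip(zs, sigmas))
print(round(sigma_mix, 5))           # 0.04396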
jbloomlab/phydms
phydmslib/models.py
https://github.com/jbloomlab/phydms/blob/9cdebc10bafbe543c552d79486c7f950780ed3c0/phydmslib/models.py#L790-L808
def _update_dprx(self): """Update `dprx`.""" if 'beta' in self.freeparams: for r in range(self.nsites): self.dprx['beta'][r] = self.prx[r] * (self.ln_pi_codon[r] - scipy.dot(self.ln_pi_codon[r], self.prx[r])) if 'eta' in self.freeparams: boolterm = scipy.ndarray(N_CODON, dtype='float') with scipy.errstate(divide='raise', under='raise', over='raise', invalid='raise'): for i in range(N_NT - 1): boolterm.fill(0) for j in range(3): boolterm += ((i <= CODON_NT_INDEX[j]).astype('float') / (self.eta[i] - (i == CODON_NT_INDEX[j]).astype( 'float'))) for r in range(self.nsites): self.dprx['eta'][i][r] = self.prx[r] * (boolterm - scipy.dot(boolterm, self.prx[r]) / self.prx[r].sum())
[ "def", "_update_dprx", "(", "self", ")", ":", "if", "'beta'", "in", "self", ".", "freeparams", ":", "for", "r", "in", "range", "(", "self", ".", "nsites", ")", ":", "self", ".", "dprx", "[", "'beta'", "]", "[", "r", "]", "=", "self", ".", "prx", "[", "r", "]", "*", "(", "self", ".", "ln_pi_codon", "[", "r", "]", "-", "scipy", ".", "dot", "(", "self", ".", "ln_pi_codon", "[", "r", "]", ",", "self", ".", "prx", "[", "r", "]", ")", ")", "if", "'eta'", "in", "self", ".", "freeparams", ":", "boolterm", "=", "scipy", ".", "ndarray", "(", "N_CODON", ",", "dtype", "=", "'float'", ")", "with", "scipy", ".", "errstate", "(", "divide", "=", "'raise'", ",", "under", "=", "'raise'", ",", "over", "=", "'raise'", ",", "invalid", "=", "'raise'", ")", ":", "for", "i", "in", "range", "(", "N_NT", "-", "1", ")", ":", "boolterm", ".", "fill", "(", "0", ")", "for", "j", "in", "range", "(", "3", ")", ":", "boolterm", "+=", "(", "(", "i", "<=", "CODON_NT_INDEX", "[", "j", "]", ")", ".", "astype", "(", "'float'", ")", "/", "(", "self", ".", "eta", "[", "i", "]", "-", "(", "i", "==", "CODON_NT_INDEX", "[", "j", "]", ")", ".", "astype", "(", "'float'", ")", ")", ")", "for", "r", "in", "range", "(", "self", ".", "nsites", ")", ":", "self", ".", "dprx", "[", "'eta'", "]", "[", "i", "]", "[", "r", "]", "=", "self", ".", "prx", "[", "r", "]", "*", "(", "boolterm", "-", "scipy", ".", "dot", "(", "boolterm", ",", "self", ".", "prx", "[", "r", "]", ")", "/", "self", ".", "prx", "[", "r", "]", ".", "sum", "(", ")", ")" ]
Update `dprx`.
[ "Update", "dprx", "." ]
python
train
52.736842
cloud9ers/gurumate
environment/lib/python2.7/site-packages/IPython/external/pexpect/_pexpect.py
https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/IPython/external/pexpect/_pexpect.py#L1580-L1602
def __select (self, iwtd, owtd, ewtd, timeout=None): """This is a wrapper around select.select() that ignores signals. If select.select raises a select.error exception and errno is an EINTR error then it is ignored. Mainly this is used to ignore sigwinch (terminal resize). """ # if select() is interrupted by a signal (errno==EINTR) then # we loop back and enter the select() again. if timeout is not None: end_time = time.time() + timeout while True: try: return select.select (iwtd, owtd, ewtd, timeout) except select.error as e: if e.args[0] == errno.EINTR: # if we loop back we have to subtract the amount of time we already waited. if timeout is not None: timeout = end_time - time.time() if timeout < 0: return ([],[],[]) else: # something else caused the select.error, so this really is an exception raise
[ "def", "__select", "(", "self", ",", "iwtd", ",", "owtd", ",", "ewtd", ",", "timeout", "=", "None", ")", ":", "# if select() is interrupted by a signal (errno==EINTR) then", "# we loop back and enter the select() again.", "if", "timeout", "is", "not", "None", ":", "end_time", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "True", ":", "try", ":", "return", "select", ".", "select", "(", "iwtd", ",", "owtd", ",", "ewtd", ",", "timeout", ")", "except", "select", ".", "error", "as", "e", ":", "if", "e", ".", "args", "[", "0", "]", "==", "errno", ".", "EINTR", ":", "# if we loop back we have to subtract the amount of time we already waited.", "if", "timeout", "is", "not", "None", ":", "timeout", "=", "end_time", "-", "time", ".", "time", "(", ")", "if", "timeout", "<", "0", ":", "return", "(", "[", "]", ",", "[", "]", ",", "[", "]", ")", "else", ":", "# something else caused the select.error, so this really is an exception", "raise" ]
This is a wrapper around select.select() that ignores signals. If select.select raises a select.error exception and errno is an EINTR error then it is ignored. Mainly this is used to ignore sigwinch (terminal resize).
[ "This", "is", "a", "wrapper", "around", "select", ".", "select", "()", "that", "ignores", "signals", ".", "If", "select", ".", "select", "raises", "a", "select", ".", "error", "exception", "and", "errno", "is", "an", "EINTR", "error", "then", "it", "is", "ignored", ".", "Mainly", "this", "is", "used", "to", "ignore", "sigwinch", "(", "terminal", "resize", ")", "." ]
python
test
46.826087
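The deadline trick in __select generalizes to a standalone helper; a sketch under the original's Python 2-era assumption that an interrupted select raises select.error with the errno as args[0]:

import errno
import select
import time

def select_retry(rlist, wlist, xlist, timeout=None):
    # Keep an absolute deadline so repeated signal interruptions
    # cannot extend the total wait beyond the requested timeout.
    deadline = None if timeout is None else time.time() + timeout
    while True:
        try:
            return select.select(rlist, wlist, xlist, timeout)
        except select.error as e:
            if e.args[0] != errno.EINTR:
                raise  # not a signal interruption; propagate
            if deadline is not None:
                # subtract the time already spent waiting before retrying
                timeout = deadline - time.time()
                if timeout < 0:
                    return [], [], []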
ska-sa/hypercube
hypercube/base_cube.py
https://github.com/ska-sa/hypercube/blob/6564a9e65ccd9ed7e7a71bd643f183e1ec645b29/hypercube/base_cube.py#L621-L642
def dimension(self, name, copy=True): """ Returns the requested :class:`~hypercube.dims.Dimension` object Parameters ---------- name : str Name of the :class:`~hypercube.dims.Dimension` object copy : boolean Returns a copy of the :class:`~hypercube.dims.Dimension` object if True (Default value = True) Returns ------- :class:`~hypercube.dims.Dimension` A :class:`~hypercube.dims.Dimension` object. """ try: return create_dimension(name, self._dims[name]) if copy else self._dims[name] except KeyError: raise KeyError("Dimension '{n}' is not registered " "on this cube".format(n=name)), None, sys.exc_info()[2]
[ "def", "dimension", "(", "self", ",", "name", ",", "copy", "=", "True", ")", ":", "try", ":", "return", "create_dimension", "(", "name", ",", "self", ".", "_dims", "[", "name", "]", ")", "if", "copy", "else", "self", ".", "_dims", "[", "name", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "\"Dimension '{n}' is not registered \"", "\"on this cube\"", ".", "format", "(", "n", "=", "name", ")", ")", ",", "None", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]" ]
Returns the requested :class:`~hypercube.dims.Dimension` object Parameters ---------- name : str Name of the :class:`~hypercube.dims.Dimension` object copy : boolean Returns a copy of the :class:`~hypercube.dims.Dimension` object if True (Default value = True) Returns ------- :class:`~hypercube.dims.Dimension` A :class:`~hypercube.dims.Dimension` object.
[ "Returns", "the", "requested", ":", "class", ":", "~hypercube", ".", "dims", ".", "Dimension", "object" ]
python
train
34.727273
wmayner/pyphi
pyphi/subsystem.py
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L484-L490
def cause_info(self, mechanism, purview): """Return the cause information for a mechanism over a purview.""" return repertoire_distance( Direction.CAUSE, self.cause_repertoire(mechanism, purview), self.unconstrained_cause_repertoire(purview) )
[ "def", "cause_info", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "repertoire_distance", "(", "Direction", ".", "CAUSE", ",", "self", ".", "cause_repertoire", "(", "mechanism", ",", "purview", ")", ",", "self", ".", "unconstrained_cause_repertoire", "(", "purview", ")", ")" ]
Return the cause information for a mechanism over a purview.
[ "Return", "the", "cause", "information", "for", "a", "mechanism", "over", "a", "purview", "." ]
python
train
42.428571
DigitalGlobe/gbdxtools
gbdxtools/rda/util.py
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/rda/util.py#L246-L287
def calc_toa_gain_offset(meta):
    """
    Compute (gain, offset) tuples for each band of the specified image metadata
    """
    # Set satellite index to look up cal factors
    sat_index = meta['satid'].upper() + "_" + meta['bandid'].upper()

    # Set scale for at sensor radiance
    # Eq is:
    # L = GAIN * DN * (ACF/EBW) + Offset
    # ACF abscal factor from meta data
    # EBW effectiveBandwidth from meta data
    # Gain provided by abscal from const
    # Offset provided by abscal from const
    acf = np.asarray(meta['abscalfactor'])  # Should be nbands length
    ebw = np.asarray(meta['effbandwidth'])  # Should be nbands length
    gain = np.asarray(constants.DG_ABSCAL_GAIN[sat_index])
    scale = (acf / ebw) * gain
    offset = np.asarray(constants.DG_ABSCAL_OFFSET[sat_index])

    e_sun_index = meta['satid'].upper() + "_" + meta['bandid'].upper()
    e_sun = np.asarray(constants.DG_ESUN[e_sun_index])
    sun = ephem.Sun()
    img_obs = ephem.Observer()
    img_obs.lon = meta['latlonhae'][1]
    img_obs.lat = meta['latlonhae'][0]
    img_obs.elevation = meta['latlonhae'][2]
    img_obs.date = datetime.datetime.fromtimestamp(meta['img_datetime_obj_utc']['$date'] / 1000.0).strftime(
        '%Y-%m-%d %H:%M:%S.%f')
    sun.compute(img_obs)
    d_es = sun.earth_distance

    # Pull sun elevation from the image metadata
    # theta_s can be zenith or elevation - the calc below will use either
    # a cos or sin respectively
    # theta_s = float(self.meta_dg.IMD.IMAGE.MEANSUNEL)
    theta_s = 90 - float(meta['mean_sun_el'])
    scale2 = (d_es ** 2 * np.pi) / (e_sun * np.cos(np.deg2rad(theta_s)))

    # Return scaled data
    # Radiance = Scale * Image + offset, Reflectance = Radiance * Scale2
    return zip(scale, scale2, offset)
[ "def", "calc_toa_gain_offset", "(", "meta", ")", ":", "# Set satellite index to look up cal factors", "sat_index", "=", "meta", "[", "'satid'", "]", ".", "upper", "(", ")", "+", "\"_\"", "+", "meta", "[", "'bandid'", "]", ".", "upper", "(", ")", "# Set scale for at sensor radiance", "# Eq is:", "# L = GAIN * DN * (ACF/EBW) + Offset", "# ACF abscal factor from meta data", "# EBW effectiveBandwidth from meta data", "# Gain provided by abscal from const", "# Offset provided by abscal from const", "acf", "=", "np", ".", "asarray", "(", "meta", "[", "'abscalfactor'", "]", ")", "# Should be nbands length", "ebw", "=", "np", ".", "asarray", "(", "meta", "[", "'effbandwidth'", "]", ")", "# Should be nbands length", "gain", "=", "np", ".", "asarray", "(", "constants", ".", "DG_ABSCAL_GAIN", "[", "sat_index", "]", ")", "scale", "=", "(", "acf", "/", "ebw", ")", "*", "gain", "offset", "=", "np", ".", "asarray", "(", "constants", ".", "DG_ABSCAL_OFFSET", "[", "sat_index", "]", ")", "e_sun_index", "=", "meta", "[", "'satid'", "]", ".", "upper", "(", ")", "+", "\"_\"", "+", "meta", "[", "'bandid'", "]", ".", "upper", "(", ")", "e_sun", "=", "np", ".", "asarray", "(", "constants", ".", "DG_ESUN", "[", "e_sun_index", "]", ")", "sun", "=", "ephem", ".", "Sun", "(", ")", "img_obs", "=", "ephem", ".", "Observer", "(", ")", "img_obs", ".", "lon", "=", "meta", "[", "'latlonhae'", "]", "[", "1", "]", "img_obs", ".", "lat", "=", "meta", "[", "'latlonhae'", "]", "[", "0", "]", "img_obs", ".", "elevation", "=", "meta", "[", "'latlonhae'", "]", "[", "2", "]", "img_obs", ".", "date", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "meta", "[", "'img_datetime_obj_utc'", "]", "[", "'$date'", "]", "/", "1000.0", ")", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S.%f'", ")", "sun", ".", "compute", "(", "img_obs", ")", "d_es", "=", "sun", ".", "earth_distance", "# Pull sun elevation from the image metadata", "# theta_s can be zenith or elevation - the calc below will us either", "# a cos or s in respectively", "# theta_s = float(self.meta_dg.IMD.IMAGE.MEANSUNEL)", "theta_s", "=", "90", "-", "float", "(", "meta", "[", "'mean_sun_el'", "]", ")", "scale2", "=", "(", "d_es", "**", "2", "*", "np", ".", "pi", ")", "/", "(", "e_sun", "*", "np", ".", "cos", "(", "np", ".", "deg2rad", "(", "theta_s", ")", ")", ")", "# Return scaled data", "# Radiance = Scale * Image + offset, Reflectance = Radiance * Scale2", "return", "zip", "(", "scale", ",", "scale2", ",", "offset", ")" ]
Compute (gain, offset) tuples for each band of the specified image metadata
[ "Compute", "(", "gain", "offset", ")", "tuples", "for", "each", "band", "of", "the", "specified", "image", "metadata" ]
python
valid
41.071429
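The final comment in calc_toa_gain_offset gives the application recipe; a numeric sketch with hypothetical per-band values showing how one (scale, scale2, offset) triple is consumed:

scale, scale2, offset = 0.0094, 2.1, -3.2   # hypothetical single-band triple
dn = 512                                    # raw digital number for one pixel
radiance = scale * dn + offset              # Radiance = Scale * Image + Offset
reflectance = radiance * scale2             # Reflectance = Radiance * Scale2
print(round(radiance, 3), round(reflectance, 3))   # 1.613 3.387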
RedHatInsights/insights-core
insights/client/connection.py
https://github.com/RedHatInsights/insights-core/blob/b57cbf8ed7c089672426ede0441e0a4f789ef4a1/insights/client/connection.py#L481-L508
def create_system(self, new_machine_id=False): """ Create the machine via the API """ client_hostname = determine_hostname() machine_id = generate_machine_id(new_machine_id) branch_info = self.branch_info if not branch_info: return False remote_branch = branch_info['remote_branch'] remote_leaf = branch_info['remote_leaf'] data = {'machine_id': machine_id, 'remote_branch': remote_branch, 'remote_leaf': remote_leaf, 'hostname': client_hostname} if self.config.display_name is not None: data['display_name'] = self.config.display_name data = json.dumps(data) post_system_url = self.api_url + '/v1/systems' logger.debug("POST System: %s", post_system_url) logger.debug(data) net_logger.info("POST %s", post_system_url) return self.session.post(post_system_url, headers={'Content-Type': 'application/json'}, data=data)
[ "def", "create_system", "(", "self", ",", "new_machine_id", "=", "False", ")", ":", "client_hostname", "=", "determine_hostname", "(", ")", "machine_id", "=", "generate_machine_id", "(", "new_machine_id", ")", "branch_info", "=", "self", ".", "branch_info", "if", "not", "branch_info", ":", "return", "False", "remote_branch", "=", "branch_info", "[", "'remote_branch'", "]", "remote_leaf", "=", "branch_info", "[", "'remote_leaf'", "]", "data", "=", "{", "'machine_id'", ":", "machine_id", ",", "'remote_branch'", ":", "remote_branch", ",", "'remote_leaf'", ":", "remote_leaf", ",", "'hostname'", ":", "client_hostname", "}", "if", "self", ".", "config", ".", "display_name", "is", "not", "None", ":", "data", "[", "'display_name'", "]", "=", "self", ".", "config", ".", "display_name", "data", "=", "json", ".", "dumps", "(", "data", ")", "post_system_url", "=", "self", ".", "api_url", "+", "'/v1/systems'", "logger", ".", "debug", "(", "\"POST System: %s\"", ",", "post_system_url", ")", "logger", ".", "debug", "(", "data", ")", "net_logger", ".", "info", "(", "\"POST %s\"", ",", "post_system_url", ")", "return", "self", ".", "session", ".", "post", "(", "post_system_url", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ",", "data", "=", "data", ")" ]
Create the machine via the API
[ "Create", "the", "machine", "via", "the", "API" ]
python
train
38.107143
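The payload assembly in create_system is separable from the HTTP call; a stdlib-only sketch of that half, mirroring the keys used above (the argument values are illustrative):

import json

def build_system_payload(machine_id, branch_info, hostname, display_name=None):
    # Same key layout as create_system, without the session/logging plumbing.
    data = {'machine_id': machine_id,
            'remote_branch': branch_info['remote_branch'],
            'remote_leaf': branch_info['remote_leaf'],
            'hostname': hostname}
    if display_name is not None:
        data['display_name'] = display_name
    return json.dumps(data)

print(build_system_payload('abc-123',
                           {'remote_branch': -1, 'remote_leaf': -1},
                           'host.example.com'))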
JamesGardiner/chwrapper
chwrapper/services/search.py
https://github.com/JamesGardiner/chwrapper/blob/50f9cb2f5264c59505e8cc4e45ee6dc5d5669134/chwrapper/services/search.py#L169-L180
def officers(self, num, **kwargs): """Search for a company's registered officers by company number. Args: num (str): Company number to search on. kwargs (dict): additional keywords passed into requests.session.get *params* keyword. """ baseuri = self._BASE_URI + "company/{}/officers".format(num) res = self.session.get(baseuri, params=kwargs) self.handle_http_error(res) return res
[ "def", "officers", "(", "self", ",", "num", ",", "*", "*", "kwargs", ")", ":", "baseuri", "=", "self", ".", "_BASE_URI", "+", "\"company/{}/officers\"", ".", "format", "(", "num", ")", "res", "=", "self", ".", "session", ".", "get", "(", "baseuri", ",", "params", "=", "kwargs", ")", "self", ".", "handle_http_error", "(", "res", ")", "return", "res" ]
Search for a company's registered officers by company number. Args: num (str): Company number to search on. kwargs (dict): additional keywords passed into requests.session.get *params* keyword.
[ "Search", "for", "a", "company", "s", "registered", "officers", "by", "company", "number", "." ]
python
train
38.333333
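A usage sketch for officers; the client class name, token handling, and response fields below follow my reading of this package and the Companies House API, and should be treated as assumptions:

import chwrapper

s = chwrapper.Search(access_token='MY_API_KEY')   # assumed public entry point
res = s.officers('04267448', items_per_page=10)   # extra kwargs become query params
print(res.json()['items'][0]['name'])             # field names assumed from the CH API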
SamLau95/nbinteract
nbinteract/plotting.py
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L451-L506
def line(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive line chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the line plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the line plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> line([1, 2, 3], [4, 7, 10]) VBox(...) >>> def x_values(max): return np.arange(0, max) >>> def y_values(xs, sd): ... return xs + np.random.normal(len(xs), scale=sd) >>> line(x_values, y_values, max=(10, 50), sd=(1, 10)) VBox(...) """ fig = options.get('_fig', False) or _create_fig(options=options) [line] = (_create_marks(fig=fig, marks=[bq.Lines], options=options)) _add_marks(fig, [line]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') line.x = x_data y_bound = util.maybe_curry(y_fn, x_data) line.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
[ "def", "line", "(", "x_fn", ",", "y_fn", ",", "*", ",", "options", "=", "{", "}", ",", "*", "*", "interact_params", ")", ":", "fig", "=", "options", ".", "get", "(", "'_fig'", ",", "False", ")", "or", "_create_fig", "(", "options", "=", "options", ")", "[", "line", "]", "=", "(", "_create_marks", "(", "fig", "=", "fig", ",", "marks", "=", "[", "bq", ".", "Lines", "]", ",", "options", "=", "options", ")", ")", "_add_marks", "(", "fig", ",", "[", "line", "]", ")", "def", "wrapped", "(", "*", "*", "interact_params", ")", ":", "x_data", "=", "util", ".", "maybe_call", "(", "x_fn", ",", "interact_params", ",", "prefix", "=", "'x'", ")", "line", ".", "x", "=", "x_data", "y_bound", "=", "util", ".", "maybe_curry", "(", "y_fn", ",", "x_data", ")", "line", ".", "y", "=", "util", ".", "maybe_call", "(", "y_bound", ",", "interact_params", ",", "prefix", "=", "'y'", ")", "controls", "=", "widgets", ".", "interactive", "(", "wrapped", ",", "*", "*", "interact_params", ")", "return", "widgets", ".", "VBox", "(", "[", "controls", ",", "fig", "]", ")" ]
Generates an interactive line chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the line plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the line plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> line([1, 2, 3], [4, 7, 10]) VBox(...) >>> def x_values(max): return np.arange(0, max) >>> def y_values(xs, sd): ... return xs + np.random.normal(len(xs), scale=sd) >>> line(x_values, y_values, max=(10, 50), sd=(1, 10)) VBox(...)
[ "Generates", "an", "interactive", "line", "chart", "that", "allows", "users", "to", "change", "the", "parameters", "of", "the", "inputs", "x_fn", "and", "y_fn", "." ]
python
train
37
wtsi-hgi/python-hgijson
hgijson/serialization.py
https://github.com/wtsi-hgi/python-hgijson/blob/6e8ccb562eabcaa816a136268a16504c2e0d4664/hgijson/serialization.py#L117-L141
def serialize(self, serializable: Optional[Union[SerializableType, List[SerializableType]]]) \ -> PrimitiveJsonType: """ Serializes the given serializable object or collection of serializable objects. :param serializable: the object or objects to serialize :return: a serialization of the given object """ if serializable is None: # Implements #17 return None elif isinstance(serializable, List): return [self.serialize(item) for item in serializable] else: serialized = self._create_serialized_container() for mapping in self._property_mappings: if mapping.object_property_getter is not None and mapping.serialized_property_setter is not None: value = mapping.object_property_getter(serializable) if not (mapping.optional and value is None): if isinstance(value, type(mapping.collection_factory([]))): value = list(mapping.collection_iter(value)) encoded_value = self._serialize_property_value(value, mapping.serializer_cls) mapping.serialized_property_setter(serialized, encoded_value) return serialized
[ "def", "serialize", "(", "self", ",", "serializable", ":", "Optional", "[", "Union", "[", "SerializableType", ",", "List", "[", "SerializableType", "]", "]", "]", ")", "->", "PrimitiveJsonType", ":", "if", "serializable", "is", "None", ":", "# Implements #17", "return", "None", "elif", "isinstance", "(", "serializable", ",", "List", ")", ":", "return", "[", "self", ".", "serialize", "(", "item", ")", "for", "item", "in", "serializable", "]", "else", ":", "serialized", "=", "self", ".", "_create_serialized_container", "(", ")", "for", "mapping", "in", "self", ".", "_property_mappings", ":", "if", "mapping", ".", "object_property_getter", "is", "not", "None", "and", "mapping", ".", "serialized_property_setter", "is", "not", "None", ":", "value", "=", "mapping", ".", "object_property_getter", "(", "serializable", ")", "if", "not", "(", "mapping", ".", "optional", "and", "value", "is", "None", ")", ":", "if", "isinstance", "(", "value", ",", "type", "(", "mapping", ".", "collection_factory", "(", "[", "]", ")", ")", ")", ":", "value", "=", "list", "(", "mapping", ".", "collection_iter", "(", "value", ")", ")", "encoded_value", "=", "self", ".", "_serialize_property_value", "(", "value", ",", "mapping", ".", "serializer_cls", ")", "mapping", ".", "serialized_property_setter", "(", "serialized", ",", "encoded_value", ")", "return", "serialized" ]
Serializes the given serializable object or collection of serializable objects. :param serializable: the object or objects to serialize :return: a serialization of the given object
[ "Serializes", "the", "given", "serializable", "object", "or", "collection", "of", "serializable", "objects", ".", ":", "param", "serializable", ":", "the", "object", "or", "objects", "to", "serialize", ":", "return", ":", "a", "serialization", "of", "the", "given", "object" ]
python
train
51.48
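The top-of-function dispatch in serialize (None passes through, lists recurse element-wise, single objects fall through to the mapping loop) is reusable on its own; a runnable reduction:

def serialize_any(value, serialize_one):
    if value is None:
        return None            # same None passthrough as the original (#17)
    if isinstance(value, list):
        return [serialize_any(item, serialize_one) for item in value]
    return serialize_one(value)

print(serialize_any([1, None, 2], lambda x: {'value': x}))
# [{'value': 1}, None, {'value': 2}]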
tchellomello/python-arlo
pyarlo/base_station.py
https://github.com/tchellomello/python-arlo/blob/db70aeb81705309c56ad32bbab1094f6cd146524/pyarlo/base_station.py#L756-L769
def update(self): """Update object properties.""" current_time = int(time.time()) last_refresh = 0 if self._last_refresh is None else self._last_refresh if current_time >= (last_refresh + self._refresh_rate): self.get_cameras_properties() self.get_ambient_sensor_data() self.get_camera_extended_properties() self._attrs = self._session.refresh_attributes(self.name) self._attrs = assert_is_dict(self._attrs) _LOGGER.debug("Called base station update of camera properties: " "Scan Interval: %s, New Properties: %s", self._refresh_rate, self.camera_properties)
[ "def", "update", "(", "self", ")", ":", "current_time", "=", "int", "(", "time", ".", "time", "(", ")", ")", "last_refresh", "=", "0", "if", "self", ".", "_last_refresh", "is", "None", "else", "self", ".", "_last_refresh", "if", "current_time", ">=", "(", "last_refresh", "+", "self", ".", "_refresh_rate", ")", ":", "self", ".", "get_cameras_properties", "(", ")", "self", ".", "get_ambient_sensor_data", "(", ")", "self", ".", "get_camera_extended_properties", "(", ")", "self", ".", "_attrs", "=", "self", ".", "_session", ".", "refresh_attributes", "(", "self", ".", "name", ")", "self", ".", "_attrs", "=", "assert_is_dict", "(", "self", ".", "_attrs", ")", "_LOGGER", ".", "debug", "(", "\"Called base station update of camera properties: \"", "\"Scan Interval: %s, New Properties: %s\"", ",", "self", ".", "_refresh_rate", ",", "self", ".", "camera_properties", ")" ]
Update object properties.
[ "Update", "object", "properties", "." ]
python
train
50.142857
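The refresh-rate gate in update generalizes beyond this class; a stdlib sketch of the same throttle (note this version records the refresh time itself, which the original delegates elsewhere):

import time

class Throttle:
    def __init__(self, rate_seconds):
        self._rate = rate_seconds
        self._last = None

    def maybe_run(self, refresh):
        now = int(time.time())
        last = 0 if self._last is None else self._last
        if now >= last + self._rate:
            refresh()          # only pay the expensive call once per window
            self._last = now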
tensorflow/tensor2tensor
tensor2tensor/models/transformer.py
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/models/transformer.py#L1651-L1679
def transformer_base_vq_ada_32ex_packed(): """Set of hyperparameters for lm1b packed following tpu params.""" hparams = transformer_base_v2() expert_utils.update_hparams_for_vq_gating(hparams) hparams.moe_num_experts = 32 hparams.gating_type = "vq" # this gives us a batch size of 16 because each seq is len 256 hparams.batch_size = 5072 hparams.ffn_layer = "local_moe" hparams.shared_embedding_and_softmax_weights = False hparams.learning_rate_warmup_steps = 10000 # one epoch for languagemodel_lm1b32k_packed = 27200 steps w/ bsize 128 hparams.learning_rate_decay_steps = 27200 hparams.num_heads = 4 hparams.num_blocks = 1 hparams.moe_k = 1 hparams.num_decoder_layers = 6 hparams.label_smoothing = 0. hparams.layer_prepostprocess_dropout = 0.1 hparams.layer_postprocess_sequence = "dan" hparams.layer_preprocess_sequence = "none" hparams.weight_decay = 1e-06 hparams.attention_dropout = 0.1 hparams.optimizer = "Adafactor" hparams.learning_rate_schedule = "linear_warmup*rsqrt_decay*linear_decay" hparams.activation_dtype = "float32" hparams.learning_rate = 0.1 hparams.learning_rate_constant = 1.0 return hparams
[ "def", "transformer_base_vq_ada_32ex_packed", "(", ")", ":", "hparams", "=", "transformer_base_v2", "(", ")", "expert_utils", ".", "update_hparams_for_vq_gating", "(", "hparams", ")", "hparams", ".", "moe_num_experts", "=", "32", "hparams", ".", "gating_type", "=", "\"vq\"", "# this gives us a batch size of 16 because each seq is len 256", "hparams", ".", "batch_size", "=", "5072", "hparams", ".", "ffn_layer", "=", "\"local_moe\"", "hparams", ".", "shared_embedding_and_softmax_weights", "=", "False", "hparams", ".", "learning_rate_warmup_steps", "=", "10000", "# one epoch for languagemodel_lm1b32k_packed = 27200 steps w/ bsize 128", "hparams", ".", "learning_rate_decay_steps", "=", "27200", "hparams", ".", "num_heads", "=", "4", "hparams", ".", "num_blocks", "=", "1", "hparams", ".", "moe_k", "=", "1", "hparams", ".", "num_decoder_layers", "=", "6", "hparams", ".", "label_smoothing", "=", "0.", "hparams", ".", "layer_prepostprocess_dropout", "=", "0.1", "hparams", ".", "layer_postprocess_sequence", "=", "\"dan\"", "hparams", ".", "layer_preprocess_sequence", "=", "\"none\"", "hparams", ".", "weight_decay", "=", "1e-06", "hparams", ".", "attention_dropout", "=", "0.1", "hparams", ".", "optimizer", "=", "\"Adafactor\"", "hparams", ".", "learning_rate_schedule", "=", "\"linear_warmup*rsqrt_decay*linear_decay\"", "hparams", ".", "activation_dtype", "=", "\"float32\"", "hparams", ".", "learning_rate", "=", "0.1", "hparams", ".", "learning_rate_constant", "=", "1.0", "return", "hparams" ]
Set of hyperparameters for lm1b packed following tpu params.
[ "Set", "of", "hyperparameters", "for", "lm1b", "packed", "following", "tpu", "params", "." ]
python
train
39.413793
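In tensor2tensor, hparams sets like this are normally selected by name through the registry; a sketch of typical use (flag names per the t2t trainer; the problem name is taken from the comment above):

#   t2t-trainer --model=transformer \
#       --hparams_set=transformer_base_vq_ada_32ex_packed \
#       --problem=languagemodel_lm1b32k_packed ...
#
# Programmatic use, with a field overridden after construction:
hparams = transformer_base_vq_ada_32ex_packed()
hparams.batch_size = 2536   # e.g. halve the batch for smaller hardware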
daviddrysdale/python-phonenumbers
python/phonenumbers/asyoutypeformatter.py
https://github.com/daviddrysdale/python-phonenumbers/blob/9cc5bb4ab5e661e70789b4c64bf7a9383c7bdc20/python/phonenumbers/asyoutypeformatter.py#L416-L431
def _append_national_number(self, national_number): """Combines the national number with any prefix (IDD/+ and country code or national prefix) that was collected. A space will be inserted between them if the current formatting template indicates this to be suitable. """ prefix_before_nn_len = len(self._prefix_before_national_number) if (self._should_add_space_after_national_prefix and prefix_before_nn_len > 0 and self._prefix_before_national_number[-1] != _SEPARATOR_BEFORE_NATIONAL_NUMBER): # We want to add a space after the national prefix if the national # prefix formatting rule indicates that this would normally be # done, with the exception of the case where we already appended a # space because the NDD was surprisingly long. return self._prefix_before_national_number + _SEPARATOR_BEFORE_NATIONAL_NUMBER + national_number else: return self._prefix_before_national_number + national_number
[ "def", "_append_national_number", "(", "self", ",", "national_number", ")", ":", "prefix_before_nn_len", "=", "len", "(", "self", ".", "_prefix_before_national_number", ")", "if", "(", "self", ".", "_should_add_space_after_national_prefix", "and", "prefix_before_nn_len", ">", "0", "and", "self", ".", "_prefix_before_national_number", "[", "-", "1", "]", "!=", "_SEPARATOR_BEFORE_NATIONAL_NUMBER", ")", ":", "# We want to add a space after the national prefix if the national", "# prefix formatting rule indicates that this would normally be", "# done, with the exception of the case where we already appended a", "# space because the NDD was surprisingly long.", "return", "self", ".", "_prefix_before_national_number", "+", "_SEPARATOR_BEFORE_NATIONAL_NUMBER", "+", "national_number", "else", ":", "return", "self", ".", "_prefix_before_national_number", "+", "national_number" ]
Combines the national number with any prefix (IDD/+ and country code or national prefix) that was collected. A space will be inserted between them if the current formatting template indicates this to be suitable.
[ "Combines", "the", "national", "number", "with", "any", "prefix", "(", "IDD", "/", "+", "and", "country", "code", "or", "national", "prefix", ")", "that", "was", "collected", ".", "A", "space", "will", "be", "inserted", "between", "them", "if", "the", "current", "formatting", "template", "indicates", "this", "to", "be", "suitable", "." ]
python
train
64.8125
CityOfZion/neo-python
neo/Core/TX/ClaimTransaction.py
https://github.com/CityOfZion/neo-python/blob/fe90f62e123d720d4281c79af0598d9df9e776fb/neo/Core/TX/ClaimTransaction.py#L124-L169
def Verify(self, mempool):
    """
    Verify the transaction.

    Args:
        mempool:

    Returns:
        bool: True if verified. False otherwise.
    """
    if not super(ClaimTransaction, self).Verify(mempool):
        return False

    # wat does this do
    # get all claim transactions from mempool list
    # that are not this claim
    # and gather all the claims of those claim transactions
    # and see if they intersect the claims of this transaction
    # and if that number is greater than zero then we do not verify
    # (now, to do that in python)
    # if (mempool.OfType < ClaimTransaction > ().Where(p => p != this).SelectMany(p= > p.Claims).Intersect(Claims).Count() > 0)
    # return false;
    # im sorry about the below
    otherclaimTxs = [tx for tx in mempool if isinstance(tx, ClaimTransaction) and tx is not self]
    for other in otherclaimTxs:
        # check to see if the length of the intersection between this object's claims and the other tx's claims is > 0
        if len([claim for claim in other.Claims if claim in self.Claims]) > 0:
            return False

    txResult = None
    for tx in self.GetTransactionResults():
        if tx.AssetId == Blockchain.SystemCoin().Hash:
            txResult = tx
            break

    if txResult is None or txResult.Amount > Fixed8(0):
        return False

    try:
        return Blockchain.CalculateBonusIgnoreClaimed(self.Claims, False) == -txResult.Amount

    except Exception as e:
        logger.error('Could not calculate bonus: %s ' % e)

    return False
[ "def", "Verify", "(", "self", ",", "mempool", ")", ":", "if", "not", "super", "(", "ClaimTransaction", ",", "self", ")", ".", "Verify", "(", "mempool", ")", ":", "return", "False", "# wat does this do", "# get all claim transactions from mempool list", "# that are not this claim", "# and gather all the claims of those claim transactions", "# and see if they intersect the claims of this transaction", "# and if that number is greater than zero that we do not verify", "# (now, to do that in python)", "# if (mempool.OfType < ClaimTransaction > ().Where(p => p != this).SelectMany(p= > p.Claims).Intersect(Claims).Count() > 0)", "# return false;", "# im sorry about the below", "otherclaimTxs", "=", "[", "tx", "for", "tx", "in", "mempool", "if", "tx", "is", "ClaimTransaction", "and", "tx", "is", "not", "self", "]", "for", "other", "in", "otherclaimTxs", ":", "# check to see if the length of the intersection between this objects claim's and the other txs claims is > 0", "if", "len", "(", "[", "list", "(", "filter", "(", "lambda", "x", ":", "x", "in", "self", ".", "Claims", ",", "otherClaims", ")", ")", "for", "otherClaims", "in", "other", ".", "Claims", "]", ")", ":", "return", "False", "txResult", "=", "None", "for", "tx", "in", "self", ".", "GetTransactionResults", "(", ")", ":", "if", "tx", ".", "AssetId", "==", "Blockchain", ".", "SystemCoin", "(", ")", ".", "Hash", ":", "txResult", "=", "tx", "break", "if", "txResult", "is", "None", "or", "txResult", ".", "Amount", ">", "Fixed8", "(", "0", ")", ":", "return", "False", "try", ":", "return", "Blockchain", ".", "CalculateBonusIgnoreClaimed", "(", "self", ".", "Claims", ",", "False", ")", "==", "-", "txResult", ".", "Amount", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "'Could not calculate bonus: %s '", "%", "e", ")", "return", "False" ]
Verify the transaction. Args: mempool: Returns: bool: True if verified. False otherwise.
[ "Verify", "the", "transaction", "." ]
python
train
36.26087
timmahrt/ProMo
promo/morph_utils/modify_pitch_accent.py
https://github.com/timmahrt/ProMo/blob/99d9f5cc01ff328a62973c5a5da910cc905ae4d5/promo/morph_utils/modify_pitch_accent.py#L149-L163
def reintegrate(self, fullPointList): ''' Integrates the pitch values of the accent into a larger pitch contour ''' # Erase the original region of the accent fullPointList = _deletePoints(fullPointList, self.minT, self.maxT) # Erase the new region of the accent fullPointList = self.deleteOverlapping(fullPointList) # Add the accent into the full pitch list outputPointList = fullPointList + self.pointList outputPointList.sort() return outputPointList
[ "def", "reintegrate", "(", "self", ",", "fullPointList", ")", ":", "# Erase the original region of the accent", "fullPointList", "=", "_deletePoints", "(", "fullPointList", ",", "self", ".", "minT", ",", "self", ".", "maxT", ")", "# Erase the new region of the accent", "fullPointList", "=", "self", ".", "deleteOverlapping", "(", "fullPointList", ")", "# Add the accent into the full pitch list", "outputPointList", "=", "fullPointList", "+", "self", ".", "pointList", "outputPointList", ".", "sort", "(", ")", "return", "outputPointList" ]
Integrates the pitch values of the accent into a larger pitch contour
[ "Integrates", "the", "pitch", "values", "of", "the", "accent", "into", "a", "larger", "pitch", "contour" ]
python
train
36.866667
craffel/mir_eval
mir_eval/display.py
https://github.com/craffel/mir_eval/blob/f41c8dafaea04b411252a516d1965af43c7d531b/mir_eval/display.py#L493-L574
def pitch(times, frequencies, midi=False, unvoiced=False, ax=None, **kwargs): '''Visualize pitch contours Parameters ---------- times : np.ndarray, shape=(n,) Sample times of frequencies frequencies : np.ndarray, shape=(n,) frequencies (in Hz) of the pitch contours. Voicing is indicated by sign (positive for voiced, non-positive for non-voiced). midi : bool If `True`, plot on a MIDI-numbered vertical axis. Otherwise, plot on a linear frequency axis. unvoiced : bool If `True`, unvoiced pitch contours are plotted and indicated by transparency. Otherwise, unvoiced pitch contours are omitted from the display. ax : matplotlib.pyplot.axes An axis handle on which to draw the pitch contours. If none is provided, a new set of axes is created. kwargs Additional keyword arguments to `matplotlib.pyplot.plot`. Returns ------- ax : matplotlib.pyplot.axes._subplots.AxesSubplot A handle to the (possibly constructed) plot axes ''' ax, _ = __get_axes(ax=ax) times = np.asarray(times) # First, segment into contiguously voiced contours frequencies, voicings = freq_to_voicing(np.asarray(frequencies, dtype=np.float)) # Here are all the change-points v_changes = 1 + np.flatnonzero(voicings[1:] != voicings[:-1]) v_changes = np.unique(np.concatenate([[0], v_changes, [len(voicings)]])) # Set up arrays of slices for voiced and unvoiced regions v_slices, u_slices = [], [] for start, end in zip(v_changes, v_changes[1:]): idx = slice(start, end) # A region is voiced if its starting sample is voiced # It's unvoiced if none of the samples in the region are voiced. if voicings[start]: v_slices.append(idx) elif frequencies[idx].all(): u_slices.append(idx) # Now we just need to plot the contour style = dict() style.update(next(ax._get_lines.prop_cycler)) style.update(kwargs) if midi: idx = frequencies > 0 frequencies[idx] = hz_to_midi(frequencies[idx]) # Tick at integer midi notes ax.yaxis.set_minor_locator(MultipleLocator(1)) for idx in v_slices: ax.plot(times[idx], frequencies[idx], **style) style.pop('label', None) # Plot the unvoiced portions if unvoiced: style['alpha'] = style.get('alpha', 1.0) * 0.5 for idx in u_slices: ax.plot(times[idx], frequencies[idx], **style) return ax
[ "def", "pitch", "(", "times", ",", "frequencies", ",", "midi", "=", "False", ",", "unvoiced", "=", "False", ",", "ax", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ax", ",", "_", "=", "__get_axes", "(", "ax", "=", "ax", ")", "times", "=", "np", ".", "asarray", "(", "times", ")", "# First, segment into contiguously voiced contours", "frequencies", ",", "voicings", "=", "freq_to_voicing", "(", "np", ".", "asarray", "(", "frequencies", ",", "dtype", "=", "np", ".", "float", ")", ")", "# Here are all the change-points", "v_changes", "=", "1", "+", "np", ".", "flatnonzero", "(", "voicings", "[", "1", ":", "]", "!=", "voicings", "[", ":", "-", "1", "]", ")", "v_changes", "=", "np", ".", "unique", "(", "np", ".", "concatenate", "(", "[", "[", "0", "]", ",", "v_changes", ",", "[", "len", "(", "voicings", ")", "]", "]", ")", ")", "# Set up arrays of slices for voiced and unvoiced regions", "v_slices", ",", "u_slices", "=", "[", "]", ",", "[", "]", "for", "start", ",", "end", "in", "zip", "(", "v_changes", ",", "v_changes", "[", "1", ":", "]", ")", ":", "idx", "=", "slice", "(", "start", ",", "end", ")", "# A region is voiced if its starting sample is voiced", "# It's unvoiced if none of the samples in the region are voiced.", "if", "voicings", "[", "start", "]", ":", "v_slices", ".", "append", "(", "idx", ")", "elif", "frequencies", "[", "idx", "]", ".", "all", "(", ")", ":", "u_slices", ".", "append", "(", "idx", ")", "# Now we just need to plot the contour", "style", "=", "dict", "(", ")", "style", ".", "update", "(", "next", "(", "ax", ".", "_get_lines", ".", "prop_cycler", ")", ")", "style", ".", "update", "(", "kwargs", ")", "if", "midi", ":", "idx", "=", "frequencies", ">", "0", "frequencies", "[", "idx", "]", "=", "hz_to_midi", "(", "frequencies", "[", "idx", "]", ")", "# Tick at integer midi notes", "ax", ".", "yaxis", ".", "set_minor_locator", "(", "MultipleLocator", "(", "1", ")", ")", "for", "idx", "in", "v_slices", ":", "ax", ".", "plot", "(", "times", "[", "idx", "]", ",", "frequencies", "[", "idx", "]", ",", "*", "*", "style", ")", "style", ".", "pop", "(", "'label'", ",", "None", ")", "# Plot the unvoiced portions", "if", "unvoiced", ":", "style", "[", "'alpha'", "]", "=", "style", ".", "get", "(", "'alpha'", ",", "1.0", ")", "*", "0.5", "for", "idx", "in", "u_slices", ":", "ax", ".", "plot", "(", "times", "[", "idx", "]", ",", "frequencies", "[", "idx", "]", ",", "*", "*", "style", ")", "return", "ax" ]
Visualize pitch contours Parameters ---------- times : np.ndarray, shape=(n,) Sample times of frequencies frequencies : np.ndarray, shape=(n,) frequencies (in Hz) of the pitch contours. Voicing is indicated by sign (positive for voiced, non-positive for non-voiced). midi : bool If `True`, plot on a MIDI-numbered vertical axis. Otherwise, plot on a linear frequency axis. unvoiced : bool If `True`, unvoiced pitch contours are plotted and indicated by transparency. Otherwise, unvoiced pitch contours are omitted from the display. ax : matplotlib.pyplot.axes An axis handle on which to draw the pitch contours. If none is provided, a new set of axes is created. kwargs Additional keyword arguments to `matplotlib.pyplot.plot`. Returns ------- ax : matplotlib.pyplot.axes._subplots.AxesSubplot A handle to the (possibly constructed) plot axes
[ "Visualize", "pitch", "contours" ]
python
train
31.085366
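A minimal usage sketch for the `pitch` display above, assuming `mir_eval` and `matplotlib` are installed; the negative-frequency convention for unvoiced frames follows the docstring, and the contour values here are made up for illustration.

import numpy as np
import matplotlib.pyplot as plt
import mir_eval.display

times = np.arange(0, 1.0, 0.01)       # 10 ms hop, illustrative only
freqs = 220.0 * np.ones_like(times)   # a flat 220 Hz contour
freqs[30:50] *= -1                    # negative sign marks frames 30-49 as unvoiced

ax = mir_eval.display.pitch(times, freqs, midi=True, unvoiced=True)
ax.set(xlabel='Time (s)', ylabel='MIDI note')
plt.show()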
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/book.py
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/book.py#L82-L113
def on_doctree_read(app, document): """ Hooks into Sphinx's ``doctree-read`` event. """ literal_blocks = uqbar.book.sphinx.collect_literal_blocks(document) cache_mapping = uqbar.book.sphinx.group_literal_blocks_by_cache_path(literal_blocks) node_mapping = {} use_cache = bool(app.config["uqbar_book_use_cache"]) for cache_path, literal_block_groups in cache_mapping.items(): kwargs = dict( extensions=app.uqbar_book_extensions, setup_lines=app.config["uqbar_book_console_setup"], teardown_lines=app.config["uqbar_book_console_teardown"], use_black=bool(app.config["uqbar_book_use_black"]), ) for literal_blocks in literal_block_groups: try: if use_cache: local_node_mapping = uqbar.book.sphinx.interpret_code_blocks_with_cache( literal_blocks, cache_path, app.connection, **kwargs ) else: local_node_mapping = uqbar.book.sphinx.interpret_code_blocks( literal_blocks, **kwargs ) node_mapping.update(local_node_mapping) except ConsoleError as exception: message = exception.args[0].splitlines()[-1] logger.warning(message, location=exception.args[1]) if app.config["uqbar_book_strict"]: raise uqbar.book.sphinx.rebuild_document(document, node_mapping)
[ "def", "on_doctree_read", "(", "app", ",", "document", ")", ":", "literal_blocks", "=", "uqbar", ".", "book", ".", "sphinx", ".", "collect_literal_blocks", "(", "document", ")", "cache_mapping", "=", "uqbar", ".", "book", ".", "sphinx", ".", "group_literal_blocks_by_cache_path", "(", "literal_blocks", ")", "node_mapping", "=", "{", "}", "use_cache", "=", "bool", "(", "app", ".", "config", "[", "\"uqbar_book_use_cache\"", "]", ")", "for", "cache_path", ",", "literal_block_groups", "in", "cache_mapping", ".", "items", "(", ")", ":", "kwargs", "=", "dict", "(", "extensions", "=", "app", ".", "uqbar_book_extensions", ",", "setup_lines", "=", "app", ".", "config", "[", "\"uqbar_book_console_setup\"", "]", ",", "teardown_lines", "=", "app", ".", "config", "[", "\"uqbar_book_console_teardown\"", "]", ",", "use_black", "=", "bool", "(", "app", ".", "config", "[", "\"uqbar_book_use_black\"", "]", ")", ",", ")", "for", "literal_blocks", "in", "literal_block_groups", ":", "try", ":", "if", "use_cache", ":", "local_node_mapping", "=", "uqbar", ".", "book", ".", "sphinx", ".", "interpret_code_blocks_with_cache", "(", "literal_blocks", ",", "cache_path", ",", "app", ".", "connection", ",", "*", "*", "kwargs", ")", "else", ":", "local_node_mapping", "=", "uqbar", ".", "book", ".", "sphinx", ".", "interpret_code_blocks", "(", "literal_blocks", ",", "*", "*", "kwargs", ")", "node_mapping", ".", "update", "(", "local_node_mapping", ")", "except", "ConsoleError", "as", "exception", ":", "message", "=", "exception", ".", "args", "[", "0", "]", ".", "splitlines", "(", ")", "[", "-", "1", "]", "logger", ".", "warning", "(", "message", ",", "location", "=", "exception", ".", "args", "[", "1", "]", ")", "if", "app", ".", "config", "[", "\"uqbar_book_strict\"", "]", ":", "raise", "uqbar", ".", "book", ".", "sphinx", ".", "rebuild_document", "(", "document", ",", "node_mapping", ")" ]
Hooks into Sphinx's ``doctree-read`` event.
[ "Hooks", "into", "Sphinx", "s", "doctree", "-", "read", "event", "." ]
python
train
46.6875
gbowerman/azurerm
azurerm/storagerp.py
https://github.com/gbowerman/azurerm/blob/79d40431d3b13f8a36aadbff5029888383d72674/azurerm/storagerp.py#L134-L148
def list_storage_accounts_sub(access_token, subscription_id): '''List the storage accounts in the specified subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body list of storage accounts. ''' endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.Storage/storageAccounts', '?api-version=', STORAGE_API]) return do_get(endpoint, access_token)
[ "def", "list_storage_accounts_sub", "(", "access_token", ",", "subscription_id", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/providers/Microsoft.Storage/storageAccounts'", ",", "'?api-version='", ",", "STORAGE_API", "]", ")", "return", "do_get", "(", "endpoint", ",", "access_token", ")" ]
List the storage accounts in the specified subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body list of storage accounts.
[ "List", "the", "storage", "accounts", "in", "the", "specified", "subscription", "." ]
python
train
39.533333
jsvine/spectra
spectra/grapefruit.py
https://github.com/jsvine/spectra/blob/2269a0ae9b5923154b15bd661fb81179608f7ec2/spectra/grapefruit.py#L1023-L1042
def PilToRgb(pil): '''Convert the color from a PIL-compatible integer to RGB. Parameters: pil: a PIL compatible color representation (0xBBGGRR) Returns: The color as an (r, g, b) tuple in the range: r: [0...1] g: [0...1] b: [0...1] >>> '(%g, %g, %g)' % Color.PilToRgb(0x0080ff) '(1, 0.501961, 0)' ''' r = 0xff & pil g = 0xff & (pil >> 8) b = 0xff & (pil >> 16) return tuple((v / 255.0 for v in (r, g, b)))
[ "def", "PilToRgb", "(", "pil", ")", ":", "r", "=", "0xff", "&", "pil", "g", "=", "0xff", "&", "(", "pil", ">>", "8", ")", "b", "=", "0xff", "&", "(", "pil", ">>", "16", ")", "return", "tuple", "(", "(", "v", "/", "255.0", "for", "v", "in", "(", "r", ",", "g", ",", "b", ")", ")", ")" ]
Convert the color from a PIL-compatible integer to RGB. Parameters: pil: a PIL compatible color representation (0xBBGGRR) Returns: The color as an (r, g, b) tuple in the range: r: [0...1] g: [0...1] b: [0...1] >>> '(%g, %g, %g)' % Color.PilToRgb(0x0080ff) '(1, 0.501961, 0)'
[ "Convert", "the", "color", "from", "a", "PIL", "-", "compatible", "integer", "to", "RGB", "." ]
python
train
23.8
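A standalone re-run of the 0xBBGGRR unpacking above, with no grapefruit dependency; `pil_to_rgb` is a hypothetical local name mirroring the record's logic, and the expected output comes from the docstring example.

def pil_to_rgb(pil):
    # low byte is red, next byte green, next byte blue (0xBBGGRR layout)
    r = 0xff & pil
    g = 0xff & (pil >> 8)
    b = 0xff & (pil >> 16)
    return tuple(v / 255.0 for v in (r, g, b))

print('(%g, %g, %g)' % pil_to_rgb(0x0080ff))  # -> (1, 0.501961, 0)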
mdsol/rwslib
rwslib/builders/core.py
https://github.com/mdsol/rwslib/blob/1a86bc072d408c009ed1de8bf6e98a1769f54d18/rwslib/builders/core.py#L84-L119
def build(self, builder): """Build XML object, return the root, this is a copy for consistency and testing""" params = dict(ODMVersion="1.3", FileType=self.filetype, CreationDateTime=self.creationdatetime, Originator=self.originator, FileOID=self.fileoid, xmlns="http://www.cdisc.org/ns/odm/v1.3", ) if self.granularity_type: params['Granularity'] = self.granularity_type.value if self.source_system: params['SourceSystem'] = self.source_system if self.source_system_version: params['SourceSystemVersion'] = self.source_system_version params['xmlns:mdsol'] = "http://www.mdsol.com/ns/odm/metadata" if self.description: params['Description'] = self.description builder.start("ODM", params) # Ask the children if self.study is not None: self.study.build(builder) if self.clinical_data: for clinical_data in self.clinical_data: clinical_data.build(builder) if self.admindata is not None: self.admindata.build(builder) builder.end("ODM") return builder.close()
[ "def", "build", "(", "self", ",", "builder", ")", ":", "params", "=", "dict", "(", "ODMVersion", "=", "\"1.3\"", ",", "FileType", "=", "self", ".", "filetype", ",", "CreationDateTime", "=", "self", ".", "creationdatetime", ",", "Originator", "=", "self", ".", "originator", ",", "FileOID", "=", "self", ".", "fileoid", ",", "xmlns", "=", "\"http://www.cdisc.org/ns/odm/v1.3\"", ",", ")", "if", "self", ".", "granularity_type", ":", "params", "[", "'Granularity'", "]", "=", "self", ".", "granularity_type", ".", "value", "if", "self", ".", "source_system", ":", "params", "[", "'SourceSystem'", "]", "=", "self", ".", "source_system", "if", "self", ".", "source_system_version", ":", "params", "[", "'SourceSystemVersion'", "]", "=", "self", ".", "source_system_version", "params", "[", "'xmlns:mdsol'", "]", "=", "\"http://www.mdsol.com/ns/odm/metadata\"", "if", "self", ".", "description", ":", "params", "[", "'Description'", "]", "=", "self", ".", "description", "builder", ".", "start", "(", "\"ODM\"", ",", "params", ")", "# Ask the children", "if", "self", ".", "study", "is", "not", "None", ":", "self", ".", "study", ".", "build", "(", "builder", ")", "if", "self", ".", "clinical_data", ":", "for", "clinical_data", "in", "self", ".", "clinical_data", ":", "clinical_data", ".", "build", "(", "builder", ")", "if", "self", ".", "admindata", "is", "not", "None", ":", "self", ".", "admindata", ".", "build", "(", "builder", ")", "builder", ".", "end", "(", "\"ODM\"", ")", "return", "builder", ".", "close", "(", ")" ]
Build XML object, return the root, this is a copy for consistency and testing
[ "Build", "XML", "object", "return", "the", "root", "this", "is", "a", "copy", "for", "consistency", "and", "testing" ]
python
train
35.333333
DataBiosphere/toil
src/toil/utils/toilStats.py
https://github.com/DataBiosphere/toil/blob/a8252277ff814e7bee0971139c2344f88e44b644/src/toil/utils/toilStats.py#L92-L118
def checkOptions(options, parser): """ Check options, throw parser.error() if something goes wrong """ if options.jobStore == None: parser.error("Specify --jobStore") defaultCategories = ["time", "clock", "wait", "memory"] if options.categories is None: options.categories = defaultCategories else: options.categories = [x.lower() for x in options.categories.split(",")] for c in options.categories: if c not in defaultCategories: parser.error("Unknown category %s. Must be from %s" % (c, str(defaultCategories))) extraSort = ["count", "alpha"] if options.sortCategory is not None: if (options.sortCategory not in defaultCategories and options.sortCategory not in extraSort): parser.error("Unknown --sortCategory %s. Must be from %s" % (options.sortCategory, str(defaultCategories + extraSort))) sortFields = ["min", "med", "ave", "max", "total"] if options.sortField is not None: if (options.sortField not in sortFields): parser.error("Unknown --sortField %s. Must be from %s" % (options.sortField, str(sortFields)))
[ "def", "checkOptions", "(", "options", ",", "parser", ")", ":", "if", "options", ".", "jobStore", "==", "None", ":", "parser", ".", "error", "(", "\"Specify --jobStore\"", ")", "defaultCategories", "=", "[", "\"time\"", ",", "\"clock\"", ",", "\"wait\"", ",", "\"memory\"", "]", "if", "options", ".", "categories", "is", "None", ":", "options", ".", "categories", "=", "defaultCategories", "else", ":", "options", ".", "categories", "=", "[", "x", ".", "lower", "(", ")", "for", "x", "in", "options", ".", "categories", ".", "split", "(", "\",\"", ")", "]", "for", "c", "in", "options", ".", "categories", ":", "if", "c", "not", "in", "defaultCategories", ":", "parser", ".", "error", "(", "\"Unknown category %s. Must be from %s\"", "%", "(", "c", ",", "str", "(", "defaultCategories", ")", ")", ")", "extraSort", "=", "[", "\"count\"", ",", "\"alpha\"", "]", "if", "options", ".", "sortCategory", "is", "not", "None", ":", "if", "(", "options", ".", "sortCategory", "not", "in", "defaultCategories", "and", "options", ".", "sortCategory", "not", "in", "extraSort", ")", ":", "parser", ".", "error", "(", "\"Unknown --sortCategory %s. Must be from %s\"", "%", "(", "options", ".", "sortCategory", ",", "str", "(", "defaultCategories", "+", "extraSort", ")", ")", ")", "sortFields", "=", "[", "\"min\"", ",", "\"med\"", ",", "\"ave\"", ",", "\"max\"", ",", "\"total\"", "]", "if", "options", ".", "sortField", "is", "not", "None", ":", "if", "(", "options", ".", "sortField", "not", "in", "sortFields", ")", ":", "parser", ".", "error", "(", "\"Unknown --sortField %s. Must be from %s\"", "%", "(", "options", ".", "sortField", ",", "str", "(", "sortFields", ")", ")", ")" ]
Check options, throw parser.error() if something goes wrong
[ "Check", "options", "throw", "parser", ".", "error", "()", "if", "something", "goes", "wrong" ]
python
train
45.740741
signetlabdei/sem
sem/gridrunner.py
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/gridrunner.py#L165-L208
def run_program(self, command, working_directory=os.getcwd(), environment=None, cleanup_files=True, native_spec="-l cputype=intel"): """ Run a program through the grid, capturing the standard output. """ try: s = drmaa.Session() s.initialize() jt = s.createJobTemplate() jt.remoteCommand = os.path.dirname( os.path.abspath(__file__)) + '/run_program.sh' jt.args = [command] if environment is not None: jt.jobEnvironment = environment jt.workingDirectory = working_directory jt.nativeSpecification = native_spec output_filename = os.path.join(working_directory, 'output.txt') jt.outputPath = ':' + output_filename jt.joinFiles = True jobid = s.runJob(jt) s.wait(jobid, drmaa.Session.TIMEOUT_WAIT_FOREVER) with open(output_filename, 'r') as output: stdout = output.read() # Clean up if cleanup_files: os.remove(output_filename) finally: try: s.control(drmaa.JOB_IDS_SESSION_ALL, drmaa.JobControlAction.TERMINATE) s.synchronize([drmaa.JOB_IDS_SESSION_ALL], dispose=True) s.exit() except(drmaa.errors.NoActiveSessionException): pass return stdout
[ "def", "run_program", "(", "self", ",", "command", ",", "working_directory", "=", "os", ".", "getcwd", "(", ")", ",", "environment", "=", "None", ",", "cleanup_files", "=", "True", ",", "native_spec", "=", "\"-l cputype=intel\"", ")", ":", "try", ":", "s", "=", "drmaa", ".", "Session", "(", ")", "s", ".", "initialize", "(", ")", "jt", "=", "s", ".", "createJobTemplate", "(", ")", "jt", ".", "remoteCommand", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "+", "'/run_program.sh'", "jt", ".", "args", "=", "[", "command", "]", "if", "environment", "is", "not", "None", ":", "jt", ".", "jobEnvironment", "=", "environment", "jt", ".", "workingDirectory", "=", "working_directory", "jt", ".", "nativeSpecification", "=", "native_spec", "output_filename", "=", "os", ".", "path", ".", "join", "(", "working_directory", ",", "'output.txt'", ")", "jt", ".", "outputPath", "=", "':'", "+", "output_filename", "jt", ".", "joinFiles", "=", "True", "jobid", "=", "s", ".", "runJob", "(", "jt", ")", "s", ".", "wait", "(", "jobid", ",", "drmaa", ".", "Session", ".", "TIMEOUT_WAIT_FOREVER", ")", "with", "open", "(", "output_filename", ",", "'r'", ")", "as", "output", ":", "stdout", "=", "output", ".", "read", "(", ")", "# Clean up", "if", "cleanup_files", ":", "os", ".", "remove", "(", "output_filename", ")", "finally", ":", "try", ":", "s", ".", "control", "(", "drmaa", ".", "JOB_IDS_SESSION_ALL", ",", "drmaa", ".", "JobControlAction", ".", "TERMINATE", ")", "s", ".", "synchronize", "(", "[", "drmaa", ".", "JOB_IDS_SESSION_ALL", "]", ",", "dispose", "=", "True", ")", "s", ".", "exit", "(", ")", "except", "(", "drmaa", ".", "errors", ".", "NoActiveSessionException", ")", ":", "pass", "return", "stdout" ]
Run a program through the grid, capturing the standard output.
[ "Run", "a", "program", "through", "the", "grid", "capturing", "the", "standard", "output", "." ]
python
train
33.295455
dhermes/bezier
src/bezier/surface.py
https://github.com/dhermes/bezier/blob/4f941f82637a8e70a5b159a9203132192e23406b/src/bezier/surface.py#L240-L286
def area(self): r"""The area of the current surface. For surfaces in :math:`\mathbf{R}^2`, this computes the area via Green's theorem. Using the vector field :math:`\mathbf{F} = \left[-y, x\right]^T`, since :math:`\partial_x(x) - \partial_y(-y) = 2` Green's theorem says twice the area is equal to .. math:: \int_{B\left(\mathcal{U}\right)} 2 \, d\mathbf{x} = \int_{\partial B\left(\mathcal{U}\right)} -y \, dx + x \, dy. This relies on the assumption that the current surface is valid, which implies that the image of the unit triangle under the B |eacute| zier map --- :math:`B\left(\mathcal{U}\right)` --- has the edges of the surface as its boundary. Note that for a given edge :math:`C(r)` with control points :math:`x_j, y_j`, the integral can be simplified: .. math:: \int_C -y \, dx + x \, dy = \int_0^1 (x y' - y x') \, dr = \sum_{i < j} (x_i y_j - y_i x_j) \int_0^1 b_{i, d} b'_{j, d} \, dr where :math:`b_{i, d}, b_{j, d}` are Bernstein basis polynomials. Returns: float: The area of the current surface. Raises: NotImplementedError: If the current surface isn't in :math:`\mathbf{R}^2`. """ if self._dimension != 2: raise NotImplementedError( "2D is the only supported dimension", "Current dimension", self._dimension, ) edge1, edge2, edge3 = self._get_edges() return _surface_helpers.compute_area( (edge1._nodes, edge2._nodes, edge3._nodes) )
[ "def", "area", "(", "self", ")", ":", "if", "self", ".", "_dimension", "!=", "2", ":", "raise", "NotImplementedError", "(", "\"2D is the only supported dimension\"", ",", "\"Current dimension\"", ",", "self", ".", "_dimension", ",", ")", "edge1", ",", "edge2", ",", "edge3", "=", "self", ".", "_get_edges", "(", ")", "return", "_surface_helpers", ".", "compute_area", "(", "(", "edge1", ".", "_nodes", ",", "edge2", ".", "_nodes", ",", "edge3", ".", "_nodes", ")", ")" ]
r"""The area of the current surface. For surfaces in :math:`\mathbf{R}^2`, this computes the area via Green's theorem. Using the vector field :math:`\mathbf{F} = \left[-y, x\right]^T`, since :math:`\partial_x(x) - \partial_y(-y) = 2` Green's theorem says twice the area is equal to .. math:: \int_{B\left(\mathcal{U}\right)} 2 \, d\mathbf{x} = \int_{\partial B\left(\mathcal{U}\right)} -y \, dx + x \, dy. This relies on the assumption that the current surface is valid, which implies that the image of the unit triangle under the B |eacute| zier map --- :math:`B\left(\mathcal{U}\right)` --- has the edges of the surface as its boundary. Note that for a given edge :math:`C(r)` with control points :math:`x_j, y_j`, the integral can be simplified: .. math:: \int_C -y \, dx + x \, dy = \int_0^1 (x y' - y x') \, dr = \sum_{i < j} (x_i y_j - y_i x_j) \int_0^1 b_{i, d} b'_{j, d} \, dr where :math:`b_{i, d}, b_{j, d}` are Bernstein basis polynomials. Returns: float: The area of the current surface. Raises: NotImplementedError: If the current surface isn't in :math:`\mathbf{R}^2`.
[ "r", "The", "area", "of", "the", "current", "surface", "." ]
python
train
35.723404
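For a degree-1 (straight-edged) triangle, the boundary integral in the docstring evaluates to x_i*y_j - y_i*x_j per edge, so the area reduces to the classic shoelace formula. A hand-rolled check of that special case (not the bezier library's general implementation):

def linear_triangle_area(p0, p1, p2):
    # 0.5 * sum over the three boundary edges of (x_i * y_j - y_i * x_j)
    (x0, y0), (x1, y1), (x2, y2) = p0, p1, p2
    return 0.5 * ((x0 * y1 - y0 * x1)
                  + (x1 * y2 - y1 * x2)
                  + (x2 * y0 - y2 * x0))

print(linear_triangle_area((0.0, 0.0), (1.0, 0.0), (0.0, 1.0)))  # 0.5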
cloud9ers/gurumate
environment/lib/python2.7/site-packages/IPython/zmq/session.py
https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/IPython/zmq/session.py#L190-L207
def extract_header(msg_or_header): """Given a message or header, return the header.""" if not msg_or_header: return {} try: # See if msg_or_header is the entire message. h = msg_or_header['header'] except KeyError: try: # See if msg_or_header is just the header h = msg_or_header['msg_id'] except KeyError: raise else: h = msg_or_header if not isinstance(h, dict): h = dict(h) return h
[ "def", "extract_header", "(", "msg_or_header", ")", ":", "if", "not", "msg_or_header", ":", "return", "{", "}", "try", ":", "# See if msg_or_header is the entire message.", "h", "=", "msg_or_header", "[", "'header'", "]", "except", "KeyError", ":", "try", ":", "# See if msg_or_header is just the header", "h", "=", "msg_or_header", "[", "'msg_id'", "]", "except", "KeyError", ":", "raise", "else", ":", "h", "=", "msg_or_header", "if", "not", "isinstance", "(", "h", ",", "dict", ")", ":", "h", "=", "dict", "(", "h", ")", "return", "h" ]
Given a message or header, return the header.
[ "Given", "a", "message", "or", "header", "return", "the", "header", "." ]
python
test
27.555556
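A quick behavioral check of `extract_header`'s three accepted shapes (full message, bare header, falsy input), assuming the function above is in scope:

msg = {'header': {'msg_id': 'abc', 'session': 's1'}, 'content': {}}
header = {'msg_id': 'abc', 'session': 's1'}

assert extract_header(msg) == header     # whole message -> its header
assert extract_header(header) == header  # already a header -> returned as a dict
assert extract_header(None) == {}        # falsy input -> empty dict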
adafruit/Adafruit_CircuitPython_OneWire
adafruit_onewire/bus.py
https://github.com/adafruit/Adafruit_CircuitPython_OneWire/blob/113ca99b9087f7031f0b46a963472ad106520f9b/adafruit_onewire/bus.py#L108-L124
def readinto(self, buf, *, start=0, end=None): """ Read into ``buf`` from the device. The number of bytes read will be the length of ``buf``. If ``start`` or ``end`` is provided, then the buffer will be sliced as if ``buf[start:end]``. This will not cause an allocation like ``buf[start:end]`` will so it saves memory. :param bytearray buf: buffer to write into :param int start: Index to start writing at :param int end: Index to write up to but not include """ if end is None: end = len(buf) for i in range(start, end): buf[i] = self._readbyte()
[ "def", "readinto", "(", "self", ",", "buf", ",", "*", ",", "start", "=", "0", ",", "end", "=", "None", ")", ":", "if", "end", "is", "None", ":", "end", "=", "len", "(", "buf", ")", "for", "i", "in", "range", "(", "start", ",", "end", ")", ":", "buf", "[", "i", "]", "=", "self", ".", "_readbyte", "(", ")" ]
Read into ``buf`` from the device. The number of bytes read will be the length of ``buf``. If ``start`` or ``end`` is provided, then the buffer will be sliced as if ``buf[start:end]``. This will not cause an allocation like ``buf[start:end]`` will so it saves memory. :param bytearray buf: buffer to write into :param int start: Index to start writing at :param int end: Index to write up to but not include
[ "Read", "into", "buf", "from", "the", "device", ".", "The", "number", "of", "bytes", "read", "will", "be", "the", "length", "of", "buf", "." ]
python
train
38.411765
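The `start`/`end` pattern from `readinto`, demonstrated on a pure-Python stand-in: writing into a slice of the bytearray in place avoids the temporary copy that `buf[start:end]` would allocate. `_readbyte()` is replaced here by a counter, since no 1-Wire hardware is assumed.

buf = bytearray(8)
source = iter(range(100, 200))  # stand-in for the 1-Wire byte stream

start, end = 2, 6
for i in range(start, end):
    buf[i] = next(source)  # fill buf[2:6] in place, no slice allocation

print(list(buf))  # [0, 0, 100, 101, 102, 103, 0, 0]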
danilobellini/audiolazy
audiolazy/lazy_lpc.py
https://github.com/danilobellini/audiolazy/blob/dba0a278937909980ed40b976d866b8e97c35dee/audiolazy/lazy_lpc.py#L398-L425
def parcor_stable(filt): """ Tests whether the given filter is stable or not by using the partial correlation coefficients (reflection coefficients) of the given filter. Parameters ---------- filt : An LTI filter as a LinearFilter object. Returns ------- A boolean that is true only when all correlation coefficients are inside the unit circle. Critical stability (i.e., when the outer coefficient has magnitude equal to one) is seen as an instability, and returns False. See Also -------- parcor : Partial correlation coefficients generator. lsf_stable : Tests filter stability with Line Spectral Frequencies (LSF) values. """ try: return all(abs(k) < 1 for k in parcor(ZFilter(filt.denpoly))) except ParCorError: return False
[ "def", "parcor_stable", "(", "filt", ")", ":", "try", ":", "return", "all", "(", "abs", "(", "k", ")", "<", "1", "for", "k", "in", "parcor", "(", "ZFilter", "(", "filt", ".", "denpoly", ")", ")", ")", "except", "ParCorError", ":", "return", "False" ]
Tests whether the given filter is stable or not by using the partial correlation coefficients (reflection coefficients) of the given filter. Parameters ---------- filt : An LTI filter as a LinearFilter object. Returns ------- A boolean that is true only when all correlation coefficients are inside the unit circle. Critical stability (i.e., when the outer coefficient has magnitude equal to one) is seen as an instability, and returns False. See Also -------- parcor : Partial correlation coefficients generator. lsf_stable : Tests filter stability with Line Spectral Frequencies (LSF) values.
[ "Tests", "whether", "the", "given", "filter", "is", "stable", "or", "not", "by", "using", "the", "partial", "correlation", "coefficients", "(", "reflection", "coefficients", ")", "of", "the", "given", "filter", "." ]
python
train
27
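An independent way to test the same property: a causal LTI filter is stable iff every pole lies strictly inside the unit circle, so a pole check with plain numpy should agree with the PARCOR test above (the coefficients below are an arbitrary stable example, not audiolazy API):

import numpy as np

den = [1.0, -0.5, 0.25]  # denominator z-polynomial, highest order first
poles = np.roots(den)    # here a conjugate pair with modulus 0.5
print(all(abs(p) < 1 for p in poles))  # True -> stable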
apple/turicreate
src/external/coremltools_wrap/coremltools/coremltools/converters/sklearn/_converter_internal.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/coremltools_wrap/coremltools/coremltools/converters/sklearn/_converter_internal.py#L87-L100
def _get_converter_module(sk_obj): """ Returns the module holding the conversion functions for a particular model. """ try: cv_idx = _converter_lookup[sk_obj.__class__] except KeyError: raise ValueError( "Transformer '%s' not supported; supported transformers are %s." % (repr(sk_obj), ",".join(k.__name__ for k in _converter_module_list))) return _converter_module_list[cv_idx]
[ "def", "_get_converter_module", "(", "sk_obj", ")", ":", "try", ":", "cv_idx", "=", "_converter_lookup", "[", "sk_obj", ".", "__class__", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Transformer '%s' not supported; supported transformers are %s.\"", "%", "(", "repr", "(", "sk_obj", ")", ",", "\",\"", ".", "join", "(", "k", ".", "__name__", "for", "k", "in", "_converter_module_list", ")", ")", ")", "return", "_converter_module_list", "[", "cv_idx", "]" ]
Returns the module holding the conversion functions for a particular model.
[ "Returns", "the", "module", "holding", "the", "conversion", "functions", "for", "a", "particular", "model", "." ]
python
train
33.071429
The-Politico/politico-civic-geography
geography/models/division.py
https://github.com/The-Politico/politico-civic-geography/blob/032b3ee773b50b65cfe672f230dda772df0f89e0/geography/models/division.py#L109-L113
def set_intersection(self, division, intersection): """Set intersection percentage of intersecting divisions.""" IntersectRelationship.objects.filter( from_division=self, to_division=division ).update(intersection=intersection)
[ "def", "set_intersection", "(", "self", ",", "division", ",", "intersection", ")", ":", "IntersectRelationship", ".", "objects", ".", "filter", "(", "from_division", "=", "self", ",", "to_division", "=", "division", ")", ".", "update", "(", "intersection", "=", "intersection", ")" ]
Set intersection percentage of intersecting divisions.
[ "Set", "intersection", "percentage", "of", "intersecting", "divisions", "." ]
python
train
51.8
nitmir/django-cas-server
cas_server/utils.py
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L118-L140
def import_attr(path): """ transform a python dotted path to the attr :param path: A dotted path to a python object or a python object :type path: :obj:`unicode` or :obj:`str` or anything :return: The python object pointed by the dotted path or the python object unchanged """ # if we got a str, decode it to unicode (normally it should only contain ascii) if isinstance(path, six.binary_type): path = path.decode("utf-8") # if path is not a unicode string, return it unchanged (maybe it is already the attribute to import) if not isinstance(path, six.text_type): return path if u"." not in path: raise ValueError("%r should be of the form `module.attr` and we just got `attr`" % path) module, attr = path.rsplit(u'.', 1) try: return getattr(import_module(module), attr) except ImportError: raise ImportError("Module %r not found" % module) except AttributeError: raise AttributeError("Module %r has no attribute %r" % (module, attr))
[ "def", "import_attr", "(", "path", ")", ":", "# if we got a str, decode it to unicode (normally it should only contain ascii)", "if", "isinstance", "(", "path", ",", "six", ".", "binary_type", ")", ":", "path", "=", "path", ".", "decode", "(", "\"utf-8\"", ")", "# if path is not a unicode string, return it unchanged (maybe it is already the attribute to import)", "if", "not", "isinstance", "(", "path", ",", "six", ".", "text_type", ")", ":", "return", "path", "if", "u\".\"", "not", "in", "path", ":", "raise", "ValueError", "(", "\"%r should be of the form `module.attr` and we just got `attr`\"", "%", "path", ")", "module", ",", "attr", "=", "path", ".", "rsplit", "(", "u'.'", ",", "1", ")", "try", ":", "return", "getattr", "(", "import_module", "(", "module", ")", ",", "attr", ")", "except", "ImportError", ":", "raise", "ImportError", "(", "\"Module %r not found\"", "%", "module", ")", "except", "AttributeError", ":", "raise", "AttributeError", "(", "\"Module %r has no attribute %r\"", "%", "(", "module", ",", "attr", ")", ")" ]
transform a python dotted path to the attr :param path: A dotted path to a python object or a python object :type path: :obj:`unicode` or :obj:`str` or anything :return: The python object pointed by the dotted path or the python object unchanged
[ "transform", "a", "python", "dotted", "path", "to", "the", "attr" ]
python
train
44.608696
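Usage sketch, assuming `import_attr` above is in scope (it needs `six` and `importlib.import_module` imported, as in the source module):

dumps = import_attr(u"json.dumps")  # dotted path -> the attribute itself
print(dumps({"a": 1}))              # {"a": 1}
print(import_attr(42))              # non-string input passes through: 42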
saltstack/salt
salt/modules/elasticsearch.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/elasticsearch.py#L699-L719
def mapping_get(index, doc_type, hosts=None, profile=None): ''' Retrieve mapping definition of index or index/type index Index for the mapping doc_type Name of the document type CLI example:: salt myminion elasticsearch.mapping_get testindex user ''' es = _get_instance(hosts, profile) try: return es.indices.get_mapping(index=index, doc_type=doc_type) except elasticsearch.exceptions.NotFoundError: return None except elasticsearch.TransportError as e: raise CommandExecutionError("Cannot retrieve mapping {0}, server returned code {1} with message {2}".format(index, e.status_code, e.error))
[ "def", "mapping_get", "(", "index", ",", "doc_type", ",", "hosts", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", "return", "es", ".", "indices", ".", "get_mapping", "(", "index", "=", "index", ",", "doc_type", "=", "doc_type", ")", "except", "elasticsearch", ".", "exceptions", ".", "NotFoundError", ":", "return", "None", "except", "elasticsearch", ".", "TransportError", "as", "e", ":", "raise", "CommandExecutionError", "(", "\"Cannot retrieve mapping {0}, server returned code {1} with message {2}\"", ".", "format", "(", "index", ",", "e", ".", "status_code", ",", "e", ".", "error", ")", ")" ]
Retrieve mapping definition of index or index/type index Index for the mapping doc_type Name of the document type CLI example:: salt myminion elasticsearch.mapping_get testindex user
[ "Retrieve", "mapping", "definition", "of", "index", "or", "index", "/", "type" ]
python
train
31.666667
pycontribs/pyrax
pyrax/utils.py
https://github.com/pycontribs/pyrax/blob/9ddfd5064b3a292d7337906f3b2d5dce95b50b99/pyrax/utils.py#L630-L649
def update_exc(exc, msg, before=True, separator="\n"): """ Adds additional text to an exception's error message. The new text will be added before the existing text by default; to append it after the original text, pass False to the `before` parameter. By default the old and new text will be separated by a newline. If you wish to use a different separator, pass that as the `separator` parameter. """ emsg = exc.message if before: parts = (msg, separator, emsg) else: parts = (emsg, separator, msg) new_msg = "%s%s%s" % parts new_args = (new_msg, ) + exc.args[1:] exc.message = new_msg exc.args = new_args return exc
[ "def", "update_exc", "(", "exc", ",", "msg", ",", "before", "=", "True", ",", "separator", "=", "\"\\n\"", ")", ":", "emsg", "=", "exc", ".", "message", "if", "before", ":", "parts", "=", "(", "msg", ",", "separator", ",", "emsg", ")", "else", ":", "parts", "=", "(", "emsg", ",", "separator", ",", "msg", ")", "new_msg", "=", "\"%s%s%s\"", "%", "parts", "new_args", "=", "(", "new_msg", ",", ")", "+", "exc", ".", "args", "[", "1", ":", "]", "exc", ".", "message", "=", "new_msg", "exc", ".", "args", "=", "new_args", "return", "exc" ]
Adds additional text to an exception's error message. The new text will be added before the existing text by default; to append it after the original text, pass False to the `before` parameter. By default the old and new text will be separated by a newline. If you wish to use a different separator, pass that as the `separator` parameter.
[ "Adds", "additional", "text", "to", "an", "exception", "s", "error", "message", "." ]
python
train
33.85
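`exc.message` only exists on Python 2 exceptions; a rough Python 3 adaptation of the same idea (a hypothetical helper, not pyrax API), prepending or appending text via `exc.args` alone:

def update_exc_py3(exc, msg, before=True, separator="\n"):
    emsg = exc.args[0] if exc.args else ""
    parts = (msg, emsg) if before else (emsg, msg)
    exc.args = (separator.join(parts),) + exc.args[1:]
    return exc

err = update_exc_py3(ValueError("original failure"), "extra context")
print(err)  # "extra context" and "original failure" on two lines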
willhardy/django-seo
rollyourown/seo/admin.py
https://github.com/willhardy/django-seo/blob/3089686a3c490091315860979ad15ef2527c3e3e/rollyourown/seo/admin.py#L211-L220
def _with_inline(func, admin_site, metadata_class, inline_class): """ Decorator for register function that adds an appropriate inline.""" def register(model_or_iterable, admin_class=None, **options): # Call the (bound) function we were given. # We have to assume it will be bound to admin_site func(model_or_iterable, admin_class, **options) _monkey_inline(model_or_iterable, admin_site._registry[model_or_iterable], metadata_class, inline_class, admin_site) return register
[ "def", "_with_inline", "(", "func", ",", "admin_site", ",", "metadata_class", ",", "inline_class", ")", ":", "def", "register", "(", "model_or_iterable", ",", "admin_class", "=", "None", ",", "*", "*", "options", ")", ":", "# Call the (bound) function we were given.", "# We have to assume it will be bound to admin_site", "func", "(", "model_or_iterable", ",", "admin_class", ",", "*", "*", "options", ")", "_monkey_inline", "(", "model_or_iterable", ",", "admin_site", ".", "_registry", "[", "model_or_iterable", "]", ",", "metadata_class", ",", "inline_class", ",", "admin_site", ")", "return", "register" ]
Decorator for register function that adds an appropriate inline.
[ "Decorator", "for", "register", "function", "that", "adds", "an", "appropriate", "inline", "." ]
python
train
51.4
cloud9ers/gurumate
environment/lib/python2.7/site-packages/IPython/core/alias.py
https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/IPython/core/alias.py#L215-L227
def expand_alias(self, line): """ Expand an alias in the command line Returns the provided command line, possibly with the first word (command) translated according to alias expansion rules. [ipython]|16> _ip.expand_aliases("np myfile.txt") <16> 'q:/opt/np/notepad++.exe myfile.txt' """ pre,_,fn,rest = split_user_input(line) res = pre + self.expand_aliases(fn, rest) return res
[ "def", "expand_alias", "(", "self", ",", "line", ")", ":", "pre", ",", "_", ",", "fn", ",", "rest", "=", "split_user_input", "(", "line", ")", "res", "=", "pre", "+", "self", ".", "expand_aliases", "(", "fn", ",", "rest", ")", "return", "res" ]
Expand an alias in the command line Returns the provided command line, possibly with the first word (command) translated according to alias expansion rules. [ipython]|16> _ip.expand_aliases("np myfile.txt") <16> 'q:/opt/np/notepad++.exe myfile.txt'
[ "Expand", "an", "alias", "in", "the", "command", "line" ]
python
test
34.615385
ngmiller/mipsy
mipsy/util.py
https://github.com/ngmiller/mipsy/blob/78c058f44685765193acd386e81fada3b4187b95/mipsy/util.py#L62-L72
def write(self, label, index): """ Saves a new label, index mapping to the cache. Raises a RuntimeError on a conflict. """ if label in self.cache: if self.cache[label] != index: error_message = 'cache_conflict on label: {} with index: {}\ncache dump: {}'.format(label, index, self.cache) raise RuntimeError(error_message) else: self.cache[label] = index
[ "def", "write", "(", "self", ",", "label", ",", "index", ")", ":", "if", "label", "in", "self", ".", "cache", ":", "if", "self", ".", "cache", "[", "label", "]", "!=", "index", ":", "error_message", "=", "'cache_conflict on label: {} with index: {}\\ncache dump: {}'", ".", "format", "(", "label", ",", "index", ",", "self", ".", "cache", ")", "raise", "RuntimeError", "(", "error_message", ")", "else", ":", "self", ".", "cache", "[", "label", "]", "=", "index" ]
Saves a new label, index mapping to the cache. Raises a RuntimeError on a conflict.
[ "Saves", "a", "new", "label", "index", "mapping", "to", "the", "cache", ".", "Raises", "a", "RuntimeError", "on", "a", "conflict", "." ]
python
train
40.636364
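A tiny stand-in class to exercise `write`'s conflict rule (the record shows only the method; `LabelCache` and its `cache` dict attribute are assumed names here):

class LabelCache:
    def __init__(self):
        self.cache = {}

    def write(self, label, index):
        if label in self.cache:
            if self.cache[label] != index:
                raise RuntimeError('cache_conflict on label: {} with index: {}'.format(label, index))
        else:
            self.cache[label] = index

lc = LabelCache()
lc.write('loop_start', 4)
lc.write('loop_start', 4)      # same mapping again: silently accepted
try:
    lc.write('loop_start', 8)  # conflicting index: raises
except RuntimeError as e:
    print(e)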
torfsen/service
src/service/__init__.py
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L279-L291
def send_signal(self, s): """ Send a signal to the daemon process. The signal must have been enabled using the ``signals`` parameter of :py:meth:`Service.__init__`. Otherwise, a ``ValueError`` is raised. """ self._get_signal_event(s) # Check if signal has been enabled pid = self.get_pid() if not pid: raise ValueError('Daemon is not running.') os.kill(pid, s)
[ "def", "send_signal", "(", "self", ",", "s", ")", ":", "self", ".", "_get_signal_event", "(", "s", ")", "# Check if signal has been enabled", "pid", "=", "self", ".", "get_pid", "(", ")", "if", "not", "pid", ":", "raise", "ValueError", "(", "'Daemon is not running.'", ")", "os", ".", "kill", "(", "pid", ",", "s", ")" ]
Send a signal to the daemon process. The signal must have been enabled using the ``signals`` parameter of :py:meth:`Service.__init__`. Otherwise, a ``ValueError`` is raised.
[ "Send", "a", "signal", "to", "the", "daemon", "process", "." ]
python
train
34
ceph/ceph-deploy
ceph_deploy/osd.py
https://github.com/ceph/ceph-deploy/blob/86943fcc454cd4c99a86e3493e9e93a59c661fef/ceph_deploy/osd.py#L141-L174
def catch_osd_errors(conn, logger, args): """ Look for possible issues when checking the status of an OSD and report them back to the user. """ logger.info('checking OSD status...') status = osd_status_check(conn, args.cluster) osds = int(status.get('num_osds', 0)) up_osds = int(status.get('num_up_osds', 0)) in_osds = int(status.get('num_in_osds', 0)) full = status.get('full', False) nearfull = status.get('nearfull', False) if osds > up_osds: difference = osds - up_osds logger.warning('there %s %d OSD%s down' % ( ['is', 'are'][difference != 1], difference, "s"[difference == 1:]) ) if osds > in_osds: difference = osds - in_osds logger.warning('there %s %d OSD%s out' % ( ['is', 'are'][difference != 1], difference, "s"[difference == 1:]) ) if full: logger.warning('OSDs are full!') if nearfull: logger.warning('OSDs are near full!')
[ "def", "catch_osd_errors", "(", "conn", ",", "logger", ",", "args", ")", ":", "logger", ".", "info", "(", "'checking OSD status...'", ")", "status", "=", "osd_status_check", "(", "conn", ",", "args", ".", "cluster", ")", "osds", "=", "int", "(", "status", ".", "get", "(", "'num_osds'", ",", "0", ")", ")", "up_osds", "=", "int", "(", "status", ".", "get", "(", "'num_up_osds'", ",", "0", ")", ")", "in_osds", "=", "int", "(", "status", ".", "get", "(", "'num_in_osds'", ",", "0", ")", ")", "full", "=", "status", ".", "get", "(", "'full'", ",", "False", ")", "nearfull", "=", "status", ".", "get", "(", "'nearfull'", ",", "False", ")", "if", "osds", ">", "up_osds", ":", "difference", "=", "osds", "-", "up_osds", "logger", ".", "warning", "(", "'there %s %d OSD%s down'", "%", "(", "[", "'is'", ",", "'are'", "]", "[", "difference", "!=", "1", "]", ",", "difference", ",", "\"s\"", "[", "difference", "==", "1", ":", "]", ")", ")", "if", "osds", ">", "in_osds", ":", "difference", "=", "osds", "-", "in_osds", "logger", ".", "warning", "(", "'there %s %d OSD%s out'", "%", "(", "[", "'is'", ",", "'are'", "]", "[", "difference", "!=", "1", "]", ",", "difference", ",", "\"s\"", "[", "difference", "==", "1", ":", "]", ")", ")", "if", "full", ":", "logger", ".", "warning", "(", "'OSDs are full!'", ")", "if", "nearfull", ":", "logger", ".", "warning", "(", "'OSDs are near full!'", ")" ]
Look for possible issues when checking the status of an OSD and report them back to the user.
[ "Look", "for", "possible", "issues", "when", "checking", "the", "status", "of", "an", "OSD", "and", "report", "them", "back", "to", "the", "user", "." ]
python
train
29.588235
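The two indexing tricks behind the log messages, in isolation: `['is', 'are'][difference != 1]` picks the verb and `"s"[difference == 1:]` yields "" for 1 and "s" otherwise, because a bool indexes and slices as 0 or 1.

for difference in (1, 3):
    print('there %s %d OSD%s down' % (
        ['is', 'are'][difference != 1], difference, "s"[difference == 1:]))
# there is 1 OSD down
# there are 3 OSDs down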
haifengat/hf_ctp_py_proxy
generate/generate_enum_cs.py
https://github.com/haifengat/hf_ctp_py_proxy/blob/c2dc6dbde45aa6b097f75380474e91510d3f5d12/generate/generate_enum_cs.py#L147-L161
def run(self): """Main function""" # try: self.fenum.write('\n') self.fcpp = open(os.path.join(os.path.abspath(self.ctp_dir), 'ThostFtdcUserApiDataType.h'), 'r') for idx, line in enumerate(self.fcpp): l = self.process_line(idx, line) self.f_data_type.write(l) self.fcpp.close() self.f_data_type.close() self.fenum.close() print('ctp_data_type.py generation complete')
[ "def", "run", "(", "self", ")", ":", "# try:", "self", ".", "fenum", ".", "write", "(", "'\\n'", ")", "self", ".", "fcpp", "=", "open", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "abspath", "(", "self", ".", "ctp_dir", ")", ",", "'ThostFtdcUserApiDataType.h'", ")", ",", "'r'", ")", "for", "idx", ",", "line", "in", "enumerate", "(", "self", ".", "fcpp", ")", ":", "l", "=", "self", ".", "process_line", "(", "idx", ",", "line", ")", "self", ".", "f_data_type", ".", "write", "(", "l", ")", "self", ".", "fcpp", ".", "close", "(", ")", "self", ".", "f_data_type", ".", "close", "(", ")", "self", ".", "fenum", ".", "close", "(", ")", "print", "(", "'ctp_data_type.py generation complete'", ")" ]
Main function
[ "Main", "function" ]
python
train
28.533333
ihgazni2/edict
edict/edict.py
https://github.com/ihgazni2/edict/blob/44a08ccc10b196aa3854619b4c51ddb246778a34/edict/edict.py#L1504-L1522
def show_vmatrix(vm): ''' d = {1: {2: {22: 222}}, 3: {'a': 'b'}} vm = [[[222]], ['b']] show_vmatrix(vm) ''' unhandled = vm while(unhandled.__len__()>0): next_unhandled = [] for i in range(0,unhandled.__len__()): ele = unhandled[i] print(ele) cond = elel.is_leaf(ele) if(cond): pass else: children = ele[0] next_unhandled.append(children) unhandled = next_unhandled
[ "def", "show_vmatrix", "(", "vm", ")", ":", "unhandled", "=", "vm", "while", "(", "unhandled", ".", "__len__", "(", ")", ">", "0", ")", ":", "next_unhandled", "=", "[", "]", "for", "i", "in", "range", "(", "0", ",", "unhandled", ".", "__len__", "(", ")", ")", ":", "ele", "=", "unhandled", "[", "i", "]", "print", "(", "ele", ")", "cond", "=", "elel", ".", "is_leaf", "(", "ele", ")", "if", "(", "cond", ")", ":", "pass", "else", ":", "children", "=", "ele", "[", "0", "]", "next_unhandled", ".", "append", "(", "children", ")", "unhandled", "=", "next_unhandled" ]
d = {1: {2: {22: 222}}, 3: {'a': 'b'}} vm = [[[222]], ['b']] show_vmatrix(vm)
[ "d", "=", "{", "1", ":", "{", "2", ":", "{", "22", ":", "222", "}}", "3", ":", "{", "a", ":", "b", "}}", "vm", "=", "[[[", "222", "]]", "[", "b", "]]", "show_vmatrix", "(", "vm", ")" ]
python
train
27.263158
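A self-contained re-run of the docstring example with a stand-in for `elel.is_leaf` (assumed here to mean "contains no nested lists"; the real semantics live in the elel helper package, so the printed trace is only indicative):

def is_leaf(ele):
    return not any(isinstance(c, list) for c in ele)

def show_vmatrix(vm):
    unhandled = vm
    while unhandled:
        next_unhandled = []
        for ele in unhandled:
            print(ele)
            if not is_leaf(ele):
                next_unhandled.append(ele[0])  # descend into the children
        unhandled = next_unhandled

show_vmatrix([[[222]], ['b']])
# [[222]]
# ['b']
# [222]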
saghul/evergreen
evergreen/patcher.py
https://github.com/saghul/evergreen/blob/22f22f45892f397c23c3e09e6ea1ad4c00b3add8/evergreen/patcher.py#L27-L41
def restore(self): """Restores the modules that the saver knows about into sys.modules. """ try: for modname, mod in self._saved.items(): if mod is not None: sys.modules[modname] = mod else: try: del sys.modules[modname] except KeyError: pass finally: imp.release_lock()
[ "def", "restore", "(", "self", ")", ":", "try", ":", "for", "modname", ",", "mod", "in", "self", ".", "_saved", ".", "items", "(", ")", ":", "if", "mod", "is", "not", "None", ":", "sys", ".", "modules", "[", "modname", "]", "=", "mod", "else", ":", "try", ":", "del", "sys", ".", "modules", "[", "modname", "]", "except", "KeyError", ":", "pass", "finally", ":", "imp", ".", "release_lock", "(", ")" ]
Restores the modules that the saver knows about into sys.modules.
[ "Restores", "the", "modules", "that", "the", "saver", "knows", "about", "into", "sys", ".", "modules", "." ]
python
train
30.666667
AndrewAnnex/SpiceyPy
spiceypy/spiceypy.py
https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L14978-L14993
def vrel(v1, v2): """ Return the relative difference between two 3-dimensional vectors. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vrel_c.html :param v1: First vector :type v1: 3-Element Array of floats :param v2: Second vector :type v2: 3-Element Array of floats :return: the relative difference between v1 and v2. :rtype: float """ v1 = stypes.toDoubleVector(v1) v2 = stypes.toDoubleVector(v2) return libspice.vrel_c(v1, v2)
[ "def", "vrel", "(", "v1", ",", "v2", ")", ":", "v1", "=", "stypes", ".", "toDoubleVector", "(", "v1", ")", "v2", "=", "stypes", ".", "toDoubleVector", "(", "v2", ")", "return", "libspice", ".", "vrel_c", "(", "v1", ",", "v2", ")" ]
Return the relative difference between two 3-dimensional vectors. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vrel_c.html :param v1: First vector :type v1: 3-Element Array of floats :param v2: Second vector :type v2: 3-Element Array of floats :return: the relative difference between v1 and v2. :rtype: float
[ "Return", "the", "relative", "difference", "between", "two", "3", "-", "dimensional", "vectors", "." ]
python
train
30
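A plain-numpy rendering of the relative difference that `vrel_c` computes — per the CSPICE docs linked above, ||v1 - v2|| / max(||v1||, ||v2||), with 0 when both vectors are zero — useful for cross-checking without SPICE installed:

import numpy as np

def vrel_np(v1, v2):
    v1, v2 = np.asarray(v1, float), np.asarray(v2, float)
    denom = max(np.linalg.norm(v1), np.linalg.norm(v2))
    return np.linalg.norm(v1 - v2) / denom if denom else 0.0

print(vrel_np([1.0, 0.0, 0.0], [1.0, 0.0, 0.0]))  # 0.0
print(vrel_np([1.0, 0.0, 0.0], [0.0, 1.0, 0.0]))  # ~1.41421 (sqrt(2))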
Jajcus/pyxmpp2
pyxmpp2/sasl/scram.py
https://github.com/Jajcus/pyxmpp2/blob/14a40a3950910a9cd008b55f0d8905aa0186ce18/pyxmpp2/sasl/scram.py#L92-L101
def Normalize(str_): """The Normalize(str) function. This one also accepts Unicode string input (in the RFC only UTF-8 strings are used). """ # pylint: disable=C0103 if isinstance(str_, bytes): str_ = str_.decode("utf-8") return SASLPREP.prepare(str_).encode("utf-8")
[ "def", "Normalize", "(", "str_", ")", ":", "# pylint: disable=C0103", "if", "isinstance", "(", "str_", ",", "bytes", ")", ":", "str_", "=", "str_", ".", "decode", "(", "\"utf-8\"", ")", "return", "SASLPREP", ".", "prepare", "(", "str_", ")", ".", "encode", "(", "\"utf-8\"", ")" ]
The Normalize(str) function. This one also accepts Unicode string input (in the RFC only UTF-8 strings are used).
[ "The", "Normalize", "(", "str", ")", "function", "." ]
python
valid
32.7
thetarkus/django-semanticui-forms
semanticuiforms/templatetags/semanticui.py
https://github.com/thetarkus/django-semanticui-forms/blob/9664c6f01621568c3fa39b36439178586649eafe/semanticuiforms/templatetags/semanticui.py#L63-L72
def set_errors(self): """Set errors markup. """ if not self.field.errors or self.attrs.get("_no_errors"): return self.values["class"].append("error") for error in self.field.errors: self.values["errors"] += ERROR_WRAPPER % {"message": error}
[ "def", "set_errors", "(", "self", ")", ":", "if", "not", "self", ".", "field", ".", "errors", "or", "self", ".", "attrs", ".", "get", "(", "\"_no_errors\"", ")", ":", "return", "self", ".", "values", "[", "\"class\"", "]", ".", "append", "(", "\"error\"", ")", "for", "error", "in", "self", ".", "field", ".", "errors", ":", "self", ".", "values", "[", "\"errors\"", "]", "+=", "ERROR_WRAPPER", "%", "{", "\"message\"", ":", "error", "}" ]
Set errors markup.
[ "Set", "errors", "markup", "." ]
python
train
26.1
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L111-L128
def _get_output_columns(nodes, context): """Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query. """ columns = [] for node in nodes: for sql_output in sql_context_helpers.get_outputs(node, context): field_name = sql_output.field_name column = sql_context_helpers.get_column(field_name, node, context) column = column.label(sql_output.output_name) columns.append(column) return columns
[ "def", "_get_output_columns", "(", "nodes", ",", "context", ")", ":", "columns", "=", "[", "]", "for", "node", "in", "nodes", ":", "for", "sql_output", "in", "sql_context_helpers", ".", "get_outputs", "(", "node", ",", "context", ")", ":", "field_name", "=", "sql_output", ".", "field_name", "column", "=", "sql_context_helpers", ".", "get_column", "(", "field_name", ",", "node", ",", "context", ")", "column", "=", "column", ".", "label", "(", "sql_output", ".", "output_name", ")", "columns", ".", "append", "(", "column", ")", "return", "columns" ]
Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query.
[ "Get", "the", "output", "columns", "for", "a", "list", "of", "SqlNodes", "." ]
python
train
37.833333
juju/juju-bundlelib
jujubundlelib/cli.py
https://github.com/juju/juju-bundlelib/blob/c2efa614f53675ed9526027776448bfbb0454ca6/jujubundlelib/cli.py#L26-L58
def get_changeset(args): """Dump the changeset objects as JSON, reading the provided bundle YAML. The YAML can be provided either from stdin or by passing a file path as first argument. """ # Parse the arguments. parser = argparse.ArgumentParser(description=get_changeset.__doc__) parser.add_argument( 'infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin, help='path to the bundle YAML file') parser.add_argument( '--version', action='version', version='%(prog)s {}'.format(version)) options = parser.parse_args(args) # Parse the provided YAML file. try: bundle = yaml.safe_load(options.infile) except Exception: return 'error: the provided bundle is not a valid YAML' # Validate the bundle object. errors = validation.validate(bundle) if errors: return '\n'.join(errors) # Dump the changeset to stdout. print('[') for num, change in enumerate(changeset.parse(bundle)): if num: print(',') print(json.dumps(change)) print(']')
[ "def", "get_changeset", "(", "args", ")", ":", "# Parse the arguments.", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "get_changeset", ".", "__doc__", ")", "parser", ".", "add_argument", "(", "'infile'", ",", "nargs", "=", "'?'", ",", "type", "=", "argparse", ".", "FileType", "(", "'r'", ")", ",", "default", "=", "sys", ".", "stdin", ",", "help", "=", "'path to the bundle YAML file'", ")", "parser", ".", "add_argument", "(", "'--version'", ",", "action", "=", "'version'", ",", "version", "=", "'%(prog)s {}'", ".", "format", "(", "version", ")", ")", "options", "=", "parser", ".", "parse_args", "(", "args", ")", "# Parse the provided YAML file.", "try", ":", "bundle", "=", "yaml", ".", "safe_load", "(", "options", ".", "infile", ")", "except", "Exception", ":", "return", "'error: the provided bundle is not a valid YAML'", "# Validate the bundle object.", "errors", "=", "validation", ".", "validate", "(", "bundle", ")", "if", "errors", ":", "return", "'\\n'", ".", "join", "(", "errors", ")", "# Dump the changeset to stdout.", "print", "(", "'['", ")", "for", "num", ",", "change", "in", "enumerate", "(", "changeset", ".", "parse", "(", "bundle", ")", ")", ":", "if", "num", ":", "print", "(", "','", ")", "print", "(", "json", ".", "dumps", "(", "change", ")", ")", "print", "(", "']'", ")" ]
Dump the changeset objects as JSON, reading the provided bundle YAML. The YAML can be provided either from stdin or by passing a file path as first argument.
[ "Dump", "the", "changeset", "objects", "as", "JSON", "reading", "the", "provided", "bundle", "YAML", "." ]
python
train
32.242424
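Invocation sketch: `get_changeset` parses its own argv-style list, prints the changeset JSON to stdout, and returns an error string (or None on success); `bundle.yaml` is a hypothetical path, and the function is assumed to be in scope.

import sys

error = get_changeset(['bundle.yaml'])  # or [] to read the bundle from stdin
if error is not None:
    sys.exit(error)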
stephen-bunn/file-config
src/file_config/handlers/xml.py
https://github.com/stephen-bunn/file-config/blob/93429360c949985202e1f2b9cd0340731819ba75/src/file_config/handlers/xml.py#L51-L68
def on_lxml_loads(self, lxml, config, content, **kwargs): """ The `lxml <https://pypi.org/project/lxml/>`_ loads method. :param module lxml: The ``lxml`` module :param class config: The loading config class :param str content: The content to deserialize :param str encoding: The encoding to read the given xml document as, defaults to "utf-8", optional :returns: The deserialized dictionary :rtype: dict """ # NOTE: lazy import of XMLParser because class requires lxml to exist on import from ..contrib.xml_parser import XMLParser return XMLParser.from_xml( content, encoding=kwargs.pop("encoding", "utf-8") ).to_dict()
[ "def", "on_lxml_loads", "(", "self", ",", "lxml", ",", "config", ",", "content", ",", "*", "*", "kwargs", ")", ":", "# NOTE: lazy import of XMLParser because class requires lxml to exist on import", "from", ".", ".", "contrib", ".", "xml_parser", "import", "XMLParser", "return", "XMLParser", ".", "from_xml", "(", "content", ",", "encoding", "=", "kwargs", ".", "pop", "(", "\"encoding\"", ",", "\"utf-8\"", ")", ")", ".", "to_dict", "(", ")" ]
The `lxml <https://pypi.org/project/lxml/>`_ loads method. :param module lxml: The ``lxml`` module :param class config: The loading config class :param str content: The content to deserialize :param str encoding: The encoding to read the given xml document as, defaults to "utf-8", optional :returns: The deserialized dictionary :rtype: dict
[ "The", "lxml", "<https", ":", "//", "pypi", ".", "org", "/", "project", "/", "lxml", "/", ">", "_", "loads", "method", "." ]
python
train
40.277778
Danielhiversen/pyTibber
tibber/__init__.py
https://github.com/Danielhiversen/pyTibber/blob/114ebc3dd49f6affd93665b0862d4cbdea03e9ef/tibber/__init__.py#L192-L220
async def send_notification(self, title, message): """Send notification.""" query = gql( """ mutation{ sendPushNotification(input: { title: "%s", message: "%s", }){ successful pushedToNumberOfDevices } } """ % (title, message) ) res = await self.execute(query) if not res: return False noti = res.get("sendPushNotification", {}) successful = noti.get("successful", False) pushed_to_number_of_devices = noti.get("pushedToNumberOfDevices", 0) _LOGGER.debug( "send_notification: status %s, send to %s devices", successful, pushed_to_number_of_devices, ) return successful
[ "async", "def", "send_notification", "(", "self", ",", "title", ",", "message", ")", ":", "query", "=", "gql", "(", "\"\"\"\n mutation{\n sendPushNotification(input: {\n title: \"%s\",\n message: \"%s\",\n }){\n successful\n pushedToNumberOfDevices\n }\n }\n \"\"\"", "%", "(", "title", ",", "message", ")", ")", "res", "=", "await", "self", ".", "execute", "(", "query", ")", "if", "not", "res", ":", "return", "False", "noti", "=", "res", ".", "get", "(", "\"sendPushNotification\"", ",", "{", "}", ")", "successful", "=", "noti", ".", "get", "(", "\"successful\"", ",", "False", ")", "pushed_to_number_of_devices", "=", "noti", ".", "get", "(", "\"pushedToNumberOfDevices\"", ",", "0", ")", "_LOGGER", ".", "debug", "(", "\"send_notification: status %s, send to %s devices\"", ",", "successful", ",", "pushed_to_number_of_devices", ",", ")", "return", "successful" ]
Send notification.
[ "Send", "notification", "." ]
python
valid
27.689655
CityOfZion/neo-python
neo/Wallets/Wallet.py
https://github.com/CityOfZion/neo-python/blob/fe90f62e123d720d4281c79af0598d9df9e776fb/neo/Wallets/Wallet.py#L638-L668
def ProcessBlocks(self, block_limit=1000): """ Method called on a loop to check the current height of the blockchain. If the height of the blockchain is more than the current stored height in the wallet, we get the next block in line and processes it. In the case that the wallet height is far behind the height of the blockchain, we do this 1000 blocks at a time. Args: block_limit (int): the number of blocks to process synchronously. defaults to 1000. set to 0 to block until the wallet is fully rebuilt. """ self._lock.acquire() try: blockcount = 0 while self._current_height <= Blockchain.Default().Height and (block_limit == 0 or blockcount < block_limit): block = Blockchain.Default().GetBlockByHeight(self._current_height) if block is not None: self.ProcessNewBlock(block) else: self._current_height += 1 blockcount += 1 self.SaveStoredData("Height", self._current_height) except Exception as e: logger.warn("Could not process ::: %s " % e) finally: self._lock.release()
[ "def", "ProcessBlocks", "(", "self", ",", "block_limit", "=", "1000", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "blockcount", "=", "0", "while", "self", ".", "_current_height", "<=", "Blockchain", ".", "Default", "(", ")", ".", "Height", "and", "(", "block_limit", "==", "0", "or", "blockcount", "<", "block_limit", ")", ":", "block", "=", "Blockchain", ".", "Default", "(", ")", ".", "GetBlockByHeight", "(", "self", ".", "_current_height", ")", "if", "block", "is", "not", "None", ":", "self", ".", "ProcessNewBlock", "(", "block", ")", "else", ":", "self", ".", "_current_height", "+=", "1", "blockcount", "+=", "1", "self", ".", "SaveStoredData", "(", "\"Height\"", ",", "self", ".", "_current_height", ")", "except", "Exception", "as", "e", ":", "logger", ".", "warn", "(", "\"Could not process ::: %s \"", "%", "e", ")", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")" ]
Method called in a loop to check the current height of the blockchain. If the height of the blockchain is more than the current stored height in the wallet, we get the next block in line and process it. In the case that the wallet height is far behind the height of the blockchain, we do this 1000 blocks at a time. Args: block_limit (int): the number of blocks to process synchronously. Defaults to 1000. Set to 0 to block until the wallet is fully rebuilt.
[ "Method", "called", "on", "a", "loop", "to", "check", "the", "current", "height", "of", "the", "blockchain", ".", "If", "the", "height", "of", "the", "blockchain", "is", "more", "than", "the", "current", "stored", "height", "in", "the", "wallet", "we", "get", "the", "next", "block", "in", "line", "and", "processes", "it", "." ]
python
train
39.580645
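The ProcessBlocks record above is essentially a lock-guarded, batched catch-up loop. A minimal standalone sketch of that pattern, where every name is a hypothetical stand-in and a with-block replaces the explicit acquire/try/finally:

import threading

def process_blocks(state, get_remote_height, get_block, process_block, block_limit=1000):
    # Advance state['height'] toward the remote height, at most block_limit
    # steps per call; block_limit=0 means "run until fully caught up".
    with state['lock']:  # context manager instead of acquire/try/finally
        count = 0
        while state['height'] <= get_remote_height() and (
            block_limit == 0 or count < block_limit
        ):
            block = get_block(state['height'])
            if block is not None:
                process_block(block)   # assumed to advance state['height']
            else:
                state['height'] += 1   # skip heights with no block, as upstream does
            count += 1

state = {'lock': threading.Lock(), 'height': 0}
process_blocks(state, lambda: 9,
               lambda h: h if h % 2 else None,
               lambda block: state.update(height=state['height'] + 1),
               block_limit=5)
print(state['height'])  # 5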
log2timeline/plaso
plaso/formatters/manager.py
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/formatters/manager.py#L37-L64
def GetFormatterObject(cls, data_type): """Retrieves the formatter object for a specific data type. Args: data_type (str): data type. Returns: EventFormatter: corresponding formatter or the default formatter if not available. """ data_type = data_type.lower() if data_type not in cls._formatter_objects: formatter_object = None if data_type in cls._formatter_classes: formatter_class = cls._formatter_classes[data_type] # TODO: remove the need to instantiate the Formatter classes # and use class methods only. formatter_object = formatter_class() if not formatter_object: logger.warning( 'Using default formatter for data type: {0:s}'.format(data_type)) formatter_object = default.DefaultFormatter() cls._formatter_objects[data_type] = formatter_object return cls._formatter_objects[data_type]
[ "def", "GetFormatterObject", "(", "cls", ",", "data_type", ")", ":", "data_type", "=", "data_type", ".", "lower", "(", ")", "if", "data_type", "not", "in", "cls", ".", "_formatter_objects", ":", "formatter_object", "=", "None", "if", "data_type", "in", "cls", ".", "_formatter_classes", ":", "formatter_class", "=", "cls", ".", "_formatter_classes", "[", "data_type", "]", "# TODO: remove the need to instantiate the Formatter classes", "# and use class methods only.", "formatter_object", "=", "formatter_class", "(", ")", "if", "not", "formatter_object", ":", "logger", ".", "warning", "(", "'Using default formatter for data type: {0:s}'", ".", "format", "(", "data_type", ")", ")", "formatter_object", "=", "default", ".", "DefaultFormatter", "(", ")", "cls", ".", "_formatter_objects", "[", "data_type", "]", "=", "formatter_object", "return", "cls", ".", "_formatter_objects", "[", "data_type", "]" ]
Retrieves the formatter object for a specific data type. Args: data_type (str): data type. Returns: EventFormatter: corresponding formatter or the default formatter if not available.
[ "Retrieves", "the", "formatter", "object", "for", "a", "specific", "data", "type", "." ]
python
train
32.392857
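The GetFormatterObject record above combines a class registry, a lazily built instance cache, and a default fallback. A self-contained sketch of the same caching pattern, with dict and list used as hypothetical formatter classes:

class FormatterRegistry:
    # Minimal sketch of the lazy, per-data-type instance cache with a default fallback.
    _classes = {}    # data_type -> formatter class
    _instances = {}  # data_type -> formatter instance, built on first request

    @classmethod
    def register(cls, data_type, formatter_class):
        cls._classes[data_type.lower()] = formatter_class

    @classmethod
    def get(cls, data_type, default_factory):
        data_type = data_type.lower()
        if data_type not in cls._instances:
            formatter_class = cls._classes.get(data_type)
            cls._instances[data_type] = (
                formatter_class() if formatter_class else default_factory()
            )
        return cls._instances[data_type]

FormatterRegistry.register('fs:stat', dict)
print(type(FormatterRegistry.get('FS:STAT', list)).__name__)  # dict (case-folded hit)
print(type(FormatterRegistry.get('unknown', list)).__name__)  # list (default fallback)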
angr/angr
angr/analyses/bindiff.py
https://github.com/angr/angr/blob/4e2f97d56af5419ee73bdb30482c8dd8ff5f3e40/angr/analyses/bindiff.py#L56-L78
def _get_closest_matches(input_attributes, target_attributes): """ :param input_attributes: First dictionary of objects to attribute tuples. :param target_attributes: Second dictionary of blocks to attribute tuples. :returns: A dictionary of objects in the input_attributes to the closest objects in the target_attributes. """ closest_matches = {} # for each object in the first set find the objects with the closest target attributes for a in input_attributes: best_dist = float('inf') best_matches = [] for b in target_attributes: dist = _euclidean_dist(input_attributes[a], target_attributes[b]) if dist < best_dist: best_matches = [b] best_dist = dist elif dist == best_dist: best_matches.append(b) closest_matches[a] = best_matches return closest_matches
[ "def", "_get_closest_matches", "(", "input_attributes", ",", "target_attributes", ")", ":", "closest_matches", "=", "{", "}", "# for each object in the first set find the objects with the closest target attributes", "for", "a", "in", "input_attributes", ":", "best_dist", "=", "float", "(", "'inf'", ")", "best_matches", "=", "[", "]", "for", "b", "in", "target_attributes", ":", "dist", "=", "_euclidean_dist", "(", "input_attributes", "[", "a", "]", ",", "target_attributes", "[", "b", "]", ")", "if", "dist", "<", "best_dist", ":", "best_matches", "=", "[", "b", "]", "best_dist", "=", "dist", "elif", "dist", "==", "best_dist", ":", "best_matches", ".", "append", "(", "b", ")", "closest_matches", "[", "a", "]", "=", "best_matches", "return", "closest_matches" ]
:param input_attributes: First dictionary of objects to attribute tuples. :param target_attributes: Second dictionary of blocks to attribute tuples. :returns: A dictionary of objects in the input_attributes to the closest objects in the target_attributes.
[ ":", "param", "input_attributes", ":", "First", "dictionary", "of", "objects", "to", "attribute", "tuples", ".", ":", "param", "target_attributes", ":", "Second", "dictionary", "of", "blocks", "to", "attribute", "tuples", ".", ":", "returns", ":", "A", "dictionary", "of", "objects", "in", "the", "input_attributes", "to", "the", "closest", "objects", "in", "the", "target_attributes", "." ]
python
train
41.130435
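The record above relies on an _euclidean_dist helper that is not shown; a runnable sketch with a plausible definition assumed:

import math

def euclidean_dist(a, b):
    return math.sqrt(sum((x - y) ** 2 for x, y in zip(a, b)))

def get_closest_matches(input_attributes, target_attributes):
    closest = {}
    for a, a_vec in input_attributes.items():
        best_dist, best = float('inf'), []
        for b, b_vec in target_attributes.items():
            dist = euclidean_dist(a_vec, b_vec)
            if dist < best_dist:
                best_dist, best = dist, [b]   # strictly closer: restart the list
            elif dist == best_dist:
                best.append(b)                # tie: keep both candidates
        closest[a] = best
    return closest

print(get_closest_matches({'f1': (1, 2)}, {'g1': (1, 3), 'g2': (9, 9)}))
# {'f1': ['g1']}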
vingd/encrypted-pickle-python
encryptedpickle/encryptedpickle.py
https://github.com/vingd/encrypted-pickle-python/blob/7656233598e02e65971f69e11849a0f288b2b2a5/encryptedpickle/encryptedpickle.py#L344-L360
def _encode(self, data, algorithm, key=None): '''Encode data with specific algorithm''' if algorithm['type'] == 'hmac': return data + self._hmac_generate(data, algorithm, key) elif algorithm['type'] == 'aes': return self._aes_encrypt(data, algorithm, key) elif algorithm['type'] == 'no-serialization': return data elif algorithm['type'] == 'json': return json.dumps(data) elif algorithm['type'] == 'no-compression': return data elif algorithm['type'] == 'gzip': return self._zlib_compress(data, algorithm) else: raise Exception('Algorithm not supported: %s' % algorithm['type'])
[ "def", "_encode", "(", "self", ",", "data", ",", "algorithm", ",", "key", "=", "None", ")", ":", "if", "algorithm", "[", "'type'", "]", "==", "'hmac'", ":", "return", "data", "+", "self", ".", "_hmac_generate", "(", "data", ",", "algorithm", ",", "key", ")", "elif", "algorithm", "[", "'type'", "]", "==", "'aes'", ":", "return", "self", ".", "_aes_encrypt", "(", "data", ",", "algorithm", ",", "key", ")", "elif", "algorithm", "[", "'type'", "]", "==", "'no-serialization'", ":", "return", "data", "elif", "algorithm", "[", "'type'", "]", "==", "'json'", ":", "return", "json", ".", "dumps", "(", "data", ")", "elif", "algorithm", "[", "'type'", "]", "==", "'no-compression'", ":", "return", "data", "elif", "algorithm", "[", "'type'", "]", "==", "'gzip'", ":", "return", "self", ".", "_zlib_compress", "(", "data", ",", "algorithm", ")", "else", ":", "raise", "Exception", "(", "'Algorithm not supported: %s'", "%", "algorithm", "[", "'type'", "]", ")" ]
Encode data with specific algorithm
[ "Encode", "data", "with", "specific", "algorithm" ]
python
valid
41.823529
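The _encode record above is a type-keyed dispatcher over pipeline steps. A standalone sketch of the same dispatch using only the standard library; the AES branch is omitted (it needs a third-party cipher), and SHA-256 is an assumed HMAC digest, not necessarily what the package uses:

import hashlib
import hmac
import json
import zlib

def encode(data, algorithm, key=None):
    kind = algorithm['type']
    if kind == 'hmac':
        return data + hmac.new(key, data, hashlib.sha256).digest()
    if kind == 'json':
        return json.dumps(data)
    if kind == 'gzip':
        return zlib.compress(data, algorithm.get('level', 9))
    if kind in ('no-serialization', 'no-compression'):
        return data
    raise ValueError('Algorithm not supported: %s' % kind)

signed = encode(b'payload', {'type': 'hmac'}, key=b'secret')
print(len(signed))  # 39: 7 payload bytes + 32-byte SHA-256 MAC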
fastai/fastai
fastai/widgets/image_downloader.py
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/widgets/image_downloader.py#L119-L125
def _html_to_img_tuples(html:str, format:str='jpg', n_images:int=10) -> list: "Parse the google images html to img tuples containining `(fname, url)`" bs = BeautifulSoup(html, 'html.parser') img_tags = bs.find_all('div', {'class': 'rg_meta'}) metadata_dicts = (json.loads(e.text) for e in img_tags) img_tuples = ((_img_fname(d['ou']), d['ou']) for d in metadata_dicts if d['ity'] == format) return list(itertools.islice(img_tuples, n_images))
[ "def", "_html_to_img_tuples", "(", "html", ":", "str", ",", "format", ":", "str", "=", "'jpg'", ",", "n_images", ":", "int", "=", "10", ")", "->", "list", ":", "bs", "=", "BeautifulSoup", "(", "html", ",", "'html.parser'", ")", "img_tags", "=", "bs", ".", "find_all", "(", "'div'", ",", "{", "'class'", ":", "'rg_meta'", "}", ")", "metadata_dicts", "=", "(", "json", ".", "loads", "(", "e", ".", "text", ")", "for", "e", "in", "img_tags", ")", "img_tuples", "=", "(", "(", "_img_fname", "(", "d", "[", "'ou'", "]", ")", ",", "d", "[", "'ou'", "]", ")", "for", "d", "in", "metadata_dicts", "if", "d", "[", "'ity'", "]", "==", "format", ")", "return", "list", "(", "itertools", ".", "islice", "(", "img_tuples", ",", "n_images", ")", ")" ]
Parse the google images html to img tuples containing `(fname, url)`
[ "Parse", "the", "google", "images", "html", "to", "img", "tuples", "containining", "(", "fname", "url", ")" ]
python
train
66.285714
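A hedged usage sketch of the parsing in the record above, with an inline HTML sample standing in for a real Google Images page and the last URL path segment standing in for the unshown _img_fname helper:

import itertools
import json
from bs4 import BeautifulSoup  # third-party; same dependency as the record

html = '''
<div class="rg_meta">{"ou": "http://x.test/cat.jpg", "ity": "jpg"}</div>
<div class="rg_meta">{"ou": "http://x.test/dog.png", "ity": "png"}</div>
'''

bs = BeautifulSoup(html, 'html.parser')
metadata = (json.loads(e.text) for e in bs.find_all('div', {'class': 'rg_meta'}))
# stand-in for _img_fname: take the last path segment of the URL
tuples = ((d['ou'].rsplit('/', 1)[-1], d['ou']) for d in metadata if d['ity'] == 'jpg')
print(list(itertools.islice(tuples, 10)))  # [('cat.jpg', 'http://x.test/cat.jpg')]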
apple/turicreate
deps/src/boost_1_68_0/tools/build/src/build/feature.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/deps/src/boost_1_68_0/tools/build/src/build/feature.py#L165-L184
def set_default (feature, value): """ Sets the default value of the given feature, overriding any previous default. feature: the name of the feature value: the default value to assign """ f = __all_features[feature] bad_attribute = None if f.free: bad_attribute = "free" elif f.optional: bad_attribute = "optional" if bad_attribute: raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, f.name)) if value not in f.values: raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % f.values) f.set_default(value)
[ "def", "set_default", "(", "feature", ",", "value", ")", ":", "f", "=", "__all_features", "[", "feature", "]", "bad_attribute", "=", "None", "if", "f", ".", "free", ":", "bad_attribute", "=", "\"free\"", "elif", "f", ".", "optional", ":", "bad_attribute", "=", "\"optional\"", "if", "bad_attribute", ":", "raise", "InvalidValue", "(", "\"%s property %s cannot have a default\"", "%", "(", "bad_attribute", ",", "f", ".", "name", ")", ")", "if", "value", "not", "in", "f", ".", "values", ":", "raise", "InvalidValue", "(", "\"The specified default value, '%s' is invalid.\\n\"", "%", "value", "+", "\"allowed values are: %s\"", "%", "f", ".", "values", ")", "f", ".", "set_default", "(", "value", ")" ]
Sets the default value of the given feature, overriding any previous default. feature: the name of the feature value: the default value to assign
[ "Sets", "the", "default", "value", "of", "the", "given", "feature", "overriding", "any", "previous", "default", ".", "feature", ":", "the", "name", "of", "the", "feature", "value", ":", "the", "default", "value", "to", "assign" ]
python
train
32.55
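A toy reconstruction of the validation in the set_default record above; the Feature class here is a hypothetical stand-in for Boost.Build's feature object:

class Feature:
    def __init__(self, name, values, free=False, optional=False):
        self.name, self.values = name, values
        self.free, self.optional = free, optional
        self.default = None

    def set_default(self, value):
        # free/optional features reject defaults, as in the record
        if self.free or self.optional:
            kind = 'free' if self.free else 'optional'
            raise ValueError('%s property %s cannot have a default' % (kind, self.name))
        if value not in self.values:
            raise ValueError("invalid default '%s'; allowed values are: %s"
                             % (value, self.values))
        self.default = value

toolset = Feature('toolset', ['gcc', 'msvc'])
toolset.set_default('gcc')
print(toolset.default)  # gcc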
mitsei/dlkit
dlkit/json_/assessment/objects.py
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/assessment/objects.py#L2015-L2027
def clear_score_system(self): """Clears the score system. raise: NoAccess - ``Metadata.isRequired()`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.resource.ResourceForm.clear_avatar_template if (self.get_score_system_metadata().is_read_only() or self.get_score_system_metadata().is_required()): raise errors.NoAccess() self._my_map['scoreSystemId'] = self._score_system_default
[ "def", "clear_score_system", "(", "self", ")", ":", "# Implemented from template for osid.resource.ResourceForm.clear_avatar_template", "if", "(", "self", ".", "get_score_system_metadata", "(", ")", ".", "is_read_only", "(", ")", "or", "self", ".", "get_score_system_metadata", "(", ")", ".", "is_required", "(", ")", ")", ":", "raise", "errors", ".", "NoAccess", "(", ")", "self", ".", "_my_map", "[", "'scoreSystemId'", "]", "=", "self", ".", "_score_system_default" ]
Clears the score system. raise: NoAccess - ``Metadata.isRequired()`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
[ "Clears", "the", "score", "system", "." ]
python
train
43.461538
juju/python-libjuju
juju/model.py
https://github.com/juju/python-libjuju/blob/58f0011f4c57cd68830258952fa952eaadca6b38/juju/model.py#L582-L595
async def disconnect(self): """Shut down the watcher task and close websockets. """ if not self._watch_stopped.is_set(): log.debug('Stopping watcher task') self._watch_stopping.set() await self._watch_stopped.wait() self._watch_stopping.clear() if self.is_connected(): log.debug('Closing model connection') await self._connector.disconnect() self._info = None
[ "async", "def", "disconnect", "(", "self", ")", ":", "if", "not", "self", ".", "_watch_stopped", ".", "is_set", "(", ")", ":", "log", ".", "debug", "(", "'Stopping watcher task'", ")", "self", ".", "_watch_stopping", ".", "set", "(", ")", "await", "self", ".", "_watch_stopped", ".", "wait", "(", ")", "self", ".", "_watch_stopping", ".", "clear", "(", ")", "if", "self", ".", "is_connected", "(", ")", ":", "log", ".", "debug", "(", "'Closing model connection'", ")", "await", "self", ".", "_connector", ".", "disconnect", "(", ")", "self", ".", "_info", "=", "None" ]
Shut down the watcher task and close websockets.
[ "Shut", "down", "the", "watcher", "task", "and", "close", "websockets", "." ]
python
train
33.142857
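The disconnect record above uses a two-event handshake: signal stop, wait for the acknowledgement, then reset. The same pattern in a self-contained asyncio sketch:

import asyncio

async def watcher(stopping, stopped):
    while not stopping.is_set():
        await asyncio.sleep(0.01)  # stand-in for real watch work
    stopped.set()                  # acknowledge that shutdown completed

async def main():
    stopping, stopped = asyncio.Event(), asyncio.Event()
    task = asyncio.create_task(watcher(stopping, stopped))
    stopping.set()        # 1. ask the watcher to stop
    await stopped.wait()  # 2. wait for its acknowledgement
    stopping.clear()      # 3. reset so a later reconnect can reuse the events
    await task

asyncio.run(main())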
tBaxter/activity-monitor
activity_monitor/models.py
https://github.com/tBaxter/activity-monitor/blob/be6c6edc7c6b4141923b47376502cde0f785eb68/activity_monitor/models.py#L91-L131
def image(self): """ Attempts to provide a representative image from a content_object based on the content object's get_image() method. If there is a another content.object, as in the case of comments and other GFKs, then it will follow to that content_object and then get the image. Requires get_image() to be defined on the related model even if it just returns object.image, to avoid bringing back images you may not want. Note that this expects the image only. Anything related (caption, etc) should be stripped. """ obj = self.content_object # First, try to get from a get_image() helper method try: image = obj.get_image() except AttributeError: try: image = obj.content_object.get_image() except: image = None # if we didn't find one, try to get it from foo.image # This allows get_image to take precedence for greater control. if not image: try: image = obj.image except AttributeError: try: image = obj.content_object.image except: return None # Finally, ensure we're getting an image, not an image object # with caption and byline and other things. try: return image.image except AttributeError: return image
[ "def", "image", "(", "self", ")", ":", "obj", "=", "self", ".", "content_object", "# First, try to get from a get_image() helper method", "try", ":", "image", "=", "obj", ".", "get_image", "(", ")", "except", "AttributeError", ":", "try", ":", "image", "=", "obj", ".", "content_object", ".", "get_image", "(", ")", "except", ":", "image", "=", "None", "# if we didn't find one, try to get it from foo.image", "# This allows get_image to take precedence for greater control.", "if", "not", "image", ":", "try", ":", "image", "=", "obj", ".", "image", "except", "AttributeError", ":", "try", ":", "image", "=", "obj", ".", "content_object", ".", "image", "except", ":", "return", "None", "# Finally, ensure we're getting an image, not an image object", "# with caption and byline and other things.", "try", ":", "return", "image", ".", "image", "except", "AttributeError", ":", "return", "image" ]
Attempts to provide a representative image from a content_object based on the content object's get_image() method. If there is another content_object, as in the case of comments and other GFKs, it will follow that content_object and then get the image. Requires get_image() to be defined on the related model even if it just returns object.image, to avoid bringing back images you may not want. Note that this expects the image only. Anything related (caption, etc) should be stripped.
[ "Attempts", "to", "provide", "a", "representative", "image", "from", "a", "content_object", "based", "on", "the", "content", "object", "s", "get_image", "()", "method", "." ]
python
train
35.317073
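The try/except ladders in the image record above amount to "return the first attribute path that resolves". A small generic helper expressing that, with toy classes as stand-ins for the GFK objects:

def first_attr(obj, *paths, default=None):
    # Return the first resolvable dotted attribute path, else default.
    for path in paths:
        target = obj
        try:
            for part in path.split('.'):
                target = getattr(target, part)
        except AttributeError:
            continue
        return target
    return default

class Photo:
    image = 'cat.jpg'

class Comment:
    content_object = Photo()

print(first_attr(Comment(), 'image', 'content_object.image'))  # cat.jpg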
spyder-ide/spyder
spyder/plugins/breakpoints/widgets/breakpointsgui.py
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/breakpoints/widgets/breakpointsgui.py#L57-L67
def set_data(self, data): """Set model data""" self._data = data keys = list(data.keys()) self.breakpoints = [] for key in keys: bp_list = data[key] if bp_list: for item in data[key]: self.breakpoints.append((key, item[0], item[1], "")) self.reset()
[ "def", "set_data", "(", "self", ",", "data", ")", ":", "self", ".", "_data", "=", "data", "keys", "=", "list", "(", "data", ".", "keys", "(", ")", ")", "self", ".", "breakpoints", "=", "[", "]", "for", "key", "in", "keys", ":", "bp_list", "=", "data", "[", "key", "]", "if", "bp_list", ":", "for", "item", "in", "data", "[", "key", "]", ":", "self", ".", "breakpoints", ".", "append", "(", "(", "key", ",", "item", "[", "0", "]", ",", "item", "[", "1", "]", ",", "\"\"", ")", ")", "self", ".", "reset", "(", ")" ]
Set model data
[ "Set", "model", "data" ]
python
train
32.454545
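The loop in the set_data record above flattens a dict of per-file breakpoint lists into model rows. An equivalent comprehension over hypothetical sample data:

data = {
    'a.py': [(10, 'x > 1'), (42, '')],
    'b.py': [],  # files with no breakpoints contribute no rows
}

# one row per breakpoint: (filename, line_number, condition, blank status column)
breakpoints = [
    (filename, line, condition, '')
    for filename, bp_list in data.items()
    for line, condition in bp_list
]
print(breakpoints)  # [('a.py', 10, 'x > 1', ''), ('a.py', 42, '', '')]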
wbond/csrbuilder
csrbuilder/__init__.py
https://github.com/wbond/csrbuilder/blob/269565e7772fb0081bc3e954e622f5b3b8ce3e30/csrbuilder/__init__.py#L442-L520
def build(self, signing_private_key): """ Validates the certificate information, constructs an X.509 certificate and then signs it :param signing_private_key: An asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey object for the private key to sign the request with. This should be the private key that matches the public key. :return: An asn1crypto.csr.CertificationRequest object of the request """ is_oscrypto = isinstance(signing_private_key, asymmetric.PrivateKey) if not isinstance(signing_private_key, keys.PrivateKeyInfo) and not is_oscrypto: raise TypeError(_pretty_message( ''' signing_private_key must be an instance of asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey, not %s ''', _type_name(signing_private_key) )) signature_algo = signing_private_key.algorithm if signature_algo == 'ec': signature_algo = 'ecdsa' signature_algorithm_id = '%s_%s' % (self._hash_algo, signature_algo) def _make_extension(name, value): return { 'extn_id': name, 'critical': self._determine_critical(name), 'extn_value': value } extensions = [] for name in sorted(self._special_extensions): value = getattr(self, '_%s' % name) if value is not None: extensions.append(_make_extension(name, value)) for name in sorted(self._other_extensions.keys()): extensions.append(_make_extension(name, self._other_extensions[name])) attributes = [] if extensions: attributes.append({ 'type': 'extension_request', 'values': [extensions] }) certification_request_info = csr.CertificationRequestInfo({ 'version': 'v1', 'subject': self._subject, 'subject_pk_info': self._subject_public_key, 'attributes': attributes }) if signing_private_key.algorithm == 'rsa': sign_func = asymmetric.rsa_pkcs1v15_sign elif signing_private_key.algorithm == 'dsa': sign_func = asymmetric.dsa_sign elif signing_private_key.algorithm == 'ec': sign_func = asymmetric.ecdsa_sign if not is_oscrypto: signing_private_key = asymmetric.load_private_key(signing_private_key) signature = sign_func(signing_private_key, certification_request_info.dump(), self._hash_algo) return csr.CertificationRequest({ 'certification_request_info': certification_request_info, 'signature_algorithm': { 'algorithm': signature_algorithm_id, }, 'signature': signature })
[ "def", "build", "(", "self", ",", "signing_private_key", ")", ":", "is_oscrypto", "=", "isinstance", "(", "signing_private_key", ",", "asymmetric", ".", "PrivateKey", ")", "if", "not", "isinstance", "(", "signing_private_key", ",", "keys", ".", "PrivateKeyInfo", ")", "and", "not", "is_oscrypto", ":", "raise", "TypeError", "(", "_pretty_message", "(", "'''\n signing_private_key must be an instance of\n asn1crypto.keys.PrivateKeyInfo or\n oscrypto.asymmetric.PrivateKey, not %s\n '''", ",", "_type_name", "(", "signing_private_key", ")", ")", ")", "signature_algo", "=", "signing_private_key", ".", "algorithm", "if", "signature_algo", "==", "'ec'", ":", "signature_algo", "=", "'ecdsa'", "signature_algorithm_id", "=", "'%s_%s'", "%", "(", "self", ".", "_hash_algo", ",", "signature_algo", ")", "def", "_make_extension", "(", "name", ",", "value", ")", ":", "return", "{", "'extn_id'", ":", "name", ",", "'critical'", ":", "self", ".", "_determine_critical", "(", "name", ")", ",", "'extn_value'", ":", "value", "}", "extensions", "=", "[", "]", "for", "name", "in", "sorted", "(", "self", ".", "_special_extensions", ")", ":", "value", "=", "getattr", "(", "self", ",", "'_%s'", "%", "name", ")", "if", "value", "is", "not", "None", ":", "extensions", ".", "append", "(", "_make_extension", "(", "name", ",", "value", ")", ")", "for", "name", "in", "sorted", "(", "self", ".", "_other_extensions", ".", "keys", "(", ")", ")", ":", "extensions", ".", "append", "(", "_make_extension", "(", "name", ",", "self", ".", "_other_extensions", "[", "name", "]", ")", ")", "attributes", "=", "[", "]", "if", "extensions", ":", "attributes", ".", "append", "(", "{", "'type'", ":", "'extension_request'", ",", "'values'", ":", "[", "extensions", "]", "}", ")", "certification_request_info", "=", "csr", ".", "CertificationRequestInfo", "(", "{", "'version'", ":", "'v1'", ",", "'subject'", ":", "self", ".", "_subject", ",", "'subject_pk_info'", ":", "self", ".", "_subject_public_key", ",", "'attributes'", ":", "attributes", "}", ")", "if", "signing_private_key", ".", "algorithm", "==", "'rsa'", ":", "sign_func", "=", "asymmetric", ".", "rsa_pkcs1v15_sign", "elif", "signing_private_key", ".", "algorithm", "==", "'dsa'", ":", "sign_func", "=", "asymmetric", ".", "dsa_sign", "elif", "signing_private_key", ".", "algorithm", "==", "'ec'", ":", "sign_func", "=", "asymmetric", ".", "ecdsa_sign", "if", "not", "is_oscrypto", ":", "signing_private_key", "=", "asymmetric", ".", "load_private_key", "(", "signing_private_key", ")", "signature", "=", "sign_func", "(", "signing_private_key", ",", "certification_request_info", ".", "dump", "(", ")", ",", "self", ".", "_hash_algo", ")", "return", "csr", ".", "CertificationRequest", "(", "{", "'certification_request_info'", ":", "certification_request_info", ",", "'signature_algorithm'", ":", "{", "'algorithm'", ":", "signature_algorithm_id", ",", "}", ",", "'signature'", ":", "signature", "}", ")" ]
Validates the certificate information, constructs an X.509 certification request and then signs it :param signing_private_key: An asn1crypto.keys.PrivateKeyInfo or oscrypto.asymmetric.PrivateKey object for the private key to sign the request with. This should be the private key that matches the public key. :return: An asn1crypto.csr.CertificationRequest object of the request
[ "Validates", "the", "certificate", "information", "constructs", "an", "X", ".", "509", "certificate", "and", "then", "signs", "it" ]
python
train
36.620253
digidotcom/python-devicecloud
devicecloud/util.py
https://github.com/digidotcom/python-devicecloud/blob/32529684a348a7830a269c32601604c78036bcb8/devicecloud/util.py#L13-L16
def conditional_write(strm, fmt, value, *args, **kwargs): """Write to stream using fmt and value if value is not None""" if value is not None: strm.write(fmt.format(value, *args, **kwargs))
[ "def", "conditional_write", "(", "strm", ",", "fmt", ",", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "value", "is", "not", "None", ":", "strm", ".", "write", "(", "fmt", ".", "format", "(", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
Write to stream using fmt and value if value is not None
[ "Write", "to", "stream", "using", "fmt", "and", "value", "if", "value", "is", "not", "None" ]
python
train
50.5
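A quick usage sketch of the conditional_write record above against an in-memory stream; the field names are made up:

import io

def conditional_write(strm, fmt, value, *args, **kwargs):
    if value is not None:
        strm.write(fmt.format(value, *args, **kwargs))

buf = io.StringIO()
conditional_write(buf, 'serial={}\n', 'ABC123')
conditional_write(buf, 'firmware={}\n', None)  # skipped: value is None
print(buf.getvalue())  # only the serial line was written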
aliyun/aliyun-log-python-sdk
aliyun/log/logclient.py
https://github.com/aliyun/aliyun-log-python-sdk/blob/ac383db0a16abf1e5ef7df36074374184b43516e/aliyun/log/logclient.py#L1667-L1689
def update_machine_group(self, project_name, group_detail): """ update machine group in a project Unsuccessful opertaion will cause an LogException. :type project_name: string :param project_name: the Project name :type group_detail: MachineGroupDetail :param group_detail: the machine group detail config :return: UpdateMachineGroupResponse :raise: LogException """ headers = {} params = {} resource = "/machinegroups/" + group_detail.group_name headers['Content-Type'] = 'application/json' body = six.b(json.dumps(group_detail.to_json())) headers['x-log-bodyrawsize'] = str(len(body)) (resp, headers) = self._send("PUT", project_name, body, resource, params, headers) return UpdateMachineGroupResponse(headers, resp)
[ "def", "update_machine_group", "(", "self", ",", "project_name", ",", "group_detail", ")", ":", "headers", "=", "{", "}", "params", "=", "{", "}", "resource", "=", "\"/machinegroups/\"", "+", "group_detail", ".", "group_name", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "body", "=", "six", ".", "b", "(", "json", ".", "dumps", "(", "group_detail", ".", "to_json", "(", ")", ")", ")", "headers", "[", "'x-log-bodyrawsize'", "]", "=", "str", "(", "len", "(", "body", ")", ")", "(", "resp", ",", "headers", ")", "=", "self", ".", "_send", "(", "\"PUT\"", ",", "project_name", ",", "body", ",", "resource", ",", "params", ",", "headers", ")", "return", "UpdateMachineGroupResponse", "(", "headers", ",", "resp", ")" ]
update machine group in a project Unsuccessful operation will cause a LogException. :type project_name: string :param project_name: the Project name :type group_detail: MachineGroupDetail :param group_detail: the machine group detail config :return: UpdateMachineGroupResponse :raise: LogException
[ "update", "machine", "group", "in", "a", "project", "Unsuccessful", "opertaion", "will", "cause", "an", "LogException", ".", ":", "type", "project_name", ":", "string", ":", "param", "project_name", ":", "the", "Project", "name", ":", "type", "group_detail", ":", "MachineGroupDetail", ":", "param", "group_detail", ":", "the", "machine", "group", "detail", "config", ":", "return", ":", "UpdateMachineGroupResponse", ":", "raise", ":", "LogException" ]
python
train
37.73913
kylemede/KMlogger
KMlogger/kmLogger.py
https://github.com/kylemede/KMlogger/blob/c904f0aaf35b4aff259567f932b5ca678d7f8176/KMlogger/kmLogger.py#L100-L118
def addFileHandler(self,filename='', dr='',lvl=1): """ This function will add a file handler to a log with the provided level. Args: lvl (int): The severity level of messages printed to the file with the file handler, default = 1. """ fname = self.name if filename != '': fname = filename if '.' not in fname: fname+='.log' fh = logging.FileHandler(os.path.join(dr,fname)) fh.setLevel(lvl) frmtString = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' fFrmt = logging.Formatter(frmtString) fh.setFormatter(fFrmt) self.addHandler(fh)
[ "def", "addFileHandler", "(", "self", ",", "filename", "=", "''", ",", "dr", "=", "''", ",", "lvl", "=", "1", ")", ":", "fname", "=", "self", ".", "name", "if", "filename", "!=", "''", ":", "fname", "=", "filename", "if", "'.'", "not", "in", "fname", ":", "fname", "+=", "'.log'", "fh", "=", "logging", ".", "FileHandler", "(", "os", ".", "path", ".", "join", "(", "dr", ",", "fname", ")", ")", "fh", ".", "setLevel", "(", "lvl", ")", "frmtString", "=", "'%(asctime)s - %(name)s - %(levelname)s - %(message)s'", "fFrmt", "=", "logging", ".", "Formatter", "(", "frmtString", ")", "fh", ".", "setFormatter", "(", "fFrmt", ")", "self", ".", "addHandler", "(", "fh", ")" ]
This function will add a file handler to a log with the provided level. Args: lvl (int): The severity level of messages printed to the file with the file handler, default = 1.
[ "This", "function", "will", "add", "a", "file", "handler", "to", "a", "log", "with", "the", "provided", "level", ".", "Args", ":", "lvl", "(", "int", ")", ":", "The", "severity", "level", "of", "messages", "printed", "to", "the", "file", "with", "the", "file", "handler", "default", "=", "1", "." ]
python
train
36.526316
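The addFileHandler record above is plain stdlib logging. A self-contained equivalent; the temp-dir path is only for the demo:

import logging
import os
import tempfile

log = logging.getLogger('demo')
log.setLevel(logging.DEBUG)

fh = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'demo.log'))
fh.setLevel(1)  # level 1 passes everything above NOTSET, as in the record
fh.setFormatter(logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
log.addHandler(fh)

log.info('hello')  # lands in demo.log with the timestamped format above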
FelixSchwarz/pymta
pymta/session.py
https://github.com/FelixSchwarz/pymta/blob/1884accc3311e6c2e89259784f9592314f6d34fc/pymta/session.py#L151-L167
def _dispatch_commands(self, from_state, to_state, smtp_command): """This method dispatches a SMTP command to the appropriate handler method. It is called after a new command was received and a valid transition was found.""" #print from_state, ' -> ', to_state, ':', smtp_command name_handler_method = 'smtp_%s' % smtp_command.lower().replace(' ', '_') try: handler_method = getattr(self, name_handler_method) except AttributeError: # base_msg = 'No handler for %s though transition is defined (no method %s)' # print base_msg % (smtp_command, name_handler_method) self.reply(451, 'Temporary Local Problem: Please come back later') else: # Don't catch InvalidDataError here - else the state would be moved # forward. Instead the handle_input will catch it and send out the # appropriate reply. handler_method()
[ "def", "_dispatch_commands", "(", "self", ",", "from_state", ",", "to_state", ",", "smtp_command", ")", ":", "#print from_state, ' -> ', to_state, ':', smtp_command", "name_handler_method", "=", "'smtp_%s'", "%", "smtp_command", ".", "lower", "(", ")", ".", "replace", "(", "' '", ",", "'_'", ")", "try", ":", "handler_method", "=", "getattr", "(", "self", ",", "name_handler_method", ")", "except", "AttributeError", ":", "# base_msg = 'No handler for %s though transition is defined (no method %s)'", "# print base_msg % (smtp_command, name_handler_method)", "self", ".", "reply", "(", "451", ",", "'Temporary Local Problem: Please come back later'", ")", "else", ":", "# Don't catch InvalidDataError here - else the state would be moved", "# forward. Instead the handle_input will catch it and send out the", "# appropriate reply.", "handler_method", "(", ")" ]
This method dispatches an SMTP command to the appropriate handler method. It is called after a new command was received and a valid transition was found.
[ "This", "method", "dispatches", "a", "SMTP", "command", "to", "the", "appropriate", "handler", "method", ".", "It", "is", "called", "after", "a", "new", "command", "was", "received", "and", "a", "valid", "transition", "was", "found", "." ]
python
train
56.117647
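A runnable sketch of the getattr dispatch in the record above, with the SMTP handling reduced to return values; smtp_mail_from is a hypothetical handler:

class CommandHandler:
    # getattr-based dispatch: 'MAIL FROM' -> self.smtp_mail_from()
    def smtp_mail_from(self):
        return '250 OK'

    def dispatch(self, smtp_command):
        name = 'smtp_%s' % smtp_command.lower().replace(' ', '_')
        try:
            handler = getattr(self, name)
        except AttributeError:
            return '451 Temporary Local Problem: Please come back later'
        return handler()

handler = CommandHandler()
print(handler.dispatch('MAIL FROM'))  # 250 OK
print(handler.dispatch('BDAT'))       # 451 Temporary Local Problem: ...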
IceflowRE/unidown
unidown/core/manager.py
https://github.com/IceflowRE/unidown/blob/2a6f82ab780bb825668bfc55b67c11c4f72ec05c/unidown/core/manager.py#L66-L111
def download_from_plugin(plugin: APlugin): """ Download routine. 1. get newest update time 2. load savestate 3. compare last update time with savestate time 4. get download links 5. compare with savestate 6. download new/updated data 7. check downloads 8. update savestate 9. write new savestate :param plugin: plugin :type plugin: ~unidown.plugin.a_plugin.APlugin """ # get last update date plugin.log.info('Get last update') plugin.update_last_update() # load old save state save_state = plugin.load_save_state() if plugin.last_update <= save_state.last_update: plugin.log.info('No update. Nothing to do.') return # get download links plugin.log.info('Get download links') plugin.update_download_links() # compare with save state down_link_item_dict = plugin.get_updated_data(save_state.link_item_dict) plugin.log.info('Compared with save state: ' + str(len(plugin.download_data))) if not down_link_item_dict: plugin.log.info('No new data. Nothing to do.') return # download new/updated data plugin.log.info(f"Download new {plugin.unit}s: {len(down_link_item_dict)}") plugin.download(down_link_item_dict, plugin.download_path, 'Download new ' + plugin.unit + 's', plugin.unit) # check which downloads are succeeded succeed_link_item_dict, lost_link_item_dict = plugin.check_download(down_link_item_dict, plugin.download_path) plugin.log.info(f"Downloaded: {len(succeed_link_item_dict)}/{len(down_link_item_dict)}") # update savestate link_item_dict with succeeded downloads dict plugin.log.info('Update savestate') plugin.update_dict(save_state.link_item_dict, succeed_link_item_dict) # write new savestate plugin.log.info('Write savestate') plugin.save_save_state(save_state.link_item_dict)
[ "def", "download_from_plugin", "(", "plugin", ":", "APlugin", ")", ":", "# get last update date", "plugin", ".", "log", ".", "info", "(", "'Get last update'", ")", "plugin", ".", "update_last_update", "(", ")", "# load old save state", "save_state", "=", "plugin", ".", "load_save_state", "(", ")", "if", "plugin", ".", "last_update", "<=", "save_state", ".", "last_update", ":", "plugin", ".", "log", ".", "info", "(", "'No update. Nothing to do.'", ")", "return", "# get download links", "plugin", ".", "log", ".", "info", "(", "'Get download links'", ")", "plugin", ".", "update_download_links", "(", ")", "# compare with save state", "down_link_item_dict", "=", "plugin", ".", "get_updated_data", "(", "save_state", ".", "link_item_dict", ")", "plugin", ".", "log", ".", "info", "(", "'Compared with save state: '", "+", "str", "(", "len", "(", "plugin", ".", "download_data", ")", ")", ")", "if", "not", "down_link_item_dict", ":", "plugin", ".", "log", ".", "info", "(", "'No new data. Nothing to do.'", ")", "return", "# download new/updated data", "plugin", ".", "log", ".", "info", "(", "f\"Download new {plugin.unit}s: {len(down_link_item_dict)}\"", ")", "plugin", ".", "download", "(", "down_link_item_dict", ",", "plugin", ".", "download_path", ",", "'Download new '", "+", "plugin", ".", "unit", "+", "'s'", ",", "plugin", ".", "unit", ")", "# check which downloads are succeeded", "succeed_link_item_dict", ",", "lost_link_item_dict", "=", "plugin", ".", "check_download", "(", "down_link_item_dict", ",", "plugin", ".", "download_path", ")", "plugin", ".", "log", ".", "info", "(", "f\"Downloaded: {len(succeed_link_item_dict)}/{len(down_link_item_dict)}\"", ")", "# update savestate link_item_dict with succeeded downloads dict", "plugin", ".", "log", ".", "info", "(", "'Update savestate'", ")", "plugin", ".", "update_dict", "(", "save_state", ".", "link_item_dict", ",", "succeed_link_item_dict", ")", "# write new savestate", "plugin", ".", "log", ".", "info", "(", "'Write savestate'", ")", "plugin", ".", "save_save_state", "(", "save_state", ".", "link_item_dict", ")" ]
Download routine. 1. get newest update time 2. load savestate 3. compare last update time with savestate time 4. get download links 5. compare with savestate 6. download new/updated data 7. check downloads 8. update savestate 9. write new savestate :param plugin: plugin :type plugin: ~unidown.plugin.a_plugin.APlugin
[ "Download", "routine", "." ]
python
train
39.956522
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L688-L690
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0): """Patched 'getaddrinfo' with default family IPv4 (enabled by settings IPV4_ONLY=True)""" return orig_getaddrinfo(host, port, family, socktype, proto, flags)
[ "def", "getaddrinfo_wrapper", "(", "host", ",", "port", ",", "family", "=", "socket", ".", "AF_INET", ",", "socktype", "=", "0", ",", "proto", "=", "0", ",", "flags", "=", "0", ")", ":", "return", "orig_getaddrinfo", "(", "host", ",", "port", ",", "family", ",", "socktype", ",", "proto", ",", "flags", ")" ]
Patched 'getaddrinfo' with default family IPv4 (enabled by settings IPV4_ONLY=True)
[ "Patched", "getaddrinfo", "with", "default", "family", "IPv4", "(", "enabled", "by", "settings", "IPV4_ONLY", "=", "True", ")" ]
python
train
84.333333
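A usage sketch showing how the wrapper in the record above is applied: capture the original function, then monkey-patch socket.getaddrinfo with the IPv4-defaulting version:

import socket

orig_getaddrinfo = socket.getaddrinfo

def getaddrinfo_ipv4(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
    # identical signature, but the family default becomes AF_INET
    return orig_getaddrinfo(host, port, family, socktype, proto, flags)

socket.getaddrinfo = getaddrinfo_ipv4  # callers that omit family now get IPv4 only
infos = socket.getaddrinfo('localhost', 80)
print(all(family == socket.AF_INET for family, *_ in infos))  # True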
Metatab/geoid
geoid/civick.py
https://github.com/Metatab/geoid/blob/4b7769406b00e59376fb6046b42a2f8ed706b33b/geoid/civick.py#L41-L46
def summarize(self): """Convert all of the values to their max values. This form is used to represent the summary level""" s = str(self.allval()) return self.parse(s[:2]+ ''.join(['Z']*len(s[2:])))
[ "def", "summarize", "(", "self", ")", ":", "s", "=", "str", "(", "self", ".", "allval", "(", ")", ")", "return", "self", ".", "parse", "(", "s", "[", ":", "2", "]", "+", "''", ".", "join", "(", "[", "'Z'", "]", "*", "len", "(", "s", "[", "2", ":", "]", ")", ")", ")" ]
Convert all of the values to their max values. This form is used to represent the summary level
[ "Convert", "all", "of", "the", "values", "to", "their", "max", "values", ".", "This", "form", "is", "used", "to", "represent", "the", "summary", "level" ]
python
train
36.333333
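The string transformation in the summarize record above, isolated and runnable; allval() and parse() from the record are not reproduced, only the masking step is shown:

def summarize(geoid_str):
    # keep the 2-character summary-level prefix, mask the rest with 'Z'
    return geoid_str[:2] + 'Z' * len(geoid_str[2:])

print(summarize('4a0012300'))  # 4aZZZZZZZ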