Dataset columns:
    repo              stringlengths 7-55
    path              stringlengths 4-223
    url               stringlengths 87-315
    code              stringlengths 75-104k
    code_tokens       list
    docstring         stringlengths 1-46.9k
    docstring_tokens  list
    language          stringclasses (1 value)
    partition         stringclasses (3 values)
    avg_line_len      float64 7.91-980
jslang/responsys
responsys/client.py
https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/client.py#L304-L321
def delete_profile_extension_members(self, profile_extension, query_column, ids_to_delete):
    """ Responsys.deleteProfileExtensionMembers call

    Accepts:
        InteractObject profile_extension
        string query_column
        list ids_to_delete

    Returns list of DeleteResults
    """
    profile_extension = profile_extension.get_soap_object(self.client)
    result = self.call(
        'deleteProfileExtensionMembers', profile_extension, query_column, ids_to_delete)
    if hasattr(result, '__iter__'):
        return [DeleteResult(delete_result) for delete_result in result]
    return [DeleteResult(result)]
[ "def", "delete_profile_extension_members", "(", "self", ",", "profile_extension", ",", "query_column", ",", "ids_to_delete", ")", ":", "profile_extension", "=", "profile_extension", ".", "get_soap_object", "(", "self", ".", "client", ")", "result", "=", "self", ".", "call", "(", "'deleteProfileExtensionMembers'", ",", "profile_extension", ",", "query_column", ",", "ids_to_delete", ")", "if", "hasattr", "(", "result", ",", "'__iter__'", ")", ":", "return", "[", "DeleteResult", "(", "delete_result", ")", "for", "delete_result", "in", "result", "]", "return", "[", "DeleteResult", "(", "result", ")", "]" ]
Responsys.deleteProfileExtensionMembers call

Accepts:
    InteractObject profile_extension
    string query_column
    list ids_to_delete

Returns list of DeleteResults
[ "Responsys", ".", "deleteProfileExtensionMembers", "call" ]
python
train
40.055556
buildbot/buildbot
master/buildbot/www/change_hook.py
https://github.com/buildbot/buildbot/blob/5df3cfae6d760557d99156633c32b1822a1e130c/master/buildbot/www/change_hook.py#L113-L134
def makeHandler(self, dialect):
    """create and cache the handler object for this dialect"""
    if dialect not in self.dialects:
        m = "The dialect specified, '{}', wasn't whitelisted in change_hook".format(dialect)
        log.msg(m)
        log.msg(
            "Note: if dialect is 'base' then it's possible your URL is malformed and we didn't regex it properly")
        raise ValueError(m)

    if dialect not in self._dialect_handlers:
        if dialect not in self._plugins:
            m = "The dialect specified, '{}', is not registered as a buildbot.webhook plugin".format(dialect)
            log.msg(m)
            raise ValueError(m)

        options = self.dialects[dialect]
        if isinstance(options, dict) and 'custom_class' in options:
            klass = options['custom_class']
        else:
            klass = self._plugins.get(dialect)
        self._dialect_handlers[dialect] = klass(self.master, self.dialects[dialect])

    return self._dialect_handlers[dialect]
[ "def", "makeHandler", "(", "self", ",", "dialect", ")", ":", "if", "dialect", "not", "in", "self", ".", "dialects", ":", "m", "=", "\"The dialect specified, '{}', wasn't whitelisted in change_hook\"", ".", "format", "(", "dialect", ")", "log", ".", "msg", "(", "m", ")", "log", ".", "msg", "(", "\"Note: if dialect is 'base' then it's possible your URL is malformed and we didn't regex it properly\"", ")", "raise", "ValueError", "(", "m", ")", "if", "dialect", "not", "in", "self", ".", "_dialect_handlers", ":", "if", "dialect", "not", "in", "self", ".", "_plugins", ":", "m", "=", "\"The dialect specified, '{}', is not registered as a buildbot.webhook plugin\"", ".", "format", "(", "dialect", ")", "log", ".", "msg", "(", "m", ")", "raise", "ValueError", "(", "m", ")", "options", "=", "self", ".", "dialects", "[", "dialect", "]", "if", "isinstance", "(", "options", ",", "dict", ")", "and", "'custom_class'", "in", "options", ":", "klass", "=", "options", "[", "'custom_class'", "]", "else", ":", "klass", "=", "self", ".", "_plugins", ".", "get", "(", "dialect", ")", "self", ".", "_dialect_handlers", "[", "dialect", "]", "=", "klass", "(", "self", ".", "master", ",", "self", ".", "dialects", "[", "dialect", "]", ")", "return", "self", ".", "_dialect_handlers", "[", "dialect", "]" ]
create and cache the handler object for this dialect
[ "create", "and", "cache", "the", "handler", "object", "for", "this", "dialect" ]
python
train
47.909091
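makeHandler above is a lazy create-and-cache dispatcher: the handler class is resolved once (a custom_class from the options dict if present, otherwise the registered plugin) and the built instance is memoized per dialect. A self-contained sketch of the same pattern, with hypothetical names:

class HandlerRegistry:
    """Create handler objects on first use and cache them by name."""

    def __init__(self, factories):
        self._factories = factories  # name -> zero-arg factory callable
        self._cache = {}             # name -> built instance

    def get(self, name):
        if name not in self._factories:
            raise ValueError('unknown handler: {}'.format(name))
        if name not in self._cache:
            self._cache[name] = self._factories[name]()
        return self._cache[name]

registry = HandlerRegistry({'github': dict, 'gitlab': list})
assert registry.get('github') is registry.get('github')  # built once, then cached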
androguard/androguard
androguard/core/bytecodes/axml/__init__.py
https://github.com/androguard/androguard/blob/984c0d981be2950cf0451e484f7b0d4d53bc4911/androguard/core/bytecodes/axml/__init__.py#L2684-L2692
def format_value(self):
    """
    Return the formatted (interpreted) data according to `data_type`.
    """
    return format_value(
        self.data_type,
        self.data,
        self.parent.stringpool_main.getString
    )
[ "def", "format_value", "(", "self", ")", ":", "return", "format_value", "(", "self", ".", "data_type", ",", "self", ".", "data", ",", "self", ".", "parent", ".", "stringpool_main", ".", "getString", ")" ]
Return the formatted (interpreted) data according to `data_type`.
[ "Return", "the", "formatted", "(", "interpreted", ")", "data", "according", "to", "data_type", "." ]
python
train
28.111111
python-diamond/Diamond
src/collectors/nagiosperfdata/nagiosperfdata.py
https://github.com/python-diamond/Diamond/blob/0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47/src/collectors/nagiosperfdata/nagiosperfdata.py#L152-L172
def _normalize_to_unit(self, value, unit):
    """Normalize the value to the unit returned.

    We use base-1000 for second-based units, and base-1024 for
    byte-based units. Sadly, the Nagios-Plugins specification doesn't
    disambiguate base-1000 (KB) and base-1024 (KiB).
    """
    if unit == 'ms':
        return value / 1000.0
    if unit == 'us':
        return value / 1000000.0
    if unit == 'KB':
        return value * 1024
    if unit == 'MB':
        return value * 1024 * 1024
    if unit == 'GB':
        return value * 1024 * 1024 * 1024
    if unit == 'TB':
        return value * 1024 * 1024 * 1024 * 1024
    return value
[ "def", "_normalize_to_unit", "(", "self", ",", "value", ",", "unit", ")", ":", "if", "unit", "==", "'ms'", ":", "return", "value", "/", "1000.0", "if", "unit", "==", "'us'", ":", "return", "value", "/", "1000000.0", "if", "unit", "==", "'KB'", ":", "return", "value", "*", "1024", "if", "unit", "==", "'MB'", ":", "return", "value", "*", "1024", "*", "1024", "if", "unit", "==", "'GB'", ":", "return", "value", "*", "1024", "*", "1024", "*", "1024", "if", "unit", "==", "'TB'", ":", "return", "value", "*", "1024", "*", "1024", "*", "1024", "*", "1024", "return", "value" ]
Normalize the value to the unit returned. We use base-1000 for second-based units, and base-1024 for byte-based units. Sadly, the Nagios-Plugins specification doesn't disambiguate base-1000 (KB) and base-1024 (KiB).
[ "Normalize", "the", "value", "to", "the", "unit", "returned", "." ]
python
train
33.285714
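A quick worked check of the conversion rules in _normalize_to_unit (plain arithmetic, mirroring the branches above):

# time units are base-1000: 1500 ms -> 1.5 s
assert 1500 / 1000.0 == 1.5
# byte units are base-1024 despite the 'KB'/'MB' labels: 2 KB -> 2048 bytes
assert 2 * 1024 == 2048
# 3 GB -> 3221225472 bytes
assert 3 * 1024 ** 3 == 3221225472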
estnltk/estnltk
estnltk/wordnet/eurown.py
https://github.com/estnltk/estnltk/blob/28ae334a68a0673072febc318635f04da0dcc54a/estnltk/wordnet/eurown.py#L2037-L2045
def addInternalLink(self, link):
    '''Appends InternalLink
    '''
    if isinstance(link, InternalLink):
        self.internalLinks.append(link)
    else:
        raise InternalLinkError(
            'link Type should be InternalLink, not %s' % type(link))
[ "def", "addInternalLink", "(", "self", ",", "link", ")", ":", "if", "isinstance", "(", "link", ",", "InternalLink", ")", ":", "self", ".", "internalLinks", ".", "append", "(", "link", ")", "else", ":", "raise", "InternalLinkError", "(", "'link Type should be InternalLink, not %s'", "%", "type", "(", "link", ")", ")" ]
Appends InternalLink
[ "Appends", "InternalLink" ]
python
train
32.111111
mikedh/trimesh
trimesh/viewer/trackball.py
https://github.com/mikedh/trimesh/blob/25e059bf6d4caa74f62ffd58ce4f61a90ee4e518/trimesh/viewer/trackball.py#L220-L240
def rotate(self, azimuth, axis=None):
    """Rotate the trackball about the "Up" axis by azimuth radians.

    Parameters
    ----------
    azimuth : float
        The number of radians to rotate.
    axis : (3,) float or None
        Optional axis to rotate about; if None, the y-axis of the
        current pose is used.
    """
    target = self._target

    y_axis = self._n_pose[:3, 1].flatten()
    if axis is not None:
        y_axis = axis
    x_rot_mat = transformations.rotation_matrix(azimuth, y_axis, target)
    self._n_pose = x_rot_mat.dot(self._n_pose)

    y_axis = self._pose[:3, 1].flatten()
    if axis is not None:
        y_axis = axis
    x_rot_mat = transformations.rotation_matrix(azimuth, y_axis, target)
    self._pose = x_rot_mat.dot(self._pose)
[ "def", "rotate", "(", "self", ",", "azimuth", ",", "axis", "=", "None", ")", ":", "target", "=", "self", ".", "_target", "y_axis", "=", "self", ".", "_n_pose", "[", ":", "3", ",", "1", "]", ".", "flatten", "(", ")", "if", "axis", "is", "not", "None", ":", "y_axis", "=", "axis", "x_rot_mat", "=", "transformations", ".", "rotation_matrix", "(", "azimuth", ",", "y_axis", ",", "target", ")", "self", ".", "_n_pose", "=", "x_rot_mat", ".", "dot", "(", "self", ".", "_n_pose", ")", "y_axis", "=", "self", ".", "_pose", "[", ":", "3", ",", "1", "]", ".", "flatten", "(", ")", "if", "axis", "is", "not", "None", ":", "y_axis", "=", "axis", "x_rot_mat", "=", "transformations", ".", "rotation_matrix", "(", "azimuth", ",", "y_axis", ",", "target", ")", "self", ".", "_pose", "=", "x_rot_mat", ".", "dot", "(", "self", ".", "_pose", ")" ]
Rotate the trackball about the "Up" axis by azimuth radians.

Parameters
----------
azimuth : float
    The number of radians to rotate.
axis : (3,) float or None
    Optional axis to rotate about; if None, the y-axis of the current
    pose is used.
[ "Rotate", "the", "trackball", "about", "the", "Up", "axis", "by", "azimuth", "radians", "." ]
python
train
33.095238
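rotate applies the same azimuthal rotation to both stored poses. A minimal sketch of the underlying transform using trimesh.transformations directly (standalone; the pose and target here are made up):

import numpy as np
from trimesh import transformations

pose = np.eye(4)                # a camera pose as a homogeneous 4x4 matrix
y_axis = pose[:3, 1].flatten()  # the pose's "Up" axis, as in the method above
target = np.zeros(3)            # the point to orbit around

# rotate the pose by 90 degrees about the up axis, pivoting on the target
rot = transformations.rotation_matrix(np.pi / 2, y_axis, target)
new_pose = rot.dot(pose)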
daler/trackhub
trackhub/track.py
https://github.com/daler/trackhub/blob/e4655f79177822529f80b923df117e38e28df702/trackhub/track.py#L454-L472
def add_subgroups(self, subgroups):
    """
    Add a list of SubGroupDefinition objects to this composite.

    Note that in contrast to :meth:`BaseTrack`, which takes a single
    dictionary indicating the particular subgroups for the track, this
    method takes a list of :class:`SubGroupDefinition` objects
    representing the allowed subgroups for the composite.

    :param subgroups:
        List of SubGroupDefinition objects.
    """
    if subgroups is None:
        subgroups = {}

    _subgroups = {}
    for sg in subgroups:
        assert isinstance(sg, SubGroupDefinition)
        _subgroups[sg.name] = sg

    self.subgroups = _subgroups
[ "def", "add_subgroups", "(", "self", ",", "subgroups", ")", ":", "if", "subgroups", "is", "None", ":", "subgroups", "=", "{", "}", "_subgroups", "=", "{", "}", "for", "sg", "in", "subgroups", ":", "assert", "isinstance", "(", "sg", ",", "SubGroupDefinition", ")", "_subgroups", "[", "sg", ".", "name", "]", "=", "sg", "self", ".", "subgroups", "=", "_subgroups" ]
Add a list of SubGroupDefinition objects to this composite.

Note that in contrast to :meth:`BaseTrack`, which takes a single
dictionary indicating the particular subgroups for the track, this
method takes a list of :class:`SubGroupDefinition` objects representing
the allowed subgroups for the composite.

:param subgroups:
    List of SubGroupDefinition objects.
[ "Add", "a", "list", "of", "SubGroupDefinition", "objects", "to", "this", "composite", "." ]
python
train
36.789474
lk-geimfari/mimesis
mimesis/providers/numbers.py
https://github.com/lk-geimfari/mimesis/blob/4b16ee7a8dba6281a904654a88dbb4b052869fc5/mimesis/providers/numbers.py#L20-L28
def floats(self, n: int = 2) -> List[float]:
    """Generate a list of random float numbers.

    :param n: Raise 10 to the 'n' power; the list contains 10**n numbers.
    :return: The list of floating-point numbers.
    """
    nums = [self.random.random()
            for _ in range(10 ** int(n))]
    return nums
[ "def", "floats", "(", "self", ",", "n", ":", "int", "=", "2", ")", "->", "List", "[", "float", "]", ":", "nums", "=", "[", "self", ".", "random", ".", "random", "(", ")", "for", "_", "in", "range", "(", "10", "**", "int", "(", "n", ")", ")", "]", "return", "nums" ]
Generate a list of random float numbers.

:param n: Raise 10 to the 'n' power; the list contains 10**n numbers.
:return: The list of floating-point numbers.
[ "Generate", "a", "list", "of", "random", "float", "numbers", "." ]
python
train
33.555556
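Usage sketch, assuming the provider version pinned above (newer mimesis releases changed this signature):

from mimesis import Numbers

numbers = Numbers()
values = numbers.floats(n=3)  # 10 ** 3 = 1000 floats in [0, 1)
assert len(values) == 1000
assert all(0.0 <= v < 1.0 for v in values)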
cuihantao/andes
andes/variables/dae.py
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L582-L595
def add_jac(self, m, val, row, col):
    """Add tuples (val, row, col) to the Jacobian matrix ``m``

    Implemented in numpy.arrays for temporary storage.
    """
    assert m in ('Fx', 'Fy', 'Gx', 'Gy', 'Fx0', 'Fy0', 'Gx0', 'Gy0'), \
        'Wrong Jacobian matrix name <{0}>'.format(m)

    if isinstance(val, (int, float)):
        val = val * ones(len(row), 1)

    self._temp[m]['I'] = matrix([self._temp[m]['I'], matrix(row)])
    self._temp[m]['J'] = matrix([self._temp[m]['J'], matrix(col)])
    self._temp[m]['V'] = matrix([self._temp[m]['V'], matrix(val)])
[ "def", "add_jac", "(", "self", ",", "m", ",", "val", ",", "row", ",", "col", ")", ":", "assert", "m", "in", "(", "'Fx'", ",", "'Fy'", ",", "'Gx'", ",", "'Gy'", ",", "'Fx0'", ",", "'Fy0'", ",", "'Gx0'", ",", "'Gy0'", ")", ",", "'Wrong Jacobian matrix name <{0}>'", ".", "format", "(", "m", ")", "if", "isinstance", "(", "val", ",", "(", "int", ",", "float", ")", ")", ":", "val", "=", "val", "*", "ones", "(", "len", "(", "row", ")", ",", "1", ")", "self", ".", "_temp", "[", "m", "]", "[", "'I'", "]", "=", "matrix", "(", "[", "self", ".", "_temp", "[", "m", "]", "[", "'I'", "]", ",", "matrix", "(", "row", ")", "]", ")", "self", ".", "_temp", "[", "m", "]", "[", "'J'", "]", "=", "matrix", "(", "[", "self", ".", "_temp", "[", "m", "]", "[", "'J'", "]", ",", "matrix", "(", "col", ")", "]", ")", "self", ".", "_temp", "[", "m", "]", "[", "'V'", "]", "=", "matrix", "(", "[", "self", ".", "_temp", "[", "m", "]", "[", "'V'", "]", ",", "matrix", "(", "val", ")", "]", ")" ]
Add tuples (val, row, col) to the Jacobian matrix ``m``

Implemented in numpy.arrays for temporary storage.
[ "Add", "tuples", "(", "val", "row", "col", ")", "to", "the", "Jacobian", "matrix", "m" ]
python
train
42.428571
zalando/patroni
patroni/utils.py
https://github.com/zalando/patroni/blob/f6d29081c90af52064b981cdd877a07338d86038/patroni/utils.py#L234-L238
def copy(self):
    """Return a clone of this retry manager"""
    return Retry(max_tries=self.max_tries, delay=self.delay,
                 backoff=self.backoff, max_jitter=self.max_jitter / 100.0,
                 max_delay=self.max_delay, sleep_func=self.sleep_func,
                 deadline=self.deadline, retry_exceptions=self.retry_exceptions)
[ "def", "copy", "(", "self", ")", ":", "return", "Retry", "(", "max_tries", "=", "self", ".", "max_tries", ",", "delay", "=", "self", ".", "delay", ",", "backoff", "=", "self", ".", "backoff", ",", "max_jitter", "=", "self", ".", "max_jitter", "/", "100.0", ",", "max_delay", "=", "self", ".", "max_delay", ",", "sleep_func", "=", "self", ".", "sleep_func", ",", "deadline", "=", "self", ".", "deadline", ",", "retry_exceptions", "=", "self", ".", "retry_exceptions", ")" ]
Return a clone of this retry manager
[ "Return", "a", "clone", "of", "this", "retry", "manager" ]
python
train
69
CS207-Final-Project-Group-10/cs207-FinalProject
solar_system/eight_planets.py
https://github.com/CS207-Final-Project-Group-10/cs207-FinalProject/blob/842e9c2d3ca1490cef18c086dfde81856d8d3a82/solar_system/eight_planets.py#L365-L380
def accel_fl(q: np.ndarray):
    """Acceleration in the earth-sun system using Fluxion potential energy"""
    # Infer number of dimensions from q
    dims: int = len(q)
    # Number of celestial bodies
    B: int = dims // 3
    # The force given the positions q of the bodies
    f = force(q)
    # The accelerations from this force
    a = np.zeros(dims)
    for i in range(B):
        a[slices[i]] = f[slices[i]] / mass[i]
    return a
[ "def", "accel_fl", "(", "q", ":", "np", ".", "ndarray", ")", ":", "# Infer number of dimensions from q", "dims", ":", "int", "=", "len", "(", "q", ")", "# Number of celestial bodies", "B", ":", "int", "=", "dims", "//", "3", "# The force given the positions q of the bodies", "f", "=", "force", "(", "q", ")", "# The accelerations from this force", "a", "=", "np", ".", "zeros", "(", "dims", ")", "for", "i", "in", "range", "(", "B", ")", ":", "a", "[", "slices", "[", "i", "]", "]", "=", "f", "[", "slices", "[", "i", "]", "]", "/", "mass", "[", "i", "]", "return", "a" ]
Acceleration in the earth-sun system using Fluxion potential energy
[ "Acceleration", "in", "the", "earth", "-", "sun", "system", "using", "Fluxion", "potential", "energy" ]
python
train
26.9375
novopl/peltak
src/peltak/commands/lint.py
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/commands/lint.py#L54-L90
def lint_cli(ctx, exclude, skip_untracked, commit_only):
    # type: (click.Context, List[str], bool, bool) -> None
    """ Run pep8 and pylint on all project files.

    You can configure the linting paths using the lint.paths config variable.
    This should be a list of paths that will be linted. If a path to a
    directory is given, all files in that directory and its subdirectories
    will be used.

    The pep8 and pylint config paths are by default stored in
    ops/tools/pep8.ini and ops/tools/pylint.ini. You can customise those
    paths in your config with lint.pep8_cfg and lint.pylint_cfg variables.

    **Config Example**::

        \b
        lint:
          pylint_cfg: 'ops/tools/pylint.ini'
          pep8_cfg: 'ops/tools/pep8.ini'
          paths:
            - 'src/mypkg'

    **Examples**::

        \b
        $ peltak lint               # Run linter in default mode, skip untracked
        $ peltak lint --commit      # Lint only files staged for commit
        $ peltak lint --all         # Lint all files, including untracked.
        $ peltak lint --pretend     # Print the list of files to lint
        $ peltak lint -e "*.tox*"   # Don't lint files inside .tox directory

    """
    if ctx.invoked_subcommand:
        return

    from peltak.logic import lint
    lint.lint(exclude, skip_untracked, commit_only)
[ "def", "lint_cli", "(", "ctx", ",", "exclude", ",", "skip_untracked", ",", "commit_only", ")", ":", "# type: (click.Context, List[str], bool, bool) -> None", "if", "ctx", ".", "invoked_subcommand", ":", "return", "from", "peltak", ".", "logic", "import", "lint", "lint", ".", "lint", "(", "exclude", ",", "skip_untracked", ",", "commit_only", ")" ]
Run pep8 and pylint on all project files.

You can configure the linting paths using the lint.paths config variable.
This should be a list of paths that will be linted. If a path to a
directory is given, all files in that directory and its subdirectories
will be used.

The pep8 and pylint config paths are by default stored in
ops/tools/pep8.ini and ops/tools/pylint.ini. You can customise those
paths in your config with lint.pep8_cfg and lint.pylint_cfg variables.

**Config Example**::

    \b
    lint:
      pylint_cfg: 'ops/tools/pylint.ini'
      pep8_cfg: 'ops/tools/pep8.ini'
      paths:
        - 'src/mypkg'

**Examples**::

    \b
    $ peltak lint               # Run linter in default mode, skip untracked
    $ peltak lint --commit      # Lint only files staged for commit
    $ peltak lint --all         # Lint all files, including untracked.
    $ peltak lint --pretend     # Print the list of files to lint
    $ peltak lint -e "*.tox*"   # Don't lint files inside .tox directory
[ "Run", "pep8", "and", "pylint", "on", "all", "project", "files", "." ]
python
train
35.513514
openstax/cnx-litezip
litezip/validate.py
https://github.com/openstax/cnx-litezip/blob/5e613f486f29fe350999d6b990d32847ac16a1b8/litezip/validate.py#L26-L33
def validate_content(*objs):
    """Runs the correct validator for given `obj`ects. Assumes all same type"""
    from .main import Collection, Module
    validator = {
        Collection: cnxml.validate_collxml,
        Module: cnxml.validate_cnxml,
    }[type(objs[0])]
    return validator(*[obj.file for obj in objs])
[ "def", "validate_content", "(", "*", "objs", ")", ":", "from", ".", "main", "import", "Collection", ",", "Module", "validator", "=", "{", "Collection", ":", "cnxml", ".", "validate_collxml", ",", "Module", ":", "cnxml", ".", "validate_cnxml", ",", "}", "[", "type", "(", "objs", "[", "0", "]", ")", "]", "return", "validator", "(", "*", "[", "obj", ".", "file", "for", "obj", "in", "objs", "]", ")" ]
Runs the correct validator for given `obj`ects. Assumes all same type
[ "Runs", "the", "correct", "validator", "for", "given", "obj", "ects", ".", "Assumes", "all", "same", "type" ]
python
valid
39.125
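validate_content dispatches on the type of its first argument through a dict lookup, and a KeyError signals an unsupported type. The same pattern in a self-contained sketch (hypothetical validators):

def validate_all(*objs):
    # type -> validator; a KeyError means the type is unsupported
    validators = {
        int: lambda xs: all(x >= 0 for x in xs),
        str: lambda xs: all(x.strip() for x in xs),
    }
    validator = validators[type(objs[0])]  # assumes all objs share one type
    return validator(objs)

assert validate_all(1, 2, 3) is True
assert validate_all('a', 'b') is True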
QuantEcon/QuantEcon.py
quantecon/game_theory/normal_form_game.py
https://github.com/QuantEcon/QuantEcon.py/blob/26a66c552f2a73967d7efb6e1f4b4c4985a12643/quantecon/game_theory/normal_form_game.py#L276-L309
def is_best_response(self, own_action, opponents_actions, tol=None):
    """
    Return True if `own_action` is a best response to `opponents_actions`.

    Parameters
    ----------
    own_action : scalar(int) or array_like(float, ndim=1)
        An integer representing a pure action, or an array of floats
        representing a mixed action.

    opponents_actions : see `best_response`

    tol : scalar(float), optional(default=None)
        Tolerance level used in determining best responses. If None,
        default to the value of the `tol` attribute.

    Returns
    -------
    bool
        True if `own_action` is a best response to `opponents_actions`;
        False otherwise.

    """
    if tol is None:
        tol = self.tol

    payoff_vector = self.payoff_vector(opponents_actions)
    payoff_max = payoff_vector.max()

    if isinstance(own_action, numbers.Integral):
        return payoff_vector[own_action] >= payoff_max - tol
    else:
        return np.dot(own_action, payoff_vector) >= payoff_max - tol
[ "def", "is_best_response", "(", "self", ",", "own_action", ",", "opponents_actions", ",", "tol", "=", "None", ")", ":", "if", "tol", "is", "None", ":", "tol", "=", "self", ".", "tol", "payoff_vector", "=", "self", ".", "payoff_vector", "(", "opponents_actions", ")", "payoff_max", "=", "payoff_vector", ".", "max", "(", ")", "if", "isinstance", "(", "own_action", ",", "numbers", ".", "Integral", ")", ":", "return", "payoff_vector", "[", "own_action", "]", ">=", "payoff_max", "-", "tol", "else", ":", "return", "np", ".", "dot", "(", "own_action", ",", "payoff_vector", ")", ">=", "payoff_max", "-", "tol" ]
Return True if `own_action` is a best response to `opponents_actions`.

Parameters
----------
own_action : scalar(int) or array_like(float, ndim=1)
    An integer representing a pure action, or an array of floats
    representing a mixed action.

opponents_actions : see `best_response`

tol : scalar(float), optional(default=None)
    Tolerance level used in determining best responses. If None,
    default to the value of the `tol` attribute.

Returns
-------
bool
    True if `own_action` is a best response to `opponents_actions`;
    False otherwise.
[ "Return", "True", "if", "own_action", "is", "a", "best", "response", "to", "opponents_actions", "." ]
python
train
32.529412
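A usage sketch with quantecon (the payoff matrix is made up; rows index the player's own actions):

import numpy as np
from quantecon.game_theory import Player

player = Player(np.array([[3, 0],
                          [1, 1]]))

# Against an opponent mixing 50/50 the expected payoffs are [1.5, 1.0],
# so pure action 0 is the unique best response.
assert player.is_best_response(0, [0.5, 0.5])
assert not player.is_best_response(1, [0.5, 0.5])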
klahnakoski/mo-files
mo_files/__init__.py
https://github.com/klahnakoski/mo-files/blob/f6974a997cdc9fdabccb60c19edee13356a5787a/mo_files/__init__.py#L192-L202
def set_name(self, name):
    """
    RETURN NEW FILE WITH GIVEN NAME (EXTENSION IS PRESERVED)
    """
    path = self._filename.split("/")
    parts = path[-1].split(".")
    if len(parts) == 1:
        path[-1] = name
    else:
        path[-1] = name + "." + parts[-1]
    return File("/".join(path))
[ "def", "set_name", "(", "self", ",", "name", ")", ":", "path", "=", "self", ".", "_filename", ".", "split", "(", "\"/\"", ")", "parts", "=", "path", "[", "-", "1", "]", ".", "split", "(", "\".\"", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "path", "[", "-", "1", "]", "=", "name", "else", ":", "path", "[", "-", "1", "]", "=", "name", "+", "\".\"", "+", "parts", "[", "-", "1", "]", "return", "File", "(", "\"/\"", ".", "join", "(", "path", ")", ")" ]
RETURN NEW FILE WITH GIVEN NAME (EXTENSION IS PRESERVED)
[ "RETURN", "NEW", "FILE", "WITH", "GIVEN", "NAME", "(", "EXTENSION", "IS", "PRESERVED", ")" ]
python
train
28.454545
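A worked example of the renaming logic, restated as a standalone function (mirrors set_name above; note that only the last suffix is preserved, so 'a.tar.gz' would become 'b.gz'):

def set_name(filename, name):
    path = filename.split('/')
    parts = path[-1].split('.')
    path[-1] = name if len(parts) == 1 else name + '.' + parts[-1]
    return '/'.join(path)

assert set_name('data/report.csv', 'summary') == 'data/summary.csv'
assert set_name('data/README', 'LICENSE') == 'data/LICENSE'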
rdireen/spherepy
spherepy/spherepy.py
https://github.com/rdireen/spherepy/blob/241521401d4d76851d4a1a564a365cfab8e98496/spherepy/spherepy.py#L1294-L1305
def _vector_pattern_uniform_op_right(func):
    """decorator for operator overloading when VectorPatternUniform is on the right"""
    @wraps(func)
    def verif(self, patt):
        if isinstance(patt, numbers.Number):
            return TransversePatternUniform(func(self, self._tdsphere, patt),
                                            func(self, self._pdsphere, patt),
                                            doublesphere=True)
        else:
            raise TypeError(err_msg['no_combi_VP'])
    return verif
[ "def", "_vector_pattern_uniform_op_right", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "verif", "(", "self", ",", "patt", ")", ":", "if", "isinstance", "(", "patt", ",", "numbers", ".", "Number", ")", ":", "return", "TransversePatternUniform", "(", "func", "(", "self", ",", "self", ".", "_tdsphere", ",", "patt", ")", ",", "func", "(", "self", ",", "self", ".", "_pdsphere", ",", "patt", ")", ",", "doublesphere", "=", "True", ")", "else", ":", "raise", "TypeError", "(", "err_msg", "[", "'no_combi_VP'", "]", ")", "return", "verif" ]
decorator for operator overloading when VectorPatternUniform is on the right
[ "decorator", "for", "operator", "overloading", "when", "VectorPatternUniform", "is", "on", "the", "right" ]
python
train
46.75
saltstack/salt
salt/modules/file.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/file.py#L3675-L3699
def statvfs(path):
    '''
    .. versionadded:: 2014.1.0

    Perform a statvfs call against the filesystem that the file resides on

    CLI Example:

    .. code-block:: bash

        salt '*' file.statvfs /path/to/file
    '''
    path = os.path.expanduser(path)
    if not os.path.isabs(path):
        raise SaltInvocationError('File path must be absolute.')
    try:
        stv = os.statvfs(path)
        return dict((key, getattr(stv, key)) for key in
                    ('f_bavail', 'f_bfree', 'f_blocks', 'f_bsize', 'f_favail',
                     'f_ffree', 'f_files', 'f_flag', 'f_frsize', 'f_namemax'))
    except (OSError, IOError):
        raise CommandExecutionError('Could not statvfs \'{0}\''.format(path))
    return False
[ "def", "statvfs", "(", "path", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "if", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "raise", "SaltInvocationError", "(", "'File path must be absolute.'", ")", "try", ":", "stv", "=", "os", ".", "statvfs", "(", "path", ")", "return", "dict", "(", "(", "key", ",", "getattr", "(", "stv", ",", "key", ")", ")", "for", "key", "in", "(", "'f_bavail'", ",", "'f_bfree'", ",", "'f_blocks'", ",", "'f_bsize'", ",", "'f_favail'", ",", "'f_ffree'", ",", "'f_files'", ",", "'f_flag'", ",", "'f_frsize'", ",", "'f_namemax'", ")", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "raise", "CommandExecutionError", "(", "'Could not statvfs \\'{0}\\''", ".", "format", "(", "path", ")", ")", "return", "False" ]
.. versionadded:: 2014.1.0

Perform a statvfs call against the filesystem that the file resides on

CLI Example:

.. code-block:: bash

    salt '*' file.statvfs /path/to/file
[ "..", "versionadded", "::", "2014", ".", "1", ".", "0" ]
python
train
28.16
JensAstrup/pyOutlook
pyOutlook/core/main.py
https://github.com/JensAstrup/pyOutlook/blob/f4ca9d4a8629c0a41f78102ce84fab702a841167/pyOutlook/core/main.py#L243-L254
def get_folders(self):
    """ Returns a list of all folders for this account

    Returns:
        List[:class:`Folder <pyOutlook.core.folder.Folder>`]

    """
    endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/'

    r = requests.get(endpoint, headers=self._headers)

    if check_response(r):
        return Folder._json_to_folders(self, r.json())
[ "def", "get_folders", "(", "self", ")", ":", "endpoint", "=", "'https://outlook.office.com/api/v2.0/me/MailFolders/'", "r", "=", "requests", ".", "get", "(", "endpoint", ",", "headers", "=", "self", ".", "_headers", ")", "if", "check_response", "(", "r", ")", ":", "return", "Folder", ".", "_json_to_folders", "(", "self", ",", "r", ".", "json", "(", ")", ")" ]
Returns a list of all folders for this account

Returns:
    List[:class:`Folder <pyOutlook.core.folder.Folder>`]
[ "Returns", "a", "list", "of", "all", "folders", "for", "this", "account" ]
python
train
32.916667
google/grr
grr/server/grr_response_server/artifact.py
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/artifact.py#L464-L514
def UploadArtifactYamlFile(file_content,
                           overwrite=True,
                           overwrite_system_artifacts=False):
  """Upload a yaml or json file as an artifact to the datastore."""
  loaded_artifacts = []
  registry_obj = artifact_registry.REGISTRY

  # Make sure all artifacts are loaded so we don't accidentally overwrite one.
  registry_obj.GetArtifacts(reload_datastore_artifacts=True)

  new_artifacts = registry_obj.ArtifactsFromYaml(file_content)
  new_artifact_names = set()

  # A quick syntax check before we upload anything.
  for artifact_value in new_artifacts:
    artifact_registry.ValidateSyntax(artifact_value)
    new_artifact_names.add(artifact_value.name)

  # Iterate through each artifact adding it to the collection.
  artifact_coll = artifact_registry.ArtifactCollection(ARTIFACT_STORE_ROOT_URN)
  current_artifacts = list(artifact_coll)

  # We need to remove artifacts we are overwriting.
  filtered_artifacts = [
      art for art in current_artifacts if art.name not in new_artifact_names
  ]

  artifact_coll.Delete()

  with data_store.DB.GetMutationPool() as pool:
    for artifact_value in filtered_artifacts:
      artifact_coll.Add(artifact_value, mutation_pool=pool)

    for artifact_value in new_artifacts:
      registry_obj.RegisterArtifact(
          artifact_value,
          source="datastore:%s" % ARTIFACT_STORE_ROOT_URN,
          overwrite_if_exists=overwrite,
          overwrite_system_artifacts=overwrite_system_artifacts)

      artifact_coll.Add(artifact_value, mutation_pool=pool)

      if data_store.RelationalDBEnabled():
        data_store.REL_DB.WriteArtifact(artifact_value)

      loaded_artifacts.append(artifact_value)

      name = artifact_value.name
      logging.info("Uploaded artifact %s to %s", name, ARTIFACT_STORE_ROOT_URN)

  # Once all artifacts are loaded we can validate dependencies. Note that we
  # do not have to perform a syntax validation because it is already done
  # after YAML is parsed.
  for artifact_value in loaded_artifacts:
    artifact_registry.ValidateDependencies(artifact_value)
[ "def", "UploadArtifactYamlFile", "(", "file_content", ",", "overwrite", "=", "True", ",", "overwrite_system_artifacts", "=", "False", ")", ":", "loaded_artifacts", "=", "[", "]", "registry_obj", "=", "artifact_registry", ".", "REGISTRY", "# Make sure all artifacts are loaded so we don't accidentally overwrite one.", "registry_obj", ".", "GetArtifacts", "(", "reload_datastore_artifacts", "=", "True", ")", "new_artifacts", "=", "registry_obj", ".", "ArtifactsFromYaml", "(", "file_content", ")", "new_artifact_names", "=", "set", "(", ")", "# A quick syntax check before we upload anything.", "for", "artifact_value", "in", "new_artifacts", ":", "artifact_registry", ".", "ValidateSyntax", "(", "artifact_value", ")", "new_artifact_names", ".", "add", "(", "artifact_value", ".", "name", ")", "# Iterate through each artifact adding it to the collection.", "artifact_coll", "=", "artifact_registry", ".", "ArtifactCollection", "(", "ARTIFACT_STORE_ROOT_URN", ")", "current_artifacts", "=", "list", "(", "artifact_coll", ")", "# We need to remove artifacts we are overwriting.", "filtered_artifacts", "=", "[", "art", "for", "art", "in", "current_artifacts", "if", "art", ".", "name", "not", "in", "new_artifact_names", "]", "artifact_coll", ".", "Delete", "(", ")", "with", "data_store", ".", "DB", ".", "GetMutationPool", "(", ")", "as", "pool", ":", "for", "artifact_value", "in", "filtered_artifacts", ":", "artifact_coll", ".", "Add", "(", "artifact_value", ",", "mutation_pool", "=", "pool", ")", "for", "artifact_value", "in", "new_artifacts", ":", "registry_obj", ".", "RegisterArtifact", "(", "artifact_value", ",", "source", "=", "\"datastore:%s\"", "%", "ARTIFACT_STORE_ROOT_URN", ",", "overwrite_if_exists", "=", "overwrite", ",", "overwrite_system_artifacts", "=", "overwrite_system_artifacts", ")", "artifact_coll", ".", "Add", "(", "artifact_value", ",", "mutation_pool", "=", "pool", ")", "if", "data_store", ".", "RelationalDBEnabled", "(", ")", ":", "data_store", ".", "REL_DB", ".", "WriteArtifact", "(", "artifact_value", ")", "loaded_artifacts", ".", "append", "(", "artifact_value", ")", "name", "=", "artifact_value", ".", "name", "logging", ".", "info", "(", "\"Uploaded artifact %s to %s\"", ",", "name", ",", "ARTIFACT_STORE_ROOT_URN", ")", "# Once all artifacts are loaded we can validate dependencies. Note that we do", "# not have to perform a syntax validation because it is already done after", "# YAML is parsed.", "for", "artifact_value", "in", "loaded_artifacts", ":", "artifact_registry", ".", "ValidateDependencies", "(", "artifact_value", ")" ]
Upload a yaml or json file as an artifact to the datastore.
[ "Upload", "a", "yaml", "or", "json", "file", "as", "an", "artifact", "to", "the", "datastore", "." ]
python
train
40.215686
pypa/pipenv
pipenv/vendor/jinja2/environment.py
https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/vendor/jinja2/environment.py#L100-L110
def _environment_sanity_check(environment):
    """Perform a sanity check on the environment."""
    assert issubclass(environment.undefined, Undefined), 'undefined must ' \
        'be a subclass of undefined because filters depend on it.'
    assert environment.block_start_string != \
        environment.variable_start_string != \
        environment.comment_start_string, 'block, variable and comment ' \
        'start strings must be different'
    assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
        'newline_sequence set to unknown line ending string.'
    return environment
[ "def", "_environment_sanity_check", "(", "environment", ")", ":", "assert", "issubclass", "(", "environment", ".", "undefined", ",", "Undefined", ")", ",", "'undefined must '", "'be a subclass of undefined because filters depend on it.'", "assert", "environment", ".", "block_start_string", "!=", "environment", ".", "variable_start_string", "!=", "environment", ".", "comment_start_string", ",", "'block, variable and comment '", "'start strings must be different'", "assert", "environment", ".", "newline_sequence", "in", "(", "'\\r'", ",", "'\\r\\n'", ",", "'\\n'", ")", ",", "'newline_sequence set to unknown line ending string.'", "return", "environment" ]
Perform a sanity check on the environment.
[ "Perform", "a", "sanity", "check", "on", "the", "environment", "." ]
python
train
53.909091
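Note the chained comparison in the second assertion: a != b != c evaluates as (a != b) and (b != c), so the block and comment start strings are never compared to each other directly. A quick demonstration:

# chained != only checks adjacent operands
assert ('{%' != '{{' != '{#') == (('{%' != '{{') and ('{{' != '{#'))
# identical first and third operands would still pass the check
assert ('{#' != '{{' != '{#') is True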
geophysics-ubonn/reda
lib/reda/containers/ERT.py
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/containers/ERT.py#L236-L259
def filter(self, query, inplace=True):
    """Use a query statement to filter data. Note that you specify the
    data to be removed!

    Parameters
    ----------
    query : string
        The query string to be evaluated. Is directly provided to
        pandas.DataFrame.query
    inplace : bool
        if True, change the container dataframe in place (defaults to True)

    Returns
    -------
    result : :py:class:`pandas.DataFrame`
        DataFrame that contains the result of the filter application
    """
    with LogDataChanges(self, filter_action='filter', filter_query=query):
        result = self.data.query(
            'not ({0})'.format(query),
            inplace=inplace,
        )
    return result
[ "def", "filter", "(", "self", ",", "query", ",", "inplace", "=", "True", ")", ":", "with", "LogDataChanges", "(", "self", ",", "filter_action", "=", "'filter'", ",", "filter_query", "=", "query", ")", ":", "result", "=", "self", ".", "data", ".", "query", "(", "'not ({0})'", ".", "format", "(", "query", ")", ",", "inplace", "=", "inplace", ",", ")", "return", "result" ]
Use a query statement to filter data. Note that you specify the data to
be removed!

Parameters
----------
query : string
    The query string to be evaluated. Is directly provided to
    pandas.DataFrame.query
inplace : bool
    if True, change the container dataframe in place (defaults to True)

Returns
-------
result : :py:class:`pandas.DataFrame`
    DataFrame that contains the result of the filter application
[ "Use", "a", "query", "statement", "to", "filter", "data", ".", "Note", "that", "you", "specify", "the", "data", "to", "be", "removed!" ]
python
train
32.583333
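filter negates the query, so rows matching the expression are the ones dropped. The same inversion with plain pandas:

import pandas as pd

df = pd.DataFrame({'r': [1.0, -2.0, 3.0]})
query = 'r < 0'                              # what you want removed
kept = df.query('not ({0})'.format(query))   # keep everything else
assert kept['r'].tolist() == [1.0, 3.0]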
rackerlabs/simpl
simpl/config.py
https://github.com/rackerlabs/simpl/blob/60ed3336a931cd6a7a7246e60f26165d9dc7c99c/simpl/config.py#L958-L976
def main():  # pragma: no cover
    """Simple tests."""
    opts = [
        Option('--foo'),
        Option('--bar'),
        Option('--baz'),
        Option('--key', group='secret', mutually_exclusive=True),
        Option('--key-file', group='secret', mutually_exclusive=True),
        Option('--key-thing', group='secret'),
        Option('--this', group='things'),
        Option('--who', group='group of its own'),
        # Option('--more', mutually_exclusive=True),  # should fail
        Option('--more', mutually_exclusive=True, dest='more'),  # should be ok
        Option('--less', mutually_exclusive=True, dest='more'),  # should be ok
    ]
    myconf = Config(options=opts)
    if len(sys.argv) == 1:
        sys.argv.append('--help')
    myconf.parse()
[ "def", "main", "(", ")", ":", "# pragma: no cover", "opts", "=", "[", "Option", "(", "'--foo'", ")", ",", "Option", "(", "'--bar'", ")", ",", "Option", "(", "'--baz'", ")", ",", "Option", "(", "'--key'", ",", "group", "=", "'secret'", ",", "mutually_exclusive", "=", "True", ")", ",", "Option", "(", "'--key-file'", ",", "group", "=", "'secret'", ",", "mutually_exclusive", "=", "True", ")", ",", "Option", "(", "'--key-thing'", ",", "group", "=", "'secret'", ")", ",", "Option", "(", "'--this'", ",", "group", "=", "'things'", ")", ",", "Option", "(", "'--who'", ",", "group", "=", "'group of its own'", ")", ",", "# Option('--more', mutually_exclusive=True), # should fail", "Option", "(", "'--more'", ",", "mutually_exclusive", "=", "True", ",", "dest", "=", "'more'", ")", ",", "# should be ok", "Option", "(", "'--less'", ",", "mutually_exclusive", "=", "True", ",", "dest", "=", "'more'", ")", ",", "# should be ok", "]", "myconf", "=", "Config", "(", "options", "=", "opts", ")", "if", "len", "(", "sys", ".", "argv", ")", "==", "1", ":", "sys", ".", "argv", ".", "append", "(", "'--help'", ")", "myconf", ".", "parse", "(", ")" ]
Simple tests.
[ "Simple", "tests", "." ]
python
train
39.526316
saltstack/salt
salt/modules/libcloud_loadbalancer.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/libcloud_loadbalancer.py#L203-L228
def destroy_balancer(balancer_id, profile, **libcloud_kwargs):
    '''
    Destroy a load balancer

    :param balancer_id: LoadBalancer ID which should be used
    :type balancer_id: ``str``

    :param profile: The profile key
    :type profile: ``str``

    :param libcloud_kwargs: Extra arguments for the driver's destroy_balancer method
    :type libcloud_kwargs: ``dict``

    :return: ``True`` if the destroy was successful, otherwise ``False``.
    :rtype: ``bool``

    CLI Example:

    .. code-block:: bash

        salt myminion libcloud_loadbalancer.destroy_balancer balancer_1 profile1
    '''
    conn = _get_driver(profile=profile)
    libcloud_kwargs = salt.utils.args.clean_kwargs(**libcloud_kwargs)
    balancer = conn.get_balancer(balancer_id)
    return conn.destroy_balancer(balancer, **libcloud_kwargs)
[ "def", "destroy_balancer", "(", "balancer_id", ",", "profile", ",", "*", "*", "libcloud_kwargs", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "libcloud_kwargs", "=", "salt", ".", "utils", ".", "args", ".", "clean_kwargs", "(", "*", "*", "libcloud_kwargs", ")", "balancer", "=", "conn", ".", "get_balancer", "(", "balancer_id", ")", "return", "conn", ".", "destroy_balancer", "(", "balancer", ",", "*", "*", "libcloud_kwargs", ")" ]
Destroy a load balancer

:param balancer_id: LoadBalancer ID which should be used
:type balancer_id: ``str``

:param profile: The profile key
:type profile: ``str``

:param libcloud_kwargs: Extra arguments for the driver's destroy_balancer method
:type libcloud_kwargs: ``dict``

:return: ``True`` if the destroy was successful, otherwise ``False``.
:rtype: ``bool``

CLI Example:

.. code-block:: bash

    salt myminion libcloud_loadbalancer.destroy_balancer balancer_1 profile1
[ "Destroy", "a", "load", "balancer" ]
python
train
30.692308
samuraisam/django-json-rpc
jsonrpc/__init__.py
https://github.com/samuraisam/django-json-rpc/blob/a88d744d960e828f3eb21265da0f10a694b8ebcf/jsonrpc/__init__.py#L141-L277
def jsonrpc_method(name, authenticated=False,
                   authentication_arguments=['username', 'password'],
                   safe=False, validate=False, site=default_site):
    """
    Wraps a function and turns it into a json-rpc method. Adds several
    attributes to the function specific to the JSON-RPC machinery and adds
    it to the default jsonrpc_site if one isn't provided. You must import
    the module containing these functions in your urls.py.

    name
        The name of your method. IE: `namespace.methodName` The method name
        can include type information, like `ns.method(String, Array) -> Nil`.

    authenticated=False
        Adds `username` and `password` arguments to the beginning of your
        method if the user hasn't already been authenticated. These will be
        used to authenticate the user against `django.contrib.authenticate`
        If you use HTTP auth or other authentication middleware, `username`
        and `password` will not be added, and this method will only check
        against `request.user.is_authenticated`.

        You may pass a callable to replace `django.contrib.auth.authenticate`
        as the authentication method. It must return either a User or `None`
        and take the keyword arguments `username` and `password`.

    safe=False
        Designates whether or not your method may be accessed by HTTP GET.
        By default this is turned off.

    validate=False
        Validates the arguments passed to your method based on type
        information provided in the signature. Supply type information by
        including types in your method declaration. Like so:

        @jsonrpc_method('myapp.specialSauce(Array, String)', validate=True)
        def special_sauce(self, ingredients, instructions):
            return SpecialSauce(ingredients, instructions)

        Calls to `myapp.specialSauce` will now check each arguments type
        before calling `special_sauce`, throwing an `InvalidParamsError`
        when it encounters a discrepancy. This can significantly reduce the
        amount of code required to write JSON-RPC services.

    site=default_site
        Defines which site the jsonrpc method will be added to. Can be any
        object that provides a `register(name, func)` method.
    """
    def decorator(func):
        arg_names = getargspec(func)[0][1:]
        X = {'name': name, 'arg_names': arg_names}
        if authenticated:
            if authenticated is True or six.callable(authenticated):
                # TODO: this is an assumption
                X['arg_names'] = authentication_arguments + X['arg_names']
                X['name'] = _inject_args(X['name'], ('String', 'String'))
                from django.contrib.auth import authenticate as _authenticate
                from django.contrib.auth.models import User
            else:
                authenticate = authenticated

            @six.wraps(func)
            def _func(request, *args, **kwargs):
                user = getattr(request, 'user', None)
                is_authenticated = getattr(user, 'is_authenticated', lambda: False)
                if ((user is not None and six.callable(is_authenticated)
                        and not is_authenticated()) or user is None):
                    user = None
                    try:
                        creds = args[:len(authentication_arguments)]
                        if len(creds) == 0:
                            raise IndexError
                        # Django's authenticate() method takes arguments as dict
                        user = _authenticate(username=creds[0], password=creds[1], *creds[2:])
                        if user is not None:
                            args = args[len(authentication_arguments):]
                    except IndexError:
                        auth_kwargs = {}
                        try:
                            for auth_kwarg in authentication_arguments:
                                auth_kwargs[auth_kwarg] = kwargs[auth_kwarg]
                        except KeyError:
                            raise InvalidParamsError(
                                'Authenticated methods require at least '
                                '[%(arguments)s] or {%(arguments)s} arguments'
                                % {'arguments': ', '.join(authentication_arguments)})
                        user = _authenticate(**auth_kwargs)
                        if user is not None:
                            for auth_kwarg in authentication_arguments:
                                kwargs.pop(auth_kwarg)
                    if user is None:
                        raise InvalidCredentialsError
                    request.user = user
                return func(request, *args, **kwargs)
        else:
            _func = func

        @six.wraps(_func)
        def exc_printer(*a, **kw):
            try:
                return _func(*a, **kw)
            except Exception as e:
                try:
                    print('JSONRPC SERVICE EXCEPTION')
                    import traceback
                    traceback.print_exc()
                except:
                    pass
                six.reraise(*sys.exc_info())
        ret_func = exc_printer

        method, arg_types, return_type = \
            _parse_sig(X['name'], X['arg_names'], validate)
        ret_func.json_args = X['arg_names']
        ret_func.json_arg_types = arg_types
        ret_func.json_return_type = return_type
        ret_func.json_method = method
        ret_func.json_safe = safe
        ret_func.json_sig = X['name']
        ret_func.json_validate = validate

        site.register(method, ret_func)
        return ret_func
    return decorator
[ "def", "jsonrpc_method", "(", "name", ",", "authenticated", "=", "False", ",", "authentication_arguments", "=", "[", "'username'", ",", "'password'", "]", ",", "safe", "=", "False", ",", "validate", "=", "False", ",", "site", "=", "default_site", ")", ":", "def", "decorator", "(", "func", ")", ":", "arg_names", "=", "getargspec", "(", "func", ")", "[", "0", "]", "[", "1", ":", "]", "X", "=", "{", "'name'", ":", "name", ",", "'arg_names'", ":", "arg_names", "}", "if", "authenticated", ":", "if", "authenticated", "is", "True", "or", "six", ".", "callable", "(", "authenticated", ")", ":", "# TODO: this is an assumption", "X", "[", "'arg_names'", "]", "=", "authentication_arguments", "+", "X", "[", "'arg_names'", "]", "X", "[", "'name'", "]", "=", "_inject_args", "(", "X", "[", "'name'", "]", ",", "(", "'String'", ",", "'String'", ")", ")", "from", "django", ".", "contrib", ".", "auth", "import", "authenticate", "as", "_authenticate", "from", "django", ".", "contrib", ".", "auth", ".", "models", "import", "User", "else", ":", "authenticate", "=", "authenticated", "@", "six", ".", "wraps", "(", "func", ")", "def", "_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "user", "=", "getattr", "(", "request", ",", "'user'", ",", "None", ")", "is_authenticated", "=", "getattr", "(", "user", ",", "'is_authenticated'", ",", "lambda", ":", "False", ")", "if", "(", "(", "user", "is", "not", "None", "and", "six", ".", "callable", "(", "is_authenticated", ")", "and", "not", "is_authenticated", "(", ")", ")", "or", "user", "is", "None", ")", ":", "user", "=", "None", "try", ":", "creds", "=", "args", "[", ":", "len", "(", "authentication_arguments", ")", "]", "if", "len", "(", "creds", ")", "==", "0", ":", "raise", "IndexError", "# Django's authenticate() method takes arguments as dict", "user", "=", "_authenticate", "(", "username", "=", "creds", "[", "0", "]", ",", "password", "=", "creds", "[", "1", "]", ",", "*", "creds", "[", "2", ":", "]", ")", "if", "user", "is", "not", "None", ":", "args", "=", "args", "[", "len", "(", "authentication_arguments", ")", ":", "]", "except", "IndexError", ":", "auth_kwargs", "=", "{", "}", "try", ":", "for", "auth_kwarg", "in", "authentication_arguments", ":", "auth_kwargs", "[", "auth_kwarg", "]", "=", "kwargs", "[", "auth_kwarg", "]", "except", "KeyError", ":", "raise", "InvalidParamsError", "(", "'Authenticated methods require at least '", "'[%(arguments)s] or {%(arguments)s} arguments'", "%", "{", "'arguments'", ":", "', '", ".", "join", "(", "authentication_arguments", ")", "}", ")", "user", "=", "_authenticate", "(", "*", "*", "auth_kwargs", ")", "if", "user", "is", "not", "None", ":", "for", "auth_kwarg", "in", "authentication_arguments", ":", "kwargs", ".", "pop", "(", "auth_kwarg", ")", "if", "user", "is", "None", ":", "raise", "InvalidCredentialsError", "request", ".", "user", "=", "user", "return", "func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "_func", "=", "func", "@", "six", ".", "wraps", "(", "_func", ")", "def", "exc_printer", "(", "*", "a", ",", "*", "*", "kw", ")", ":", "try", ":", "return", "_func", "(", "*", "a", ",", "*", "*", "kw", ")", "except", "Exception", "as", "e", ":", "try", ":", "print", "(", "'JSONRPC SERVICE EXCEPTION'", ")", "import", "traceback", "traceback", ".", "print_exc", "(", ")", "except", ":", "pass", "six", ".", "reraise", "(", "*", "sys", ".", "exc_info", "(", ")", ")", "ret_func", "=", "exc_printer", "method", ",", "arg_types", ",", "return_type", "=", "_parse_sig", "(", "X", "[", "'name'", "]", ",", "X", "[", "'arg_names'", "]", ",", "validate", ")", "ret_func", ".", "json_args", "=", "X", "[", "'arg_names'", "]", "ret_func", ".", "json_arg_types", "=", "arg_types", "ret_func", ".", "json_return_type", "=", "return_type", "ret_func", ".", "json_method", "=", "method", "ret_func", ".", "json_safe", "=", "safe", "ret_func", ".", "json_sig", "=", "X", "[", "'name'", "]", "ret_func", ".", "json_validate", "=", "validate", "site", ".", "register", "(", "method", ",", "ret_func", ")", "return", "ret_func", "return", "decorator" ]
Wraps a function and turns it into a json-rpc method. Adds several
attributes to the function specific to the JSON-RPC machinery and adds it
to the default jsonrpc_site if one isn't provided. You must import the
module containing these functions in your urls.py.

name
    The name of your method. IE: `namespace.methodName` The method name
    can include type information, like `ns.method(String, Array) -> Nil`.

authenticated=False
    Adds `username` and `password` arguments to the beginning of your
    method if the user hasn't already been authenticated. These will be
    used to authenticate the user against `django.contrib.authenticate`
    If you use HTTP auth or other authentication middleware, `username`
    and `password` will not be added, and this method will only check
    against `request.user.is_authenticated`.

    You may pass a callable to replace `django.contrib.auth.authenticate`
    as the authentication method. It must return either a User or `None`
    and take the keyword arguments `username` and `password`.

safe=False
    Designates whether or not your method may be accessed by HTTP GET.
    By default this is turned off.

validate=False
    Validates the arguments passed to your method based on type
    information provided in the signature. Supply type information by
    including types in your method declaration. Like so:

    @jsonrpc_method('myapp.specialSauce(Array, String)', validate=True)
    def special_sauce(self, ingredients, instructions):
        return SpecialSauce(ingredients, instructions)

    Calls to `myapp.specialSauce` will now check each arguments type
    before calling `special_sauce`, throwing an `InvalidParamsError`
    when it encounters a discrepancy. This can significantly reduce the
    amount of code required to write JSON-RPC services.

site=default_site
    Defines which site the jsonrpc method will be added to. Can be any
    object that provides a `register(name, func)` method.
[ "Wraps", "a", "function", "turns", "it", "into", "a", "json", "-", "rpc", "method", ".", "Adds", "several", "attributes", "to", "the", "function", "specific", "to", "the", "JSON", "-", "RPC", "machinery", "and", "adds", "it", "to", "the", "default", "jsonrpc_site", "if", "one", "isn", "t", "provided", ".", "You", "must", "import", "the", "module", "containing", "these", "functions", "in", "your", "urls", ".", "py", "." ]
python
train
42.817518
cgrok/clashroyale
clashroyale/official_api/client.py
https://github.com/cgrok/clashroyale/blob/2618f4da22a84ad3e36d2446e23436d87c423163/clashroyale/official_api/client.py#L589-L601
def get_rarity_info(self, rarity: str):
    """Returns card info from constants

    Parameters
    ----------
    rarity: str
        A rarity name

    Returns None or Constants
    """
    for c in self.constants.rarities:
        if c.name == rarity:
            return c
[ "def", "get_rarity_info", "(", "self", ",", "rarity", ":", "str", ")", ":", "for", "c", "in", "self", ".", "constants", ".", "rarities", ":", "if", "c", ".", "name", "==", "rarity", ":", "return", "c" ]
Returns card info from constants

Parameters
----------
rarity: str
    A rarity name

Returns None or Constants
[ "Returns", "card", "info", "from", "constants" ]
python
valid
23.230769
metakirby5/colorz
colorz.py
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L64-L71
def clamp(color, min_v, max_v):
    """
    Clamps a color such that the value is between min_v and max_v.
    """
    h, s, v = rgb_to_hsv(*map(down_scale, color))
    min_v, max_v = map(down_scale, (min_v, max_v))
    v = min(max(min_v, v), max_v)
    return tuple(map(up_scale, hsv_to_rgb(h, s, v)))
[ "def", "clamp", "(", "color", ",", "min_v", ",", "max_v", ")", ":", "h", ",", "s", ",", "v", "=", "rgb_to_hsv", "(", "*", "map", "(", "down_scale", ",", "color", ")", ")", "min_v", ",", "max_v", "=", "map", "(", "down_scale", ",", "(", "min_v", ",", "max_v", ")", ")", "v", "=", "min", "(", "max", "(", "min_v", ",", "v", ")", ",", "max_v", ")", "return", "tuple", "(", "map", "(", "up_scale", ",", "hsv_to_rgb", "(", "h", ",", "s", ",", "v", ")", ")", ")" ]
Clamps a color such that the value is between min_v and max_v.
[ "Clamps", "a", "color", "such", "that", "the", "value", "is", "between", "min_v", "and", "max_v", "." ]
python
train
36.875
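A standalone sketch of the same clamp using colorsys, assuming colorz's down_scale/up_scale simply divide and multiply by 255:

from colorsys import hsv_to_rgb, rgb_to_hsv

def clamp(color, min_v, max_v):
    h, s, v = rgb_to_hsv(*(c / 255 for c in color))  # 0-255 -> 0-1
    v = min(max(min_v / 255, v), max_v / 255)        # clamp brightness only
    return tuple(round(c * 255) for c in hsv_to_rgb(h, s, v))

# a very dark red is lifted to the minimum brightness floor
assert clamp((10, 0, 0), 170, 200) == (170, 0, 0)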
mitsei/dlkit
dlkit/json_/assessment/objects.py
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/assessment/objects.py#L1025-L1032
def _init_map(self, record_types=None, **kwargs):
    """Initialize form map"""
    osid_objects.OsidObjectForm._init_map(self, record_types=record_types)
    self._my_map['rubricId'] = self._rubric_default
    self._my_map['assignedBankIds'] = [str(kwargs['bank_id'])]
    self._my_map['levelId'] = self._level_default
    if self._supports_simple_sequencing():
        self._my_map['childIds'] = []
[ "def", "_init_map", "(", "self", ",", "record_types", "=", "None", ",", "*", "*", "kwargs", ")", ":", "osid_objects", ".", "OsidObjectForm", ".", "_init_map", "(", "self", ",", "record_types", "=", "record_types", ")", "self", ".", "_my_map", "[", "'rubricId'", "]", "=", "self", ".", "_rubric_default", "self", ".", "_my_map", "[", "'assignedBankIds'", "]", "=", "[", "str", "(", "kwargs", "[", "'bank_id'", "]", ")", "]", "self", ".", "_my_map", "[", "'levelId'", "]", "=", "self", ".", "_level_default", "if", "self", ".", "_supports_simple_sequencing", "(", ")", ":", "self", ".", "_my_map", "[", "'childIds'", "]", "=", "[", "]" ]
Initialize form map
[ "Initialize", "form", "map" ]
python
train
52.625
akfullfo/taskforce
taskforce/http.py
https://github.com/akfullfo/taskforce/blob/bc6dd744bd33546447d085dbd18a350532220193/taskforce/http.py#L281-L292
def getresponse(self):
    """
    Pass-thru method to make this class behave a little like HTTPConnection
    """
    resp = self.http.getresponse()
    self.log.info("resp is %s", str(resp))
    if resp.status < 400:
        return resp
    else:
        errtext = resp.read()
        content_type = resp.getheader('Content-Type', 'text/plain')
        raise HttpError(code=resp.status, content_type=content_type, content=errtext)
[ "def", "getresponse", "(", "self", ")", ":", "resp", "=", "self", ".", "http", ".", "getresponse", "(", ")", "self", ".", "log", ".", "info", "(", "\"resp is %s\"", ",", "str", "(", "resp", ")", ")", "if", "resp", ".", "status", "<", "400", ":", "return", "resp", "else", ":", "errtext", "=", "resp", ".", "read", "(", ")", "content_type", "=", "resp", ".", "getheader", "(", "'Content-Type'", ",", "'text/plain'", ")", "raise", "HttpError", "(", "code", "=", "resp", ".", "status", ",", "content_type", "=", "content_type", ",", "content", "=", "errtext", ")" ]
Pass-thru method to make this class behave a little like HTTPConnection
[ "Pass", "-", "thru", "method", "to", "make", "this", "class", "behave", "a", "little", "like", "HTTPConnection" ]
python
train
38.416667
sukujgrg/pptx-builder-from-yaml
pptx_builder/__init__.py
https://github.com/sukujgrg/pptx-builder-from-yaml/blob/2218290f2394a133f0937e432e588f687b2deb7b/pptx_builder/__init__.py#L176-L229
def cli(yaml_paths, pptx_template_path, font_size, master_slide_idx,
        slide_layout_idx, dst_dir, font_name, slide_txt_alignment, validate):
    """ A powerpoint builder

    https://github.com/sukujgrg/pptx-builder-from-yaml
    """
    dst_dir = Path(dst_dir)
    pptx_template_path = Path(pptx_template_path)
    pptx_template = pick_master_slide(pptx_template_path)

    yamlfiles = []
    for yaml_path in yaml_paths:
        yaml_path = Path(yaml_path)
        if yaml_path.is_dir():
            yamlfiles.extend([yml for yml in yaml_path.iterdir()])
        else:
            yamlfiles.append(yaml_path)

    if validate:
        exit_fail = False
        for yamlfile in yamlfiles:
            try:
                validate_yaml_file(SCHEMA_FOR_YAML, Path(yamlfile))
                msg = f"VALIDATE: Validation of {yamlfile} passed"
                click.echo(click.style(msg, fg="blue"))
            except jsonschema.exceptions.ValidationError as err:
                msg = f"ERR: {yamlfile} {str(err.message)} {err.path}"
                click.echo(click.style(msg, fg="red"), nl=True)
                exit_fail = True
            except Exception:
                raise
        if exit_fail:
            sys.exit(1)

    for yamlfile in yamlfiles:
        try:
            r = build_slide(
                Path(yamlfile),
                pptx_template,
                master_slide_idx,
                slide_layout_idx,
                font_size,
                dst_dir,
                font_name,
                slide_txt_alignment
            )
            msg = f"PPTX: {r}"
            click.echo(click.style(msg, fg="green"))
        except Exception:
            raise
[ "def", "cli", "(", "yaml_paths", ",", "pptx_template_path", ",", "font_size", ",", "master_slide_idx", ",", "slide_layout_idx", ",", "dst_dir", ",", "font_name", ",", "slide_txt_alignment", ",", "validate", ")", ":", "dst_dir", "=", "Path", "(", "dst_dir", ")", "pptx_template_path", "=", "Path", "(", "pptx_template_path", ")", "pptx_template", "=", "pick_master_slide", "(", "pptx_template_path", ")", "yamlfiles", "=", "[", "]", "for", "yaml_path", "in", "yaml_paths", ":", "yaml_path", "=", "Path", "(", "yaml_path", ")", "if", "yaml_path", ".", "is_dir", "(", ")", ":", "yamlfiles", ".", "extend", "(", "[", "yml", "for", "yml", "in", "yaml_path", ".", "iterdir", "(", ")", "]", ")", "else", ":", "yamlfiles", ".", "append", "(", "yaml_path", ")", "if", "validate", ":", "exit_fail", "=", "False", "for", "yamlfile", "in", "yamlfiles", ":", "try", ":", "validate_yaml_file", "(", "SCHEMA_FOR_YAML", ",", "Path", "(", "yamlfile", ")", ")", "msg", "=", "f\"VALIDATE: Validation of {yamlfile} passed\"", "click", ".", "echo", "(", "click", ".", "style", "(", "msg", ",", "fg", "=", "\"blue\"", ")", ")", "except", "jsonschema", ".", "exceptions", ".", "ValidationError", "as", "err", ":", "msg", "=", "f\"ERR: {yamlfile} {str(err.message)} {err.path}\"", "click", ".", "echo", "(", "click", ".", "style", "(", "msg", ",", "fg", "=", "\"red\"", ")", ",", "nl", "=", "True", ")", "exit_fail", "=", "True", "except", "Exception", ":", "raise", "if", "exit_fail", ":", "sys", ".", "exit", "(", "1", ")", "for", "yamlfile", "in", "yamlfiles", ":", "try", ":", "r", "=", "build_slide", "(", "Path", "(", "yamlfile", ")", ",", "pptx_template", ",", "master_slide_idx", ",", "slide_layout_idx", ",", "font_size", ",", "dst_dir", ",", "font_name", ",", "slide_txt_alignment", ")", "msg", "=", "f\"PPTX: {r}\"", "click", ".", "echo", "(", "click", ".", "style", "(", "msg", ",", "fg", "=", "\"green\"", ")", ")", "except", "Exception", ":", "raise" ]
A powerpoint builder https://github.com/sukujgrg/pptx-builder-from-yaml
[ "A", "powerpoint", "builder" ]
python
train
31.222222
fake-name/ChromeController
ChromeController/Generator/Generated.py
https://github.com/fake-name/ChromeController/blob/914dd136184e8f1165c7aa6ef30418aaf10c61f0/ChromeController/Generator/Generated.py#L66-L84
def Memory_setPressureNotificationsSuppressed(self, suppressed): """ Function path: Memory.setPressureNotificationsSuppressed Domain: Memory Method name: setPressureNotificationsSuppressed Parameters: Required arguments: 'suppressed' (type: boolean) -> If true, memory pressure notifications will be suppressed. No return value. Description: Enable/disable suppressing memory pressure notifications in all processes. """ assert isinstance(suppressed, (bool,) ), "Argument 'suppressed' must be of type '['bool']'. Received type: '%s'" % type( suppressed) subdom_funcs = self.synchronous_command( 'Memory.setPressureNotificationsSuppressed', suppressed=suppressed) return subdom_funcs
[ "def", "Memory_setPressureNotificationsSuppressed", "(", "self", ",", "suppressed", ")", ":", "assert", "isinstance", "(", "suppressed", ",", "(", "bool", ",", ")", ")", ",", "\"Argument 'suppressed' must be of type '['bool']'. Received type: '%s'\"", "%", "type", "(", "suppressed", ")", "subdom_funcs", "=", "self", ".", "synchronous_command", "(", "'Memory.setPressureNotificationsSuppressed'", ",", "suppressed", "=", "suppressed", ")", "return", "subdom_funcs" ]
Function path: Memory.setPressureNotificationsSuppressed Domain: Memory Method name: setPressureNotificationsSuppressed Parameters: Required arguments: 'suppressed' (type: boolean) -> If true, memory pressure notifications will be suppressed. No return value. Description: Enable/disable suppressing memory pressure notifications in all processes.
[ "Function", "path", ":", "Memory", ".", "setPressureNotificationsSuppressed", "Domain", ":", "Memory", "Method", "name", ":", "setPressureNotificationsSuppressed", "Parameters", ":", "Required", "arguments", ":", "suppressed", "(", "type", ":", "boolean", ")", "-", ">", "If", "true", "memory", "pressure", "notifications", "will", "be", "suppressed", ".", "No", "return", "value", ".", "Description", ":", "Enable", "/", "disable", "suppressing", "memory", "pressure", "notifications", "in", "all", "processes", "." ]
python
train
38.105263
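A usage sketch for the generated CDP wrapper above; `cr` is a hypothetical, already-connected ChromeController interface instance exposing these generated methods:

    # Hedged: `cr` stands in for a connected debug interface instance.
    cr.Memory_setPressureNotificationsSuppressed(True)   # suppress notifications
    cr.Memory_setPressureNotificationsSuppressed(False)  # re-enable them
    cr.Memory_setPressureNotificationsSuppressed('yes')  # AssertionError: not a bool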
aloetesting/aloe_webdriver
aloe_webdriver/css.py
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L216-L224
def follow_link_by_selector(self, selector): """ Navigate to the href of the element matching the CSS selector. N.B. this does not click the link, but changes the browser's URL. """ elem = find_element_by_jquery(world.browser, selector) href = elem.get_attribute('href') world.browser.get(href)
[ "def", "follow_link_by_selector", "(", "self", ",", "selector", ")", ":", "elem", "=", "find_element_by_jquery", "(", "world", ".", "browser", ",", "selector", ")", "href", "=", "elem", ".", "get_attribute", "(", "'href'", ")", "world", ".", "browser", ".", "get", "(", "href", ")" ]
Navigate to the href of the element matching the CSS selector. N.B. this does not click the link, but changes the browser's URL.
[ "Navigate", "to", "the", "href", "of", "the", "element", "matching", "the", "CSS", "selector", "." ]
python
train
35
saltstack/salt
salt/modules/boto_ec2.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/boto_ec2.py#L1879-L1909
def get_all_tags(filters=None, region=None, key=None, keyid=None, profile=None): ''' Describe all tags matching the filter criteria, or all tags in the account otherwise. .. versionadded:: 2018.3.0 filters (dict) - Additional constraints on which volumes to return. Note that valid filters vary extensively depending on the resource type. When in doubt, search first without a filter and then use the returned data to help fine-tune your search. You can generally garner the resource type from its ID (e.g. `vol-XXXXX` is a volume, `i-XXXXX` is an instance, etc.) CLI Example: .. code-block:: bash salt-call boto_ec2.get_all_tags '{"tag:Name": myInstanceNameTag, resource-type: instance}' ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: ret = conn.get_all_tags(filters) tags = {} for t in ret: if t.res_id not in tags: tags[t.res_id] = {} tags[t.res_id][t.name] = t.value return tags except boto.exception.BotoServerError as e: log.error(e) return {}
[ "def", "get_all_tags", "(", "filters", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "ret", "=", "conn", ".", "get_all_tags", "(", "filters", ")", "tags", "=", "{", "}", "for", "t", "in", "ret", ":", "if", "t", ".", "res_id", "not", "in", "tags", ":", "tags", "[", "t", ".", "res_id", "]", "=", "{", "}", "tags", "[", "t", ".", "res_id", "]", "[", "t", ".", "name", "]", "=", "t", ".", "value", "return", "tags", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "error", "(", "e", ")", "return", "{", "}" ]
Describe all tags matching the filter criteria, or all tags in the account otherwise. .. versionadded:: 2018.3.0 filters (dict) - Additional constraints on which volumes to return. Note that valid filters vary extensively depending on the resource type. When in doubt, search first without a filter and then use the returned data to help fine-tune your search. You can generally garner the resource type from its ID (e.g. `vol-XXXXX` is a volume, `i-XXXXX` is an instance, etc.) CLI Example: .. code-block:: bash salt-call boto_ec2.get_all_tags '{"tag:Name": myInstanceNameTag, resource-type: instance}'
[ "Describe", "all", "tags", "matching", "the", "filter", "criteria", "or", "all", "tags", "in", "the", "account", "otherwise", "." ]
python
train
36.387097
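A sketch of calling the module from other Salt code via the standard `__salt__` loader dunder (the CLI form is already in the docstring); the filter keys are illustrative:

    # Inside another execution module or state module:
    tags = __salt__['boto_ec2.get_all_tags'](
        filters={'resource-type': 'instance'},
        region='us-east-1',
    )
    # -> {'i-0abc123...': {'Name': 'web-01', ...}, ...} or {} on a Boto error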
SuperCowPowers/workbench
workbench/server/data_store.py
https://github.com/SuperCowPowers/workbench/blob/710232756dd717f734253315e3d0b33c9628dafb/workbench/server/data_store.py#L114-L122
def expire_data(self): """Expire data within the samples collection.""" # Do we need to start deleting stuff? while self.sample_storage_size() > self.samples_cap: # This should return the 'oldest' record in samples record = self.database[self.sample_collection].find().sort('import_time',pymongo.ASCENDING).limit(1)[0] self.remove_sample(record['md5'])
[ "def", "expire_data", "(", "self", ")", ":", "# Do we need to start deleting stuff?", "while", "self", ".", "sample_storage_size", "(", ")", ">", "self", ".", "samples_cap", ":", "# This should return the 'oldest' record in samples", "record", "=", "self", ".", "database", "[", "self", ".", "sample_collection", "]", ".", "find", "(", ")", ".", "sort", "(", "'import_time'", ",", "pymongo", ".", "ASCENDING", ")", ".", "limit", "(", "1", ")", "[", "0", "]", "self", ".", "remove_sample", "(", "record", "[", "'md5'", "]", ")" ]
Expire data within the samples collection.
[ "Expire", "data", "within", "the", "samples", "collection", "." ]
python
train
45.111111
saltstack/salt
salt/utils/gitfs.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/utils/gitfs.py#L764-L791
def fetch(self): ''' Fetch the repo. If the local copy was updated, return True. If the local copy was already up-to-date, return False. This function requires that a _fetch() function be implemented in a sub-class. ''' try: with self.gen_lock(lock_type='update'): log.debug('Fetching %s remote \'%s\'', self.role, self.id) # Run provider-specific fetch code return self._fetch() except GitLockError as exc: if exc.errno == errno.EEXIST: log.warning( 'Update lock file is present for %s remote \'%s\', ' 'skipping. If this warning persists, it is possible that ' 'the update process was interrupted, but the lock could ' 'also have been manually set. Removing %s or running ' '\'salt-run cache.clear_git_lock %s type=update\' will ' 'allow updates to continue for this remote.', self.role, self.id, self._get_lock_file(lock_type='update'), self.role, ) return False
[ "def", "fetch", "(", "self", ")", ":", "try", ":", "with", "self", ".", "gen_lock", "(", "lock_type", "=", "'update'", ")", ":", "log", ".", "debug", "(", "'Fetching %s remote \\'%s\\''", ",", "self", ".", "role", ",", "self", ".", "id", ")", "# Run provider-specific fetch code", "return", "self", ".", "_fetch", "(", ")", "except", "GitLockError", "as", "exc", ":", "if", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ":", "log", ".", "warning", "(", "'Update lock file is present for %s remote \\'%s\\', '", "'skipping. If this warning persists, it is possible that '", "'the update process was interrupted, but the lock could '", "'also have been manually set. Removing %s or running '", "'\\'salt-run cache.clear_git_lock %s type=update\\' will '", "'allow updates to continue for this remote.'", ",", "self", ".", "role", ",", "self", ".", "id", ",", "self", ".", "_get_lock_file", "(", "lock_type", "=", "'update'", ")", ",", "self", ".", "role", ",", ")", "return", "False" ]
Fetch the repo. If the local copy was updated, return True. If the local copy was already up-to-date, return False. This function requires that a _fetch() function be implemented in a sub-class.
[ "Fetch", "the", "repo", ".", "If", "the", "local", "copy", "was", "updated", "return", "True", ".", "If", "the", "local", "copy", "was", "already", "up", "-", "to", "-", "date", "return", "False", "." ]
python
train
43.535714
osrg/ryu
ryu/services/protocols/bgp/core_managers/table_manager.py
https://github.com/osrg/ryu/blob/6f906e72c92e10bd0264c9b91a2f7bb85b97780c/ryu/services/protocols/bgp/core_managers/table_manager.py#L342-L355
def get_ipv4fs_table(self): """Returns global IPv4 Flow Specification table. Creates the table if it does not exist. """ ipv4fs_table = self._global_tables.get(RF_IPv4_FLOWSPEC) # Lazy initialization of the table. if not ipv4fs_table: ipv4fs_table = IPv4FlowSpecTable(self._core_service, self._signal_bus) self._global_tables[RF_IPv4_FLOWSPEC] = ipv4fs_table self._tables[(None, RF_IPv4_FLOWSPEC)] = ipv4fs_table return ipv4fs_table
[ "def", "get_ipv4fs_table", "(", "self", ")", ":", "ipv4fs_table", "=", "self", ".", "_global_tables", ".", "get", "(", "RF_IPv4_FLOWSPEC", ")", "# Lazy initialization of the table.", "if", "not", "ipv4fs_table", ":", "ipv4fs_table", "=", "IPv4FlowSpecTable", "(", "self", ".", "_core_service", ",", "self", ".", "_signal_bus", ")", "self", ".", "_global_tables", "[", "RF_IPv4_FLOWSPEC", "]", "=", "ipv4fs_table", "self", ".", "_tables", "[", "(", "None", ",", "RF_IPv4_FLOWSPEC", ")", "]", "=", "ipv4fs_table", "return", "ipv4fs_table" ]
Returns global IPv4 Flow Specification table. Creates the table if it does not exist.
[ "Returns", "global", "IPv4", "Flow", "Specification", "table", "." ]
python
train
39.857143
h2oai/datatable
ci/make_fast.py
https://github.com/h2oai/datatable/blob/dd5fba74d2ca85b66f82ae3c1e0b6ea2fd792564/ci/make_fast.py#L15-L32
def get_files(): """ Return the list of all source/header files in `c/` directory. The files will have pathnames relative to the current folder, for example "c/csv/reader_utils.cc". """ sources = [] headers = ["datatable/include/datatable.h"] assert os.path.isfile(headers[0]) for dirpath, _, filenames in os.walk("c"): for f in filenames: fullname = os.path.join(dirpath, f) if f.endswith(".h") or f.endswith(".inc"): headers.append(fullname) elif f.endswith(".c") or f.endswith(".cc"): sources.append(fullname) return (sources, headers)
[ "def", "get_files", "(", ")", ":", "sources", "=", "[", "]", "headers", "=", "[", "\"datatable/include/datatable.h\"", "]", "assert", "os", ".", "path", ".", "isfile", "(", "headers", "[", "0", "]", ")", "for", "dirpath", ",", "_", ",", "filenames", "in", "os", ".", "walk", "(", "\"c\"", ")", ":", "for", "f", "in", "filenames", ":", "fullname", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "f", ")", "if", "f", ".", "endswith", "(", "\".h\"", ")", "or", "f", ".", "endswith", "(", "\".inc\"", ")", ":", "headers", ".", "append", "(", "fullname", ")", "elif", "f", ".", "endswith", "(", "\".c\"", ")", "or", "f", ".", "endswith", "(", "\".cc\"", ")", ":", "sources", ".", "append", "(", "fullname", ")", "return", "(", "sources", ",", "headers", ")" ]
Return the list of all source/header files in `c/` directory. The files will have pathnames relative to the current folder, for example "c/csv/reader_utils.cc".
[ "Return", "the", "list", "of", "all", "source", "/", "header", "files", "in", "c", "/", "directory", "." ]
python
train
35.444444
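A quick driver for the helper above; per the assert it must run from the repository root so `c/` and `datatable/include/datatable.h` resolve:

    sources, headers = get_files()
    print(len(sources), 'source files,', len(headers), 'headers')
    print(sorted(sources)[:3])  # e.g. paths like 'c/csv/reader_utils.cc'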
apple/turicreate
src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/internal/well_known_types.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/internal/well_known_types.py#L70-L76
def Pack(self, msg, type_url_prefix='type.googleapis.com/'): """Packs the specified message into current Any message.""" if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) else: self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) self.value = msg.SerializeToString()
[ "def", "Pack", "(", "self", ",", "msg", ",", "type_url_prefix", "=", "'type.googleapis.com/'", ")", ":", "if", "len", "(", "type_url_prefix", ")", "<", "1", "or", "type_url_prefix", "[", "-", "1", "]", "!=", "'/'", ":", "self", ".", "type_url", "=", "'%s/%s'", "%", "(", "type_url_prefix", ",", "msg", ".", "DESCRIPTOR", ".", "full_name", ")", "else", ":", "self", ".", "type_url", "=", "'%s%s'", "%", "(", "type_url_prefix", ",", "msg", ".", "DESCRIPTOR", ".", "full_name", ")", "self", ".", "value", "=", "msg", ".", "SerializeToString", "(", ")" ]
Packs the specified message into current Any message.
[ "Packs", "the", "specified", "message", "into", "current", "Any", "message", "." ]
python
train
54.714286
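A usage sketch with the stock protobuf API (Duration is just a convenient message type to pack):

    from google.protobuf import any_pb2, duration_pb2

    msg = duration_pb2.Duration(seconds=30)
    any_msg = any_pb2.Any()
    any_msg.Pack(msg)                  # type_url: 'type.googleapis.com/google.protobuf.Duration'
    any_msg.Pack(msg, 'example.com/')  # custom prefix; a '/' is inserted when missing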
inspirehep/harvesting-kit
harvestingkit/inspire_cds_package/from_inspire.py
https://github.com/inspirehep/harvesting-kit/blob/33a7f8aa9dade1d863110c6d8b27dfd955cb471f/harvestingkit/inspire_cds_package/from_inspire.py#L446-L457
def update_dois(self): """Remove duplicate BibMatch DOIs.""" dois = record_get_field_instances(self.record, '024', ind1="7") all_dois = {} for field in dois: subs = field_get_subfield_instances(field) subs_dict = dict(subs) if subs_dict.get('a'): if subs_dict['a'] in all_dois: record_delete_field(self.record, tag='024', ind1='7', field_position_global=field[4]) continue all_dois[subs_dict['a']] = field
[ "def", "update_dois", "(", "self", ")", ":", "dois", "=", "record_get_field_instances", "(", "self", ".", "record", ",", "'024'", ",", "ind1", "=", "\"7\"", ")", "all_dois", "=", "{", "}", "for", "field", "in", "dois", ":", "subs", "=", "field_get_subfield_instances", "(", "field", ")", "subs_dict", "=", "dict", "(", "subs", ")", "if", "subs_dict", ".", "get", "(", "'a'", ")", ":", "if", "subs_dict", "[", "'a'", "]", "in", "all_dois", ":", "record_delete_field", "(", "self", ".", "record", ",", "tag", "=", "'024'", ",", "ind1", "=", "'7'", ",", "field_position_global", "=", "field", "[", "4", "]", ")", "continue", "all_dois", "[", "subs_dict", "[", "'a'", "]", "]", "=", "field" ]
Remove duplicate BibMatch DOIs.
[ "Remove", "duplicate", "BibMatch", "DOIs", "." ]
python
valid
44.5
matthiask/django-authlib
authlib/email.py
https://github.com/matthiask/django-authlib/blob/a142da7e27fe9d30f34a84b12f24f686f9d2c8e1/authlib/email.py#L90-L115
def send_registration_mail(email, *, request, **kwargs): """send_registration_mail(email, *, request, **kwargs) Sends the registration mail * ``email``: The email address where the registration link should be sent to. * ``request``: A HTTP request instance, used to construct the complete URL (including protocol and domain) for the registration link. * Additional keyword arguments for ``get_confirmation_url`` respectively ``get_confirmation_code``. The mail is rendered using the following two templates: * ``registration/email_registration_email.txt``: The first line of this template will be the subject, the third to the last line the body of the email. * ``registration/email_registration_email.html``: The body of the HTML version of the mail. This template is **NOT** available by default and is not required either. """ render_to_mail( "registration/email_registration_email", {"url": get_confirmation_url(email, request, **kwargs)}, to=[email], ).send()
[ "def", "send_registration_mail", "(", "email", ",", "*", ",", "request", ",", "*", "*", "kwargs", ")", ":", "render_to_mail", "(", "\"registration/email_registration_email\"", ",", "{", "\"url\"", ":", "get_confirmation_url", "(", "email", ",", "request", ",", "*", "*", "kwargs", ")", "}", ",", "to", "=", "[", "email", "]", ",", ")", ".", "send", "(", ")" ]
send_registration_mail(email, *, request, **kwargs) Sends the registration mail * ``email``: The email address where the registration link should be sent to. * ``request``: A HTTP request instance, used to construct the complete URL (including protocol and domain) for the registration link. * Additional keyword arguments for ``get_confirmation_url`` respectively ``get_confirmation_code``. The mail is rendered using the following two templates: * ``registration/email_registration_email.txt``: The first line of this template will be the subject, the third to the last line the body of the email. * ``registration/email_registration_email.html``: The body of the HTML version of the mail. This template is **NOT** available by default and is not required either.
[ "send_registration_mail", "(", "email", "*", "request", "**", "kwargs", ")", "Sends", "the", "registration", "mail" ]
python
train
40.5
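A sketch of the usual call site, assuming a plain Django view (the view itself is illustrative, not part of authlib):

    from authlib.email import send_registration_mail
    from django.http import HttpResponse

    def register(request):  # hypothetical view
        send_registration_mail(request.POST['email'], request=request)
        return HttpResponse('Confirmation link sent.')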
orb-framework/orb
orb/core/column.py
https://github.com/orb-framework/orb/blob/575be2689cb269e65a0a2678232ff940acc19e5a/orb/core/column.py#L452-L472
def store(self, value, context=None): """ Converts the value to one that is safe to store on a record within the record values dictionary :param value | <variant> :return <variant> """ if isinstance(value, (str, unicode)): value = self.valueFromString(value) # store the internationalized property if self.testFlag(self.Flags.I18n): if not isinstance(value, dict): context = context or orb.Context() return {context.locale: value} else: return value else: return value
[ "def", "store", "(", "self", ",", "value", ",", "context", "=", "None", ")", ":", "if", "isinstance", "(", "value", ",", "(", "str", ",", "unicode", ")", ")", ":", "value", "=", "self", ".", "valueFromString", "(", "value", ")", "# store the internationalized property", "if", "self", ".", "testFlag", "(", "self", ".", "Flags", ".", "I18n", ")", ":", "if", "not", "isinstance", "(", "value", ",", "dict", ")", ":", "context", "=", "context", "or", "orb", ".", "Context", "(", ")", "return", "{", "context", ".", "locale", ":", "value", "}", "else", ":", "return", "value", "else", ":", "return", "value" ]
Converts the value to one that is safe to store on a record within the record values dictionary :param value | <variant> :return <variant>
[ "Converts", "the", "value", "to", "one", "that", "is", "safe", "to", "store", "on", "a", "record", "within", "the", "record", "values", "dictionary" ]
python
train
30.285714
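An illustrative sketch of the two branches above; the column object and the `locale` keyword on orb.Context are assumptions:

    # Hypothetical: `col` is a column whose I18n flag test passes.
    col.store('hello', context=orb.Context(locale='en_US'))
    # -> {'en_US': 'hello'}  (wrapped per-locale)
    col.store({'en_US': 'hello', 'fr_FR': 'bonjour'})
    # -> returned unchanged: already a locale mapping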
Jaymon/endpoints
endpoints/http.py
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L1144-L1153
def code(self): """the http status code to return to the client, by default, 200 if a body is present otherwise 204""" code = getattr(self, '_code', None) if not code: if self.has_body(): code = 200 else: code = 204 return code
[ "def", "code", "(", "self", ")", ":", "code", "=", "getattr", "(", "self", ",", "'_code'", ",", "None", ")", "if", "not", "code", ":", "if", "self", ".", "has_body", "(", ")", ":", "code", "=", "200", "else", ":", "code", "=", "204", "return", "code" ]
the http status code to return to the client, by default, 200 if a body is present otherwise 204
[ "the", "http", "status", "code", "to", "return", "to", "the", "client", "by", "default", "200", "if", "a", "body", "is", "present", "otherwise", "204" ]
python
train
30.7
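An illustrative walk-through of the defaults; the response construction details are assumed:

    resp = Response()   # hypothetical construction
    resp.code           # -> 204 while has_body() is False
    resp.body = 'hello'
    resp.code           # -> 200 once a body exists
    resp._code = 201    # an explicitly set code short-circuits both defaults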
Kortemme-Lab/klab
klab/bio/pdb.py
https://github.com/Kortemme-Lab/klab/blob/6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b/klab/bio/pdb.py#L1755-L1766
def get_rosetta_sequence_to_atom_json_map(self): '''Returns the mapping from Rosetta residue IDs to PDB ATOM residue IDs in JSON format.''' import json if not self.rosetta_to_atom_sequence_maps and self.rosetta_sequences: raise Exception('The PDB to Rosetta mapping has not been determined. Please call construct_pdb_to_rosetta_residue_map first.') d = {} for c, sm in self.rosetta_to_atom_sequence_maps.iteritems(): for k, v in sm.map.iteritems(): d[k] = v #d[c] = sm.map return json.dumps(d, indent = 4, sort_keys = True)
[ "def", "get_rosetta_sequence_to_atom_json_map", "(", "self", ")", ":", "import", "json", "if", "not", "self", ".", "rosetta_to_atom_sequence_maps", "and", "self", ".", "rosetta_sequences", ":", "raise", "Exception", "(", "'The PDB to Rosetta mapping has not been determined. Please call construct_pdb_to_rosetta_residue_map first.'", ")", "d", "=", "{", "}", "for", "c", ",", "sm", "in", "self", ".", "rosetta_to_atom_sequence_maps", ".", "iteritems", "(", ")", ":", "for", "k", ",", "v", "in", "sm", ".", "map", ".", "iteritems", "(", ")", ":", "d", "[", "k", "]", "=", "v", "#d[c] = sm.map", "return", "json", ".", "dumps", "(", "d", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")" ]
Returns the mapping from Rosetta residue IDs to PDB ATOM residue IDs in JSON format.
[ "Returns", "the", "mapping", "from", "Rosetta", "residue", "IDs", "to", "PDB", "ATOM", "residue", "IDs", "in", "JSON", "format", "." ]
python
train
51.083333
timknip/pycsg
csg/core.py
https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L454-L508
def cylinder(cls, **kwargs): """ Returns a cylinder. Kwargs: start (list): Start of cylinder, default [0, -1, 0]. end (list): End of cylinder, default [0, 1, 0]. radius (float): Radius of cylinder, default 1.0. slices (int): Number of slices, default 16. """ s = kwargs.get('start', Vector(0.0, -1.0, 0.0)) e = kwargs.get('end', Vector(0.0, 1.0, 0.0)) if isinstance(s, list): s = Vector(*s) if isinstance(e, list): e = Vector(*e) r = kwargs.get('radius', 1.0) slices = kwargs.get('slices', 16) ray = e.minus(s) axisZ = ray.unit() isY = (math.fabs(axisZ.y) > 0.5) axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit() axisY = axisX.cross(axisZ).unit() start = Vertex(s, axisZ.negated()) end = Vertex(e, axisZ.unit()) polygons = [] def point(stack, angle, normalBlend): out = axisX.times(math.cos(angle)).plus( axisY.times(math.sin(angle))) pos = s.plus(ray.times(stack)).plus(out.times(r)) normal = out.times(1.0 - math.fabs(normalBlend)).plus( axisZ.times(normalBlend)) return Vertex(pos, normal) dt = math.pi * 2.0 / float(slices) for i in range(0, slices): t0 = i * dt i1 = (i + 1) % slices t1 = i1 * dt polygons.append(Polygon([start.clone(), point(0., t0, -1.), point(0., t1, -1.)])) polygons.append(Polygon([point(0., t1, 0.), point(0., t0, 0.), point(1., t0, 0.), point(1., t1, 0.)])) polygons.append(Polygon([end.clone(), point(1., t1, 1.), point(1., t0, 1.)])) return CSG.fromPolygons(polygons)
[ "def", "cylinder", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "s", "=", "kwargs", ".", "get", "(", "'start'", ",", "Vector", "(", "0.0", ",", "-", "1.0", ",", "0.0", ")", ")", "e", "=", "kwargs", ".", "get", "(", "'end'", ",", "Vector", "(", "0.0", ",", "1.0", ",", "0.0", ")", ")", "if", "isinstance", "(", "s", ",", "list", ")", ":", "s", "=", "Vector", "(", "*", "s", ")", "if", "isinstance", "(", "e", ",", "list", ")", ":", "e", "=", "Vector", "(", "*", "e", ")", "r", "=", "kwargs", ".", "get", "(", "'radius'", ",", "1.0", ")", "slices", "=", "kwargs", ".", "get", "(", "'slices'", ",", "16", ")", "ray", "=", "e", ".", "minus", "(", "s", ")", "axisZ", "=", "ray", ".", "unit", "(", ")", "isY", "=", "(", "math", ".", "fabs", "(", "axisZ", ".", "y", ")", ">", "0.5", ")", "axisX", "=", "Vector", "(", "float", "(", "isY", ")", ",", "float", "(", "not", "isY", ")", ",", "0", ")", ".", "cross", "(", "axisZ", ")", ".", "unit", "(", ")", "axisY", "=", "axisX", ".", "cross", "(", "axisZ", ")", ".", "unit", "(", ")", "start", "=", "Vertex", "(", "s", ",", "axisZ", ".", "negated", "(", ")", ")", "end", "=", "Vertex", "(", "e", ",", "axisZ", ".", "unit", "(", ")", ")", "polygons", "=", "[", "]", "def", "point", "(", "stack", ",", "angle", ",", "normalBlend", ")", ":", "out", "=", "axisX", ".", "times", "(", "math", ".", "cos", "(", "angle", ")", ")", ".", "plus", "(", "axisY", ".", "times", "(", "math", ".", "sin", "(", "angle", ")", ")", ")", "pos", "=", "s", ".", "plus", "(", "ray", ".", "times", "(", "stack", ")", ")", ".", "plus", "(", "out", ".", "times", "(", "r", ")", ")", "normal", "=", "out", ".", "times", "(", "1.0", "-", "math", ".", "fabs", "(", "normalBlend", ")", ")", ".", "plus", "(", "axisZ", ".", "times", "(", "normalBlend", ")", ")", "return", "Vertex", "(", "pos", ",", "normal", ")", "dt", "=", "math", ".", "pi", "*", "2.0", "/", "float", "(", "slices", ")", "for", "i", "in", "range", "(", "0", ",", "slices", ")", ":", "t0", "=", "i", "*", "dt", "i1", "=", "(", "i", "+", "1", ")", "%", "slices", "t1", "=", "i1", "*", "dt", "polygons", ".", "append", "(", "Polygon", "(", "[", "start", ".", "clone", "(", ")", ",", "point", "(", "0.", ",", "t0", ",", "-", "1.", ")", ",", "point", "(", "0.", ",", "t1", ",", "-", "1.", ")", "]", ")", ")", "polygons", ".", "append", "(", "Polygon", "(", "[", "point", "(", "0.", ",", "t1", ",", "0.", ")", ",", "point", "(", "0.", ",", "t0", ",", "0.", ")", ",", "point", "(", "1.", ",", "t0", ",", "0.", ")", ",", "point", "(", "1.", ",", "t1", ",", "0.", ")", "]", ")", ")", "polygons", ".", "append", "(", "Polygon", "(", "[", "end", ".", "clone", "(", ")", ",", "point", "(", "1.", ",", "t1", ",", "1.", ")", ",", "point", "(", "1.", ",", "t0", ",", "1.", ")", "]", ")", ")", "return", "CSG", ".", "fromPolygons", "(", "polygons", ")" ]
Returns a cylinder. Kwargs: start (list): Start of cylinder, default [0, -1, 0]. end (list): End of cylinder, default [0, 1, 0]. radius (float): Radius of cylinder, default 1.0. slices (int): Number of slices, default 16.
[ "Returns", "a", "cylinder", ".", "Kwargs", ":", "start", "(", "list", ")", ":", "Start", "of", "cylinder", "default", "[", "0", "-", "1", "0", "]", ".", "end", "(", "list", ")", ":", "End", "of", "cylinder", "default", "[", "0", "1", "0", "]", ".", "radius", "(", "float", ")", ":", "Radius", "of", "cylinder", "default", "1", ".", "0", ".", "slices", "(", "int", ")", ":", "Number", "of", "slices", "default", "16", "." ]
python
train
38.781818
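A usage sketch; the kwargs mirror the signature above, while the companion sphere constructor and the boolean union on CSG are assumed from the same class:

    cyl = CSG.cylinder(start=[0., -1., 0.], end=[0., 1., 0.], radius=0.5, slices=32)
    sph = CSG.sphere(radius=0.7)  # assumed companion constructor
    solid = cyl.union(sph)        # boolean ops are the usual next step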
edx/edx-django-release-util
release_util/management/commands/__init__.py
https://github.com/edx/edx-django-release-util/blob/de0fde41d6a19885ab7dc309472b94fd0fccbc1d/release_util/management/commands/__init__.py#L247-L292
def __apply(self, migration=None, run_all=False): """ If a migration is supplied, runs that migration and appends to state. If run_all==True, runs all migrations. Raises a ValueError if neither "migration" nor "run_all" are provided. """ out = StringIO() trace = None migrate_kwargs = { 'interactive': False, 'stdout': out, 'database': self._database_name, } if migration is not None: migrate_kwargs.update({ 'app_label': migration[0], 'migration_name': migration[1], }) elif not run_all: raise ValueError('Either a migration must be provided or "run_all" must be True') start = self._timer() try: call_command("migrate", **migrate_kwargs) except Exception: trace = ''.join(traceback.format_exception(*sys.exc_info())) finally: end = self._timer() successes, failure = self._parse_migrate_output(out.getvalue()) self._migration_state.append({ 'database': self._database_name, 'migration': 'all' if run_all else (migration[0], migration[1]), 'duration': end - start, 'output': _remove_escape_characters(out.getvalue()), 'succeeded_migrations': successes, # [(app, migration), ...] 'failed_migration': failure, # (app, migration) 'traceback': trace, 'succeeded': failure is None and trace is None, }) if failure is not None: raise CommandError("Migration failed for app '{}' - migration '{}'.\n".format(*failure)) elif trace is not None: raise CommandError("Migrations failed unexpectedly. See self.state['traceback'] for details.")
[ "def", "__apply", "(", "self", ",", "migration", "=", "None", ",", "run_all", "=", "False", ")", ":", "out", "=", "StringIO", "(", ")", "trace", "=", "None", "migrate_kwargs", "=", "{", "'interactive'", ":", "False", ",", "'stdout'", ":", "out", ",", "'database'", ":", "self", ".", "_database_name", ",", "}", "if", "migration", "is", "not", "None", ":", "migrate_kwargs", ".", "update", "(", "{", "'app_label'", ":", "migration", "[", "0", "]", ",", "'migration_name'", ":", "migration", "[", "1", "]", ",", "}", ")", "elif", "not", "run_all", ":", "raise", "ValueError", "(", "'Either a migration must be provided or \"run_all\" must be True'", ")", "start", "=", "self", ".", "_timer", "(", ")", "try", ":", "call_command", "(", "\"migrate\"", ",", "*", "*", "migrate_kwargs", ")", "except", "Exception", ":", "trace", "=", "''", ".", "join", "(", "traceback", ".", "format_exception", "(", "*", "sys", ".", "exc_info", "(", ")", ")", ")", "finally", ":", "end", "=", "self", ".", "_timer", "(", ")", "successes", ",", "failure", "=", "self", ".", "_parse_migrate_output", "(", "out", ".", "getvalue", "(", ")", ")", "self", ".", "_migration_state", ".", "append", "(", "{", "'database'", ":", "self", ".", "_database_name", ",", "'migration'", ":", "'all'", "if", "run_all", "else", "(", "migration", "[", "0", "]", ",", "migration", "[", "1", "]", ")", ",", "'duration'", ":", "end", "-", "start", ",", "'output'", ":", "_remove_escape_characters", "(", "out", ".", "getvalue", "(", ")", ")", ",", "'succeeded_migrations'", ":", "successes", ",", "# [(app, migration), ...]", "'failed_migration'", ":", "failure", ",", "# (app, migration)", "'traceback'", ":", "trace", ",", "'succeeded'", ":", "failure", "is", "None", "and", "trace", "is", "None", ",", "}", ")", "if", "failure", "is", "not", "None", ":", "raise", "CommandError", "(", "\"Migration failed for app '{}' - migration '{}'.\\n\"", ".", "format", "(", "*", "failure", ")", ")", "elif", "trace", "is", "not", "None", ":", "raise", "CommandError", "(", "\"Migrations failed unexpectedly. See self.state['traceback'] for details.\"", ")" ]
If a migration is supplied, runs that migration and appends to state. If run_all==True, runs all migrations. Raises a ValueError if neither "migration" nor "run_all" are provided.
[ "If", "a", "migration", "is", "supplied", "runs", "that", "migration", "and", "appends", "to", "state", ".", "If", "run_all", "==", "True", "runs", "all", "migrations", ".", "Raises", "a", "ValueError", "if", "neither", "migration", "nor", "run_all", "are", "provided", "." ]
python
train
40.717391
sprockets/sprockets.mixins.amqp
sprockets/mixins/amqp/__init__.py
https://github.com/sprockets/sprockets.mixins.amqp/blob/de22b85aec1315bc01e47774637098c34525692b/sprockets/mixins/amqp/__init__.py#L428-L441
def on_connection_open(self, connection): """This method is called by pika once the connection to RabbitMQ has been established. :type connection: pika.TornadoConnection """ LOGGER.debug('Connection opened') connection.add_on_connection_blocked_callback( self.on_connection_blocked) connection.add_on_connection_unblocked_callback( self.on_connection_unblocked) connection.add_backpressure_callback(self.on_back_pressure_detected) self.channel = self._open_channel()
[ "def", "on_connection_open", "(", "self", ",", "connection", ")", ":", "LOGGER", ".", "debug", "(", "'Connection opened'", ")", "connection", ".", "add_on_connection_blocked_callback", "(", "self", ".", "on_connection_blocked", ")", "connection", ".", "add_on_connection_unblocked_callback", "(", "self", ".", "on_connection_unblocked", ")", "connection", ".", "add_backpressure_callback", "(", "self", ".", "on_back_pressure_detected", ")", "self", ".", "channel", "=", "self", ".", "_open_channel", "(", ")" ]
This method is called by pika once the connection to RabbitMQ has been established. :type connection: pika.TornadoConnection
[ "This", "method", "is", "called", "by", "pika", "once", "the", "connection", "to", "RabbitMQ", "has", "been", "established", "." ]
python
train
39.357143
ludeeus/pyruter
pyruter/api.py
https://github.com/ludeeus/pyruter/blob/415d8b9c8bfd48caa82c1a1201bfd3beb670a117/pyruter/api.py#L21-L53
async def get_departures(self): """Get departure info from stopid.""" from .common import CommonFunctions common = CommonFunctions(self.loop, self.session) departures = [] endpoint = '{}/StopVisit/GetDepartures/{}'.format(BASE_URL, str(self.stopid)) data = await common.api_call(endpoint) for entries in data or []: try: data = entries['MonitoredVehicleJourney'] if self.destination is not None: if data['DestinationName'] == self.destination: data = entries['MonitoredVehicleJourney'] line = data['LineRef'] destinationname = data['DestinationName'] monitored = data['MonitoredCall'] time = monitored['ExpectedDepartureTime'] departures.append({"time": time, "line": line, "destination": destinationname}) else: data = entries['MonitoredVehicleJourney'] line = data['LineRef'] destinationname = data['DestinationName'] monitored = data['MonitoredCall'] time = monitored['ExpectedDepartureTime'] departures.append({"time": time, "line": line, "destination": destinationname}) except (TypeError, KeyError, IndexError) as error: LOGGER.error('Error connecting to Ruter, %s', error) self._departures = await common.sort_data(departures, 'time')
[ "async", "def", "get_departures", "(", "self", ")", ":", "from", ".", "common", "import", "CommonFunctions", "common", "=", "CommonFunctions", "(", "self", ".", "loop", ",", "self", ".", "session", ")", "departures", "=", "[", "]", "endpoint", "=", "'{}/StopVisit/GetDepartures/{}'", ".", "format", "(", "BASE_URL", ",", "str", "(", "self", ".", "stopid", ")", ")", "data", "=", "await", "common", ".", "api_call", "(", "endpoint", ")", "for", "entries", "in", "data", "or", "[", "]", ":", "try", ":", "data", "=", "entries", "[", "'MonitoredVehicleJourney'", "]", "if", "self", ".", "destination", "is", "not", "None", ":", "if", "data", "[", "'DestinationName'", "]", "==", "self", ".", "destination", ":", "data", "=", "entries", "[", "'MonitoredVehicleJourney'", "]", "line", "=", "data", "[", "'LineRef'", "]", "destinationname", "=", "data", "[", "'DestinationName'", "]", "monitored", "=", "data", "[", "'MonitoredCall'", "]", "time", "=", "monitored", "[", "'ExpectedDepartureTime'", "]", "departures", ".", "append", "(", "{", "\"time\"", ":", "time", ",", "\"line\"", ":", "line", ",", "\"destination\"", ":", "destinationname", "}", ")", "else", ":", "data", "=", "entries", "[", "'MonitoredVehicleJourney'", "]", "line", "=", "data", "[", "'LineRef'", "]", "destinationname", "=", "data", "[", "'DestinationName'", "]", "monitored", "=", "data", "[", "'MonitoredCall'", "]", "time", "=", "monitored", "[", "'ExpectedDepartureTime'", "]", "departures", ".", "append", "(", "{", "\"time\"", ":", "time", ",", "\"line\"", ":", "line", ",", "\"destination\"", ":", "destinationname", "}", ")", "except", "(", "TypeError", ",", "KeyError", ",", "IndexError", ")", "as", "error", ":", "LOGGER", ".", "error", "(", "'Error connecting to Ruter, %s'", ",", "error", ")", "self", ".", "_departures", "=", "await", "common", ".", "sort_data", "(", "departures", ",", "'time'", ")" ]
Get departure info from stopid.
[ "Get", "departure", "info", "from", "stopid", "." ]
python
train
53.545455
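A hedged driver for the coroutine above; the constructor arguments are inferred from the attributes it uses (loop, session, stopid, destination), and the `departures` accessor for `_departures` is an assumption:

    import asyncio
    import aiohttp
    from pyruter.api import Departures  # assumed import path

    async def main(loop):
        async with aiohttp.ClientSession(loop=loop) as session:
            stop = Departures(loop, 3010930, session=session)
            await stop.get_departures()
            print(stop.departures)  # sorted by 'time' via common.sort_data

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main(loop))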
ARMmbed/icetea
build_docs.py
https://github.com/ARMmbed/icetea/blob/b2b97ac607429830cf7d62dae2e3903692c7c778/build_docs.py#L23-L60
def build_docs(location="doc-source", target=None, library="icetea_lib"): """ Build documentation for Icetea. Start by autogenerating module documentation and finish by building html. :param location: Documentation source :param target: Documentation target path :param library: Library location for autodoc. :return: 3 if something fails, 0 if successful. """ cmd_ar = ["sphinx-apidoc", "-o", location, library] try: print("Generating api docs.") retcode = check_call(cmd_ar) except CalledProcessError as error: print("Documentation build failed. Return code: {}".format(error.returncode)) return 3 except OSError as error: print(error) print("Documentation build failed. Are you missing Sphinx? Please install sphinx using " "'pip install sphinx'.") return 3 target = "doc{}html".format(os.sep) if target is None else target cmd_ar = ["sphinx-build", "-b", "html", location, target] try: print("Building html documentation.") retcode = check_call(cmd_ar) except CalledProcessError as error: print("Documentation build failed. Return code: {}".format(error.returncode)) return 3 except OSError as error: print(error) print("Documentation build failed. Are you missing Sphinx? Please install sphinx using " "'pip install sphinx'.") return 3 print("Documentation built.") return 0
[ "def", "build_docs", "(", "location", "=", "\"doc-source\"", ",", "target", "=", "None", ",", "library", "=", "\"icetea_lib\"", ")", ":", "cmd_ar", "=", "[", "\"sphinx-apidoc\"", ",", "\"-o\"", ",", "location", ",", "library", "]", "try", ":", "print", "(", "\"Generating api docs.\"", ")", "retcode", "=", "check_call", "(", "cmd_ar", ")", "except", "CalledProcessError", "as", "error", ":", "print", "(", "\"Documentation build failed. Return code: {}\"", ".", "format", "(", "error", ".", "returncode", ")", ")", "return", "3", "except", "OSError", "as", "error", ":", "print", "(", "error", ")", "print", "(", "\"Documentation build failed. Are you missing Sphinx? Please install sphinx using \"", "\"'pip install sphinx'.\"", ")", "return", "3", "target", "=", "\"doc{}html\"", ".", "format", "(", "os", ".", "sep", ")", "if", "target", "is", "None", "else", "target", "cmd_ar", "=", "[", "\"sphinx-build\"", ",", "\"-b\"", ",", "\"html\"", ",", "location", ",", "target", "]", "try", ":", "print", "(", "\"Building html documentation.\"", ")", "retcode", "=", "check_call", "(", "cmd_ar", ")", "except", "CalledProcessError", "as", "error", ":", "print", "(", "\"Documentation build failed. Return code: {}\"", ".", "format", "(", "error", ".", "returncode", ")", ")", "return", "3", "except", "OSError", "as", "error", ":", "print", "(", "error", ")", "print", "(", "\"Documentation build failed. Are you missing Sphinx? Please install sphinx using \"", "\"'pip install sphinx'.\"", ")", "return", "3", "print", "(", "\"Documentation built.\"", ")", "return", "0" ]
Build documentation for Icetea. Start by autogenerating module documentation and finish by building html. :param location: Documentation source :param target: Documentation target path :param library: Library location for autodoc. :return: 3 if something fails, 0 if successful.
[ "Build", "documentation", "for", "Icetea", ".", "Start", "by", "autogenerating", "module", "documentation", "and", "finish", "by", "building", "html", "." ]
python
train
38.473684
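A typical invocation from a packaging or CI script; the return value doubles as an exit status (0 on success, 3 on any Sphinx failure):

    import sys
    sys.exit(build_docs(location='doc-source', target='doc/html'))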
ManiacalLabs/BiblioPixel
bibliopixel/control/control.py
https://github.com/ManiacalLabs/BiblioPixel/blob/fd97e6c651a4bbcade64733847f4eec8f7704b7c/bibliopixel/control/control.py#L49-L69
def _receive(self, msg): """ Receive a message from the input source and perhaps raise an Exception. """ msg = self._convert(msg) if msg is None: return str_msg = self.verbose and self._msg_to_str(msg) if self.verbose and log.is_debug(): log.debug('Message %s', str_msg) if self.pre_routing: self.pre_routing.receive(msg) receiver, msg = self.routing.receive(msg) if receiver: receiver.receive(msg) if self.verbose: log.info('Routed message %s (%s) to %s', str_msg[:128], msg, repr(receiver))
[ "def", "_receive", "(", "self", ",", "msg", ")", ":", "msg", "=", "self", ".", "_convert", "(", "msg", ")", "if", "msg", "is", "None", ":", "return", "str_msg", "=", "self", ".", "verbose", "and", "self", ".", "_msg_to_str", "(", "msg", ")", "if", "self", ".", "verbose", "and", "log", ".", "is_debug", "(", ")", ":", "log", ".", "debug", "(", "'Message %s'", ",", "str_msg", ")", "if", "self", ".", "pre_routing", ":", "self", ".", "pre_routing", ".", "receive", "(", "msg", ")", "receiver", ",", "msg", "=", "self", ".", "routing", ".", "receive", "(", "msg", ")", "if", "receiver", ":", "receiver", ".", "receive", "(", "msg", ")", "if", "self", ".", "verbose", ":", "log", ".", "info", "(", "'Routed message %s (%s) to %s'", ",", "str_msg", "[", ":", "128", "]", ",", "msg", ",", "repr", "(", "receiver", ")", ")" ]
Receive a message from the input source and perhaps raise an Exception.
[ "Receive", "a", "message", "from", "the", "input", "source", "and", "perhaps", "raise", "an", "Exception", "." ]
python
valid
31.238095
vimalkvn/riboplot
riboplot/ribocore.py
https://github.com/vimalkvn/riboplot/blob/914515df54eccc2e726ba71e751c3260f2066d97/riboplot/ribocore.py#L260-L299
def get_ribo_counts(ribo_fileobj, transcript_name, read_lengths, read_offsets): """For each mapped read of the given transcript in the BAM file (pysam AlignmentFile object), return the position (+1) and the corresponding frame (1, 2 or 3) to which it aligns. Keyword arguments: ribo_fileobj -- file object - BAM file opened using pysam AlignmentFile transcript_name -- Name of transcript to get counts for read_length (optional) -- If provided, get counts only for reads of this length. """ read_counts = {} total_reads = 0 for record in ribo_fileobj.fetch(transcript_name): query_length = record.query_length position_ref = record.pos + 1 for index, read_length in enumerate(read_lengths): position = position_ref # reset position if read_length == 0 or read_length == query_length: # if an offset is specified, increment position by that offset. position += read_offsets[index] else: # ignore other reads/lengths continue total_reads += 1 try: read_counts[position] except KeyError: read_counts[position] = {1: 0, 2: 0, 3: 0} # calculate the frame of the read from position rem = position % 3 if rem == 0: read_counts[position][3] += 1 else: read_counts[position][rem] += 1 log.debug('Total read counts: {}'.format(total_reads)) log.debug('RiboSeq read counts for transcript: {0}\n{1}'.format(transcript_name, read_counts)) return read_counts, total_reads
[ "def", "get_ribo_counts", "(", "ribo_fileobj", ",", "transcript_name", ",", "read_lengths", ",", "read_offsets", ")", ":", "read_counts", "=", "{", "}", "total_reads", "=", "0", "for", "record", "in", "ribo_fileobj", ".", "fetch", "(", "transcript_name", ")", ":", "query_length", "=", "record", ".", "query_length", "position_ref", "=", "record", ".", "pos", "+", "1", "for", "index", ",", "read_length", "in", "enumerate", "(", "read_lengths", ")", ":", "position", "=", "position_ref", "# reset position", "if", "read_length", "==", "0", "or", "read_length", "==", "query_length", ":", "# if an offset is specified, increment position by that offset.", "position", "+=", "read_offsets", "[", "index", "]", "else", ":", "# ignore other reads/lengths", "continue", "total_reads", "+=", "1", "try", ":", "read_counts", "[", "position", "]", "except", "KeyError", ":", "read_counts", "[", "position", "]", "=", "{", "1", ":", "0", ",", "2", ":", "0", ",", "3", ":", "0", "}", "# calculate the frame of the read from position", "rem", "=", "position", "%", "3", "if", "rem", "==", "0", ":", "read_counts", "[", "position", "]", "[", "3", "]", "+=", "1", "else", ":", "read_counts", "[", "position", "]", "[", "rem", "]", "+=", "1", "log", ".", "debug", "(", "'Total read counts: {}'", ".", "format", "(", "total_reads", ")", ")", "log", ".", "debug", "(", "'RiboSeq read counts for transcript: {0}\\n{1}'", ".", "format", "(", "transcript_name", ",", "read_counts", ")", ")", "return", "read_counts", ",", "total_reads" ]
For each mapped read of the given transcript in the BAM file (pysam AlignmentFile object), return the position (+1) and the corresponding frame (1, 2 or 3) to which it aligns. Keyword arguments: ribo_fileobj -- file object - BAM file opened using pysam AlignmentFile transcript_name -- Name of transcript to get counts for read_length (optional) -- If provided, get counts only for reads of this length.
[ "For", "each", "mapped", "read", "of", "the", "given", "transcript", "in", "the", "BAM", "file", "(", "pysam", "AlignmentFile", "object", ")", "return", "the", "position", "(", "+", "1", ")", "and", "the", "corresponding", "frame", "(", "1", "2", "or", "3", ")", "to", "which", "it", "aligns", "." ]
python
train
41.275
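A usage sketch; the BAM path and transcript name are placeholders, and the BAM must be coordinate-sorted and indexed for fetch() to work:

    import pysam

    bam = pysam.AlignmentFile('sample.bam', 'rb')
    counts, total = get_ribo_counts(
        bam, 'ENST00000371588', read_lengths=[28], read_offsets=[12])
    # counts: {position: {1: n, 2: n, 3: n}, ...} with offset-shifted 1-based positions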
mrstephenneal/mysql-toolkit
mysql/toolkit/components/operations/clone.py
https://github.com/mrstephenneal/mysql-toolkit/blob/6964f718f4b72eb30f2259adfcfaf3090526c53d/mysql/toolkit/components/operations/clone.py#L22-L40
def _get_select_commands(self, source, tables): """ Create select queries for all of the tables from a source database. :param source: Source database name :param tables: Iterable of table names :return: List of (table, command) tuples """ # Create dictionary of select queries row_queries = {tbl: self.select_all(tbl, execute=False) for tbl in tqdm(tables, total=len(tables), desc='Getting {0} select queries'.format(source))} # Convert command strings into lists of commands for tbl, command in row_queries.items(): if isinstance(command, str): row_queries[tbl] = [command] # Pack commands into list of tuples return [(tbl, cmd) for tbl, cmds in row_queries.items() for cmd in cmds]
[ "def", "_get_select_commands", "(", "self", ",", "source", ",", "tables", ")", ":", "# Create dictionary of select queries", "row_queries", "=", "{", "tbl", ":", "self", ".", "select_all", "(", "tbl", ",", "execute", "=", "False", ")", "for", "tbl", "in", "tqdm", "(", "tables", ",", "total", "=", "len", "(", "tables", ")", ",", "desc", "=", "'Getting {0} select queries'", ".", "format", "(", "source", ")", ")", "}", "# Convert command strings into lists of commands", "for", "tbl", ",", "command", "in", "row_queries", ".", "items", "(", ")", ":", "if", "isinstance", "(", "command", ",", "str", ")", ":", "row_queries", "[", "tbl", "]", "=", "[", "command", "]", "# Pack commands into list of tuples", "return", "[", "(", "tbl", ",", "cmd", ")", "for", "tbl", ",", "cmds", "in", "row_queries", ".", "items", "(", ")", "for", "cmd", "in", "cmds", "]" ]
Create select queries for all of the tables from a source database. :param source: Source database name :param tables: Iterable of table names :return: List of (table, command) tuples
[ "Create", "select", "queries", "for", "all", "of", "the", "tables", "from", "a", "source", "database", "." ]
python
train
43.421053
release-engineering/productmd
productmd/images.py
https://github.com/release-engineering/productmd/blob/49256bf2e8c84124f42346241140b986ad7bfc38/productmd/images.py#L207-L225
def identify_image(image): """Provides a tuple of image's UNIQUE_IMAGE_ATTRIBUTES. Note: this is not guaranteed to be unique (and will often not be) for pre-1.1 metadata, as subvariant did not exist. Provided as a function so consumers can use it on plain image dicts read from the metadata or PDC. """ try: # Image instance case attrs = tuple(getattr(image, attr) for attr in UNIQUE_IMAGE_ATTRIBUTES) except AttributeError: # Plain dict case attrs = tuple(image.get(attr, None) for attr in UNIQUE_IMAGE_ATTRIBUTES) ui = UniqueImage(*attrs) # If unified is None (which could happen in the dict case, we want default # value of False instead. Also convert additional_variants to a list. return ui._replace( unified=ui.unified or False, additional_variants=ui.additional_variants or [] )
[ "def", "identify_image", "(", "image", ")", ":", "try", ":", "# Image instance case", "attrs", "=", "tuple", "(", "getattr", "(", "image", ",", "attr", ")", "for", "attr", "in", "UNIQUE_IMAGE_ATTRIBUTES", ")", "except", "AttributeError", ":", "# Plain dict case", "attrs", "=", "tuple", "(", "image", ".", "get", "(", "attr", ",", "None", ")", "for", "attr", "in", "UNIQUE_IMAGE_ATTRIBUTES", ")", "ui", "=", "UniqueImage", "(", "*", "attrs", ")", "# If unified is None (which could happen in the dict case, we want default", "# value of False instead. Also convert additional_variants to a list.", "return", "ui", ".", "_replace", "(", "unified", "=", "ui", ".", "unified", "or", "False", ",", "additional_variants", "=", "ui", ".", "additional_variants", "or", "[", "]", ")" ]
Provides a tuple of image's UNIQUE_IMAGE_ATTRIBUTES. Note: this is not guaranteed to be unique (and will often not be) for pre-1.1 metadata, as subvariant did not exist. Provided as a function so consumers can use it on plain image dicts read from the metadata or PDC.
[ "Provides", "a", "tuple", "of", "image", "s", "UNIQUE_IMAGE_ATTRIBUTES", ".", "Note", ":", "this", "is", "not", "guaranteed", "to", "be", "unique", "(", "and", "will", "often", "not", "be", ")", "for", "pre", "-", "1", ".", "1", "metadata", "as", "subvariant", "did", "not", "exist", ".", "Provided", "as", "a", "function", "so", "consumers", "can", "use", "it", "on", "plain", "image", "dicts", "read", "from", "the", "metadata", "or", "PDC", "." ]
python
train
45.210526
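A sketch of the plain-dict path; the field names shown are illustrative, since missing attributes simply come back as None before normalization:

    ui = identify_image({'arch': 'x86_64', 'unified': None})
    ui.unified               # -> False (None normalized to the default)
    ui.additional_variants   # -> []    (None becomes an empty list)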
ungarj/mapchete
mapchete/io/vector.py
https://github.com/ungarj/mapchete/blob/d482918d0e66a5b414dff6aa7cc854e01fc60ee4/mapchete/io/vector.py#L34-L105
def reproject_geometry( geometry, src_crs=None, dst_crs=None, error_on_clip=False, validity_check=True, antimeridian_cutting=False ): """ Reproject a geometry to target CRS. Also, clips geometry if it lies outside the destination CRS boundary. Supported destination CRSes for clipping: 4326 (WGS84), 3857 (Spherical Mercator) and 3035 (ETRS89 / ETRS-LAEA). Parameters ---------- geometry : ``shapely.geometry`` src_crs : ``rasterio.crs.CRS`` or EPSG code CRS of source data dst_crs : ``rasterio.crs.CRS`` or EPSG code target CRS error_on_clip : bool raises a ``RuntimeError`` if a geometry is outside of CRS bounds (default: False) validity_check : bool checks if reprojected geometry is valid and throws ``TopologicalError`` if invalid (default: True) antimeridian_cutting : bool cut geometry at Antimeridian; can result in a multipart output geometry Returns ------- geometry : ``shapely.geometry`` """ src_crs = _validated_crs(src_crs) dst_crs = _validated_crs(dst_crs) def _reproject_geom(geometry, src_crs, dst_crs): if geometry.is_empty: return geometry else: out_geom = to_shape( transform_geom( src_crs.to_dict(), dst_crs.to_dict(), mapping(geometry), antimeridian_cutting=antimeridian_cutting ) ) return _repair(out_geom) if validity_check else out_geom # return repaired geometry if no reprojection needed if src_crs == dst_crs or geometry.is_empty: return _repair(geometry) # geometry needs to be clipped to its CRS bounds elif ( dst_crs.is_epsg_code and # just in case for an CRS with EPSG code dst_crs.get("init") in CRS_BOUNDS and # if CRS has defined bounds dst_crs.get("init") != "epsg:4326" # and is not WGS84 (does not need clipping) ): wgs84_crs = CRS().from_epsg(4326) # get dst_crs boundaries crs_bbox = box(*CRS_BOUNDS[dst_crs.get("init")]) # reproject geometry to WGS84 geometry_4326 = _reproject_geom(geometry, src_crs, wgs84_crs) # raise error if geometry has to be clipped if error_on_clip and not geometry_4326.within(crs_bbox): raise RuntimeError("geometry outside target CRS bounds") # clip geometry dst_crs boundaries and return return _reproject_geom(crs_bbox.intersection(geometry_4326), wgs84_crs, dst_crs) # return without clipping if destination CRS does not have defined bounds else: return _reproject_geom(geometry, src_crs, dst_crs)
[ "def", "reproject_geometry", "(", "geometry", ",", "src_crs", "=", "None", ",", "dst_crs", "=", "None", ",", "error_on_clip", "=", "False", ",", "validity_check", "=", "True", ",", "antimeridian_cutting", "=", "False", ")", ":", "src_crs", "=", "_validated_crs", "(", "src_crs", ")", "dst_crs", "=", "_validated_crs", "(", "dst_crs", ")", "def", "_reproject_geom", "(", "geometry", ",", "src_crs", ",", "dst_crs", ")", ":", "if", "geometry", ".", "is_empty", ":", "return", "geometry", "else", ":", "out_geom", "=", "to_shape", "(", "transform_geom", "(", "src_crs", ".", "to_dict", "(", ")", ",", "dst_crs", ".", "to_dict", "(", ")", ",", "mapping", "(", "geometry", ")", ",", "antimeridian_cutting", "=", "antimeridian_cutting", ")", ")", "return", "_repair", "(", "out_geom", ")", "if", "validity_check", "else", "out_geom", "# return repaired geometry if no reprojection needed", "if", "src_crs", "==", "dst_crs", "or", "geometry", ".", "is_empty", ":", "return", "_repair", "(", "geometry", ")", "# geometry needs to be clipped to its CRS bounds", "elif", "(", "dst_crs", ".", "is_epsg_code", "and", "# just in case for an CRS with EPSG code", "dst_crs", ".", "get", "(", "\"init\"", ")", "in", "CRS_BOUNDS", "and", "# if CRS has defined bounds", "dst_crs", ".", "get", "(", "\"init\"", ")", "!=", "\"epsg:4326\"", "# and is not WGS84 (does not need clipping)", ")", ":", "wgs84_crs", "=", "CRS", "(", ")", ".", "from_epsg", "(", "4326", ")", "# get dst_crs boundaries", "crs_bbox", "=", "box", "(", "*", "CRS_BOUNDS", "[", "dst_crs", ".", "get", "(", "\"init\"", ")", "]", ")", "# reproject geometry to WGS84", "geometry_4326", "=", "_reproject_geom", "(", "geometry", ",", "src_crs", ",", "wgs84_crs", ")", "# raise error if geometry has to be clipped", "if", "error_on_clip", "and", "not", "geometry_4326", ".", "within", "(", "crs_bbox", ")", ":", "raise", "RuntimeError", "(", "\"geometry outside target CRS bounds\"", ")", "# clip geometry dst_crs boundaries and return", "return", "_reproject_geom", "(", "crs_bbox", ".", "intersection", "(", "geometry_4326", ")", ",", "wgs84_crs", ",", "dst_crs", ")", "# return without clipping if destination CRS does not have defined bounds", "else", ":", "return", "_reproject_geom", "(", "geometry", ",", "src_crs", ",", "dst_crs", ")" ]
Reproject a geometry to target CRS. Also, clips geometry if it lies outside the destination CRS boundary. Supported destination CRSes for clipping: 4326 (WGS84), 3857 (Spherical Mercator) and 3035 (ETRS89 / ETRS-LAEA). Parameters ---------- geometry : ``shapely.geometry`` src_crs : ``rasterio.crs.CRS`` or EPSG code CRS of source data dst_crs : ``rasterio.crs.CRS`` or EPSG code target CRS error_on_clip : bool raises a ``RuntimeError`` if a geometry is outside of CRS bounds (default: False) validity_check : bool checks if reprojected geometry is valid and throws ``TopologicalError`` if invalid (default: True) antimeridian_cutting : bool cut geometry at Antimeridian; can result in a multipart output geometry Returns ------- geometry : ``shapely.geometry``
[ "Reproject", "a", "geometry", "to", "target", "CRS", "." ]
python
valid
37.458333
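A usage sketch; per the docstring, bare EPSG codes are accepted in place of CRS objects:

    from shapely.geometry import box

    geom = box(10.0, 50.0, 11.0, 51.0)          # lon/lat rectangle
    out = reproject_geometry(geom, src_crs=4326, dst_crs=3857)
    # With a bounded target such as EPSG:3035, out-of-range parts are clipped,
    # or a RuntimeError is raised when error_on_clip=True.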
saulpw/visidata
visidata/vdtui.py
https://github.com/saulpw/visidata/blob/32771e0cea6c24fc7902683d14558391395c591f/visidata/vdtui.py#L1651-L1697
def checkCursor(self): 'Keep cursor in bounds of data and screen.' # keep cursor within actual available rowset if self.nRows == 0 or self.cursorRowIndex <= 0: self.cursorRowIndex = 0 elif self.cursorRowIndex >= self.nRows: self.cursorRowIndex = self.nRows-1 if self.cursorVisibleColIndex <= 0: self.cursorVisibleColIndex = 0 elif self.cursorVisibleColIndex >= self.nVisibleCols: self.cursorVisibleColIndex = self.nVisibleCols-1 if self.topRowIndex <= 0: self.topRowIndex = 0 elif self.topRowIndex > self.nRows-1: self.topRowIndex = self.nRows-1 # (x,y) is relative cell within screen viewport x = self.cursorVisibleColIndex - self.leftVisibleColIndex y = self.cursorRowIndex - self.topRowIndex + 1 # header # check bounds, scroll if necessary if y < 1: self.topRowIndex = self.cursorRowIndex elif y > self.nVisibleRows: self.topRowIndex = self.cursorRowIndex-self.nVisibleRows+1 if x <= 0: self.leftVisibleColIndex = self.cursorVisibleColIndex else: while True: if self.leftVisibleColIndex == self.cursorVisibleColIndex: # not much more we can do break self.calcColLayout() mincolidx, maxcolidx = min(self.visibleColLayout.keys()), max(self.visibleColLayout.keys()) if self.cursorVisibleColIndex < mincolidx: self.leftVisibleColIndex -= max((self.cursorVisibleColIndex - mincolidx)//2, 1) continue elif self.cursorVisibleColIndex > maxcolidx: self.leftVisibleColIndex += max((maxcolidx - self.cursorVisibleColIndex)//2, 1) continue cur_x, cur_w = self.visibleColLayout[self.cursorVisibleColIndex] if cur_x+cur_w < self.vd.windowWidth: # current columns fit entirely on screen break self.leftVisibleColIndex += 1
[ "def", "checkCursor", "(", "self", ")", ":", "# keep cursor within actual available rowset", "if", "self", ".", "nRows", "==", "0", "or", "self", ".", "cursorRowIndex", "<=", "0", ":", "self", ".", "cursorRowIndex", "=", "0", "elif", "self", ".", "cursorRowIndex", ">=", "self", ".", "nRows", ":", "self", ".", "cursorRowIndex", "=", "self", ".", "nRows", "-", "1", "if", "self", ".", "cursorVisibleColIndex", "<=", "0", ":", "self", ".", "cursorVisibleColIndex", "=", "0", "elif", "self", ".", "cursorVisibleColIndex", ">=", "self", ".", "nVisibleCols", ":", "self", ".", "cursorVisibleColIndex", "=", "self", ".", "nVisibleCols", "-", "1", "if", "self", ".", "topRowIndex", "<=", "0", ":", "self", ".", "topRowIndex", "=", "0", "elif", "self", ".", "topRowIndex", ">", "self", ".", "nRows", "-", "1", ":", "self", ".", "topRowIndex", "=", "self", ".", "nRows", "-", "1", "# (x,y) is relative cell within screen viewport", "x", "=", "self", ".", "cursorVisibleColIndex", "-", "self", ".", "leftVisibleColIndex", "y", "=", "self", ".", "cursorRowIndex", "-", "self", ".", "topRowIndex", "+", "1", "# header", "# check bounds, scroll if necessary", "if", "y", "<", "1", ":", "self", ".", "topRowIndex", "=", "self", ".", "cursorRowIndex", "elif", "y", ">", "self", ".", "nVisibleRows", ":", "self", ".", "topRowIndex", "=", "self", ".", "cursorRowIndex", "-", "self", ".", "nVisibleRows", "+", "1", "if", "x", "<=", "0", ":", "self", ".", "leftVisibleColIndex", "=", "self", ".", "cursorVisibleColIndex", "else", ":", "while", "True", ":", "if", "self", ".", "leftVisibleColIndex", "==", "self", ".", "cursorVisibleColIndex", ":", "# not much more we can do", "break", "self", ".", "calcColLayout", "(", ")", "mincolidx", ",", "maxcolidx", "=", "min", "(", "self", ".", "visibleColLayout", ".", "keys", "(", ")", ")", ",", "max", "(", "self", ".", "visibleColLayout", ".", "keys", "(", ")", ")", "if", "self", ".", "cursorVisibleColIndex", "<", "mincolidx", ":", "self", ".", "leftVisibleColIndex", "-=", "max", "(", "(", "self", ".", "cursorVisibleColIndex", "-", "mincolid", ")", "//", "2", ",", "1", ")", "continue", "elif", "self", ".", "cursorVisibleColIndex", ">", "maxcolidx", ":", "self", ".", "leftVisibleColIndex", "+=", "max", "(", "(", "maxcolidx", "-", "self", ".", "cursorVisibleColIndex", ")", "//", "2", ",", "1", ")", "continue", "cur_x", ",", "cur_w", "=", "self", ".", "visibleColLayout", "[", "self", ".", "cursorVisibleColIndex", "]", "if", "cur_x", "+", "cur_w", "<", "self", ".", "vd", ".", "windowWidth", ":", "# current columns fit entirely on screen", "break", "self", ".", "leftVisibleColIndex", "+=", "1" ]
Keep cursor in bounds of data and screen.
[ "Keep", "cursor", "in", "bounds", "of", "data", "and", "screen", "." ]
python
train
44.042553
mcash/merchant-api-python-sdk
mcash/mapi_client/mapi_client.py
https://github.com/mcash/merchant-api-python-sdk/blob/ebe8734126790354b71077aca519ff263235944e/mcash/mapi_client/mapi_client.py#L372-L455
def update_payment_request(self, tid, currency=None, amount=None, action=None, ledger=None, callback_uri=None, display_message_uri=None, capture_id=None, additional_amount=None, text=None, refund_id=None, required_scope=None, required_scope_text=None, line_items=None): """Update payment request, reauthorize, capture, release or abort It is possible to update ledger and the callback URIs for a payment request. Changes are always appended to the open report of a ledger, and notifications are sent to the callback registered at the time of notification. Capturing an authorized payment or reauthorizing is done with the action field. The call is idempotent; that is, if one posts the same amount, additional_amount and capture_id twice with action CAPTURE, only one capture is performed. Similarly, if one posts twice with action CAPTURE without any amount stated, to capture the full amount, only one full capture is performed. Arguments: ledger: Log entries will be added to the open report on the specified ledger display_message_uri: Messages that can be used to inform the POS operator about the progress of the payment request will be POSTed to this URI if provided callback_uri: If provided, mCASH will POST to this URI when the status of the payment request changes, using the message mechanism described in the introduction. The data in the "object" part of the message is the same as what can be retrieved by calling GET on the "/payment_request/<tid>/outcome/" resource URI. currency: 3 chars https://en.wikipedia.org/wiki/ISO_4217 amount: The base amount of the payment additional_amount: Typically cash withdrawal or gratuity capture_id: Local id for capture. Must be set if amount is set, otherwise capture_id must be unset. tid: Transaction id assigned by mCASH refund_id: Refund id needed when doing partial refund text: For example reason for refund. action: Action to perform. required_scope: Scopes required to fulfill payment line_items: An updated line_items. Will fail if line_items already set in the payment request or if the sum of the totals is different from the original amount. required_scope_text: Text that is shown to user when asked for permission. """ arguments = {'ledger': ledger, 'display_message_uri': display_message_uri, 'callback_uri': callback_uri, 'currency': currency, 'amount': amount, 'additional_amount': additional_amount, 'capture_id': capture_id, 'action': action, 'text': text, 'refund_id': refund_id} if required_scope: arguments['required_scope'] = required_scope arguments['required_scope_text'] = required_scope_text if line_items: arguments['line_items'] = line_items arguments = {k: v for k, v in arguments.items() if v is not None} return self.do_req('PUT', self.merchant_api_base_url + '/payment_request/' + tid + '/', arguments)
[ "def", "update_payment_request", "(", "self", ",", "tid", ",", "currency", "=", "None", ",", "amount", "=", "None", ",", "action", "=", "None", ",", "ledger", "=", "None", ",", "callback_uri", "=", "None", ",", "display_message_uri", "=", "None", ",", "capture_id", "=", "None", ",", "additional_amount", "=", "None", ",", "text", "=", "None", ",", "refund_id", "=", "None", ",", "required_scope", "=", "None", ",", "required_scope_text", "=", "None", ",", "line_items", "=", "None", ")", ":", "arguments", "=", "{", "'ledger'", ":", "ledger", ",", "'display_message_uri'", ":", "display_message_uri", ",", "'callback_uri'", ":", "callback_uri", ",", "'currency'", ":", "currency", ",", "'amount'", ":", "amount", ",", "'additional_amount'", ":", "additional_amount", ",", "'capture_id'", ":", "capture_id", ",", "'action'", ":", "action", ",", "'text'", ":", "text", ",", "'refund_id'", ":", "refund_id", "}", "if", "required_scope", ":", "arguments", "[", "'required_scope'", "]", "=", "required_scope", "arguments", "[", "'required_scope_text'", "]", "=", "required_scope_text", "if", "line_items", ":", "arguments", "[", "'line_items'", "]", "=", "line_items", "arguments", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "arguments", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}", "return", "self", ".", "do_req", "(", "'PUT'", ",", "self", ".", "merchant_api_base_url", "+", "'/payment_request/'", "+", "tid", "+", "'/'", ",", "arguments", ")" ]
Update payment request, reauthorize, capture, release or abort It is possible to update ledger and the callback URIs for a payment request. Changes are always appended to the open report of a ledger, and notifications are sent to the callback registered at the time of notification. Capturing an authorized payment or reauthorizing is done with the action field. The call is idempotent; that is, if one posts the same amount, additional_amount and capture_id twice with action CAPTURE, only one capture is performed. Similarly, if one posts twice with action CAPTURE without any amount stated, to capture the full amount, only one full capture is performed. Arguments: ledger: Log entries will be added to the open report on the specified ledger display_message_uri: Messages that can be used to inform the POS operator about the progress of the payment request will be POSTed to this URI if provided callback_uri: If provided, mCASH will POST to this URI when the status of the payment request changes, using the message mechanism described in the introduction. The data in the "object" part of the message is the same as what can be retrieved by calling GET on the "/payment_request/<tid>/outcome/" resource URI. currency: 3 chars https://en.wikipedia.org/wiki/ISO_4217 amount: The base amount of the payment additional_amount: Typically cash withdrawal or gratuity capture_id: Local id for capture. Must be set if amount is set, otherwise capture_id must be unset. tid: Transaction id assigned by mCASH refund_id: Refund id needed when doing partial refund text: For example reason for refund. action: Action to perform. required_scope: Scopes required to fulfill payment line_items: An updated line_items. Will fail if line_items already set in the payment request or if the sum of the totals is different from the original amount. required_scope_text: Text that is shown to user when asked for permission.
[ "Update", "payment", "request", "reauthorize", "capture", "release", "or", "abort" ]
python
train
45.5
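A hedged sketch of a full capture via the update_payment_request record above. The client construction and the tid value are placeholders (the MapiClient constructor arguments are not shown in this record); only the method name, its keyword arguments, and the idempotency note come from the record.

from mcash.mapi_client import MapiClient  # assumed import path

client = MapiClient('merchant-id', 'api-key')  # hypothetical constructor args

# Posting action=CAPTURE without an amount captures the full authorized
# amount; per the docstring the call is idempotent.
client.update_payment_request(
    tid='abc123',  # placeholder transaction id assigned by mCASH
    action='CAPTURE',
)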
swevm/scaleio-py
scaleiopy/api/scaleio/provisioning/volume.py
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/api/scaleio/provisioning/volume.py#L228-L240
def create_snapshot(self, systemId, snapshotSpecificationObject): """ Create snapshot for list of volumes :param systemId: Cluster ID :param snapshotSpecificationObject: Of class SnapshotSpecification :rtype: SnapshotGroupId """ self.conn.connection._check_login() #try: response = self.conn.connection._do_post("{}/{}{}/{}".format(self.conn.connection._api_url, "instances/System::", systemId, 'action/snapshotVolumes'), json=snapshotSpecificationObject.__to_dict__()) #except: # raise RuntimeError("create_snapshot_by_system_id() - Error communicating with ScaleIO gateway") return response
[ "def", "create_snapshot", "(", "self", ",", "systemId", ",", "snapshotSpecificationObject", ")", ":", "self", ".", "conn", ".", "connection", ".", "_check_login", "(", ")", "#try:", "response", "=", "self", ".", "conn", ".", "connection", ".", "_do_post", "(", "\"{}/{}{}/{}\"", ".", "format", "(", "self", ".", "conn", ".", "connection", ".", "_api_url", ",", "\"instances/System::\"", ",", "systemId", ",", "'action/snapshotVolumes'", ")", ",", "json", "=", "snapshotSpecificationObject", ".", "__to_dict__", "(", ")", ")", "#except:", "# raise RuntimeError(\"create_snapshot_by_system_id() - Error communicating with ScaleIO gateway\")", "return", "response" ]
Create snapshot for list of volumes :param systemId: Cluster ID :param snapshotSpecificationObject: Of class SnapshotSpecification :rtype: SnapshotGroupId
[ "Create", "snapshot", "for", "list", "of", "volumes", ":", "param", "systemID", ":", "Cluster", "ID", ":", "param", "snapshotSpecificationObject", ":", "Of", "class", "SnapshotSpecification", ":", "rtype", ":", "SnapshotGroupId" ]
python
train
52.307692
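A hedged sketch of the create_snapshot record above. The SnapshotSpecification construction, its addVolume helper, and the client wiring are hypothetical illustrations; only the method call itself follows the record.

spec = SnapshotSpecification()          # hypothetical: the real constructor is not shown here
spec.addVolume('volume-id', 'my-snap')  # hypothetical helper, for illustration only

# sio is assumed to be a logged-in scaleiopy client exposing this Volume API.
result = sio.volume.create_snapshot('system-id', spec)
print(result)  # raw gateway response; expected to carry the snapshot group id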
fulfilio/fulfil-python-api
fulfil_client/client.py
https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L173-L196
def login(self, login, password, set_auth=False): """ Attempts a login to the remote server and on success returns user id and session or None Warning: Do not depend on this. This will be deprecated with SSO. param set_auth: sets the authentication on the client """ rv = self.session.post( self.host, dumps({ "method": "common.db.login", "params": [login, password] }), ) rv = loads(rv.content)['result'] if set_auth: self.set_auth( SessionAuth(login, *rv) ) return rv
[ "def", "login", "(", "self", ",", "login", ",", "password", ",", "set_auth", "=", "False", ")", ":", "rv", "=", "self", ".", "session", ".", "post", "(", "self", ".", "host", ",", "dumps", "(", "{", "\"method\"", ":", "\"common.db.login\"", ",", "\"params\"", ":", "[", "login", ",", "password", "]", "}", ")", ",", ")", "rv", "=", "loads", "(", "rv", ".", "content", ")", "[", "'result'", "]", "if", "set_auth", ":", "self", ".", "set_auth", "(", "SessionAuth", "(", "login", ",", "*", "rv", ")", ")", "return", "rv" ]
Attempts a login to the remote server and on success returns user id and session or None Warning: Do not depend on this. This will be deprecated with SSO. param set_auth: sets the authentication on the client
[ "Attempts", "a", "login", "to", "the", "remote", "server", "and", "on", "success", "returns", "user", "id", "and", "session", "or", "None" ]
python
train
27.5
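A hedged sketch of Client.login from the record above. The Client constructor arguments are an assumption about fulfil_client, and the docstring itself warns the method will be deprecated with SSO, so treat this as illustration only.

from fulfil_client import Client  # assumed import surface

client = Client('mycompany', 'api-key')  # hypothetical subdomain / key

# On success rv holds the user id and session; set_auth=True installs
# SessionAuth on the client so subsequent calls are authenticated.
rv = client.login('user@example.com', 'secret', set_auth=True)
print(rv)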
danielperna84/pyhomematic
pyhomematic/devicetypes/generic.py
https://github.com/danielperna84/pyhomematic/blob/8b91f3e84c83f05d289c740d507293a0d6759d8e/pyhomematic/devicetypes/generic.py#L110-L128
def putParamset(self, paramset, data={}): """ Some devices act upon changes to paramsets. A "putted" paramset must not contain all keys available in the specified paramset, just the ones which are writable and should be changed. """ try: if paramset in self._PARAMSETS and data: self._proxy.putParamset(self._ADDRESS, paramset, data) # We update all paramsets to at least have a temporarily accurate state for the device. # This might not be true for tasks that take long to complete (lifting a rollershutter completely etc.). # For this the server-process has to call the updateParamsets-method when it receives events for the device. self.updateParamsets() return True else: return False except Exception as err: LOG.error("HMGeneric.putParamset: Exception: " + str(err)) return False
[ "def", "putParamset", "(", "self", ",", "paramset", ",", "data", "=", "{", "}", ")", ":", "try", ":", "if", "paramset", "in", "self", ".", "_PARAMSETS", "and", "data", ":", "self", ".", "_proxy", ".", "putParamset", "(", "self", ".", "_ADDRESS", ",", "paramset", ",", "data", ")", "# We update all paramsets to at least have a temporarily accurate state for the device.", "# This might not be true for tasks that take long to complete (lifting a rollershutter completely etc.).", "# For this the server-process has to call the updateParamsets-method when it receives events for the device.", "self", ".", "updateParamsets", "(", ")", "return", "True", "else", ":", "return", "False", "except", "Exception", "as", "err", ":", "LOG", ".", "error", "(", "\"HMGeneric.putParamset: Exception: \"", "+", "str", "(", "err", ")", ")", "return", "False" ]
Some devices act upon changes to paramsets. A "putted" paramset must not contain all keys available in the specified paramset, just the ones which are writable and should be changed.
[ "Some", "devices", "act", "upon", "changes", "to", "paramsets", ".", "A", "putted", "paramset", "must", "not", "contain", "all", "keys", "available", "in", "the", "specified", "paramset", "just", "the", "ones", "which", "are", "writable", "and", "should", "be", "changed", "." ]
python
train
51.789474
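A hedged sketch of putParamset from the record above. The connection setup and device address are placeholders, and the 'VALUES' paramset with a 'LEVEL' key is a common HomeMatic convention assumed here, not something this record states.

from pyhomematic import HMConnection  # assumed top-level entry point

hm = HMConnection(autostart=True)   # hypothetical: real setup needs CCU remote config
device = hm.devices['NEQ1234567']   # placeholder device address

ok = device.putParamset('VALUES', {'LEVEL': 0.5})
if not ok:
    # False means the paramset was unknown, data was empty, or the proxy
    # call failed (the exception is logged, not raised).
    print('putParamset rejected')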
wmayner/pyphi
pyphi/connectivity.py
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L26-L28
def get_outputs_from_cm(index, cm): """Return indices of the outputs of node with the given index.""" return tuple(i for i in range(cm.shape[0]) if cm[index][i])
[ "def", "get_outputs_from_cm", "(", "index", ",", "cm", ")", ":", "return", "tuple", "(", "i", "for", "i", "in", "range", "(", "cm", ".", "shape", "[", "0", "]", ")", "if", "cm", "[", "index", "]", "[", "i", "]", ")" ]
Return indices of the outputs of node with the given index.
[ "Return", "indices", "of", "the", "outputs", "of", "node", "with", "the", "given", "index", "." ]
python
train
55.666667
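A self-contained demo of get_outputs_from_cm from the record above; cm[i][j] nonzero means node i sends an edge to node j. The import path follows the record's url.

import numpy as np

from pyphi.connectivity import get_outputs_from_cm  # module path per the record's url

cm = np.array([
    [0, 1, 1],   # node 0 projects to nodes 1 and 2
    [0, 0, 1],   # node 1 projects to node 2
    [0, 0, 0],   # node 2 has no outputs
])
print(get_outputs_from_cm(0, cm))  # (1, 2)
print(get_outputs_from_cm(2, cm))  # ()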
iotile/coretools
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L128-L149
def load_scenario(self, scenario_name, **kwargs): """Load a scenario into the emulated object. Scenarios are specific states of an object that can be customized with keyword parameters. Typical examples are: - data logger with full storage - device with low battery indication on Args: scenario_name (str): The name of the scenario that we wish to load. **kwargs: Any arguments that should be passed to configure the scenario. These arguments will be passed directly to the scenario handler. """ scenario = self._known_scenarios.get(scenario_name) if scenario is None: raise ArgumentError("Unknown scenario %s" % scenario_name, known_scenarios=list(self._known_scenarios)) scenario(**kwargs)
[ "def", "load_scenario", "(", "self", ",", "scenario_name", ",", "*", "*", "kwargs", ")", ":", "scenario", "=", "self", ".", "_known_scenarios", ".", "get", "(", "scenario_name", ")", "if", "scenario", "is", "None", ":", "raise", "ArgumentError", "(", "\"Unknown scenario %s\"", "%", "scenario_name", ",", "known_scenarios", "=", "list", "(", "self", ".", "_known_scenarios", ")", ")", "scenario", "(", "*", "*", "kwargs", ")" ]
Load a scenario into the emulated object. Scenarios are specific states of an object that can be customized with keyword parameters. Typical examples are: - data logger with full storage - device with low battery indication on Args: scenario_name (str): The name of the scenario that we wish to load. **kwargs: Any arguments that should be passed to configure the scenario. These arguments will be passed directly to the scenario handler.
[ "Load", "a", "scenario", "into", "the", "emulated", "object", "." ]
python
train
38.545455
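A hedged sketch of load_scenario from the record above. The emulated device object, the scenario name, and its kwargs are placeholders; scenario names and their parameters are defined by whoever registered the handler.

device = EmulatedDevice()  # hypothetical object built on EmulationMixin

# 'storage_full' and fill_percent are placeholder names; the kwargs are
# forwarded unchanged to the registered scenario handler.
device.load_scenario('storage_full', fill_percent=100)

# An unknown scenario name raises ArgumentError, which lists the
# registered scenarios for the object.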
O365/python-o365
O365/excel.py
https://github.com/O365/python-o365/blob/02a71cf3775cc6a3c042e003365d6a07c8c75a73/O365/excel.py#L615-L636
def to_api_data(self, restrict_keys=None): """ Returns a dict to communicate with the server :param restrict_keys: a set of keys to restrict the returned data to :rtype: dict """ cc = self._cc # alias data = { cc('column_hidden'): self._column_hidden, cc('row_hidden'): self._row_hidden, cc('formulas'): self._formulas, cc('formulas_local'): self._formulas_local, cc('formulas_r1_c1'): self._formulas_r1_c1, cc('number_format'): self._number_format, cc('values'): self._values, } if restrict_keys: for key in list(data.keys()): if key not in restrict_keys: del data[key] return data
[ "def", "to_api_data", "(", "self", ",", "restrict_keys", "=", "None", ")", ":", "cc", "=", "self", ".", "_cc", "# alias", "data", "=", "{", "cc", "(", "'column_hidden'", ")", ":", "self", ".", "_column_hidden", ",", "cc", "(", "'row_hidden'", ")", ":", "self", ".", "_row_hidden", ",", "cc", "(", "'formulas'", ")", ":", "self", ".", "_formulas", ",", "cc", "(", "'formulas_local'", ")", ":", "self", ".", "_formulas_local", ",", "cc", "(", "'formulas_r1_c1'", ")", ":", "self", ".", "_formulas_r1_c1", ",", "cc", "(", "'number_format'", ")", ":", "self", ".", "_number_format", ",", "cc", "(", "'values'", ")", ":", "self", ".", "_values", ",", "}", "if", "restrict_keys", ":", "for", "key", "in", "list", "(", "data", ".", "keys", "(", ")", ")", ":", "if", "key", "not", "in", "restrict_keys", ":", "del", "data", "[", "key", "]", "return", "data" ]
Returns a dict to communicate with the server :param restrict_keys: a set of keys to restrict the returned data to :rtype: dict
[ "Returns", "a", "dict", "to", "communicate", "with", "the", "server" ]
python
train
34.909091
guaix-ucm/numina
numina/core/oresult.py
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/core/oresult.py#L75-L83
def get_sample_frame(self): """Return first available image in observation result""" for frame in self.frames: return frame.open() for res in self.results.values(): return res.open() return None
[ "def", "get_sample_frame", "(", "self", ")", ":", "for", "frame", "in", "self", ".", "frames", ":", "return", "frame", ".", "open", "(", ")", "for", "res", "in", "self", ".", "results", ".", "values", "(", ")", ":", "return", "res", ".", "open", "(", ")", "return", "None" ]
Return first available image in observation result
[ "Return", "first", "available", "image", "in", "observation", "result" ]
python
train
27.111111
openvax/isovar
isovar/read_helpers.py
https://github.com/openvax/isovar/blob/b39b684920e3f6b344851d6598a1a1c67bce913b/isovar/read_helpers.py#L25-L39
def get_single_allele_from_reads(allele_reads): """ Given a sequence of AlleleRead objects, which are expected to all have the same allele, return that allele. """ allele_reads = list(allele_reads) if len(allele_reads) == 0: raise ValueError("Expected non-empty list of AlleleRead objects") seq = allele_reads[0].allele if any(read.allele != seq for read in allele_reads): raise ValueError("Expected all AlleleRead objects to have same allele '%s', got %s" % ( seq, allele_reads)) return seq
[ "def", "get_single_allele_from_reads", "(", "allele_reads", ")", ":", "allele_reads", "=", "list", "(", "allele_reads", ")", "if", "len", "(", "allele_reads", ")", "==", "0", ":", "raise", "ValueError", "(", "\"Expected non-empty list of AlleleRead objects\"", ")", "seq", "=", "allele_reads", "[", "0", "]", ".", "allele", "if", "any", "(", "read", ".", "allele", "!=", "seq", "for", "read", "in", "allele_reads", ")", ":", "raise", "ValueError", "(", "\"Expected all AlleleRead objects to have same allele '%s', got %s\"", "%", "(", "seq", ",", "allele_reads", ")", ")", "return", "seq" ]
Given a sequence of AlleleRead objects, which are expected to all have the same allele, return that allele.
[ "Given", "a", "sequence", "of", "AlleleRead", "objects", "which", "are", "expected", "to", "all", "have", "the", "same", "allele", "return", "that", "allele", "." ]
python
train
36.2
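A demo of get_single_allele_from_reads from the record above, using a stand-in read type: the function only relies on the .allele attribute, so a namedtuple suffices for illustration. The import path follows the record's url.

from collections import namedtuple

from isovar.read_helpers import get_single_allele_from_reads  # path per the record's url

FakeRead = namedtuple('FakeRead', 'allele')  # stand-in for isovar's AlleleRead

print(get_single_allele_from_reads([FakeRead('A'), FakeRead('A')]))  # 'A'
# An empty sequence, or reads with differing alleles, raises ValueError:
# get_single_allele_from_reads([FakeRead('A'), FakeRead('T')])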
gabstopper/smc-python
smc/core/sub_interfaces.py
https://github.com/gabstopper/smc-python/blob/e027b8a5dcfaf884eada32d113d41c1e56b32457/smc/core/sub_interfaces.py#L491-L509
def delete(self): """ Delete a loopback cluster virtual interface from this engine. Changes to the engine configuration are done immediately. You can find cluster virtual loopbacks by iterating at the engine level:: for loopbacks in engine.loopback_interface: ... :raises UpdateElementFailed: failure to delete loopback interface :return: None """ self._engine.data[self.typeof] = \ [loopback for loopback in self._engine.data.get(self.typeof, []) if loopback.get('address') != self.address] self._engine.update()
[ "def", "delete", "(", "self", ")", ":", "self", ".", "_engine", ".", "data", "[", "self", ".", "typeof", "]", "=", "[", "loopback", "for", "loopback", "in", "self", ".", "_engine", ".", "data", ".", "get", "(", "self", ".", "typeof", ",", "[", "]", ")", "if", "loopback", ".", "get", "(", "'address'", ")", "!=", "self", ".", "address", "]", "self", ".", "_engine", ".", "update", "(", ")" ]
Delete a loopback cluster virtual interface from this engine. Changes to the engine configuration are done immediately. You can find cluster virtual loopbacks by iterating at the engine level:: for loopbacks in engine.loopback_interface: ... :raises UpdateElementFailed: failure to delete loopback interface :return: None
[ "Delete", "a", "loopback", "cluster", "virtual", "interface", "from", "this", "engine", ".", "Changes", "to", "the", "engine", "configuration", "are", "done", "immediately", ".", "You", "can", "find", "cluster", "virtual", "loopbacks", "by", "iterating", "at", "the", "engine", "level", "::", "for", "loopbacks", "in", "engine", ".", "loopback_interface", ":", "...", ":", "raises", "UpdateElementFailed", ":", "failure", "to", "delete", "loopback", "interface", ":", "return", ":", "None" ]
python
train
35.210526
adafruit/Adafruit_Python_LED_Backpack
Adafruit_LED_Backpack/HT16K33.py
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/HT16K33.py#L93-L96
def write_display(self): """Write display buffer to display hardware.""" for i, value in enumerate(self.buffer): self._device.write8(i, value)
[ "def", "write_display", "(", "self", ")", ":", "for", "i", ",", "value", "in", "enumerate", "(", "self", ".", "buffer", ")", ":", "self", ".", "_device", ".", "write8", "(", "i", ",", "value", ")" ]
Write display buffer to display hardware.
[ "Write", "display", "buffer", "to", "display", "hardware", "." ]
python
train
41.75
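A hedged sketch of write_display from the record above. begin() and set_led() are other methods of the same Adafruit driver, included here as assumptions about its usual call sequence rather than facts from this record.

from Adafruit_LED_Backpack import HT16K33  # import path per the record's url

display = HT16K33.HT16K33()  # assumed default I2C address
display.begin()              # assumed init call on the same driver
display.set_led(0, 1)        # assumed buffer-mutating helper
display.write_display()      # flush the software buffer to the chip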
oasis-open/cti-stix-validator
stix2validator/v21/enums.py
https://github.com/oasis-open/cti-stix-validator/blob/a607014e3fa500a7678f8b61b278456ca581f9d0/stix2validator/v21/enums.py#L1659-L1683
def char_sets(): """Return a list of the IANA Character Sets, or an empty list if the IANA website is unreachable. Store it as a function attribute so that we only build the list once. """ if not hasattr(char_sets, 'setlist'): clist = [] try: data = requests.get('http://www.iana.org/assignments/character-' 'sets/character-sets-1.csv') except requests.exceptions.RequestException: return [] for line in data.iter_lines(): if line: line = line.decode("utf-8") if line.count(',') > 0: vals = line.split(',') if vals[0]: clist.append(vals[0]) else: clist.append(vals[1]) char_sets.setlist = clist return char_sets.setlist
[ "def", "char_sets", "(", ")", ":", "if", "not", "hasattr", "(", "char_sets", ",", "'setlist'", ")", ":", "clist", "=", "[", "]", "try", ":", "data", "=", "requests", ".", "get", "(", "'http://www.iana.org/assignments/character-'", "'sets/character-sets-1.csv'", ")", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "return", "[", "]", "for", "line", "in", "data", ".", "iter_lines", "(", ")", ":", "if", "line", ":", "line", "=", "line", ".", "decode", "(", "\"utf-8\"", ")", "if", "line", ".", "count", "(", "','", ")", ">", "0", ":", "vals", "=", "line", ".", "split", "(", "','", ")", "if", "vals", "[", "0", "]", ":", "clist", ".", "append", "(", "vals", "[", "0", "]", ")", "else", ":", "clist", ".", "append", "(", "vals", "[", "1", "]", ")", "char_sets", ".", "setlist", "=", "clist", "return", "char_sets", ".", "setlist" ]
Return a list of the IANA Character Sets, or an empty list if the IANA website is unreachable. Store it as a function attribute so that we only build the list once.
[ "Return", "a", "list", "of", "the", "IANA", "Character", "Sets", "or", "an", "empty", "list", "if", "the", "IANA", "website", "is", "unreachable", ".", "Store", "it", "as", "a", "function", "attribute", "so", "that", "we", "only", "build", "the", "list", "once", "." ]
python
train
34.76
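A usage sketch for char_sets from the record above. The first call hits the IANA registry over HTTP and caches the list on the function object, so repeated calls are cheap; an unreachable registry yields an empty list.

from stix2validator.v21.enums import char_sets  # module path per the record's url

sets = char_sets()
if not sets:
    print('IANA registry unreachable; character-set list is empty')
else:
    print('US-ASCII' in sets)  # membership check against the cached list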
neuropsychology/NeuroKit.py
neurokit/miscellaneous/miscellaneous.py
https://github.com/neuropsychology/NeuroKit.py/blob/c9589348fbbde0fa7e986048c48f38e6b488adfe/neurokit/miscellaneous/miscellaneous.py#L84-L119
def get(self, reset=True): """ Get time since last initialisation / reset. Parameters ---------- reset = bool, optional Should the clock be reset after returning time? Returns ---------- float Time passed in milliseconds. Example ---------- >>> import neurokit as nk >>> time_passed_since_neurobox_loading = nk.time.get() >>> nk.time.reset() >>> time_passed_since_reset = nk.time.get() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - time """ t = (builtin_time.clock()-self.clock)*1000 if reset is True: self.reset() return(t)
[ "def", "get", "(", "self", ",", "reset", "=", "True", ")", ":", "t", "=", "(", "builtin_time", ".", "clock", "(", ")", "-", "self", ".", "clock", ")", "*", "1000", "if", "reset", "is", "True", ":", "self", ".", "reset", "(", ")", "return", "(", "t", ")" ]
Get time since last initialisation / reset. Parameters ---------- reset = bool, optional Should the clock be reset after returning time? Returns ---------- float Time passed in milliseconds. Example ---------- >>> import neurokit as nk >>> time_passed_since_neurobox_loading = nk.time.get() >>> nk.time.reset() >>> time_passed_since_reset = nk.time.get() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - time
[ "Get", "time", "since", "last", "initialisation", "/", "reset", "." ]
python
train
21.833333
lrq3000/pyFileFixity
pyFileFixity/lib/brownanrs/rs.py
https://github.com/lrq3000/pyFileFixity/blob/fd5ef23bb13835faf1e3baa773619b86a1cc9bdf/pyFileFixity/lib/brownanrs/rs.py#L225-L246
def check_fast(self, r, k=None): '''Fast check if there's any error in a message+ecc. Can be used before decoding, in addition to hashes to detect if the message was tampered, or after decoding to check that the message was fully recovered. returns True/False ''' n = self.n if not k: k = self.k #h = self.h[k] g = self.g[k] # If we were given a string, convert to a list (important to support fields above 2^8) if isinstance(r, _str): r = [ord(x) for x in r] # Turn r into a polynomial r = Polynomial([GF2int(x) for x in r]) # Compute the syndromes: sz = self._syndromes(r, k=k) # Checking that the syndrome is all 0 is sufficient to check if there are no more any errors in the decoded message #return all(int(x) == 0 for x in sz) return sz.coefficients.count(GF2int(0)) == len(sz)
[ "def", "check_fast", "(", "self", ",", "r", ",", "k", "=", "None", ")", ":", "n", "=", "self", ".", "n", "if", "not", "k", ":", "k", "=", "self", ".", "k", "#h = self.h[k]", "g", "=", "self", ".", "g", "[", "k", "]", "# If we were given a string, convert to a list (important to support fields above 2^8)", "if", "isinstance", "(", "r", ",", "_str", ")", ":", "r", "=", "[", "ord", "(", "x", ")", "for", "x", "in", "r", "]", "# Turn r into a polynomial", "r", "=", "Polynomial", "(", "[", "GF2int", "(", "x", ")", "for", "x", "in", "r", "]", ")", "# Compute the syndromes:", "sz", "=", "self", ".", "_syndromes", "(", "r", ",", "k", "=", "k", ")", "# Checking that the syndrome is all 0 is sufficient to check if there are no more any errors in the decoded message", "#return all(int(x) == 0 for x in sz)", "return", "sz", ".", "coefficients", ".", "count", "(", "GF2int", "(", "0", ")", ")", "==", "len", "(", "sz", ")" ]
Fast check if there's any error in a message+ecc. Can be used before decoding, in addition to hashes to detect if the message was tampered, or after decoding to check that the message was fully recovered. returns True/False
[ "Fast", "check", "if", "there", "s", "any", "error", "in", "a", "message", "+", "ecc", ".", "Can", "be", "used", "before", "decoding", "in", "addition", "to", "hashes", "to", "detect", "if", "the", "message", "was", "tampered", "or", "after", "decoding", "to", "check", "that", "the", "message", "was", "fully", "recovered", ".", "returns", "True", "/", "False" ]
python
train
41.181818
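A hedged sketch of check_fast from the record above. The RSCoder class name, its (n, k) constructor, and the encode() counterpart follow the common brownanrs convention and are assumptions here; only check_fast itself is taken from the record.

from pyFileFixity.lib.brownanrs.rs import RSCoder  # assumed class in rs.py

coder = RSCoder(255, 223)          # hypothetical n, k parameters
codeword = coder.encode('hello')   # assumed encode() counterpart
print(coder.check_fast(codeword))  # True: every syndrome coefficient is zero

# Corrupting a byte makes the zero-syndrome test fail:
tampered = 'X' + codeword[1:]
print(coder.check_fast(tampered))  # False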
ranaroussi/pywallet
pywallet/utils/keys.py
https://github.com/ranaroussi/pywallet/blob/206ff224389c490d8798f660c9e79fe97ebb64cf/pywallet/utils/keys.py#L302-L313
def create_point(self, x, y): """Create an ECDSA point on the SECP256k1 curve with the given coords. :param x: The x coordinate on the curve :type x: long :param y: The y coordinate on the curve :type y: long """ if (not isinstance(x, six.integer_types) or not isinstance(y, six.integer_types)): raise ValueError("The coordinates must be longs.") return _ECDSA_Point(SECP256k1.curve, x, y)
[ "def", "create_point", "(", "self", ",", "x", ",", "y", ")", ":", "if", "(", "not", "isinstance", "(", "x", ",", "six", ".", "integer_types", ")", "or", "not", "isinstance", "(", "y", ",", "six", ".", "integer_types", ")", ")", ":", "raise", "ValueError", "(", "\"The coordinates must be longs.\"", ")", "return", "_ECDSA_Point", "(", "SECP256k1", ".", "curve", ",", "x", ",", "y", ")" ]
Create an ECDSA point on the SECP256k1 curve with the given coords. :param x: The x coordinate on the curve :type x: long :param y: The y coordinate on the curve :type y: long
[ "Create", "an", "ECDSA", "point", "on", "the", "SECP256k1", "curve", "with", "the", "given", "coords", "." ]
python
train
39.166667
mesowx/MesoPy
MesoPy.py
https://github.com/mesowx/MesoPy/blob/cd1e837e108ed7a110d81cf789f19afcdd52145b/MesoPy.py#L422-L495
def timeseries(self, start, end, **kwargs): r""" Returns a time series of observations at a user specified location for a specified time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may also be included. See below mandatory and optional parameters. Also see the metadata() function for station IDs. Arguments: ---------- start: string, mandatory Start date in form of YYYYMMDDhhmm. MUST BE USED WITH THE END PARAMETER. Default time is UTC e.g., start='201306011800' end: string, mandatory End date in form of YYYYMMDDhhmm. MUST BE USED WITH THE START PARAMETER. Default time is UTC e.g., end='201306011800' obtimezone: string, optional Set to either UTC or local. Sets timezone of obs. Default is UTC. e.g. obtimezone='local' showemptystations: string, optional Set to '1' to show stations even if no obs exist that match the time period. Stations without obs are omitted by default. stid: string, optional Single or comma separated list of MesoWest station IDs. e.g. stid='kden,kslc,wbb' county: string, optional County/parish/borough (US/Canada only), full name e.g. county='Larimer' state: string, optional US state, 2-letter ID e.g. state='CO' country: string, optional Single or comma separated list of abbreviated 2 or 3 character countries e.g. country='us,ca,mx' radius: string, optional Distance from a lat/lon pt or stid as [lat,lon,radius (mi)] or [stid, radius (mi)]. e.g. radius="-120,40,20" bbox: string, optional Stations within a [lon/lat] box in the order [lonmin,latmin,lonmax,latmax] e.g. bbox="-120,40,-119,41" cwa: string, optional NWS county warning area. See http://www.nws.noaa.gov/organization.php for CWA list. e.g. cwa='LOX' nwsfirezone: string, optional NWS fire zones. See http://www.nws.noaa.gov/geodata/catalog/wsom/html/firezone.htm for a shapefile containing the full list of zones. e.g. nwsfirezone='LOX241' gacc: string, optional Name of Geographic Area Coordination Center e.g. gacc='EBCC' See http://gacc.nifc.gov/ for a list of GACCs. subgacc: string, optional Name of Sub GACC e.g. subgacc='EB07' vars: string, optional Single or comma separated list of sensor variables. Will return all stations that match one of provided variables. Useful for filtering all stations that sense only certain vars. Do not request vars twice in the query. e.g. vars='wind_speed,pressure' Use the variables function to see a list of sensor vars. status: string, optional A value of either active or inactive returns stations currently set as active or inactive in the archive. Omitting this param returns all stations. e.g. status='active' units: string, optional String or set of strings and pipes separated by commas. Default is metric units. Set units='ENGLISH' for FREEDOM UNITS ;) Valid other combinations are as follows: temp|C, temp|F, temp|K; speed|mps, speed|mph, speed|kph, speed|kts; pres|pa, pres|mb; height|m, height|ft; precip|mm, precip|cm, precip|in; alti|pa, alti|inhg. e.g. units='temp|F,speed|kph,metric' groupby: string, optional Results can be grouped by key words: state, county, country, cwa, nwszone, mwsfirezone, gacc, subgacc e.g. groupby='state' timeformat: string, optional A python format string for returning customized date-time groups for observation times. Can include characters. e.g. timeformat='%m/%d/%Y at %H:%M' Returns: -------- Dictionary of time series observations through the get_response() function. Raises: ------- None. """ self._check_geo_param(kwargs) kwargs['start'] = start kwargs['end'] = end kwargs['token'] = self.token return self._get_response('stations/timeseries', kwargs)
[ "def", "timeseries", "(", "self", ",", "start", ",", "end", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_check_geo_param", "(", "kwargs", ")", "kwargs", "[", "'start'", "]", "=", "start", "kwargs", "[", "'end'", "]", "=", "end", "kwargs", "[", "'token'", "]", "=", "self", ".", "token", "return", "self", ".", "_get_response", "(", "'stations/timeseries'", ",", "kwargs", ")" ]
r""" Returns a time series of observations at a user specified location for a specified time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may also be included. See below mandatory and optional parameters. Also see the metadata() function for station IDs. Arguments: ---------- start: string, mandatory Start date in form of YYYYMMDDhhmm. MUST BE USED WITH THE END PARAMETER. Default time is UTC e.g., start='201306011800' end: string, mandatory End date in form of YYYYMMDDhhmm. MUST BE USED WITH THE START PARAMETER. Default time is UTC e.g., end='201306011800' obtimezone: string, optional Set to either UTC or local. Sets timezone of obs. Default is UTC. e.g. obtimezone='local' showemptystations: string, optional Set to '1' to show stations even if no obs exist that match the time period. Stations without obs are omitted by default. stid: string, optional Single or comma separated list of MesoWest station IDs. e.g. stid='kden,kslc,wbb' county: string, optional County/parish/borough (US/Canada only), full name e.g. county='Larimer' state: string, optional US state, 2-letter ID e.g. state='CO' country: string, optional Single or comma separated list of abbreviated 2 or 3 character countries e.g. country='us,ca,mx' radius: string, optional Distance from a lat/lon pt or stid as [lat,lon,radius (mi)] or [stid, radius (mi)]. e.g. radius="-120,40,20" bbox: string, optional Stations within a [lon/lat] box in the order [lonmin,latmin,lonmax,latmax] e.g. bbox="-120,40,-119,41" cwa: string, optional NWS county warning area. See http://www.nws.noaa.gov/organization.php for CWA list. e.g. cwa='LOX' nwsfirezone: string, optional NWS fire zones. See http://www.nws.noaa.gov/geodata/catalog/wsom/html/firezone.htm for a shapefile containing the full list of zones. e.g. nwsfirezone='LOX241' gacc: string, optional Name of Geographic Area Coordination Center e.g. gacc='EBCC' See http://gacc.nifc.gov/ for a list of GACCs. subgacc: string, optional Name of Sub GACC e.g. subgacc='EB07' vars: string, optional Single or comma separated list of sensor variables. Will return all stations that match one of provided variables. Useful for filtering all stations that sense only certain vars. Do not request vars twice in the query. e.g. vars='wind_speed,pressure' Use the variables function to see a list of sensor vars. status: string, optional A value of either active or inactive returns stations currently set as active or inactive in the archive. Omitting this param returns all stations. e.g. status='active' units: string, optional String or set of strings and pipes separated by commas. Default is metric units. Set units='ENGLISH' for FREEDOM UNITS ;) Valid other combinations are as follows: temp|C, temp|F, temp|K; speed|mps, speed|mph, speed|kph, speed|kts; pres|pa, pres|mb; height|m, height|ft; precip|mm, precip|cm, precip|in; alti|pa, alti|inhg. e.g. units='temp|F,speed|kph,metric' groupby: string, optional Results can be grouped by key words: state, county, country, cwa, nwszone, mwsfirezone, gacc, subgacc e.g. groupby='state' timeformat: string, optional A python format string for returning customized date-time groups for observation times. Can include characters. e.g. timeformat='%m/%d/%Y at %H:%M' Returns: -------- Dictionary of time series observations through the get_response() function. Raises: ------- None.
[ "r", "Returns", "a", "time", "series", "of", "observations", "at", "a", "user", "specified", "location", "for", "a", "specified", "time", ".", "Users", "must", "specify", "at", "least", "one", "geographic", "search", "parameter", "(", "stid", "state", "country", "county", "radius", "bbox", "cwa", "nwsfirezone", "gacc", "or", "subgacc", ")", "to", "obtain", "observation", "data", ".", "Other", "parameters", "may", "also", "be", "included", ".", "See", "below", "mandatory", "and", "optional", "parameters", ".", "Also", "see", "the", "metadata", "()", "function", "for", "station", "IDs", "." ]
python
train
58.391892
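A usage sketch matching the timeseries docstring above; the API token is a placeholder and at least one geographic parameter (stid here) is required. The response layout in the last line is an assumption about the MesoWest API, not stated in this record.

from MesoPy import Meso

m = Meso(token='YOUR_API_TOKEN')  # placeholder token
data = m.timeseries(start='201306011800', end='201306012000',
                    stid='kden', obtimezone='local')
print(data['STATION'][0]['NAME'])  # response layout assumed from the MesoWest API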
log2timeline/dfvfs
dfvfs/vfs/sqlite_blob_file_system.py
https://github.com/log2timeline/dfvfs/blob/2b3ccd115f9901d89f383397d4a1376a873c83c4/dfvfs/vfs/sqlite_blob_file_system.py#L83-L102
def GetFileEntryByPathSpec(self, path_spec): """Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: FileEntry: a file entry or None. """ row_index = getattr(path_spec, 'row_index', None) row_condition = getattr(path_spec, 'row_condition', None) # If no row_index or row_condition is provided, return a directory. if row_index is None and row_condition is None: return sqlite_blob_file_entry.SQLiteBlobFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True) return sqlite_blob_file_entry.SQLiteBlobFileEntry( self._resolver_context, self, path_spec)
[ "def", "GetFileEntryByPathSpec", "(", "self", ",", "path_spec", ")", ":", "row_index", "=", "getattr", "(", "path_spec", ",", "'row_index'", ",", "None", ")", "row_condition", "=", "getattr", "(", "path_spec", ",", "'row_condition'", ",", "None", ")", "# If no row_index or row_condition is provided, return a directory.", "if", "row_index", "is", "None", "and", "row_condition", "is", "None", ":", "return", "sqlite_blob_file_entry", ".", "SQLiteBlobFileEntry", "(", "self", ".", "_resolver_context", ",", "self", ",", "path_spec", ",", "is_root", "=", "True", ",", "is_virtual", "=", "True", ")", "return", "sqlite_blob_file_entry", ".", "SQLiteBlobFileEntry", "(", "self", ".", "_resolver_context", ",", "self", ",", "path_spec", ")" ]
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: FileEntry: a file entry or None.
[ "Retrieves", "a", "file", "entry", "for", "a", "path", "specification", "." ]
python
train
34.8
adewes/blitzdb
blitzdb/backends/mongo/backend.py
https://github.com/adewes/blitzdb/blob/4b459e0bcde9e1f6224dd4e3bea74194586864b0/blitzdb/backends/mongo/backend.py#L294-L334
def _canonicalize_query(self, query): """ Transform the query dictionary to replace e.g. documents with __ref__ fields. """ def transform_query(q): for encoder in self.query_encoders: q = encoder.encode(q,[]) if isinstance(q, dict): nq = {} for key,value in q.items(): new_key = key if isinstance(value,dict) and len(value) == 1 and list(value.keys())[0].startswith('$'): if list(value.keys())[0] in ('$all','$in'): if list(value.values())[0] and isinstance(list(value.values())[0][0],Document): if self._use_pk_based_refs: new_key+='.pk' else: new_key+='.__ref__' elif isinstance(value,Document): if self._use_pk_based_refs: new_key+='.pk' else: new_key+='.__ref__' nq[new_key] = transform_query(value) return nq elif isinstance(q, (list,QuerySet,tuple)): return [transform_query(x) for x in q] elif isinstance(q,Document): collection = self.get_collection_for_obj(q) if self._use_pk_based_refs: return q.pk else: return "%s:%s" % (collection,q.pk) else: return q return transform_query(query)
[ "def", "_canonicalize_query", "(", "self", ",", "query", ")", ":", "def", "transform_query", "(", "q", ")", ":", "for", "encoder", "in", "self", ".", "query_encoders", ":", "q", "=", "encoder", ".", "encode", "(", "q", ",", "[", "]", ")", "if", "isinstance", "(", "q", ",", "dict", ")", ":", "nq", "=", "{", "}", "for", "key", ",", "value", "in", "q", ".", "items", "(", ")", ":", "new_key", "=", "key", "if", "isinstance", "(", "value", ",", "dict", ")", "and", "len", "(", "value", ")", "==", "1", "and", "list", "(", "value", ".", "keys", "(", ")", ")", "[", "0", "]", ".", "startswith", "(", "'$'", ")", ":", "if", "list", "(", "value", ".", "keys", "(", ")", ")", "[", "0", "]", "in", "(", "'$all'", ",", "'$in'", ")", ":", "if", "list", "(", "value", ".", "values", "(", ")", ")", "[", "0", "]", "and", "isinstance", "(", "list", "(", "value", ".", "values", "(", ")", ")", "[", "0", "]", "[", "0", "]", ",", "Document", ")", ":", "if", "self", ".", "_use_pk_based_refs", ":", "new_key", "+=", "'.pk'", "else", ":", "new_key", "+=", "'.__ref__'", "elif", "isinstance", "(", "value", ",", "Document", ")", ":", "if", "self", ".", "_use_pk_based_refs", ":", "new_key", "+=", "'.pk'", "else", ":", "new_key", "+=", "'.__ref__'", "nq", "[", "new_key", "]", "=", "transform_query", "(", "value", ")", "return", "nq", "elif", "isinstance", "(", "q", ",", "(", "list", ",", "QuerySet", ",", "tuple", ")", ")", ":", "return", "[", "transform_query", "(", "x", ")", "for", "x", "in", "q", "]", "elif", "isinstance", "(", "q", ",", "Document", ")", ":", "collection", "=", "self", ".", "get_collection_for_obj", "(", "q", ")", "if", "self", ".", "_use_pk_based_refs", ":", "return", "q", ".", "pk", "else", ":", "return", "\"%s:%s\"", "%", "(", "collection", ",", "q", ".", "pk", ")", "else", ":", "return", "q", "return", "transform_query", "(", "query", ")" ]
Transform the query dictionary to replace e.g. documents with __ref__ fields.
[ "Transform", "the", "query", "dictionary", "to", "replace", "e", ".", "g", ".", "documents", "with", "__ref__", "fields", "." ]
python
train
39.390244
bitesofcode/projexui
projexui/widgets/xcalendarwidget/xcalendarwidget.py
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xcalendarwidget/xcalendarwidget.py#L107-L119
def dragMoveEvent( self, event ): """ Processes the drag drop event using the filter set by the \ setDragDropFilter :param event | <QDragEvent> """ filt = self.dragDropFilter() if ( not filt ): super(XCalendarWidget, self).dragMoveEvent(event) return filt(self, event)
[ "def", "dragMoveEvent", "(", "self", ",", "event", ")", ":", "filt", "=", "self", ".", "dragDropFilter", "(", ")", "if", "(", "not", "filt", ")", ":", "super", "(", "XCalendarWidget", ",", "self", ")", ".", "dragMoveEvent", "(", "event", ")", "return", "filt", "(", "self", ",", "event", ")" ]
Processes the drag drop event using the filter set by the \ setDragDropFilter :param event | <QDragEvent>
[ "Processes", "the", "drag", "drop", "event", "using", "the", "filter", "set", "by", "the", "\\", "setDragDropFilter", ":", "param", "event", "|", "<QDragEvent", ">" ]
python
train
29.153846
ArduPilot/MAVProxy
MAVProxy/modules/mavproxy_map/__init__.py
https://github.com/ArduPilot/MAVProxy/blob/f50bdeff33064876f7dc8dc4683d278ff47f75d5/MAVProxy/modules/mavproxy_map/__init__.py#L136-L173
def cmd_map(self, args): '''map commands''' from MAVProxy.modules.mavproxy_map import mp_slipmap if len(args) < 1: print("usage: map <icon|set>") elif args[0] == "icon": if len(args) < 3: print("Usage: map icon <lat> <lon> <icon>") else: lat = args[1] lon = args[2] flag = 'flag.png' if len(args) > 3: flag = args[3] + '.png' icon = self.map.icon(flag) self.map.add_object(mp_slipmap.SlipIcon('icon - %s [%u]' % (str(flag),self.icon_counter), (float(lat),float(lon)), icon, layer=3, rotation=0, follow=False)) self.icon_counter += 1 elif args[0] == "set": self.map_settings.command(args[1:]) self.map.add_object(mp_slipmap.SlipBrightness(self.map_settings.brightness)) elif args[0] == "sethome": self.cmd_set_home(args) elif args[0] == "sethomepos": self.cmd_set_homepos(args) elif args[0] == "setorigin": self.cmd_set_origin(args) elif args[0] == "setoriginpos": self.cmd_set_originpos(args) elif args[0] == "zoom": self.cmd_zoom(args) elif args[0] == "center": self.cmd_center(args) elif args[0] == "follow": self.cmd_follow(args) else: print("usage: map <icon|set>")
[ "def", "cmd_map", "(", "self", ",", "args", ")", ":", "from", "MAVProxy", ".", "modules", ".", "mavproxy_map", "import", "mp_slipmap", "if", "len", "(", "args", ")", "<", "1", ":", "print", "(", "\"usage: map <icon|set>\"", ")", "elif", "args", "[", "0", "]", "==", "\"icon\"", ":", "if", "len", "(", "args", ")", "<", "3", ":", "print", "(", "\"Usage: map icon <lat> <lon> <icon>\"", ")", "else", ":", "lat", "=", "args", "[", "1", "]", "lon", "=", "args", "[", "2", "]", "flag", "=", "'flag.png'", "if", "len", "(", "args", ")", ">", "3", ":", "flag", "=", "args", "[", "3", "]", "+", "'.png'", "icon", "=", "self", ".", "map", ".", "icon", "(", "flag", ")", "self", ".", "map", ".", "add_object", "(", "mp_slipmap", ".", "SlipIcon", "(", "'icon - %s [%u]'", "%", "(", "str", "(", "flag", ")", ",", "self", ".", "icon_counter", ")", ",", "(", "float", "(", "lat", ")", ",", "float", "(", "lon", ")", ")", ",", "icon", ",", "layer", "=", "3", ",", "rotation", "=", "0", ",", "follow", "=", "False", ")", ")", "self", ".", "icon_counter", "+=", "1", "elif", "args", "[", "0", "]", "==", "\"set\"", ":", "self", ".", "map_settings", ".", "command", "(", "args", "[", "1", ":", "]", ")", "self", ".", "map", ".", "add_object", "(", "mp_slipmap", ".", "SlipBrightness", "(", "self", ".", "map_settings", ".", "brightness", ")", ")", "elif", "args", "[", "0", "]", "==", "\"sethome\"", ":", "self", ".", "cmd_set_home", "(", "args", ")", "elif", "args", "[", "0", "]", "==", "\"sethomepos\"", ":", "self", ".", "cmd_set_homepos", "(", "args", ")", "elif", "args", "[", "0", "]", "==", "\"setorigin\"", ":", "self", ".", "cmd_set_origin", "(", "args", ")", "elif", "args", "[", "0", "]", "==", "\"setoriginpos\"", ":", "self", ".", "cmd_set_originpos", "(", "args", ")", "elif", "args", "[", "0", "]", "==", "\"zoom\"", ":", "self", ".", "cmd_zoom", "(", "args", ")", "elif", "args", "[", "0", "]", "==", "\"center\"", ":", "self", ".", "cmd_center", "(", "args", ")", "elif", "args", "[", "0", "]", "==", "\"follow\"", ":", "self", ".", "cmd_follow", "(", "args", ")", "else", ":", "print", "(", "\"usage: map <icon|set>\"", ")" ]
map commands
[ "map", "commands" ]
python
train
40.789474
MrYsLab/pymata-aio
pymata_aio/pymata_iot.py
https://github.com/MrYsLab/pymata-aio/blob/015081a4628b9d47dfe3f8d6c698ff903f107810/pymata_aio/pymata_iot.py#L374-L411
async def i2c_read_request(self, command): """ This method sends an I2C read request to Firmata. It is qualified by a single shot, continuous read, or stop reading command. Special Note: for the read type supply one of the following string values: "0" = I2C_READ "1" = I2C_READ | I2C_END_TX_MASK" "2" = I2C_READ_CONTINUOUSLY "3" = I2C_READ_CONTINUOUSLY | I2C_END_TX_MASK "4" = I2C_STOP_READING :param command: {"method": "i2c_read_request", "params": [I2C_ADDRESS, I2C_REGISTER, NUMBER_OF_BYTES, I2C_READ_TYPE ]} :returns: {"method": "i2c_read_request_reply", "params": [DATA]} """ device_address = int(command[0]) register = int(command[1]) number_of_bytes = int(command[2]) if command[3] == "0": read_type = Constants.I2C_READ_CONTINUOUSLY elif command[3] == "1": read_type = Constants.I2C_READ elif command[3] == "2": read_type = Constants.I2C_READ | Constants.I2C_END_TX_MASK elif command[3] == "3": read_type = Constants.I2C_READ_CONTINUOUSLY | Constants.I2C_END_TX_MASK else: # the default case stop reading valid request or invalid request read_type = Constants.I2C_STOP_READING await self.core.i2c_read_request(device_address, register, number_of_bytes, read_type, self.i2c_read_request_callback) await asyncio.sleep(.1)
[ "async", "def", "i2c_read_request", "(", "self", ",", "command", ")", ":", "device_address", "=", "int", "(", "command", "[", "0", "]", ")", "register", "=", "int", "(", "command", "[", "1", "]", ")", "number_of_bytes", "=", "int", "(", "command", "[", "2", "]", ")", "if", "command", "[", "3", "]", "==", "\"0\"", ":", "read_type", "=", "Constants", ".", "I2C_READ_CONTINUOUSLY", "elif", "command", "[", "3", "]", "==", "\"1\"", ":", "read_type", "=", "Constants", ".", "I2C_READ", "elif", "command", "[", "3", "]", "==", "\"2\"", ":", "read_type", "=", "Constants", ".", "I2C_READ", "|", "Constants", ".", "I2C_END_TX_MASK", "elif", "command", "[", "3", "]", "==", "\"3\"", ":", "read_type", "=", "Constants", ".", "I2C_READ_CONTINUOUSLY", "|", "Constants", ".", "I2C_END_TX_MASK", "else", ":", "# the default case stop reading valid request or invalid request", "read_type", "=", "Constants", ".", "I2C_STOP_READING", "await", "self", ".", "core", ".", "i2c_read_request", "(", "device_address", ",", "register", ",", "number_of_bytes", ",", "read_type", ",", "self", ".", "i2c_read_request_callback", ")", "await", "asyncio", ".", "sleep", "(", ".1", ")" ]
This method sends an I2C read request to Firmata. It is qualified by a single shot, continuous read, or stop reading command. Special Note: for the read type supply one of the following string values: "0" = I2C_READ "1" = I2C_READ | I2C_END_TX_MASK "2" = I2C_READ_CONTINUOUSLY "3" = I2C_READ_CONTINUOUSLY | I2C_END_TX_MASK "4" = I2C_STOP_READING :param command: {"method": "i2c_read_request", "params": [I2C_ADDRESS, I2C_REGISTER, NUMBER_OF_BYTES, I2C_READ_TYPE ]} :returns: {"method": "i2c_read_request_reply", "params": [DATA]}
[ "This", "method", "sends", "an", "I2C", "read", "request", "to", "Firmata", ".", "It", "is", "qualified", "by", "a", "single", "shot", "continuous", "read", "or", "stop", "reading", "command", ".", "Special", "Note", ":", "for", "the", "read", "type", "supply", "one", "of", "the", "following", "string", "values", ":" ]
python
train
39.421053
tensorlayer/tensorlayer
tensorlayer/visualize.py
https://github.com/tensorlayer/tensorlayer/blob/aa9e52e36c7058a7e6fd81d36563ca6850b21956/tensorlayer/visualize.py#L34-L50
def read_image(image, path=''): """Read one image. Parameters ----------- image : str The image file name. path : str The image folder path. Returns ------- numpy.array The image. """ return imageio.imread(os.path.join(path, image))
[ "def", "read_image", "(", "image", ",", "path", "=", "''", ")", ":", "return", "imageio", ".", "imread", "(", "os", ".", "path", ".", "join", "(", "path", ",", "image", ")", ")" ]
Read one image. Parameters ----------- image : str The image file name. path : str The image folder path. Returns ------- numpy.array The image.
[ "Read", "one", "image", "." ]
python
valid
16.647059
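A usage sketch for read_image from the record above; the file name and folder are placeholders. Per the code, this is a thin wrapper over imageio.imread(os.path.join(path, image)).

import tensorlayer as tl

img = tl.visualize.read_image('cat.png', path='data')  # placeholder paths
print(img.shape)  # numpy array, per the docstring's return type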
quiltdata/quilt
compiler/quilt/util.py
https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/util.py#L9-L22
def save(package, data, params={}, is_public=False): """Build and push data to Quilt registry at user/package/data_node, associating params as metadata for the data node. :param package: short package specifier string, i.e. 'team:user/pkg/subpath' :param data: data to save (np.ndarray or pd.DataFrame) :param params: metadata dictionary :param is_public: boolean kwarg to push the packages publicly """ for key, value in params.items(): if isinstance(value, np.ndarray): value = value.astype(float) params[key] = value.tolist() build_from_node(package, nodes.DataNode(None, None, data, params)) push('{}'.format(package), is_public=is_public)
[ "def", "save", "(", "package", ",", "data", ",", "params", "=", "{", "}", ",", "is_public", "=", "False", ")", ":", "for", "key", ",", "value", "in", "params", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "np", ".", "ndarray", ")", ":", "value", "=", "value", ".", "astype", "(", "float", ")", "params", "[", "key", "]", "=", "value", ".", "tolist", "(", ")", "build_from_node", "(", "package", ",", "nodes", ".", "DataNode", "(", "None", ",", "None", ",", "data", ",", "params", ")", ")", "push", "(", "'{}'", ".", "format", "(", "package", ")", ",", "is_public", "=", "is_public", ")" ]
Build and push data to Quilt registry at user/package/data_node, associating params as metadata for the data node. :param package: short package specifier string, i.e. 'team:user/pkg/subpath' :param data: data to save (np.ndarray or pd.DataFrame) :param params: metadata dictionary :param is_public: boolean kwarg to push the packages publicly
[ "Build", "and", "push", "data", "to", "Quilt", "registry", "at", "user", "/", "package", "/", "data_node", "associating", "params", "as", "metadata", "for", "the", "data", "node", ".", ":", "param", "package", ":", "short", "package", "specifier", "string", "i", ".", "e", ".", "team", ":", "user", "/", "pkg", "/", "subpath", ":", "param", "data", ":", "data", "to", "save", "(", "np", ".", "ndarray", "or", "pd", ".", "DataFrame", ")", ":", "param", "params", ":", "metadata", "dictionary", ":", "param", "is_public", ":", "boolean", "kwarg", "to", "push", "the", "packages", "publicly" ]
python
train
50.142857
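A hedged sketch of save from the quilt record above. The import path is inferred from the record's file path (compiler/quilt/util.py) and the package handle is a placeholder; push() also requires an authenticated Quilt registry session, which is not shown here.

import numpy as np

from quilt.util import save  # assumed import path for compiler/quilt/util.py

arr = np.arange(6).reshape(2, 3)
# Per the code, ndarray metadata values are cast to float and listified
# before the package is built and pushed.
save('team:user/pkg/mydata', arr,
     params={'scale': np.array([1.5, 2.5])},  # placeholder metadata
     is_public=False)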
wesm/feather
cpp/build-support/cpplint.py
https://github.com/wesm/feather/blob/99267b30461c46b9e437f95e1d9338a92a854270/cpp/build-support/cpplint.py#L4501-L4525
def _DropCommonSuffixes(filename): """Drops common suffixes like _test.cc or -inl.h from filename. For example: >>> _DropCommonSuffixes('foo/foo-inl.h') 'foo/foo' >>> _DropCommonSuffixes('foo/bar/foo.cc') 'foo/bar/foo' >>> _DropCommonSuffixes('foo/foo_internal.h') 'foo/foo' >>> _DropCommonSuffixes('foo/foo_unusualinternal.h') 'foo/foo_unusualinternal' Args: filename: The input filename. Returns: The filename with the common suffix removed. """ for suffix in ('test.cc', 'regtest.cc', 'unittest.cc', 'inl.h', 'impl.h', 'internal.h'): if (filename.endswith(suffix) and len(filename) > len(suffix) and filename[-len(suffix) - 1] in ('-', '_')): return filename[:-len(suffix) - 1] return os.path.splitext(filename)[0]
[ "def", "_DropCommonSuffixes", "(", "filename", ")", ":", "for", "suffix", "in", "(", "'test.cc'", ",", "'regtest.cc'", ",", "'unittest.cc'", ",", "'inl.h'", ",", "'impl.h'", ",", "'internal.h'", ")", ":", "if", "(", "filename", ".", "endswith", "(", "suffix", ")", "and", "len", "(", "filename", ")", ">", "len", "(", "suffix", ")", "and", "filename", "[", "-", "len", "(", "suffix", ")", "-", "1", "]", "in", "(", "'-'", ",", "'_'", ")", ")", ":", "return", "filename", "[", ":", "-", "len", "(", "suffix", ")", "-", "1", "]", "return", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "0", "]" ]
Drops common suffixes like _test.cc or -inl.h from filename. For example: >>> _DropCommonSuffixes('foo/foo-inl.h') 'foo/foo' >>> _DropCommonSuffixes('foo/bar/foo.cc') 'foo/bar/foo' >>> _DropCommonSuffixes('foo/foo_internal.h') 'foo/foo' >>> _DropCommonSuffixes('foo/foo_unusualinternal.h') 'foo/foo_unusualinternal' Args: filename: The input filename. Returns: The filename with the common suffix removed.
[ "Drops", "common", "suffixes", "like", "_test", ".", "cc", "or", "-", "inl", ".", "h", "from", "filename", "." ]
python
train
31.4
gwastro/pycbc
pycbc/tmpltbank/option_utils.py
https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/tmpltbank/option_utils.py#L236-L245
def psd(self): """ A pyCBC FrequencySeries holding the appropriate PSD. Return the PSD used in the metric calculation. """ if not self._psd: errMsg = "The PSD has not been set in the metricParameters " errMsg += "instance." raise ValueError(errMsg) return self._psd
[ "def", "psd", "(", "self", ")", ":", "if", "not", "self", ".", "_psd", ":", "errMsg", "=", "\"The PSD has not been set in the metricParameters \"", "errMsg", "+=", "\"instance.\"", "raise", "ValueError", "(", "errMsg", ")", "return", "self", ".", "_psd" ]
A pyCBC FrequencySeries holding the appropriate PSD. Return the PSD used in the metric calculation.
[ "A", "pyCBC", "FrequencySeries", "holding", "the", "appropriate", "PSD", ".", "Return", "the", "PSD", "used", "in", "the", "metric", "calculation", "." ]
python
train
34
invenia/Arbiter
arbiter/scheduler.py
https://github.com/invenia/Arbiter/blob/51008393ae8797da85bcd67807259a157f941dfd/arbiter/scheduler.py#L145-L152
def fail_remaining(self): """ Mark all unfinished tasks (including currently running ones) as failed. """ self._failed.update(self._graph.nodes) self._graph = Graph() self._running = set()
[ "def", "fail_remaining", "(", "self", ")", ":", "self", ".", "_failed", ".", "update", "(", "self", ".", "_graph", ".", "nodes", ")", "self", ".", "_graph", "=", "Graph", "(", ")", "self", ".", "_running", "=", "set", "(", ")" ]
Mark all unfinished tasks (including currently running ones) as failed.
[ "Mark", "all", "unfinished", "tasks", "(", "including", "currently", "running", "ones", ")", "as", "failed", "." ]
python
train
29.625
abseil/abseil-py
absl/flags/_flagvalues.py
https://github.com/abseil/abseil-py/blob/9d73fdaa23a6b6726aa5731390f388c0c6250ee5/absl/flags/_flagvalues.py#L888-L892
def _render_our_module_flags(self, module, output_lines, prefix=''):
    """Renders help for a given module's flags into output_lines."""
    flags = self._get_flags_defined_by_module(module)
    if flags:
      self._render_module_flags(module, flags, output_lines, prefix)
[ "def", "_render_our_module_flags", "(", "self", ",", "module", ",", "output_lines", ",", "prefix", "=", "''", ")", ":", "flags", "=", "self", ".", "_get_flags_defined_by_module", "(", "module", ")", "if", "flags", ":", "self", ".", "_render_module_flags", "(", "module", ",", "flags", ",", "output_lines", ",", "prefix", ")" ]
Renders help for a given module's flags into output_lines.
[ "Returns", "a", "help", "string", "for", "a", "given", "module", "." ]
python
train
50.6
joferkington/mplstereonet
mplstereonet/contouring.py
https://github.com/joferkington/mplstereonet/blob/f6d78ca49807915d4223e864e12bb24d497cc2d6/mplstereonet/contouring.py#L209-L214
def _kamb_count(cos_dist, sigma=3): """Original Kamb kernel function (raw count within radius).""" n = float(cos_dist.size) dist = _kamb_radius(n, sigma) count = (cos_dist >= dist).astype(float) return count, _kamb_units(n, dist)
[ "def", "_kamb_count", "(", "cos_dist", ",", "sigma", "=", "3", ")", ":", "n", "=", "float", "(", "cos_dist", ".", "size", ")", "dist", "=", "_kamb_radius", "(", "n", ",", "sigma", ")", "count", "=", "(", "cos_dist", ">=", "dist", ")", ".", "astype", "(", "float", ")", "return", "count", ",", "_kamb_units", "(", "n", ",", "dist", ")" ]
Original Kamb kernel function (raw count within radius).
[ "Original", "Kamb", "kernel", "function", "(", "raw", "count", "within", "radius", ")", "." ]
python
train
40.666667
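A small numeric sketch of the Kamb count kernel above, assuming the private helpers _kamb_radius and _kamb_units from the same module are in scope; the cosine distances are invented.

import numpy as np

cos_dist = np.array([0.99, 0.90, 0.10, -0.40])  # hypothetical cosine distances
counts, units = _kamb_count(cos_dist, sigma=3)
# counts is 1.0 where a point falls inside the Kamb counting radius for
# n=4 and sigma=3, and 0.0 elsewhere; units rescales counts for contouring.
print(counts, units)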
striglia/stockfighter
stockfighter/stockfighter.py
https://github.com/striglia/stockfighter/blob/df908f5919d6f861601cd00c906a049d04253d47/stockfighter/stockfighter.py#L107-L118
def cancel_order(self, order_id, stock): """Cancel An Order https://starfighter.readme.io/docs/cancel-an-order """ url_fragment = 'venues/{venue}/stocks/{stock}/orders/{order_id}'.format( venue=self.venue, stock=stock, order_id=order_id, ) url = urljoin(self.base_url, url_fragment) return self.session.delete(url).json()
[ "def", "cancel_order", "(", "self", ",", "order_id", ",", "stock", ")", ":", "url_fragment", "=", "'venues/{venue}/stocks/{stock}/orders/{order_id}'", ".", "format", "(", "venue", "=", "self", ".", "venue", ",", "stock", "=", "stock", ",", "order_id", "=", "order_id", ",", ")", "url", "=", "urljoin", "(", "self", ".", "base_url", ",", "url_fragment", ")", "return", "self", ".", "session", ".", "delete", "(", "url", ")", ".", "json", "(", ")" ]
Cancel An Order https://starfighter.readme.io/docs/cancel-an-order
[ "Cancel", "An", "Order" ]
python
train
33.583333
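A hedged usage sketch for cancel_order; the client construction is elided because its signature is not shown here, and the venue, stock symbol, and order id are placeholders.

# Assuming 'client' is an already-constructed wrapper with its venue and
# HTTP session configured; every identifier below is hypothetical.
result = client.cancel_order(order_id=1234, stock='FOOBAR')
print(result)  # decoded JSON body of the DELETE response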
LogicalDash/LiSE
allegedb/allegedb/query.py
https://github.com/LogicalDash/LiSE/blob/fe6fd4f0a7c1780e065f4c9babb9bc443af6bb84/allegedb/allegedb/query.py#L295-L314
def _flush_graph_val(self): """Send all new and changed graph values to the database.""" if not self._graphvals2set: return delafter = {} for graph, key, branch, turn, tick, value in self._graphvals2set: if (graph, key, branch) in delafter: delafter[graph, key, branch] = min(( (turn, tick), delafter[graph, key, branch] )) else: delafter[graph, key, branch] = (turn, tick) self.sqlmany( 'del_graph_val_after', *((graph, key, branch, turn, turn, tick) for ((graph, key, branch), (turn, tick)) in delafter.items()) ) self.sqlmany('graph_val_insert', *self._graphvals2set) self._graphvals2set = []
[ "def", "_flush_graph_val", "(", "self", ")", ":", "if", "not", "self", ".", "_graphvals2set", ":", "return", "delafter", "=", "{", "}", "for", "graph", ",", "key", ",", "branch", ",", "turn", ",", "tick", ",", "value", "in", "self", ".", "_graphvals2set", ":", "if", "(", "graph", ",", "key", ",", "branch", ")", "in", "delafter", ":", "delafter", "[", "graph", ",", "key", ",", "branch", "]", "=", "min", "(", "(", "(", "turn", ",", "tick", ")", ",", "delafter", "[", "graph", ",", "key", ",", "branch", "]", ")", ")", "else", ":", "delafter", "[", "graph", ",", "key", ",", "branch", "]", "=", "(", "turn", ",", "tick", ")", "self", ".", "sqlmany", "(", "'del_graph_val_after'", ",", "*", "(", "(", "graph", ",", "key", ",", "branch", ",", "turn", ",", "turn", ",", "tick", ")", "for", "(", "(", "graph", ",", "key", ",", "branch", ")", ",", "(", "turn", ",", "tick", ")", ")", "in", "delafter", ".", "items", "(", ")", ")", ")", "self", ".", "sqlmany", "(", "'graph_val_insert'", ",", "*", "self", ".", "_graphvals2set", ")", "self", ".", "_graphvals2set", "=", "[", "]" ]
Send all new and changed graph values to the database.
[ "Send", "all", "new", "and", "changed", "graph", "values", "to", "the", "database", "." ]
python
train
40.1
praekeltfoundation/seed-control-interface-service
services/tasks.py
https://github.com/praekeltfoundation/seed-control-interface-service/blob/0c8ec58ae61e72d4443e6c9a4d8b7dd12dd8a86e/services/tasks.py#L23-L37
def run(self, target, payload, instance_id=None, hook_id=None, **kwargs):
    """
    target: the url to receive the payload.
    payload: a python primitive data structure
    instance_id: a possibly None "trigger" instance ID
    hook_id: the ID of the defining Hook object
    """
    requests.post(
        url=target,
        data=json.dumps(payload),
        headers={
            'Content-Type': 'application/json',
            'Authorization': 'Token %s' % settings.HOOK_AUTH_TOKEN
        }
    )
[ "def", "run", "(", "self", ",", "target", ",", "payload", ",", "instance_id", "=", "None", ",", "hook_id", "=", "None", ",", "*", "*", "kwargs", ")", ":", "requests", ".", "post", "(", "url", "=", "target", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", ",", "'Authorization'", ":", "'Token %s'", "%", "settings", ".", "HOOK_AUTH_TOKEN", "}", ")" ]
target: the url to receive the payload. payload: a python primitive data structure instance_id: a possibly None "trigger" instance ID hook_id: the ID of the defining Hook object
[ "target", ":", "the", "url", "to", "receive", "the", "payload", ".", "payload", ":", "a", "python", "primitive", "data", "structure", "instance_id", ":", "a", "possibly", "None", "trigger", "instance", "ID", "hook_id", ":", "the", "ID", "of", "defining", "Hook", "object" ]
python
train
37.2
MinchinWeb/colourettu
tasks.py
https://github.com/MinchinWeb/colourettu/blob/f0b2f6b1d44055f3ccee62ac2759829f1e16a252/tasks.py#L199-L246
def make_release(cts):
    '''Make and upload the release.

    Changelog:

    - v0.2.1 -- 2016-11-18 -- specify downloading of a non-cached version of the
      package so multiple formats can be properly and individually tested.
    - v0.2.2 -- 2016-11-28 -- move configuration to top of file
    '''
    make_release_version = __version__
    colorama.init()
    text.title("Minchin 'Make Release' for Python v{}".format(make_release_version))
    print()
    text.subtitle("Configuration")
    print("base dir -> {}".format(here_directory()))
    print("source -> .\{}\\".format(source_directory().relative_to(here_directory())))
    print("test dir -> .\{}\\".format(test_directory().relative_to(here_directory())))
    #print("doc dir -> .\{}\\".format(doc_directory().relative_to(here_directory())))
    print("version file -> .\{}".format(version_file().relative_to(here_directory())))
    print()
    text.subtitle("Git -- Clean directory?")
    print()
    text.subtitle("Sort Import Statements")
    print()
    text.subtitle("Run Tests")
    print()
    text.subtitle("Update Version Number")
    new_version = update_version_number(None)
    print()
    text.subtitle("Add Release to Changelog")
    print()
    text.subtitle("Build Documentation")
    print()
    text.query_yes_quit('All good and ready to go?')
    text.subtitle("Build Distributions")
    build_distribution()
    for server in [
        #"local",
        #"testpypi",
        "pypi",
    ]:
        for file_format in ["tar.gz", "whl"]:
            print()
            text.subtitle("Test {} Build {}".format(file_format, server))
            check_local_install(new_version, file_format, server)
[ "def", "make_release", "(", "cts", ")", ":", "make_release_version", "=", "__version__", "colorama", ".", "init", "(", ")", "text", ".", "title", "(", "\"Minchin 'Make Release' for Python v{}\"", ".", "format", "(", "make_release_version", ")", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Configuration\"", ")", "print", "(", "\"base dir -> {}\"", ".", "format", "(", "here_directory", "(", ")", ")", ")", "print", "(", "\"source -> .\\{}\\\\\"", ".", "format", "(", "source_directory", "(", ")", ".", "relative_to", "(", "here_directory", "(", ")", ")", ")", ")", "print", "(", "\"test dir -> .\\{}\\\\\"", ".", "format", "(", "test_directory", "(", ")", ".", "relative_to", "(", "here_directory", "(", ")", ")", ")", ")", "#print(\"doc dir -> .\\{}\\\\\".format(doc_directory().relative_to(here_directory())))", "print", "(", "\"version file -> .\\{}\"", ".", "format", "(", "version_file", "(", ")", ".", "relative_to", "(", "here_directory", "(", ")", ")", ")", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Git -- Clean directory?\"", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Sort Import Statements\"", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Run Tests\"", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Update Version Number\"", ")", "new_version", "=", "update_version_number", "(", "None", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Add Release to Changelog\"", ")", "print", "(", ")", "text", ".", "subtitle", "(", "\"Build Documentation\"", ")", "print", "(", ")", "text", ".", "query_yes_quit", "(", "'All good and ready to go?'", ")", "text", ".", "subtitle", "(", "\"Build Distributions\"", ")", "build_distribution", "(", ")", "for", "server", "in", "[", "#\"local\",", "#\"testpypi\",", "\"pypi\"", ",", "]", ":", "for", "file_format", "in", "[", "\"tar.gz\"", ",", "\"whl\"", "]", ":", "print", "(", ")", "text", ".", "subtitle", "(", "\"Test {} Build {}\"", ".", "format", "(", "file_format", ",", "server", ")", ")", "check_local_install", "(", "new_version", ",", "file_format", ",", "server", ")" ]
Make and upload the release. Changelog: - v0.2.1 -- 2016-11-18 -- specify downloading of a non-cached version of the package so multiple formats can be properly and individually tested. - v0.2.2 -- 2016-11-28 -- move configuration to top of file
[ "Make", "and", "upload", "the", "release", "." ]
python
train
36.6875
MrYsLab/pymata-aio
pymata_aio/pymata_core.py
https://github.com/MrYsLab/pymata-aio/blob/015081a4628b9d47dfe3f8d6c698ff903f107810/pymata_aio/pymata_core.py#L1945-L1975
async def _process_latching(self, key, latching_entry):
    """
    This is a private utility method.
    This method processes latching events and either returns them via
    callback or stores them in the latch map
    :param key: Encoded pin
    :param latching_entry: a latch table entry
    :returns: Callback or store data in latch map
    """
    if latching_entry[Constants.LATCH_CALLBACK]:
        # auto clear entry and execute the callback
        if latching_entry[Constants.LATCH_CALLBACK_TYPE]:
            await latching_entry[Constants.LATCH_CALLBACK] \
                ([key, latching_entry[Constants.LATCHED_DATA], time.time()])
        # noinspection PyPep8
        else:
            latching_entry[Constants.LATCH_CALLBACK] \
                ([key, latching_entry[Constants.LATCHED_DATA], time.time()])
        self.latch_map[key] = [0, 0, 0, 0, 0, None]
    else:
        updated_latch_entry = latching_entry
        updated_latch_entry[Constants.LATCH_STATE] = \
            Constants.LATCH_LATCHED
        updated_latch_entry[Constants.LATCHED_DATA] = \
            latching_entry[Constants.LATCHED_DATA]
        # time stamp it
        updated_latch_entry[Constants.LATCHED_TIME_STAMP] = time.time()
        self.latch_map[key] = updated_latch_entry
[ "async", "def", "_process_latching", "(", "self", ",", "key", ",", "latching_entry", ")", ":", "if", "latching_entry", "[", "Constants", ".", "LATCH_CALLBACK", "]", ":", "# auto clear entry and execute the callback", "if", "latching_entry", "[", "Constants", ".", "LATCH_CALLBACK_TYPE", "]", ":", "await", "latching_entry", "[", "Constants", ".", "LATCH_CALLBACK", "]", "(", "[", "key", ",", "latching_entry", "[", "Constants", ".", "LATCHED_DATA", "]", ",", "time", ".", "time", "(", ")", "]", ")", "# noinspection PyPep8", "else", ":", "latching_entry", "[", "Constants", ".", "LATCH_CALLBACK", "]", "(", "[", "key", ",", "latching_entry", "[", "Constants", ".", "LATCHED_DATA", "]", ",", "time", ".", "time", "(", ")", "]", ")", "self", ".", "latch_map", "[", "key", "]", "=", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "None", "]", "else", ":", "updated_latch_entry", "=", "latching_entry", "updated_latch_entry", "[", "Constants", ".", "LATCH_STATE", "]", "=", "Constants", ".", "LATCH_LATCHED", "updated_latch_entry", "[", "Constants", ".", "LATCHED_DATA", "]", "=", "latching_entry", "[", "Constants", ".", "LATCHED_DATA", "]", "# time stamp it", "updated_latch_entry", "[", "Constants", ".", "LATCHED_TIME_STAMP", "]", "=", "time", ".", "time", "(", ")", "self", ".", "latch_map", "[", "key", "]", "=", "updated_latch_entry" ]
This is a private utility method. This method processes latching events and either returns them via callback or stores them in the latch map :param key: Encoded pin :param latching_entry: a latch table entry :returns: Callback or store data in latch map
[ "This", "is", "a", "private", "utility", "method", ".", "This", "method", "process", "latching", "events", "and", "either", "returns", "them", "via", "callback", "or", "stores", "them", "in", "the", "latch", "map" ]
python
train
43.612903
edx/XBlock
xblock/fields.py
https://github.com/edx/XBlock/blob/368bf46e2c0ee69bbb21817f428c4684936e18ee/xblock/fields.py#L874-L888
def _sanitize(self, value): """ Remove the control characters that are not allowed in XML: https://www.w3.org/TR/xml/#charsets Leave all other characters. """ if isinstance(value, six.binary_type): value = value.decode('utf-8') if isinstance(value, six.text_type): new_value = ''.join(ch for ch in value if self._valid_char(ch)) else: return value # The new string will be equivalent to the original string if no control characters are present. # If equivalent, return the original string - some tests check for object equality instead of string equality. return value if value == new_value else new_value
[ "def", "_sanitize", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "binary_type", ")", ":", "value", "=", "value", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "value", ",", "six", ".", "text_type", ")", ":", "new_value", "=", "''", ".", "join", "(", "ch", "for", "ch", "in", "value", "if", "self", ".", "_valid_char", "(", "ch", ")", ")", "else", ":", "return", "value", "# The new string will be equivalent to the original string if no control characters are present.", "# If equivalent, return the original string - some tests check for object equality instead of string equality.", "return", "value", "if", "value", "==", "new_value", "else", "new_value" ]
Remove the control characters that are not allowed in XML: https://www.w3.org/TR/xml/#charsets Leave all other characters.
[ "Remove", "the", "control", "characters", "that", "are", "not", "allowed", "in", "XML", ":", "https", ":", "//", "www", ".", "w3", ".", "org", "/", "TR", "/", "xml", "/", "#charsets", "Leave", "all", "other", "characters", "." ]
python
train
47.666667
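A standalone sketch of the same sanitizing idea, restating the XML 1.0 character ranges (https://www.w3.org/TR/xml/#charsets) that the class's _valid_char presumably checks; the input string is made up.

def xml_valid(ch):
    # XML 1.0 allows tab, newline, carriage return, and three code point ranges.
    cp = ord(ch)
    return (cp in (0x9, 0xA, 0xD)
            or 0x20 <= cp <= 0xD7FF
            or 0xE000 <= cp <= 0xFFFD
            or 0x10000 <= cp <= 0x10FFFF)

dirty = 'ok\x00\x0bvalue'                 # NUL and vertical tab are invalid
clean = ''.join(ch for ch in dirty if xml_valid(ch))
assert clean == 'okvalue'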
tensorlayer/tensorlayer
tensorlayer/files/utils.py
https://github.com/tensorlayer/tensorlayer/blob/aa9e52e36c7058a7e6fd81d36563ca6850b21956/tensorlayer/files/utils.py#L937-L974
def download_file_from_google_drive(ID, destination):
    """Download file from Google Drive.

    See ``tl.files.load_celebA_dataset`` for example.

    Parameters
    --------------
    ID : str
        The Google Drive file ID.
    destination : str
        The destination for the saved file.

    """

    def save_response_content(response, destination, chunk_size=32 * 1024):
        total_size = int(response.headers.get('content-length', 0))
        with open(destination, "wb") as f:
            for chunk in tqdm(response.iter_content(chunk_size), total=total_size, unit='B', unit_scale=True,
                              desc=destination):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)

    def get_confirm_token(response):
        for key, value in response.cookies.items():
            if key.startswith('download_warning'):
                return value
        return None

    URL = "https://docs.google.com/uc?export=download"
    session = requests.Session()
    response = session.get(URL, params={'id': ID}, stream=True)
    token = get_confirm_token(response)

    if token:
        params = {'id': ID, 'confirm': token}
        response = session.get(URL, params=params, stream=True)
    save_response_content(response, destination)
[ "def", "download_file_from_google_drive", "(", "ID", ",", "destination", ")", ":", "def", "save_response_content", "(", "response", ",", "destination", ",", "chunk_size", "=", "32", "*", "1024", ")", ":", "total_size", "=", "int", "(", "response", ".", "headers", ".", "get", "(", "'content-length'", ",", "0", ")", ")", "with", "open", "(", "destination", ",", "\"wb\"", ")", "as", "f", ":", "for", "chunk", "in", "tqdm", "(", "response", ".", "iter_content", "(", "chunk_size", ")", ",", "total", "=", "total_size", ",", "unit", "=", "'B'", ",", "unit_scale", "=", "True", ",", "desc", "=", "destination", ")", ":", "if", "chunk", ":", "# filter out keep-alive new chunks", "f", ".", "write", "(", "chunk", ")", "def", "get_confirm_token", "(", "response", ")", ":", "for", "key", ",", "value", "in", "response", ".", "cookies", ".", "items", "(", ")", ":", "if", "key", ".", "startswith", "(", "'download_warning'", ")", ":", "return", "value", "return", "None", "URL", "=", "\"https://docs.google.com/uc?export=download\"", "session", "=", "requests", ".", "Session", "(", ")", "response", "=", "session", ".", "get", "(", "URL", ",", "params", "=", "{", "'id'", ":", "ID", "}", ",", "stream", "=", "True", ")", "token", "=", "get_confirm_token", "(", "response", ")", "if", "token", ":", "params", "=", "{", "'id'", ":", "ID", ",", "'confirm'", ":", "token", "}", "response", "=", "session", ".", "get", "(", "URL", ",", "params", "=", "params", ",", "stream", "=", "True", ")", "save_response_content", "(", "response", ",", "destination", ")" ]
Download file from Google Drive. See ``tl.files.load_celebA_dataset`` for example. Parameters -------------- ID : str The Google Drive file ID. destination : str The destination for the saved file.
[ "Download", "file", "from", "Google", "Drive", "." ]
python
valid
33
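A hedged usage sketch for the downloader above; the Drive file ID and output name are placeholders, and requests plus tqdm must be installed. Real IDs come from a Drive share link.

# Hypothetical ID and destination; this performs an actual HTTP download.
download_file_from_google_drive('0B-hypothetical-file-id', 'celebA.zip')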
openvax/pyensembl
pyensembl/locus.py
https://github.com/openvax/pyensembl/blob/4b995fb72e848206d6fbf11950cf30964cd9b3aa/pyensembl/locus.py#L136-L158
def offset_range(self, start, end):
    """
    Database start/end entries are always ordered such that
    start < end. This makes computing a relative position (e.g. of a stop
    codon relative to its transcript) complicated since the "end" position
    of a backwards locus is actually earlier on the strand. This function
    correctly selects a start vs. end value depending on this locus's
    strand and determines that position's offset from the earliest
    position in this locus.
    """
    assert start <= end, \
        "Locations should always have start < end, got start=%d, end=%d" % (
            start, end)

    if start < self.start or end > self.end:
        raise ValueError("Range (%d, %d) falls outside %s" % (
            start, end, self))

    if self.on_forward_strand:
        return (start - self.start, end - self.start)
    else:
        return (self.end - end, self.end - start)
[ "def", "offset_range", "(", "self", ",", "start", ",", "end", ")", ":", "assert", "start", "<=", "end", ",", "\"Locations should always have start < end, got start=%d, end=%d\"", "%", "(", "start", ",", "end", ")", "if", "start", "<", "self", ".", "start", "or", "end", ">", "self", ".", "end", ":", "raise", "ValueError", "(", "\"Range (%d, %d) falls outside %s\"", "%", "(", "start", ",", "end", ",", "self", ")", ")", "if", "self", ".", "on_forward_strand", ":", "return", "(", "start", "-", "self", ".", "start", ",", "end", "-", "self", ".", "start", ")", "else", ":", "return", "(", "self", ".", "end", "-", "end", ",", "self", ".", "end", "-", "start", ")" ]
Database start/end entries are always ordered such that start < end. This makes computing a relative position (e.g. of a stop codon relative to its transcript) complicated since the "end" position of a backwards locus is actually earlier on the strand. This function correctly selects a start vs. end value depending on this locus's strand and determines that position's offset from the earliest position in this locus.
[ "Database", "start", "/", "end", "entries", "are", "always", "ordered", "such", "that", "start", "<", "end", ".", "This", "makes", "computing", "a", "relative", "position", "(", "e", ".", "g", ".", "of", "a", "stop", "codon", "relative", "to", "its", "transcript", ")", "complicated", "since", "the", "end", "position", "of", "a", "backwards", "locus", "is", "actually", "earlir", "on", "the", "strand", ".", "This", "function", "correctly", "selects", "a", "start", "vs", ".", "end", "value", "depending", "on", "this", "locuses", "s", "strand", "and", "determines", "that", "position", "s", "offset", "from", "the", "earliest", "position", "in", "this", "locus", "." ]
python
train
42.130435
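The strand arithmetic above, worked by hand for a hypothetical locus spanning positions 100..200; no pyensembl objects are constructed.

locus_start, locus_end = 100, 200
start, end = 120, 130              # query range, start <= end by convention
# Forward strand: offsets are measured from the locus start.
assert (start - locus_start, end - locus_start) == (20, 30)
# Reverse strand: the biological beginning is locus_end, so the ends flip.
assert (locus_end - end, locus_end - start) == (70, 80)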
blockstack/blockstack-core
blockstack/lib/rpc.py
https://github.com/blockstack/blockstack-core/blob/1dcfdd39b152d29ce13e736a6a1a0981401a0505/blockstack/lib/rpc.py#L905-L921
def GET_user_profile( self, path_info, user_id ): """ Get a user profile. Reply the profile on success Return 404 on failure to load """ if not check_name(user_id) and not check_subdomain(user_id): return self._reply_json({'error': 'Invalid name or subdomain'}, status_code=400) blockstackd_url = get_blockstackd_url() resp = blockstackd_client.resolve_profile(user_id, hostport=blockstackd_url) if json_is_error(resp): self._reply_json({'error': resp['error']}, status_code=404) return self._reply_json(resp['profile']) return
[ "def", "GET_user_profile", "(", "self", ",", "path_info", ",", "user_id", ")", ":", "if", "not", "check_name", "(", "user_id", ")", "and", "not", "check_subdomain", "(", "user_id", ")", ":", "return", "self", ".", "_reply_json", "(", "{", "'error'", ":", "'Invalid name or subdomain'", "}", ",", "status_code", "=", "400", ")", "blockstackd_url", "=", "get_blockstackd_url", "(", ")", "resp", "=", "blockstackd_client", ".", "resolve_profile", "(", "user_id", ",", "hostport", "=", "blockstackd_url", ")", "if", "json_is_error", "(", "resp", ")", ":", "self", ".", "_reply_json", "(", "{", "'error'", ":", "resp", "[", "'error'", "]", "}", ",", "status_code", "=", "404", ")", "return", "self", ".", "_reply_json", "(", "resp", "[", "'profile'", "]", ")", "return" ]
Get a user profile. Reply the profile on success Return 404 on failure to load
[ "Get", "a", "user", "profile", ".", "Reply", "the", "profile", "on", "success", "Return", "404", "on", "failure", "to", "load" ]
python
train
37.470588
wummel/patool
patoolib/programs/xz.py
https://github.com/wummel/patool/blob/d7e64d9fd60faaa4b3f824bd97c43ce59b185c40/patoolib/programs/xz.py#L24-L31
def list_xz (archive, compression, cmd, verbosity, interactive):
    """List an XZ archive."""
    cmdlist = [cmd]
    cmdlist.append('-l')
    if verbosity > 1:
        cmdlist.append('-v')
    cmdlist.append(archive)
    return cmdlist
[ "def", "list_xz", "(", "archive", ",", "compression", ",", "cmd", ",", "verbosity", ",", "interactive", ")", ":", "cmdlist", "=", "[", "cmd", "]", "cmdlist", ".", "append", "(", "'-l'", ")", "if", "verbosity", ">", "1", ":", "cmdlist", ".", "append", "(", "'-v'", ")", "cmdlist", ".", "append", "(", "archive", ")", "return", "cmdlist" ]
List an XZ archive.
[ "List", "a", "XZ", "archive", "." ]
python
train
28.625
tensorflow/tensor2tensor
tensor2tensor/models/image_transformer_2d.py
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/models/image_transformer_2d.py#L501-L519
def imagetransformer_base_10l_8h_big_uncond_dr03_dan_64_2d():
  """big 2d model for unconditional generation on imagenet."""
  hparams = image_transformer2d_base()
  hparams.unconditional = True
  hparams.hidden_size = 512
  hparams.batch_size = 1
  hparams.img_len = 64
  hparams.num_heads = 8
  hparams.filter_size = 2048
  hparams.batch_size = 1
  hparams.max_length = 3075
  hparams.max_length = 14000
  hparams.layer_preprocess_sequence = "none"
  hparams.layer_postprocess_sequence = "dan"
  hparams.layer_prepostprocess_dropout = 0.1
  hparams.dec_attention_type = cia.AttentionType.LOCAL_2D
  hparams.query_shape = (16, 16)
  hparams.memory_flange = (8, 8)
  return hparams
[ "def", "imagetransformer_base_10l_8h_big_uncond_dr03_dan_64_2d", "(", ")", ":", "hparams", "=", "image_transformer2d_base", "(", ")", "hparams", ".", "unconditional", "=", "True", "hparams", ".", "hidden_size", "=", "512", "hparams", ".", "batch_size", "=", "1", "hparams", ".", "img_len", "=", "64", "hparams", ".", "num_heads", "=", "8", "hparams", ".", "filter_size", "=", "2048", "hparams", ".", "batch_size", "=", "1", "hparams", ".", "max_length", "=", "3075", "hparams", ".", "max_length", "=", "14000", "hparams", ".", "layer_preprocess_sequence", "=", "\"none\"", "hparams", ".", "layer_postprocess_sequence", "=", "\"dan\"", "hparams", ".", "layer_prepostprocess_dropout", "=", "0.1", "hparams", ".", "dec_attention_type", "=", "cia", ".", "AttentionType", ".", "LOCAL_2D", "hparams", ".", "query_shape", "=", "(", "16", ",", "16", ")", "hparams", ".", "memory_flange", "=", "(", "8", ",", "8", ")", "return", "hparams" ]
big 2d model for unconditional generation on imagenet.
[ "big", "1d", "model", "for", "unconditional", "generation", "on", "imagenet", "." ]
python
train
34.894737
pypa/setuptools
setuptools/dep_util.py
https://github.com/pypa/setuptools/blob/83c667e0b2a98193851c07115d1af65011ed0fb6/setuptools/dep_util.py#L6-L23
def newer_pairwise_group(sources_groups, targets): """Walk both arguments in parallel, testing if each source group is newer than its corresponding target. Returns a pair of lists (sources_groups, targets) where sources is newer than target, according to the semantics of 'newer_group()'. """ if len(sources_groups) != len(targets): raise ValueError("'sources_group' and 'targets' must be the same length") # build a pair of lists (sources_groups, targets) where source is newer n_sources = [] n_targets = [] for i in range(len(sources_groups)): if newer_group(sources_groups[i], targets[i]): n_sources.append(sources_groups[i]) n_targets.append(targets[i]) return n_sources, n_targets
[ "def", "newer_pairwise_group", "(", "sources_groups", ",", "targets", ")", ":", "if", "len", "(", "sources_groups", ")", "!=", "len", "(", "targets", ")", ":", "raise", "ValueError", "(", "\"'sources_group' and 'targets' must be the same length\"", ")", "# build a pair of lists (sources_groups, targets) where source is newer", "n_sources", "=", "[", "]", "n_targets", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "sources_groups", ")", ")", ":", "if", "newer_group", "(", "sources_groups", "[", "i", "]", ",", "targets", "[", "i", "]", ")", ":", "n_sources", ".", "append", "(", "sources_groups", "[", "i", "]", ")", "n_targets", ".", "append", "(", "targets", "[", "i", "]", ")", "return", "n_sources", ",", "n_targets" ]
Walk both arguments in parallel, testing if each source group is newer than its corresponding target. Returns a pair of lists (sources_groups, targets) where sources is newer than target, according to the semantics of 'newer_group()'.
[ "Walk", "both", "arguments", "in", "parallel", "testing", "if", "each", "source", "group", "is", "newer", "than", "its", "corresponding", "target", ".", "Returns", "a", "pair", "of", "lists", "(", "sources_groups", "targets", ")", "where", "sources", "is", "newer", "than", "target", "according", "to", "the", "semantics", "of", "newer_group", "()", "." ]
python
train
41.888889
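A hedged usage sketch for newer_pairwise_group; the file names are hypothetical, they would need to exist on disk, and the result depends on their actual mtimes.

sources = [['alpha.c', 'alpha.h'], ['beta.c']]   # hypothetical inputs
targets = ['alpha.o', 'beta.o']
stale_sources, stale_targets = newer_pairwise_group(sources, targets)
# Each returned pair is a group whose sources are newer than its target,
# i.e. the targets that would need rebuilding.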
ninuxorg/nodeshot
nodeshot/community/notifications/views.py
https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/views.py#L48-L60
def get_unread(self, request, notifications, mark_as_read):
    """
    return unread notifications
    and mark as read (unless read=false param is passed)
    """
    notifications = notifications.filter(is_read=False)
    serializer = UnreadNotificationSerializer(list(notifications),  # evaluate queryset
                                              many=True,
                                              context=self.get_serializer_context())
    # mark unread notifications as read (default behaviour)
    if mark_as_read:
        notifications.update(is_read=True)
    return Response(serializer.data)
[ "def", "get_unread", "(", "self", ",", "request", ",", "notifications", ",", "mark_as_read", ")", ":", "notifications", "=", "notifications", ".", "filter", "(", "is_read", "=", "False", ")", "serializer", "=", "UnreadNotificationSerializer", "(", "list", "(", "notifications", ")", ",", "# evaluate queryset", "many", "=", "True", ",", "context", "=", "self", ".", "get_serializer_context", "(", ")", ")", "# retrieve unread notifications as read (default behaviour)", "if", "mark_as_read", ":", "notifications", ".", "update", "(", "is_read", "=", "True", ")", "return", "Response", "(", "serializer", ".", "data", ")" ]
return unread notifications and mark as read (unless read=false param is passed)
[ "return", "unread", "notifications", "and", "mark", "as", "read", "(", "unless", "read", "=", "false", "param", "is", "passed", ")" ]
python
train
50.076923
elastic/elasticsearch-py
elasticsearch/client/xpack/ilm.py
https://github.com/elastic/elasticsearch-py/blob/2aab285c8f506f3863cbdaba3c90a685c510ba00/elasticsearch/client/xpack/ilm.py#L6-L14
def delete_lifecycle(self, policy=None, params=None): """ `<https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html>`_ :arg policy: The name of the index lifecycle policy """ return self.transport.perform_request( "DELETE", _make_path("_ilm", "policy", policy), params=params )
[ "def", "delete_lifecycle", "(", "self", ",", "policy", "=", "None", ",", "params", "=", "None", ")", ":", "return", "self", ".", "transport", ".", "perform_request", "(", "\"DELETE\"", ",", "_make_path", "(", "\"_ilm\"", ",", "\"policy\"", ",", "policy", ")", ",", "params", "=", "params", ")" ]
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html>`_ :arg policy: The name of the index lifecycle policy
[ "<https", ":", "//", "www", ".", "elastic", ".", "co", "/", "guide", "/", "en", "/", "elasticsearch", "/", "reference", "/", "current", "/", "ilm", "-", "delete", "-", "lifecycle", ".", "html", ">", "_" ]
python
train
40.444444
rstoneback/pysatMagVect
pysatMagVect/_core.py
https://github.com/rstoneback/pysatMagVect/blob/3fdc87ffbe05be58123f80f880d1237c2f34c7be/pysatMagVect/_core.py#L24-L48
def geocentric_to_ecef(latitude, longitude, altitude): """Convert geocentric coordinates into ECEF Parameters ---------- latitude : float or array_like Geocentric latitude (degrees) longitude : float or array_like Geocentric longitude (degrees) altitude : float or array_like Height (km) above presumed spherical Earth with radius 6371 km. Returns ------- x, y, z numpy arrays of x, y, z locations in km """ r = earth_geo_radius + altitude x = r * np.cos(np.deg2rad(latitude)) * np.cos(np.deg2rad(longitude)) y = r * np.cos(np.deg2rad(latitude)) * np.sin(np.deg2rad(longitude)) z = r * np.sin(np.deg2rad(latitude)) return x, y, z
[ "def", "geocentric_to_ecef", "(", "latitude", ",", "longitude", ",", "altitude", ")", ":", "r", "=", "earth_geo_radius", "+", "altitude", "x", "=", "r", "*", "np", ".", "cos", "(", "np", ".", "deg2rad", "(", "latitude", ")", ")", "*", "np", ".", "cos", "(", "np", ".", "deg2rad", "(", "longitude", ")", ")", "y", "=", "r", "*", "np", ".", "cos", "(", "np", ".", "deg2rad", "(", "latitude", ")", ")", "*", "np", ".", "sin", "(", "np", ".", "deg2rad", "(", "longitude", ")", ")", "z", "=", "r", "*", "np", ".", "sin", "(", "np", ".", "deg2rad", "(", "latitude", ")", ")", "return", "x", ",", "y", ",", "z" ]
Convert geocentric coordinates into ECEF Parameters ---------- latitude : float or array_like Geocentric latitude (degrees) longitude : float or array_like Geocentric longitude (degrees) altitude : float or array_like Height (km) above presumed spherical Earth with radius 6371 km. Returns ------- x, y, z numpy arrays of x, y, z locations in km
[ "Convert", "geocentric", "coordinates", "into", "ECEF", "Parameters", "----------", "latitude", ":", "float", "or", "array_like", "Geocentric", "latitude", "(", "degrees", ")", "longitude", ":", "float", "or", "array_like", "Geocentric", "longitude", "(", "degrees", ")", "altitude", ":", "float", "or", "array_like", "Height", "(", "km", ")", "above", "presumed", "spherical", "Earth", "with", "radius", "6371", "km", ".", "Returns", "-------", "x", "y", "z", "numpy", "arrays", "of", "x", "y", "z", "locations", "in", "km" ]
python
train
28.88
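A quick numeric check of the spherical conversion above, assuming the function and its module-level earth_geo_radius constant (6371 km) are in scope.

import numpy as np

x, y, z = geocentric_to_ecef(0.0, 0.0, 0.0)
assert np.isclose(x, 6371.0) and np.isclose(y, 0.0) and np.isclose(z, 0.0)
x, y, z = geocentric_to_ecef(90.0, 0.0, 0.0)   # north pole: all radius in z
assert np.isclose(z, 6371.0)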
saltstack/salt
salt/modules/layman.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/layman.py#L68-L98
def delete(overlay):
    '''
    Remove the given overlay from your locally installed overlays. Specify
    'ALL' to remove all overlays.

    Return a list of the overlay(s) that were removed:

    CLI Example:

    .. code-block:: bash

        salt '*' layman.delete <overlay name>
    '''
    ret = list()

    old_overlays = list_local()
    cmd = 'layman --quietness=0 --delete {0}'.format(overlay)
    delete_attempt = __salt__['cmd.run_all'](cmd, python_shell=False)
    if delete_attempt['retcode'] != 0:
        raise salt.exceptions.CommandExecutionError(delete_attempt['stdout'])

    new_overlays = list_local()

    # If we now have no overlays added, We need to ensure that the make.conf
    # does not source layman's make.conf, as it will break emerge
    if not new_overlays:
        srcline = 'source /var/lib/layman/make.conf'
        makeconf = _get_makeconf()
        if __salt__['file.contains'](makeconf, 'layman'):
            __salt__['file.sed'](makeconf, srcline, '')

    ret = [overlay for overlay in old_overlays if overlay not in new_overlays]
    return ret
[ "def", "delete", "(", "overlay", ")", ":", "ret", "=", "list", "(", ")", "old_overlays", "=", "list_local", "(", ")", "cmd", "=", "'layman --quietness=0 --delete {0}'", ".", "format", "(", "overlay", ")", "delete_attempt", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "delete_attempt", "[", "'retcode'", "]", "!=", "0", ":", "raise", "salt", ".", "exceptions", ".", "CommandExecutionError", "(", "delete_attempt", "[", "'stdout'", "]", ")", "new_overlays", "=", "list_local", "(", ")", "# If we now have no overlays added, We need to ensure that the make.conf", "# does not source layman's make.conf, as it will break emerge", "if", "not", "new_overlays", ":", "srcline", "=", "'source /var/lib/layman/make.conf'", "makeconf", "=", "_get_makeconf", "(", ")", "if", "__salt__", "[", "'file.contains'", "]", "(", "makeconf", ",", "'layman'", ")", ":", "__salt__", "[", "'file.sed'", "]", "(", "makeconf", ",", "srcline", ",", "''", ")", "ret", "=", "[", "overlay", "for", "overlay", "in", "old_overlays", "if", "overlay", "not", "in", "new_overlays", "]", "return", "ret" ]
Remove the given overlay from your locally installed overlays. Specify 'ALL' to remove all overlays. Return a list of the overlay(s) that were removed: CLI Example: .. code-block:: bash salt '*' layman.delete <overlay name>
[ "Remove", "the", "given", "overlay", "from", "the", "your", "locally", "installed", "overlays", ".", "Specify", "ALL", "to", "remove", "all", "overlays", "." ]
python
train
34.290323
andrenarchy/krypy
krypy/utils.py
https://github.com/andrenarchy/krypy/blob/4883ec9a61d64ea56489e15c35cc40f0633ab2f1/krypy/utils.py#L98-L111
def shape_vecs(*args): '''Reshape all ndarrays with ``shape==(n,)`` to ``shape==(n,1)``. Recognizes ndarrays and ignores all others.''' ret_args = [] flat_vecs = True for arg in args: if type(arg) is numpy.ndarray: if len(arg.shape) == 1: arg = shape_vec(arg) else: flat_vecs = False ret_args.append(arg) return flat_vecs, ret_args
[ "def", "shape_vecs", "(", "*", "args", ")", ":", "ret_args", "=", "[", "]", "flat_vecs", "=", "True", "for", "arg", "in", "args", ":", "if", "type", "(", "arg", ")", "is", "numpy", ".", "ndarray", ":", "if", "len", "(", "arg", ".", "shape", ")", "==", "1", ":", "arg", "=", "shape_vec", "(", "arg", ")", "else", ":", "flat_vecs", "=", "False", "ret_args", ".", "append", "(", "arg", ")", "return", "flat_vecs", ",", "ret_args" ]
Reshape all ndarrays with ``shape==(n,)`` to ``shape==(n,1)``. Recognizes ndarrays and ignores all others.
[ "Reshape", "all", "ndarrays", "with", "shape", "==", "(", "n", ")", "to", "shape", "==", "(", "n", "1", ")", "." ]
python
train
29.642857
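A short sketch of shape_vecs in action, assuming the companion helper shape_vec from the same module is in scope; the inputs are arbitrary.

import numpy as np

flat, (v, M, s) = shape_vecs(np.ones(3), np.ones((3, 2)), 'untouched')
# The 1-d array is reshaped to a column, the 2-d array and the string pass
# through unchanged, and flat is False because a non-flat ndarray was seen.
print(flat, v.shape, M.shape, s)   # False (3, 1) (3, 2) untouched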
sdispater/pendulum
pendulum/datetime.py
https://github.com/sdispater/pendulum/blob/94d28b0d3cb524ae02361bd1ed7ea03e2e655e4e/pendulum/datetime.py#L1187-L1212
def _first_of_month(self, day_of_week): """ Modify to the first occurrence of a given day of the week in the current month. If no day_of_week is provided, modify to the first day of the month. Use the supplied consts to indicate the desired day_of_week, ex. DateTime.MONDAY. :type day_of_week: int :rtype: DateTime """ dt = self.start_of("day") if day_of_week is None: return dt.set(day=1) month = calendar.monthcalendar(dt.year, dt.month) calendar_day = (day_of_week - 1) % 7 if month[0][calendar_day] > 0: day_of_month = month[0][calendar_day] else: day_of_month = month[1][calendar_day] return dt.set(day=day_of_month)
[ "def", "_first_of_month", "(", "self", ",", "day_of_week", ")", ":", "dt", "=", "self", ".", "start_of", "(", "\"day\"", ")", "if", "day_of_week", "is", "None", ":", "return", "dt", ".", "set", "(", "day", "=", "1", ")", "month", "=", "calendar", ".", "monthcalendar", "(", "dt", ".", "year", ",", "dt", ".", "month", ")", "calendar_day", "=", "(", "day_of_week", "-", "1", ")", "%", "7", "if", "month", "[", "0", "]", "[", "calendar_day", "]", ">", "0", ":", "day_of_month", "=", "month", "[", "0", "]", "[", "calendar_day", "]", "else", ":", "day_of_month", "=", "month", "[", "1", "]", "[", "calendar_day", "]", "return", "dt", ".", "set", "(", "day", "=", "day_of_month", ")" ]
Modify to the first occurrence of a given day of the week in the current month. If no day_of_week is provided, modify to the first day of the month. Use the supplied consts to indicate the desired day_of_week, ex. DateTime.MONDAY. :type day_of_week: int :rtype: DateTime
[ "Modify", "to", "the", "first", "occurrence", "of", "a", "given", "day", "of", "the", "week", "in", "the", "current", "month", ".", "If", "no", "day_of_week", "is", "provided", "modify", "to", "the", "first", "day", "of", "the", "month", ".", "Use", "the", "supplied", "consts", "to", "indicate", "the", "desired", "day_of_week", "ex", ".", "DateTime", ".", "MONDAY", "." ]
python
train
29.192308
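A standard-library sketch of the calendar.monthcalendar lookup the method relies on; the month is arbitrary, and MONDAY is assumed to equal 1, matching the (day_of_week - 1) % 7 mapping above.

import calendar

month = calendar.monthcalendar(2019, 3)    # March 2019, Monday-first rows
day_of_week = 1                            # assumed MONDAY constant
col = (day_of_week - 1) % 7                # Monday's column in the grid
first_monday = month[0][col] if month[0][col] > 0 else month[1][col]
assert first_monday == 4                   # 2019-03-04 was the first Monday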
bfrog/whizzer
whizzer/server.py
https://github.com/bfrog/whizzer/blob/a1e43084b3ac8c1f3fb4ada081777cdbf791fd77/whizzer/server.py#L48-L55
def closed(self, reason): """Callback performed when the transport is closed.""" self.server.remove_connection(self) self.protocol.connection_lost(reason) if not isinstance(reason, ConnectionClosed): logger.warn("connection closed, reason: %s" % str(reason)) else: logger.info("connection closed")
[ "def", "closed", "(", "self", ",", "reason", ")", ":", "self", ".", "server", ".", "remove_connection", "(", "self", ")", "self", ".", "protocol", ".", "connection_lost", "(", "reason", ")", "if", "not", "isinstance", "(", "reason", ",", "ConnectionClosed", ")", ":", "logger", ".", "warn", "(", "\"connection closed, reason: %s\"", "%", "str", "(", "reason", ")", ")", "else", ":", "logger", ".", "info", "(", "\"connection closed\"", ")" ]
Callback performed when the transport is closed.
[ "Callback", "performed", "when", "the", "transport", "is", "closed", "." ]
python
train
44.25