repo: stringlengths (7 to 55)
path: stringlengths (4 to 223)
url: stringlengths (87 to 315)
code: stringlengths (75 to 104k)
code_tokens: list
docstring: stringlengths (1 to 46.9k)
docstring_tokens: list
language: stringclasses (1 value)
partition: stringclasses (3 values)
avg_line_len: float64 (7.91 to 980)
wummel/linkchecker
linkcheck/checker/itmsservicesurl.py
https://github.com/wummel/linkchecker/blob/c2ce810c3fb00b895a841a7be6b2e78c64e7b042/linkcheck/checker/itmsservicesurl.py#L28-L32
def check_syntax(self): """Only logs that this URL is unknown.""" super(ItmsServicesUrl, self).check_syntax() if u"url=" not in self.urlparts[3]: self.set_result(_("Missing required url parameter"), valid=False)
[ "def", "check_syntax", "(", "self", ")", ":", "super", "(", "ItmsServicesUrl", ",", "self", ")", ".", "check_syntax", "(", ")", "if", "u\"url=\"", "not", "in", "self", ".", "urlparts", "[", "3", "]", ":", "self", ".", "set_result", "(", "_", "(", "\"Missing required url parameter\"", ")", ",", "valid", "=", "False", ")" ]
Only logs that this URL is unknown.
[ "Only", "logs", "that", "this", "URL", "is", "unknown", "." ]
python
train
48.6
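A standalone illustration of the query check in the snippet above, using only the standard library. The itms-services URL is made up, and the assumption that `self.urlparts[3]` corresponds to the query component of a split URL is mine, not confirmed by the row.

```python
from urllib.parse import urlsplit

# Hypothetical itms-services link; index 3 of urlsplit's result is the query string,
# which is what the snippet's self.urlparts[3] is assumed to correspond to.
url = "itms-services://?action=download-manifest&url=https://example.com/app.plist"
parts = urlsplit(url)
print("url=" in parts[3])  # True -> the required url parameter is present
```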
arne-cl/discoursegraphs
src/discoursegraphs/discoursegraph.py
https://github.com/arne-cl/discoursegraphs/blob/842f0068a3190be2c75905754521b176b25a54fb/src/discoursegraphs/discoursegraph.py#L905-L920
def get_node_annotation_layers(docgraph): """ WARNING: this is higly inefficient! Fix this via Issue #36. Returns ------- all_layers : set or dict the set of all annotation layers used for annotating nodes in the given graph """ all_layers = set() for node_id, node_attribs in docgraph.nodes_iter(data=True): for layer in node_attribs['layers']: all_layers.add(layer) return all_layers
[ "def", "get_node_annotation_layers", "(", "docgraph", ")", ":", "all_layers", "=", "set", "(", ")", "for", "node_id", ",", "node_attribs", "in", "docgraph", ".", "nodes_iter", "(", "data", "=", "True", ")", ":", "for", "layer", "in", "node_attribs", "[", "'layers'", "]", ":", "all_layers", ".", "add", "(", "layer", ")", "return", "all_layers" ]
WARNING: this is higly inefficient! Fix this via Issue #36. Returns ------- all_layers : set or dict the set of all annotation layers used for annotating nodes in the given graph
[ "WARNING", ":", "this", "is", "higly", "inefficient!", "Fix", "this", "via", "Issue", "#36", "." ]
python
train
27.9375
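A minimal sketch of the same layer-collection idea on a plain networkx graph. It uses the networkx 2.x API (`nodes(data=True)` rather than the `nodes_iter` call in the snippet), and the node attributes are invented for the example.

```python
import networkx as nx

g = nx.DiGraph()
g.add_node("w1", layers={"tiger", "tiger:token"})
g.add_node("s1", layers={"rst"})

all_layers = set()
for node_id, node_attribs in g.nodes(data=True):
    for layer in node_attribs["layers"]:
        all_layers.add(layer)

print(all_layers)  # {'tiger', 'tiger:token', 'rst'}
```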
frictionlessdata/tableschema-sql-py
tableschema_sql/storage.py
https://github.com/frictionlessdata/tableschema-sql-py/blob/81ca4b564f6dac5fe3adc6553b353826190df6f8/tableschema_sql/storage.py#L162-L178
def iter(self, bucket): """https://github.com/frictionlessdata/tableschema-sql-py#storage """ # Get table and fallbacks table = self.__get_table(bucket) schema = tableschema.Schema(self.describe(bucket)) # Open and close transaction with self.__connection.begin(): # Streaming could be not working for some backends: # http://docs.sqlalchemy.org/en/latest/core/connections.html select = table.select().execution_options(stream_results=True) result = select.execute() for row in result: row = self.__mapper.restore_row(row, schema=schema) yield row
[ "def", "iter", "(", "self", ",", "bucket", ")", ":", "# Get table and fallbacks", "table", "=", "self", ".", "__get_table", "(", "bucket", ")", "schema", "=", "tableschema", ".", "Schema", "(", "self", ".", "describe", "(", "bucket", ")", ")", "# Open and close transaction", "with", "self", ".", "__connection", ".", "begin", "(", ")", ":", "# Streaming could be not working for some backends:", "# http://docs.sqlalchemy.org/en/latest/core/connections.html", "select", "=", "table", ".", "select", "(", ")", ".", "execution_options", "(", "stream_results", "=", "True", ")", "result", "=", "select", ".", "execute", "(", ")", "for", "row", "in", "result", ":", "row", "=", "self", ".", "__mapper", ".", "restore_row", "(", "row", ",", "schema", "=", "schema", ")", "yield", "row" ]
https://github.com/frictionlessdata/tableschema-sql-py#storage
[ "https", ":", "//", "github", ".", "com", "/", "frictionlessdata", "/", "tableschema", "-", "sql", "-", "py#storage" ]
python
train
40.058824
Nic30/hwt
hwt/synthesizer/rtlLevel/optimalizator.py
https://github.com/Nic30/hwt/blob/8cbb399e326da3b22c233b98188a9d08dec057e6/hwt/synthesizer/rtlLevel/optimalizator.py#L14-L63
def removeUnconnectedSignals(netlist): """ If signal is not driving anything remove it """ toDelete = set() toSearch = netlist.signals while toSearch: _toSearch = set() for sig in toSearch: if not sig.endpoints: try: if sig._interface is not None: # skip interfaces before we want to check them, # they should not be optimized out from design continue except AttributeError: pass for e in sig.drivers: # drivers of this signal are useless rm them if isinstance(e, Operator): inputs = e.operands if e.result is sig: e.result = None else: inputs = e._inputs netlist.statements.discard(e) for op in inputs: if not isinstance(op, Value): try: op.endpoints.remove(e) except KeyError: # this operator has 2x+ same operand continue _toSearch.add(op) toDelete.add(sig) if toDelete: for sig in toDelete: if sig.ctx == netlist: netlist.signals.remove(sig) _toSearch.discard(sig) toDelete = set() toSearch = _toSearch
[ "def", "removeUnconnectedSignals", "(", "netlist", ")", ":", "toDelete", "=", "set", "(", ")", "toSearch", "=", "netlist", ".", "signals", "while", "toSearch", ":", "_toSearch", "=", "set", "(", ")", "for", "sig", "in", "toSearch", ":", "if", "not", "sig", ".", "endpoints", ":", "try", ":", "if", "sig", ".", "_interface", "is", "not", "None", ":", "# skip interfaces before we want to check them,", "# they should not be optimized out from design", "continue", "except", "AttributeError", ":", "pass", "for", "e", "in", "sig", ".", "drivers", ":", "# drivers of this signal are useless rm them", "if", "isinstance", "(", "e", ",", "Operator", ")", ":", "inputs", "=", "e", ".", "operands", "if", "e", ".", "result", "is", "sig", ":", "e", ".", "result", "=", "None", "else", ":", "inputs", "=", "e", ".", "_inputs", "netlist", ".", "statements", ".", "discard", "(", "e", ")", "for", "op", "in", "inputs", ":", "if", "not", "isinstance", "(", "op", ",", "Value", ")", ":", "try", ":", "op", ".", "endpoints", ".", "remove", "(", "e", ")", "except", "KeyError", ":", "# this operator has 2x+ same operand", "continue", "_toSearch", ".", "add", "(", "op", ")", "toDelete", ".", "add", "(", "sig", ")", "if", "toDelete", ":", "for", "sig", "in", "toDelete", ":", "if", "sig", ".", "ctx", "==", "netlist", ":", "netlist", ".", "signals", ".", "remove", "(", "sig", ")", "_toSearch", ".", "discard", "(", "sig", ")", "toDelete", "=", "set", "(", ")", "toSearch", "=", "_toSearch" ]
If signal is not driving anything remove it
[ "If", "signal", "is", "not", "driving", "anything", "remove", "it" ]
python
test
31.86
pyGrowler/Growler
growler/http/response.py
https://github.com/pyGrowler/Growler/blob/90c923ff204f28b86a01d741224987a22f69540f/growler/http/response.py#L228-L247
def send_file(self, filename, status=200): """ Reads in the file 'filename' and sends bytes to client Parameters ---------- filename : str Filename of the file to read status : int, optional The HTTP status code, defaults to 200 (OK) """ if isinstance(filename, Path) and sys.version_info >= (3, 5): self.message = filename.read_bytes() else: with io.FileIO(str(filename)) as f: self.message = f.read() self.status_code = status self.send_headers() self.write() self.write_eof()
[ "def", "send_file", "(", "self", ",", "filename", ",", "status", "=", "200", ")", ":", "if", "isinstance", "(", "filename", ",", "Path", ")", "and", "sys", ".", "version_info", ">=", "(", "3", ",", "5", ")", ":", "self", ".", "message", "=", "filename", ".", "read_bytes", "(", ")", "else", ":", "with", "io", ".", "FileIO", "(", "str", "(", "filename", ")", ")", "as", "f", ":", "self", ".", "message", "=", "f", ".", "read", "(", ")", "self", ".", "status_code", "=", "status", "self", ".", "send_headers", "(", ")", "self", ".", "write", "(", ")", "self", ".", "write_eof", "(", ")" ]
Reads in the file 'filename' and sends bytes to client Parameters ---------- filename : str Filename of the file to read status : int, optional The HTTP status code, defaults to 200 (OK)
[ "Reads", "in", "the", "file", "filename", "and", "sends", "bytes", "to", "client" ]
python
train
31.4
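The Path-or-string fallback in `send_file` can be exercised on its own; this is a hedged sketch with a hypothetical helper and file name, not the Growler API.

```python
import io
import sys
from pathlib import Path

def read_payload(filename):
    # pathlib.Path.read_bytes() exists from Python 3.5 on; otherwise fall back to io.FileIO
    if isinstance(filename, Path) and sys.version_info >= (3, 5):
        return filename.read_bytes()
    with io.FileIO(str(filename)) as f:
        return f.read()

# Both forms return the same bytes for an existing file,
# e.g. read_payload(Path("index.html")) vs. read_payload("index.html")
```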
paylogic/pip-accel
pip_accel/config.py
https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L267-L270
def python_executable(self): """The absolute pathname of the Python executable (a string).""" return self.get(property_name='python_executable', default=sys.executable or os.path.join(self.install_prefix, 'bin', 'python'))
[ "def", "python_executable", "(", "self", ")", ":", "return", "self", ".", "get", "(", "property_name", "=", "'python_executable'", ",", "default", "=", "sys", ".", "executable", "or", "os", ".", "path", ".", "join", "(", "self", ".", "install_prefix", ",", "'bin'", ",", "'python'", ")", ")" ]
The absolute pathname of the Python executable (a string).
[ "The", "absolute", "pathname", "of", "the", "Python", "executable", "(", "a", "string", ")", "." ]
python
train
64.75
ZEDGR/pychal
challonge/api.py
https://github.com/ZEDGR/pychal/blob/3600fa9e0557a2a14eb1ad0c0711d28dad3693d7/challonge/api.py#L94-L97
def fetch_and_parse(method, uri, params_prefix=None, **params): """Fetch the given uri and return python dictionary with parsed data-types.""" response = fetch(method, uri, params_prefix, **params) return _parse(json.loads(response.text))
[ "def", "fetch_and_parse", "(", "method", ",", "uri", ",", "params_prefix", "=", "None", ",", "*", "*", "params", ")", ":", "response", "=", "fetch", "(", "method", ",", "uri", ",", "params_prefix", ",", "*", "*", "params", ")", "return", "_parse", "(", "json", ".", "loads", "(", "response", ".", "text", ")", ")" ]
Fetch the given uri and return python dictionary with parsed data-types.
[ "Fetch", "the", "given", "uri", "and", "return", "python", "dictionary", "with", "parsed", "data", "-", "types", "." ]
python
train
61.75
python-diamond/Diamond
src/collectors/cpu/cpu.py
https://github.com/python-diamond/Diamond/blob/0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47/src/collectors/cpu/cpu.py#L50-L62
def get_default_config(self): """ Returns the default collector settings """ config = super(CPUCollector, self).get_default_config() config.update({ 'path': 'cpu', 'percore': 'True', 'xenfix': None, 'simple': 'False', 'normalize': 'False', }) return config
[ "def", "get_default_config", "(", "self", ")", ":", "config", "=", "super", "(", "CPUCollector", ",", "self", ")", ".", "get_default_config", "(", ")", "config", ".", "update", "(", "{", "'path'", ":", "'cpu'", ",", "'percore'", ":", "'True'", ",", "'xenfix'", ":", "None", ",", "'simple'", ":", "'False'", ",", "'normalize'", ":", "'False'", ",", "}", ")", "return", "config" ]
Returns the default collector settings
[ "Returns", "the", "default", "collector", "settings" ]
python
train
28.384615
pri22296/beautifultable
beautifultable/rows.py
https://github.com/pri22296/beautifultable/blob/c9638f73dff4bb1f341c9ee783e4e47f26efba0b/beautifultable/rows.py#L9-L63
def _get_row_within_width(self, row): """Process a row so that it is clamped by column_width. Parameters ---------- row : array_like A single row. Returns ------- list of list: List representation of the `row` after it has been processed according to width exceed policy. """ table = self._table lpw, rpw = table.left_padding_widths, table.right_padding_widths wep = table.width_exceed_policy list_of_rows = [] if (wep is WidthExceedPolicy.WEP_STRIP or wep is WidthExceedPolicy.WEP_ELLIPSIS): # Let's strip the row delimiter = '' if wep is WidthExceedPolicy.WEP_STRIP else '...' row_item_list = [] for index, row_item in enumerate(row): left_pad = table._column_pad * lpw[index] right_pad = table._column_pad * rpw[index] clmp_str = (left_pad + self._clamp_string(row_item, index, delimiter) + right_pad) row_item_list.append(clmp_str) list_of_rows.append(row_item_list) elif wep is WidthExceedPolicy.WEP_WRAP: # Let's wrap the row string_partition = [] for index, row_item in enumerate(row): width = table.column_widths[index] - lpw[index] - rpw[index] string_partition.append(textwrap(row_item, width)) for row_items in zip_longest(*string_partition, fillvalue=''): row_item_list = [] for index, row_item in enumerate(row_items): left_pad = table._column_pad * lpw[index] right_pad = table._column_pad * rpw[index] row_item_list.append(left_pad + row_item + right_pad) list_of_rows.append(row_item_list) if len(list_of_rows) == 0: return [[''] * table.column_count] else: return list_of_rows
[ "def", "_get_row_within_width", "(", "self", ",", "row", ")", ":", "table", "=", "self", ".", "_table", "lpw", ",", "rpw", "=", "table", ".", "left_padding_widths", ",", "table", ".", "right_padding_widths", "wep", "=", "table", ".", "width_exceed_policy", "list_of_rows", "=", "[", "]", "if", "(", "wep", "is", "WidthExceedPolicy", ".", "WEP_STRIP", "or", "wep", "is", "WidthExceedPolicy", ".", "WEP_ELLIPSIS", ")", ":", "# Let's strip the row", "delimiter", "=", "''", "if", "wep", "is", "WidthExceedPolicy", ".", "WEP_STRIP", "else", "'...'", "row_item_list", "=", "[", "]", "for", "index", ",", "row_item", "in", "enumerate", "(", "row", ")", ":", "left_pad", "=", "table", ".", "_column_pad", "*", "lpw", "[", "index", "]", "right_pad", "=", "table", ".", "_column_pad", "*", "rpw", "[", "index", "]", "clmp_str", "=", "(", "left_pad", "+", "self", ".", "_clamp_string", "(", "row_item", ",", "index", ",", "delimiter", ")", "+", "right_pad", ")", "row_item_list", ".", "append", "(", "clmp_str", ")", "list_of_rows", ".", "append", "(", "row_item_list", ")", "elif", "wep", "is", "WidthExceedPolicy", ".", "WEP_WRAP", ":", "# Let's wrap the row", "string_partition", "=", "[", "]", "for", "index", ",", "row_item", "in", "enumerate", "(", "row", ")", ":", "width", "=", "table", ".", "column_widths", "[", "index", "]", "-", "lpw", "[", "index", "]", "-", "rpw", "[", "index", "]", "string_partition", ".", "append", "(", "textwrap", "(", "row_item", ",", "width", ")", ")", "for", "row_items", "in", "zip_longest", "(", "*", "string_partition", ",", "fillvalue", "=", "''", ")", ":", "row_item_list", "=", "[", "]", "for", "index", ",", "row_item", "in", "enumerate", "(", "row_items", ")", ":", "left_pad", "=", "table", ".", "_column_pad", "*", "lpw", "[", "index", "]", "right_pad", "=", "table", ".", "_column_pad", "*", "rpw", "[", "index", "]", "row_item_list", ".", "append", "(", "left_pad", "+", "row_item", "+", "right_pad", ")", "list_of_rows", ".", "append", "(", "row_item_list", ")", "if", "len", "(", "list_of_rows", ")", "==", "0", ":", "return", "[", "[", "''", "]", "*", "table", ".", "column_count", "]", "else", ":", "return", "list_of_rows" ]
Process a row so that it is clamped by column_width. Parameters ---------- row : array_like A single row. Returns ------- list of list: List representation of the `row` after it has been processed according to width exceed policy.
[ "Process", "a", "row", "so", "that", "it", "is", "clamped", "by", "column_width", "." ]
python
train
36.745455
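The WEP_WRAP branch is the interesting part of the snippet: each cell is wrapped to its column width, then the pieces are transposed with `zip_longest` so every physical line becomes one output row. A standalone sketch using the standard library's `textwrap.wrap` (the snippet's `textwrap` is beautifultable's own helper, so this is only an approximation):

```python
from itertools import zip_longest
from textwrap import wrap

row = ["a fairly long cell value", "short"]
column_widths = [10, 8]

# Wrap each cell to its column width, then transpose so each line becomes a row.
string_partition = [wrap(item, width) for item, width in zip(row, column_widths)]
for row_items in zip_longest(*string_partition, fillvalue=""):
    print([item.ljust(width) for item, width in zip(row_items, column_widths)])
# ['a fairly  ', 'short   ']
# ['long cell ', '        ']
# ['value     ', '        ']
```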
saltstack/salt
salt/utils/openstack/neutron.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/utils/openstack/neutron.py#L422-L428
def update_subnet(self, subnet, name=None): ''' Updates a subnet ''' subnet_id = self._find_subnet_id(subnet) return self.network_conn.update_subnet( subnet=subnet_id, body={'subnet': {'name': name}})
[ "def", "update_subnet", "(", "self", ",", "subnet", ",", "name", "=", "None", ")", ":", "subnet_id", "=", "self", ".", "_find_subnet_id", "(", "subnet", ")", "return", "self", ".", "network_conn", ".", "update_subnet", "(", "subnet", "=", "subnet_id", ",", "body", "=", "{", "'subnet'", ":", "{", "'name'", ":", "name", "}", "}", ")" ]
Updates a subnet
[ "Updates", "a", "subnet" ]
python
train
35.142857
pymacaron/pymacaron
pymacaron/api.py
https://github.com/pymacaron/pymacaron/blob/af244f203f8216108b39d374d46bf8e1813f13d5/pymacaron/api.py#L27-L35
def do_version(): """Return version details of the running server api""" v = ApiPool.ping.model.Version( name=ApiPool().current_server_name, version=ApiPool().current_server_api.get_version(), container=get_container_version(), ) log.info("/version: " + pprint.pformat(v)) return v
[ "def", "do_version", "(", ")", ":", "v", "=", "ApiPool", ".", "ping", ".", "model", ".", "Version", "(", "name", "=", "ApiPool", "(", ")", ".", "current_server_name", ",", "version", "=", "ApiPool", "(", ")", ".", "current_server_api", ".", "get_version", "(", ")", ",", "container", "=", "get_container_version", "(", ")", ",", ")", "log", ".", "info", "(", "\"/version: \"", "+", "pprint", ".", "pformat", "(", "v", ")", ")", "return", "v" ]
Return version details of the running server api
[ "Return", "version", "details", "of", "the", "running", "server", "api" ]
python
train
35.222222
huntrar/scrape
scrape/utils.py
https://github.com/huntrar/scrape/blob/bf877f6da5df3ed0f2bea60a95acf7df63c88002/scrape/utils.py#L309-L316
def parse_html(infile, xpath): """Filter HTML using XPath.""" if not isinstance(infile, lh.HtmlElement): infile = lh.fromstring(infile) infile = infile.xpath(xpath) if not infile: raise ValueError('XPath {0} returned no results.'.format(xpath)) return infile
[ "def", "parse_html", "(", "infile", ",", "xpath", ")", ":", "if", "not", "isinstance", "(", "infile", ",", "lh", ".", "HtmlElement", ")", ":", "infile", "=", "lh", ".", "fromstring", "(", "infile", ")", "infile", "=", "infile", ".", "xpath", "(", "xpath", ")", "if", "not", "infile", ":", "raise", "ValueError", "(", "'XPath {0} returned no results.'", ".", "format", "(", "xpath", ")", ")", "return", "infile" ]
Filter HTML using XPath.
[ "Filter", "HTML", "using", "XPath", "." ]
python
train
35.875
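`parse_html` accepts either raw HTML or an already-parsed `lh.HtmlElement`, where `lh` is lxml.html. A quick sketch of the underlying lxml calls it relies on (lxml assumed installed, markup made up):

```python
import lxml.html as lh

html = "<html><body><p>hello</p><p>world</p></body></html>"
doc = lh.fromstring(html)        # what parse_html does for string input
print(doc.xpath("//p/text()"))   # ['hello', 'world']
print(doc.xpath("//li"))         # [] -> parse_html would raise ValueError for this XPath
```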
fermiPy/fermipy
fermipy/roi_model.py
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1031-L1046
def create_from_xmlfile(cls, xmlfile, extdir=None): """Create a Source object from an XML file. Parameters ---------- xmlfile : str Path to XML file. extdir : str Path to the extended source archive. """ root = ElementTree.ElementTree(file=xmlfile).getroot() srcs = root.findall('source') if len(srcs) == 0: raise Exception('No sources found.') return cls.create_from_xml(srcs[0], extdir=extdir)
[ "def", "create_from_xmlfile", "(", "cls", ",", "xmlfile", ",", "extdir", "=", "None", ")", ":", "root", "=", "ElementTree", ".", "ElementTree", "(", "file", "=", "xmlfile", ")", ".", "getroot", "(", ")", "srcs", "=", "root", ".", "findall", "(", "'source'", ")", "if", "len", "(", "srcs", ")", "==", "0", ":", "raise", "Exception", "(", "'No sources found.'", ")", "return", "cls", ".", "create_from_xml", "(", "srcs", "[", "0", "]", ",", "extdir", "=", "extdir", ")" ]
Create a Source object from an XML file. Parameters ---------- xmlfile : str Path to XML file. extdir : str Path to the extended source archive.
[ "Create", "a", "Source", "object", "from", "an", "XML", "file", "." ]
python
train
31.125
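The XML handling in `create_from_xmlfile` is plain `xml.etree.ElementTree`; a minimal sketch of the lookup it performs, using a made-up source-library file written to disk first:

```python
import xml.etree.ElementTree as ElementTree

with open("roi.xml", "w") as f:  # hypothetical file name
    f.write("<source_library><source name='3FGL J0534.5+2201'/></source_library>")

root = ElementTree.ElementTree(file="roi.xml").getroot()
srcs = root.findall("source")
if len(srcs) == 0:
    raise Exception("No sources found.")
print(srcs[0].get("name"))  # 3FGL J0534.5+2201
```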
10gen/mongo-orchestration
mongo_orchestration/sharded_clusters.py
https://github.com/10gen/mongo-orchestration/blob/81fd2224205922ea2178b08190b53a33aec47261/mongo_orchestration/sharded_clusters.py#L540-L545
def router_del(self, cluster_id, router_id): """remove router from the ShardedCluster""" cluster = self._storage[cluster_id] result = cluster.router_remove(router_id) self._storage[cluster_id] = cluster return result
[ "def", "router_del", "(", "self", ",", "cluster_id", ",", "router_id", ")", ":", "cluster", "=", "self", ".", "_storage", "[", "cluster_id", "]", "result", "=", "cluster", ".", "router_remove", "(", "router_id", ")", "self", ".", "_storage", "[", "cluster_id", "]", "=", "cluster", "return", "result" ]
remove router from the ShardedCluster
[ "remove", "router", "from", "the", "ShardedCluster" ]
python
train
41.833333
tensorflow/tensor2tensor
tensor2tensor/utils/t2t_model.py
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/utils/t2t_model.py#L63-L87
def _flatten_dict(original_dict): """Flatten dict of dicts into a single dict with appropriate prefixes. Handles only 2 levels of nesting in the original dict. Args: original_dict: Dict which may contain one or more dicts. Returns: flat_dict: Dict without any nesting. Any dicts in the original dict have their keys as prefixes in the new dict. Raises: ValueError if the original dict has more than two levels of nesting. """ flat_dict = {} for key, value in original_dict.items(): if isinstance(value, dict): for name, tensor in value.items(): if isinstance(tensor, dict): raise ValueError("flatten_dict only handles 2 levels of nesting.") flat_key = "__" + key + "_" + name flat_dict[flat_key] = tensor else: flat_dict[key] = value return flat_dict
[ "def", "_flatten_dict", "(", "original_dict", ")", ":", "flat_dict", "=", "{", "}", "for", "key", ",", "value", "in", "original_dict", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "for", "name", ",", "tensor", "in", "value", ".", "items", "(", ")", ":", "if", "isinstance", "(", "tensor", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"flatten_dict only handles 2 levels of nesting.\"", ")", "flat_key", "=", "\"__\"", "+", "key", "+", "\"_\"", "+", "name", "flat_dict", "[", "flat_key", "]", "=", "tensor", "else", ":", "flat_dict", "[", "key", "]", "=", "value", "return", "flat_dict" ]
Flatten dict of dicts into a single dict with appropriate prefixes. Handles only 2 levels of nesting in the original dict. Args: original_dict: Dict which may contain one or more dicts. Returns: flat_dict: Dict without any nesting. Any dicts in the original dict have their keys as prefixes in the new dict. Raises: ValueError if the original dict has more than two levels of nesting.
[ "Flatten", "dict", "of", "dicts", "into", "a", "single", "dict", "with", "appropriate", "prefixes", "." ]
python
train
32.76
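The flattening logic in `_flatten_dict` does not depend on TensorFlow at all, so its behaviour is easy to check with plain dictionaries standing in for tensors:

```python
original_dict = {"inputs": 1, "targets": {"tokens": 2, "mask": 3}}

flat_dict = {}
for key, value in original_dict.items():
    if isinstance(value, dict):
        # Inner keys get the "__<outer>_<inner>" prefix described in the docstring.
        for name, tensor in value.items():
            flat_dict["__" + key + "_" + name] = tensor
    else:
        flat_dict[key] = value

print(flat_dict)  # {'inputs': 1, '__targets_tokens': 2, '__targets_mask': 3}
```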
mitsei/dlkit
dlkit/json_/repository/sessions.py
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/repository/sessions.py#L4510-L4533
def assign_composition_to_repository(self, composition_id, repository_id): """Adds an existing ``Composition`` to a ``Repository``. arg: composition_id (osid.id.Id): the ``Id`` of the ``Composition`` arg: repository_id (osid.id.Id): the ``Id`` of the ``Repository`` raise: AlreadyExists - ``composition_id`` already assigned to ``repository_id`` raise: NotFound - ``composition_id`` or ``repository_id`` not found raise: NullArgument - ``composition_id`` or ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin mgr = self._get_provider_manager('REPOSITORY', local=True) lookup_session = mgr.get_repository_lookup_session(proxy=self._proxy) lookup_session.get_repository(repository_id) # to raise NotFound self._assign_object_to_catalog(composition_id, repository_id)
[ "def", "assign_composition_to_repository", "(", "self", ",", "composition_id", ",", "repository_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin", "mgr", "=", "self", ".", "_get_provider_manager", "(", "'REPOSITORY'", ",", "local", "=", "True", ")", "lookup_session", "=", "mgr", ".", "get_repository_lookup_session", "(", "proxy", "=", "self", ".", "_proxy", ")", "lookup_session", ".", "get_repository", "(", "repository_id", ")", "# to raise NotFound", "self", ".", "_assign_object_to_catalog", "(", "composition_id", ",", "repository_id", ")" ]
Adds an existing ``Composition`` to a ``Repository``. arg: composition_id (osid.id.Id): the ``Id`` of the ``Composition`` arg: repository_id (osid.id.Id): the ``Id`` of the ``Repository`` raise: AlreadyExists - ``composition_id`` already assigned to ``repository_id`` raise: NotFound - ``composition_id`` or ``repository_id`` not found raise: NullArgument - ``composition_id`` or ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
[ "Adds", "an", "existing", "Composition", "to", "a", "Repository", "." ]
python
train
50.25
DiamondLightSource/python-workflows
workflows/services/common_service.py
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/services/common_service.py#L170-L183
def start_transport(self): """If a transport object has been defined then connect it now.""" if self.transport: if self.transport.connect(): self.log.debug("Service successfully connected to transport layer") else: raise RuntimeError("Service could not connect to transport layer") # direct all transport callbacks into the main queue self._transport_interceptor_counter = itertools.count() self.transport.subscription_callback_set_intercept( self._transport_interceptor ) else: self.log.debug("No transport layer defined for service. Skipping.")
[ "def", "start_transport", "(", "self", ")", ":", "if", "self", ".", "transport", ":", "if", "self", ".", "transport", ".", "connect", "(", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Service successfully connected to transport layer\"", ")", "else", ":", "raise", "RuntimeError", "(", "\"Service could not connect to transport layer\"", ")", "# direct all transport callbacks into the main queue", "self", ".", "_transport_interceptor_counter", "=", "itertools", ".", "count", "(", ")", "self", ".", "transport", ".", "subscription_callback_set_intercept", "(", "self", ".", "_transport_interceptor", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "\"No transport layer defined for service. Skipping.\"", ")" ]
If a transport object has been defined then connect it now.
[ "If", "a", "transport", "object", "has", "been", "defined", "then", "connect", "it", "now", "." ]
python
train
49.214286
Dallinger/Dallinger
dallinger/models.py
https://github.com/Dallinger/Dallinger/blob/76ca8217c709989c116d0ebd8fca37bd22f591af/dallinger/models.py#L1136-L1142
def flatten(self, lst): """Turn a list of lists into a list.""" if lst == []: return lst if isinstance(lst[0], list): return self.flatten(lst[0]) + self.flatten(lst[1:]) return lst[:1] + self.flatten(lst[1:])
[ "def", "flatten", "(", "self", ",", "lst", ")", ":", "if", "lst", "==", "[", "]", ":", "return", "lst", "if", "isinstance", "(", "lst", "[", "0", "]", ",", "list", ")", ":", "return", "self", ".", "flatten", "(", "lst", "[", "0", "]", ")", "+", "self", ".", "flatten", "(", "lst", "[", "1", ":", "]", ")", "return", "lst", "[", ":", "1", "]", "+", "self", ".", "flatten", "(", "lst", "[", "1", ":", "]", ")" ]
Turn a list of lists into a list.
[ "Turn", "a", "list", "of", "lists", "into", "a", "list", "." ]
python
train
36.857143
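Restated as a free function (the original is a method, so `self` is dropped), the recursion is easy to sanity-check:

```python
def flatten(lst):
    """Turn a list of lists into a flat list."""
    if lst == []:
        return lst
    if isinstance(lst[0], list):
        return flatten(lst[0]) + flatten(lst[1:])
    return lst[:1] + flatten(lst[1:])

print(flatten([1, [2, [3, 4]], 5]))  # [1, 2, 3, 4, 5]
```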
ten10solutions/Geist
geist/vision.py
https://github.com/ten10solutions/Geist/blob/a1ef16d8b4c3777735008b671a50acfde3ce7bf1/geist/vision.py#L450-L485
def find_threshold_near_density(img, density, low=0, high=255): """Find a threshold where the fraction of pixels above the threshold is closest to density where density is (count of pixels above threshold / count of pixels). The highest threshold closest to the desired density will be returned. Use low and high to exclude undesirable thresholds. :param img: target image :type img: 2d :class:`numpy.ndarray` :param density: target density :type density: float between 0.0 and 1.0 :param low: min threshold to test :type low: ubyte :param migh: max threshold to test :type low: ubyte :rtype: ubyte """ size = numpy.size(img) densities = [] last_t = None while True: t = ((high - low) // 2) + low if t == last_t: densities.sort(key=lambda x: (abs(x[0] - density), 256 - x[1])) return densities[0][1] else: last_t = t d = numpy.count_nonzero(img > t) / size densities.append((d, t)) if d < density: high = t elif d >= density: # search away from low low = t
[ "def", "find_threshold_near_density", "(", "img", ",", "density", ",", "low", "=", "0", ",", "high", "=", "255", ")", ":", "size", "=", "numpy", ".", "size", "(", "img", ")", "densities", "=", "[", "]", "last_t", "=", "None", "while", "True", ":", "t", "=", "(", "(", "high", "-", "low", ")", "//", "2", ")", "+", "low", "if", "t", "==", "last_t", ":", "densities", ".", "sort", "(", "key", "=", "lambda", "x", ":", "(", "abs", "(", "x", "[", "0", "]", "-", "density", ")", ",", "256", "-", "x", "[", "1", "]", ")", ")", "return", "densities", "[", "0", "]", "[", "1", "]", "else", ":", "last_t", "=", "t", "d", "=", "numpy", ".", "count_nonzero", "(", "img", ">", "t", ")", "/", "size", "densities", ".", "append", "(", "(", "d", ",", "t", ")", ")", "if", "d", "<", "density", ":", "high", "=", "t", "elif", "d", ">=", "density", ":", "# search away from low\r", "low", "=", "t" ]
Find a threshold where the fraction of pixels above the threshold is closest to density where density is (count of pixels above threshold / count of pixels). The highest threshold closest to the desired density will be returned. Use low and high to exclude undesirable thresholds. :param img: target image :type img: 2d :class:`numpy.ndarray` :param density: target density :type density: float between 0.0 and 1.0 :param low: min threshold to test :type low: ubyte :param migh: max threshold to test :type low: ubyte :rtype: ubyte
[ "Find", "a", "threshold", "where", "the", "fraction", "of", "pixels", "above", "the", "threshold", "is", "closest", "to", "density", "where", "density", "is", "(", "count", "of", "pixels", "above", "threshold", "/", "count", "of", "pixels", ")", ".", "The", "highest", "threshold", "closest", "to", "the", "desired", "density", "will", "be", "returned", ".", "Use", "low", "and", "high", "to", "exclude", "undesirable", "thresholds", ".", ":", "param", "img", ":", "target", "image", ":", "type", "img", ":", "2d", ":", "class", ":", "numpy", ".", "ndarray", ":", "param", "density", ":", "target", "density", ":", "type", "density", ":", "float", "between", "0", ".", "0", "and", "1", ".", "0", ":", "param", "low", ":", "min", "threshold", "to", "test", ":", "type", "low", ":", "ubyte", ":", "param", "migh", ":", "max", "threshold", "to", "test", ":", "type", "low", ":", "ubyte", ":", "rtype", ":", "ubyte" ]
python
train
32.027778
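The "density" being targeted above is the count of pixels above the threshold divided by the total pixel count. A small numpy check on a synthetic gradient image (numpy assumed installed, image made up) shows the quantity the binary search converges on:

```python
import numpy

# 256x256 image whose pixel values form a uniform 0..255 gradient
img = numpy.tile(numpy.arange(256, dtype=numpy.uint8), (256, 1))

t = 191
d = numpy.count_nonzero(img > t) / numpy.size(img)
print(d)  # 0.25 -> find_threshold_near_density(img, 0.25) would home in on this threshold
```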
nickmckay/LiPD-utilities
Python/lipd/timeseries.py
https://github.com/nickmckay/LiPD-utilities/blob/5dab6bbeffc5effd68e3a6beaca6b76aa928e860/Python/lipd/timeseries.py#L372-L388
def _extract_columns(d, tmp_tso, pc): """ Extract data from one paleoData column :param dict d: Column dictionary :param dict tmp_tso: TSO dictionary with only root items :return dict: Finished TSO """ logger_ts.info("enter extract_columns") for k, v in d.items(): if isinstance(v, dict): flat_data = _extract_nested(pc + "_" + k, v, {}) for n,m in flat_data.items(): tmp_tso[n] = m else: # Assume if it's not a special nested case, then it's a string value tmp_tso[pc + '_' + k] = v return tmp_tso
[ "def", "_extract_columns", "(", "d", ",", "tmp_tso", ",", "pc", ")", ":", "logger_ts", ".", "info", "(", "\"enter extract_columns\"", ")", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "flat_data", "=", "_extract_nested", "(", "pc", "+", "\"_\"", "+", "k", ",", "v", ",", "{", "}", ")", "for", "n", ",", "m", "in", "flat_data", ".", "items", "(", ")", ":", "tmp_tso", "[", "n", "]", "=", "m", "else", ":", "# Assume if it's not a special nested case, then it's a string value", "tmp_tso", "[", "pc", "+", "'_'", "+", "k", "]", "=", "v", "return", "tmp_tso" ]
Extract data from one paleoData column :param dict d: Column dictionary :param dict tmp_tso: TSO dictionary with only root items :return dict: Finished TSO
[ "Extract", "data", "from", "one", "paleoData", "column", ":", "param", "dict", "d", ":", "Column", "dictionary", ":", "param", "dict", "tmp_tso", ":", "TSO", "dictionary", "with", "only", "root", "items", ":", "return", "dict", ":", "Finished", "TSO" ]
python
train
35.176471
jxtech/wechatpy
wechatpy/client/api/material.py
https://github.com/jxtech/wechatpy/blob/4df0da795618c0895a10f1c2cde9e9d5c0a93aaa/wechatpy/client/api/material.py#L245-L255
def delete_comment(self, msg_data_id, index, user_comment_id): """ 删除评论 """ return self._post( 'comment/delete', data={ 'msg_data_id': msg_data_id, 'index': index, 'user_comment_id': user_comment_id, })
[ "def", "delete_comment", "(", "self", ",", "msg_data_id", ",", "index", ",", "user_comment_id", ")", ":", "return", "self", ".", "_post", "(", "'comment/delete'", ",", "data", "=", "{", "'msg_data_id'", ":", "msg_data_id", ",", "'index'", ":", "index", ",", "'user_comment_id'", ":", "user_comment_id", ",", "}", ")" ]
删除评论
[ "删除评论" ]
python
train
28
ktdreyer/txkoji
txkoji/task.py
https://github.com/ktdreyer/txkoji/blob/a7de380f29f745bf11730b27217208f6d4da7733/txkoji/task.py#L328-L340
def params(self): """ Return a list of parameters in this task's request. If self.request is already a list, simply return it. If self.request is a raw XML-RPC string, parse it and return the params. """ if isinstance(self.request, list): return unmunchify(self.request) (params, _) = xmlrpc.loads(self.request) return params
[ "def", "params", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "request", ",", "list", ")", ":", "return", "unmunchify", "(", "self", ".", "request", ")", "(", "params", ",", "_", ")", "=", "xmlrpc", ".", "loads", "(", "self", ".", "request", ")", "return", "params" ]
Return a list of parameters in this task's request. If self.request is already a list, simply return it. If self.request is a raw XML-RPC string, parse it and return the params.
[ "Return", "a", "list", "of", "parameters", "in", "this", "task", "s", "request", "." ]
python
train
30.692308
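The `xmlrpc` binding used in the snippet is assumed here to behave like the standard library's `xmlrpc.client`, whose `loads()` likewise returns a `(params, methodname)` pair. A quick round-trip sketch with made-up Koji-style arguments:

```python
import xmlrpc.client as xmlrpc

payload = xmlrpc.dumps(("f30-build", {"arch": "x86_64"}), methodname="build")
params, methodname = xmlrpc.loads(payload)
print(methodname, params)  # build ('f30-build', {'arch': 'x86_64'})
```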
klmitch/bark
bark/format.py
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/format.py#L421-L444
def convert(self, request, response, data): """ Performs the desired formatting. :param request: The webob Request object describing the request. :param response: The webob Response object describing the response. :param data: The data dictionary list returned by the prepare() method. :returns: A string, the results of which are the desired conversion. """ result = [] for conv, datum in zip(self.conversions, data): # Only include conversion if it's allowed if conv.modifier.accept(response.status_code): result.append(conv.convert(request, response, datum)) else: result.append('-') return ''.join(result)
[ "def", "convert", "(", "self", ",", "request", ",", "response", ",", "data", ")", ":", "result", "=", "[", "]", "for", "conv", ",", "datum", "in", "zip", "(", "self", ".", "conversions", ",", "data", ")", ":", "# Only include conversion if it's allowed", "if", "conv", ".", "modifier", ".", "accept", "(", "response", ".", "status_code", ")", ":", "result", ".", "append", "(", "conv", ".", "convert", "(", "request", ",", "response", ",", "datum", ")", ")", "else", ":", "result", ".", "append", "(", "'-'", ")", "return", "''", ".", "join", "(", "result", ")" ]
Performs the desired formatting. :param request: The webob Request object describing the request. :param response: The webob Response object describing the response. :param data: The data dictionary list returned by the prepare() method. :returns: A string, the results of which are the desired conversion.
[ "Performs", "the", "desired", "formatting", "." ]
python
train
34.416667
PmagPy/PmagPy
pmagpy/validate_upload2.py
https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload2.py#L78-L247
def read_upload(up_file, data_model=None): """ take a file that should be ready for upload using the data model, check that all required columns are full, and that all numeric data is in fact numeric. print out warnings for any validation problems return True if there were no problems, otherwise return False """ print("-I- Running validation for your upload file") ## Read file f = open(up_file) lines = f.readlines() f.close() data = split_lines(lines) data_dicts = get_dicts(data) ## initialize invalid_data = {} missing_data = {} non_numeric = {} bad_vocab = {} bad_coords = {} invalid_col_names = {} missing_file_type = False ## make sure you have the data model if not data_model: data_model = get_data_model() reqd_file_types = ['er_locations'] provided_file_types = set() if not data_model: return False, None ## Iterate through data # each dictionary is one tab delimited line in a csv file for dictionary in data_dicts: for k, v in list(dictionary.items()): if k == "file_type": # meta data provided_file_types.add(v) continue file_type = dictionary['file_type'] # need to deal with pmag_criteria type file, too item_type = file_type.split('_')[1][:-1] if item_type == 'criteria': item_name = dictionary.get('criteria_definition') elif item_type == 'result': item_name = dictionary.get('pmag_result_name', None) elif item_type in ('specimen', 'sample', 'site', 'location'): item_name = dictionary.get('er_' + item_type + '_name', None) elif item_type == 'age': # get the lowest level er_*_name column that is filled in for dtype in ('specimen', 'sample', 'site', 'location'): item_name = dictionary.get('er_' + dtype + '_name', None) if item_name: break elif item_type == 'measurement': exp_name = dictionary.get('magic_experiment_name') meas_num = dictionary.get('measurement_number') item_name = exp_name + '_' + str(meas_num) else: item_name = None if file_type not in list(data_model.keys()): continue specific_data_model = data_model[file_type] ## Function for building problems list def add_to_invalid_data(item_name, item_type, invalid_data, validation, problem_type): """ correctly create or add to the dictionary of invalid values """ if item_name: if item_type not in invalid_data: invalid_data[item_type] = {} if item_name not in invalid_data[item_type]: invalid_data[item_type][item_name] = {} if problem_type not in invalid_data[item_type][item_name]: invalid_data[item_type][item_name][problem_type] = [] invalid_data[item_type][item_name][problem_type].append(validation) ## Validate for each problem type # check if column header is in the data model invalid_col_name = validate_for_recognized_column(k, v, specific_data_model) if invalid_col_name: if item_type not in list(invalid_col_names.keys()): invalid_col_names[item_type] = set() invalid_col_names[item_type].add(invalid_col_name) # skip to next item, as additional validations won't work # (key is not in the data model) ## new style add_to_invalid_data(item_name, item_type, invalid_data, invalid_col_name, 'invalid_col') # skip to next item, as additional validations won't work # (key is not in the data model) continue # make a list of missing, required data missing_item = validate_for_presence(k, v, specific_data_model) #print 'k, v', k, v if missing_item: if item_type not in list(missing_data.keys()): missing_data[item_type] = set() missing_data[item_type].add(missing_item) if item_name: # don't double count if a site is missing its parent location if item_type == 'age' and missing_item == 'er_location_name': pass # ignore er_synthetic_name (data model 
is incorrect here) if missing_item == 'er_synthetic_name': pass else: add_to_invalid_data(item_name, item_type, invalid_data, missing_item, 'missing_data') # vocabulary problems vocab_problem = validate_for_controlled_vocab(k, v, specific_data_model) if vocab_problem: if item_type not in list(bad_vocab.keys()): bad_vocab[item_type] = set() bad_vocab[item_type].add(vocab_problem) add_to_invalid_data(item_name, item_type, invalid_data, vocab_problem, 'vocab_problem') # illegal coordinates coord_problem = validate_for_coordinates(k, v, specific_data_model) if coord_problem: if item_type not in list(bad_coords.keys()): bad_coords[item_type] = set() bad_coords[item_type].add(coord_problem) add_to_invalid_data(item_name, item_type, invalid_data, coord_problem, 'coordinates') # make a list of data that should be numeric, but aren't number_fail = validate_for_numericality(k, v, specific_data_model) if number_fail: if item_type not in list(non_numeric.keys()): non_numeric[item_type] = set() non_numeric[item_type].add(number_fail) add_to_invalid_data(item_name, item_type, invalid_data, number_fail, 'number_fail') ## Print out all issues for file_type, invalid_names in list(invalid_col_names.items()): print("-W- In your {} file, you are using the following unrecognized columns: {}".format(file_type, ', '.join(invalid_names))) for file_type, wrong_cols in list(non_numeric.items()): print("-W- In your {} file, you must provide only valid numbers, in the following columns: {}".format(file_type, ', '.join(wrong_cols))) for file_type, empty_cols in list(missing_data.items()): print("-W- In your {} file, you are missing data in the following required columns: {}".format(file_type, ', '.join(empty_cols))) for file_type in reqd_file_types: if file_type not in provided_file_types: print("-W- You have not provided a(n) {} type file, which is required data".format(file_type)) missing_file_type = True for file_type, vocab_types in list(bad_vocab.items()): print("-W- In your {} file, you are using an unrecognized value for these controlled vocabularies: {}".format(file_type, ', '.join(vocab_types))) for file_type, coords in list(bad_coords.items()): print("-W- In your {} file, you are using an illegal value for these columns: {}. (Latitude must be between -90 and +90)".format(file_type, ', '.join(coords))) if any((invalid_col_names, non_numeric, missing_data, missing_file_type, bad_vocab, bad_coords)): return False, invalid_data else: print("-I- validation was successful") return True, None
[ "def", "read_upload", "(", "up_file", ",", "data_model", "=", "None", ")", ":", "print", "(", "\"-I- Running validation for your upload file\"", ")", "## Read file", "f", "=", "open", "(", "up_file", ")", "lines", "=", "f", ".", "readlines", "(", ")", "f", ".", "close", "(", ")", "data", "=", "split_lines", "(", "lines", ")", "data_dicts", "=", "get_dicts", "(", "data", ")", "## initialize", "invalid_data", "=", "{", "}", "missing_data", "=", "{", "}", "non_numeric", "=", "{", "}", "bad_vocab", "=", "{", "}", "bad_coords", "=", "{", "}", "invalid_col_names", "=", "{", "}", "missing_file_type", "=", "False", "## make sure you have the data model", "if", "not", "data_model", ":", "data_model", "=", "get_data_model", "(", ")", "reqd_file_types", "=", "[", "'er_locations'", "]", "provided_file_types", "=", "set", "(", ")", "if", "not", "data_model", ":", "return", "False", ",", "None", "## Iterate through data", "# each dictionary is one tab delimited line in a csv file", "for", "dictionary", "in", "data_dicts", ":", "for", "k", ",", "v", "in", "list", "(", "dictionary", ".", "items", "(", ")", ")", ":", "if", "k", "==", "\"file_type\"", ":", "# meta data", "provided_file_types", ".", "add", "(", "v", ")", "continue", "file_type", "=", "dictionary", "[", "'file_type'", "]", "# need to deal with pmag_criteria type file, too", "item_type", "=", "file_type", ".", "split", "(", "'_'", ")", "[", "1", "]", "[", ":", "-", "1", "]", "if", "item_type", "==", "'criteria'", ":", "item_name", "=", "dictionary", ".", "get", "(", "'criteria_definition'", ")", "elif", "item_type", "==", "'result'", ":", "item_name", "=", "dictionary", ".", "get", "(", "'pmag_result_name'", ",", "None", ")", "elif", "item_type", "in", "(", "'specimen'", ",", "'sample'", ",", "'site'", ",", "'location'", ")", ":", "item_name", "=", "dictionary", ".", "get", "(", "'er_'", "+", "item_type", "+", "'_name'", ",", "None", ")", "elif", "item_type", "==", "'age'", ":", "# get the lowest level er_*_name column that is filled in", "for", "dtype", "in", "(", "'specimen'", ",", "'sample'", ",", "'site'", ",", "'location'", ")", ":", "item_name", "=", "dictionary", ".", "get", "(", "'er_'", "+", "dtype", "+", "'_name'", ",", "None", ")", "if", "item_name", ":", "break", "elif", "item_type", "==", "'measurement'", ":", "exp_name", "=", "dictionary", ".", "get", "(", "'magic_experiment_name'", ")", "meas_num", "=", "dictionary", ".", "get", "(", "'measurement_number'", ")", "item_name", "=", "exp_name", "+", "'_'", "+", "str", "(", "meas_num", ")", "else", ":", "item_name", "=", "None", "if", "file_type", "not", "in", "list", "(", "data_model", ".", "keys", "(", ")", ")", ":", "continue", "specific_data_model", "=", "data_model", "[", "file_type", "]", "## Function for building problems list", "def", "add_to_invalid_data", "(", "item_name", ",", "item_type", ",", "invalid_data", ",", "validation", ",", "problem_type", ")", ":", "\"\"\"\n correctly create or add to the dictionary of invalid values\n \"\"\"", "if", "item_name", ":", "if", "item_type", "not", "in", "invalid_data", ":", "invalid_data", "[", "item_type", "]", "=", "{", "}", "if", "item_name", "not", "in", "invalid_data", "[", "item_type", "]", ":", "invalid_data", "[", "item_type", "]", "[", "item_name", "]", "=", "{", "}", "if", "problem_type", "not", "in", "invalid_data", "[", "item_type", "]", "[", "item_name", "]", ":", "invalid_data", "[", "item_type", "]", "[", "item_name", "]", "[", "problem_type", "]", "=", "[", "]", "invalid_data", "[", "item_type", "]", "[", 
"item_name", "]", "[", "problem_type", "]", ".", "append", "(", "validation", ")", "## Validate for each problem type", "# check if column header is in the data model", "invalid_col_name", "=", "validate_for_recognized_column", "(", "k", ",", "v", ",", "specific_data_model", ")", "if", "invalid_col_name", ":", "if", "item_type", "not", "in", "list", "(", "invalid_col_names", ".", "keys", "(", ")", ")", ":", "invalid_col_names", "[", "item_type", "]", "=", "set", "(", ")", "invalid_col_names", "[", "item_type", "]", ".", "add", "(", "invalid_col_name", ")", "# skip to next item, as additional validations won't work", "# (key is not in the data model)", "## new style", "add_to_invalid_data", "(", "item_name", ",", "item_type", ",", "invalid_data", ",", "invalid_col_name", ",", "'invalid_col'", ")", "# skip to next item, as additional validations won't work", "# (key is not in the data model)", "continue", "# make a list of missing, required data", "missing_item", "=", "validate_for_presence", "(", "k", ",", "v", ",", "specific_data_model", ")", "#print 'k, v', k, v", "if", "missing_item", ":", "if", "item_type", "not", "in", "list", "(", "missing_data", ".", "keys", "(", ")", ")", ":", "missing_data", "[", "item_type", "]", "=", "set", "(", ")", "missing_data", "[", "item_type", "]", ".", "add", "(", "missing_item", ")", "if", "item_name", ":", "# don't double count if a site is missing its parent location", "if", "item_type", "==", "'age'", "and", "missing_item", "==", "'er_location_name'", ":", "pass", "# ignore er_synthetic_name (data model is incorrect here)", "if", "missing_item", "==", "'er_synthetic_name'", ":", "pass", "else", ":", "add_to_invalid_data", "(", "item_name", ",", "item_type", ",", "invalid_data", ",", "missing_item", ",", "'missing_data'", ")", "# vocabulary problems", "vocab_problem", "=", "validate_for_controlled_vocab", "(", "k", ",", "v", ",", "specific_data_model", ")", "if", "vocab_problem", ":", "if", "item_type", "not", "in", "list", "(", "bad_vocab", ".", "keys", "(", ")", ")", ":", "bad_vocab", "[", "item_type", "]", "=", "set", "(", ")", "bad_vocab", "[", "item_type", "]", ".", "add", "(", "vocab_problem", ")", "add_to_invalid_data", "(", "item_name", ",", "item_type", ",", "invalid_data", ",", "vocab_problem", ",", "'vocab_problem'", ")", "# illegal coordinates", "coord_problem", "=", "validate_for_coordinates", "(", "k", ",", "v", ",", "specific_data_model", ")", "if", "coord_problem", ":", "if", "item_type", "not", "in", "list", "(", "bad_coords", ".", "keys", "(", ")", ")", ":", "bad_coords", "[", "item_type", "]", "=", "set", "(", ")", "bad_coords", "[", "item_type", "]", ".", "add", "(", "coord_problem", ")", "add_to_invalid_data", "(", "item_name", ",", "item_type", ",", "invalid_data", ",", "coord_problem", ",", "'coordinates'", ")", "# make a list of data that should be numeric, but aren't", "number_fail", "=", "validate_for_numericality", "(", "k", ",", "v", ",", "specific_data_model", ")", "if", "number_fail", ":", "if", "item_type", "not", "in", "list", "(", "non_numeric", ".", "keys", "(", ")", ")", ":", "non_numeric", "[", "item_type", "]", "=", "set", "(", ")", "non_numeric", "[", "item_type", "]", ".", "add", "(", "number_fail", ")", "add_to_invalid_data", "(", "item_name", ",", "item_type", ",", "invalid_data", ",", "number_fail", ",", "'number_fail'", ")", "## Print out all issues", "for", "file_type", ",", "invalid_names", "in", "list", "(", "invalid_col_names", ".", "items", "(", ")", ")", ":", "print", "(", "\"-W- In your {} file, you are 
using the following unrecognized columns: {}\"", ".", "format", "(", "file_type", ",", "', '", ".", "join", "(", "invalid_names", ")", ")", ")", "for", "file_type", ",", "wrong_cols", "in", "list", "(", "non_numeric", ".", "items", "(", ")", ")", ":", "print", "(", "\"-W- In your {} file, you must provide only valid numbers, in the following columns: {}\"", ".", "format", "(", "file_type", ",", "', '", ".", "join", "(", "wrong_cols", ")", ")", ")", "for", "file_type", ",", "empty_cols", "in", "list", "(", "missing_data", ".", "items", "(", ")", ")", ":", "print", "(", "\"-W- In your {} file, you are missing data in the following required columns: {}\"", ".", "format", "(", "file_type", ",", "', '", ".", "join", "(", "empty_cols", ")", ")", ")", "for", "file_type", "in", "reqd_file_types", ":", "if", "file_type", "not", "in", "provided_file_types", ":", "print", "(", "\"-W- You have not provided a(n) {} type file, which is required data\"", ".", "format", "(", "file_type", ")", ")", "missing_file_type", "=", "True", "for", "file_type", ",", "vocab_types", "in", "list", "(", "bad_vocab", ".", "items", "(", ")", ")", ":", "print", "(", "\"-W- In your {} file, you are using an unrecognized value for these controlled vocabularies: {}\"", ".", "format", "(", "file_type", ",", "', '", ".", "join", "(", "vocab_types", ")", ")", ")", "for", "file_type", ",", "coords", "in", "list", "(", "bad_coords", ".", "items", "(", ")", ")", ":", "print", "(", "\"-W- In your {} file, you are using an illegal value for these columns: {}. (Latitude must be between -90 and +90)\"", ".", "format", "(", "file_type", ",", "', '", ".", "join", "(", "coords", ")", ")", ")", "if", "any", "(", "(", "invalid_col_names", ",", "non_numeric", ",", "missing_data", ",", "missing_file_type", ",", "bad_vocab", ",", "bad_coords", ")", ")", ":", "return", "False", ",", "invalid_data", "else", ":", "print", "(", "\"-I- validation was successful\"", ")", "return", "True", ",", "None" ]
take a file that should be ready for upload using the data model, check that all required columns are full, and that all numeric data is in fact numeric. print out warnings for any validation problems return True if there were no problems, otherwise return False
[ "take", "a", "file", "that", "should", "be", "ready", "for", "upload", "using", "the", "data", "model", "check", "that", "all", "required", "columns", "are", "full", "and", "that", "all", "numeric", "data", "is", "in", "fact", "numeric", ".", "print", "out", "warnings", "for", "any", "validation", "problems", "return", "True", "if", "there", "were", "no", "problems", "otherwise", "return", "False" ]
python
train
46.276471
sbusard/wagoner
wagoner/tree.py
https://github.com/sbusard/wagoner/blob/7f83d66bbd0e009e4d4232ffdf319bd5a2a5683b/wagoner/tree.py#L33-L70
def from_table(cls, table, length, prefix=0, flatten=False): """ Extract from the given table a tree for word length, taking only prefixes of prefix length (if greater than 0) into account to compute successors. :param table: the table to extract the tree from; :param length: the length of words generated by the extracted tree; greater or equal to 1; :param prefix: if greater than 0, the length of the prefixes used for computing successors; :param flatten: whether to flatten the table or not; :return: the tree corresponding to words of length from table. """ # Build the expanded tree with necessary suffix and length tree = defaultdict(dict) # The tree pending = {(">", 0)} # The nodes to expand while pending: suffix, size = pending.pop() if size < length: choices = table.weighted_choices(suffix, exclude={"<"}, flatten=flatten) # The word length is not reached yet, expand for successor, weight in choices.items(): expanded = suffix + successor if prefix > 0: expanded = expanded[-prefix:] new_node = (expanded, size + 1) tree[(suffix, size)][new_node] = weight pending.add(new_node) else: choices = table.weighted_choices(suffix, flatten=flatten) # The word length is reached, only add < if present if "<" in choices: tree[(suffix, size)][("<", size + 1)] = 1 else: tree[(suffix, size)] = dict() return cls(cls.trim_tree(tree))
[ "def", "from_table", "(", "cls", ",", "table", ",", "length", ",", "prefix", "=", "0", ",", "flatten", "=", "False", ")", ":", "# Build the expanded tree with necessary suffix and length", "tree", "=", "defaultdict", "(", "dict", ")", "# The tree", "pending", "=", "{", "(", "\">\"", ",", "0", ")", "}", "# The nodes to expand", "while", "pending", ":", "suffix", ",", "size", "=", "pending", ".", "pop", "(", ")", "if", "size", "<", "length", ":", "choices", "=", "table", ".", "weighted_choices", "(", "suffix", ",", "exclude", "=", "{", "\"<\"", "}", ",", "flatten", "=", "flatten", ")", "# The word length is not reached yet, expand", "for", "successor", ",", "weight", "in", "choices", ".", "items", "(", ")", ":", "expanded", "=", "suffix", "+", "successor", "if", "prefix", ">", "0", ":", "expanded", "=", "expanded", "[", "-", "prefix", ":", "]", "new_node", "=", "(", "expanded", ",", "size", "+", "1", ")", "tree", "[", "(", "suffix", ",", "size", ")", "]", "[", "new_node", "]", "=", "weight", "pending", ".", "add", "(", "new_node", ")", "else", ":", "choices", "=", "table", ".", "weighted_choices", "(", "suffix", ",", "flatten", "=", "flatten", ")", "# The word length is reached, only add < if present", "if", "\"<\"", "in", "choices", ":", "tree", "[", "(", "suffix", ",", "size", ")", "]", "[", "(", "\"<\"", ",", "size", "+", "1", ")", "]", "=", "1", "else", ":", "tree", "[", "(", "suffix", ",", "size", ")", "]", "=", "dict", "(", ")", "return", "cls", "(", "cls", ".", "trim_tree", "(", "tree", ")", ")" ]
Extract from the given table a tree for word length, taking only prefixes of prefix length (if greater than 0) into account to compute successors. :param table: the table to extract the tree from; :param length: the length of words generated by the extracted tree; greater or equal to 1; :param prefix: if greater than 0, the length of the prefixes used for computing successors; :param flatten: whether to flatten the table or not; :return: the tree corresponding to words of length from table.
[ "Extract", "from", "the", "given", "table", "a", "tree", "for", "word", "length", "taking", "only", "prefixes", "of", "prefix", "length", "(", "if", "greater", "than", "0", ")", "into", "account", "to", "compute", "successors", ".", ":", "param", "table", ":", "the", "table", "to", "extract", "the", "tree", "from", ";", ":", "param", "length", ":", "the", "length", "of", "words", "generated", "by", "the", "extracted", "tree", ";", "greater", "or", "equal", "to", "1", ";", ":", "param", "prefix", ":", "if", "greater", "than", "0", "the", "length", "of", "the", "prefixes", "used", "for", "computing", "successors", ";", ":", "param", "flatten", ":", "whether", "to", "flatten", "the", "table", "or", "not", ";", ":", "return", ":", "the", "tree", "corresponding", "to", "words", "of", "length", "from", "table", "." ]
python
train
48.394737
mitsei/dlkit
dlkit/handcar/repository/objects.py
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/handcar/repository/objects.py#L1149-L1169
def set_principal_credit_string(self, credit_string=None): """Sets the principal credit string. :param credit_string: the new credit string :type credit_string: ``string`` :raise: ``InvalidArgument`` -- ``credit_string`` is invalid :raise: ``NoAccess`` -- ``Metadata.isReadOnly()`` is ``true`` :raise: ``NullArgument`` -- ``credit_string`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ if credit_string is None: raise NullArgument() metadata = Metadata(**settings.METADATA['principal_credit_string']) if metadata.is_read_only(): raise NoAccess() if self._is_valid_input(credit_string, metadata, array=False): self._my_map['principalCreditString']['text'] = credit_string else: raise InvalidArgument()
[ "def", "set_principal_credit_string", "(", "self", ",", "credit_string", "=", "None", ")", ":", "if", "credit_string", "is", "None", ":", "raise", "NullArgument", "(", ")", "metadata", "=", "Metadata", "(", "*", "*", "settings", ".", "METADATA", "[", "'principal_credit_string'", "]", ")", "if", "metadata", ".", "is_read_only", "(", ")", ":", "raise", "NoAccess", "(", ")", "if", "self", ".", "_is_valid_input", "(", "credit_string", ",", "metadata", ",", "array", "=", "False", ")", ":", "self", ".", "_my_map", "[", "'principalCreditString'", "]", "[", "'text'", "]", "=", "credit_string", "else", ":", "raise", "InvalidArgument", "(", ")" ]
Sets the principal credit string. :param credit_string: the new credit string :type credit_string: ``string`` :raise: ``InvalidArgument`` -- ``credit_string`` is invalid :raise: ``NoAccess`` -- ``Metadata.isReadOnly()`` is ``true`` :raise: ``NullArgument`` -- ``credit_string`` is ``null`` *compliance: mandatory -- This method must be implemented.*
[ "Sets", "the", "principal", "credit", "string", "." ]
python
train
41.190476
Azure/azure-sdk-for-python
azure-eventgrid/azure/eventgrid/event_grid_client.py
https://github.com/Azure/azure-sdk-for-python/blob/d7306fde32f60a293a7567678692bdad31e4b667/azure-eventgrid/azure/eventgrid/event_grid_client.py#L67-L116
def publish_events( self, topic_hostname, events, custom_headers=None, raw=False, **operation_config): """Publishes a batch of events to an Azure Event Grid topic. :param topic_hostname: The host name of the topic, e.g. topic1.westus2-1.eventgrid.azure.net :type topic_hostname: str :param events: An array of events to be published to Event Grid. :type events: list[~azure.eventgrid.models.EventGridEvent] :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: None or ClientRawResponse if raw=true :rtype: None or ~msrest.pipeline.ClientRawResponse :raises: :class:`HttpOperationError<msrest.exceptions.HttpOperationError>` """ # Construct URL url = self.publish_events.metadata['url'] path_format_arguments = { 'topicHostname': self._serialize.url("topic_hostname", topic_hostname, 'str', skip_quote=True) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._serialize.body(events, '[EventGridEvent]') # Construct and send request request = self._client.post(url, query_parameters) response = self._client.send( request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response
[ "def", "publish_events", "(", "self", ",", "topic_hostname", ",", "events", ",", "custom_headers", "=", "None", ",", "raw", "=", "False", ",", "*", "*", "operation_config", ")", ":", "# Construct URL", "url", "=", "self", ".", "publish_events", ".", "metadata", "[", "'url'", "]", "path_format_arguments", "=", "{", "'topicHostname'", ":", "self", ".", "_serialize", ".", "url", "(", "\"topic_hostname\"", ",", "topic_hostname", ",", "'str'", ",", "skip_quote", "=", "True", ")", "}", "url", "=", "self", ".", "_client", ".", "format_url", "(", "url", ",", "*", "*", "path_format_arguments", ")", "# Construct parameters", "query_parameters", "=", "{", "}", "query_parameters", "[", "'api-version'", "]", "=", "self", ".", "_serialize", ".", "query", "(", "\"self.api_version\"", ",", "self", ".", "api_version", ",", "'str'", ")", "# Construct headers", "header_parameters", "=", "{", "}", "header_parameters", "[", "'Content-Type'", "]", "=", "'application/json; charset=utf-8'", "if", "custom_headers", ":", "header_parameters", ".", "update", "(", "custom_headers", ")", "# Construct body", "body_content", "=", "self", ".", "_serialize", ".", "body", "(", "events", ",", "'[EventGridEvent]'", ")", "# Construct and send request", "request", "=", "self", ".", "_client", ".", "post", "(", "url", ",", "query_parameters", ")", "response", "=", "self", ".", "_client", ".", "send", "(", "request", ",", "header_parameters", ",", "body_content", ",", "stream", "=", "False", ",", "*", "*", "operation_config", ")", "if", "response", ".", "status_code", "not", "in", "[", "200", "]", ":", "raise", "HttpOperationError", "(", "self", ".", "_deserialize", ",", "response", ")", "if", "raw", ":", "client_raw_response", "=", "ClientRawResponse", "(", "None", ",", "response", ")", "return", "client_raw_response" ]
Publishes a batch of events to an Azure Event Grid topic. :param topic_hostname: The host name of the topic, e.g. topic1.westus2-1.eventgrid.azure.net :type topic_hostname: str :param events: An array of events to be published to Event Grid. :type events: list[~azure.eventgrid.models.EventGridEvent] :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: None or ClientRawResponse if raw=true :rtype: None or ~msrest.pipeline.ClientRawResponse :raises: :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
[ "Publishes", "a", "batch", "of", "events", "to", "an", "Azure", "Event", "Grid", "topic", "." ]
python
test
43.32
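A minimal usage sketch for the publish_events method above, assuming the legacy azure-eventgrid data-plane client with key-based TopicCredentials; the topic endpoint, access key, and event fields are placeholders, not values from this record:

import datetime
import uuid

from msrest.authentication import TopicCredentials
from azure.eventgrid import EventGridClient

credentials = TopicCredentials('<topic-access-key>')   # placeholder key
client = EventGridClient(credentials)

# Each dict carries the fields the EventGridEvent model expects.
client.publish_events(
    '<topic-name>.<region>-1.eventgrid.azure.net',      # placeholder topic hostname
    events=[{
        'id': str(uuid.uuid4()),
        'subject': 'demo/items/1',
        'data': {'hello': 'world'},
        'event_type': 'Demo.Items.Created',
        'event_time': datetime.datetime.utcnow(),
        'data_version': '1.0',
    }],
)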
lemieuxl/pyGenClean
pyGenClean/Misc/compare_gold_standard.py
https://github.com/lemieuxl/pyGenClean/blob/6173a48ccc0cf3a3bd711b1f2a1fa16248b8bf55/pyGenClean/Misc/compare_gold_standard.py#L516-L625
def findFlippedSNPs(goldFrqFile1, sourceAlleles, outPrefix): """Find flipped SNPs and flip them in the data1.""" goldAlleles = {} with open(goldFrqFile1, "r") as inputFile: headerIndex = None for i, line in enumerate(inputFile): row = createRowFromPlinkSpacedOutput(line) if i == 0: # This is the header headerIndex = dict([ (row[j], j) for j in xrange(len(row)) ]) # Checking the columns for columnName in ["SNP", "A1", "A2"]: if columnName not in headerIndex: msg = "%(fileName)s: no column named " \ "%(columnName)s" % locals() raise ProgramError(msg) else: snpName = row[headerIndex["SNP"]] allele1 = row[headerIndex["A1"]] allele2 = row[headerIndex["A2"]] alleles = set([allele1, allele2]) if "0" in alleles: alleles.remove("0") goldAlleles[snpName] = alleles # Finding the SNPs to flip toFlipOutputFile = None try: toFlipOutputFile = open(outPrefix + ".snp_to_flip_in_reference", "w") except IOError: msg = "%(outPrefix)s.snp_to_flip_in_reference: can't write " \ "file" % locals() raise ProgramError(msg) toRemoveOutputFile = None try: toRemoveOutputFile = open(outPrefix + ".snp_to_remove", "w") except IOError: msg = "%(outPrefix)s.snp_to_remove: can't write file" % locals() raise ProgramError(msg) toRemoveOutputFileExplanation = None try: toRemoveOutputFileExplanation = open( outPrefix + ".snp_to_remove.explanation", "w", ) print >>toRemoveOutputFileExplanation, "\t".join(["Name", "Reason", "Alleles 1", "Alleles 2"]) except IOError: msg = "%(outPrefix)s.snp_to_remove: can't write file" % locals() raise ProgramError(msg) for snpName in goldAlleles.iterkeys(): alleles1 = goldAlleles[snpName] alleles2 = sourceAlleles[snpName] if (len(alleles1) == 2) and (len(alleles2) == 2): # Both are heterozygous if (({"A", "T"} == alleles1 and {"A", "T"} == alleles2) or ({"C", "G"} == alleles1 and {"C", "G"} == alleles2)): # We can't flip those..., so we remove them print >>toRemoveOutputFile, snpName print >>toRemoveOutputFileExplanation, "\t".join([ snpName, "Undetermined", "".join(alleles1), "".join(alleles2), ]) else: if alleles1 != alleles2: # Let's try the flip one if flipGenotype(alleles1) == alleles2: # We need to flip it print >>toFlipOutputFile, snpName else: # Those SNP are discordant... print >>toRemoveOutputFile, snpName print >>toRemoveOutputFileExplanation, "\t".join([ snpName, "Invalid", "".join(alleles1), "".join(alleles2), ]) else: # We want to remove this SNP, because there is at least one # homozygous individual print >>toRemoveOutputFile, snpName tmp_allele1 = "".join(alleles1) if len(alleles1) == 1: tmp_allele1 += tmp_allele1 tmp_allele2 = "".join(alleles1) if len(alleles1) == 1: tmp_allele2 += tmp_allele2 print >>toRemoveOutputFileExplanation, "\t".join([snpName, "Homozygous", tmp_allele1, tmp_allele2]) # Closing output files toFlipOutputFile.close() toRemoveOutputFile.close() toRemoveOutputFileExplanation.close()
[ "def", "findFlippedSNPs", "(", "goldFrqFile1", ",", "sourceAlleles", ",", "outPrefix", ")", ":", "goldAlleles", "=", "{", "}", "with", "open", "(", "goldFrqFile1", ",", "\"r\"", ")", "as", "inputFile", ":", "headerIndex", "=", "None", "for", "i", ",", "line", "in", "enumerate", "(", "inputFile", ")", ":", "row", "=", "createRowFromPlinkSpacedOutput", "(", "line", ")", "if", "i", "==", "0", ":", "# This is the header", "headerIndex", "=", "dict", "(", "[", "(", "row", "[", "j", "]", ",", "j", ")", "for", "j", "in", "xrange", "(", "len", "(", "row", ")", ")", "]", ")", "# Checking the columns", "for", "columnName", "in", "[", "\"SNP\"", ",", "\"A1\"", ",", "\"A2\"", "]", ":", "if", "columnName", "not", "in", "headerIndex", ":", "msg", "=", "\"%(fileName)s: no column named \"", "\"%(columnName)s\"", "%", "locals", "(", ")", "raise", "ProgramError", "(", "msg", ")", "else", ":", "snpName", "=", "row", "[", "headerIndex", "[", "\"SNP\"", "]", "]", "allele1", "=", "row", "[", "headerIndex", "[", "\"A1\"", "]", "]", "allele2", "=", "row", "[", "headerIndex", "[", "\"A2\"", "]", "]", "alleles", "=", "set", "(", "[", "allele1", ",", "allele2", "]", ")", "if", "\"0\"", "in", "alleles", ":", "alleles", ".", "remove", "(", "\"0\"", ")", "goldAlleles", "[", "snpName", "]", "=", "alleles", "# Finding the SNPs to flip", "toFlipOutputFile", "=", "None", "try", ":", "toFlipOutputFile", "=", "open", "(", "outPrefix", "+", "\".snp_to_flip_in_reference\"", ",", "\"w\"", ")", "except", "IOError", ":", "msg", "=", "\"%(outPrefix)s.snp_to_flip_in_reference: can't write \"", "\"file\"", "%", "locals", "(", ")", "raise", "ProgramError", "(", "msg", ")", "toRemoveOutputFile", "=", "None", "try", ":", "toRemoveOutputFile", "=", "open", "(", "outPrefix", "+", "\".snp_to_remove\"", ",", "\"w\"", ")", "except", "IOError", ":", "msg", "=", "\"%(outPrefix)s.snp_to_remove: can't write file\"", "%", "locals", "(", ")", "raise", "ProgramError", "(", "msg", ")", "toRemoveOutputFileExplanation", "=", "None", "try", ":", "toRemoveOutputFileExplanation", "=", "open", "(", "outPrefix", "+", "\".snp_to_remove.explanation\"", ",", "\"w\"", ",", ")", "print", ">>", "toRemoveOutputFileExplanation", ",", "\"\\t\"", ".", "join", "(", "[", "\"Name\"", ",", "\"Reason\"", ",", "\"Alleles 1\"", ",", "\"Alleles 2\"", "]", ")", "except", "IOError", ":", "msg", "=", "\"%(outPrefix)s.snp_to_remove: can't write file\"", "%", "locals", "(", ")", "raise", "ProgramError", "(", "msg", ")", "for", "snpName", "in", "goldAlleles", ".", "iterkeys", "(", ")", ":", "alleles1", "=", "goldAlleles", "[", "snpName", "]", "alleles2", "=", "sourceAlleles", "[", "snpName", "]", "if", "(", "len", "(", "alleles1", ")", "==", "2", ")", "and", "(", "len", "(", "alleles2", ")", "==", "2", ")", ":", "# Both are heterozygous", "if", "(", "(", "{", "\"A\"", ",", "\"T\"", "}", "==", "alleles1", "and", "{", "\"A\"", ",", "\"T\"", "}", "==", "alleles2", ")", "or", "(", "{", "\"C\"", ",", "\"G\"", "}", "==", "alleles1", "and", "{", "\"C\"", ",", "\"G\"", "}", "==", "alleles2", ")", ")", ":", "# We can't flip those..., so we remove them", "print", ">>", "toRemoveOutputFile", ",", "snpName", "print", ">>", "toRemoveOutputFileExplanation", ",", "\"\\t\"", ".", "join", "(", "[", "snpName", ",", "\"Undetermined\"", ",", "\"\"", ".", "join", "(", "alleles1", ")", ",", "\"\"", ".", "join", "(", "alleles2", ")", ",", "]", ")", "else", ":", "if", "alleles1", "!=", "alleles2", ":", "# Let's try the flip one", "if", "flipGenotype", "(", "alleles1", ")", "==", "alleles2", ":", "# 
We need to flip it", "print", ">>", "toFlipOutputFile", ",", "snpName", "else", ":", "# Those SNP are discordant...", "print", ">>", "toRemoveOutputFile", ",", "snpName", "print", ">>", "toRemoveOutputFileExplanation", ",", "\"\\t\"", ".", "join", "(", "[", "snpName", ",", "\"Invalid\"", ",", "\"\"", ".", "join", "(", "alleles1", ")", ",", "\"\"", ".", "join", "(", "alleles2", ")", ",", "]", ")", "else", ":", "# We want to remove this SNP, because there is at least one", "# homozygous individual", "print", ">>", "toRemoveOutputFile", ",", "snpName", "tmp_allele1", "=", "\"\"", ".", "join", "(", "alleles1", ")", "if", "len", "(", "alleles1", ")", "==", "1", ":", "tmp_allele1", "+=", "tmp_allele1", "tmp_allele2", "=", "\"\"", ".", "join", "(", "alleles1", ")", "if", "len", "(", "alleles1", ")", "==", "1", ":", "tmp_allele2", "+=", "tmp_allele2", "print", ">>", "toRemoveOutputFileExplanation", ",", "\"\\t\"", ".", "join", "(", "[", "snpName", ",", "\"Homozygous\"", ",", "tmp_allele1", ",", "tmp_allele2", "]", ")", "# Closing output files", "toFlipOutputFile", ".", "close", "(", ")", "toRemoveOutputFile", ".", "close", "(", ")", "toRemoveOutputFileExplanation", ".", "close", "(", ")" ]
Find flipped SNPs and flip them in the data1.
[ "Find", "flipped", "SNPs", "and", "flip", "them", "in", "the", "data1", "." ]
python
train
39.309091
hubo1016/vlcp
vlcp/utils/http.py
https://github.com/hubo1016/vlcp/blob/239055229ec93a99cc7e15208075724ccf543bd1/vlcp/utils/http.py#L263-L270
def nl2br(self, text): """ Replace \'\n\' with \'<br/>\\n\' """ if isinstance(text, bytes): return text.replace(b'\n', b'<br/>\n') else: return text.replace('\n', '<br/>\n')
[ "def", "nl2br", "(", "self", ",", "text", ")", ":", "if", "isinstance", "(", "text", ",", "bytes", ")", ":", "return", "text", ".", "replace", "(", "b'\\n'", ",", "b'<br/>\\n'", ")", "else", ":", "return", "text", ".", "replace", "(", "'\\n'", ",", "'<br/>\\n'", ")" ]
Replace \'\n\' with \'<br/>\\n\'
[ "Replace", "\\", "\\", "n", "\\", "with", "\\", "<br", "/", ">", "\\\\", "n", "\\" ]
python
train
28.75
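A behaviour sketch for the nl2br method above; it is defined as an instance method whose self argument is never used, so None stands in for it purely for illustration:

# str and bytes inputs are handled by separate branches, as in the method body.
assert nl2br(None, 'line1\nline2') == 'line1<br/>\nline2'
assert nl2br(None, b'line1\nline2') == b'line1<br/>\nline2'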
enkore/i3pystatus
i3pystatus/pulseaudio/__init__.py
https://github.com/enkore/i3pystatus/blob/14cfde967cecf79b40e223e35a04600f4c875af7/i3pystatus/pulseaudio/__init__.py#L141-L148
def update_cb(self, context, t, idx, userdata): """A sink property changed, calls request_update""" if t & PA_SUBSCRIPTION_EVENT_FACILITY_MASK == PA_SUBSCRIPTION_EVENT_SERVER: pa_operation_unref( pa_context_get_server_info(context, self._server_info_cb, None)) self.request_update(context)
[ "def", "update_cb", "(", "self", ",", "context", ",", "t", ",", "idx", ",", "userdata", ")", ":", "if", "t", "&", "PA_SUBSCRIPTION_EVENT_FACILITY_MASK", "==", "PA_SUBSCRIPTION_EVENT_SERVER", ":", "pa_operation_unref", "(", "pa_context_get_server_info", "(", "context", ",", "self", ".", "_server_info_cb", ",", "None", ")", ")", "self", ".", "request_update", "(", "context", ")" ]
A sink property changed, calls request_update
[ "A", "sink", "property", "changed", "calls", "request_update" ]
python
train
42
childsish/lhc-python
lhc/misc/performance_measures.py
https://github.com/childsish/lhc-python/blob/0a669f46a40a39f24d28665e8b5b606dc7e86beb/lhc/misc/performance_measures.py#L64-L79
def mcc(tp, tn, fp, fn): """ Matthew's Correlation Coefficient [-1, 1] 0 = you're just guessing :param int tp: number of true positives :param int tn: number of true negatives :param int fp: number of false positives :param int fn: number of false negatives :rtype: float """ if tp + fp == 0 or tp + fn == 0 or tn + fp == 0 or tn + fn == 0: den = 1.0 else: den = math.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn)) return (tp * tn - fp * fn) / den
[ "def", "mcc", "(", "tp", ",", "tn", ",", "fp", ",", "fn", ")", ":", "if", "tp", "+", "fp", "==", "0", "or", "tp", "+", "fn", "==", "0", "or", "tn", "+", "fp", "==", "0", "or", "tn", "+", "fn", "==", "0", ":", "den", "=", "1.0", "else", ":", "den", "=", "math", ".", "sqrt", "(", "(", "tp", "+", "fp", ")", "*", "(", "tp", "+", "fn", ")", "*", "(", "tn", "+", "fp", ")", "*", "(", "tn", "+", "fn", ")", ")", "return", "(", "tp", "*", "tn", "-", "fp", "*", "fn", ")", "/", "den" ]
Matthew's Correlation Coefficient [-1, 1] 0 = you're just guessing :param int tp: number of true positives :param int tn: number of true negatives :param int fp: number of false positives :param int fn: number of false negatives :rtype: float
[ "Matthew", "s", "Correlation", "Coefficient", "[", "-", "1", "1", "]", "0", "=", "you", "re", "just", "guessing", ":", "param", "int", "tp", ":", "number", "of", "true", "positives", ":", "param", "int", "tn", ":", "number", "of", "true", "negatives", ":", "param", "int", "fp", ":", "number", "of", "false", "positives", ":", "param", "int", "fn", ":", "number", "of", "false", "negatives", ":", "rtype", ":", "float" ]
python
train
31.75
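A quick worked check of the mcc function above with illustrative counts: the numerator is 50*40 - 5*5 = 1975 and the denominator is sqrt(55*55*45*45) = 2475, giving a score of about 0.798.

# Assumes the function above is importable from its module path, e.g.
# from lhc.misc.performance_measures import mcc
print(mcc(tp=50, tn=40, fp=5, fn=5))    # ~0.798
print(mcc(tp=0, tn=90, fp=10, fn=0))    # 0.0 -- a zero margin triggers the den = 1.0 guard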
swevm/scaleio-py
scaleiopy/im.py
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/im.py#L275-L299
def set_abort_pending(self, newstate): """ Method to set Abort state if something goes wrong during provisioning Method also used to finish provisioning process when all is completed Method: POST """ self.logger.debug("set_abort_pending(" + "{})".format(newstate)) # NOT TO BE USED #default_minimal_cluster_config = '{"installationId":null,"mdmIPs":["192.168.102.12","192.168.102.13"],"mdmPassword":"Scaleio123","liaPassword":"Scaleio123","licenseKey":null,"primaryMdm":{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.12"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"managementIPs":null,"mdmIPs":["192.168.102.12"]},"secondaryMdm":{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.13"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"managementIPs":null,"mdmIPs":["192.168.102.13"]},"tb":{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.11"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"tbIPs":["192.168.102.11"]},"sdsList":[{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.11"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"sdsName":"SDS_[192.168.102.11]","protectionDomain":"default","faultSet":null,"allIPs":["192.168.102.11"],"sdsOnlyIPs":null,"sdcOnlyIPs":null,"devices":[{"devicePath":"/home/vagrant/scaleio1","storagePool":null,"deviceName":null}],"optimized":false,"port":7072},{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.12"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"sdsName":"SDS_[192.168.102.12]","protectionDomain":"default","faultSet":null,"allIPs":["192.168.102.12"],"sdsOnlyIPs":null,"sdcOnlyIPs":null,"devices":[{"devicePath":"/home/vagrant/scaleio1","storagePool":null,"deviceName":null}],"optimized":false,"port":7072},{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.13"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"sdsName":"SDS_[192.168.102.13]","protectionDomain":"default","faultSet":null,"allIPs":["192.168.102.13"],"sdsOnlyIPs":null,"sdcOnlyIPs":null,"devices":[{"devicePath":"/home/vagrant/scaleio1","storagePool":null,"deviceName":null}],"optimized":false,"port":7072}],"sdcList":[{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.11"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"splitterRpaIp":null},{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.12"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"splitterRpaIp":null},{"node":{"ostype":"linux","nodeName":null,"nodeIPs":["192.168.102.13"],"domain":null,"userName":"root","password":"vagrant","liaPassword":null},"nodeInfo":null,"splitterRpaIp":null}],"callHomeConfiguration":null,"remoteSyslogConfiguration":null}' r1 = self._im_session.post( "{}/{}".format(self._im_api_url,"types/Command/instances/actions/abortPending"), headers={'Content-type':'application/json','Version':'1.0'}, verify=self._im_verify_ssl, data = newstate, stream=True ) if not r1.ok: # Something went wrong self.logger.error("Error set_abort_pending(" +"{})".format(newstate)) #print "Response after set_abort_pending()" # RESPONSE NEED TO BE WRAPPED IN try/catch. Cannot assume JSON is returned. #print r1.text #pprint (json.loads(r1.text)) return r1.text
[ "def", "set_abort_pending", "(", "self", ",", "newstate", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"set_abort_pending(\"", "+", "\"{})\"", ".", "format", "(", "newstate", ")", ")", "# NOT TO BE USED", "#default_minimal_cluster_config = '{\"installationId\":null,\"mdmIPs\":[\"192.168.102.12\",\"192.168.102.13\"],\"mdmPassword\":\"Scaleio123\",\"liaPassword\":\"Scaleio123\",\"licenseKey\":null,\"primaryMdm\":{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.12\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"managementIPs\":null,\"mdmIPs\":[\"192.168.102.12\"]},\"secondaryMdm\":{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.13\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"managementIPs\":null,\"mdmIPs\":[\"192.168.102.13\"]},\"tb\":{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.11\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"tbIPs\":[\"192.168.102.11\"]},\"sdsList\":[{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.11\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"sdsName\":\"SDS_[192.168.102.11]\",\"protectionDomain\":\"default\",\"faultSet\":null,\"allIPs\":[\"192.168.102.11\"],\"sdsOnlyIPs\":null,\"sdcOnlyIPs\":null,\"devices\":[{\"devicePath\":\"/home/vagrant/scaleio1\",\"storagePool\":null,\"deviceName\":null}],\"optimized\":false,\"port\":7072},{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.12\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"sdsName\":\"SDS_[192.168.102.12]\",\"protectionDomain\":\"default\",\"faultSet\":null,\"allIPs\":[\"192.168.102.12\"],\"sdsOnlyIPs\":null,\"sdcOnlyIPs\":null,\"devices\":[{\"devicePath\":\"/home/vagrant/scaleio1\",\"storagePool\":null,\"deviceName\":null}],\"optimized\":false,\"port\":7072},{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.13\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"sdsName\":\"SDS_[192.168.102.13]\",\"protectionDomain\":\"default\",\"faultSet\":null,\"allIPs\":[\"192.168.102.13\"],\"sdsOnlyIPs\":null,\"sdcOnlyIPs\":null,\"devices\":[{\"devicePath\":\"/home/vagrant/scaleio1\",\"storagePool\":null,\"deviceName\":null}],\"optimized\":false,\"port\":7072}],\"sdcList\":[{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.11\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"splitterRpaIp\":null},{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.12\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"splitterRpaIp\":null},{\"node\":{\"ostype\":\"linux\",\"nodeName\":null,\"nodeIPs\":[\"192.168.102.13\"],\"domain\":null,\"userName\":\"root\",\"password\":\"vagrant\",\"liaPassword\":null},\"nodeInfo\":null,\"splitterRpaIp\":null}],\"callHomeConfiguration\":null,\"remoteSyslogConfiguration\":null}'", "r1", "=", "self", ".", "_im_session", ".", "post", "(", "\"{}/{}\"", ".", "format", "(", "self", ".", "_im_api_url", ",", "\"types/Command/instances/actions/abortPending\"", ")", ",", "headers", "=", "{", "'Content-type'", ":", 
"'application/json'", ",", "'Version'", ":", "'1.0'", "}", ",", "verify", "=", "self", ".", "_im_verify_ssl", ",", "data", "=", "newstate", ",", "stream", "=", "True", ")", "if", "not", "r1", ".", "ok", ":", "# Something went wrong", "self", ".", "logger", ".", "error", "(", "\"Error set_abort_pending(\"", "+", "\"{})\"", ".", "format", "(", "newstate", ")", ")", "#print \"Response after set_abort_pending()\"", "# RESPONSE NEED TO BE WRAPPED IN try/catch. Cannot assume JSON is returned.", "#print r1.text", "#pprint (json.loads(r1.text))", "return", "r1", ".", "text" ]
Method to set Abort state if something goes wrong during provisioning Method also used to finish provisioning process when all is completed Method: POST
[ "Method", "to", "set", "Abort", "state", "if", "something", "goes", "wrong", "during", "provisioning", "Method", "also", "used", "to", "finish", "provisioning", "process", "when", "all", "is", "completed", "Method", ":", "POST" ]
python
train
148.12
twilio/twilio-python
twilio/rest/api/v2010/account/conference/__init__.py
https://github.com/twilio/twilio-python/blob/c867895f55dcc29f522e6e8b8868d0d18483132f/twilio/rest/api/v2010/account/conference/__init__.py#L351-L364
def recordings(self): """ Access the recordings :returns: twilio.rest.api.v2010.account.conference.recording.RecordingList :rtype: twilio.rest.api.v2010.account.conference.recording.RecordingList """ if self._recordings is None: self._recordings = RecordingList( self._version, account_sid=self._solution['account_sid'], conference_sid=self._solution['sid'], ) return self._recordings
[ "def", "recordings", "(", "self", ")", ":", "if", "self", ".", "_recordings", "is", "None", ":", "self", ".", "_recordings", "=", "RecordingList", "(", "self", ".", "_version", ",", "account_sid", "=", "self", ".", "_solution", "[", "'account_sid'", "]", ",", "conference_sid", "=", "self", ".", "_solution", "[", "'sid'", "]", ",", ")", "return", "self", ".", "_recordings" ]
Access the recordings :returns: twilio.rest.api.v2010.account.conference.recording.RecordingList :rtype: twilio.rest.api.v2010.account.conference.recording.RecordingList
[ "Access", "the", "recordings" ]
python
train
35.714286
jazzband/django-widget-tweaks
widget_tweaks/templatetags/widget_tweaks.py
https://github.com/jazzband/django-widget-tweaks/blob/f50ee92410d68e81528a7643a10544e7331af8fb/widget_tweaks/templatetags/widget_tweaks.py#L97-L105
def field_type(field): """ Template filter that returns field class name (in lower case). E.g. if field is CharField then {{ field|field_type }} will return 'charfield'. """ if hasattr(field, 'field') and field.field: return field.field.__class__.__name__.lower() return ''
[ "def", "field_type", "(", "field", ")", ":", "if", "hasattr", "(", "field", ",", "'field'", ")", "and", "field", ".", "field", ":", "return", "field", ".", "field", ".", "__class__", ".", "__name__", ".", "lower", "(", ")", "return", "''" ]
Template filter that returns field class name (in lower case). E.g. if field is CharField then {{ field|field_type }} will return 'charfield'.
[ "Template", "filter", "that", "returns", "field", "class", "name", "(", "in", "lower", "case", ")", ".", "E", ".", "g", ".", "if", "field", "is", "CharField", "then", "{{", "field|field_type", "}}", "will", "return", "charfield", "." ]
python
train
33.444444
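In templates the filter above is normally applied as form.field|field_type after loading widget_tweaks; the following Python-level sketch shows the same behaviour, with the form class invented for illustration:

from django import forms

class ContactForm(forms.Form):
    name = forms.CharField()

form = ContactForm()
print(field_type(form['name']))   # 'charfield' -- the BoundField exposes .field
print(field_type(None))           # ''          -- objects without a .field fall through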
echinopsii/net.echinopsii.ariane.community.cli.python3
ariane_clip3/directory.py
https://github.com/echinopsii/net.echinopsii.ariane.community.cli.python3/blob/0a7feddebf66fee4bef38d64f456d93a7e9fcd68/ariane_clip3/directory.py#L4139-L4161
def remove(self): """ remove this object from Ariane server :return: """ LOGGER.debug("Environment.remove") if self.id is None: return None else: params = { 'id': self.id } args = {'http_operation': 'GET', 'operation_path': 'delete', 'parameters': params} response = EnvironmentService.requester.call(args) if response.rc != 0: LOGGER.warning( 'Environment.remove - Problem while deleting environment ' + self.name + '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + " (" + str(response.rc) + ")" ) return self else: return None
[ "def", "remove", "(", "self", ")", ":", "LOGGER", ".", "debug", "(", "\"Environment.remove\"", ")", "if", "self", ".", "id", "is", "None", ":", "return", "None", "else", ":", "params", "=", "{", "'id'", ":", "self", ".", "id", "}", "args", "=", "{", "'http_operation'", ":", "'GET'", ",", "'operation_path'", ":", "'delete'", ",", "'parameters'", ":", "params", "}", "response", "=", "EnvironmentService", ".", "requester", ".", "call", "(", "args", ")", "if", "response", ".", "rc", "!=", "0", ":", "LOGGER", ".", "warning", "(", "'Environment.remove - Problem while deleting environment '", "+", "self", ".", "name", "+", "'. Reason: '", "+", "str", "(", "response", ".", "response_content", ")", "+", "'-'", "+", "str", "(", "response", ".", "error_message", ")", "+", "\" (\"", "+", "str", "(", "response", ".", "rc", ")", "+", "\")\"", ")", "return", "self", "else", ":", "return", "None" ]
remove this object from Ariane server :return:
[ "remove", "this", "object", "from", "Ariane", "server", ":", "return", ":" ]
python
train
35.652174
watson-developer-cloud/python-sdk
ibm_watson/assistant_v1.py
https://github.com/watson-developer-cloud/python-sdk/blob/4c2c9df4466fcde88975da9ecd834e6ba95eb353/ibm_watson/assistant_v1.py#L534-L593
def create_intent(self, workspace_id, intent, description=None, examples=None, **kwargs): """ Create intent. Create a new intent. This operation is limited to 2000 requests per 30 minutes. For more information, see **Rate limiting**. :param str workspace_id: Unique identifier of the workspace. :param str intent: The name of the intent. This string must conform to the following restrictions: - It can contain only Unicode alphanumeric, underscore, hyphen, and dot characters. - It cannot begin with the reserved prefix `sys-`. - It must be no longer than 128 characters. :param str description: The description of the intent. This string cannot contain carriage return, newline, or tab characters, and it must be no longer than 128 characters. :param list[Example] examples: An array of user input examples for the intent. :param dict headers: A `dict` containing the request headers :return: A `DetailedResponse` containing the result, headers and HTTP status code. :rtype: DetailedResponse """ if workspace_id is None: raise ValueError('workspace_id must be provided') if intent is None: raise ValueError('intent must be provided') if examples is not None: examples = [self._convert_model(x, Example) for x in examples] headers = {} if 'headers' in kwargs: headers.update(kwargs.get('headers')) sdk_headers = get_sdk_headers('conversation', 'V1', 'create_intent') headers.update(sdk_headers) params = {'version': self.version} data = { 'intent': intent, 'description': description, 'examples': examples } url = '/v1/workspaces/{0}/intents'.format( *self._encode_path_vars(workspace_id)) response = self.request( method='POST', url=url, headers=headers, params=params, json=data, accept_json=True) return response
[ "def", "create_intent", "(", "self", ",", "workspace_id", ",", "intent", ",", "description", "=", "None", ",", "examples", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "workspace_id", "is", "None", ":", "raise", "ValueError", "(", "'workspace_id must be provided'", ")", "if", "intent", "is", "None", ":", "raise", "ValueError", "(", "'intent must be provided'", ")", "if", "examples", "is", "not", "None", ":", "examples", "=", "[", "self", ".", "_convert_model", "(", "x", ",", "Example", ")", "for", "x", "in", "examples", "]", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'conversation'", ",", "'V1'", ",", "'create_intent'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "data", "=", "{", "'intent'", ":", "intent", ",", "'description'", ":", "description", ",", "'examples'", ":", "examples", "}", "url", "=", "'/v1/workspaces/{0}/intents'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "workspace_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'POST'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "json", "=", "data", ",", "accept_json", "=", "True", ")", "return", "response" ]
Create intent. Create a new intent. This operation is limited to 2000 requests per 30 minutes. For more information, see **Rate limiting**. :param str workspace_id: Unique identifier of the workspace. :param str intent: The name of the intent. This string must conform to the following restrictions: - It can contain only Unicode alphanumeric, underscore, hyphen, and dot characters. - It cannot begin with the reserved prefix `sys-`. - It must be no longer than 128 characters. :param str description: The description of the intent. This string cannot contain carriage return, newline, or tab characters, and it must be no longer than 128 characters. :param list[Example] examples: An array of user input examples for the intent. :param dict headers: A `dict` containing the request headers :return: A `DetailedResponse` containing the result, headers and HTTP status code. :rtype: DetailedResponse
[ "Create", "intent", "." ]
python
train
36.766667
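A hedged sketch of calling create_intent above; it assumes an already-configured AssistantV1 client named assistant (construction and credentials elided), and the workspace id is a placeholder:

response = assistant.create_intent(
    workspace_id='<workspace-id>',
    intent='greetings',
    description='User opens the conversation with a greeting',
)
# `response` is the DetailedResponse described in the docstring above.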
nickmilon/Hellas
Hellas/Athens.py
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Athens.py#L83-L88
def dms2dd(degrees, minutes, seconds, direction): """convert degrees, minutes, seconds to dd :param string direction: one of N S W E """ dd = (degrees + minutes/60.0) + (seconds/3600.0) # 60.0 fraction for python 2+ compatibility return dd * -1 if direction == 'S' or direction == 'W' else dd
[ "def", "dms2dd", "(", "degrees", ",", "minutes", ",", "seconds", ",", "direction", ")", ":", "dd", "=", "(", "degrees", "+", "minutes", "/", "60.0", ")", "+", "(", "seconds", "/", "3600.0", ")", "# 60.0 fraction for python 2+ compatibility", "return", "dd", "*", "-", "1", "if", "direction", "==", "'S'", "or", "direction", "==", "'W'", "else", "dd" ]
convert degrees, minutes, seconds to dd :param string direction: one of N S W E
[ "convert", "degrees", "minutes", "seconds", "to", "dd", ":", "param", "string", "direction", ":", "one", "of", "N", "S", "W", "E" ]
python
train
52.833333
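Two worked conversions for dms2dd above (the coordinates are illustrative): 40 degrees 26 minutes 46 seconds N gives 40 + 26/60 + 46/3600, roughly 40.4461, and a W or S direction flips the sign.

print(dms2dd(40, 26, 46, 'N'))   # ~40.446111
print(dms2dd(79, 58, 56, 'W'))   # ~-79.982222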
echinopsii/net.echinopsii.ariane.community.cli.python3
ariane_clip3/directory.py
https://github.com/echinopsii/net.echinopsii.ariane.community.cli.python3/blob/0a7feddebf66fee4bef38d64f456d93a7e9fcd68/ariane_clip3/directory.py#L2461-L2495
def add_team(self, team, sync=True): """ add a team to this OS instance. :param team: the team to add on this OS instance :param sync: If sync=True(default) synchronize with Ariane server. If sync=False, add the team object on list to be added on next save(). :return: """ LOGGER.debug("OSInstance.add_team") if not sync: self.team_2_add.append(team) else: if team.id is None: team.save() if self.id is not None and team.id is not None: params = { 'id': self.id, 'teamID': team.id } args = {'http_operation': 'GET', 'operation_path': 'update/teams/add', 'parameters': params} response = OSInstanceService.requester.call(args) if response.rc != 0: LOGGER.warning( 'OSInstance.add_team - Problem while updating OS instance ' + self.name + '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + " (" + str(response.rc) + ")" ) else: self.team_ids.append(team.id) team.osi_ids.append(self.id) else: LOGGER.warning( 'OSInstance.add_team - Problem while updating OS instance ' + self.name + '. Reason: application ' + team.name + ' id is None' )
[ "def", "add_team", "(", "self", ",", "team", ",", "sync", "=", "True", ")", ":", "LOGGER", ".", "debug", "(", "\"OSInstance.add_team\"", ")", "if", "not", "sync", ":", "self", ".", "team_2_add", ".", "append", "(", "team", ")", "else", ":", "if", "team", ".", "id", "is", "None", ":", "team", ".", "save", "(", ")", "if", "self", ".", "id", "is", "not", "None", "and", "team", ".", "id", "is", "not", "None", ":", "params", "=", "{", "'id'", ":", "self", ".", "id", ",", "'teamID'", ":", "team", ".", "id", "}", "args", "=", "{", "'http_operation'", ":", "'GET'", ",", "'operation_path'", ":", "'update/teams/add'", ",", "'parameters'", ":", "params", "}", "response", "=", "OSInstanceService", ".", "requester", ".", "call", "(", "args", ")", "if", "response", ".", "rc", "!=", "0", ":", "LOGGER", ".", "warning", "(", "'OSInstance.add_team - Problem while updating OS instance '", "+", "self", ".", "name", "+", "'. Reason: '", "+", "str", "(", "response", ".", "response_content", ")", "+", "'-'", "+", "str", "(", "response", ".", "error_message", ")", "+", "\" (\"", "+", "str", "(", "response", ".", "rc", ")", "+", "\")\"", ")", "else", ":", "self", ".", "team_ids", ".", "append", "(", "team", ".", "id", ")", "team", ".", "osi_ids", ".", "append", "(", "self", ".", "id", ")", "else", ":", "LOGGER", ".", "warning", "(", "'OSInstance.add_team - Problem while updating OS instance '", "+", "self", ".", "name", "+", "'. Reason: application '", "+", "team", ".", "name", "+", "' id is None'", ")" ]
add a team to this OS instance. :param team: the team to add on this OS instance :param sync: If sync=True(default) synchronize with Ariane server. If sync=False, add the team object on list to be added on next save(). :return:
[ "add", "a", "team", "to", "this", "OS", "instance", ".", ":", "param", "team", ":", "the", "team", "to", "add", "on", "this", "OS", "instance", ":", "param", "sync", ":", "If", "sync", "=", "True", "(", "default", ")", "synchronize", "with", "Ariane", "server", ".", "If", "sync", "=", "False", "add", "the", "team", "object", "on", "list", "to", "be", "added", "on", "next", "save", "()", ".", ":", "return", ":" ]
python
train
43.942857
erdewit/ib_insync
ib_insync/util.py
https://github.com/erdewit/ib_insync/blob/d0646a482590f5cb7bfddbd1f0870f8c4bc1df80/ib_insync/util.py#L52-L69
def tree(obj): """ Convert object to a tree of lists, dicts and simple values. The result can be serialized to JSON. """ from .objects import Object if isinstance(obj, (bool, int, float, str, bytes)): return obj elif isinstance(obj, (datetime.date, datetime.time)): return obj.isoformat() elif isinstance(obj, dict): return {k: tree(v) for k, v in obj.items()} elif isinstance(obj, (list, tuple, set)): return [tree(i) for i in obj] elif isinstance(obj, Object): return {obj.__class__.__qualname__: tree(obj.nonDefaults())} else: return str(obj)
[ "def", "tree", "(", "obj", ")", ":", "from", ".", "objects", "import", "Object", "if", "isinstance", "(", "obj", ",", "(", "bool", ",", "int", ",", "float", ",", "str", ",", "bytes", ")", ")", ":", "return", "obj", "elif", "isinstance", "(", "obj", ",", "(", "datetime", ".", "date", ",", "datetime", ".", "time", ")", ")", ":", "return", "obj", ".", "isoformat", "(", ")", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "{", "k", ":", "tree", "(", "v", ")", "for", "k", ",", "v", "in", "obj", ".", "items", "(", ")", "}", "elif", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ",", "set", ")", ")", ":", "return", "[", "tree", "(", "i", ")", "for", "i", "in", "obj", "]", "elif", "isinstance", "(", "obj", ",", "Object", ")", ":", "return", "{", "obj", ".", "__class__", ".", "__qualname__", ":", "tree", "(", "obj", ".", "nonDefaults", "(", ")", ")", "}", "else", ":", "return", "str", "(", "obj", ")" ]
Convert object to a tree of lists, dicts and simple values. The result can be serialized to JSON.
[ "Convert", "object", "to", "a", "tree", "of", "lists", "dicts", "and", "simple", "values", ".", "The", "result", "can", "be", "serialized", "to", "JSON", "." ]
python
train
34.444444
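A small sketch of the tree helper above on plain Python values; dates become ISO strings and tuples become lists, so the result round-trips through json:

import datetime
import json

snapshot = tree({'when': datetime.date(2020, 1, 2), 'values': (1, 2.5, 'x')})
print(json.dumps(snapshot))   # {"when": "2020-01-02", "values": [1, 2.5, "x"]}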
djaodjin/djaodjin-deployutils
deployutils/apps/django/mockup/views.py
https://github.com/djaodjin/djaodjin-deployutils/blob/a0fe3cf3030dbbf09025c69ce75a69b326565dd8/deployutils/apps/django/mockup/views.py#L51-L65
def validate_redirect_url(next_url): """ Returns the next_url path if next_url matches allowed hosts. """ if not next_url: return None parts = urlparse(next_url) if parts.netloc: domain, _ = split_domain_port(parts.netloc) allowed_hosts = (['*'] if django_settings.DEBUG else django_settings.ALLOWED_HOSTS) if not (domain and validate_host(domain, allowed_hosts)): return None return urlunparse(("", "", parts.path, parts.params, parts.query, parts.fragment))
[ "def", "validate_redirect_url", "(", "next_url", ")", ":", "if", "not", "next_url", ":", "return", "None", "parts", "=", "urlparse", "(", "next_url", ")", "if", "parts", ".", "netloc", ":", "domain", ",", "_", "=", "split_domain_port", "(", "parts", ".", "netloc", ")", "allowed_hosts", "=", "(", "[", "'*'", "]", "if", "django_settings", ".", "DEBUG", "else", "django_settings", ".", "ALLOWED_HOSTS", ")", "if", "not", "(", "domain", "and", "validate_host", "(", "domain", ",", "allowed_hosts", ")", ")", ":", "return", "None", "return", "urlunparse", "(", "(", "\"\"", ",", "\"\"", ",", "parts", ".", "path", ",", "parts", ".", "params", ",", "parts", ".", "query", ",", "parts", ".", "fragment", ")", ")" ]
Returns the next_url path if next_url matches allowed hosts.
[ "Returns", "the", "next_url", "path", "if", "next_url", "matches", "allowed", "hosts", "." ]
python
train
39.533333
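A behaviour sketch for validate_redirect_url above, assuming Django settings with DEBUG = False and ALLOWED_HOSTS = ['example.com']; only the path, params, query, and fragment survive for allowed hosts:

validate_redirect_url('/app/profile/?tab=1')            # -> '/app/profile/?tab=1'
validate_redirect_url('https://example.com/app/')       # -> '/app/'
validate_redirect_url('https://attacker.test/phish')    # -> None (host not allowed)
validate_redirect_url(None)                             # -> None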
closeio/tasktiger
tasktiger/worker.py
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L177-L220
def _wait_for_new_tasks(self, timeout=0, batch_timeout=0): """ Check activity channel and wait as necessary. This method is also used to slow down the main processing loop to reduce the effects of rapidly sending Redis commands. This method will exit for any of these conditions: 1. _did_work is True, suggests there could be more work pending 2. Found new queue and after batch timeout. Note batch timeout can be zero so it will exit immediately. 3. Timeout seconds have passed, this is the maximum time to stay in this method """ new_queue_found = False start_time = batch_exit = time.time() while True: # Check to see if batch_exit has been updated if batch_exit > start_time: pubsub_sleep = batch_exit - time.time() else: pubsub_sleep = start_time + timeout - time.time() message = self._pubsub.get_message(timeout=0 if pubsub_sleep < 0 or self._did_work else pubsub_sleep) # Pull remaining messages off of channel while message: if message['type'] == 'message': new_queue_found, batch_exit = self._process_queue_message( message['data'], new_queue_found, batch_exit, start_time, timeout, batch_timeout ) message = self._pubsub.get_message() if self._did_work: break # Exit immediately if we did work during the last # execution loop because there might be more work to do elif time.time() >= batch_exit and new_queue_found: break # After finding a new queue we can wait until the # batch timeout expires elif time.time() - start_time > timeout: break
[ "def", "_wait_for_new_tasks", "(", "self", ",", "timeout", "=", "0", ",", "batch_timeout", "=", "0", ")", ":", "new_queue_found", "=", "False", "start_time", "=", "batch_exit", "=", "time", ".", "time", "(", ")", "while", "True", ":", "# Check to see if batch_exit has been updated", "if", "batch_exit", ">", "start_time", ":", "pubsub_sleep", "=", "batch_exit", "-", "time", ".", "time", "(", ")", "else", ":", "pubsub_sleep", "=", "start_time", "+", "timeout", "-", "time", ".", "time", "(", ")", "message", "=", "self", ".", "_pubsub", ".", "get_message", "(", "timeout", "=", "0", "if", "pubsub_sleep", "<", "0", "or", "self", ".", "_did_work", "else", "pubsub_sleep", ")", "# Pull remaining messages off of channel", "while", "message", ":", "if", "message", "[", "'type'", "]", "==", "'message'", ":", "new_queue_found", ",", "batch_exit", "=", "self", ".", "_process_queue_message", "(", "message", "[", "'data'", "]", ",", "new_queue_found", ",", "batch_exit", ",", "start_time", ",", "timeout", ",", "batch_timeout", ")", "message", "=", "self", ".", "_pubsub", ".", "get_message", "(", ")", "if", "self", ".", "_did_work", ":", "break", "# Exit immediately if we did work during the last", "# execution loop because there might be more work to do", "elif", "time", ".", "time", "(", ")", ">=", "batch_exit", "and", "new_queue_found", ":", "break", "# After finding a new queue we can wait until the", "# batch timeout expires", "elif", "time", ".", "time", "(", ")", "-", "start_time", ">", "timeout", ":", "break" ]
Check activity channel and wait as necessary. This method is also used to slow down the main processing loop to reduce the effects of rapidly sending Redis commands. This method will exit for any of these conditions: 1. _did_work is True, suggests there could be more work pending 2. Found new queue and after batch timeout. Note batch timeout can be zero so it will exit immediately. 3. Timeout seconds have passed, this is the maximum time to stay in this method
[ "Check", "activity", "channel", "and", "wait", "as", "necessary", "." ]
python
train
45.568182
dw/mitogen
ansible_mitogen/connection.py
https://github.com/dw/mitogen/blob/a7fdb55e1300a7e0a5e404b09eb730cf9a525da7/ansible_mitogen/connection.py#L860-L869
def spawn_isolated_child(self): """ Fork or launch a new child off the target context. :returns: mitogen.core.Context of the new child. """ return self.get_chain(use_fork=True).call( ansible_mitogen.target.spawn_isolated_child )
[ "def", "spawn_isolated_child", "(", "self", ")", ":", "return", "self", ".", "get_chain", "(", "use_fork", "=", "True", ")", ".", "call", "(", "ansible_mitogen", ".", "target", ".", "spawn_isolated_child", ")" ]
Fork or launch a new child off the target context. :returns: mitogen.core.Context of the new child.
[ "Fork", "or", "launch", "a", "new", "child", "off", "the", "target", "context", "." ]
python
train
29.2
SheffieldML/GPy
GPy/inference/latent_function_inference/laplace.py
https://github.com/SheffieldML/GPy/blob/54c32d79d289d622fb18b898aee65a2a431d90cf/GPy/inference/latent_function_inference/laplace.py#L233-L306
def mode_computations(self, f_hat, Ki_f, K, Y, likelihood, kern, Y_metadata): """ At the mode, compute the hessian and effective covariance matrix. returns: logZ : approximation to the marginal likelihood woodbury_inv : variable required for calculating the approximation to the covariance matrix dL_dthetaL : array of derivatives (1 x num_kernel_params) dL_dthetaL : array of derivatives (1 x num_likelihood_params) """ #At this point get the hessian matrix (or vector as W is diagonal) W = -likelihood.d2logpdf_df2(f_hat, Y, Y_metadata=Y_metadata) if np.any(np.isnan(W)): raise ValueError('One or more element(s) of W is NaN') K_Wi_i, logdet_I_KW, I_KW_i, Ki_W_i = self._compute_B_statistics(K, W, likelihood.log_concave) #compute the log marginal log_marginal = -0.5*np.sum(np.dot(Ki_f.T, f_hat)) + np.sum(likelihood.logpdf(f_hat, Y, Y_metadata=Y_metadata)) - 0.5*logdet_I_KW # Compute matrices for derivatives dW_df = -likelihood.d3logpdf_df3(f_hat, Y, Y_metadata=Y_metadata) # -d3lik_d3fhat if np.any(np.isnan(dW_df)): raise ValueError('One or more element(s) of dW_df is NaN') dL_dfhat = -0.5*(np.diag(Ki_W_i)[:, None]*dW_df) # s2 in R&W p126 line 9. #BiK, _ = dpotrs(L, K, lower=1) #dL_dfhat = 0.5*np.diag(BiK)[:, None]*dW_df I_KW_i = np.eye(Y.shape[0]) - np.dot(K, K_Wi_i) #################### # compute dL_dK # #################### if kern.size > 0 and not kern.is_fixed: #Explicit explicit_part = 0.5*(np.dot(Ki_f, Ki_f.T) - K_Wi_i) #Implicit implicit_part = np.dot(Ki_f, dL_dfhat.T).dot(I_KW_i) dL_dK = explicit_part + implicit_part else: dL_dK = np.zeros(likelihood.size) #################### #compute dL_dthetaL# #################### if likelihood.size > 0 and not likelihood.is_fixed: dlik_dthetaL, dlik_grad_dthetaL, dlik_hess_dthetaL = likelihood._laplace_gradients(f_hat, Y, Y_metadata=Y_metadata) num_params = likelihood.size # make space for one derivative for each likelihood parameter dL_dthetaL = np.zeros(num_params) for thetaL_i in range(num_params): #Explicit dL_dthetaL_exp = ( np.sum(dlik_dthetaL[thetaL_i,:, :]) # The + comes from the fact that dlik_hess_dthetaL == -dW_dthetaL + 0.5*np.sum(np.diag(Ki_W_i)*np.squeeze(dlik_hess_dthetaL[thetaL_i, :, :])) ) #Implicit dfhat_dthetaL = mdot(I_KW_i, K, dlik_grad_dthetaL[thetaL_i, :, :]) #dfhat_dthetaL = mdot(Ki_W_i, dlik_grad_dthetaL[thetaL_i, :, :]) dL_dthetaL_imp = np.dot(dL_dfhat.T, dfhat_dthetaL) dL_dthetaL[thetaL_i] = np.sum(dL_dthetaL_exp + dL_dthetaL_imp) else: dL_dthetaL = np.zeros(likelihood.size) #Cache some things for speedy LOO self.Ki_W_i = Ki_W_i self.K = K self.W = W self.f_hat = f_hat return log_marginal, K_Wi_i, dL_dK, dL_dthetaL
[ "def", "mode_computations", "(", "self", ",", "f_hat", ",", "Ki_f", ",", "K", ",", "Y", ",", "likelihood", ",", "kern", ",", "Y_metadata", ")", ":", "#At this point get the hessian matrix (or vector as W is diagonal)", "W", "=", "-", "likelihood", ".", "d2logpdf_df2", "(", "f_hat", ",", "Y", ",", "Y_metadata", "=", "Y_metadata", ")", "if", "np", ".", "any", "(", "np", ".", "isnan", "(", "W", ")", ")", ":", "raise", "ValueError", "(", "'One or more element(s) of W is NaN'", ")", "K_Wi_i", ",", "logdet_I_KW", ",", "I_KW_i", ",", "Ki_W_i", "=", "self", ".", "_compute_B_statistics", "(", "K", ",", "W", ",", "likelihood", ".", "log_concave", ")", "#compute the log marginal", "log_marginal", "=", "-", "0.5", "*", "np", ".", "sum", "(", "np", ".", "dot", "(", "Ki_f", ".", "T", ",", "f_hat", ")", ")", "+", "np", ".", "sum", "(", "likelihood", ".", "logpdf", "(", "f_hat", ",", "Y", ",", "Y_metadata", "=", "Y_metadata", ")", ")", "-", "0.5", "*", "logdet_I_KW", "# Compute matrices for derivatives", "dW_df", "=", "-", "likelihood", ".", "d3logpdf_df3", "(", "f_hat", ",", "Y", ",", "Y_metadata", "=", "Y_metadata", ")", "# -d3lik_d3fhat", "if", "np", ".", "any", "(", "np", ".", "isnan", "(", "dW_df", ")", ")", ":", "raise", "ValueError", "(", "'One or more element(s) of dW_df is NaN'", ")", "dL_dfhat", "=", "-", "0.5", "*", "(", "np", ".", "diag", "(", "Ki_W_i", ")", "[", ":", ",", "None", "]", "*", "dW_df", ")", "# s2 in R&W p126 line 9.", "#BiK, _ = dpotrs(L, K, lower=1)", "#dL_dfhat = 0.5*np.diag(BiK)[:, None]*dW_df", "I_KW_i", "=", "np", ".", "eye", "(", "Y", ".", "shape", "[", "0", "]", ")", "-", "np", ".", "dot", "(", "K", ",", "K_Wi_i", ")", "####################", "# compute dL_dK #", "####################", "if", "kern", ".", "size", ">", "0", "and", "not", "kern", ".", "is_fixed", ":", "#Explicit", "explicit_part", "=", "0.5", "*", "(", "np", ".", "dot", "(", "Ki_f", ",", "Ki_f", ".", "T", ")", "-", "K_Wi_i", ")", "#Implicit", "implicit_part", "=", "np", ".", "dot", "(", "Ki_f", ",", "dL_dfhat", ".", "T", ")", ".", "dot", "(", "I_KW_i", ")", "dL_dK", "=", "explicit_part", "+", "implicit_part", "else", ":", "dL_dK", "=", "np", ".", "zeros", "(", "likelihood", ".", "size", ")", "####################", "#compute dL_dthetaL#", "####################", "if", "likelihood", ".", "size", ">", "0", "and", "not", "likelihood", ".", "is_fixed", ":", "dlik_dthetaL", ",", "dlik_grad_dthetaL", ",", "dlik_hess_dthetaL", "=", "likelihood", ".", "_laplace_gradients", "(", "f_hat", ",", "Y", ",", "Y_metadata", "=", "Y_metadata", ")", "num_params", "=", "likelihood", ".", "size", "# make space for one derivative for each likelihood parameter", "dL_dthetaL", "=", "np", ".", "zeros", "(", "num_params", ")", "for", "thetaL_i", "in", "range", "(", "num_params", ")", ":", "#Explicit", "dL_dthetaL_exp", "=", "(", "np", ".", "sum", "(", "dlik_dthetaL", "[", "thetaL_i", ",", ":", ",", ":", "]", ")", "# The + comes from the fact that dlik_hess_dthetaL == -dW_dthetaL", "+", "0.5", "*", "np", ".", "sum", "(", "np", ".", "diag", "(", "Ki_W_i", ")", "*", "np", ".", "squeeze", "(", "dlik_hess_dthetaL", "[", "thetaL_i", ",", ":", ",", ":", "]", ")", ")", ")", "#Implicit", "dfhat_dthetaL", "=", "mdot", "(", "I_KW_i", ",", "K", ",", "dlik_grad_dthetaL", "[", "thetaL_i", ",", ":", ",", ":", "]", ")", "#dfhat_dthetaL = mdot(Ki_W_i, dlik_grad_dthetaL[thetaL_i, :, :])", "dL_dthetaL_imp", "=", "np", ".", "dot", "(", "dL_dfhat", ".", "T", ",", "dfhat_dthetaL", ")", "dL_dthetaL", "[", "thetaL_i", "]", "=", "np", ".", "sum", "(", 
"dL_dthetaL_exp", "+", "dL_dthetaL_imp", ")", "else", ":", "dL_dthetaL", "=", "np", ".", "zeros", "(", "likelihood", ".", "size", ")", "#Cache some things for speedy LOO", "self", ".", "Ki_W_i", "=", "Ki_W_i", "self", ".", "K", "=", "K", "self", ".", "W", "=", "W", "self", ".", "f_hat", "=", "f_hat", "return", "log_marginal", ",", "K_Wi_i", ",", "dL_dK", ",", "dL_dthetaL" ]
At the mode, compute the hessian and effective covariance matrix. returns: logZ : approximation to the marginal likelihood woodbury_inv : variable required for calculating the approximation to the covariance matrix dL_dthetaL : array of derivatives (1 x num_kernel_params) dL_dthetaL : array of derivatives (1 x num_likelihood_params)
[ "At", "the", "mode", "compute", "the", "hessian", "and", "effective", "covariance", "matrix", "." ]
python
train
44.094595
PonteIneptique/collatinus-python
pycollatinus/parser.py
https://github.com/PonteIneptique/collatinus-python/blob/fca37b0b77bc60f47d3c24ab42f6d0bdca6ba0f5/pycollatinus/parser.py#L104-L113
def lisFichierLexique(self, filepath):
        """ Reads the lemmas and records their stems
        :param filepath: Path of the file to load
        :type filepath: str
        """
        orig = int(filepath.endswith("ext.la"))
        lignes = lignesFichier(filepath)
        for ligne in lignes:
            self.parse_lemme(ligne, orig)

[ "def", "lisFichierLexique", "(", "self", ",", "filepath", ")", ":", "orig", "=", "int", "(", "filepath", ".", "endswith", "(", "\"ext.la\"", ")", ")", "lignes", "=", "lignesFichier", "(", "filepath", ")", "for", "ligne", "in", "lignes", ":", "self", ".", "parse_lemme", "(", "ligne", ",", "orig", ")" ]
Reads the lemmas and records their stems
:param filepath: Path of the file to load
:type filepath: str
[ "Lecture", "des", "lemmes", "et", "enregistrement", "de", "leurs", "radicaux" ]
python
train
35.1
ethereum/eth-abi
eth_abi/registry.py
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/registry.py#L354-L361
def register_encoder(self, lookup: Lookup, encoder: Encoder, label: str=None) -> None: """ Registers the given ``encoder`` under the given ``lookup``. A unique string label may be optionally provided that can be used to refer to the registration by name. For more information about arguments, refer to :any:`register`. """ self._register_coder(self._encoders, lookup, encoder, label=label)
[ "def", "register_encoder", "(", "self", ",", "lookup", ":", "Lookup", ",", "encoder", ":", "Encoder", ",", "label", ":", "str", "=", "None", ")", "->", "None", ":", "self", ".", "_register_coder", "(", "self", ".", "_encoders", ",", "lookup", ",", "encoder", ",", "label", "=", "label", ")" ]
Registers the given ``encoder`` under the given ``lookup``. A unique string label may be optionally provided that can be used to refer to the registration by name. For more information about arguments, refer to :any:`register`.
[ "Registers", "the", "given", "encoder", "under", "the", "given", "lookup", ".", "A", "unique", "string", "label", "may", "be", "optionally", "provided", "that", "can", "be", "used", "to", "refer", "to", "the", "registration", "by", "name", ".", "For", "more", "information", "about", "arguments", "refer", "to", ":", "any", ":", "register", "." ]
python
train
55
spdx/tools-python
spdx/parsers/rdf.py
https://github.com/spdx/tools-python/blob/301d72f6ae57c832c1da7f6402fa49b192de6810/spdx/parsers/rdf.py#L83-L91
def value_error(self, key, bad_value): """Reports a value error using ERROR_MESSAGES dict. key - key to use for ERROR_MESSAGES. bad_value - is passed to format which is called on what key maps to in ERROR_MESSAGES. """ msg = ERROR_MESSAGES[key].format(bad_value) self.logger.log(msg) self.error = True
[ "def", "value_error", "(", "self", ",", "key", ",", "bad_value", ")", ":", "msg", "=", "ERROR_MESSAGES", "[", "key", "]", ".", "format", "(", "bad_value", ")", "self", ".", "logger", ".", "log", "(", "msg", ")", "self", ".", "error", "=", "True" ]
Reports a value error using ERROR_MESSAGES dict. key - key to use for ERROR_MESSAGES. bad_value - is passed to format which is called on what key maps to in ERROR_MESSAGES.
[ "Reports", "a", "value", "error", "using", "ERROR_MESSAGES", "dict", ".", "key", "-", "key", "to", "use", "for", "ERROR_MESSAGES", ".", "bad_value", "-", "is", "passed", "to", "format", "which", "is", "called", "on", "what", "key", "maps", "to", "in", "ERROR_MESSAGES", "." ]
python
valid
39.666667
jobovy/galpy
galpy/potential/TwoPowerSphericalPotential.py
https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/potential/TwoPowerSphericalPotential.py#L205-L222
def _mass(self,R,z=0.,t=0.): """ NAME: _mass PURPOSE: evaluate the mass within R for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height t - time OUTPUT: the mass enclosed HISTORY: 2014-04-01 - Written - Erkal (IoA) """ if z is None: r= R else: r= numpy.sqrt(R**2.+z**2.) return (r/self.a)**(3.-self.alpha)/(3.-self.alpha)*special.hyp2f1(3.-self.alpha,-self.alpha+self.beta,4.-self.alpha,-r/self.a)
[ "def", "_mass", "(", "self", ",", "R", ",", "z", "=", "0.", ",", "t", "=", "0.", ")", ":", "if", "z", "is", "None", ":", "r", "=", "R", "else", ":", "r", "=", "numpy", ".", "sqrt", "(", "R", "**", "2.", "+", "z", "**", "2.", ")", "return", "(", "r", "/", "self", ".", "a", ")", "**", "(", "3.", "-", "self", ".", "alpha", ")", "/", "(", "3.", "-", "self", ".", "alpha", ")", "*", "special", ".", "hyp2f1", "(", "3.", "-", "self", ".", "alpha", ",", "-", "self", ".", "alpha", "+", "self", ".", "beta", ",", "4.", "-", "self", ".", "alpha", ",", "-", "r", "/", "self", ".", "a", ")" ]
NAME: _mass PURPOSE: evaluate the mass within R for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height t - time OUTPUT: the mass enclosed HISTORY: 2014-04-01 - Written - Erkal (IoA)
[ "NAME", ":", "_mass", "PURPOSE", ":", "evaluate", "the", "mass", "within", "R", "for", "this", "potential", "INPUT", ":", "R", "-", "Galactocentric", "cylindrical", "radius", "z", "-", "vertical", "height", "t", "-", "time", "OUTPUT", ":", "the", "mass", "enclosed", "HISTORY", ":", "2014", "-", "04", "-", "01", "-", "Written", "-", "Erkal", "(", "IoA", ")" ]
python
train
31.444444
pycontribs/jira
jira/client.py
https://github.com/pycontribs/jira/blob/397db5d78441ed6a680a9b7db4c62030ade1fd8a/jira/client.py#L3591-L3603
def remove_user_from_group(self, username, groupname): """Remove a user from a group. :param username: The user to remove from the group. :param groupname: The group that the user will be removed from. """ url = self._options['server'] + '/rest/api/latest/group/user' x = {'groupname': groupname, 'username': username} self._session.delete(url, params=x) return True
[ "def", "remove_user_from_group", "(", "self", ",", "username", ",", "groupname", ")", ":", "url", "=", "self", ".", "_options", "[", "'server'", "]", "+", "'/rest/api/latest/group/user'", "x", "=", "{", "'groupname'", ":", "groupname", ",", "'username'", ":", "username", "}", "self", ".", "_session", ".", "delete", "(", "url", ",", "params", "=", "x", ")", "return", "True" ]
Remove a user from a group. :param username: The user to remove from the group. :param groupname: The group that the user will be removed from.
[ "Remove", "a", "user", "from", "a", "group", "." ]
python
train
33.384615
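A minimal usage sketch for the record above, assuming a reachable Jira server and valid credentials; the server URL, user name, group name and token below are placeholders, not taken from the record:

from jira import JIRA

# connect to a (hypothetical) Jira instance
jira = JIRA(server="https://jira.example.com", basic_auth=("admin", "api-token"))

# remove a user from a group, as implemented by remove_user_from_group() above
jira.remove_user_from_group("some.user", "jira-developers")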
Alignak-monitoring/alignak
alignak/external_command.py
https://github.com/Alignak-monitoring/alignak/blob/f3c145207e83159b799d3714e4241399c7740a64/alignak/external_command.py#L3677-L3705
def schedule_servicegroup_host_downtime(self, servicegroup, start_time, end_time, fixed, trigger_id, duration, author, comment): """Schedule a host downtime for each host of services in a servicegroup Format of the line that triggers function call:: SCHEDULE_SERVICEGROUP_HOST_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;<fixed>; <trigger_id>;<duration>;<author>;<comment> :param servicegroup: servicegroup to schedule downtime :type servicegroup: alignak.object.servicegroup.Servicegroup :param start_time: downtime start time :type start_time: :param end_time: downtime end time :type end_time: :param fixed: is downtime fixed :type fixed: bool :param trigger_id: downtime id that triggered this one :type trigger_id: str :param duration: downtime duration :type duration: int :param author: downtime author :type author: str :param comment: downtime comment :type comment: str :return: None """ for host in [s.host for s in servicegroup.get_services()]: self.schedule_host_downtime(host, start_time, end_time, fixed, trigger_id, duration, author, comment)
[ "def", "schedule_servicegroup_host_downtime", "(", "self", ",", "servicegroup", ",", "start_time", ",", "end_time", ",", "fixed", ",", "trigger_id", ",", "duration", ",", "author", ",", "comment", ")", ":", "for", "host", "in", "[", "s", ".", "host", "for", "s", "in", "servicegroup", ".", "get_services", "(", ")", "]", ":", "self", ".", "schedule_host_downtime", "(", "host", ",", "start_time", ",", "end_time", ",", "fixed", ",", "trigger_id", ",", "duration", ",", "author", ",", "comment", ")" ]
Schedule a host downtime for each host of services in a servicegroup Format of the line that triggers function call:: SCHEDULE_SERVICEGROUP_HOST_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;<fixed>; <trigger_id>;<duration>;<author>;<comment> :param servicegroup: servicegroup to schedule downtime :type servicegroup: alignak.object.servicegroup.Servicegroup :param start_time: downtime start time :type start_time: :param end_time: downtime end time :type end_time: :param fixed: is downtime fixed :type fixed: bool :param trigger_id: downtime id that triggered this one :type trigger_id: str :param duration: downtime duration :type duration: int :param author: downtime author :type author: str :param comment: downtime comment :type comment: str :return: None
[ "Schedule", "a", "host", "downtime", "for", "each", "host", "of", "services", "in", "a", "servicegroup", "Format", "of", "the", "line", "that", "triggers", "function", "call", "::" ]
python
train
45.551724
GoogleCloudPlatform/datastore-ndb-python
demo/task_list.py
https://github.com/GoogleCloudPlatform/datastore-ndb-python/blob/cf4cab3f1f69cd04e1a9229871be466b53729f3f/demo/task_list.py#L61-L74
def mark_done(task_id): """Marks a task as done. Args: task_id: The integer id of the task to update. Raises: ValueError: if the requested task doesn't exist. """ task = Task.get_by_id(task_id) if task is None: raise ValueError('Task with id %d does not exist' % task_id) task.done = True task.put()
[ "def", "mark_done", "(", "task_id", ")", ":", "task", "=", "Task", ".", "get_by_id", "(", "task_id", ")", "if", "task", "is", "None", ":", "raise", "ValueError", "(", "'Task with id %d does not exist'", "%", "task_id", ")", "task", ".", "done", "=", "True", "task", ".", "put", "(", ")" ]
Marks a task as done. Args: task_id: The integer id of the task to update. Raises: ValueError: if the requested task doesn't exist.
[ "Marks", "a", "task", "as", "done", "." ]
python
train
22.571429
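A small caller sketch for mark_done() above; it assumes the Task model from the same demo module is importable and an NDB context is active, and the task id is a made-up placeholder:

# mark an existing task as done, handling the missing-task case
try:
    mark_done(12345)
except ValueError as exc:
    # raised when no Task entity exists with that id
    print(exc)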
pandas-dev/pandas
pandas/core/arrays/datetimelike.py
https://github.com/pandas-dev/pandas/blob/9feb3ad92cc0397a04b665803a49299ee7aa1037/pandas/core/arrays/datetimelike.py#L1141-L1177
def _time_shift(self, periods, freq=None): """ Shift each value by `periods`. Note this is different from ExtensionArray.shift, which shifts the *position* of each element, padding the end with missing values. Parameters ---------- periods : int Number of periods to shift by. freq : pandas.DateOffset, pandas.Timedelta, or string Frequency increment to shift by. """ if freq is not None and freq != self.freq: if isinstance(freq, str): freq = frequencies.to_offset(freq) offset = periods * freq result = self + offset return result if periods == 0: # immutable so OK return self.copy() if self.freq is None: raise NullFrequencyError("Cannot shift with no freq") start = self[0] + periods * self.freq end = self[-1] + periods * self.freq # Note: in the DatetimeTZ case, _generate_range will infer the # appropriate timezone from `start` and `end`, so tz does not need # to be passed explicitly. return self._generate_range(start=start, end=end, periods=None, freq=self.freq)
[ "def", "_time_shift", "(", "self", ",", "periods", ",", "freq", "=", "None", ")", ":", "if", "freq", "is", "not", "None", "and", "freq", "!=", "self", ".", "freq", ":", "if", "isinstance", "(", "freq", ",", "str", ")", ":", "freq", "=", "frequencies", ".", "to_offset", "(", "freq", ")", "offset", "=", "periods", "*", "freq", "result", "=", "self", "+", "offset", "return", "result", "if", "periods", "==", "0", ":", "# immutable so OK", "return", "self", ".", "copy", "(", ")", "if", "self", ".", "freq", "is", "None", ":", "raise", "NullFrequencyError", "(", "\"Cannot shift with no freq\"", ")", "start", "=", "self", "[", "0", "]", "+", "periods", "*", "self", ".", "freq", "end", "=", "self", "[", "-", "1", "]", "+", "periods", "*", "self", ".", "freq", "# Note: in the DatetimeTZ case, _generate_range will infer the", "# appropriate timezone from `start` and `end`, so tz does not need", "# to be passed explicitly.", "return", "self", ".", "_generate_range", "(", "start", "=", "start", ",", "end", "=", "end", ",", "periods", "=", "None", ",", "freq", "=", "self", ".", "freq", ")" ]
Shift each value by `periods`. Note this is different from ExtensionArray.shift, which shifts the *position* of each element, padding the end with missing values. Parameters ---------- periods : int Number of periods to shift by. freq : pandas.DateOffset, pandas.Timedelta, or string Frequency increment to shift by.
[ "Shift", "each", "value", "by", "periods", "." ]
python
train
34
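The record above is an internal array method; the same frequency-based shift is exposed on the public index types. A hedged sketch of the roughly equivalent public call, assuming a recent pandas version:

import pandas as pd

idx = pd.date_range("2024-01-01", periods=3, freq="D")
# shift every timestamp by 2 * freq, mirroring the periods * self.freq logic above
shifted = idx.shift(2)  # DatetimeIndex(['2024-01-03', '2024-01-04', '2024-01-05'], ...)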
kubernetes-client/python
kubernetes/client/apis/rbac_authorization_v1_api.py
https://github.com/kubernetes-client/python/blob/5e512ff564c244c50cab780d821542ed56aa965a/kubernetes/client/apis/rbac_authorization_v1_api.py#L2062-L2088
def list_role_binding_for_all_namespaces(self, **kwargs): """ list or watch objects of kind RoleBinding This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_role_binding_for_all_namespaces(async_req=True) >>> result = thread.get() :param async_req bool :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str pretty: If 'true', then the output is pretty printed. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. 
When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleBindingList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_role_binding_for_all_namespaces_with_http_info(**kwargs) else: (data) = self.list_role_binding_for_all_namespaces_with_http_info(**kwargs) return data
[ "def", "list_role_binding_for_all_namespaces", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "list_role_binding_for_all_namespaces_with_http_info", "(", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "list_role_binding_for_all_namespaces_with_http_info", "(", "*", "*", "kwargs", ")", "return", "data" ]
list or watch objects of kind RoleBinding This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_role_binding_for_all_namespaces(async_req=True) >>> result = thread.get() :param async_req bool :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str pretty: If 'true', then the output is pretty printed. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. 
When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleBindingList If the method is called asynchronously, returns the request thread.
[ "list", "or", "watch", "objects", "of", "kind", "RoleBinding", "This", "method", "makes", "a", "synchronous", "HTTP", "request", "by", "default", ".", "To", "make", "an", "asynchronous", "HTTP", "request", "please", "pass", "async_req", "=", "True", ">>>", "thread", "=", "api", ".", "list_role_binding_for_all_namespaces", "(", "async_req", "=", "True", ")", ">>>", "result", "=", "thread", ".", "get", "()" ]
python
train
169.222222
dropbox/pygerduty
pygerduty/events.py
https://github.com/dropbox/pygerduty/blob/11b28bfb66306aa7fc2b95ab9df65eb97ea831cf/pygerduty/events.py#L57-L65
def resolve_incident(self, incident_key, description=None, details=None): """ Causes the referenced incident to enter resolved state. Send a resolve event when the problem that caused the initial trigger has been fixed. """ return self.create_event(description, "resolve", details, incident_key)
[ "def", "resolve_incident", "(", "self", ",", "incident_key", ",", "description", "=", "None", ",", "details", "=", "None", ")", ":", "return", "self", ".", "create_event", "(", "description", ",", "\"resolve\"", ",", "details", ",", "incident_key", ")" ]
Causes the referenced incident to enter resolved state. Send a resolve event when the problem that caused the initial trigger has been fixed.
[ "Causes", "the", "referenced", "incident", "to", "enter", "resolved", "state", ".", "Send", "a", "resolve", "event", "when", "the", "problem", "that", "caused", "the", "initial", "trigger", "has", "been", "fixed", "." ]
python
train
42.888889
kensho-technologies/graphql-compiler
graphql_compiler/compiler/workarounds/orientdb_query_execution.py
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L208-L216
def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound, location, match_query): """Ensure that the two bounds either are an exact match, or one of them is None.""" if all((current_type_bound is not None, previous_type_bound is not None, current_type_bound != previous_type_bound)): raise AssertionError( u'Conflicting type bounds calculated at location {}: {} vs {} ' u'for query {}'.format(location, previous_type_bound, current_type_bound, match_query))
[ "def", "_assert_type_bounds_are_not_conflicting", "(", "current_type_bound", ",", "previous_type_bound", ",", "location", ",", "match_query", ")", ":", "if", "all", "(", "(", "current_type_bound", "is", "not", "None", ",", "previous_type_bound", "is", "not", "None", ",", "current_type_bound", "!=", "previous_type_bound", ")", ")", ":", "raise", "AssertionError", "(", "u'Conflicting type bounds calculated at location {}: {} vs {} '", "u'for query {}'", ".", "format", "(", "location", ",", "previous_type_bound", ",", "current_type_bound", ",", "match_query", ")", ")" ]
Ensure that the two bounds either are an exact match, or one of them is None.
[ "Ensure", "that", "the", "two", "bounds", "either", "are", "an", "exact", "match", "or", "one", "of", "them", "is", "None", "." ]
python
train
64.888889
LonamiWebs/Telethon
telethon/tl/custom/chatgetter.py
https://github.com/LonamiWebs/Telethon/blob/1ead9757d366b58c1e0567cddb0196e20f1a445f/telethon/tl/custom/chatgetter.py#L97-L105
def is_group(self): """True if the message was sent on a group or megagroup.""" if self._broadcast is None and self.chat: self._broadcast = getattr(self.chat, 'broadcast', None) return ( isinstance(self._chat_peer, (types.PeerChat, types.PeerChannel)) and not self._broadcast )
[ "def", "is_group", "(", "self", ")", ":", "if", "self", ".", "_broadcast", "is", "None", "and", "self", ".", "chat", ":", "self", ".", "_broadcast", "=", "getattr", "(", "self", ".", "chat", ",", "'broadcast'", ",", "None", ")", "return", "(", "isinstance", "(", "self", ".", "_chat_peer", ",", "(", "types", ".", "PeerChat", ",", "types", ".", "PeerChannel", ")", ")", "and", "not", "self", ".", "_broadcast", ")" ]
True if the message was sent on a group or megagroup.
[ "True", "if", "the", "message", "was", "sent", "on", "a", "group", "or", "megagroup", "." ]
python
train
37.555556
idlesign/django-sitetree
sitetree/admin.py
https://github.com/idlesign/django-sitetree/blob/61de4608e6e415247c75fe8691027d7c4ed0d1e7/sitetree/admin.py#L232-L262
def item_move(self, request, tree_id, item_id, direction): """Moves item up or down by swapping 'sort_order' field values of neighboring items.""" current_item = MODEL_TREE_ITEM_CLASS._default_manager.get(pk=item_id) if direction == 'up': sort_order = 'sort_order' else: sort_order = '-sort_order' siblings = MODEL_TREE_ITEM_CLASS._default_manager.filter( parent=current_item.parent, tree=current_item.tree ).order_by(sort_order) previous_item = None for item in siblings: if item != current_item: previous_item = item else: break if previous_item is not None: current_item_sort_order = current_item.sort_order previous_item_sort_order = previous_item.sort_order current_item.sort_order = previous_item_sort_order previous_item.sort_order = current_item_sort_order current_item.save() previous_item.save() return HttpResponseRedirect('../../')
[ "def", "item_move", "(", "self", ",", "request", ",", "tree_id", ",", "item_id", ",", "direction", ")", ":", "current_item", "=", "MODEL_TREE_ITEM_CLASS", ".", "_default_manager", ".", "get", "(", "pk", "=", "item_id", ")", "if", "direction", "==", "'up'", ":", "sort_order", "=", "'sort_order'", "else", ":", "sort_order", "=", "'-sort_order'", "siblings", "=", "MODEL_TREE_ITEM_CLASS", ".", "_default_manager", ".", "filter", "(", "parent", "=", "current_item", ".", "parent", ",", "tree", "=", "current_item", ".", "tree", ")", ".", "order_by", "(", "sort_order", ")", "previous_item", "=", "None", "for", "item", "in", "siblings", ":", "if", "item", "!=", "current_item", ":", "previous_item", "=", "item", "else", ":", "break", "if", "previous_item", "is", "not", "None", ":", "current_item_sort_order", "=", "current_item", ".", "sort_order", "previous_item_sort_order", "=", "previous_item", ".", "sort_order", "current_item", ".", "sort_order", "=", "previous_item_sort_order", "previous_item", ".", "sort_order", "=", "current_item_sort_order", "current_item", ".", "save", "(", ")", "previous_item", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "'../../'", ")" ]
Moves item up or down by swapping 'sort_order' field values of neighboring items.
[ "Moves", "item", "up", "or", "down", "by", "swapping", "sort_order", "field", "values", "of", "neighboring", "items", "." ]
python
test
34.677419
vmware/pyvmomi
pyVmomi/Differ.py
https://github.com/vmware/pyvmomi/blob/3ffcb23bf77d757175c0d5216ba9a25345d824cd/pyVmomi/Differ.py#L94-L111
def DiffDoArrays(self, oldObj, newObj, isElementLinks): """Diff two DataObject arrays""" if len(oldObj) != len(newObj): __Log__.debug('DiffDoArrays: Array lengths do not match %d != %d' % (len(oldObj), len(newObj))) return False for i, j in zip(oldObj, newObj): if isElementLinks: if i.GetKey() != j.GetKey(): __Log__.debug('DiffDoArrays: Keys do not match %s != %s' % (i.GetKey(), j.GetKey())) return False else: if not self.DiffDataObjects(i, j): __Log__.debug( 'DiffDoArrays: one of the elements do not match') return False return True
[ "def", "DiffDoArrays", "(", "self", ",", "oldObj", ",", "newObj", ",", "isElementLinks", ")", ":", "if", "len", "(", "oldObj", ")", "!=", "len", "(", "newObj", ")", ":", "__Log__", ".", "debug", "(", "'DiffDoArrays: Array lengths do not match %d != %d'", "%", "(", "len", "(", "oldObj", ")", ",", "len", "(", "newObj", ")", ")", ")", "return", "False", "for", "i", ",", "j", "in", "zip", "(", "oldObj", ",", "newObj", ")", ":", "if", "isElementLinks", ":", "if", "i", ".", "GetKey", "(", ")", "!=", "j", ".", "GetKey", "(", ")", ":", "__Log__", ".", "debug", "(", "'DiffDoArrays: Keys do not match %s != %s'", "%", "(", "i", ".", "GetKey", "(", ")", ",", "j", ".", "GetKey", "(", ")", ")", ")", "return", "False", "else", ":", "if", "not", "self", ".", "DiffDataObjects", "(", "i", ",", "j", ")", ":", "__Log__", ".", "debug", "(", "'DiffDoArrays: one of the elements do not match'", ")", "return", "False", "return", "True" ]
Diff two DataObject arrays
[ "Diff", "two", "DataObject", "arrays" ]
python
train
39.611111
kislyuk/aegea
aegea/packages/github3/github.py
https://github.com/kislyuk/aegea/blob/94957e9dba036eae3052e2662c208b259c08399a/aegea/packages/github3/github.py#L1158-L1221
def search_issues(self, query, sort=None, order=None, per_page=None, text_match=False, number=-1, etag=None): """Find issues by state and keyword The query can contain any combination of the following supported qualifers: - ``type`` With this qualifier you can restrict the search to issues or pull request only. - ``in`` Qualifies which fields are searched. With this qualifier you can restrict the search to just the title, body, comments, or any combination of these. - ``author`` Finds issues created by a certain user. - ``assignee`` Finds issues that are assigned to a certain user. - ``mentions`` Finds issues that mention a certain user. - ``commenter`` Finds issues that a certain user commented on. - ``involves`` Finds issues that were either created by a certain user, assigned to that user, mention that user, or were commented on by that user. - ``state`` Filter issues based on whether they’re open or closed. - ``labels`` Filters issues based on their labels. - ``language`` Searches for issues within repositories that match a certain language. - ``created`` or ``updated`` Filters issues based on times of creation, or when they were last updated. - ``comments`` Filters issues based on the quantity of comments. - ``user`` or ``repo`` Limits searches to a specific user or repository. For more information about these qualifiers, see: http://git.io/d1oELA :param str query: (required), a valid query as described above, e.g., ``windows label:bug`` :param str sort: (optional), how the results should be sorted; options: ``created``, ``comments``, ``updated``; default: best match :param str order: (optional), the direction of the sorted results, options: ``asc``, ``desc``; default: ``desc`` :param int per_page: (optional) :param bool text_match: (optional), if True, return matching search terms. See http://git.io/QLQuSQ for more information :param int number: (optional), number of issues to return. Default: -1, returns all available issues :param str etag: (optional), previous ETag header value :return: generator of :class:`IssueSearchResult <github3.search.IssueSearchResult>` """ params = {'q': query} headers = {} if sort in ('comments', 'created', 'updated'): params['sort'] = sort if order in ('asc', 'desc'): params['order'] = order if text_match: headers = { 'Accept': 'application/vnd.github.v3.full.text-match+json' } url = self._build_url('search', 'issues') return SearchIterator(number, url, IssueSearchResult, self, params, etag, headers)
[ "def", "search_issues", "(", "self", ",", "query", ",", "sort", "=", "None", ",", "order", "=", "None", ",", "per_page", "=", "None", ",", "text_match", "=", "False", ",", "number", "=", "-", "1", ",", "etag", "=", "None", ")", ":", "params", "=", "{", "'q'", ":", "query", "}", "headers", "=", "{", "}", "if", "sort", "in", "(", "'comments'", ",", "'created'", ",", "'updated'", ")", ":", "params", "[", "'sort'", "]", "=", "sort", "if", "order", "in", "(", "'asc'", ",", "'desc'", ")", ":", "params", "[", "'order'", "]", "=", "order", "if", "text_match", ":", "headers", "=", "{", "'Accept'", ":", "'application/vnd.github.v3.full.text-match+json'", "}", "url", "=", "self", ".", "_build_url", "(", "'search'", ",", "'issues'", ")", "return", "SearchIterator", "(", "number", ",", "url", ",", "IssueSearchResult", ",", "self", ",", "params", ",", "etag", ",", "headers", ")" ]
Find issues by state and keyword The query can contain any combination of the following supported qualifers: - ``type`` With this qualifier you can restrict the search to issues or pull request only. - ``in`` Qualifies which fields are searched. With this qualifier you can restrict the search to just the title, body, comments, or any combination of these. - ``author`` Finds issues created by a certain user. - ``assignee`` Finds issues that are assigned to a certain user. - ``mentions`` Finds issues that mention a certain user. - ``commenter`` Finds issues that a certain user commented on. - ``involves`` Finds issues that were either created by a certain user, assigned to that user, mention that user, or were commented on by that user. - ``state`` Filter issues based on whether they’re open or closed. - ``labels`` Filters issues based on their labels. - ``language`` Searches for issues within repositories that match a certain language. - ``created`` or ``updated`` Filters issues based on times of creation, or when they were last updated. - ``comments`` Filters issues based on the quantity of comments. - ``user`` or ``repo`` Limits searches to a specific user or repository. For more information about these qualifiers, see: http://git.io/d1oELA :param str query: (required), a valid query as described above, e.g., ``windows label:bug`` :param str sort: (optional), how the results should be sorted; options: ``created``, ``comments``, ``updated``; default: best match :param str order: (optional), the direction of the sorted results, options: ``asc``, ``desc``; default: ``desc`` :param int per_page: (optional) :param bool text_match: (optional), if True, return matching search terms. See http://git.io/QLQuSQ for more information :param int number: (optional), number of issues to return. Default: -1, returns all available issues :param str etag: (optional), previous ETag header value :return: generator of :class:`IssueSearchResult <github3.search.IssueSearchResult>`
[ "Find", "issues", "by", "state", "and", "keyword" ]
python
train
46.40625
napalm-automation/napalm-junos
napalm_junos/junos.py
https://github.com/napalm-automation/napalm-junos/blob/78c0d161daf2abf26af5835b773f6db57c46efff/napalm_junos/junos.py#L1574-L1601
def get_probes_results(self): """Return the results of the RPM probes.""" probes_results = {} probes_results_table = junos_views.junos_rpm_probes_results_table(self.device) probes_results_table.get() probes_results_items = probes_results_table.items() for probe_result in probes_results_items: probe_name = py23_compat.text_type(probe_result[0]) test_results = { p[0]: p[1] for p in probe_result[1] } test_results['last_test_loss'] = napalm_base.helpers.convert( int, test_results.pop('last_test_loss'), 0) for test_param_name, test_param_value in test_results.items(): if isinstance(test_param_value, float): test_results[test_param_name] = test_param_value * 1e-3 # convert from useconds to mseconds test_name = test_results.pop('test_name', '') source = test_results.get('source', u'') if source is None: test_results['source'] = u'' if probe_name not in probes_results.keys(): probes_results[probe_name] = {} probes_results[probe_name][test_name] = test_results return probes_results
[ "def", "get_probes_results", "(", "self", ")", ":", "probes_results", "=", "{", "}", "probes_results_table", "=", "junos_views", ".", "junos_rpm_probes_results_table", "(", "self", ".", "device", ")", "probes_results_table", ".", "get", "(", ")", "probes_results_items", "=", "probes_results_table", ".", "items", "(", ")", "for", "probe_result", "in", "probes_results_items", ":", "probe_name", "=", "py23_compat", ".", "text_type", "(", "probe_result", "[", "0", "]", ")", "test_results", "=", "{", "p", "[", "0", "]", ":", "p", "[", "1", "]", "for", "p", "in", "probe_result", "[", "1", "]", "}", "test_results", "[", "'last_test_loss'", "]", "=", "napalm_base", ".", "helpers", ".", "convert", "(", "int", ",", "test_results", ".", "pop", "(", "'last_test_loss'", ")", ",", "0", ")", "for", "test_param_name", ",", "test_param_value", "in", "test_results", ".", "items", "(", ")", ":", "if", "isinstance", "(", "test_param_value", ",", "float", ")", ":", "test_results", "[", "test_param_name", "]", "=", "test_param_value", "*", "1e-3", "# convert from useconds to mseconds", "test_name", "=", "test_results", ".", "pop", "(", "'test_name'", ",", "''", ")", "source", "=", "test_results", ".", "get", "(", "'source'", ",", "u''", ")", "if", "source", "is", "None", ":", "test_results", "[", "'source'", "]", "=", "u''", "if", "probe_name", "not", "in", "probes_results", ".", "keys", "(", ")", ":", "probes_results", "[", "probe_name", "]", "=", "{", "}", "probes_results", "[", "probe_name", "]", "[", "test_name", "]", "=", "test_results", "return", "probes_results" ]
Return the results of the RPM probes.
[ "Return", "the", "results", "of", "the", "RPM", "probes", "." ]
python
train
44.964286
chaimleib/intervaltree
intervaltree/node.py
https://github.com/chaimleib/intervaltree/blob/ffb2b1667f8b832e89324a75a175be8440504c9d/intervaltree/node.py#L109-L117
def compute_depth(self): """ Recursively computes true depth of the subtree. Should only be needed for debugging. Unless something is wrong, the depth field should reflect the correct depth of the subtree. """ left_depth = self.left_node.compute_depth() if self.left_node else 0 right_depth = self.right_node.compute_depth() if self.right_node else 0 return 1 + max(left_depth, right_depth)
[ "def", "compute_depth", "(", "self", ")", ":", "left_depth", "=", "self", ".", "left_node", ".", "compute_depth", "(", ")", "if", "self", ".", "left_node", "else", "0", "right_depth", "=", "self", ".", "right_node", ".", "compute_depth", "(", ")", "if", "self", ".", "right_node", "else", "0", "return", "1", "+", "max", "(", "left_depth", ",", "right_depth", ")" ]
Recursively computes true depth of the subtree. Should only be needed for debugging. Unless something is wrong, the depth field should reflect the correct depth of the subtree.
[ "Recursively", "computes", "true", "depth", "of", "the", "subtree", ".", "Should", "only", "be", "needed", "for", "debugging", ".", "Unless", "something", "is", "wrong", "the", "depth", "field", "should", "reflect", "the", "correct", "depth", "of", "the", "subtree", "." ]
python
train
49.555556
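The recursion above is self-contained, so it can be illustrated with a throwaway stand-in class; this is not intervaltree's real Node, just a sketch of the same depth computation:

class _FakeNode:
    # minimal stand-in with the two child attributes the method relies on
    def __init__(self, left=None, right=None):
        self.left_node = left
        self.right_node = right

    def compute_depth(self):
        left_depth = self.left_node.compute_depth() if self.left_node else 0
        right_depth = self.right_node.compute_depth() if self.right_node else 0
        return 1 + max(left_depth, right_depth)

root = _FakeNode(left=_FakeNode(left=_FakeNode()), right=_FakeNode())
assert root.compute_depth() == 3  # longest path: root -> left -> left.left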
odlgroup/odl
odl/tomo/geometry/parallel.py
https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/geometry/parallel.py#L1471-L1587
def parallel_beam_geometry(space, num_angles=None, det_shape=None): r"""Create default parallel beam geometry from ``space``. This is intended for simple test cases where users do not need the full flexibility of the geometries, but simply want a geometry that works. This default geometry gives a fully sampled sinogram according to the Nyquist criterion, which in general results in a very large number of samples. In particular, a ``space`` that is not centered at the origin can result in very large detectors. Parameters ---------- space : `DiscreteLp` Reconstruction space, the space of the volumetric data to be projected. Needs to be 2d or 3d. num_angles : int, optional Number of angles. Default: Enough to fully sample the data, see Notes. det_shape : int or sequence of int, optional Number of detector pixels. Default: Enough to fully sample the data, see Notes. Returns ------- geometry : `ParallelBeamGeometry` If ``space`` is 2d, return a `Parallel2dGeometry`. If ``space`` is 3d, return a `Parallel3dAxisGeometry`. Examples -------- Create a parallel beam geometry from a 2d space: >>> space = odl.uniform_discr([-1, -1], [1, 1], (20, 20)) >>> geometry = parallel_beam_geometry(space) >>> geometry.angles.size 45 >>> geometry.detector.size 31 Notes ----- According to [NW2001]_, pages 72--74, a function :math:`f : \mathbb{R}^2 \to \mathbb{R}` that has compact support .. math:: \| x \| > \rho \implies f(x) = 0, and is essentially bandlimited .. math:: \| \xi \| > \Omega \implies \hat{f}(\xi) \approx 0, can be fully reconstructed from a parallel beam ray transform if (1) the projection angles are sampled with a spacing of :math:`\Delta \psi` such that .. math:: \Delta \psi \leq \frac{\pi}{\rho \Omega}, and (2) the detector is sampled with an interval :math:`\Delta s` that satisfies .. math:: \Delta s \leq \frac{\pi}{\Omega}. The geometry returned by this function satisfies these conditions exactly. If the domain is 3-dimensional, the geometry is "separable", in that each slice along the z-dimension of the data is treated as independed 2d data. References ---------- .. [NW2001] Natterer, F and Wuebbeling, F. *Mathematical Methods in Image Reconstruction*. SIAM, 2001. https://dx.doi.org/10.1137/1.9780898718324 """ # Find maximum distance from rotation axis corners = space.domain.corners()[:, :2] rho = np.max(np.linalg.norm(corners, axis=1)) # Find default values according to Nyquist criterion. # We assume that the function is bandlimited by a wave along the x or y # axis. The highest frequency we can measure is then a standing wave with # period of twice the inter-node distance. min_side = min(space.partition.cell_sides[:2]) omega = np.pi / min_side num_px_horiz = 2 * int(np.ceil(rho * omega / np.pi)) + 1 if space.ndim == 2: det_min_pt = -rho det_max_pt = rho if det_shape is None: det_shape = num_px_horiz elif space.ndim == 3: num_px_vert = space.shape[2] min_h = space.domain.min_pt[2] max_h = space.domain.max_pt[2] det_min_pt = [-rho, min_h] det_max_pt = [rho, max_h] if det_shape is None: det_shape = [num_px_horiz, num_px_vert] if num_angles is None: num_angles = int(np.ceil(omega * rho)) angle_partition = uniform_partition(0, np.pi, num_angles) det_partition = uniform_partition(det_min_pt, det_max_pt, det_shape) if space.ndim == 2: return Parallel2dGeometry(angle_partition, det_partition) elif space.ndim == 3: return Parallel3dAxisGeometry(angle_partition, det_partition) else: raise ValueError('``space.ndim`` must be 2 or 3.')
[ "def", "parallel_beam_geometry", "(", "space", ",", "num_angles", "=", "None", ",", "det_shape", "=", "None", ")", ":", "# Find maximum distance from rotation axis", "corners", "=", "space", ".", "domain", ".", "corners", "(", ")", "[", ":", ",", ":", "2", "]", "rho", "=", "np", ".", "max", "(", "np", ".", "linalg", ".", "norm", "(", "corners", ",", "axis", "=", "1", ")", ")", "# Find default values according to Nyquist criterion.", "# We assume that the function is bandlimited by a wave along the x or y", "# axis. The highest frequency we can measure is then a standing wave with", "# period of twice the inter-node distance.", "min_side", "=", "min", "(", "space", ".", "partition", ".", "cell_sides", "[", ":", "2", "]", ")", "omega", "=", "np", ".", "pi", "/", "min_side", "num_px_horiz", "=", "2", "*", "int", "(", "np", ".", "ceil", "(", "rho", "*", "omega", "/", "np", ".", "pi", ")", ")", "+", "1", "if", "space", ".", "ndim", "==", "2", ":", "det_min_pt", "=", "-", "rho", "det_max_pt", "=", "rho", "if", "det_shape", "is", "None", ":", "det_shape", "=", "num_px_horiz", "elif", "space", ".", "ndim", "==", "3", ":", "num_px_vert", "=", "space", ".", "shape", "[", "2", "]", "min_h", "=", "space", ".", "domain", ".", "min_pt", "[", "2", "]", "max_h", "=", "space", ".", "domain", ".", "max_pt", "[", "2", "]", "det_min_pt", "=", "[", "-", "rho", ",", "min_h", "]", "det_max_pt", "=", "[", "rho", ",", "max_h", "]", "if", "det_shape", "is", "None", ":", "det_shape", "=", "[", "num_px_horiz", ",", "num_px_vert", "]", "if", "num_angles", "is", "None", ":", "num_angles", "=", "int", "(", "np", ".", "ceil", "(", "omega", "*", "rho", ")", ")", "angle_partition", "=", "uniform_partition", "(", "0", ",", "np", ".", "pi", ",", "num_angles", ")", "det_partition", "=", "uniform_partition", "(", "det_min_pt", ",", "det_max_pt", ",", "det_shape", ")", "if", "space", ".", "ndim", "==", "2", ":", "return", "Parallel2dGeometry", "(", "angle_partition", ",", "det_partition", ")", "elif", "space", ".", "ndim", "==", "3", ":", "return", "Parallel3dAxisGeometry", "(", "angle_partition", ",", "det_partition", ")", "else", ":", "raise", "ValueError", "(", "'``space.ndim`` must be 2 or 3.'", ")" ]
r"""Create default parallel beam geometry from ``space``. This is intended for simple test cases where users do not need the full flexibility of the geometries, but simply want a geometry that works. This default geometry gives a fully sampled sinogram according to the Nyquist criterion, which in general results in a very large number of samples. In particular, a ``space`` that is not centered at the origin can result in very large detectors. Parameters ---------- space : `DiscreteLp` Reconstruction space, the space of the volumetric data to be projected. Needs to be 2d or 3d. num_angles : int, optional Number of angles. Default: Enough to fully sample the data, see Notes. det_shape : int or sequence of int, optional Number of detector pixels. Default: Enough to fully sample the data, see Notes. Returns ------- geometry : `ParallelBeamGeometry` If ``space`` is 2d, return a `Parallel2dGeometry`. If ``space`` is 3d, return a `Parallel3dAxisGeometry`. Examples -------- Create a parallel beam geometry from a 2d space: >>> space = odl.uniform_discr([-1, -1], [1, 1], (20, 20)) >>> geometry = parallel_beam_geometry(space) >>> geometry.angles.size 45 >>> geometry.detector.size 31 Notes ----- According to [NW2001]_, pages 72--74, a function :math:`f : \mathbb{R}^2 \to \mathbb{R}` that has compact support .. math:: \| x \| > \rho \implies f(x) = 0, and is essentially bandlimited .. math:: \| \xi \| > \Omega \implies \hat{f}(\xi) \approx 0, can be fully reconstructed from a parallel beam ray transform if (1) the projection angles are sampled with a spacing of :math:`\Delta \psi` such that .. math:: \Delta \psi \leq \frac{\pi}{\rho \Omega}, and (2) the detector is sampled with an interval :math:`\Delta s` that satisfies .. math:: \Delta s \leq \frac{\pi}{\Omega}. The geometry returned by this function satisfies these conditions exactly. If the domain is 3-dimensional, the geometry is "separable", in that each slice along the z-dimension of the data is treated as independed 2d data. References ---------- .. [NW2001] Natterer, F and Wuebbeling, F. *Mathematical Methods in Image Reconstruction*. SIAM, 2001. https://dx.doi.org/10.1137/1.9780898718324
[ "r", "Create", "default", "parallel", "beam", "geometry", "from", "space", "." ]
python
train
33.222222
SpheMakh/Stimela
stimela/__init__.py
https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/__init__.py#L194-L203
def info(cabdir, header=False): """ prints out help information about a cab """ # First check if cab exists pfile = "{}/parameters.json".format(cabdir) if not os.path.exists(pfile): raise RuntimeError("Cab could not be found at : {}".format(cabdir)) # Get cab info cab_definition = cab.CabDefinition(parameter_file=pfile) cab_definition.display(header)
[ "def", "info", "(", "cabdir", ",", "header", "=", "False", ")", ":", "# First check if cab exists", "pfile", "=", "\"{}/parameters.json\"", ".", "format", "(", "cabdir", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "pfile", ")", ":", "raise", "RuntimeError", "(", "\"Cab could not be found at : {}\"", ".", "format", "(", "cabdir", ")", ")", "# Get cab info", "cab_definition", "=", "cab", ".", "CabDefinition", "(", "parameter_file", "=", "pfile", ")", "cab_definition", ".", "display", "(", "header", ")" ]
prints out help information about a cab
[ "prints", "out", "help", "information", "about", "a", "cab" ]
python
train
38
meejah/txtorcon
txtorcon/controller.py
https://github.com/meejah/txtorcon/blob/14053b95adf0b4bd9dd9c317bece912a26578a93/txtorcon/controller.py#L1330-L1349
def processEnded(self, status): """ :api:`twisted.internet.protocol.ProcessProtocol <ProcessProtocol>` API """ self.cleanup() if status.value.exitCode is None: if self._did_timeout: err = RuntimeError("Timeout waiting for Tor launch.") else: err = RuntimeError( "Tor was killed (%s)." % status.value.signal) else: err = RuntimeError( "Tor exited with error-code %d" % status.value.exitCode) # hmmm, this log() should probably go away...not always an # error (e.g. .quit() log.err(err) self._maybe_notify_connected(Failure(err))
[ "def", "processEnded", "(", "self", ",", "status", ")", ":", "self", ".", "cleanup", "(", ")", "if", "status", ".", "value", ".", "exitCode", "is", "None", ":", "if", "self", ".", "_did_timeout", ":", "err", "=", "RuntimeError", "(", "\"Timeout waiting for Tor launch.\"", ")", "else", ":", "err", "=", "RuntimeError", "(", "\"Tor was killed (%s).\"", "%", "status", ".", "value", ".", "signal", ")", "else", ":", "err", "=", "RuntimeError", "(", "\"Tor exited with error-code %d\"", "%", "status", ".", "value", ".", "exitCode", ")", "# hmmm, this log() should probably go away...not always an", "# error (e.g. .quit()", "log", ".", "err", "(", "err", ")", "self", ".", "_maybe_notify_connected", "(", "Failure", "(", "err", ")", ")" ]
:api:`twisted.internet.protocol.ProcessProtocol <ProcessProtocol>` API
[ ":", "api", ":", "twisted", ".", "internet", ".", "protocol", ".", "ProcessProtocol", "<ProcessProtocol", ">", "API" ]
python
train
34.7
senseobservationsystems/commonsense-python-lib
senseapi.py
https://github.com/senseobservationsystems/commonsense-python-lib/blob/aac59a1751ef79eb830b3ca1fab6ef2c83931f87/senseapi.py#L1704-L1723
def DomainsGet(self, parameters = None, domain_id = -1):
        """
            This method returns the domains of the current user.
            The list also contains the domains to which the user has not yet been accepted.

            @param parameters (dictionary) - Dictionary containing the parameters of the request.

            @return (bool) - Boolean indicating whether DomainsGet was successful.
        """
        url = ''
        if parameters is None and domain_id != -1:
            url = '/domains/{0}.json'.format(domain_id)
        else:
            url = '/domains.json'

        if self.__SenseApiCall__(url, 'GET', parameters = parameters):
            return True
        else:
            self.__error__ = "api call unsuccessful"
            return False
python
train
41.45
datastore/datastore
datastore/core/basic.py
https://github.com/datastore/datastore/blob/7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3/datastore/core/basic.py#L398-L403
def contains(self, key): '''Returns whether the object named by `key` exists. First checks ``cache_datastore``. ''' return self.cache_datastore.contains(key) \ or self.child_datastore.contains(key)
[ "def", "contains", "(", "self", ",", "key", ")", ":", "return", "self", ".", "cache_datastore", ".", "contains", "(", "key", ")", "or", "self", ".", "child_datastore", ".", "contains", "(", "key", ")" ]
Returns whether the object named by `key` exists. First checks ``cache_datastore``.
[ "Returns", "whether", "the", "object", "named", "by", "key", "exists", ".", "First", "checks", "cache_datastore", "." ]
python
train
36.5
Microsoft/nni
tools/nni_cmd/nnictl_utils.py
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/tools/nni_cmd/nnictl_utils.py#L364-L387
def experiment_list(args): '''get the information of all experiments''' experiment_config = Experiments() experiment_dict = experiment_config.get_all_experiments() if not experiment_dict: print('There is no experiment running...') exit(1) update_experiment() experiment_id_list = [] if args.all and args.all == 'all': for key in experiment_dict.keys(): experiment_id_list.append(key) else: for key in experiment_dict.keys(): if experiment_dict[key]['status'] != 'STOPPED': experiment_id_list.append(key) if not experiment_id_list: print_warning('There is no experiment running...\nYou can use \'nnictl experiment list all\' to list all stopped experiments!') experiment_information = "" for key in experiment_id_list: experiment_information += (EXPERIMENT_DETAIL_FORMAT % (key, experiment_dict[key]['status'], experiment_dict[key]['port'],\ experiment_dict[key].get('platform'), experiment_dict[key]['startTime'], experiment_dict[key]['endTime'])) print(EXPERIMENT_INFORMATION_FORMAT % experiment_information)
[ "def", "experiment_list", "(", "args", ")", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "if", "not", "experiment_dict", ":", "print", "(", "'There is no experiment running...'", ")", "exit", "(", "1", ")", "update_experiment", "(", ")", "experiment_id_list", "=", "[", "]", "if", "args", ".", "all", "and", "args", ".", "all", "==", "'all'", ":", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "experiment_id_list", ".", "append", "(", "key", ")", "else", ":", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "if", "experiment_dict", "[", "key", "]", "[", "'status'", "]", "!=", "'STOPPED'", ":", "experiment_id_list", ".", "append", "(", "key", ")", "if", "not", "experiment_id_list", ":", "print_warning", "(", "'There is no experiment running...\\nYou can use \\'nnictl experiment list all\\' to list all stopped experiments!'", ")", "experiment_information", "=", "\"\"", "for", "key", "in", "experiment_id_list", ":", "experiment_information", "+=", "(", "EXPERIMENT_DETAIL_FORMAT", "%", "(", "key", ",", "experiment_dict", "[", "key", "]", "[", "'status'", "]", ",", "experiment_dict", "[", "key", "]", "[", "'port'", "]", ",", "experiment_dict", "[", "key", "]", ".", "get", "(", "'platform'", ")", ",", "experiment_dict", "[", "key", "]", "[", "'startTime'", "]", ",", "experiment_dict", "[", "key", "]", "[", "'endTime'", "]", ")", ")", "print", "(", "EXPERIMENT_INFORMATION_FORMAT", "%", "experiment_information", ")" ]
get the information of all experiments
[ "get", "the", "information", "of", "all", "experiments" ]
python
train
47.791667
uktrade/directory-validators
directory_validators/company.py
https://github.com/uktrade/directory-validators/blob/e01f9d2aec683e34d978e4f67ed383ea2f9b85a0/directory_validators/company.py#L45-L57
def keywords_special_characters(keywords): """ Confirms that the keywords don't contain special characters Args: keywords (str) Raises: django.forms.ValidationError """ invalid_chars = '!\"#$%&\'()*+-./:;<=>?@[\\]^_{|}~\t\n' if any(char in invalid_chars for char in keywords): raise ValidationError(MESSAGE_KEYWORD_SPECIAL_CHARS)
[ "def", "keywords_special_characters", "(", "keywords", ")", ":", "invalid_chars", "=", "'!\\\"#$%&\\'()*+-./:;<=>?@[\\\\]^_{|}~\\t\\n'", "if", "any", "(", "char", "in", "invalid_chars", "for", "char", "in", "keywords", ")", ":", "raise", "ValidationError", "(", "MESSAGE_KEYWORD_SPECIAL_CHARS", ")" ]
Confirms that the keywords don't contain special characters Args: keywords (str) Raises: django.forms.ValidationError
[ "Confirms", "that", "the", "keywords", "don", "t", "contain", "special", "characters" ]
python
train
28.538462
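A usage sketch for the validator above; it assumes Django is configured far enough for ValidationError to be raised, and the keyword strings are made up:

from django.forms import ValidationError
from directory_validators.company import keywords_special_characters

keywords_special_characters("shoes handbags accessories")  # passes silently

try:
    keywords_special_characters("web-design!")  # '-' and '!' are in the blocked character set
except ValidationError as exc:
    print(exc.messages)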
KE-works/pykechain
pykechain/models/scope.py
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L56-L62
def team(self): """Team to which the scope is assigned.""" team_dict = self._json_data.get('team') if team_dict and team_dict.get('id'): return self._client.team(id=team_dict.get('id')) else: return None
[ "def", "team", "(", "self", ")", ":", "team_dict", "=", "self", ".", "_json_data", ".", "get", "(", "'team'", ")", "if", "team_dict", "and", "team_dict", ".", "get", "(", "'id'", ")", ":", "return", "self", ".", "_client", ".", "team", "(", "id", "=", "team_dict", ".", "get", "(", "'id'", ")", ")", "else", ":", "return", "None" ]
Team to which the scope is assigned.
[ "Team", "to", "which", "the", "scope", "is", "assigned", "." ]
python
train
36.142857
saltstack/salt
salt/scripts.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/scripts.py#L442-L451
def salt_run(): ''' Execute a salt convenience routine. ''' import salt.cli.run if '' in sys.path: sys.path.remove('') client = salt.cli.run.SaltRun() _install_signal_handlers(client) client.run()
[ "def", "salt_run", "(", ")", ":", "import", "salt", ".", "cli", ".", "run", "if", "''", "in", "sys", ".", "path", ":", "sys", ".", "path", ".", "remove", "(", "''", ")", "client", "=", "salt", ".", "cli", ".", "run", ".", "SaltRun", "(", ")", "_install_signal_handlers", "(", "client", ")", "client", ".", "run", "(", ")" ]
Execute a salt convenience routine.
[ "Execute", "a", "salt", "convenience", "routine", "." ]
python
train
22.7
LLNL/scraper
scraper/doecode/__init__.py
https://github.com/LLNL/scraper/blob/881a316e4c04dfa5a9cf491b7c7f9f997a7c56ea/scraper/doecode/__init__.py#L41-L54
def process(filename=None, url=None, key=None):
    """
    Yields DOE CODE records based on provided input sources

    param:
        filename (str): Path to a DOE CODE .json file
        url (str): URL for a DOE CODE server json file
        key (str): API Key for connecting to DOE CODE server
    """
    if filename is not None:
        yield from process_json(filename)

    elif url and key:
        yield from process_url(url, key)
[ "def", "process", "(", "filename", "=", "None", ",", "url", "=", "None", ",", "key", "=", "None", ")", ":", "if", "filename", "is", "not", "None", ":", "yield", "from", "process_json", "(", "filename", ")", "elif", "url", "and", "key", ":", "yield", "from", "process_url", "(", "url", ",", "key", ")" ]
Yeilds DOE CODE records based on provided input sources param: filename (str): Path to a DOE CODE .json file url (str): URL for a DOE CODE server json file key (str): API Key for connecting to DOE CODE server
[ "Yeilds", "DOE", "CODE", "records", "based", "on", "provided", "input", "sources" ]
python
test
30.5
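As a hedged illustration of how the process generator above is typically consumed (the file name, URL, and key below are placeholders, not values from the record):

from scraper.doecode import process

# Read records from a local DOE CODE JSON export.
for record in process(filename="doecode_records.json"):
    print(record)

# Or pull records from a DOE CODE server; both url and key must be supplied.
# for record in process(url="https://example.gov/doecode/api", key="MY-API-KEY"):
#     print(record)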
bspaans/python-mingus
mingus/midi/midi_track.py
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/midi/midi_track.py#L77-L88
def play_NoteContainer(self, notecontainer):
    """Convert a mingus.containers.NoteContainer to the equivalent MIDI
    events and add it to the track_data.

    Note.channel and Note.velocity can be set as well.
    """
    if len(notecontainer) <= 1:
        [self.play_Note(x) for x in notecontainer]
    else:
        self.play_Note(notecontainer[0])
        self.set_deltatime(0)
        [self.play_Note(x) for x in notecontainer[1:]]
[ "def", "play_NoteContainer", "(", "self", ",", "notecontainer", ")", ":", "if", "len", "(", "notecontainer", ")", "<=", "1", ":", "[", "self", ".", "play_Note", "(", "x", ")", "for", "x", "in", "notecontainer", "]", "else", ":", "self", ".", "play_Note", "(", "notecontainer", "[", "0", "]", ")", "self", ".", "set_deltatime", "(", "0", ")", "[", "self", ".", "play_Note", "(", "x", ")", "for", "x", "in", "notecontainer", "[", "1", ":", "]", "]" ]
Convert a mingus.containers.NoteContainer to the equivalent MIDI events and add it to the track_data. Note.channel and Note.velocity can be set as well.
[ "Convert", "a", "mingus", ".", "containers", ".", "NoteContainer", "to", "the", "equivalent", "MIDI", "events", "and", "add", "it", "to", "the", "track_data", "." ]
python
train
39.083333
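A short, non-authoritative sketch of how play_NoteContainer is normally called, assuming the MidiTrack class defined in this module and NoteContainer from mingus.containers:

from mingus.containers import NoteContainer
from mingus.midi.midi_track import MidiTrack

track = MidiTrack()
# All notes after the first are written with a delta time of 0,
# so the chord sounds simultaneously.
track.play_NoteContainer(NoteContainer(["C-4", "E-4", "G-4"]))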
saltstack/salt
salt/cloud/clouds/ec2.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/cloud/clouds/ec2.py#L2178-L2304
def query_instance(vm_=None, call=None): ''' Query an instance upon creation from the EC2 API ''' if call == 'function': # Technically this function may be called other ways too, but it # definitely cannot be called with --function. raise SaltCloudSystemExit( 'The query_instance action must be called with -a or --action.' ) instance_id = vm_['instance_id'] location = vm_.get('location', get_location(vm_)) __utils__['cloud.fire_event']( 'event', 'querying instance', 'salt/cloud/{0}/querying'.format(vm_['name']), args={'instance_id': instance_id}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'] ) log.debug('The new VM instance_id is %s', instance_id) params = {'Action': 'DescribeInstances', 'InstanceId.1': instance_id} provider = get_provider(vm_) attempts = 0 while attempts < aws.AWS_MAX_RETRIES: data, requesturl = aws.query(params, # pylint: disable=unbalanced-tuple-unpacking location=location, provider=provider, opts=__opts__, return_url=True, sigver='4') log.debug('The query returned: %s', data) if isinstance(data, dict) and 'error' in data: log.warning( 'There was an error in the query. %s attempts ' 'remaining: %s', attempts, data['error'] ) elif isinstance(data, list) and not data: log.warning( 'Query returned an empty list. %s attempts ' 'remaining.', attempts ) else: break aws.sleep_exponential_backoff(attempts) attempts += 1 continue else: raise SaltCloudSystemExit( 'An error occurred while creating VM: {0}'.format(data['error']) ) def __query_ip_address(params, url): # pylint: disable=W0613 data = aws.query(params, location=location, provider=provider, opts=__opts__, sigver='4') if not data: log.error( 'There was an error while querying EC2. Empty response' ) # Trigger a failure in the wait for IP function return False if isinstance(data, dict) and 'error' in data: log.warning('There was an error in the query. %s', data['error']) # Trigger a failure in the wait for IP function return False log.debug('Returned query data: %s', data) if ssh_interface(vm_) == 'public_ips': if 'ipAddress' in data[0]['instancesSet']['item']: return data else: log.error( 'Public IP not detected.' ) if ssh_interface(vm_) == 'private_ips': if 'privateIpAddress' in data[0]['instancesSet']['item']: return data else: log.error( 'Private IP not detected.' ) try: data = salt.utils.cloud.wait_for_ip( __query_ip_address, update_args=(params, requesturl), timeout=config.get_cloud_config_value( 'wait_for_ip_timeout', vm_, __opts__, default=10 * 60), interval=config.get_cloud_config_value( 'wait_for_ip_interval', vm_, __opts__, default=10), interval_multiplier=config.get_cloud_config_value( 'wait_for_ip_interval_multiplier', vm_, __opts__, default=1), ) except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc: try: # It might be already up, let's destroy it! destroy(vm_['name']) except SaltCloudSystemExit: pass finally: raise SaltCloudSystemExit(six.text_type(exc)) if 'reactor' in vm_ and vm_['reactor'] is True: __utils__['cloud.fire_event']( 'event', 'instance queried', 'salt/cloud/{0}/query_reactor'.format(vm_['name']), args={'data': data}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'] ) return data
[ "def", "query_instance", "(", "vm_", "=", "None", ",", "call", "=", "None", ")", ":", "if", "call", "==", "'function'", ":", "# Technically this function may be called other ways too, but it", "# definitely cannot be called with --function.", "raise", "SaltCloudSystemExit", "(", "'The query_instance action must be called with -a or --action.'", ")", "instance_id", "=", "vm_", "[", "'instance_id'", "]", "location", "=", "vm_", ".", "get", "(", "'location'", ",", "get_location", "(", "vm_", ")", ")", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'querying instance'", ",", "'salt/cloud/{0}/querying'", ".", "format", "(", "vm_", "[", "'name'", "]", ")", ",", "args", "=", "{", "'instance_id'", ":", "instance_id", "}", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "log", ".", "debug", "(", "'The new VM instance_id is %s'", ",", "instance_id", ")", "params", "=", "{", "'Action'", ":", "'DescribeInstances'", ",", "'InstanceId.1'", ":", "instance_id", "}", "provider", "=", "get_provider", "(", "vm_", ")", "attempts", "=", "0", "while", "attempts", "<", "aws", ".", "AWS_MAX_RETRIES", ":", "data", ",", "requesturl", "=", "aws", ".", "query", "(", "params", ",", "# pylint: disable=unbalanced-tuple-unpacking", "location", "=", "location", ",", "provider", "=", "provider", ",", "opts", "=", "__opts__", ",", "return_url", "=", "True", ",", "sigver", "=", "'4'", ")", "log", ".", "debug", "(", "'The query returned: %s'", ",", "data", ")", "if", "isinstance", "(", "data", ",", "dict", ")", "and", "'error'", "in", "data", ":", "log", ".", "warning", "(", "'There was an error in the query. %s attempts '", "'remaining: %s'", ",", "attempts", ",", "data", "[", "'error'", "]", ")", "elif", "isinstance", "(", "data", ",", "list", ")", "and", "not", "data", ":", "log", ".", "warning", "(", "'Query returned an empty list. %s attempts '", "'remaining.'", ",", "attempts", ")", "else", ":", "break", "aws", ".", "sleep_exponential_backoff", "(", "attempts", ")", "attempts", "+=", "1", "continue", "else", ":", "raise", "SaltCloudSystemExit", "(", "'An error occurred while creating VM: {0}'", ".", "format", "(", "data", "[", "'error'", "]", ")", ")", "def", "__query_ip_address", "(", "params", ",", "url", ")", ":", "# pylint: disable=W0613", "data", "=", "aws", ".", "query", "(", "params", ",", "location", "=", "location", ",", "provider", "=", "provider", ",", "opts", "=", "__opts__", ",", "sigver", "=", "'4'", ")", "if", "not", "data", ":", "log", ".", "error", "(", "'There was an error while querying EC2. Empty response'", ")", "# Trigger a failure in the wait for IP function", "return", "False", "if", "isinstance", "(", "data", ",", "dict", ")", "and", "'error'", "in", "data", ":", "log", ".", "warning", "(", "'There was an error in the query. 
%s'", ",", "data", "[", "'error'", "]", ")", "# Trigger a failure in the wait for IP function", "return", "False", "log", ".", "debug", "(", "'Returned query data: %s'", ",", "data", ")", "if", "ssh_interface", "(", "vm_", ")", "==", "'public_ips'", ":", "if", "'ipAddress'", "in", "data", "[", "0", "]", "[", "'instancesSet'", "]", "[", "'item'", "]", ":", "return", "data", "else", ":", "log", ".", "error", "(", "'Public IP not detected.'", ")", "if", "ssh_interface", "(", "vm_", ")", "==", "'private_ips'", ":", "if", "'privateIpAddress'", "in", "data", "[", "0", "]", "[", "'instancesSet'", "]", "[", "'item'", "]", ":", "return", "data", "else", ":", "log", ".", "error", "(", "'Private IP not detected.'", ")", "try", ":", "data", "=", "salt", ".", "utils", ".", "cloud", ".", "wait_for_ip", "(", "__query_ip_address", ",", "update_args", "=", "(", "params", ",", "requesturl", ")", ",", "timeout", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_timeout'", ",", "vm_", ",", "__opts__", ",", "default", "=", "10", "*", "60", ")", ",", "interval", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_interval'", ",", "vm_", ",", "__opts__", ",", "default", "=", "10", ")", ",", "interval_multiplier", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_interval_multiplier'", ",", "vm_", ",", "__opts__", ",", "default", "=", "1", ")", ",", ")", "except", "(", "SaltCloudExecutionTimeout", ",", "SaltCloudExecutionFailure", ")", "as", "exc", ":", "try", ":", "# It might be already up, let's destroy it!", "destroy", "(", "vm_", "[", "'name'", "]", ")", "except", "SaltCloudSystemExit", ":", "pass", "finally", ":", "raise", "SaltCloudSystemExit", "(", "six", ".", "text_type", "(", "exc", ")", ")", "if", "'reactor'", "in", "vm_", "and", "vm_", "[", "'reactor'", "]", "is", "True", ":", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'instance queried'", ",", "'salt/cloud/{0}/query_reactor'", ".", "format", "(", "vm_", "[", "'name'", "]", ")", ",", "args", "=", "{", "'data'", ":", "data", "}", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "return", "data" ]
Query an instance upon creation from the EC2 API
[ "Query", "an", "instance", "upon", "creation", "from", "the", "EC2", "API" ]
python
train
34.488189
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cvf.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cvf.py#L36-L49
def generate(env):
    """Add Builders and construction variables for compaq visual fortran to an Environment."""
    fortran.generate(env)

    env['FORTRAN'] = 'f90'
    env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
    env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
    env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
    env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'

    env['OBJSUFFIX'] = '.obj'
    env['FORTRANMODDIR'] = '${TARGET.dir}'
    env['FORTRANMODDIRPREFIX'] = '/module:'
    env['FORTRANMODDIRSUFFIX'] = ''
[ "def", "generate", "(", "env", ")", ":", "fortran", ".", "generate", "(", "env", ")", "env", "[", "'FORTRAN'", "]", "=", "'f90'", "env", "[", "'FORTRANCOM'", "]", "=", "'$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'", "env", "[", "'FORTRANPPCOM'", "]", "=", "'$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'", "env", "[", "'SHFORTRANCOM'", "]", "=", "'$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'", "env", "[", "'SHFORTRANPPCOM'", "]", "=", "'$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'", "env", "[", "'OBJSUFFIX'", "]", "=", "'.obj'", "env", "[", "'FORTRANMODDIR'", "]", "=", "'${TARGET.dir}'", "env", "[", "'FORTRANMODDIRPREFIX'", "]", "=", "'/module:'", "env", "[", "'FORTRANMODDIRSUFFIX'", "]", "=", "''" ]
Add Builders and construction variables for compaq visual fortran to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "compaq", "visual", "fortran", "to", "an", "Environment", "." ]
python
train
68.857143
NASA-AMMOS/AIT-Core
ait/core/bsc.py
https://github.com/NASA-AMMOS/AIT-Core/blob/9d85bd9c738e7a6a6fbdff672bea708238b02a3a/ait/core/bsc.py#L350-L377
def _should_rotate_log(self, handler):
    ''' Determine if a log file rotation is necessary '''
    if handler['rotate_log']:
        rotate_time_index = handler.get('rotate_log_index', 'day')
        try:
            rotate_time_index = self._decode_time_rotation_index(rotate_time_index)
        except ValueError:
            rotate_time_index = 2

        rotate_time_delta = handler.get('rotate_log_delta', 1)

        cur_t = time.gmtime()
        first_different_index = 9
        for i in range(9):
            if cur_t[i] != handler['log_rot_time'][i]:
                first_different_index = i
                break

        if first_different_index < rotate_time_index:
            # If the time deltas differ by a time step greater than what we
            # have set for the rotation (I.e., months instead of days) we will
            # automatically rotate.
            return True
        else:
            time_delta = cur_t[rotate_time_index] - handler['log_rot_time'][rotate_time_index]
            return time_delta >= rotate_time_delta

    return False
[ "def", "_should_rotate_log", "(", "self", ",", "handler", ")", ":", "if", "handler", "[", "'rotate_log'", "]", ":", "rotate_time_index", "=", "handler", ".", "get", "(", "'rotate_log_index'", ",", "'day'", ")", "try", ":", "rotate_time_index", "=", "self", ".", "_decode_time_rotation_index", "(", "rotate_time_index", ")", "except", "ValueError", ":", "rotate_time_index", "=", "2", "rotate_time_delta", "=", "handler", ".", "get", "(", "'rotate_log_delta'", ",", "1", ")", "cur_t", "=", "time", ".", "gmtime", "(", ")", "first_different_index", "=", "9", "for", "i", "in", "range", "(", "9", ")", ":", "if", "cur_t", "[", "i", "]", "!=", "handler", "[", "'log_rot_time'", "]", "[", "i", "]", ":", "first_different_index", "=", "i", "break", "if", "first_different_index", "<", "rotate_time_index", ":", "# If the time deltas differ by a time step greater than what we", "# have set for the rotation (I.e., months instead of days) we will", "# automatically rotate.", "return", "True", "else", ":", "time_delta", "=", "cur_t", "[", "rotate_time_index", "]", "-", "handler", "[", "'log_rot_time'", "]", "[", "rotate_time_index", "]", "return", "time_delta", ">=", "rotate_time_delta", "return", "False" ]
Determine if a log file rotation is necessary
[ "Determine", "if", "a", "log", "file", "rotation", "is", "necessary" ]
python
train
40.678571
decryptus/sonicprobe
sonicprobe/validator/country.py
https://github.com/decryptus/sonicprobe/blob/72f73f3a40d2982d79ad68686e36aa31d94b76f8/sonicprobe/validator/country.py#L277-L287
def validate(value):
    """
    checks if given value is a valid country codes

    @param string value

    @return bool
    """
    if not helpers.has_len(value):
        return False

    return COUNTRIES.has_key(str(value).lower())
[ "def", "validate", "(", "value", ")", ":", "if", "not", "helpers", ".", "has_len", "(", "value", ")", ":", "return", "False", "return", "COUNTRIES", ".", "has_key", "(", "str", "(", "value", ")", ".", "lower", "(", ")", ")" ]
checks if given value is a valid country codes @param string value @return bool
[ "checks", "if", "given", "value", "is", "a", "valid", "country", "codes" ]
python
train
23.363636
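A minimal usage sketch for the country-code validator above. In the actual file the function may be a method of a validator class rather than a module-level function, so the import below is an assumption; the lookups themselves follow directly from the code (values are lowercased and compared against the keys of COUNTRIES):

from sonicprobe.validator.country import validate

print(validate("FR"))    # True if 'fr' is a key of COUNTRIES
print(validate("zz"))    # False for an unknown code
print(validate(None))    # False: the value has no length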
spencerahill/aospy
aospy/utils/vertcoord.py
https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/vertcoord.py#L92-L94
def pfull_from_ps(bk, pk, ps, pfull_coord):
    """Compute pressure at full levels from surface pressure."""
    return to_pfull_from_phalf(phalf_from_ps(bk, pk, ps), pfull_coord)
[ "def", "pfull_from_ps", "(", "bk", ",", "pk", ",", "ps", ",", "pfull_coord", ")", ":", "return", "to_pfull_from_phalf", "(", "phalf_from_ps", "(", "bk", ",", "pk", ",", "ps", ")", ",", "pfull_coord", ")" ]
Compute pressure at full levels from surface pressure.
[ "Compute", "pressure", "at", "full", "levels", "from", "surface", "pressure", "." ]
python
train
59
gestaolivre/brazil-types
brazil_types/types.py
https://github.com/gestaolivre/brazil-types/blob/13db32c17d66d85166a2459f6b3d6fcff2c2b954/brazil_types/types.py#L290-L314
def validate(cls, cpf):
    u"""
    Válida o CPF.

    >>> CPF.validate(58119443659)
    True
    >>> CPF.validate(58119443650)
    False
    >>> CPF.validate('58119443659')
    True
    >>> CPF.validate('581.194.436-59')
    True
    """
    if cpf is None:
        return False
    cpf = CPF.clean(cpf)

    def mod11(value):
        return (value % 11) % 10

    dig1 = mod11(sum([(i + 1) * int(cpf[i]) for i in range(0, 9)]))
    dig2 = mod11(sum([i * int(cpf[i]) for i in range(1, 10)]))
    return cpf[-2:] == '{0}{1}'.format(dig1, dig2)
[ "def", "validate", "(", "cls", ",", "cpf", ")", ":", "if", "cpf", "is", "None", ":", "return", "False", "cpf", "=", "CPF", ".", "clean", "(", "cpf", ")", "def", "mod11", "(", "value", ")", ":", "return", "(", "value", "%", "11", ")", "%", "10", "dig1", "=", "mod11", "(", "sum", "(", "[", "(", "i", "+", "1", ")", "*", "int", "(", "cpf", "[", "i", "]", ")", "for", "i", "in", "range", "(", "0", ",", "9", ")", "]", ")", ")", "dig2", "=", "mod11", "(", "sum", "(", "[", "i", "*", "int", "(", "cpf", "[", "i", "]", ")", "for", "i", "in", "range", "(", "1", ",", "10", ")", "]", ")", ")", "return", "cpf", "[", "-", "2", ":", "]", "==", "'{0}{1}'", ".", "format", "(", "dig1", ",", "dig2", ")" ]
u""" Válida o CPF. >>> CPF.validate(58119443659) True >>> CPF.validate(58119443650) False >>> CPF.validate('58119443659') True >>> CPF.validate('581.194.436-59') True
[ "u", "Válida", "o", "CPF", "." ]
python
train
23.92
tensorflow/tensor2tensor
tensor2tensor/layers/discretization.py
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/layers/discretization.py#L1374-L1390
def tanh_discrete_bottleneck(x, bottleneck_bits, bottleneck_noise,
                             discretize_warmup_steps, mode):
    """Simple discretization through tanh, flip bottleneck_noise many bits."""
    x = tf.layers.dense(x, bottleneck_bits, name="tanh_discrete_bottleneck")
    d0 = tf.stop_gradient(2.0 * tf.to_float(tf.less(0.0, x))) - 1.0
    if mode == tf.estimator.ModeKeys.TRAIN:
        x += tf.truncated_normal(
            common_layers.shape_list(x), mean=0.0, stddev=0.2)
    x = tf.tanh(x)
    d = x + tf.stop_gradient(2.0 * tf.to_float(tf.less(0.0, x)) - 1.0 - x)
    if mode == tf.estimator.ModeKeys.TRAIN:
        noise = tf.random_uniform(common_layers.shape_list(x))
        noise = 2.0 * tf.to_float(tf.less(bottleneck_noise, noise)) - 1.0
        d *= noise
    d = common_layers.mix(d, x, discretize_warmup_steps,
                          mode == tf.estimator.ModeKeys.TRAIN)
    return d, d0
[ "def", "tanh_discrete_bottleneck", "(", "x", ",", "bottleneck_bits", ",", "bottleneck_noise", ",", "discretize_warmup_steps", ",", "mode", ")", ":", "x", "=", "tf", ".", "layers", ".", "dense", "(", "x", ",", "bottleneck_bits", ",", "name", "=", "\"tanh_discrete_bottleneck\"", ")", "d0", "=", "tf", ".", "stop_gradient", "(", "2.0", "*", "tf", ".", "to_float", "(", "tf", ".", "less", "(", "0.0", ",", "x", ")", ")", ")", "-", "1.0", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "TRAIN", ":", "x", "+=", "tf", ".", "truncated_normal", "(", "common_layers", ".", "shape_list", "(", "x", ")", ",", "mean", "=", "0.0", ",", "stddev", "=", "0.2", ")", "x", "=", "tf", ".", "tanh", "(", "x", ")", "d", "=", "x", "+", "tf", ".", "stop_gradient", "(", "2.0", "*", "tf", ".", "to_float", "(", "tf", ".", "less", "(", "0.0", ",", "x", ")", ")", "-", "1.0", "-", "x", ")", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "TRAIN", ":", "noise", "=", "tf", ".", "random_uniform", "(", "common_layers", ".", "shape_list", "(", "x", ")", ")", "noise", "=", "2.0", "*", "tf", ".", "to_float", "(", "tf", ".", "less", "(", "bottleneck_noise", ",", "noise", ")", ")", "-", "1.0", "d", "*=", "noise", "d", "=", "common_layers", ".", "mix", "(", "d", ",", "x", ",", "discretize_warmup_steps", ",", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "TRAIN", ")", "return", "d", ",", "d0" ]
Simple discretization through tanh, flip bottleneck_noise many bits.
[ "Simple", "discretization", "through", "tanh", "flip", "bottleneck_noise", "many", "bits", "." ]
python
train
51
Diaoul/subliminal
subliminal/extensions.py
https://github.com/Diaoul/subliminal/blob/a952dfb2032eb0fd6eb1eb89f04080923c11c4cf/subliminal/extensions.py#L70-L86
def unregister(self, entry_point):
    """Unregister a provider

    :param str entry_point: provider to unregister (entry point syntax).

    """
    if entry_point not in self.registered_extensions:
        raise ValueError('Extension not registered')

    ep = EntryPoint.parse(entry_point)
    self.registered_extensions.remove(entry_point)
    if self._extensions_by_name is not None:
        del self._extensions_by_name[ep.name]
    for i, ext in enumerate(self.extensions):
        if ext.name == ep.name:
            del self.extensions[i]
            break
[ "def", "unregister", "(", "self", ",", "entry_point", ")", ":", "if", "entry_point", "not", "in", "self", ".", "registered_extensions", ":", "raise", "ValueError", "(", "'Extension not registered'", ")", "ep", "=", "EntryPoint", ".", "parse", "(", "entry_point", ")", "self", ".", "registered_extensions", ".", "remove", "(", "entry_point", ")", "if", "self", ".", "_extensions_by_name", "is", "not", "None", ":", "del", "self", ".", "_extensions_by_name", "[", "ep", ".", "name", "]", "for", "i", ",", "ext", "in", "enumerate", "(", "self", ".", "extensions", ")", ":", "if", "ext", ".", "name", "==", "ep", ".", "name", ":", "del", "self", ".", "extensions", "[", "i", "]", "break" ]
Unregister a provider :param str entry_point: provider to unregister (entry point syntax).
[ "Unregister", "a", "provider" ]
python
train
35.411765
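To show where unregister fits, here is a hedged sketch using subliminal's module-level provider_manager and its companion register method; the entry-point string refers to a hypothetical provider:

from subliminal.extensions import provider_manager

entry_point = 'myprovider = myapp.providers:MyProvider'

provider_manager.register(entry_point)
# ... later: remove it again. ValueError is raised if it was never registered.
provider_manager.unregister(entry_point)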
cpburnz/python-path-specification
pathspec/util.py
https://github.com/cpburnz/python-path-specification/blob/6fc7567a58cb68ec7d72cc287e7fb97dbe22c017/pathspec/util.py#L139-L156
def match_file(patterns, file):
    """
    Matches the file to the patterns.

    *patterns* (:class:`~collections.abc.Iterable` of :class:`~pathspec.pattern.Pattern`)
    contains the patterns to use.

    *file* (:class:`str`) is the normalized file path to be matched
    against *patterns*.

    Returns :data:`True` if *file* matched; otherwise, :data:`False`.
    """
    matched = False
    for pattern in patterns:
        if pattern.include is not None:
            if file in pattern.match((file,)):
                matched = pattern.include
    return matched
[ "def", "match_file", "(", "patterns", ",", "file", ")", ":", "matched", "=", "False", "for", "pattern", "in", "patterns", ":", "if", "pattern", ".", "include", "is", "not", "None", ":", "if", "file", "in", "pattern", ".", "match", "(", "(", "file", ",", ")", ")", ":", "matched", "=", "pattern", ".", "include", "return", "matched" ]
Matches the file to the patterns. *patterns* (:class:`~collections.abc.Iterable` of :class:`~pathspec.pattern.Pattern`) contains the patterns to use. *file* (:class:`str`) is the normalized file path to be matched against *patterns*. Returns :data:`True` if *file* matched; otherwise, :data:`False`.
[ "Matches", "the", "file", "to", "the", "patterns", "." ]
python
train
27.444444
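A small usage sketch for match_file, assuming patterns are built with pathspec's PathSpec.from_lines helper; the last matching pattern wins, which is what the include-flag handling above implements:

import pathspec
from pathspec.util import match_file

spec = pathspec.PathSpec.from_lines('gitwildmatch', ['*.py', '!setup.py'])

print(match_file(spec.patterns, 'src/module.py'))   # True: matched by '*.py'
print(match_file(spec.patterns, 'setup.py'))        # False: negated by '!setup.py'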
CodyKochmann/generators
generators/average.py
https://github.com/CodyKochmann/generators/blob/e4ca4dd25d5023a94b0349c69d6224070cc2526f/generators/average.py#L14-L21
def average():
    """ generator that holds a rolling average """
    count = 0
    total = total()
    i = 0
    while 1:
        i = yield ((total.send(i)*1.0)/count if count else 0)
        count += 1
[ "def", "average", "(", ")", ":", "count", "=", "0", "total", "=", "total", "(", ")", "i", "=", "0", "while", "1", ":", "i", "=", "yield", "(", "(", "total", ".", "send", "(", "i", ")", "*", "1.0", ")", "/", "count", "if", "count", "else", "0", ")", "count", "+=", "1" ]
generator that holds a rolling average
[ "generator", "that", "holds", "a", "rolling", "average" ]
python
train
24.25
mamrhein/specification
specification/_extd_ast_expr.py
https://github.com/mamrhein/specification/blob/a4c09a0d286cda7a04e8a189f12e23edd97f64ea/specification/_extd_ast_expr.py#L404-L410
def visit_keyword(self, node: AST, dfltChaining: bool = True) -> str:
    """Return representation of `node` as keyword arg."""
    arg = node.arg
    if arg is None:
        return f"**{self.visit(node.value)}"
    else:
        return f"{arg}={self.visit(node.value)}"
[ "def", "visit_keyword", "(", "self", ",", "node", ":", "AST", ",", "dfltChaining", ":", "bool", "=", "True", ")", "->", "str", ":", "arg", "=", "node", ".", "arg", "if", "arg", "is", "None", ":", "return", "f\"**{self.visit(node.value)}\"", "else", ":", "return", "f\"{arg}={self.visit(node.value)}\"" ]
Return representation of `node` as keyword arg.
[ "Return", "representation", "of", "node", "as", "keyword", "arg", "." ]
python
train
41.142857
bpsmith/tia
tia/bbg/bbg_com.py
https://github.com/bpsmith/tia/blob/a7043b6383e557aeea8fc7112bbffd6e36a230e9/tia/bbg/bbg_com.py#L430-L438
def response_as_single(self, copy=0):
    """ convert the response map to a single data frame with Multi-Index columns """
    arr = []
    for sid, frame in self.response.iteritems():
        if copy:
            frame = frame.copy()
        'security' not in frame and frame.insert(0, 'security', sid)
        arr.append(frame.reset_index().set_index(['date', 'security']))
    return concat(arr).unstack()
[ "def", "response_as_single", "(", "self", ",", "copy", "=", "0", ")", ":", "arr", "=", "[", "]", "for", "sid", ",", "frame", "in", "self", ".", "response", ".", "iteritems", "(", ")", ":", "if", "copy", ":", "frame", "=", "frame", ".", "copy", "(", ")", "'security'", "not", "in", "frame", "and", "frame", ".", "insert", "(", "0", ",", "'security'", ",", "sid", ")", "arr", ".", "append", "(", "frame", ".", "reset_index", "(", ")", ".", "set_index", "(", "[", "'date'", ",", "'security'", "]", ")", ")", "return", "concat", "(", "arr", ")", ".", "unstack", "(", ")" ]
convert the response map to a single data frame with Multi-Index columns
[ "convert", "the", "response", "map", "to", "a", "single", "data", "frame", "with", "Multi", "-", "Index", "columns" ]
python
train
48
SectorLabs/django-postgres-extra
psqlextra/expressions.py
https://github.com/SectorLabs/django-postgres-extra/blob/eef2ed5504d225858d4e4f5d77a838082ca6053e/psqlextra/expressions.py#L104-L112
def relabeled_clone(self, relabels):
    """Gets a re-labeled clone of this expression."""
    return self.__class__(
        relabels.get(self.alias, self.alias),
        self.target,
        self.hstore_key,
        self.output_field
    )
[ "def", "relabeled_clone", "(", "self", ",", "relabels", ")", ":", "return", "self", ".", "__class__", "(", "relabels", ".", "get", "(", "self", ".", "alias", ",", "self", ".", "alias", ")", ",", "self", ".", "target", ",", "self", ".", "hstore_key", ",", "self", ".", "output_field", ")" ]
Gets a re-labeled clone of this expression.
[ "Gets", "a", "re", "-", "labeled", "clone", "of", "this", "expression", "." ]
python
test
29.111111
metachris/RPIO
source/RPIO/_RPIO.py
https://github.com/metachris/RPIO/blob/be1942a69b2592ddacd9dc833d2668a19aafd8d2/source/RPIO/_RPIO.py#L114-L217
def add_interrupt_callback(self, gpio_id, callback, edge='both', pull_up_down=_GPIO.PUD_OFF, threaded_callback=False, debounce_timeout_ms=None): """ Add a callback to be executed when the value on 'gpio_id' changes to the edge specified via the 'edge' parameter (default='both'). `pull_up_down` can be set to `RPIO.PUD_UP`, `RPIO.PUD_DOWN`, and `RPIO.PUD_OFF`. If `threaded_callback` is True, the callback will be started inside a Thread. """ gpio_id = _GPIO.channel_to_gpio(gpio_id) debug("Adding callback for GPIO %s" % gpio_id) if not edge in ["falling", "rising", "both", "none"]: raise AttributeError("'%s' is not a valid edge." % edge) if not pull_up_down in [_GPIO.PUD_UP, _GPIO.PUD_DOWN, _GPIO.PUD_OFF]: raise AttributeError("'%s' is not a valid pull_up_down." % edge) # Make sure the gpio_id is valid if not gpio_id in set(chain(RPIO.GPIO_LIST_R1, RPIO.GPIO_LIST_R2, \ RPIO.GPIO_LIST_R3)): raise AttributeError("GPIO %s is not a valid gpio-id." % gpio_id) # Require INPUT pin setup; and set the correct PULL_UPDN if RPIO.gpio_function(int(gpio_id)) == RPIO.IN: RPIO.set_pullupdn(gpio_id, pull_up_down) else: debug("- changing gpio function from %s to INPUT" % \ (GPIO_FUNCTIONS[RPIO.gpio_function(int(gpio_id))])) RPIO.setup(gpio_id, RPIO.IN, pull_up_down) # Prepare the callback (wrap in Thread if needed) cb = callback if not threaded_callback else \ partial(_threaded_callback, callback) # Prepare the /sys/class path of this gpio path_gpio = "%sgpio%s/" % (_SYS_GPIO_ROOT, gpio_id) # If initial callback for this GPIO then set everything up. Else make # sure the edge detection is the same. if gpio_id in self._map_gpioid_to_callbacks: with open(path_gpio + "edge", "r") as f: e = f.read().strip() if e != edge: raise AttributeError(("Cannot add callback for gpio %s:" " edge detection '%s' not compatible with existing" " edge detection '%s'.") % (gpio_id, edge, e)) # Check whether edge is the same, else throw Exception debug("- kernel interface already setup for GPIO %s" % gpio_id) self._map_gpioid_to_callbacks[gpio_id].append(cb) else: # If kernel interface already exists unexport first for clean setup if os.path.exists(path_gpio): if self._show_warnings: warn("Kernel interface for GPIO %s already exists." 
% \ gpio_id) debug("- unexporting kernel interface for GPIO %s" % gpio_id) with open(_SYS_GPIO_ROOT + "unexport", "w") as f: f.write("%s" % gpio_id) time.sleep(0.1) # Export kernel interface /sys/class/gpio/gpioN with open(_SYS_GPIO_ROOT + "export", "w") as f: f.write("%s" % gpio_id) self._gpio_kernel_interfaces_created.append(gpio_id) debug("- kernel interface exported for GPIO %s" % gpio_id) # Configure gpio as input with open(path_gpio + "direction", "w") as f: f.write("in") # Configure gpio edge detection with open(path_gpio + "edge", "w") as f: f.write(edge) debug(("- kernel interface configured for GPIO %s " "(edge='%s', pullupdn=%s)") % (gpio_id, edge, \ _PULL_UPDN[pull_up_down])) # Open the gpio value stream and read the initial value f = open(path_gpio + "value", 'r') val_initial = f.read().strip() debug("- inital gpio value: %s" % val_initial) f.seek(0) # Add callback info to the mapping dictionaries self._map_fileno_to_file[f.fileno()] = f self._map_fileno_to_gpioid[f.fileno()] = gpio_id self._map_fileno_to_options[f.fileno()] = { "debounce_timeout_s": debounce_timeout_ms / 1000.0 if \ debounce_timeout_ms else 0, "interrupt_last": 0, "edge": edge } self._map_gpioid_to_fileno[gpio_id] = f.fileno() self._map_gpioid_to_callbacks[gpio_id] = [cb] # Add to epoll self._epoll.register(f.fileno(), select.EPOLLPRI | select.EPOLLERR)
[ "def", "add_interrupt_callback", "(", "self", ",", "gpio_id", ",", "callback", ",", "edge", "=", "'both'", ",", "pull_up_down", "=", "_GPIO", ".", "PUD_OFF", ",", "threaded_callback", "=", "False", ",", "debounce_timeout_ms", "=", "None", ")", ":", "gpio_id", "=", "_GPIO", ".", "channel_to_gpio", "(", "gpio_id", ")", "debug", "(", "\"Adding callback for GPIO %s\"", "%", "gpio_id", ")", "if", "not", "edge", "in", "[", "\"falling\"", ",", "\"rising\"", ",", "\"both\"", ",", "\"none\"", "]", ":", "raise", "AttributeError", "(", "\"'%s' is not a valid edge.\"", "%", "edge", ")", "if", "not", "pull_up_down", "in", "[", "_GPIO", ".", "PUD_UP", ",", "_GPIO", ".", "PUD_DOWN", ",", "_GPIO", ".", "PUD_OFF", "]", ":", "raise", "AttributeError", "(", "\"'%s' is not a valid pull_up_down.\"", "%", "edge", ")", "# Make sure the gpio_id is valid", "if", "not", "gpio_id", "in", "set", "(", "chain", "(", "RPIO", ".", "GPIO_LIST_R1", ",", "RPIO", ".", "GPIO_LIST_R2", ",", "RPIO", ".", "GPIO_LIST_R3", ")", ")", ":", "raise", "AttributeError", "(", "\"GPIO %s is not a valid gpio-id.\"", "%", "gpio_id", ")", "# Require INPUT pin setup; and set the correct PULL_UPDN", "if", "RPIO", ".", "gpio_function", "(", "int", "(", "gpio_id", ")", ")", "==", "RPIO", ".", "IN", ":", "RPIO", ".", "set_pullupdn", "(", "gpio_id", ",", "pull_up_down", ")", "else", ":", "debug", "(", "\"- changing gpio function from %s to INPUT\"", "%", "(", "GPIO_FUNCTIONS", "[", "RPIO", ".", "gpio_function", "(", "int", "(", "gpio_id", ")", ")", "]", ")", ")", "RPIO", ".", "setup", "(", "gpio_id", ",", "RPIO", ".", "IN", ",", "pull_up_down", ")", "# Prepare the callback (wrap in Thread if needed)", "cb", "=", "callback", "if", "not", "threaded_callback", "else", "partial", "(", "_threaded_callback", ",", "callback", ")", "# Prepare the /sys/class path of this gpio", "path_gpio", "=", "\"%sgpio%s/\"", "%", "(", "_SYS_GPIO_ROOT", ",", "gpio_id", ")", "# If initial callback for this GPIO then set everything up. 
Else make", "# sure the edge detection is the same.", "if", "gpio_id", "in", "self", ".", "_map_gpioid_to_callbacks", ":", "with", "open", "(", "path_gpio", "+", "\"edge\"", ",", "\"r\"", ")", "as", "f", ":", "e", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "e", "!=", "edge", ":", "raise", "AttributeError", "(", "(", "\"Cannot add callback for gpio %s:\"", "\" edge detection '%s' not compatible with existing\"", "\" edge detection '%s'.\"", ")", "%", "(", "gpio_id", ",", "edge", ",", "e", ")", ")", "# Check whether edge is the same, else throw Exception", "debug", "(", "\"- kernel interface already setup for GPIO %s\"", "%", "gpio_id", ")", "self", ".", "_map_gpioid_to_callbacks", "[", "gpio_id", "]", ".", "append", "(", "cb", ")", "else", ":", "# If kernel interface already exists unexport first for clean setup", "if", "os", ".", "path", ".", "exists", "(", "path_gpio", ")", ":", "if", "self", ".", "_show_warnings", ":", "warn", "(", "\"Kernel interface for GPIO %s already exists.\"", "%", "gpio_id", ")", "debug", "(", "\"- unexporting kernel interface for GPIO %s\"", "%", "gpio_id", ")", "with", "open", "(", "_SYS_GPIO_ROOT", "+", "\"unexport\"", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "\"%s\"", "%", "gpio_id", ")", "time", ".", "sleep", "(", "0.1", ")", "# Export kernel interface /sys/class/gpio/gpioN", "with", "open", "(", "_SYS_GPIO_ROOT", "+", "\"export\"", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "\"%s\"", "%", "gpio_id", ")", "self", ".", "_gpio_kernel_interfaces_created", ".", "append", "(", "gpio_id", ")", "debug", "(", "\"- kernel interface exported for GPIO %s\"", "%", "gpio_id", ")", "# Configure gpio as input", "with", "open", "(", "path_gpio", "+", "\"direction\"", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "\"in\"", ")", "# Configure gpio edge detection", "with", "open", "(", "path_gpio", "+", "\"edge\"", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "edge", ")", "debug", "(", "(", "\"- kernel interface configured for GPIO %s \"", "\"(edge='%s', pullupdn=%s)\"", ")", "%", "(", "gpio_id", ",", "edge", ",", "_PULL_UPDN", "[", "pull_up_down", "]", ")", ")", "# Open the gpio value stream and read the initial value", "f", "=", "open", "(", "path_gpio", "+", "\"value\"", ",", "'r'", ")", "val_initial", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "debug", "(", "\"- inital gpio value: %s\"", "%", "val_initial", ")", "f", ".", "seek", "(", "0", ")", "# Add callback info to the mapping dictionaries", "self", ".", "_map_fileno_to_file", "[", "f", ".", "fileno", "(", ")", "]", "=", "f", "self", ".", "_map_fileno_to_gpioid", "[", "f", ".", "fileno", "(", ")", "]", "=", "gpio_id", "self", ".", "_map_fileno_to_options", "[", "f", ".", "fileno", "(", ")", "]", "=", "{", "\"debounce_timeout_s\"", ":", "debounce_timeout_ms", "/", "1000.0", "if", "debounce_timeout_ms", "else", "0", ",", "\"interrupt_last\"", ":", "0", ",", "\"edge\"", ":", "edge", "}", "self", ".", "_map_gpioid_to_fileno", "[", "gpio_id", "]", "=", "f", ".", "fileno", "(", ")", "self", ".", "_map_gpioid_to_callbacks", "[", "gpio_id", "]", "=", "[", "cb", "]", "# Add to epoll", "self", ".", "_epoll", ".", "register", "(", "f", ".", "fileno", "(", ")", ",", "select", ".", "EPOLLPRI", "|", "select", ".", "EPOLLERR", ")" ]
Add a callback to be executed when the value on 'gpio_id' changes to the edge specified via the 'edge' parameter (default='both'). `pull_up_down` can be set to `RPIO.PUD_UP`, `RPIO.PUD_DOWN`, and `RPIO.PUD_OFF`. If `threaded_callback` is True, the callback will be started inside a Thread.
[ "Add", "a", "callback", "to", "be", "executed", "when", "the", "value", "on", "gpio_id", "changes", "to", "the", "edge", "specified", "via", "the", "edge", "parameter", "(", "default", "=", "both", ")", "." ]
python
train
44.548077
chrismattmann/tika-python
tika/tika.py
https://github.com/chrismattmann/tika-python/blob/ffd3879ac3eaa9142c0fb6557cc1dc52d458a75a/tika/tika.py#L253-L282
def parseAndSave(option, urlOrPaths, outDir=None, serverEndpoint=ServerEndpoint, verbose=Verbose, tikaServerJar=TikaServerJar,
                 responseMimeType='application/json',
                 metaExtension='_meta.json',
                 services={'meta': '/meta', 'text': '/tika', 'all': '/rmeta'}):
    '''
    Parse the objects and write extracted metadata and/or text in JSON format to matching
    filename with an extension of '_meta.json'.
    :param option:
    :param urlOrPaths:
    :param outDir:
    :param serverEndpoint:
    :param verbose:
    :param tikaServerJar:
    :param responseMimeType:
    :param metaExtension:
    :param services:
    :return:
    '''
    metaPaths = []
    paths = getPaths(urlOrPaths)
    for path in paths:
        if outDir is None:
            metaPath = path + metaExtension
        else:
            metaPath = os.path.join(outDir, os.path.split(path)[1] + metaExtension)
        log.info('Writing %s' % metaPath)
        with open(metaPath, 'w', 'utf-8') as f:
            f.write(parse1(option, path, serverEndpoint, verbose, tikaServerJar, \
                           responseMimeType, services)[1] + u"\n")
        metaPaths.append(metaPath)
    return metaPaths
[ "def", "parseAndSave", "(", "option", ",", "urlOrPaths", ",", "outDir", "=", "None", ",", "serverEndpoint", "=", "ServerEndpoint", ",", "verbose", "=", "Verbose", ",", "tikaServerJar", "=", "TikaServerJar", ",", "responseMimeType", "=", "'application/json'", ",", "metaExtension", "=", "'_meta.json'", ",", "services", "=", "{", "'meta'", ":", "'/meta'", ",", "'text'", ":", "'/tika'", ",", "'all'", ":", "'/rmeta'", "}", ")", ":", "metaPaths", "=", "[", "]", "paths", "=", "getPaths", "(", "urlOrPaths", ")", "for", "path", "in", "paths", ":", "if", "outDir", "is", "None", ":", "metaPath", "=", "path", "+", "metaExtension", "else", ":", "metaPath", "=", "os", ".", "path", ".", "join", "(", "outDir", ",", "os", ".", "path", ".", "split", "(", "path", ")", "[", "1", "]", "+", "metaExtension", ")", "log", ".", "info", "(", "'Writing %s'", "%", "metaPath", ")", "with", "open", "(", "metaPath", ",", "'w'", ",", "'utf-8'", ")", "as", "f", ":", "f", ".", "write", "(", "parse1", "(", "option", ",", "path", ",", "serverEndpoint", ",", "verbose", ",", "tikaServerJar", ",", "responseMimeType", ",", "services", ")", "[", "1", "]", "+", "u\"\\n\"", ")", "metaPaths", ".", "append", "(", "metaPath", ")", "return", "metaPaths" ]
Parse the objects and write extracted metadata and/or text in JSON format to matching filename with an extension of '_meta.json'. :param option: :param urlOrPaths: :param outDir: :param serverEndpoint: :param verbose: :param tikaServerJar: :param responseMimeType: :param metaExtension: :param services: :return:
[ "Parse", "the", "objects", "and", "write", "extracted", "metadata", "and", "/", "or", "text", "in", "JSON", "format", "to", "matching", "filename", "with", "an", "extension", "of", "_meta", ".", "json", ".", ":", "param", "option", ":", ":", "param", "urlOrPaths", ":", ":", "param", "outDir", ":", ":", "param", "serverEndpoint", ":", ":", "param", "verbose", ":", ":", "param", "tikaServerJar", ":", ":", "param", "responseMimeType", ":", ":", "param", "metaExtension", ":", ":", "param", "services", ":", ":", "return", ":" ]
python
train
39.9
aarongarrett/inspyred
inspyred/ec/replacers.py
https://github.com/aarongarrett/inspyred/blob/d5976ab503cc9d51c6f586cbb7bb601a38c01128/inspyred/ec/replacers.py#L194-L214
def comma_replacement(random, population, parents, offspring, args):
    """Performs "comma" replacement.

    This function performs "comma" replacement, which means that the entire
    existing population is replaced by the best population-many elements
    from the offspring. This function makes the assumption that the size
    of the offspring is at least as large as the original population.
    Otherwise, the population size will not be constant.

    .. Arguments:
       random -- the random number generator object
       population -- the population of individuals
       parents -- the list of parent individuals
       offspring -- the list of offspring individuals
       args -- a dictionary of keyword arguments

    """
    offspring.sort(reverse=True)
    survivors = offspring[:len(population)]
    return survivors
[ "def", "comma_replacement", "(", "random", ",", "population", ",", "parents", ",", "offspring", ",", "args", ")", ":", "offspring", ".", "sort", "(", "reverse", "=", "True", ")", "survivors", "=", "offspring", "[", ":", "len", "(", "population", ")", "]", "return", "survivors" ]
Performs "comma" replacement. This function performs "comma" replacement, which means that the entire existing population is replaced by the best population-many elements from the offspring. This function makes the assumption that the size of the offspring is at least as large as the original population. Otherwise, the population size will not be constant. .. Arguments: random -- the random number generator object population -- the population of individuals parents -- the list of parent individuals offspring -- the list of offspring individuals args -- a dictionary of keyword arguments
[ "Performs", "comma", "replacement", ".", "This", "function", "performs", "comma", "replacement", "which", "means", "that", "the", "entire", "existing", "population", "is", "replaced", "by", "the", "best", "population", "-", "many", "elements", "from", "the", "offspring", ".", "This", "function", "makes", "the", "assumption", "that", "the", "size", "of", "the", "offspring", "is", "at", "least", "as", "large", "as", "the", "original", "population", ".", "Otherwise", "the", "population", "size", "will", "not", "be", "constant", ".", "..", "Arguments", ":", "random", "--", "the", "random", "number", "generator", "object", "population", "--", "the", "population", "of", "individuals", "parents", "--", "the", "list", "of", "parent", "individuals", "offspring", "--", "the", "list", "of", "offspring", "individuals", "args", "--", "a", "dictionary", "of", "keyword", "arguments" ]
python
train
39.809524
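For orientation, a minimal sketch of how this replacer is usually plugged into an inspyred evolutionary computation; the generator, evaluator, and terminator set-up is omitted and only the replacer assignment is shown:

import random

import inspyred

prng = random.Random()
es = inspyred.ec.EvolutionaryComputation(prng)
# Survivors are drawn exclusively from the offspring ("comma" strategy).
es.replacer = inspyred.ec.replacers.comma_replacement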
androguard/androguard
androguard/core/analysis/analysis.py
https://github.com/androguard/androguard/blob/984c0d981be2950cf0451e484f7b0d4d53bc4911/androguard/core/analysis/analysis.py#L439-L461
def show(self):
    """
    Prints the content of this method to stdout.

    This will print the method signature and the decompiled code.
    """
    args, ret = self.method.get_descriptor()[1:].split(")")
    if self.code:
        # We patch the descriptor here and add the registers, if code is available
        args = args.split(" ")

        reg_len = self.code.get_registers_size()
        nb_args = len(args)

        start_reg = reg_len - nb_args
        args = ["{} v{}".format(a, start_reg + i) for i, a in enumerate(args)]

    print("METHOD {} {} {} ({}){}".format(
        self.method.get_class_name(),
        self.method.get_access_flags_string(),
        self.method.get_name(),
        ", ".join(args), ret))

    bytecode.PrettyShow(self, self.basic_blocks.gets(), self.method.notes)
[ "def", "show", "(", "self", ")", ":", "args", ",", "ret", "=", "self", ".", "method", ".", "get_descriptor", "(", ")", "[", "1", ":", "]", ".", "split", "(", "\")\"", ")", "if", "self", ".", "code", ":", "# We patch the descriptor here and add the registers, if code is available", "args", "=", "args", ".", "split", "(", "\" \"", ")", "reg_len", "=", "self", ".", "code", ".", "get_registers_size", "(", ")", "nb_args", "=", "len", "(", "args", ")", "start_reg", "=", "reg_len", "-", "nb_args", "args", "=", "[", "\"{} v{}\"", ".", "format", "(", "a", ",", "start_reg", "+", "i", ")", "for", "i", ",", "a", "in", "enumerate", "(", "args", ")", "]", "print", "(", "\"METHOD {} {} {} ({}){}\"", ".", "format", "(", "self", ".", "method", ".", "get_class_name", "(", ")", ",", "self", ".", "method", ".", "get_access_flags_string", "(", ")", ",", "self", ".", "method", ".", "get_name", "(", ")", ",", "\", \"", ".", "join", "(", "args", ")", ",", "ret", ")", ")", "bytecode", ".", "PrettyShow", "(", "self", ",", "self", ".", "basic_blocks", ".", "gets", "(", ")", ",", "self", ".", "method", ".", "notes", ")" ]
Prints the content of this method to stdout. This will print the method signature and the decompiled code.
[ "Prints", "the", "content", "of", "this", "method", "to", "stdout", "." ]
python
train
37.391304
saltstack/salt
salt/modules/win_iis.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/win_iis.py#L822-L930
def create_cert_binding(name, site, hostheader='', ipaddress='*', port=443, sslflags=0): ''' Assign a certificate to an IIS Web Binding. .. versionadded:: 2016.11.0 .. note:: The web binding that the certificate is being assigned to must already exist. Args: name (str): The thumbprint of the certificate. site (str): The IIS site name. hostheader (str): The host header of the binding. ipaddress (str): The IP address of the binding. port (int): The TCP port of the binding. sslflags (int): Flags representing certificate type and certificate storage of the binding. Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' win_iis.create_cert_binding name='AAA000' site='site0' hostheader='example.com' ipaddress='*' port='443' ''' name = six.text_type(name).upper() binding_info = _get_binding_info(hostheader, ipaddress, port) if _iisVersion() < 8: # IIS 7.5 and earlier don't support SNI for HTTPS, therefore cert bindings don't contain the host header binding_info = binding_info.rpartition(':')[0] + ':' binding_path = r"IIS:\SslBindings\{0}".format(binding_info.replace(':', '!')) if sslflags not in _VALID_SSL_FLAGS: message = ("Invalid sslflags '{0}' specified. Valid sslflags range: " "{1}..{2}").format(sslflags, _VALID_SSL_FLAGS[0], _VALID_SSL_FLAGS[-1]) raise SaltInvocationError(message) # Verify that the target binding exists. current_bindings = list_bindings(site) if binding_info not in current_bindings: log.error('Binding not present: %s', binding_info) return False # Check to see if the certificate is already assigned. current_name = None for current_binding in current_bindings: if binding_info == current_binding: current_name = current_bindings[current_binding]['certificatehash'] log.debug('Current certificate thumbprint: %s', current_name) log.debug('New certificate thumbprint: %s', name) if name == current_name: log.debug('Certificate already present for binding: %s', name) return True # Verify that the certificate exists. certs = _list_certs() if name not in certs: log.error('Certificate not present: %s', name) return False if _iisVersion() < 8: # IIS 7.5 and earlier have different syntax for associating a certificate with a site # Modify IP spec to IIS 7.5 format iis7path = binding_path.replace(r"\*!", "\\0.0.0.0!") # win 2008 uses the following format: ip!port and not ip!port! if iis7path.endswith("!"): iis7path = iis7path[:-1] ps_cmd = ['New-Item', '-Path', "'{0}'".format(iis7path), '-Thumbprint', "'{0}'".format(name)] else: ps_cmd = ['New-Item', '-Path', "'{0}'".format(binding_path), '-Thumbprint', "'{0}'".format(name), '-SSLFlags', '{0}'.format(sslflags)] cmd_ret = _srvmgr(ps_cmd) if cmd_ret['retcode'] != 0: msg = 'Unable to create certificate binding: {0}\nError: {1}' \ ''.format(name, cmd_ret['stderr']) raise CommandExecutionError(msg) new_cert_bindings = list_cert_bindings(site) if binding_info not in new_cert_bindings: log.error('Binding not present: %s', binding_info) return False if name == new_cert_bindings[binding_info]['certificatehash']: log.debug('Certificate binding created successfully: %s', name) return True log.error('Unable to create certificate binding: %s', name) return False
[ "def", "create_cert_binding", "(", "name", ",", "site", ",", "hostheader", "=", "''", ",", "ipaddress", "=", "'*'", ",", "port", "=", "443", ",", "sslflags", "=", "0", ")", ":", "name", "=", "six", ".", "text_type", "(", "name", ")", ".", "upper", "(", ")", "binding_info", "=", "_get_binding_info", "(", "hostheader", ",", "ipaddress", ",", "port", ")", "if", "_iisVersion", "(", ")", "<", "8", ":", "# IIS 7.5 and earlier don't support SNI for HTTPS, therefore cert bindings don't contain the host header", "binding_info", "=", "binding_info", ".", "rpartition", "(", "':'", ")", "[", "0", "]", "+", "':'", "binding_path", "=", "r\"IIS:\\SslBindings\\{0}\"", ".", "format", "(", "binding_info", ".", "replace", "(", "':'", ",", "'!'", ")", ")", "if", "sslflags", "not", "in", "_VALID_SSL_FLAGS", ":", "message", "=", "(", "\"Invalid sslflags '{0}' specified. Valid sslflags range: \"", "\"{1}..{2}\"", ")", ".", "format", "(", "sslflags", ",", "_VALID_SSL_FLAGS", "[", "0", "]", ",", "_VALID_SSL_FLAGS", "[", "-", "1", "]", ")", "raise", "SaltInvocationError", "(", "message", ")", "# Verify that the target binding exists.", "current_bindings", "=", "list_bindings", "(", "site", ")", "if", "binding_info", "not", "in", "current_bindings", ":", "log", ".", "error", "(", "'Binding not present: %s'", ",", "binding_info", ")", "return", "False", "# Check to see if the certificate is already assigned.", "current_name", "=", "None", "for", "current_binding", "in", "current_bindings", ":", "if", "binding_info", "==", "current_binding", ":", "current_name", "=", "current_bindings", "[", "current_binding", "]", "[", "'certificatehash'", "]", "log", ".", "debug", "(", "'Current certificate thumbprint: %s'", ",", "current_name", ")", "log", ".", "debug", "(", "'New certificate thumbprint: %s'", ",", "name", ")", "if", "name", "==", "current_name", ":", "log", ".", "debug", "(", "'Certificate already present for binding: %s'", ",", "name", ")", "return", "True", "# Verify that the certificate exists.", "certs", "=", "_list_certs", "(", ")", "if", "name", "not", "in", "certs", ":", "log", ".", "error", "(", "'Certificate not present: %s'", ",", "name", ")", "return", "False", "if", "_iisVersion", "(", ")", "<", "8", ":", "# IIS 7.5 and earlier have different syntax for associating a certificate with a site", "# Modify IP spec to IIS 7.5 format", "iis7path", "=", "binding_path", ".", "replace", "(", "r\"\\*!\"", ",", "\"\\\\0.0.0.0!\"", ")", "# win 2008 uses the following format: ip!port and not ip!port!", "if", "iis7path", ".", "endswith", "(", "\"!\"", ")", ":", "iis7path", "=", "iis7path", "[", ":", "-", "1", "]", "ps_cmd", "=", "[", "'New-Item'", ",", "'-Path'", ",", "\"'{0}'\"", ".", "format", "(", "iis7path", ")", ",", "'-Thumbprint'", ",", "\"'{0}'\"", ".", "format", "(", "name", ")", "]", "else", ":", "ps_cmd", "=", "[", "'New-Item'", ",", "'-Path'", ",", "\"'{0}'\"", ".", "format", "(", "binding_path", ")", ",", "'-Thumbprint'", ",", "\"'{0}'\"", ".", "format", "(", "name", ")", ",", "'-SSLFlags'", ",", "'{0}'", ".", "format", "(", "sslflags", ")", "]", "cmd_ret", "=", "_srvmgr", "(", "ps_cmd", ")", "if", "cmd_ret", "[", "'retcode'", "]", "!=", "0", ":", "msg", "=", "'Unable to create certificate binding: {0}\\nError: {1}'", "''", ".", "format", "(", "name", ",", "cmd_ret", "[", "'stderr'", "]", ")", "raise", "CommandExecutionError", "(", "msg", ")", "new_cert_bindings", "=", "list_cert_bindings", "(", "site", ")", "if", "binding_info", "not", "in", "new_cert_bindings", ":", "log", ".", "error", "(", 
"'Binding not present: %s'", ",", "binding_info", ")", "return", "False", "if", "name", "==", "new_cert_bindings", "[", "binding_info", "]", "[", "'certificatehash'", "]", ":", "log", ".", "debug", "(", "'Certificate binding created successfully: %s'", ",", "name", ")", "return", "True", "log", ".", "error", "(", "'Unable to create certificate binding: %s'", ",", "name", ")", "return", "False" ]
Assign a certificate to an IIS Web Binding. .. versionadded:: 2016.11.0 .. note:: The web binding that the certificate is being assigned to must already exist. Args: name (str): The thumbprint of the certificate. site (str): The IIS site name. hostheader (str): The host header of the binding. ipaddress (str): The IP address of the binding. port (int): The TCP port of the binding. sslflags (int): Flags representing certificate type and certificate storage of the binding. Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' win_iis.create_cert_binding name='AAA000' site='site0' hostheader='example.com' ipaddress='*' port='443'
[ "Assign", "a", "certificate", "to", "an", "IIS", "Web", "Binding", "." ]
python
train
34.266055
jwkvam/plotlywrapper
plotlywrapper.py
https://github.com/jwkvam/plotlywrapper/blob/762b42912e824fecb1212c186900f2ebdd0ab12b/plotlywrapper.py#L31-L55
def _detect_notebook() -> bool:
    """Detect if code is running in a Jupyter Notebook.

    This isn't 100% correct but seems good enough

    Returns
    -------
    bool
        True if it detects this is a notebook, otherwise False.

    """
    try:
        from IPython import get_ipython
        from ipykernel import zmqshell
    except ImportError:
        return False
    kernel = get_ipython()
    try:
        from spyder.utils.ipython.spyder_kernel import SpyderKernel

        if isinstance(kernel.kernel, SpyderKernel):
            return False
    except (ImportError, AttributeError):
        pass
    return isinstance(kernel, zmqshell.ZMQInteractiveShell)
[ "def", "_detect_notebook", "(", ")", "->", "bool", ":", "try", ":", "from", "IPython", "import", "get_ipython", "from", "ipykernel", "import", "zmqshell", "except", "ImportError", ":", "return", "False", "kernel", "=", "get_ipython", "(", ")", "try", ":", "from", "spyder", ".", "utils", ".", "ipython", ".", "spyder_kernel", "import", "SpyderKernel", "if", "isinstance", "(", "kernel", ".", "kernel", ",", "SpyderKernel", ")", ":", "return", "False", "except", "(", "ImportError", ",", "AttributeError", ")", ":", "pass", "return", "isinstance", "(", "kernel", ",", "zmqshell", ".", "ZMQInteractiveShell", ")" ]
Detect if code is running in a Jupyter Notebook. This isn't 100% correct but seems good enough Returns ------- bool True if it detects this is a notebook, otherwise False.
[ "Detect", "if", "code", "is", "running", "in", "a", "Jupyter", "Notebook", "." ]
python
train
26.04
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/authentication.py
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L74-L83
def from_key_bytes(cls, algorithm, key_bytes):
    """Builds a `Signer` from an algorithm suite and a raw signing key.

    :param algorithm: Algorithm on which to base signer
    :type algorithm: aws_encryption_sdk.identifiers.Algorithm
    :param bytes key_bytes: Raw signing key
    :rtype: aws_encryption_sdk.internal.crypto.Signer
    """
    key = serialization.load_der_private_key(data=key_bytes, password=None, backend=default_backend())
    return cls(algorithm, key)
[ "def", "from_key_bytes", "(", "cls", ",", "algorithm", ",", "key_bytes", ")", ":", "key", "=", "serialization", ".", "load_der_private_key", "(", "data", "=", "key_bytes", ",", "password", "=", "None", ",", "backend", "=", "default_backend", "(", ")", ")", "return", "cls", "(", "algorithm", ",", "key", ")" ]
Builds a `Signer` from an algorithm suite and a raw signing key. :param algorithm: Algorithm on which to base signer :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key_bytes: Raw signing key :rtype: aws_encryption_sdk.internal.crypto.Signer
[ "Builds", "a", "Signer", "from", "an", "algorithm", "suite", "and", "a", "raw", "signing", "key", "." ]
python
train
50
peepall/FancyLogger
FancyLogger/__init__.py
https://github.com/peepall/FancyLogger/blob/7f13f1397e76ed768fb6b6358194118831fafc6d/FancyLogger/__init__.py#L387-L396
def error(self, text):
    """
    Posts an error message adding a timestamp and logging level to it for both file and console handlers.
    Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
    at the very time they are being logged but their timestamp will be captured at the right time. Logger will
    redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
    immediately (may produce flickering) then call 'flush' method.
    :param text: The text to log into file and console.
    """
    self.queue.put(dill.dumps(LogMessageCommand(text=text, level=logging.ERROR)))
[ "def", "error", "(", "self", ",", "text", ")", ":", "self", ".", "queue", ".", "put", "(", "dill", ".", "dumps", "(", "LogMessageCommand", "(", "text", "=", "text", ",", "level", "=", "logging", ".", "ERROR", ")", ")", ")" ]
Posts an error message adding a timestamp and logging level to it for both file and console handlers. Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress at the very time they are being logged but their timestamp will be captured at the right time. Logger will redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw immediately (may produce flickering) then call 'flush' method. :param text: The text to log into file and console.
[ "Posts", "an", "error", "message", "adding", "a", "timestamp", "and", "logging", "level", "to", "it", "for", "both", "file", "and", "console", "handlers", ".", "Logger", "uses", "a", "redraw", "rate", "because", "of", "console", "flickering", ".", "That", "means", "it", "will", "not", "draw", "new", "messages", "or", "progress", "at", "the", "very", "time", "they", "are", "being", "logged", "but", "their", "timestamp", "will", "be", "captured", "at", "the", "right", "time", ".", "Logger", "will", "redraw", "at", "a", "given", "time", "period", "AND", "when", "new", "messages", "or", "progress", "are", "logged", ".", "If", "you", "still", "want", "to", "force", "redraw", "immediately", "(", "may", "produce", "flickering", ")", "then", "call", "flush", "method", ".", ":", "param", "text", ":", "The", "text", "to", "log", "into", "file", "and", "console", "." ]
python
train
71.5
zetaops/pyoko
pyoko/db/adapter/db_riak.py
https://github.com/zetaops/pyoko/blob/236c509ad85640933ac0f89ad8f7ed95f62adf07/pyoko/db/adapter/db_riak.py#L412-L470
def save_model(self, model, meta_data=None, index_fields=None): """ model (instance): Model instance. meta (dict): JSON serializable meta data for logging of save operation. {'lorem': 'ipsum', 'dolar': 5} index_fields (list): Tuple list for indexing keys in riak (with 'bin' or 'int'). [('lorem','bin'),('dolar','int')] :return: """ # if model: # self._model = model if settings.DEBUG: t1 = time.time() clean_value = model.clean_value() model._data = clean_value if settings.DEBUG: t2 = time.time() if not model.exist: obj = self.bucket.new(data=clean_value).store() model.key = obj.key new_obj = True else: new_obj = False obj = self.bucket.get(model.key) obj.data = clean_value obj.store() if settings.ENABLE_VERSIONS: version_key = self._write_version(clean_value, model) else: version_key = '' if settings.ENABLE_CACHING: self.set_to_cache((clean_value, model.key)) meta_data = meta_data or model.save_meta_data if settings.ENABLE_ACTIVITY_LOGGING and meta_data: self._write_log(version_key, meta_data, index_fields) if self.COLLECT_SAVES and self.COLLECT_SAVES_FOR_MODEL == model.__class__.__name__: self.block_saved_keys.append(obj.key) if settings.DEBUG: if new_obj: sys.PYOKO_STAT_COUNTER['save'] += 1 sys.PYOKO_LOGS['new'].append(obj.key) else: sys.PYOKO_LOGS[self._model_class.__name__].append(obj.key) sys.PYOKO_STAT_COUNTER['update'] += 1 # sys._debug_db_queries.append({ # 'TIMESTAMP': t1, # 'KEY': obj.key, # 'BUCKET': self.index_name, # 'SAVE_IS_NEW': new_obj, # 'SERIALIZATION_TIME': round(t2 - t1, 5), # 'TIME': round(time.time() - t2, 5) # }) return model
[ "def", "save_model", "(", "self", ",", "model", ",", "meta_data", "=", "None", ",", "index_fields", "=", "None", ")", ":", "# if model:", "# self._model = model", "if", "settings", ".", "DEBUG", ":", "t1", "=", "time", ".", "time", "(", ")", "clean_value", "=", "model", ".", "clean_value", "(", ")", "model", ".", "_data", "=", "clean_value", "if", "settings", ".", "DEBUG", ":", "t2", "=", "time", ".", "time", "(", ")", "if", "not", "model", ".", "exist", ":", "obj", "=", "self", ".", "bucket", ".", "new", "(", "data", "=", "clean_value", ")", ".", "store", "(", ")", "model", ".", "key", "=", "obj", ".", "key", "new_obj", "=", "True", "else", ":", "new_obj", "=", "False", "obj", "=", "self", ".", "bucket", ".", "get", "(", "model", ".", "key", ")", "obj", ".", "data", "=", "clean_value", "obj", ".", "store", "(", ")", "if", "settings", ".", "ENABLE_VERSIONS", ":", "version_key", "=", "self", ".", "_write_version", "(", "clean_value", ",", "model", ")", "else", ":", "version_key", "=", "''", "if", "settings", ".", "ENABLE_CACHING", ":", "self", ".", "set_to_cache", "(", "(", "clean_value", ",", "model", ".", "key", ")", ")", "meta_data", "=", "meta_data", "or", "model", ".", "save_meta_data", "if", "settings", ".", "ENABLE_ACTIVITY_LOGGING", "and", "meta_data", ":", "self", ".", "_write_log", "(", "version_key", ",", "meta_data", ",", "index_fields", ")", "if", "self", ".", "COLLECT_SAVES", "and", "self", ".", "COLLECT_SAVES_FOR_MODEL", "==", "model", ".", "__class__", ".", "__name__", ":", "self", ".", "block_saved_keys", ".", "append", "(", "obj", ".", "key", ")", "if", "settings", ".", "DEBUG", ":", "if", "new_obj", ":", "sys", ".", "PYOKO_STAT_COUNTER", "[", "'save'", "]", "+=", "1", "sys", ".", "PYOKO_LOGS", "[", "'new'", "]", ".", "append", "(", "obj", ".", "key", ")", "else", ":", "sys", ".", "PYOKO_LOGS", "[", "self", ".", "_model_class", ".", "__name__", "]", ".", "append", "(", "obj", ".", "key", ")", "sys", ".", "PYOKO_STAT_COUNTER", "[", "'update'", "]", "+=", "1", "# sys._debug_db_queries.append({", "# 'TIMESTAMP': t1,", "# 'KEY': obj.key,", "# 'BUCKET': self.index_name,", "# 'SAVE_IS_NEW': new_obj,", "# 'SERIALIZATION_TIME': round(t2 - t1, 5),", "# 'TIME': round(time.time() - t2, 5)", "# })", "return", "model" ]
model (instance): Model instance. meta (dict): JSON serializable meta data for logging of save operation. {'lorem': 'ipsum', 'dolar': 5} index_fields (list): Tuple list for indexing keys in riak (with 'bin' or 'int'). [('lorem','bin'),('dolar','int')] :return:
[ "model", "(", "instance", ")", ":", "Model", "instance", ".", "meta", "(", "dict", ")", ":", "JSON", "serializable", "meta", "data", "for", "logging", "of", "save", "operation", ".", "{", "lorem", ":", "ipsum", "dolar", ":", "5", "}", "index_fields", "(", "list", ")", ":", "Tuple", "list", "for", "indexing", "keys", "in", "riak", "(", "with", "bin", "or", "int", ")", ".", "[", "(", "lorem", "bin", ")", "(", "dolar", "int", ")", "]", ":", "return", ":" ]
python
train
35.881356
HewlettPackard/python-hpOneView
hpOneView/resources/servers/server_profiles.py
https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/servers/server_profiles.py#L82-L106
def update(self, data=None, timeout=-1, force=''): """Updates server profile template. Args: data: Data to update the resource. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. force: Force the update operation. Returns: A dict with the updated resource data. """ uri = self.data['uri'] resource = deepcopy(self.data) resource.update(data) # Removes related fields to serverHardware in case of unassign if resource.get('serverHardwareUri') is None: resource.pop('enclosureBay', None) resource.pop('enclosureUri', None) self.data = self._helper.update(resource, uri, force, timeout) return self
[ "def", "update", "(", "self", ",", "data", "=", "None", ",", "timeout", "=", "-", "1", ",", "force", "=", "''", ")", ":", "uri", "=", "self", ".", "data", "[", "'uri'", "]", "resource", "=", "deepcopy", "(", "self", ".", "data", ")", "resource", ".", "update", "(", "data", ")", "# Removes related fields to serverHardware in case of unassign", "if", "resource", ".", "get", "(", "'serverHardwareUri'", ")", "is", "None", ":", "resource", ".", "pop", "(", "'enclosureBay'", ",", "None", ")", "resource", ".", "pop", "(", "'enclosureUri'", ",", "None", ")", "self", ".", "data", "=", "self", ".", "_helper", ".", "update", "(", "resource", ",", "uri", ",", "force", ",", "timeout", ")", "return", "self" ]
Updates server profile template. Args: data: Data to update the resource. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. force: Force the update operation. Returns: A dict with the updated resource data.
[ "Updates", "server", "profile", "template", "." ]
python
train
34.52
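A hedged usage sketch for the `update` record above; the appliance address, credentials, profile name, and payload are hypothetical, and the entry point is assumed to be the library's documented `OneViewClient`.

from hpOneView.oneview_client import OneViewClient

# Hypothetical appliance credentials -- replace with real values.
config = {
    "ip": "oneview.example.com",
    "credentials": {"userName": "administrator", "password": "secret"},
}
client = OneViewClient(config)

# Fetch an existing profile resource and push a small update to it.
profile = client.server_profiles.get_by_name("web-server-01")  # name is illustrative
profile = profile.update(data={"description": "Updated via the API"}, timeout=600)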
loli/medpy
doc/numpydoc/numpydoc/plot_directive.py
https://github.com/loli/medpy/blob/95216b9e22e7ce301f0edf953ee2a2f1b6c6aee5/doc/numpydoc/numpydoc/plot_directive.py#L416-L436
def split_code_at_show(text): """ Split code at plt.show() """ parts = [] is_doctest = contains_doctest(text) part = [] for line in text.split("\n"): if (not is_doctest and line.strip() == 'plt.show()') or \ (is_doctest and line.strip() == '>>> plt.show()'): part.append(line) parts.append("\n".join(part)) part = [] else: part.append(line) if "\n".join(part).strip(): parts.append("\n".join(part)) return parts
[ "def", "split_code_at_show", "(", "text", ")", ":", "parts", "=", "[", "]", "is_doctest", "=", "contains_doctest", "(", "text", ")", "part", "=", "[", "]", "for", "line", "in", "text", ".", "split", "(", "\"\\n\"", ")", ":", "if", "(", "not", "is_doctest", "and", "line", ".", "strip", "(", ")", "==", "'plt.show()'", ")", "or", "(", "is_doctest", "and", "line", ".", "strip", "(", ")", "==", "'>>> plt.show()'", ")", ":", "part", ".", "append", "(", "line", ")", "parts", ".", "append", "(", "\"\\n\"", ".", "join", "(", "part", ")", ")", "part", "=", "[", "]", "else", ":", "part", ".", "append", "(", "line", ")", "if", "\"\\n\"", ".", "join", "(", "part", ")", ".", "strip", "(", ")", ":", "parts", ".", "append", "(", "\"\\n\"", ".", "join", "(", "part", ")", ")", "return", "parts" ]
Split code at plt.show()
[ "Split", "code", "at", "plt", ".", "show", "()" ]
python
train
24.619048
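The behaviour of `split_code_at_show` is clearest with a tiny worked input; this is a minimal sketch, and the import path is assumed from the file location shown above.

from numpydoc.plot_directive import split_code_at_show  # assumed import path

text = "plt.plot([1, 2, 3])\nplt.show()\nplt.plot([4, 5, 6])"
parts = split_code_at_show(text)
# Each part ends at (and includes) a plt.show() line:
# parts == ['plt.plot([1, 2, 3])\nplt.show()', 'plt.plot([4, 5, 6])']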
lvieirajr/mongorest
mongorest/collection.py
https://github.com/lvieirajr/mongorest/blob/00f4487ded33254434bc51ff09d48c7a936bd465/mongorest/collection.py#L316-L321
def update_many(cls, filter, update, upsert=False): """ Updates all documents that pass the filter with the update value Will upsert a new document if upsert=True and no document is filtered """ return cls.collection.update_many(filter, update, upsert).raw_result
[ "def", "update_many", "(", "cls", ",", "filter", ",", "update", ",", "upsert", "=", "False", ")", ":", "return", "cls", ".", "collection", ".", "update_many", "(", "filter", ",", "update", ",", "upsert", ")", ".", "raw_result" ]
Updates all documents that pass the filter with the update value Will upsert a new document if upsert=True and no document is filtered
[ "Updates", "all", "documents", "that", "pass", "the", "filter", "with", "the", "update", "value", "Will", "upsert", "a", "new", "document", "if", "upsert", "=", "True", "and", "no", "document", "is", "filtered" ]
python
train
49.666667
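A short sketch of how the `update_many` classmethod above might be exercised; `Account` is a hypothetical Collection subclass and the filter/update documents are illustrative only.

from mongorest.collection import Collection

class Account(Collection):
    # Hypothetical collection used only to illustrate the call.
    pass

raw = Account.update_many(
    filter={"status": "trial"},
    update={"$set": {"status": "active"}},
    upsert=False,
)
# raw is PyMongo's raw_result dict, e.g. {'n': 3, 'nModified': 3, 'ok': 1.0}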
danijar/sets
sets/core/step.py
https://github.com/danijar/sets/blob/2542c28f43d0af18932cb5b82f54ffb6ae557d12/sets/core/step.py#L12-L21
def disk_cache(cls, basename, function, *args, method=True, **kwargs): """ Cache the return value in the correct cache directory. Set 'method' to false for static methods. """ @utility.disk_cache(basename, cls.directory(), method=method) def wrapper(*args, **kwargs): return function(*args, **kwargs) return wrapper(*args, **kwargs)
[ "def", "disk_cache", "(", "cls", ",", "basename", ",", "function", ",", "*", "args", ",", "method", "=", "True", ",", "*", "*", "kwargs", ")", ":", "@", "utility", ".", "disk_cache", "(", "basename", ",", "cls", ".", "directory", "(", ")", ",", "method", "=", "method", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Cache the return value in the correct cache directory. Set 'method' to false for static methods.
[ "Cache", "the", "return", "value", "in", "the", "correct", "cache", "directory", ".", "Set", "method", "to", "false", "for", "static", "methods", "." ]
python
train
39.2
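A sketch of how `disk_cache` above could be used, under stated assumptions: `Step` is taken to be the class that defines it (import path guessed from sets/core/step.py), and `load_vocabulary` is a hypothetical expensive helper whose result is worth caching.

from sets.core.step import Step  # assumed import path

def load_vocabulary(path):
    # Hypothetical expensive operation; its return value gets cached on disk.
    with open(path) as file_:
        return file_.read().split()

class Embedding(Step):
    def __call__(self, path):
        # method=False because load_vocabulary is a plain function, not a method.
        return self.disk_cache('vocabulary', load_vocabulary, path, method=False)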
inveniosoftware/invenio-jsonschemas
invenio_jsonschemas/ext.py
https://github.com/inveniosoftware/invenio-jsonschemas/blob/93019b8fe3bf549335e94c84198c9c0b76d8fde2/invenio_jsonschemas/ext.py#L79-L90
def get_schema_dir(self, path): """Retrieve the directory containing the given schema. :param path: Schema path, relative to the directory where it was registered. :raises invenio_jsonschemas.errors.JSONSchemaNotFound: If no schema was found in the specified path. :returns: The schema directory. """ if path not in self.schemas: raise JSONSchemaNotFound(path) return self.schemas[path]
[ "def", "get_schema_dir", "(", "self", ",", "path", ")", ":", "if", "path", "not", "in", "self", ".", "schemas", ":", "raise", "JSONSchemaNotFound", "(", "path", ")", "return", "self", ".", "schemas", "[", "path", "]" ]
Retrieve the directory containing the given schema. :param path: Schema path, relative to the directory where it was registered. :raises invenio_jsonschemas.errors.JSONSchemaNotFound: If no schema was found in the specified path. :returns: The schema directory.
[ "Retrieve", "the", "directory", "containing", "the", "given", "schema", "." ]
python
train
39
materialsproject/pymatgen
pymatgen/core/surface.py
https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/core/surface.py#L386-L393
def center_of_mass(self): """ Calculates the center of mass of the slab """ weights = [s.species.weight for s in self] center_of_mass = np.average(self.frac_coords, weights=weights, axis=0) return center_of_mass
[ "def", "center_of_mass", "(", "self", ")", ":", "weights", "=", "[", "s", ".", "species", ".", "weight", "for", "s", "in", "self", "]", "center_of_mass", "=", "np", ".", "average", "(", "self", ".", "frac_coords", ",", "weights", "=", "weights", ",", "axis", "=", "0", ")", "return", "center_of_mass" ]
Calculates the center of mass of the slab
[ "Calculates", "the", "center", "of", "mass", "of", "the", "slab" ]
python
train
36
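The `center_of_mass` record above reads as a one-liner once a slab exists; the following is a minimal sketch using pymatgen's public surface API, with an illustrative copper cell rather than a physically refined structure.

from pymatgen.core.lattice import Lattice
from pymatgen.core.structure import Structure
from pymatgen.core.surface import SlabGenerator

# Illustrative single-atom cubic Cu cell (values chosen only for demonstration).
bulk = Structure(Lattice.cubic(3.61), ["Cu"], [[0, 0, 0]])
slab = SlabGenerator(bulk, miller_index=(1, 1, 1),
                     min_slab_size=10, min_vacuum_size=10).get_slab()

# Accessed as a property: the mass-weighted average of fractional coordinates.
print(slab.center_of_mass)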
shoebot/shoebot
shoebot/sbio/shell.py
https://github.com/shoebot/shoebot/blob/d554c1765c1899fa25727c9fc6805d221585562b/shoebot/sbio/shell.py#L182-L192
def do_speed(self, speed): """ rewind """ if speed: try: self.bot._speed = float(speed) except Exception as e: self.print_response('%s is not a valid framerate' % speed) return self.print_response('Speed: %s FPS' % self.bot._speed)
[ "def", "do_speed", "(", "self", ",", "speed", ")", ":", "if", "speed", ":", "try", ":", "self", ".", "bot", ".", "_speed", "=", "float", "(", "speed", ")", "except", "Exception", "as", "e", ":", "self", ".", "print_response", "(", "'%s is not a valid framerate'", "%", "speed", ")", "return", "self", ".", "print_response", "(", "'Speed: %s FPS'", "%", "self", ".", "bot", ".", "_speed", ")" ]
rewind
[ "rewind" ]
python
valid
30.272727