Dataset schema (one record per function; fields appear in this order for each record):

    repo              string   (length 7-55)
    path              string   (length 4-223)
    url               string   (length 87-315)
    code              string   (length 75-104k)
    code_tokens       list
    docstring         string   (length 1-46.9k)
    docstring_tokens  list
    language          string   (1 distinct value)
    partition         string   (3 distinct values)
    avg_line_len      float64  (7.91-980)
brunobord/tdaemon
tdaemon.py
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L199-L202
def file_sizes(self):
    """Returns total filesize (in MB)"""
    size = sum(map(os.path.getsize, self.file_list))
    return size / 1024 / 1024
[ "def", "file_sizes", "(", "self", ")", ":", "size", "=", "sum", "(", "map", "(", "os", ".", "path", ".", "getsize", ",", "self", ".", "file_list", ")", ")", "return", "size", "/", "1024", "/", "1024" ]
Returns total filesize (in MB)
[ "Returns", "total", "filesize", "(", "in", "MB", ")" ]
python
train
38.5
jmcarp/robobrowser
robobrowser/browser.py
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L230-L242
def _traverse(self, n=1):
    """Traverse state history. Used by `back` and `forward` methods.

    :param int n: Cursor increment. Positive values move forward in the
        browser history; negative values move backward.
    """
    if not self.history:
        raise exceptions.RoboError('Not tracking history')
    cursor = self._cursor + n
    if cursor >= len(self._states) or cursor < 0:
        raise exceptions.RoboError('Index out of range')
    self._cursor = cursor
[ "def", "_traverse", "(", "self", ",", "n", "=", "1", ")", ":", "if", "not", "self", ".", "history", ":", "raise", "exceptions", ".", "RoboError", "(", "'Not tracking history'", ")", "cursor", "=", "self", ".", "_cursor", "+", "n", "if", "cursor", ">=", "len", "(", "self", ".", "_states", ")", "or", "cursor", "<", "0", ":", "raise", "exceptions", ".", "RoboError", "(", "'Index out of range'", ")", "self", ".", "_cursor", "=", "cursor" ]
Traverse state history. Used by `back` and `forward` methods.

:param int n: Cursor increment. Positive values move forward in the
    browser history; negative values move backward.
[ "Traverse", "state", "history", ".", "Used", "by", "back", "and", "forward", "methods", "." ]
python
train
39
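The cursor arithmetic above is easy to exercise in isolation. A minimal sketch, with RoboBrowser's state machinery replaced by a plain list and a toy class (all names here are hypothetical, not the library's API):

    class HistorySketch:
        """Toy stand-in for RoboBrowser's state tracking (hypothetical)."""
        def __init__(self, states):
            self._states = list(states)
            self._cursor = len(self._states) - 1
            self.history = True

        def _traverse(self, n=1):
            cursor = self._cursor + n
            if cursor >= len(self._states) or cursor < 0:
                raise IndexError('Index out of range')
            self._cursor = cursor

    h = HistorySketch(['a', 'b', 'c'])
    h._traverse(-1)   # back: cursor 2 -> 1
    h._traverse(1)    # forward: cursor 1 -> 2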
J535D165/recordlinkage
recordlinkage/utils.py
https://github.com/J535D165/recordlinkage/blob/87a5f4af904e0834047cd07ff1c70146b1e6d693/recordlinkage/utils.py#L139-L174
def index_split(index, chunks):
    """Function to split pandas.Index and pandas.MultiIndex objects.

    Split :class:`pandas.Index` and :class:`pandas.MultiIndex` objects
    into chunks. This function is based on :func:`numpy.array_split`.

    Parameters
    ----------
    index : pandas.Index, pandas.MultiIndex
        A pandas.Index or pandas.MultiIndex to split into chunks.
    chunks : int
        The number of parts to split the index into.

    Returns
    -------
    list
        A list with chunked pandas.Index or pandas.MultiIndex objects.
    """
    Ntotal = index.shape[0]
    Nsections = int(chunks)
    if Nsections <= 0:
        raise ValueError('number sections must be larger than 0.')
    Neach_section, extras = divmod(Ntotal, Nsections)
    section_sizes = ([0] + extras * [Neach_section + 1] +
                     (Nsections - extras) * [Neach_section])
    div_points = numpy.array(section_sizes).cumsum()
    sub_ind = []
    for i in range(Nsections):
        st = div_points[i]
        end = div_points[i + 1]
        sub_ind.append(index[st:end])
    return sub_ind
[ "def", "index_split", "(", "index", ",", "chunks", ")", ":", "Ntotal", "=", "index", ".", "shape", "[", "0", "]", "Nsections", "=", "int", "(", "chunks", ")", "if", "Nsections", "<=", "0", ":", "raise", "ValueError", "(", "'number sections must be larger than 0.'", ")", "Neach_section", ",", "extras", "=", "divmod", "(", "Ntotal", ",", "Nsections", ")", "section_sizes", "=", "(", "[", "0", "]", "+", "extras", "*", "[", "Neach_section", "+", "1", "]", "+", "(", "Nsections", "-", "extras", ")", "*", "[", "Neach_section", "]", ")", "div_points", "=", "numpy", ".", "array", "(", "section_sizes", ")", ".", "cumsum", "(", ")", "sub_ind", "=", "[", "]", "for", "i", "in", "range", "(", "Nsections", ")", ":", "st", "=", "div_points", "[", "i", "]", "end", "=", "div_points", "[", "i", "+", "1", "]", "sub_ind", ".", "append", "(", "index", "[", "st", ":", "end", "]", ")", "return", "sub_ind" ]
Function to split pandas.Index and pandas.MultiIndex objects.

Split :class:`pandas.Index` and :class:`pandas.MultiIndex` objects
into chunks. This function is based on :func:`numpy.array_split`.

Parameters
----------
index : pandas.Index, pandas.MultiIndex
    A pandas.Index or pandas.MultiIndex to split into chunks.
chunks : int
    The number of parts to split the index into.

Returns
-------
list
    A list with chunked pandas.Index or pandas.MultiIndex objects.
[ "Function", "to", "split", "pandas", ".", "Index", "and", "pandas", ".", "MultiIndex", "objects", "." ]
python
train
29.75
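A short usage sketch, assuming index_split (above) is in scope; like numpy.array_split, the first `extras` chunks get one extra element:

    import pandas

    idx = pandas.Index(range(10))
    parts = index_split(idx, 3)
    [len(p) for p in parts]   # [4, 3, 3]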
ardydedase/pycouchbase
pycouchbase/viewsync.py
https://github.com/ardydedase/pycouchbase/blob/6f010b4d2ef41aead2366878d0cf0b1284c0db0e/pycouchbase/viewsync.py#L117-L176
def upload(cls):
    """Uploads all the local views from the :attr:`VIEWS_PATH` directory
    to the CouchBase server.

    This method **overwrites** all the server-side views with the
    same-named ones coming from the :attr:`VIEWS_PATH` folder.
    """
    cls._check_folder()
    os.chdir(cls.VIEWS_PATH)
    buckets = dict()
    # iterate local folders
    for bucket_name in os.listdir(cls.VIEWS_PATH):
        if not os.path.isdir(bucket_name):
            continue
        # get bucket object
        if bucket_name not in buckets:
            try:
                bucket = Connection.bucket(bucket_name)
            except BucketNotFoundError as why:
                print("[WARNING] %s" % str(why))
                continue
            else:
                buckets[bucket_name] = bucket
        else:
            bucket = buckets[bucket_name]
        # go through design docs
        for ddoc_name in os.listdir(bucket_name):
            views_path = '%s/%s/views' % (bucket_name, ddoc_name)
            spatial_path = '%s/%s/spatial' % (bucket_name, ddoc_name)
            if not (os.path.exists(views_path) and os.path.isdir(views_path)) and \
                    not (os.path.exists(spatial_path) and os.path.isdir(spatial_path)):
                continue
            # initialize design doc
            new_ddoc = {
                'views': {},
                'spatial': {},
            }
            # map and reduces
            if os.path.exists(views_path) and os.path.isdir(views_path):
                for filename in os.listdir(views_path):
                    if not os.path.isfile('%s/%s' % (views_path, filename)) or \
                            not filename.endswith(('.map.js', '.reduce.js')):
                        continue
                    view_name, view_type, js = filename.rsplit('.', 2)
                    if view_name not in new_ddoc['views']:
                        new_ddoc['views'][view_name] = {}
                    with open('%s/%s' % (views_path, filename), 'r') as f:
                        new_ddoc['views'][view_name][view_type] = f.read()
            # spatial views
            if os.path.exists(spatial_path) and os.path.isdir(spatial_path):
                for filename in os.listdir(spatial_path):
                    if not os.path.isfile('%s/%s' % (spatial_path, filename)) or \
                            not filename.endswith('.spatial.js'):
                        continue
                    view_name = filename.rsplit('.', 2)[0]
                    with open('%s/%s' % (spatial_path, filename), 'r') as f:
                        new_ddoc['spatial'][view_name] = f.read()
            bucket['_design/%s' % ddoc_name] = new_ddoc
            print('Uploaded design document: %s' % ddoc_name)
[ "def", "upload", "(", "cls", ")", ":", "cls", ".", "_check_folder", "(", ")", "os", ".", "chdir", "(", "cls", ".", "VIEWS_PATH", ")", "buckets", "=", "dict", "(", ")", "# iterate local folders", "for", "bucket_name", "in", "os", ".", "listdir", "(", "cls", ".", "VIEWS_PATH", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "bucket_name", ")", ":", "continue", "# get bucket object", "if", "bucket_name", "not", "in", "buckets", ":", "try", ":", "bucket", "=", "Connection", ".", "bucket", "(", "bucket_name", ")", "except", "BucketNotFoundError", "as", "why", ":", "print", "(", "\"[WARNING] %s\"", "%", "str", "(", "why", ")", ")", "continue", "else", ":", "buckets", "[", "bucket_name", "]", "=", "bucket", "else", ":", "bucket", "=", "buckets", "[", "bucket_name", "]", "# go through design docs", "for", "ddoc_name", "in", "os", ".", "listdir", "(", "bucket_name", ")", ":", "views_path", "=", "'%s/%s/views'", "%", "(", "bucket_name", ",", "ddoc_name", ")", "spatial_path", "=", "'%s/%s/spatial'", "%", "(", "bucket_name", ",", "ddoc_name", ")", "if", "not", "(", "os", ".", "path", ".", "exists", "(", "views_path", ")", "and", "os", ".", "path", ".", "isdir", "(", "views_path", ")", ")", "and", "not", "(", "os", ".", "path", ".", "exists", "(", "spatial_path", ")", "and", "os", ".", "path", ".", "isdir", "(", "spatial_path", ")", ")", ":", "continue", "# initialize design doc", "new_ddoc", "=", "{", "'views'", ":", "{", "}", ",", "'spatial'", ":", "{", "}", ",", "}", "# map and reduces", "if", "os", ".", "path", ".", "exists", "(", "views_path", ")", "and", "os", ".", "path", ".", "isdir", "(", "views_path", ")", ":", "for", "filename", "in", "os", ".", "listdir", "(", "views_path", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "'%s/%s'", "%", "(", "views_path", ",", "filename", ")", ")", "or", "not", "filename", ".", "endswith", "(", "(", "'.map.js'", ",", "'.reduce.js'", ")", ")", ":", "continue", "view_name", ",", "view_type", ",", "js", "=", "filename", ".", "rsplit", "(", "'.'", ",", "2", ")", "if", "view_name", "not", "in", "new_ddoc", "[", "'views'", "]", ":", "new_ddoc", "[", "'views'", "]", "[", "view_name", "]", "=", "{", "}", "with", "open", "(", "'%s/%s'", "%", "(", "views_path", ",", "filename", ")", ",", "'r'", ")", "as", "f", ":", "new_ddoc", "[", "'views'", "]", "[", "view_name", "]", "[", "view_type", "]", "=", "f", ".", "read", "(", ")", "# spatial views", "if", "os", ".", "path", ".", "exists", "(", "spatial_path", ")", "and", "os", ".", "path", ".", "isdir", "(", "spatial_path", ")", ":", "for", "filename", "in", "os", ".", "listdir", "(", "spatial_path", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "'%s/%s'", "%", "(", "spatial_path", ",", "filename", ")", ")", "or", "not", "filename", ".", "endswith", "(", "'.spatial.js'", ")", ":", "continue", "view_name", "=", "filename", ".", "rsplit", "(", "'.'", ",", "2", ")", "[", "0", "]", "with", "open", "(", "'%s/%s'", "%", "(", "spatial_path", ",", "filename", ")", ",", "'r'", ")", "as", "f", ":", "new_ddoc", "[", "'spatial'", "]", "[", "view_name", "]", "=", "f", ".", "read", "(", ")", "bucket", "[", "'_design/%s'", "%", "ddoc_name", "]", "=", "new_ddoc", "print", "(", "'Uploaded design document: %s'", "%", "ddoc_name", ")", "pass" ]
Uploads all the local views from the :attr:`VIEWS_PATH` directory to the CouchBase server.

This method **overwrites** all the server-side views with the same-named ones coming from the :attr:`VIEWS_PATH` folder.
[ "Uploads", "all", "the", "local", "views", "from", ":", "attr", ":", "VIEW_PATHS", "directory", "to", "CouchBase", "server" ]
python
train
48.566667
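The loop above implies a specific on-disk layout under VIEWS_PATH: one folder per bucket, one subfolder per design document, with views/ holding <view>.map.js / <view>.reduce.js files and spatial/ holding <view>.spatial.js files. A hypothetical tree for illustration (bucket, design-doc, and view names are made up):

    VIEWS_PATH/
        beer-sample/              # one folder per bucket
            recipes/              # one folder per design document
                views/
                    by_name.map.js
                    by_name.reduce.js
                spatial/
                    points.spatial.js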
amzn/ion-python
amazon/ion/reader_binary.py
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L229-L233
def remaining(self):
    """Determines how many bytes are remaining in the current context."""
    if self.depth == 0:
        return _STREAM_REMAINING
    return self.limit - self.queue.position
[ "def", "remaining", "(", "self", ")", ":", "if", "self", ".", "depth", "==", "0", ":", "return", "_STREAM_REMAINING", "return", "self", ".", "limit", "-", "self", ".", "queue", ".", "position" ]
Determines how many bytes are remaining in the current context.
[ "Determines", "how", "many", "bytes", "are", "remaining", "in", "the", "current", "context", "." ]
python
train
41.4
josuebrunel/myql
myql/myql.py
https://github.com/josuebrunel/myql/blob/891bad29cc83a81b3f5ebc4d0401d6f2c22f119e/myql/myql.py#L315-L332
def where(self, *args):
    '''This method simulates a where condition. Use as follows:

    >>> yql.select('mytable').where(['name', '=', 'alain'], ['location', '!=', 'paris'])
    '''
    if not self._table:
        raise errors.NoTableSelectedError('No Table Selected')
    self._query += ' WHERE '
    clause = [self._clause_formatter(x) for x in args if x]
    self._query += ' AND '.join(clause)
    payload = self._payload_builder(self._query)
    response = self.execute_query(payload)
    return response
[ "def", "where", "(", "self", ",", "*", "args", ")", ":", "if", "not", "self", ".", "_table", ":", "raise", "errors", ".", "NoTableSelectedError", "(", "'No Table Selected'", ")", "clause", "=", "[", "]", "self", ".", "_query", "+=", "' WHERE '", "clause", "=", "[", "self", ".", "_clause_formatter", "(", "x", ")", "for", "x", "in", "args", "if", "x", "]", "self", ".", "_query", "+=", "' AND '", ".", "join", "(", "clause", ")", "payload", "=", "self", ".", "_payload_builder", "(", "self", ".", "_query", ")", "response", "=", "self", ".", "execute_query", "(", "payload", ")", "return", "response" ]
This method simulates a where condition. Use as follows:

>>> yql.select('mytable').where(['name', '=', 'alain'], ['location', '!=', 'paris'])
[ "This", "method", "simulates", "a", "where", "condition", ".", "Use", "as", "follow", ":", ">>>", "yql", ".", "select", "(", "mytable", ")", ".", "where", "(", "[", "name", "=", "alain", "]", "[", "location", "!", "=", "paris", "]", ")" ]
python
train
31.444444
bxlab/bx-python
lib/bx_extras/pstat.py
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/pstat.py#L642-L658
def sortby(listoflists, sortcols):
    """Sorts a list of lists on the column(s) specified in the sequence sortcols.

    Usage:   sortby(listoflists, sortcols)
    Returns: sorted list, unchanged column ordering
    """
    newlist = abut(colex(listoflists, sortcols), listoflists)
    newlist.sort()
    try:
        numcols = len(sortcols)
    except TypeError:
        numcols = 1
    crit = '[' + str(numcols) + ':]'
    newlist = colex(newlist, crit)
    return newlist
[ "def", "sortby", "(", "listoflists", ",", "sortcols", ")", ":", "newlist", "=", "abut", "(", "colex", "(", "listoflists", ",", "sortcols", ")", ",", "listoflists", ")", "newlist", ".", "sort", "(", ")", "try", ":", "numcols", "=", "len", "(", "sortcols", ")", "except", "TypeError", ":", "numcols", "=", "1", "crit", "=", "'['", "+", "str", "(", "numcols", ")", "+", "':]'", "newlist", "=", "colex", "(", "newlist", ",", "crit", ")", "return", "newlist" ]
Sorts a list of lists on the column(s) specified in the sequence sortcols.

Usage:   sortby(listoflists, sortcols)
Returns: sorted list, unchanged column ordering
[ "Sorts", "a", "list", "of", "lists", "on", "the", "column", "(", "s", ")", "specified", "in", "the", "sequence", "sortcols", "." ]
python
train
26.058824
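sortby leans on pstat's abut/colex helpers; for the common single-column case, the same result can be had with the standard library alone (a sketch, not the library's API):

    from operator import itemgetter

    rows = [[3, 'c'], [1, 'a'], [2, 'b']]
    sorted(rows, key=itemgetter(0))   # [[1, 'a'], [2, 'b'], [3, 'c']]; column order unchanged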
SBRG/ssbio
ssbio/io/__init__.py
https://github.com/SBRG/ssbio/blob/e9449e64ffc1a1f5ad07e5849aa12a650095f8a2/ssbio/io/__init__.py#L51-L67
def load_pickle(file, encoding=None):
    """Load a pickle file.

    Args:
        file (str): Path to pickle file

    Returns:
        object: Loaded object from pickle file

    """
    # TODO: test set encoding='latin1' for 2/3 incompatibility
    if encoding:
        with open(file, 'rb') as f:
            return pickle.load(f, encoding=encoding)
    with open(file, 'rb') as f:
        return pickle.load(f)
[ "def", "load_pickle", "(", "file", ",", "encoding", "=", "None", ")", ":", "# TODO: test set encoding='latin1' for 2/3 incompatibility", "if", "encoding", ":", "with", "open", "(", "file", ",", "'rb'", ")", "as", "f", ":", "return", "pickle", ".", "load", "(", "f", ",", "encoding", "=", "encoding", ")", "with", "open", "(", "file", ",", "'rb'", ")", "as", "f", ":", "return", "pickle", ".", "load", "(", "f", ")" ]
Load a pickle file.

Args:
    file (str): Path to pickle file

Returns:
    object: Loaded object from pickle file
[ "Load", "a", "pickle", "file", "." ]
python
train
23.529412
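A round-trip usage sketch, assuming load_pickle (above) is in scope; 'data.pkl' is a hypothetical path:

    import pickle

    with open('data.pkl', 'wb') as f:   # write something to load back
        pickle.dump({'a': 1}, f)

    obj = load_pickle('data.pkl')                      # -> {'a': 1}
    obj = load_pickle('data.pkl', encoding='latin1')   # e.g. for pickles written by Python 2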
bd808/python-iptools
iptools/ipv4.py
https://github.com/bd808/python-iptools/blob/5d3fae0056297540355bb7c6c112703cfaa4b6ce/iptools/ipv4.py#L617-L634
def _block_from_ip_and_prefix(ip, prefix):
    """Create a tuple of (start, end) dotted-quad addresses from the given
    ip address and prefix length.

    :param ip: Ip address in block
    :type ip: long
    :param prefix: Prefix size for block
    :type prefix: int
    :returns: Tuple of block (start, end)
    """
    # keep left most prefix bits of ip
    shift = 32 - prefix
    block_start = ip >> shift << shift
    # expand right most 32 - prefix bits to 1
    mask = (1 << shift) - 1
    block_end = block_start | mask
    return (long2ip(block_start), long2ip(block_end))
[ "def", "_block_from_ip_and_prefix", "(", "ip", ",", "prefix", ")", ":", "# keep left most prefix bits of ip", "shift", "=", "32", "-", "prefix", "block_start", "=", "ip", ">>", "shift", "<<", "shift", "# expand right most 32 - prefix bits to 1", "mask", "=", "(", "1", "<<", "shift", ")", "-", "1", "block_end", "=", "block_start", "|", "mask", "return", "(", "long2ip", "(", "block_start", ")", ",", "long2ip", "(", "block_end", ")", ")" ]
Create a tuple of (start, end) dotted-quad addresses from the given
ip address and prefix length.

:param ip: Ip address in block
:type ip: long
:param prefix: Prefix size for block
:type prefix: int
:returns: Tuple of block (start, end)
[ "Create", "a", "tuple", "of", "(", "start", "end", ")", "dotted", "-", "quad", "addresses", "from", "the", "given", "ip", "address", "and", "prefix", "length", "." ]
python
train
31.555556
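The bit arithmetic is worth seeing with concrete numbers; a worked example for 192.168.1.1/24 (pure integer math, long2ip omitted):

    ip, prefix = 3232235777, 24          # 192.168.1.1 as a 32-bit integer
    shift = 32 - prefix                  # 8
    block_start = ip >> shift << shift   # 3232235776 -> '192.168.1.0'
    mask = (1 << shift) - 1              # 255
    block_end = block_start | mask       # 3232236031 -> '192.168.1.255'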
inasafe/inasafe
safe/gui/widgets/dock.py
https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/gui/widgets/dock.py#L307-L332
def _show_organisation_logo(self):
    """Show the organisation logo in the dock if possible."""
    dock_width = float(self.width())
    # Don't let the image be more than 100px in height
    maximum_height = 100.0  # px
    pixmap = QPixmap(self.organisation_logo_path)
    if pixmap.height() < 1 or pixmap.width() < 1:
        return
    height_ratio = maximum_height / pixmap.height()
    maximum_width = int(pixmap.width() * height_ratio)
    # Don't let the image be more than the dock width wide
    if maximum_width > dock_width:
        width_ratio = dock_width / float(pixmap.width())
        maximum_height = int(pixmap.height() * width_ratio)
        maximum_width = dock_width
    too_high = pixmap.height() > maximum_height
    too_wide = pixmap.width() > dock_width
    if too_wide or too_high:
        pixmap = pixmap.scaled(
            maximum_width, maximum_height, Qt.KeepAspectRatio)
    self.organisation_logo.setMaximumWidth(maximum_width)
    # We have manually scaled using logic above
    self.organisation_logo.setScaledContents(False)
    self.organisation_logo.setPixmap(pixmap)
    self.organisation_logo.show()
[ "def", "_show_organisation_logo", "(", "self", ")", ":", "dock_width", "=", "float", "(", "self", ".", "width", "(", ")", ")", "# Don't let the image be more tha 100px height", "maximum_height", "=", "100.0", "# px", "pixmap", "=", "QPixmap", "(", "self", ".", "organisation_logo_path", ")", "if", "pixmap", ".", "height", "(", ")", "<", "1", "or", "pixmap", ".", "width", "(", ")", "<", "1", ":", "return", "height_ratio", "=", "maximum_height", "/", "pixmap", ".", "height", "(", ")", "maximum_width", "=", "int", "(", "pixmap", ".", "width", "(", ")", "*", "height_ratio", ")", "# Don't let the image be more than the dock width wide", "if", "maximum_width", ">", "dock_width", ":", "width_ratio", "=", "dock_width", "/", "float", "(", "pixmap", ".", "width", "(", ")", ")", "maximum_height", "=", "int", "(", "pixmap", ".", "height", "(", ")", "*", "width_ratio", ")", "maximum_width", "=", "dock_width", "too_high", "=", "pixmap", ".", "height", "(", ")", ">", "maximum_height", "too_wide", "=", "pixmap", ".", "width", "(", ")", ">", "dock_width", "if", "too_wide", "or", "too_high", ":", "pixmap", "=", "pixmap", ".", "scaled", "(", "maximum_width", ",", "maximum_height", ",", "Qt", ".", "KeepAspectRatio", ")", "self", ".", "organisation_logo", ".", "setMaximumWidth", "(", "maximum_width", ")", "# We have manually scaled using logic above", "self", ".", "organisation_logo", ".", "setScaledContents", "(", "False", ")", "self", ".", "organisation_logo", ".", "setPixmap", "(", "pixmap", ")", "self", ".", "organisation_logo", ".", "show", "(", ")" ]
Show the organisation logo in the dock if possible.
[ "Show", "the", "organisation", "logo", "in", "the", "dock", "if", "possible", "." ]
python
train
46.5
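Qt calls aside, the sizing logic reduces to pure arithmetic: cap the height at 100 px, then shrink further if the result is wider than the dock. A standalone sketch of that constraint (fit_logo is a hypothetical name, not part of the plugin):

    def fit_logo(width, height, dock_width, max_height=100.0):
        """Return (w, h) scaled to max_height, then shrunk to dock_width if needed."""
        ratio = max_height / height
        w = int(width * ratio)
        if w > dock_width:
            ratio = dock_width / float(width)
            return int(dock_width), int(height * ratio)
        return w, int(max_height)

    fit_logo(400, 200, 150)   # (150, 75): width-bound
    fit_logo(120, 300, 150)   # (40, 100): height-bound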
acsone/setuptools-odoo
setuptools_odoo/core.py
https://github.com/acsone/setuptools-odoo/blob/cc4d7a63cf99fb3651c8c92f66f7dd13bf2afe6b/setuptools_odoo/core.py#L78-L95
def _get_odoo_version_info(addons_dir, odoo_version_override=None):
    """ Detect Odoo version from an addons directory """
    odoo_version_info = None
    addons = os.listdir(addons_dir)
    for addon in addons:
        addon_dir = os.path.join(addons_dir, addon)
        if is_installable_addon(addon_dir):
            manifest = read_manifest(addon_dir)
            _, _, addon_odoo_version_info = _get_version(
                addon_dir, manifest, odoo_version_override,
                git_post_version=False)
            if odoo_version_info is not None and \
                    odoo_version_info != addon_odoo_version_info:
                raise DistutilsSetupError("Not all addons are for the same "
                                          "odoo version in %s (error detected "
                                          "in %s)" % (addons_dir, addon))
            odoo_version_info = addon_odoo_version_info
    return odoo_version_info
[ "def", "_get_odoo_version_info", "(", "addons_dir", ",", "odoo_version_override", "=", "None", ")", ":", "odoo_version_info", "=", "None", "addons", "=", "os", ".", "listdir", "(", "addons_dir", ")", "for", "addon", "in", "addons", ":", "addon_dir", "=", "os", ".", "path", ".", "join", "(", "addons_dir", ",", "addon", ")", "if", "is_installable_addon", "(", "addon_dir", ")", ":", "manifest", "=", "read_manifest", "(", "addon_dir", ")", "_", ",", "_", ",", "addon_odoo_version_info", "=", "_get_version", "(", "addon_dir", ",", "manifest", ",", "odoo_version_override", ",", "git_post_version", "=", "False", ")", "if", "odoo_version_info", "is", "not", "None", "and", "odoo_version_info", "!=", "addon_odoo_version_info", ":", "raise", "DistutilsSetupError", "(", "\"Not all addons are for the same \"", "\"odoo version in %s (error detected \"", "\"in %s)\"", "%", "(", "addons_dir", ",", "addon", ")", ")", "odoo_version_info", "=", "addon_odoo_version_info", "return", "odoo_version_info" ]
Detect Odoo version from an addons directory
[ "Detect", "Odoo", "version", "from", "an", "addons", "directory" ]
python
train
51.777778
aodag/WebDispatch
webdispatch/methoddispatcher.py
https://github.com/aodag/WebDispatch/blob/55f8658a2b4100498e098a80303a346c3940f1bc/webdispatch/methoddispatcher.py#L21-L30
def on_view_not_found(
        self, _,
        start_response: Callable[[str, List[Tuple[str, str]]], None],
) -> Iterable[bytes]:
    """ called when a valid view is not found """
    start_response(
        "405 Method Not Allowed",
        [('Content-type', 'text/plain')])
    return [b"Method Not Allowed"]
[ "def", "on_view_not_found", "(", "self", ",", "_", ",", "start_response", ":", "Callable", "[", "[", "str", ",", "List", "[", "Tuple", "[", "str", ",", "str", "]", "]", "]", ",", "None", "]", ",", ")", "->", "Iterable", "[", "bytes", "]", ":", "start_response", "(", "\"405 Method Not Allowed\"", ",", "[", "(", "'Content-type'", ",", "'text/plain'", ")", "]", ")", "return", "[", "b\"Method Not Allowed\"", "]" ]
called when valid view is not found
[ "called", "when", "valid", "view", "is", "not", "found" ]
python
train
33.4
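Because this follows the plain WSGI contract, it can be exercised with a stub start_response; a sketch where `dispatcher` stands in for whatever instance webdispatch builds (hypothetical name, wiring omitted):

    def fake_start_response(status, headers):
        # Stand-in for the callable a WSGI server would pass.
        print(status, headers)

    body = dispatcher.on_view_not_found(None, fake_start_response)
    # prints: 405 Method Not Allowed [('Content-type', 'text/plain')]
    assert body == [b"Method Not Allowed"]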
log2timeline/dfvfs
dfvfs/helpers/source_scanner.py
https://github.com/log2timeline/dfvfs/blob/2b3ccd115f9901d89f383397d4a1376a873c83c4/dfvfs/helpers/source_scanner.py#L395-L538
def _ScanNode(self, scan_context, scan_node, auto_recurse=True):
  """Scans a node for supported formats.

  Args:
    scan_context (SourceScannerContext): source scanner context.
    scan_node (SourceScanNode): source scan node.
    auto_recurse (Optional[bool]): True if the scan should automatically
        recurse as far as possible.

  Raises:
    BackEndError: if the source cannot be scanned.
    ValueError: if the scan context or scan node is invalid.
  """
  if not scan_context:
    raise ValueError('Invalid scan context.')

  if not scan_node:
    raise ValueError('Invalid scan node.')

  scan_path_spec = scan_node.path_spec

  system_level_file_entry = None
  if scan_node.IsSystemLevel():
    system_level_file_entry = resolver.Resolver.OpenFileEntry(
        scan_node.path_spec, resolver_context=self._resolver_context)

    if system_level_file_entry is None:
      raise errors.BackEndError('Unable to open file entry.')

    if system_level_file_entry.IsDirectory():
      scan_context.SetSourceType(definitions.SOURCE_TYPE_DIRECTORY)
      return

    source_path_spec = self.ScanForStorageMediaImage(scan_node.path_spec)
    if source_path_spec:
      scan_node.scanned = True
      scan_node = scan_context.AddScanNode(source_path_spec, scan_node)

      if system_level_file_entry.IsDevice():
        source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
      else:
        source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE

      scan_context.SetSourceType(source_type)

      if not auto_recurse:
        return

  # In case we did not find a storage media image type we keep looking
  # since not all RAW storage media image naming schemas are known and
  # its type can only be detected by its content.

  source_path_spec = None
  while True:
    if scan_node.IsFileSystem():
      # No need to scan a file systems scan node for volume systems.
      break

    if scan_node.SupportsEncryption():
      self._ScanEncryptedVolumeNode(scan_context, scan_node)

    if scan_context.IsLockedScanNode(scan_node.path_spec):
      # Scan node is locked, such as an encrypted volume, and we cannot
      # scan it for a volume system.
      break

    source_path_spec = self.ScanForVolumeSystem(scan_node.path_spec)
    if not source_path_spec:
      # No volume system found, continue with a file system scan.
      break

    if not scan_context.HasScanNode(source_path_spec):
      scan_node.scanned = True
      scan_node = scan_context.AddScanNode(source_path_spec, scan_node)

      if system_level_file_entry and system_level_file_entry.IsDevice():
        source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
      else:
        source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE

      scan_context.SetSourceType(source_type)

      if scan_node.IsVolumeSystemRoot():
        self._ScanVolumeSystemRootNode(
            scan_context, scan_node, auto_recurse=auto_recurse)

        # We have already scanned for the file systems.
        return

      if not auto_recurse and scan_context.updated:
        return

    # Nothing new found.
    if not scan_context.updated:
      break

    # In case we did not find a volume system type we keep looking
    # since we could be dealing with a storage media image that contains
    # a single volume.

  # No need to scan the root of a volume system for a file system.
  if scan_node.IsVolumeSystemRoot():
    pass

  elif scan_context.IsLockedScanNode(scan_node.path_spec):
    # Scan node is locked, such as an encrypted volume, and we cannot
    # scan it for a file system.
    pass

  elif (scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW and
        auto_recurse and scan_node.path_spec != scan_path_spec):
    # Since scanning for file systems in VSS snapshot volumes can
    # be expensive we only do this when explicitly asked for.
    pass

  elif not scan_node.IsFileSystem():
    source_path_spec = self.ScanForFileSystem(scan_node.path_spec)
    if not source_path_spec:
      # Since a RAW storage media image can only be determined by naming
      # schema we could have a single file that is not a RAW storage media
      # image yet matches the naming schema.
      if scan_node.path_spec.type_indicator == definitions.TYPE_INDICATOR_RAW:
        scan_node = scan_context.RemoveScanNode(scan_node.path_spec)

        # Make sure to override the previously assigned source type.
        scan_context.source_type = definitions.SOURCE_TYPE_FILE
      else:
        scan_context.SetSourceType(definitions.SOURCE_TYPE_FILE)

    elif not scan_context.HasScanNode(source_path_spec):
      scan_node.scanned = True
      scan_node = scan_context.AddScanNode(source_path_spec, scan_node)

      if system_level_file_entry and system_level_file_entry.IsDevice():
        source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE
      else:
        source_type = definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE

      scan_context.SetSourceType(source_type)

  # If all scans failed mark the scan node as scanned so we do not scan it
  # again.
  if not scan_node.scanned:
    scan_node.scanned = True
[ "def", "_ScanNode", "(", "self", ",", "scan_context", ",", "scan_node", ",", "auto_recurse", "=", "True", ")", ":", "if", "not", "scan_context", ":", "raise", "ValueError", "(", "'Invalid scan context.'", ")", "if", "not", "scan_node", ":", "raise", "ValueError", "(", "'Invalid scan node.'", ")", "scan_path_spec", "=", "scan_node", ".", "path_spec", "system_level_file_entry", "=", "None", "if", "scan_node", ".", "IsSystemLevel", "(", ")", ":", "system_level_file_entry", "=", "resolver", ".", "Resolver", ".", "OpenFileEntry", "(", "scan_node", ".", "path_spec", ",", "resolver_context", "=", "self", ".", "_resolver_context", ")", "if", "system_level_file_entry", "is", "None", ":", "raise", "errors", ".", "BackEndError", "(", "'Unable to open file entry.'", ")", "if", "system_level_file_entry", ".", "IsDirectory", "(", ")", ":", "scan_context", ".", "SetSourceType", "(", "definitions", ".", "SOURCE_TYPE_DIRECTORY", ")", "return", "source_path_spec", "=", "self", ".", "ScanForStorageMediaImage", "(", "scan_node", ".", "path_spec", ")", "if", "source_path_spec", ":", "scan_node", ".", "scanned", "=", "True", "scan_node", "=", "scan_context", ".", "AddScanNode", "(", "source_path_spec", ",", "scan_node", ")", "if", "system_level_file_entry", ".", "IsDevice", "(", ")", ":", "source_type", "=", "definitions", ".", "SOURCE_TYPE_STORAGE_MEDIA_DEVICE", "else", ":", "source_type", "=", "definitions", ".", "SOURCE_TYPE_STORAGE_MEDIA_IMAGE", "scan_context", ".", "SetSourceType", "(", "source_type", ")", "if", "not", "auto_recurse", ":", "return", "# In case we did not find a storage media image type we keep looking", "# since not all RAW storage media image naming schemas are known and", "# its type can only detected by its content.", "source_path_spec", "=", "None", "while", "True", ":", "if", "scan_node", ".", "IsFileSystem", "(", ")", ":", "# No need to scan a file systems scan node for volume systems.", "break", "if", "scan_node", ".", "SupportsEncryption", "(", ")", ":", "self", ".", "_ScanEncryptedVolumeNode", "(", "scan_context", ",", "scan_node", ")", "if", "scan_context", ".", "IsLockedScanNode", "(", "scan_node", ".", "path_spec", ")", ":", "# Scan node is locked, such as an encrypted volume, and we cannot", "# scan it for a volume system.", "break", "source_path_spec", "=", "self", ".", "ScanForVolumeSystem", "(", "scan_node", ".", "path_spec", ")", "if", "not", "source_path_spec", ":", "# No volume system found continue with a file system scan.", "break", "if", "not", "scan_context", ".", "HasScanNode", "(", "source_path_spec", ")", ":", "scan_node", ".", "scanned", "=", "True", "scan_node", "=", "scan_context", ".", "AddScanNode", "(", "source_path_spec", ",", "scan_node", ")", "if", "system_level_file_entry", "and", "system_level_file_entry", ".", "IsDevice", "(", ")", ":", "source_type", "=", "definitions", ".", "SOURCE_TYPE_STORAGE_MEDIA_DEVICE", "else", ":", "source_type", "=", "definitions", ".", "SOURCE_TYPE_STORAGE_MEDIA_IMAGE", "scan_context", ".", "SetSourceType", "(", "source_type", ")", "if", "scan_node", ".", "IsVolumeSystemRoot", "(", ")", ":", "self", ".", "_ScanVolumeSystemRootNode", "(", "scan_context", ",", "scan_node", ",", "auto_recurse", "=", "auto_recurse", ")", "# We already have already scanned for the file systems.", "return", "if", "not", "auto_recurse", "and", "scan_context", ".", "updated", ":", "return", "# Nothing new found.", "if", "not", "scan_context", ".", "updated", ":", "break", "# In case we did not find a volume system type we keep looking", "# since 
we could be dealing with a storage media image that contains", "# a single volume.", "# No need to scan the root of a volume system for a file system.", "if", "scan_node", ".", "IsVolumeSystemRoot", "(", ")", ":", "pass", "elif", "scan_context", ".", "IsLockedScanNode", "(", "scan_node", ".", "path_spec", ")", ":", "# Scan node is locked, such as an encrypted volume, and we cannot", "# scan it for a file system.", "pass", "elif", "(", "scan_node", ".", "type_indicator", "==", "definitions", ".", "TYPE_INDICATOR_VSHADOW", "and", "auto_recurse", "and", "scan_node", ".", "path_spec", "!=", "scan_path_spec", ")", ":", "# Since scanning for file systems in VSS snapshot volumes can", "# be expensive we only do this when explicitly asked for.", "pass", "elif", "not", "scan_node", ".", "IsFileSystem", "(", ")", ":", "source_path_spec", "=", "self", ".", "ScanForFileSystem", "(", "scan_node", ".", "path_spec", ")", "if", "not", "source_path_spec", ":", "# Since RAW storage media image can only be determined by naming schema", "# we could have single file that is not a RAW storage media image yet", "# matches the naming schema.", "if", "scan_node", ".", "path_spec", ".", "type_indicator", "==", "definitions", ".", "TYPE_INDICATOR_RAW", ":", "scan_node", "=", "scan_context", ".", "RemoveScanNode", "(", "scan_node", ".", "path_spec", ")", "# Make sure to override the previously assigned source type.", "scan_context", ".", "source_type", "=", "definitions", ".", "SOURCE_TYPE_FILE", "else", ":", "scan_context", ".", "SetSourceType", "(", "definitions", ".", "SOURCE_TYPE_FILE", ")", "elif", "not", "scan_context", ".", "HasScanNode", "(", "source_path_spec", ")", ":", "scan_node", ".", "scanned", "=", "True", "scan_node", "=", "scan_context", ".", "AddScanNode", "(", "source_path_spec", ",", "scan_node", ")", "if", "system_level_file_entry", "and", "system_level_file_entry", ".", "IsDevice", "(", ")", ":", "source_type", "=", "definitions", ".", "SOURCE_TYPE_STORAGE_MEDIA_DEVICE", "else", ":", "source_type", "=", "definitions", ".", "SOURCE_TYPE_STORAGE_MEDIA_IMAGE", "scan_context", ".", "SetSourceType", "(", "source_type", ")", "# If all scans failed mark the scan node as scanned so we do not scan it", "# again.", "if", "not", "scan_node", ".", "scanned", ":", "scan_node", ".", "scanned", "=", "True" ]
Scans a node for supported formats.

Args:
    scan_context (SourceScannerContext): source scanner context.
    scan_node (SourceScanNode): source scan node.
    auto_recurse (Optional[bool]): True if the scan should automatically
        recurse as far as possible.

Raises:
    BackEndError: if the source cannot be scanned.
    ValueError: if the scan context or scan node is invalid.
[ "Scans", "a", "node", "for", "supported", "formats", "." ]
python
train
36.048611
saeschdivara/ArangoPy
arangodb/orm/models.py
https://github.com/saeschdivara/ArangoPy/blob/b924cc57bed71520fc2ef528b917daeb98e10eca/arangodb/orm/models.py#L580-L619
def get_all_fields(cls, class_obj=None, fields=None):
    """TODO: This needs to be properly used"""

    def return_fields(obj):
        internal_fields = fields
        if internal_fields is None:
            internal_fields = {}
        for attribute in dir(obj):
            try:
                attr_val = getattr(obj, attribute)
                attr_cls = attr_val.__class__
                # If it is a model field, call on init
                if issubclass(attr_cls, ModelField):
                    internal_fields[attribute] = attr_val
            except:
                pass
        return internal_fields

    if class_obj is None:
        class_obj = cls
        fields = return_fields(class_obj)
        for parent_class in cls.__bases__:
            parent_fields = cls.get_all_fields(parent_class, fields)
            for field_name, field_value in list(parent_fields.items()):
                if field_name not in fields:
                    fields[field_name] = field_value
        return fields
    else:
        if not isinstance(class_obj, CollectionModel):
            return fields
[ "def", "get_all_fields", "(", "cls", ",", "class_obj", "=", "None", ",", "fields", "=", "None", ")", ":", "def", "return_fields", "(", "obj", ")", ":", "internal_fields", "=", "fields", "if", "internal_fields", "is", "None", ":", "internal_fields", "=", "{", "}", "for", "attribute", "in", "dir", "(", "obj", ")", ":", "try", ":", "attr_val", "=", "getattr", "(", "obj", ",", "attribute", ")", "attr_cls", "=", "attr_val", ".", "__class__", "# If it is a model field, call on init", "if", "issubclass", "(", "attr_cls", ",", "ModelField", ")", ":", "internal_fields", "[", "attribute", "]", "=", "attr_val", "except", ":", "pass", "return", "internal_fields", "if", "class_obj", "is", "None", ":", "class_obj", "=", "cls", "fields", "=", "return_fields", "(", "class_obj", ")", "for", "parent_class", "in", "cls", ".", "__bases__", ":", "parent_fields", "=", "cls", ".", "get_all_fields", "(", "parent_class", ",", "fields", ")", "for", "field_name", ",", "field_value", "in", "list", "(", "parent_fields", ".", "items", "(", ")", ")", ":", "if", "not", "field_name", "in", "fields", ":", "fields", "[", "field_name", "]", "=", "field_value", "return", "fields", "else", ":", "if", "not", "isinstance", "(", "class_obj", ",", "CollectionModel", ")", ":", "return", "fields" ]
TODO: This needs to be properly used
[ "TODO", ":", "This", "needs", "to", "be", "properly", "used" ]
python
train
30.125
lemieuxl/pyGenClean
pyGenClean/SexCheck/sex_check.py
https://github.com/lemieuxl/pyGenClean/blob/6173a48ccc0cf3a3bd711b1f2a1fa16248b8bf55/pyGenClean/SexCheck/sex_check.py#L469-L485
def createPedChr23UsingPlink(options):
    """Run Plink to create a ped format.

    :param options: the options.
    :type options: argparse.Namespace

    Uses Plink to create a ``ped`` file of markers on the chromosome ``23``.
    It uses the ``recodeA`` options to use additive coding. It also subsets
    the data to keep only samples with sex problems.
    """
    plinkCommand = ["plink", "--noweb", "--bfile", options.bfile,
                    "--chr", "23", "--recodeA", "--keep",
                    options.out + ".list_problem_sex_ids",
                    "--out", options.out + ".chr23_recodeA"]
    runCommand(plinkCommand)
[ "def", "createPedChr23UsingPlink", "(", "options", ")", ":", "plinkCommand", "=", "[", "\"plink\"", ",", "\"--noweb\"", ",", "\"--bfile\"", ",", "options", ".", "bfile", ",", "\"--chr\"", ",", "\"23\"", ",", "\"--recodeA\"", ",", "\"--keep\"", ",", "options", ".", "out", "+", "\".list_problem_sex_ids\"", ",", "\"--out\"", ",", "options", ".", "out", "+", "\".chr23_recodeA\"", "]", "runCommand", "(", "plinkCommand", ")" ]
Run Plink to create a ped format.

:param options: the options.
:type options: argparse.Namespace

Uses Plink to create a ``ped`` file of markers on the chromosome ``23``.
It uses the ``recodeA`` options to use additive coding. It also subsets
the data to keep only samples with sex problems.
[ "Run", "Plink", "to", "create", "a", "ped", "format", "." ]
python
train
36.764706
AkihikoITOH/capybara
capybara/virtualenv/lib/python2.7/site-packages/flask/blueprints.py
https://github.com/AkihikoITOH/capybara/blob/e86c2173ea386654f4ae061148e8fbe3f25e715c/capybara/virtualenv/lib/python2.7/site-packages/flask/blueprints.py#L301-L307
def after_app_request(self, f):
    """Like :meth:`Flask.after_request` but for a blueprint.  Such a function
    is executed after each request, even if outside of the blueprint.
    """
    self.record_once(lambda s: s.app.after_request_funcs
        .setdefault(None, []).append(f))
    return f
[ "def", "after_app_request", "(", "self", ",", "f", ")", ":", "self", ".", "record_once", "(", "lambda", "s", ":", "s", ".", "app", ".", "after_request_funcs", ".", "setdefault", "(", "None", ",", "[", "]", ")", ".", "append", "(", "f", ")", ")", "return", "f" ]
Like :meth:`Flask.after_request` but for a blueprint. Such a function is executed after each request, even if outside of the blueprint.
[ "Like", ":", "meth", ":", "Flask", ".", "after_request", "but", "for", "a", "blueprint", ".", "Such", "a", "function", "is", "executed", "after", "each", "request", "even", "if", "outside", "of", "the", "blueprint", "." ]
python
test
45.142857
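A short usage sketch of the decorator on a blueprint (the blueprint name and header are illustrative):

    from flask import Blueprint

    bp = Blueprint('bp', __name__)

    @bp.after_app_request
    def add_header(response):
        # Runs after every request in the app, not just this blueprint's.
        response.headers['X-Example'] = 'value'
        return response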
Erotemic/utool
utool/util_inspect.py
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L161-L187
def get_internal_call_graph(fpath, with_doctests=False):
    """
    CommandLine:
        python -m utool.util_inspect get_internal_call_graph --show --modpath=~/code/ibeis/ibeis/init/main_helpers.py --show
        python -m utool.util_inspect get_internal_call_graph --show --modpath=~/code/dtool/dtool/depcache_table.py --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_inspect import *  # NOQA
        >>> import utool as ut
        >>> fpath = ut.get_argval('--modpath', default='.')
        >>> with_doctests = ut.get_argflag('--with_doctests')
        >>> G = get_internal_call_graph(fpath, with_doctests)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> pt.qt4ensure()
        >>> pt.show_nx(G, fontsize=8, as_directed=False)
        >>> z = pt.zoom_factory()
        >>> p = pt.pan_factory()
        >>> ut.show_if_requested()
    """
    import utool as ut
    fpath = ut.truepath(fpath)
    sourcecode = ut.readfrom(fpath)
    self = ut.BaronWraper(sourcecode)
    G = self.internal_call_graph(with_doctests=with_doctests)
    return G
[ "def", "get_internal_call_graph", "(", "fpath", ",", "with_doctests", "=", "False", ")", ":", "import", "utool", "as", "ut", "fpath", "=", "ut", ".", "truepath", "(", "fpath", ")", "sourcecode", "=", "ut", ".", "readfrom", "(", "fpath", ")", "self", "=", "ut", ".", "BaronWraper", "(", "sourcecode", ")", "G", "=", "self", ".", "internal_call_graph", "(", "with_doctests", "=", "with_doctests", ")", "return", "G" ]
CommandLine:
    python -m utool.util_inspect get_internal_call_graph --show --modpath=~/code/ibeis/ibeis/init/main_helpers.py --show
    python -m utool.util_inspect get_internal_call_graph --show --modpath=~/code/dtool/dtool/depcache_table.py --show

Example:
    >>> # DISABLE_DOCTEST
    >>> from utool.util_inspect import *  # NOQA
    >>> import utool as ut
    >>> fpath = ut.get_argval('--modpath', default='.')
    >>> with_doctests = ut.get_argflag('--with_doctests')
    >>> G = get_internal_call_graph(fpath, with_doctests)
    >>> ut.quit_if_noshow()
    >>> import plottool as pt
    >>> pt.qt4ensure()
    >>> pt.show_nx(G, fontsize=8, as_directed=False)
    >>> z = pt.zoom_factory()
    >>> p = pt.pan_factory()
    >>> ut.show_if_requested()
[ "CommandLine", ":", "python", "-", "m", "utool", ".", "util_inspect", "get_internal_call_graph", "--", "show", "--", "modpath", "=", "~", "/", "code", "/", "ibeis", "/", "ibeis", "/", "init", "/", "main_helpers", ".", "py", "--", "show", "python", "-", "m", "utool", ".", "util_inspect", "get_internal_call_graph", "--", "show", "--", "modpath", "=", "~", "/", "code", "/", "dtool", "/", "dtool", "/", "depcache_table", ".", "py", "--", "show" ]
python
train
39.888889
harlowja/constructs
constructs/tree.py
https://github.com/harlowja/constructs/blob/53f20a8422bbd56294d5c0161081cb5875511fab/constructs/tree.py#L119-L128
def child_count(self, only_direct=True):
    """Returns how many children this node has, either only the direct
    children of this node or inclusive of all children nodes of this node.
    """
    if not only_direct:
        count = 0
        for _node in self.dfs_iter():
            count += 1
        return count
    return len(self._children)
[ "def", "child_count", "(", "self", ",", "only_direct", "=", "True", ")", ":", "if", "not", "only_direct", ":", "count", "=", "0", "for", "_node", "in", "self", ".", "dfs_iter", "(", ")", ":", "count", "+=", "1", "return", "count", "return", "len", "(", "self", ".", "_children", ")" ]
Returns how many children this node has, either only the direct children of this node or inclusive of all children nodes of this node.
[ "Returns", "how", "many", "children", "this", "node", "has", "either", "only", "the", "direct", "children", "of", "this", "node", "or", "inclusive", "of", "all", "children", "nodes", "of", "this", "node", "." ]
python
train
37.6
PedalPi/Raspberry-Physical
physical/liquidcristal/liquid_crystal.py
https://github.com/PedalPi/Raspberry-Physical/blob/3dc71b6997ef36d0de256c5db7a1b38178937fd5/physical/liquidcristal/liquid_crystal.py#L206-L209
def left_to_right(self):
    """This is for text that flows Left to Right"""
    self._entry_mode |= Command.MODE_INCREMENT
    self.command(self._entry_mode)
[ "def", "left_to_right", "(", "self", ")", ":", "self", ".", "_entry_mode", "|=", "Command", ".", "MODE_INCREMENT", "self", ".", "command", "(", "self", ".", "_entry_mode", ")" ]
This is for text that flows Left to Right
[ "This", "is", "for", "text", "that", "flows", "Left", "to", "Right" ]
python
train
41.75
geertj/gruvi
lib/gruvi/http.py
https://github.com/geertj/gruvi/blob/1d77ca439600b6ea7a19aa1ee85dca0f3be3f3f8/lib/gruvi/http.py#L188-L203
def parse_te(header):
    """Parse the "TE" header."""
    pos = 0
    names = []
    while pos < len(header):
        name, pos = expect_re(re_token, header, pos)
        _, pos = accept_ws(header, pos)
        _, pos = accept_lit(';', header, pos)
        _, pos = accept_ws(header, pos)
        qvalue, pos = accept_re(re_qvalue, header, pos)
        if name:
            names.append((name, qvalue))
        _, pos = accept_ws(header, pos)
        _, pos = expect_lit(',', header, pos)
        _, pos = accept_ws(header, pos)
    return names
[ "def", "parse_te", "(", "header", ")", ":", "pos", "=", "0", "names", "=", "[", "]", "while", "pos", "<", "len", "(", "header", ")", ":", "name", ",", "pos", "=", "expect_re", "(", "re_token", ",", "header", ",", "pos", ")", "_", ",", "pos", "=", "accept_ws", "(", "header", ",", "pos", ")", "_", ",", "pos", "=", "accept_lit", "(", "';'", ",", "header", ",", "pos", ")", "_", ",", "pos", "=", "accept_ws", "(", "header", ",", "pos", ")", "qvalue", ",", "pos", "=", "accept_re", "(", "re_qvalue", ",", "header", ",", "pos", ")", "if", "name", ":", "names", ".", "append", "(", "(", "name", ",", "qvalue", ")", ")", "_", ",", "pos", "=", "accept_ws", "(", "header", ",", "pos", ")", "_", ",", "pos", "=", "expect_lit", "(", "','", ",", "header", ",", "pos", ")", "_", ",", "pos", "=", "accept_ws", "(", "header", ",", "pos", ")", "return", "names" ]
Parse the "TE" header.
[ "Parse", "the", "TE", "header", "." ]
python
train
33.1875
callowayproject/Transmogrify
transmogrify/utils.py
https://github.com/callowayproject/Transmogrify/blob/f1f891b8b923b3a1ede5eac7f60531c1c472379e/transmogrify/utils.py#L252-L264
def get_cached_files(url, server_name="", document_root=None):
    """
    Given a URL, return a list of paths of all cached variations of that file.

    Doesn't include the original file.
    """
    import glob
    url_info = process_url(url, server_name, document_root, check_security=False)
    # get path to cache directory with basename of file (no extension)
    filedir = os.path.dirname(url_info['requested_file'])
    fileglob = '{0}*{1}'.format(url_info['base_filename'], url_info['ext'])
    return glob.glob(os.path.join(filedir, fileglob))
[ "def", "get_cached_files", "(", "url", ",", "server_name", "=", "\"\"", ",", "document_root", "=", "None", ")", ":", "import", "glob", "url_info", "=", "process_url", "(", "url", ",", "server_name", ",", "document_root", ",", "check_security", "=", "False", ")", "# get path to cache directory with basename of file (no extension)", "filedir", "=", "os", ".", "path", ".", "dirname", "(", "url_info", "[", "'requested_file'", "]", ")", "fileglob", "=", "'{0}*{1}'", ".", "format", "(", "url_info", "[", "'base_filename'", "]", ",", "url_info", "[", "'ext'", "]", ")", "return", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "filedir", ",", "fileglob", ")", ")" ]
Given a URL, return a list of paths of all cached variations of that file. Doesn't include the original file.
[ "Given", "a", "URL", "return", "a", "list", "of", "paths", "of", "all", "cached", "variations", "of", "that", "file", "." ]
python
train
41.769231
lrq3000/pyFileFixity
pyFileFixity/lib/profilers/visual/pympler/util/bottle2.py
https://github.com/lrq3000/pyFileFixity/blob/fd5ef23bb13835faf1e3baa773619b86a1cc9bdf/pyFileFixity/lib/profilers/visual/pympler/util/bottle2.py#L354-L369
def mount(self, app, script_path):
    ''' Mount a Bottle application to a specific URL prefix '''
    if not isinstance(app, Bottle):
        raise TypeError('Only Bottle instances are supported for now.')
    script_path = '/'.join(filter(None, script_path.split('/')))
    path_depth = script_path.count('/') + 1
    if not script_path:
        raise TypeError('Empty script_path. Perhaps you want a merge()?')
    for other in self.mounts:
        if other.startswith(script_path):
            raise TypeError('Conflict with existing mount: %s' % other)

    @self.route('/%s/:#.*#' % script_path, method="ANY")
    def mountpoint():
        request.path_shift(path_depth)
        return app.handle(request.path, request.method)

    self.mounts[script_path] = app
[ "def", "mount", "(", "self", ",", "app", ",", "script_path", ")", ":", "if", "not", "isinstance", "(", "app", ",", "Bottle", ")", ":", "raise", "TypeError", "(", "'Only Bottle instances are supported for now.'", ")", "script_path", "=", "'/'", ".", "join", "(", "filter", "(", "None", ",", "script_path", ".", "split", "(", "'/'", ")", ")", ")", "path_depth", "=", "script_path", ".", "count", "(", "'/'", ")", "+", "1", "if", "not", "script_path", ":", "raise", "TypeError", "(", "'Empty script_path. Perhaps you want a merge()?'", ")", "for", "other", "in", "self", ".", "mounts", ":", "if", "other", ".", "startswith", "(", "script_path", ")", ":", "raise", "TypeError", "(", "'Conflict with existing mount: %s'", "%", "other", ")", "@", "self", ".", "route", "(", "'/%s/:#.*#'", "%", "script_path", ",", "method", "=", "\"ANY\"", ")", "def", "mountpoint", "(", ")", ":", "request", ".", "path_shift", "(", "path_depth", ")", "return", "app", ".", "handle", "(", "request", ".", "path", ",", "request", ".", "method", ")", "self", ".", "mounts", "[", "script_path", "]", "=", "app" ]
Mount a Bottle application to a specific URL prefix
[ "Mount", "a", "Bottle", "application", "to", "a", "specific", "URL", "prefix" ]
python
train
50.6875
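A usage sketch; note that this older vendored Bottle takes (app, script_path), unlike modern Bottle's mount(prefix, app). Route and app names are illustrative:

    root = Bottle()
    admin = Bottle()

    @admin.route('/status')
    def status():
        return 'ok'

    root.mount(admin, '/admin')   # /admin/status now routes into the admin app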
tensorflow/tensor2tensor
tensor2tensor/trax/layers/core.py
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/trax/layers/core.py#L42-L46
def RandomNormalInitializer(stddev=1e-2):
    """An initializer function for random normal coefficients."""
    def init(shape, rng):
        return (stddev * backend.random.normal(rng, shape)).astype('float32')
    return init
[ "def", "RandomNormalInitializer", "(", "stddev", "=", "1e-2", ")", ":", "def", "init", "(", "shape", ",", "rng", ")", ":", "return", "(", "stddev", "*", "backend", ".", "random", ".", "normal", "(", "rng", ",", "shape", ")", ")", ".", "astype", "(", "'float32'", ")", "return", "init" ]
An initializer function for random normal coefficients.
[ "An", "initializer", "function", "for", "random", "normal", "coefficients", "." ]
python
train
42.6
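A usage sketch, assuming trax's backend shim provides the PRNG-key helper (get_prng is taken from the surrounding trax codebase, not shown here):

    # Assumed import path for this tensor2tensor/trax snapshot.
    from tensor2tensor.trax import backend

    init = RandomNormalInitializer(stddev=0.05)
    rng = backend.random.get_prng(0)   # assumed helper returning a PRNG key
    w = init((256, 128), rng)          # float32 array with entries ~ N(0, 0.05**2)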
sosreport/sos
sos/plugins/vdsm.py
https://github.com/sosreport/sos/blob/2ebc04da53dc871c8dd5243567afa4f8592dca29/sos/plugins/vdsm.py#L144-L150
def _add_vdsm_forbidden_paths(self):
    """Add confidential sysprep vfds under /var/run/vdsm to forbidden paths """
    for file_path in glob.glob("/var/run/vdsm/*"):
        if file_path.endswith(('.vfd', '/isoUploader', '/storage')):
            self.add_forbidden_path(file_path)
[ "def", "_add_vdsm_forbidden_paths", "(", "self", ")", ":", "for", "file_path", "in", "glob", ".", "glob", "(", "\"/var/run/vdsm/*\"", ")", ":", "if", "file_path", ".", "endswith", "(", "(", "'.vfd'", ",", "'/isoUploader'", ",", "'/storage'", ")", ")", ":", "self", ".", "add_forbidden_path", "(", "file_path", ")" ]
Add confidential sysprep vfds under /var/run/vdsm to forbidden paths
[ "Add", "confidential", "sysprep", "vfds", "under", "/", "var", "/", "run", "/", "vdsm", "to", "forbidden", "paths" ]
python
train
43.285714
DataONEorg/d1_python
gmn/src/d1_gmn/app/views/decorators.py
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/views/decorators.py#L39-L54
def resolve_sid(f):
    """View handler decorator that adds SID resolve and PID validation.

    - For v1 calls, assume that ``did`` is a pid and raise NotFound exception
      if it's not valid.

    - For v2 calls, if DID is a valid PID, return it. If not, try to resolve
      it as a SID and, if successful, return the new PID. Else, raise
      NotFound exception.
    """
    @functools.wraps(f)
    def wrapper(request, did, *args, **kwargs):
        pid = resolve_sid_func(request, did)
        return f(request, pid, *args, **kwargs)
    return wrapper
[ "def", "resolve_sid", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "request", ",", "did", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "pid", "=", "resolve_sid_func", "(", "request", ",", "did", ")", "return", "f", "(", "request", ",", "pid", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
View handler decorator that adds SID resolve and PID validation.

- For v1 calls, assume that ``did`` is a pid and raise NotFound exception
  if it's not valid.
- For v2 calls, if DID is a valid PID, return it. If not, try to resolve it
  as a SID and, if successful, return the new PID. Else, raise NotFound
  exception.
[ "View", "handler", "decorator", "that", "adds", "SID", "resolve", "and", "PID", "validation", "." ]
python
train
33.875
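A sketch of the decorator applied to a view (the view name is hypothetical; GMN's actual URL wiring is elsewhere):

    @resolve_sid
    def get_object(request, pid):
        # `pid` here is always a resolved, validated PID, even when the
        # client supplied a SID on a v2 call.
        ...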
lord63/tldr.py
tldr/cli.py
https://github.com/lord63/tldr.py/blob/73cf9f86254691b2476910ea6a743b6d8bd04963/tldr/cli.py#L22-L26
def parse_man_page(command, platform):
    """Parse the man page and return the parsed lines."""
    page_path = find_page_location(command, platform)
    output_lines = parse_page(page_path)
    return output_lines
[ "def", "parse_man_page", "(", "command", ",", "platform", ")", ":", "page_path", "=", "find_page_location", "(", "command", ",", "platform", ")", "output_lines", "=", "parse_page", "(", "page_path", ")", "return", "output_lines" ]
Parse the man page and return the parsed lines.
[ "Parse", "the", "man", "page", "and", "return", "the", "parsed", "lines", "." ]
python
train
42.2
SatelliteQE/nailgun
nailgun/entities.py
https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L1649-L1657
def create_payload(self):
    """Wrap submitted data within an extra dict."""
    payload = super(JobTemplate, self).create_payload()
    effective_user = payload.pop(u'effective_user', None)
    if effective_user:
        payload[u'ssh'] = {u'effective_user': effective_user}
    return {u'job_template': payload}
[ "def", "create_payload", "(", "self", ")", ":", "payload", "=", "super", "(", "JobTemplate", ",", "self", ")", ".", "create_payload", "(", ")", "effective_user", "=", "payload", ".", "pop", "(", "u'effective_user'", ",", "None", ")", "if", "effective_user", ":", "payload", "[", "u'ssh'", "]", "=", "{", "u'effective_user'", ":", "effective_user", "}", "return", "{", "u'job_template'", ":", "payload", "}" ]
Wrap submitted data within an extra dict.
[ "Wrap", "submitted", "data", "within", "an", "extra", "dict", "." ]
python
train
36.888889
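An illustration of the transformation, derived directly from the code above (field values are hypothetical):

    # super().create_payload() -> {'name': 'run-cmd', 'effective_user': 'root'}
    # create_payload()         -> {'job_template': {'name': 'run-cmd',
    #                                               'ssh': {'effective_user': 'root'}}}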
codeforamerica/epa_python
epa/envirofacts/envirofacts_api.py
https://github.com/codeforamerica/epa_python/blob/62a53da62936bea8daa487a01a52b973e9062b2c/epa/envirofacts/envirofacts_api.py#L29-L40
def catalog(self, table='', column=''):
    """Lookup the values available for querying."""
    lookup_table = self.lookup_table
    if lookup_table is not None:
        if table:
            if column:
                column = column.upper()
                return lookup_table[table][column]
            return lookup_table[table]
        # Show what methods are available.
        return self.lookup_methods
    return None
[ "def", "catalog", "(", "self", ",", "table", "=", "''", ",", "column", "=", "''", ")", ":", "lookup_table", "=", "self", ".", "lookup_table", "if", "lookup_table", "is", "not", "None", ":", "if", "table", ":", "if", "column", ":", "column", "=", "column", ".", "upper", "(", ")", "return", "lookup_table", "[", "table", "]", "[", "column", "]", "return", "lookup_table", "[", "table", "]", "# Show what methods are available.", "return", "self", ".", "lookup_methods", "return", "None" ]
Lookup the values available for querying.
[ "Lookup", "the", "values", "available", "for", "querying", "." ]
python
train
38.25
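A usage sketch of the three lookup levels (the instance, table, and column names are illustrative only, not actual Envirofacts identifiers):

    api.catalog()                              # list available lookup methods
    api.catalog('some_table')                  # all columns for a table
    api.catalog('some_table', 'some_column')   # values for one column (upper-cased internally)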
alex-kostirin/pyatomac
atomac/ldtpd/core.py
https://github.com/alex-kostirin/pyatomac/blob/3f46f6feb4504315eec07abb18bb41be4d257aeb/atomac/ldtpd/core.py#L187-L207
def getcpustat(self, process_name):
    """
    get CPU stat for the given process name

    @param process_name: Process name, ex: firefox-bin.
    @type process_name: string

    @return: cpu stat list on success, else empty list
             If the same process name is running as multiple
             instances, get the CPU usage stat of all of them
    @rtype: list
    """
    # Create an instance of process stat
    _stat_inst = ProcessStats(process_name)
    _stat_list = []
    for p in _stat_inst.get_cpu_memory_stat():
        try:
            _stat_list.append(p.get_cpu_percent())
        except psutil.AccessDenied:
            pass
    return _stat_list
[ "def", "getcpustat", "(", "self", ",", "process_name", ")", ":", "# Create an instance of process stat", "_stat_inst", "=", "ProcessStats", "(", "process_name", ")", "_stat_list", "=", "[", "]", "for", "p", "in", "_stat_inst", ".", "get_cpu_memory_stat", "(", ")", ":", "try", ":", "_stat_list", ".", "append", "(", "p", ".", "get_cpu_percent", "(", ")", ")", "except", "psutil", ".", "AccessDenied", ":", "pass", "return", "_stat_list" ]
get CPU stat for the given process name

@param process_name: Process name, ex: firefox-bin.
@type process_name: string

@return: cpu stat list on success, else empty list
         If the same process name is running as multiple
         instances, get the CPU usage stat of all of them
@rtype: list
[ "get", "CPU", "stat", "for", "the", "give", "process", "name" ]
python
valid
33.952381
belbio/bel
bel/lang/completion.py
https://github.com/belbio/bel/blob/60333e8815625b942b4836903f3b618cf44b3771/bel/lang/completion.py#L494-L582
def add_completions(
    replace_list: list, belstr: str, replace_span: Span, completion_text: str
) -> List[Mapping[str, Any]]:
    """Create completions to return given replacement list

    Args:
        replace_list: list of completion replacement values
        belstr: BEL String
        replace_span: start, stop of belstr to replace
        completion_text: text to use for completion - used for creating highlight

    Returns:
        [{
            "replacement": replacement,
            "cursor_loc": cursor_loc,
            "highlight": highlight,
            "label": label,
        }]
    """
    completions = []
    for r in replace_list:
        # if '(' not in belstr:
        #     replacement = f'{r["replacement"]}()'
        #     cursor_loc = len(replacement) - 1  # inside parenthesis
        # elif r['type'] == 'Function' and replace_span[1] == len(belstr) - 1:
        if len(belstr) > 0:
            belstr_end = len(belstr) - 1
        else:
            belstr_end = 0

        log.debug(
            f'Replace list {r} Replace_span {replace_span} BELstr: {belstr} Len: {belstr_end} Test1 {r["type"] == "Function"} Test2 {replace_span[1] + 1 == len(belstr)}'
        )

        # Put a space between comma and following function arg
        if (
            r["type"] == "Function"
            and replace_span[0] > 0
            and belstr[replace_span[0] - 1] == ","
        ):
            log.debug("prior char is a comma")
            replacement = (
                belstr[0 : replace_span[0]]
                + " "
                + f"{r['replacement']}()"
                + belstr[replace_span[1] + 1 :]
            )
            cursor_loc = len(
                belstr[0 : replace_span[0]] + " " + f"{r['replacement']}()"
            )
        # Put a space between comma and following NSArg or StrArg
        elif replace_span[0] > 0 and belstr[replace_span[0] - 1] == ",":
            log.debug("prior char is a comma")
            replacement = (
                belstr[0 : replace_span[0]]
                + " "
                + r["replacement"]
                + belstr[replace_span[1] + 1 :]
            )
            cursor_loc = len(belstr[0 : replace_span[0]] + " " + r["replacement"])
        # Add function to end of belstr
        elif r["type"] == "Function" and replace_span[1] >= belstr_end:
            replacement = belstr[0 : replace_span[0]] + f"{r['replacement']}()"
            cursor_loc = len(replacement) - 1  # inside parenthesis
            log.debug(f"Replacement: {replacement}")
        # Insert replacement in beginning or middle of belstr
        else:
            replacement = (
                belstr[0 : replace_span[0]]
                + r["replacement"]
                + belstr[replace_span[1] + 1 :]
            )
            cursor_loc = len(
                belstr[0 : replace_span[0]] + r["replacement"]
            )  # move cursor just past replacement

        completions.append(
            {
                "replacement": replacement,
                "cursor_loc": cursor_loc,
                "highlight": r["highlight"],
                "label": r["label"],
            }
        )

    return completions
[ "def", "add_completions", "(", "replace_list", ":", "list", ",", "belstr", ":", "str", ",", "replace_span", ":", "Span", ",", "completion_text", ":", "str", ")", "->", "List", "[", "Mapping", "[", "str", ",", "Any", "]", "]", ":", "completions", "=", "[", "]", "for", "r", "in", "replace_list", ":", "# if '(' not in belstr:", "# replacement = f'{r[\"replacement\"]}()'", "# cursor_loc = len(replacement) - 1 # inside parenthesis", "# elif r['type'] == 'Function' and replace_span[1] == len(belstr) - 1:", "if", "len", "(", "belstr", ")", ">", "0", ":", "belstr_end", "=", "len", "(", "belstr", ")", "-", "1", "else", ":", "belstr_end", "=", "0", "log", ".", "debug", "(", "f'Replace list {r} Replace_span {replace_span} BELstr: {belstr} Len: {belstr_end} Test1 {r[\"type\"] == \"Function\"} Test2 {replace_span[1] + 1 == len(belstr)}'", ")", "# Put a space between comma and following function arg", "if", "(", "r", "[", "\"type\"", "]", "==", "\"Function\"", "and", "replace_span", "[", "0", "]", ">", "0", "and", "belstr", "[", "replace_span", "[", "0", "]", "-", "1", "]", "==", "\",\"", ")", ":", "log", ".", "debug", "(", "\"prior char is a comma\"", ")", "replacement", "=", "(", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "\" \"", "+", "f\"{r['replacement']}()\"", "+", "belstr", "[", "replace_span", "[", "1", "]", "+", "1", ":", "]", ")", "cursor_loc", "=", "len", "(", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "\" \"", "+", "f\"{r['replacement']}()\"", ")", "# Put a space between comman and following NSArg or StrArg", "elif", "replace_span", "[", "0", "]", ">", "0", "and", "belstr", "[", "replace_span", "[", "0", "]", "-", "1", "]", "==", "\",\"", ":", "log", ".", "debug", "(", "\"prior char is a comma\"", ")", "replacement", "=", "(", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "\" \"", "+", "r", "[", "\"replacement\"", "]", "+", "belstr", "[", "replace_span", "[", "1", "]", "+", "1", ":", "]", ")", "cursor_loc", "=", "len", "(", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "\" \"", "+", "r", "[", "\"replacement\"", "]", ")", "# Add function to end of belstr", "elif", "r", "[", "\"type\"", "]", "==", "\"Function\"", "and", "replace_span", "[", "1", "]", ">=", "belstr_end", ":", "replacement", "=", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "f\"{r['replacement']}()\"", "cursor_loc", "=", "len", "(", "replacement", ")", "-", "1", "# inside parenthesis", "log", ".", "debug", "(", "f\"Replacement: {replacement}\"", ")", "# Insert replacement in beginning or middle of belstr", "else", ":", "replacement", "=", "(", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "r", "[", "\"replacement\"", "]", "+", "belstr", "[", "replace_span", "[", "1", "]", "+", "1", ":", "]", ")", "cursor_loc", "=", "len", "(", "belstr", "[", "0", ":", "replace_span", "[", "0", "]", "]", "+", "r", "[", "\"replacement\"", "]", ")", "# move cursor just past replacement", "completions", ".", "append", "(", "{", "\"replacement\"", ":", "replacement", ",", "\"cursor_loc\"", ":", "cursor_loc", ",", "\"highlight\"", ":", "r", "[", "\"highlight\"", "]", ",", "\"label\"", ":", "r", "[", "\"label\"", "]", ",", "}", ")", "return", "completions" ]
Create completions to return given replacement list Args: replace_list: list of completion replacement values belstr: BEL String replace_span: start, stop of belstr to replace completion_text: text to use for completion - used for creating highlight Returns: [{ "replacement": replacement, "cursor_loc": cursor_loc, "highlight": highlight, "label": label, }]
[ "Create", "completions", "to", "return", "given", "replacement", "list" ]
python
train
35
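To see the append-a-function branch of add_completions in action, a hypothetical call with a single replacement entry on an empty BEL string; a plain (start, stop) pair is assumed to work for the Span-typed replace_span, and the replacement values are made up:

replace_list = [{
    'replacement': 'p',           # hypothetical completion value
    'type': 'Function',
    'highlight': '<em>p</em>',
    'label': 'p()',
}]
result = add_completions(replace_list, belstr='', replace_span=(0, 0), completion_text='p')
# result[0]['replacement'] == 'p()' and result[0]['cursor_loc'] == 2,
# i.e. the cursor lands inside the freshly appended parentheses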
hydraplatform/hydra-base
hydra_base/lib/users.py
https://github.com/hydraplatform/hydra-base/blob/9251ff7946505f7a272c87837390acd1c435bc6e/hydra_base/lib/users.py#L75-L81
def get_usernames_like(username,**kwargs): """ Return a list of usernames like the given string. """ checkname = "%%%s%%"%username rs = db.DBSession.query(User.username).filter(User.username.like(checkname)).all() return [r.username for r in rs]
[ "def", "get_usernames_like", "(", "username", ",", "*", "*", "kwargs", ")", ":", "checkname", "=", "\"%%%s%%\"", "%", "username", "rs", "=", "db", ".", "DBSession", ".", "query", "(", "User", ".", "username", ")", ".", "filter", "(", "User", ".", "username", ".", "like", "(", "checkname", ")", ")", ".", "all", "(", ")", "return", "[", "r", ".", "username", "for", "r", "in", "rs", "]" ]
Return a list of usernames like the given string.
[ "Return", "a", "list", "of", "usernames", "like", "the", "given", "string", "." ]
python
train
38.142857
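The helper wraps a SQL LIKE with wildcards on both sides, so a call like the following (the usernames are illustrative) matches any username containing the fragment:

matches = get_usernames_like('bob')
# equivalent filter: username LIKE '%bob%'
# -> e.g. ['bob', 'bobby', 'rob_bob']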
devision-io/metasdk
metasdk/services/ApiProxyService.py
https://github.com/devision-io/metasdk/blob/1a1af5ceeb8ade843fd656c9c27c8b9ff789fc68/metasdk/services/ApiProxyService.py#L102-L114
def call_proxy(self, engine, payload, method, analyze_json_error_param, retry_request_substr_variants, stream=False):
        """
        :param engine: The target system
        :param payload: Request data
        :param method: string; one of native_call | tsv | json_newline
        :param analyze_json_error_param: Whether to analyze the error parameter in the proxy response
        :param retry_request_substr_variants: List of substrings whose presence in the response triggers a retry
        :param stream:
        :return:
        """
        return self.__api_proxy_call(engine, payload, method, analyze_json_error_param, retry_request_substr_variants, stream)
[ "def", "call_proxy", "(", "self", ",", "engine", ",", "payload", ",", "method", ",", "analyze_json_error_param", ",", "retry_request_substr_variants", ",", "stream", "=", "False", ")", ":", "return", "self", ".", "__api_proxy_call", "(", "engine", ",", "payload", ",", "method", ",", "analyze_json_error_param", ",", "retry_request_substr_variants", ",", "stream", ")" ]
:param engine: The target system
        :param payload: Request data
        :param method: string; one of native_call | tsv | json_newline
        :param analyze_json_error_param: Whether to analyze the error parameter in the proxy response
        :param retry_request_substr_variants: List of substrings whose presence in the response triggers a retry
        :param stream:
        :return:
[ ":", "param", "engine", ":", "Система", ":", "param", "payload", ":", "Данные", "для", "запроса", ":", "param", "method", ":", "string", "Может", "содержать", "native_call", "|", "tsv", "|", "json_newline", ":", "param", "analyze_json_error_param", ":", "Нужно", "ли", "производить", "анализ", "параметра", "error", "в", "ответе", "прокси", ":", "param", "retry_request_substr_variants", ":", "Список", "подстрок", "при", "наличии", "которых", "в", "ответе", "будет", "происходить", "перезапрос", ":", "param", "stream", ":", ":", "return", ":" ]
python
train
55.923077
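A hedged sketch of calling the wrapper on an ApiProxyService instance; the engine name, payload, and retry substring are all hypothetical:

rows = api_proxy_service.call_proxy(
    engine='adwords',                          # hypothetical engine name
    payload={'query': 'SELECT ...'},           # hypothetical request data
    method='json_newline',
    analyze_json_error_param=True,
    retry_request_substr_variants=['Internal error'],
    stream=False,
)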
xiaocong/uiautomator
uiautomator/__init__.py
https://github.com/xiaocong/uiautomator/blob/9a0c892ffd056713f91aa2153d1533c5b0553a1c/uiautomator/__init__.py#L1090-L1095
def child(self, **kwargs): '''set childSelector.''' return AutomatorDeviceObject( self.device, self.selector.clone().child(**kwargs) )
[ "def", "child", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "AutomatorDeviceObject", "(", "self", ".", "device", ",", "self", ".", "selector", ".", "clone", "(", ")", ".", "child", "(", "*", "*", "kwargs", ")", ")" ]
set childSelector.
[ "set", "childSelector", "." ]
python
train
29.5
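Typical uiautomator usage, chaining child() onto a selector to narrow the match; the class name and label text are device-specific examples:

from uiautomator import device as d

# the list entry labelled "Wi-Fi" inside a ListView
entry = d(className='android.widget.ListView').child(text='Wi-Fi')
entry.click()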
softlayer/softlayer-python
SoftLayer/CLI/user/detail.py
https://github.com/softlayer/softlayer-python/blob/9f181be08cc3668353b05a6de0cb324f52cff6fa/SoftLayer/CLI/user/detail.py#L28-L56
def cli(env, identifier, keys, permissions, hardware, virtual, logins, events): """User details.""" mgr = SoftLayer.UserManager(env.client) user_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'username') object_mask = "userStatus[name], parent[id, username], apiAuthenticationKeys[authenticationKey], "\ "unsuccessfulLogins, successfulLogins" user = mgr.get_user(user_id, object_mask) env.fout(basic_info(user, keys)) if permissions: perms = mgr.get_user_permissions(user_id) env.fout(print_permissions(perms)) if hardware: mask = "id, hardware, dedicatedHosts" access = mgr.get_user(user_id, mask) env.fout(print_dedicated_access(access.get('dedicatedHosts', []))) env.fout(print_access(access.get('hardware', []), 'Hardware')) if virtual: mask = "id, virtualGuests" access = mgr.get_user(user_id, mask) env.fout(print_access(access.get('virtualGuests', []), 'Virtual Guests')) if logins: login_log = mgr.get_logins(user_id) env.fout(print_logins(login_log)) if events: event_log = mgr.get_events(user_id) env.fout(print_events(event_log))
[ "def", "cli", "(", "env", ",", "identifier", ",", "keys", ",", "permissions", ",", "hardware", ",", "virtual", ",", "logins", ",", "events", ")", ":", "mgr", "=", "SoftLayer", ".", "UserManager", "(", "env", ".", "client", ")", "user_id", "=", "helpers", ".", "resolve_id", "(", "mgr", ".", "resolve_ids", ",", "identifier", ",", "'username'", ")", "object_mask", "=", "\"userStatus[name], parent[id, username], apiAuthenticationKeys[authenticationKey], \"", "\"unsuccessfulLogins, successfulLogins\"", "user", "=", "mgr", ".", "get_user", "(", "user_id", ",", "object_mask", ")", "env", ".", "fout", "(", "basic_info", "(", "user", ",", "keys", ")", ")", "if", "permissions", ":", "perms", "=", "mgr", ".", "get_user_permissions", "(", "user_id", ")", "env", ".", "fout", "(", "print_permissions", "(", "perms", ")", ")", "if", "hardware", ":", "mask", "=", "\"id, hardware, dedicatedHosts\"", "access", "=", "mgr", ".", "get_user", "(", "user_id", ",", "mask", ")", "env", ".", "fout", "(", "print_dedicated_access", "(", "access", ".", "get", "(", "'dedicatedHosts'", ",", "[", "]", ")", ")", ")", "env", ".", "fout", "(", "print_access", "(", "access", ".", "get", "(", "'hardware'", ",", "[", "]", ")", ",", "'Hardware'", ")", ")", "if", "virtual", ":", "mask", "=", "\"id, virtualGuests\"", "access", "=", "mgr", ".", "get_user", "(", "user_id", ",", "mask", ")", "env", ".", "fout", "(", "print_access", "(", "access", ".", "get", "(", "'virtualGuests'", ",", "[", "]", ")", ",", "'Virtual Guests'", ")", ")", "if", "logins", ":", "login_log", "=", "mgr", ".", "get_logins", "(", "user_id", ")", "env", ".", "fout", "(", "print_logins", "(", "login_log", ")", ")", "if", "events", ":", "event_log", "=", "mgr", ".", "get_events", "(", "user_id", ")", "env", ".", "fout", "(", "print_events", "(", "event_log", ")", ")" ]
User details.
[ "User", "details", "." ]
python
train
40.931034
boriel/zxbasic
symbols/binary.py
https://github.com/boriel/zxbasic/blob/23b28db10e41117805bdb3c0f78543590853b132/symbols/binary.py#L68-L135
def make_node(cls, operator, left, right, lineno, func=None, type_=None): """ Creates a binary node for a binary operation, e.g. A + 6 => '+' (A, 6) in prefix notation. Parameters: -operator: the binary operation token. e.g. 'PLUS' for A + 6 -left: left operand -right: right operand -func: is a lambda function used when constant folding is applied -type_: resulting type (to enforce it). If no type_ is specified the resulting one will be guessed. """ if left is None or right is None: return None a, b = left, right # short form names # Check for constant non-numeric operations c_type = common_type(a, b) # Resulting operation type or None if c_type: # there must be a common type for a and b if is_numeric(a, b) and (is_const(a) or is_number(a)) and \ (is_const(b) or is_number(b)): if func is not None: a = SymbolTYPECAST.make_node(c_type, a, lineno) # ensure type b = SymbolTYPECAST.make_node(c_type, b, lineno) # ensure type return SymbolNUMBER(func(a.value, b.value), type_=type_, lineno=lineno) if is_static(a, b): a = SymbolTYPECAST.make_node(c_type, a, lineno) # ensure type b = SymbolTYPECAST.make_node(c_type, b, lineno) # ensure type return SymbolCONST(cls(operator, a, b, lineno, type_=type_, func=func), lineno=lineno) if operator in ('BNOT', 'BAND', 'BOR', 'BXOR', 'NOT', 'AND', 'OR', 'XOR', 'MINUS', 'MULT', 'DIV', 'SHL', 'SHR') and \ not is_numeric(a, b): syntax_error(lineno, 'Operator %s cannot be used with STRINGS' % operator) return None if is_string(a, b) and func is not None: # Are they STRING Constants? if operator == 'PLUS': return SymbolSTRING(func(a.value, b.value), lineno) return SymbolNUMBER(int(func(a.text, b.text)), type_=TYPE.ubyte, lineno=lineno) # Convert to u8 (boolean) if operator in ('BNOT', 'BAND', 'BOR', 'BXOR'): if TYPE.is_decimal(c_type): c_type = TYPE.long_ if a.type_ != b.type_ and TYPE.string in (a.type_, b.type_): c_type = a.type_ # Will give an error based on the fist operand if operator not in ('SHR', 'SHL'): a = SymbolTYPECAST.make_node(c_type, a, lineno) b = SymbolTYPECAST.make_node(c_type, b, lineno) if a is None or b is None: return None if type_ is None: if operator in ('LT', 'GT', 'EQ', 'LE', 'GE', 'NE', 'AND', 'OR', 'XOR', 'NOT'): type_ = TYPE.ubyte # Boolean type else: type_ = c_type return cls(operator, a, b, type_=type_, lineno=lineno)
[ "def", "make_node", "(", "cls", ",", "operator", ",", "left", ",", "right", ",", "lineno", ",", "func", "=", "None", ",", "type_", "=", "None", ")", ":", "if", "left", "is", "None", "or", "right", "is", "None", ":", "return", "None", "a", ",", "b", "=", "left", ",", "right", "# short form names", "# Check for constant non-numeric operations", "c_type", "=", "common_type", "(", "a", ",", "b", ")", "# Resulting operation type or None", "if", "c_type", ":", "# there must be a common type for a and b", "if", "is_numeric", "(", "a", ",", "b", ")", "and", "(", "is_const", "(", "a", ")", "or", "is_number", "(", "a", ")", ")", "and", "(", "is_const", "(", "b", ")", "or", "is_number", "(", "b", ")", ")", ":", "if", "func", "is", "not", "None", ":", "a", "=", "SymbolTYPECAST", ".", "make_node", "(", "c_type", ",", "a", ",", "lineno", ")", "# ensure type", "b", "=", "SymbolTYPECAST", ".", "make_node", "(", "c_type", ",", "b", ",", "lineno", ")", "# ensure type", "return", "SymbolNUMBER", "(", "func", "(", "a", ".", "value", ",", "b", ".", "value", ")", ",", "type_", "=", "type_", ",", "lineno", "=", "lineno", ")", "if", "is_static", "(", "a", ",", "b", ")", ":", "a", "=", "SymbolTYPECAST", ".", "make_node", "(", "c_type", ",", "a", ",", "lineno", ")", "# ensure type", "b", "=", "SymbolTYPECAST", ".", "make_node", "(", "c_type", ",", "b", ",", "lineno", ")", "# ensure type", "return", "SymbolCONST", "(", "cls", "(", "operator", ",", "a", ",", "b", ",", "lineno", ",", "type_", "=", "type_", ",", "func", "=", "func", ")", ",", "lineno", "=", "lineno", ")", "if", "operator", "in", "(", "'BNOT'", ",", "'BAND'", ",", "'BOR'", ",", "'BXOR'", ",", "'NOT'", ",", "'AND'", ",", "'OR'", ",", "'XOR'", ",", "'MINUS'", ",", "'MULT'", ",", "'DIV'", ",", "'SHL'", ",", "'SHR'", ")", "and", "not", "is_numeric", "(", "a", ",", "b", ")", ":", "syntax_error", "(", "lineno", ",", "'Operator %s cannot be used with STRINGS'", "%", "operator", ")", "return", "None", "if", "is_string", "(", "a", ",", "b", ")", "and", "func", "is", "not", "None", ":", "# Are they STRING Constants?", "if", "operator", "==", "'PLUS'", ":", "return", "SymbolSTRING", "(", "func", "(", "a", ".", "value", ",", "b", ".", "value", ")", ",", "lineno", ")", "return", "SymbolNUMBER", "(", "int", "(", "func", "(", "a", ".", "text", ",", "b", ".", "text", ")", ")", ",", "type_", "=", "TYPE", ".", "ubyte", ",", "lineno", "=", "lineno", ")", "# Convert to u8 (boolean)", "if", "operator", "in", "(", "'BNOT'", ",", "'BAND'", ",", "'BOR'", ",", "'BXOR'", ")", ":", "if", "TYPE", ".", "is_decimal", "(", "c_type", ")", ":", "c_type", "=", "TYPE", ".", "long_", "if", "a", ".", "type_", "!=", "b", ".", "type_", "and", "TYPE", ".", "string", "in", "(", "a", ".", "type_", ",", "b", ".", "type_", ")", ":", "c_type", "=", "a", ".", "type_", "# Will give an error based on the fist operand", "if", "operator", "not", "in", "(", "'SHR'", ",", "'SHL'", ")", ":", "a", "=", "SymbolTYPECAST", ".", "make_node", "(", "c_type", ",", "a", ",", "lineno", ")", "b", "=", "SymbolTYPECAST", ".", "make_node", "(", "c_type", ",", "b", ",", "lineno", ")", "if", "a", "is", "None", "or", "b", "is", "None", ":", "return", "None", "if", "type_", "is", "None", ":", "if", "operator", "in", "(", "'LT'", ",", "'GT'", ",", "'EQ'", ",", "'LE'", ",", "'GE'", ",", "'NE'", ",", "'AND'", ",", "'OR'", ",", "'XOR'", ",", "'NOT'", ")", ":", "type_", "=", "TYPE", ".", "ubyte", "# Boolean type", "else", ":", "type_", "=", "c_type", "return", "cls", "(", "operator", ",", "a", ",", "b", ",", "type_", "=", 
"type_", ",", "lineno", "=", "lineno", ")" ]
Creates a binary node for a binary operation, e.g. A + 6 => '+' (A, 6) in prefix notation. Parameters: -operator: the binary operation token. e.g. 'PLUS' for A + 6 -left: left operand -right: right operand -func: is a lambda function used when constant folding is applied -type_: resulting type (to enforce it). If no type_ is specified the resulting one will be guessed.
[ "Creates", "a", "binary", "node", "for", "a", "binary", "operation", "e", ".", "g", ".", "A", "+", "6", "=", ">", "+", "(", "A", "6", ")", "in", "prefix", "notation", "." ]
python
train
43.808824
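The core idea of make_node is constant folding: when both operands are literals and a folding function is supplied, the operator node is replaced by a literal at build time. A minimal, self-contained sketch of that idea (not the zxbasic API itself):

class Number:
    def __init__(self, value):
        self.value = value

class BinOp:
    def __init__(self, op, left, right):
        self.op, self.left, self.right = op, left, right

    @classmethod
    def make_node(cls, op, left, right, func=None):
        # fold at build time when both operands are literals
        if func and isinstance(left, Number) and isinstance(right, Number):
            return Number(func(left.value, right.value))
        return cls(op, left, right)   # otherwise keep the node symbolic

node = BinOp.make_node('PLUS', Number(6), Number(4), func=lambda a, b: a + b)
assert isinstance(node, Number) and node.value == 10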
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_parser.py
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_parser.py#L507-L511
def do_edit(self, line): """edit Edit the queue of write operations.""" self._split_args(line, 0, 0) self._command_processor.get_operation_queue().edit() self._print_info_if_verbose("The write operation queue was successfully edited")
[ "def", "do_edit", "(", "self", ",", "line", ")", ":", "self", ".", "_split_args", "(", "line", ",", "0", ",", "0", ")", "self", ".", "_command_processor", ".", "get_operation_queue", "(", ")", ".", "edit", "(", ")", "self", ".", "_print_info_if_verbose", "(", "\"The write operation queue was successfully edited\"", ")" ]
edit Edit the queue of write operations.
[ "edit", "Edit", "the", "queue", "of", "write", "operations", "." ]
python
train
52.4
cisco-sas/kitty
kitty/model/low_level/container.py
https://github.com/cisco-sas/kitty/blob/cb0760989dcdfe079e43ac574d872d0b18953a32/kitty/model/low_level/container.py#L235-L243
def get_field_by_name(self, name): ''' :param name: name of field to get :return: direct sub-field with the given name :raises: :class:`~kitty.core.KittyException` if no direct subfield with this name ''' if name in self._fields_dict: return self._fields_dict[name] raise KittyException('field named (%s) was not found in (%s)' % (self, name))
[ "def", "get_field_by_name", "(", "self", ",", "name", ")", ":", "if", "name", "in", "self", ".", "_fields_dict", ":", "return", "self", ".", "_fields_dict", "[", "name", "]", "raise", "KittyException", "(", "'field named (%s) was not found in (%s)'", "%", "(", "self", ",", "name", ")", ")" ]
:param name: name of field to get :return: direct sub-field with the given name :raises: :class:`~kitty.core.KittyException` if no direct subfield with this name
[ ":", "param", "name", ":", "name", "of", "field", "to", "get", ":", "return", ":", "direct", "sub", "-", "field", "with", "the", "given", "name", ":", "raises", ":", ":", "class", ":", "~kitty", ".", "core", ".", "KittyException", "if", "no", "direct", "subfield", "with", "this", "name" ]
python
train
44.777778
gem/oq-engine
openquake/hmtk/sources/simple_fault_source.py
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L147-L189
def create_geometry(self, input_geometry, dip, upper_depth, lower_depth, mesh_spacing=1.0): ''' If geometry is defined as a numpy array then create instance of nhlib.geo.line.Line class, otherwise if already instance of class accept class :param input_geometry: Trace (line) of the fault source as either i) instance of nhlib.geo.line.Line class ii) numpy.ndarray [Longitude, Latitude] :param float dip: Dip of fault surface (in degrees) :param float upper_depth: Upper seismogenic depth (km) :param float lower_depth: Lower seismogenic depth (km) :param float mesh_spacing: Spacing of the fault mesh (km) {default = 1.0} ''' assert((dip > 0.) and (dip <= 90.)) self.dip = dip self._check_seismogenic_depths(upper_depth, lower_depth) if not isinstance(input_geometry, Line): if not isinstance(input_geometry, np.ndarray): raise ValueError('Unrecognised or unsupported geometry ' 'definition') else: self.fault_trace = Line([Point(row[0], row[1]) for row in input_geometry]) else: self.fault_trace = input_geometry # Build fault surface self.geometry = SimpleFaultSurface.from_fault_data(self.fault_trace, self.upper_depth, self.lower_depth, self.dip, mesh_spacing)
[ "def", "create_geometry", "(", "self", ",", "input_geometry", ",", "dip", ",", "upper_depth", ",", "lower_depth", ",", "mesh_spacing", "=", "1.0", ")", ":", "assert", "(", "(", "dip", ">", "0.", ")", "and", "(", "dip", "<=", "90.", ")", ")", "self", ".", "dip", "=", "dip", "self", ".", "_check_seismogenic_depths", "(", "upper_depth", ",", "lower_depth", ")", "if", "not", "isinstance", "(", "input_geometry", ",", "Line", ")", ":", "if", "not", "isinstance", "(", "input_geometry", ",", "np", ".", "ndarray", ")", ":", "raise", "ValueError", "(", "'Unrecognised or unsupported geometry '", "'definition'", ")", "else", ":", "self", ".", "fault_trace", "=", "Line", "(", "[", "Point", "(", "row", "[", "0", "]", ",", "row", "[", "1", "]", ")", "for", "row", "in", "input_geometry", "]", ")", "else", ":", "self", ".", "fault_trace", "=", "input_geometry", "# Build fault surface", "self", ".", "geometry", "=", "SimpleFaultSurface", ".", "from_fault_data", "(", "self", ".", "fault_trace", ",", "self", ".", "upper_depth", ",", "self", ".", "lower_depth", ",", "self", ".", "dip", ",", "mesh_spacing", ")" ]
If geometry is defined as a numpy array then create instance of nhlib.geo.line.Line class, otherwise, if it is already an instance of that class, accept it

:param input_geometry: Trace (line) of the fault source as either i) instance of nhlib.geo.line.Line class ii) numpy.ndarray [Longitude, Latitude]

:param float dip: Dip of fault surface (in degrees)

:param float upper_depth: Upper seismogenic depth (km)

:param float lower_depth: Lower seismogenic depth (km)

:param float mesh_spacing: Spacing of the fault mesh (km) {default = 1.0}
[ "If", "geometry", "is", "defined", "as", "a", "numpy", "array", "then", "create", "instance", "of", "nhlib", ".", "geo", ".", "line", ".", "Line", "class", "otherwise", "if", "already", "instance", "of", "class", "accept", "class" ]
python
train
40.697674
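A usage sketch, assuming the class lives at openquake.hmtk.sources.simple_fault_source.mtkSimpleFaultSource and that its constructor takes an identifier and a name (both assumptions; the trace coordinates and depths are made up):

import numpy as np
from openquake.hmtk.sources.simple_fault_source import mtkSimpleFaultSource  # assumed import path

trace = np.array([[30.0, 40.0],    # [longitude, latitude] vertices
                  [30.5, 40.3]])
src = mtkSimpleFaultSource('SRC001', 'Example Fault')  # assumed signature
src.create_geometry(trace, dip=60.0, upper_depth=0.0,
                    lower_depth=20.0, mesh_spacing=1.0)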
inasafe/inasafe
safe/gui/tools/options_dialog.py
https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/gui/tools/options_dialog.py#L766-L790
def restore_population_parameters(self, global_default=True): """Setup UI for population parameter page from setting. :param global_default: If True, set to original default (from the value in definitions). :type global_default: bool """ if global_default: data = generate_default_profile() else: data = setting('population_preference', generate_default_profile()) if not isinstance(data, dict): LOGGER.debug( 'population parameter is not a dictionary. InaSAFE will use ' 'the default one.') data = generate_default_profile() try: self.profile_widget.data = data except KeyError as e: LOGGER.debug( 'Population parameter is not in correct format. InaSAFE will ' 'use the default one.') LOGGER.debug(e) data = generate_default_profile() self.profile_widget.data = data
[ "def", "restore_population_parameters", "(", "self", ",", "global_default", "=", "True", ")", ":", "if", "global_default", ":", "data", "=", "generate_default_profile", "(", ")", "else", ":", "data", "=", "setting", "(", "'population_preference'", ",", "generate_default_profile", "(", ")", ")", "if", "not", "isinstance", "(", "data", ",", "dict", ")", ":", "LOGGER", ".", "debug", "(", "'population parameter is not a dictionary. InaSAFE will use '", "'the default one.'", ")", "data", "=", "generate_default_profile", "(", ")", "try", ":", "self", ".", "profile_widget", ".", "data", "=", "data", "except", "KeyError", "as", "e", ":", "LOGGER", ".", "debug", "(", "'Population parameter is not in correct format. InaSAFE will '", "'use the default one.'", ")", "LOGGER", ".", "debug", "(", "e", ")", "data", "=", "generate_default_profile", "(", ")", "self", ".", "profile_widget", ".", "data", "=", "data" ]
Setup UI for population parameter page from setting. :param global_default: If True, set to original default (from the value in definitions). :type global_default: bool
[ "Setup", "UI", "for", "population", "parameter", "page", "from", "setting", "." ]
python
train
40
inveniosoftware-attic/invenio-client
invenio_client/connector.py
https://github.com/inveniosoftware-attic/invenio-client/blob/3f9ddb6f3b3ce3a21d399d1098d6769bf05cdd6c/invenio_client/connector.py#L264-L289
def get_records_from_basket(self, bskid, group_basket=False, read_cache=True): """ Returns the records from the (public) basket with given bskid """ if bskid not in self.cached_baskets or not read_cache: if self.user: if group_basket: group_basket = '&category=G' else: group_basket = '' results = requests.get( self.server_url + "/yourbaskets/display?of=xm&bskid=" + str(bskid) + group_basket, cookies=self.cookies, stream=True) else: results = requests.get( self.server_url + "/yourbaskets/display_public?of=xm&bskid=" + str(bskid), stream=True) else: return self.cached_baskets[bskid] parsed_records = self._parse_results(results.raw, self.cached_records) self.cached_baskets[bskid] = parsed_records return parsed_records
[ "def", "get_records_from_basket", "(", "self", ",", "bskid", ",", "group_basket", "=", "False", ",", "read_cache", "=", "True", ")", ":", "if", "bskid", "not", "in", "self", ".", "cached_baskets", "or", "not", "read_cache", ":", "if", "self", ".", "user", ":", "if", "group_basket", ":", "group_basket", "=", "'&category=G'", "else", ":", "group_basket", "=", "''", "results", "=", "requests", ".", "get", "(", "self", ".", "server_url", "+", "\"/yourbaskets/display?of=xm&bskid=\"", "+", "str", "(", "bskid", ")", "+", "group_basket", ",", "cookies", "=", "self", ".", "cookies", ",", "stream", "=", "True", ")", "else", ":", "results", "=", "requests", ".", "get", "(", "self", ".", "server_url", "+", "\"/yourbaskets/display_public?of=xm&bskid=\"", "+", "str", "(", "bskid", ")", ",", "stream", "=", "True", ")", "else", ":", "return", "self", ".", "cached_baskets", "[", "bskid", "]", "parsed_records", "=", "self", ".", "_parse_results", "(", "results", ".", "raw", ",", "self", ".", "cached_records", ")", "self", ".", "cached_baskets", "[", "bskid", "]", "=", "parsed_records", "return", "parsed_records" ]
Returns the records from the (public) basket with given bskid
[ "Returns", "the", "records", "from", "the", "(", "public", ")", "basket", "with", "given", "bskid" ]
python
train
40.615385
Neurita/boyle
boyle/commands.py
https://github.com/Neurita/boyle/blob/2dae7199849395a209c887d5f30506e1de8a9ad9/boyle/commands.py#L146-L169
def condor_submit(cmd): """ Submits cmd to HTCondor queue Parameters ---------- cmd: string Command to be submitted Returns ------- int returncode value from calling the submission command. """ is_running = subprocess.call('condor_status', shell=True) == 0 if not is_running: raise CalledProcessError('HTCondor is not running.') sub_cmd = 'condor_qsub -shell n -b y -r y -N ' \ + cmd.split()[0] + ' -m n' log.info('Calling: ' + sub_cmd) return subprocess.call(sub_cmd + ' ' + cmd, shell=True)
[ "def", "condor_submit", "(", "cmd", ")", ":", "is_running", "=", "subprocess", ".", "call", "(", "'condor_status'", ",", "shell", "=", "True", ")", "==", "0", "if", "not", "is_running", ":", "raise", "CalledProcessError", "(", "'HTCondor is not running.'", ")", "sub_cmd", "=", "'condor_qsub -shell n -b y -r y -N '", "+", "cmd", ".", "split", "(", ")", "[", "0", "]", "+", "' -m n'", "log", ".", "info", "(", "'Calling: '", "+", "sub_cmd", ")", "return", "subprocess", ".", "call", "(", "sub_cmd", "+", "' '", "+", "cmd", ",", "shell", "=", "True", ")" ]
Submits cmd to HTCondor queue Parameters ---------- cmd: string Command to be submitted Returns ------- int returncode value from calling the submission command.
[ "Submits", "cmd", "to", "HTCondor", "queue" ]
python
valid
23.666667
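Usage is a one-liner (the job command is hypothetical). One caveat worth noting: subprocess.CalledProcessError normally takes (returncode, cmd) arguments, so the single-string raise above would itself fail with a TypeError when HTCondor is down:

rc = condor_submit('python train_model.py --epochs 5')  # hypothetical job
if rc != 0:
    print('submission failed')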
azavea/python-sld
sld/__init__.py
https://github.com/azavea/python-sld/blob/70e363782b39249bc9512a78dbbc45aaee52aaf5/sld/__init__.py#L1227-L1274
def create_rule(self, title, symbolizer=None, MinScaleDenominator=None, MaxScaleDenominator=None): """ Create a L{Rule} object on this style. A rule requires a title and symbolizer. If no symbolizer is specified, a PointSymbolizer will be assigned to the rule. @type title: string @param title: The name of the new L{Rule}. @type symbolizer: L{Symbolizer} I{class} @param symbolizer: The symbolizer type. This is the class object (as opposed to a class instance) of the symbolizer to use. @rtype: L{Rule} @return: A newly created rule, attached to this FeatureTypeStyle. """ elem = self._node.makeelement('{%s}Rule' % SLDNode._nsmap['sld'], nsmap=SLDNode._nsmap) self._node.append(elem) rule = Rule(self, len(self._node) - 1) rule.Title = title if MinScaleDenominator is not None: rule.MinScaleDenominator = MinScaleDenominator if MaxScaleDenominator is not None: rule.MaxScaleDenominator = MaxScaleDenominator if symbolizer is None: symbolizer = PointSymbolizer sym = symbolizer(rule) if symbolizer == PointSymbolizer: gph = Graphic(sym) mrk = Mark(gph) mrk.WellKnownName = 'square' fill = Fill(mrk) fill.create_cssparameter('fill', '#ff0000') elif symbolizer == LineSymbolizer: stroke = Stroke(sym) stroke.create_cssparameter('stroke', '#0000ff') elif symbolizer == PolygonSymbolizer: fill = Fill(sym) fill.create_cssparameter('fill', '#AAAAAA') stroke = Stroke(sym) stroke.create_cssparameter('stroke', '#000000') stroke.create_cssparameter('stroke-width', '1') return rule
[ "def", "create_rule", "(", "self", ",", "title", ",", "symbolizer", "=", "None", ",", "MinScaleDenominator", "=", "None", ",", "MaxScaleDenominator", "=", "None", ")", ":", "elem", "=", "self", ".", "_node", ".", "makeelement", "(", "'{%s}Rule'", "%", "SLDNode", ".", "_nsmap", "[", "'sld'", "]", ",", "nsmap", "=", "SLDNode", ".", "_nsmap", ")", "self", ".", "_node", ".", "append", "(", "elem", ")", "rule", "=", "Rule", "(", "self", ",", "len", "(", "self", ".", "_node", ")", "-", "1", ")", "rule", ".", "Title", "=", "title", "if", "MinScaleDenominator", "is", "not", "None", ":", "rule", ".", "MinScaleDenominator", "=", "MinScaleDenominator", "if", "MaxScaleDenominator", "is", "not", "None", ":", "rule", ".", "MaxScaleDenominator", "=", "MaxScaleDenominator", "if", "symbolizer", "is", "None", ":", "symbolizer", "=", "PointSymbolizer", "sym", "=", "symbolizer", "(", "rule", ")", "if", "symbolizer", "==", "PointSymbolizer", ":", "gph", "=", "Graphic", "(", "sym", ")", "mrk", "=", "Mark", "(", "gph", ")", "mrk", ".", "WellKnownName", "=", "'square'", "fill", "=", "Fill", "(", "mrk", ")", "fill", ".", "create_cssparameter", "(", "'fill'", ",", "'#ff0000'", ")", "elif", "symbolizer", "==", "LineSymbolizer", ":", "stroke", "=", "Stroke", "(", "sym", ")", "stroke", ".", "create_cssparameter", "(", "'stroke'", ",", "'#0000ff'", ")", "elif", "symbolizer", "==", "PolygonSymbolizer", ":", "fill", "=", "Fill", "(", "sym", ")", "fill", ".", "create_cssparameter", "(", "'fill'", ",", "'#AAAAAA'", ")", "stroke", "=", "Stroke", "(", "sym", ")", "stroke", ".", "create_cssparameter", "(", "'stroke'", ",", "'#000000'", ")", "stroke", ".", "create_cssparameter", "(", "'stroke-width'", ",", "'1'", ")", "return", "rule" ]
Create a L{Rule} object on this style. A rule requires a title and symbolizer. If no symbolizer is specified, a PointSymbolizer will be assigned to the rule. @type title: string @param title: The name of the new L{Rule}. @type symbolizer: L{Symbolizer} I{class} @param symbolizer: The symbolizer type. This is the class object (as opposed to a class instance) of the symbolizer to use. @rtype: L{Rule} @return: A newly created rule, attached to this FeatureTypeStyle.
[ "Create", "a", "L", "{", "Rule", "}", "object", "on", "this", "style", ".", "A", "rule", "requires", "a", "title", "and", "symbolizer", ".", "If", "no", "symbolizer", "is", "specified", "a", "PointSymbolizer", "will", "be", "assigned", "to", "the", "rule", "." ]
python
train
38.020833
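A hedged end-to-end sketch with python-sld; the create_namedlayer / create_userstyle / create_featuretypestyle helper names are assumed from the library's documented pattern, and the layer name and scale bounds are made up:

from sld import StyledLayerDescriptor, PointSymbolizer

doc = StyledLayerDescriptor()
layer = doc.create_namedlayer('poi')          # assumed helper names
style = layer.create_userstyle()
fts = style.create_featuretypestyle()
rule = fts.create_rule('Points of interest', PointSymbolizer,
                       MinScaleDenominator=1, MaxScaleDenominator=50000)
# rule now carries a red square Mark, per the PointSymbolizer defaults above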
singularityhub/singularity-python
singularity/build/main.py
https://github.com/singularityhub/singularity-python/blob/498c3433724b332f7493fec632d8daf479f47b82/singularity/build/main.py#L170-L203
def send_build_data(build_dir, data, secret, response_url=None,clean_up=True):
    '''finish build sends the build and data (response) to a response url
    :param build_dir: the directory of the build
    :param response_url: where to send the response. If None, won't send
    :param data: the data object to send as a post
    :param clean_up: If true (default) removes build directory
    '''
    # Send with Authentication header
    body = '%s|%s|%s|%s|%s' %(data['container_id'],
                              data['commit'],
                              data['branch'],
                              data['token'],
                              data['tag'])
    signature = generate_header_signature(secret=secret,
                                          payload=body,
                                          request_type="push")
    headers = {'Authorization': signature }
    if response_url is not None:
        finish = requests.post(response_url,data=data, headers=headers)
        bot.debug("RECEIVE POST TO SINGULARITY HUB ---------------------")
        bot.debug(finish.status_code)
        bot.debug(finish.reason)
    else:
        bot.warning("response_url set to None, skipping sending of build.")

    if clean_up == True:
        shutil.rmtree(build_dir)

    # Delay a bit, to give buffer between bringing instance down
    time.sleep(20)
[ "def", "send_build_data", "(", "build_dir", ",", "data", ",", "secret", ",", "response_url", "=", "None", ",", "clean_up", "=", "True", ")", ":", "# Send with Authentication header", "body", "=", "'%s|%s|%s|%s|%s'", "%", "(", "data", "[", "'container_id'", "]", ",", "data", "[", "'commit'", "]", ",", "data", "[", "'branch'", "]", ",", "data", "[", "'token'", "]", ",", "data", "[", "'tag'", "]", ")", "signature", "=", "generate_header_signature", "(", "secret", "=", "secret", ",", "payload", "=", "body", ",", "request_type", "=", "\"push\"", ")", "headers", "=", "{", "'Authorization'", ":", "signature", "}", "if", "response_url", "is", "not", "None", ":", "finish", "=", "requests", ".", "post", "(", "response_url", ",", "data", "=", "data", ",", "headers", "=", "headers", ")", "bot", ".", "debug", "(", "\"RECEIVE POST TO SINGULARITY HUB ---------------------\"", ")", "bot", ".", "debug", "(", "finish", ".", "status_code", ")", "bot", ".", "debug", "(", "finish", ".", "reason", ")", "else", ":", "bot", ".", "warning", "(", "\"response_url set to None, skipping sending of build.\"", ")", "if", "clean_up", "==", "True", ":", "shutil", ".", "rmtree", "(", "build_dir", ")", "# Delay a bit, to give buffer between bringing instance down", "time", ".", "sleep", "(", "20", ")" ]
finish build sends the build and data (response) to a response url
:param build_dir: the directory of the build
:param response_url: where to send the response. If None, won't send
:param data: the data object to send as a post
:param clean_up: If true (default) removes build directory
[ "finish", "build", "sends", "the", "build", "and", "data", "(", "response", ")", "to", "a", "response", "url", ":", "param", "build_dir", ":", "the", "directory", "of", "the", "build", ":", "response_url", ":", "where", "to", "send", "the", "response", ".", "If", "None", "won", "t", "send", ":", "param", "data", ":", "the", "data", "object", "to", "send", "as", "a", "post", ":", "param", "clean_up", ":", "If", "true", "(", "default", ")", "removes", "build", "directory" ]
python
train
39.794118
Miserlou/django-zappa
django_zappa/management/commands/zappa_command.py
https://github.com/Miserlou/django-zappa/blob/7a8083ab6257a0bf0f5c9ae460afabd4de4e2215/django_zappa/management/commands/zappa_command.py#L168-L210
def create_package(self): """ Ensure that the package can be properly configured, and then create it. """ # Create the Lambda zip package (includes project and virtualenvironment) # Also define the path the handler file so it can be copied to the zip # root for Lambda. current_file = os.path.dirname(os.path.abspath( inspect.getfile(inspect.currentframe()))) handler_file = os.sep.join(current_file.split(os.sep)[ 0:-2]) + os.sep + 'handler.py' exclude = self.zappa_settings[self.api_stage].get('exclude', []) + ['static', 'media'] self.zip_path = self.zappa.create_lambda_zip( self.lambda_name, handler_file=handler_file, use_precompiled_packages=self.zappa_settings[self.api_stage].get('use_precompiled_packages', True), exclude=exclude ) # Add this environment's Django settings to that zipfile with open(self.settings_file, 'r') as f: contents = f.read() all_contents = contents if not self.zappa_settings[self.api_stage].has_key('domain'): script_name = self.api_stage else: script_name = '' all_contents = all_contents + \ '\n# Automatically added by Zappa:\nSCRIPT_NAME=\'/' + script_name + '\'\n' f.close() with open('zappa_settings.py', 'w') as f: f.write(all_contents) with zipfile.ZipFile(self.zip_path, 'a') as lambda_zip: lambda_zip.write('zappa_settings.py', 'zappa_settings.py') lambda_zip.close() os.unlink('zappa_settings.py')
[ "def", "create_package", "(", "self", ")", ":", "# Create the Lambda zip package (includes project and virtualenvironment)", "# Also define the path the handler file so it can be copied to the zip", "# root for Lambda.", "current_file", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "inspect", ".", "getfile", "(", "inspect", ".", "currentframe", "(", ")", ")", ")", ")", "handler_file", "=", "os", ".", "sep", ".", "join", "(", "current_file", ".", "split", "(", "os", ".", "sep", ")", "[", "0", ":", "-", "2", "]", ")", "+", "os", ".", "sep", "+", "'handler.py'", "exclude", "=", "self", ".", "zappa_settings", "[", "self", ".", "api_stage", "]", ".", "get", "(", "'exclude'", ",", "[", "]", ")", "+", "[", "'static'", ",", "'media'", "]", "self", ".", "zip_path", "=", "self", ".", "zappa", ".", "create_lambda_zip", "(", "self", ".", "lambda_name", ",", "handler_file", "=", "handler_file", ",", "use_precompiled_packages", "=", "self", ".", "zappa_settings", "[", "self", ".", "api_stage", "]", ".", "get", "(", "'use_precompiled_packages'", ",", "True", ")", ",", "exclude", "=", "exclude", ")", "# Add this environment's Django settings to that zipfile", "with", "open", "(", "self", ".", "settings_file", ",", "'r'", ")", "as", "f", ":", "contents", "=", "f", ".", "read", "(", ")", "all_contents", "=", "contents", "if", "not", "self", ".", "zappa_settings", "[", "self", ".", "api_stage", "]", ".", "has_key", "(", "'domain'", ")", ":", "script_name", "=", "self", ".", "api_stage", "else", ":", "script_name", "=", "''", "all_contents", "=", "all_contents", "+", "'\\n# Automatically added by Zappa:\\nSCRIPT_NAME=\\'/'", "+", "script_name", "+", "'\\'\\n'", "f", ".", "close", "(", ")", "with", "open", "(", "'zappa_settings.py'", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "all_contents", ")", "with", "zipfile", ".", "ZipFile", "(", "self", ".", "zip_path", ",", "'a'", ")", "as", "lambda_zip", ":", "lambda_zip", ".", "write", "(", "'zappa_settings.py'", ",", "'zappa_settings.py'", ")", "lambda_zip", ".", "close", "(", ")", "os", ".", "unlink", "(", "'zappa_settings.py'", ")" ]
Ensure that the package can be properly configured, and then create it.
[ "Ensure", "that", "the", "package", "can", "be", "properly", "configured", "and", "then", "create", "it", "." ]
python
train
39.906977
michael-lazar/rtv
rtv/packages/praw/objects.py
https://github.com/michael-lazar/rtv/blob/ccef2af042566ad384977028cf0bde01bc524dda/rtv/packages/praw/objects.py#L297-L306
def unignore_reports(self): """Remove ignoring of future reports on this object. Undoes 'ignore_reports'. Future reports will now cause notifications and appear in the various moderation listings. """ url = self.reddit_session.config['unignore_reports'] data = {'id': self.fullname} return self.reddit_session.request_json(url, data=data)
[ "def", "unignore_reports", "(", "self", ")", ":", "url", "=", "self", ".", "reddit_session", ".", "config", "[", "'unignore_reports'", "]", "data", "=", "{", "'id'", ":", "self", ".", "fullname", "}", "return", "self", ".", "reddit_session", ".", "request_json", "(", "url", ",", "data", "=", "data", ")" ]
Remove ignoring of future reports on this object. Undoes 'ignore_reports'. Future reports will now cause notifications and appear in the various moderation listings.
[ "Remove", "ignoring", "of", "future", "reports", "on", "this", "object", "." ]
python
train
38.7
stitchdata/python-stitch-client
stitchclient/client.py
https://github.com/stitchdata/python-stitch-client/blob/de4dfb3db209e5d0a7b0c0dcef625f3e465c787b/stitchclient/client.py#L136-L152
def _take_batch(self, min_records): '''If we have enough data to build a batch, returns all the data in the buffer and then clears the buffer.''' if not self._buffer: return [] enough_messages = len(self._buffer) >= min_records enough_time = time.time() - self.time_last_batch_sent >= self.batch_delay_seconds ready = enough_messages or enough_time if not ready: return [] result = list(self._buffer) self._buffer.clear() return result
[ "def", "_take_batch", "(", "self", ",", "min_records", ")", ":", "if", "not", "self", ".", "_buffer", ":", "return", "[", "]", "enough_messages", "=", "len", "(", "self", ".", "_buffer", ")", ">=", "min_records", "enough_time", "=", "time", ".", "time", "(", ")", "-", "self", ".", "time_last_batch_sent", ">=", "self", ".", "batch_delay_seconds", "ready", "=", "enough_messages", "or", "enough_time", "if", "not", "ready", ":", "return", "[", "]", "result", "=", "list", "(", "self", ".", "_buffer", ")", "self", ".", "_buffer", ".", "clear", "(", ")", "return", "result" ]
If we have enough data to build a batch, returns all the data in the buffer and then clears the buffer.
[ "If", "we", "have", "enough", "data", "to", "build", "a", "batch", "returns", "all", "the", "data", "in", "the", "buffer", "and", "then", "clears", "the", "buffer", "." ]
python
train
31
F5Networks/f5-common-python
f5sdk_plugins/fixtures.py
https://github.com/F5Networks/f5-common-python/blob/7e67d5acd757a60e3d5f8c88c534bd72208f5494/f5sdk_plugins/fixtures.py#L108-L112
def vcmp_host(opt_vcmp_host, opt_username, opt_password, opt_port): '''vcmp fixture''' m = ManagementRoot( opt_vcmp_host, opt_username, opt_password, port=opt_port) return m
[ "def", "vcmp_host", "(", "opt_vcmp_host", ",", "opt_username", ",", "opt_password", ",", "opt_port", ")", ":", "m", "=", "ManagementRoot", "(", "opt_vcmp_host", ",", "opt_username", ",", "opt_password", ",", "port", "=", "opt_port", ")", "return", "m" ]
vcmp fixture
[ "vcmp", "fixture" ]
python
train
37.8
tyiannak/pyAudioAnalysis
pyAudioAnalysis/audioFeatureExtraction.py
https://github.com/tyiannak/pyAudioAnalysis/blob/e3da991e7247492deba50648a4c7c0f41e684af4/pyAudioAnalysis/audioFeatureExtraction.py#L521-L614
def stFeatureExtraction(signal, fs, win, step): """ This function implements the shor-term windowing process. For each short-term window a set of features is extracted. This results to a sequence of feature vectors, stored in a numpy matrix. ARGUMENTS signal: the input signal samples fs: the sampling freq (in Hz) win: the short-term window size (in samples) step: the short-term window step (in samples) RETURNS st_features: a numpy array (n_feats x numOfShortTermWindows) """ win = int(win) step = int(step) # Signal normalization signal = numpy.double(signal) signal = signal / (2.0 ** 15) DC = signal.mean() MAX = (numpy.abs(signal)).max() signal = (signal - DC) / (MAX + 0.0000000001) N = len(signal) # total number of samples cur_p = 0 count_fr = 0 nFFT = int(win / 2) [fbank, freqs] = mfccInitFilterBanks(fs, nFFT) # compute the triangular filter banks used in the mfcc calculation nChroma, nFreqsPerChroma = stChromaFeaturesInit(nFFT, fs) n_time_spectral_feats = 8 n_harmonic_feats = 0 n_mfcc_feats = 13 n_chroma_feats = 13 n_total_feats = n_time_spectral_feats + n_mfcc_feats + n_harmonic_feats + n_chroma_feats # n_total_feats = n_time_spectral_feats + n_mfcc_feats + n_harmonic_feats feature_names = [] feature_names.append("zcr") feature_names.append("energy") feature_names.append("energy_entropy") feature_names += ["spectral_centroid", "spectral_spread"] feature_names.append("spectral_entropy") feature_names.append("spectral_flux") feature_names.append("spectral_rolloff") feature_names += ["mfcc_{0:d}".format(mfcc_i) for mfcc_i in range(1, n_mfcc_feats+1)] feature_names += ["chroma_{0:d}".format(chroma_i) for chroma_i in range(1, n_chroma_feats)] feature_names.append("chroma_std") st_features = [] while (cur_p + win - 1 < N): # for each short-term window until the end of signal count_fr += 1 x = signal[cur_p:cur_p+win] # get current window cur_p = cur_p + step # update window position X = abs(fft(x)) # get fft magnitude X = X[0:nFFT] # normalize fft X = X / len(X) if count_fr == 1: X_prev = X.copy() # keep previous fft mag (used in spectral flux) curFV = numpy.zeros((n_total_feats, 1)) curFV[0] = stZCR(x) # zero crossing rate curFV[1] = stEnergy(x) # short-term energy curFV[2] = stEnergyEntropy(x) # short-term entropy of energy [curFV[3], curFV[4]] = stSpectralCentroidAndSpread(X, fs) # spectral centroid and spread curFV[5] = stSpectralEntropy(X) # spectral entropy curFV[6] = stSpectralFlux(X, X_prev) # spectral flux curFV[7] = stSpectralRollOff(X, 0.90, fs) # spectral rolloff curFV[n_time_spectral_feats:n_time_spectral_feats+n_mfcc_feats, 0] = \ stMFCC(X, fbank, n_mfcc_feats).copy() # MFCCs chromaNames, chromaF = stChromaFeatures(X, fs, nChroma, nFreqsPerChroma) curFV[n_time_spectral_feats + n_mfcc_feats: n_time_spectral_feats + n_mfcc_feats + n_chroma_feats - 1] = \ chromaF curFV[n_time_spectral_feats + n_mfcc_feats + n_chroma_feats - 1] = \ chromaF.std() st_features.append(curFV) # delta features ''' if count_fr>1: delta = curFV - prevFV curFVFinal = numpy.concatenate((curFV, delta)) else: curFVFinal = numpy.concatenate((curFV, curFV)) prevFV = curFV st_features.append(curFVFinal) ''' # end of delta X_prev = X.copy() st_features = numpy.concatenate(st_features, 1) return st_features, feature_names
[ "def", "stFeatureExtraction", "(", "signal", ",", "fs", ",", "win", ",", "step", ")", ":", "win", "=", "int", "(", "win", ")", "step", "=", "int", "(", "step", ")", "# Signal normalization", "signal", "=", "numpy", ".", "double", "(", "signal", ")", "signal", "=", "signal", "/", "(", "2.0", "**", "15", ")", "DC", "=", "signal", ".", "mean", "(", ")", "MAX", "=", "(", "numpy", ".", "abs", "(", "signal", ")", ")", ".", "max", "(", ")", "signal", "=", "(", "signal", "-", "DC", ")", "/", "(", "MAX", "+", "0.0000000001", ")", "N", "=", "len", "(", "signal", ")", "# total number of samples", "cur_p", "=", "0", "count_fr", "=", "0", "nFFT", "=", "int", "(", "win", "/", "2", ")", "[", "fbank", ",", "freqs", "]", "=", "mfccInitFilterBanks", "(", "fs", ",", "nFFT", ")", "# compute the triangular filter banks used in the mfcc calculation", "nChroma", ",", "nFreqsPerChroma", "=", "stChromaFeaturesInit", "(", "nFFT", ",", "fs", ")", "n_time_spectral_feats", "=", "8", "n_harmonic_feats", "=", "0", "n_mfcc_feats", "=", "13", "n_chroma_feats", "=", "13", "n_total_feats", "=", "n_time_spectral_feats", "+", "n_mfcc_feats", "+", "n_harmonic_feats", "+", "n_chroma_feats", "# n_total_feats = n_time_spectral_feats + n_mfcc_feats + n_harmonic_feats", "feature_names", "=", "[", "]", "feature_names", ".", "append", "(", "\"zcr\"", ")", "feature_names", ".", "append", "(", "\"energy\"", ")", "feature_names", ".", "append", "(", "\"energy_entropy\"", ")", "feature_names", "+=", "[", "\"spectral_centroid\"", ",", "\"spectral_spread\"", "]", "feature_names", ".", "append", "(", "\"spectral_entropy\"", ")", "feature_names", ".", "append", "(", "\"spectral_flux\"", ")", "feature_names", ".", "append", "(", "\"spectral_rolloff\"", ")", "feature_names", "+=", "[", "\"mfcc_{0:d}\"", ".", "format", "(", "mfcc_i", ")", "for", "mfcc_i", "in", "range", "(", "1", ",", "n_mfcc_feats", "+", "1", ")", "]", "feature_names", "+=", "[", "\"chroma_{0:d}\"", ".", "format", "(", "chroma_i", ")", "for", "chroma_i", "in", "range", "(", "1", ",", "n_chroma_feats", ")", "]", "feature_names", ".", "append", "(", "\"chroma_std\"", ")", "st_features", "=", "[", "]", "while", "(", "cur_p", "+", "win", "-", "1", "<", "N", ")", ":", "# for each short-term window until the end of signal", "count_fr", "+=", "1", "x", "=", "signal", "[", "cur_p", ":", "cur_p", "+", "win", "]", "# get current window", "cur_p", "=", "cur_p", "+", "step", "# update window position", "X", "=", "abs", "(", "fft", "(", "x", ")", ")", "# get fft magnitude", "X", "=", "X", "[", "0", ":", "nFFT", "]", "# normalize fft", "X", "=", "X", "/", "len", "(", "X", ")", "if", "count_fr", "==", "1", ":", "X_prev", "=", "X", ".", "copy", "(", ")", "# keep previous fft mag (used in spectral flux)", "curFV", "=", "numpy", ".", "zeros", "(", "(", "n_total_feats", ",", "1", ")", ")", "curFV", "[", "0", "]", "=", "stZCR", "(", "x", ")", "# zero crossing rate", "curFV", "[", "1", "]", "=", "stEnergy", "(", "x", ")", "# short-term energy", "curFV", "[", "2", "]", "=", "stEnergyEntropy", "(", "x", ")", "# short-term entropy of energy", "[", "curFV", "[", "3", "]", ",", "curFV", "[", "4", "]", "]", "=", "stSpectralCentroidAndSpread", "(", "X", ",", "fs", ")", "# spectral centroid and spread", "curFV", "[", "5", "]", "=", "stSpectralEntropy", "(", "X", ")", "# spectral entropy", "curFV", "[", "6", "]", "=", "stSpectralFlux", "(", "X", ",", "X_prev", ")", "# spectral flux", "curFV", "[", "7", "]", "=", "stSpectralRollOff", "(", "X", ",", "0.90", ",", "fs", ")", "# spectral rolloff", "curFV", 
"[", "n_time_spectral_feats", ":", "n_time_spectral_feats", "+", "n_mfcc_feats", ",", "0", "]", "=", "stMFCC", "(", "X", ",", "fbank", ",", "n_mfcc_feats", ")", ".", "copy", "(", ")", "# MFCCs", "chromaNames", ",", "chromaF", "=", "stChromaFeatures", "(", "X", ",", "fs", ",", "nChroma", ",", "nFreqsPerChroma", ")", "curFV", "[", "n_time_spectral_feats", "+", "n_mfcc_feats", ":", "n_time_spectral_feats", "+", "n_mfcc_feats", "+", "n_chroma_feats", "-", "1", "]", "=", "chromaF", "curFV", "[", "n_time_spectral_feats", "+", "n_mfcc_feats", "+", "n_chroma_feats", "-", "1", "]", "=", "chromaF", ".", "std", "(", ")", "st_features", ".", "append", "(", "curFV", ")", "# delta features", "'''\n if count_fr>1:\n delta = curFV - prevFV\n curFVFinal = numpy.concatenate((curFV, delta)) \n else:\n curFVFinal = numpy.concatenate((curFV, curFV))\n prevFV = curFV\n st_features.append(curFVFinal) \n '''", "# end of delta", "X_prev", "=", "X", ".", "copy", "(", ")", "st_features", "=", "numpy", ".", "concatenate", "(", "st_features", ",", "1", ")", "return", "st_features", ",", "feature_names" ]
This function implements the short-term windowing process. For each short-term window a set of features is extracted. This results in a sequence of feature vectors, stored in a numpy matrix.

        ARGUMENTS
            signal: the input signal samples
            fs: the sampling freq (in Hz)
            win: the short-term window size (in samples)
            step: the short-term window step (in samples)
        RETURNS
            st_features: a numpy array (n_feats x numOfShortTermWindows)
[ "This", "function", "implements", "the", "shor", "-", "term", "windowing", "process", ".", "For", "each", "short", "-", "term", "window", "a", "set", "of", "features", "is", "extracted", ".", "This", "results", "to", "a", "sequence", "of", "feature", "vectors", "stored", "in", "a", "numpy", "matrix", "." ]
python
train
43.925532
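Typical invocation with 50 ms windows and 25 ms steps; audioBasicIO.readAudioFile is the reader this version of pyAudioAnalysis shipped with (the filename is a placeholder):

from pyAudioAnalysis import audioBasicIO, audioFeatureExtraction

[fs, x] = audioBasicIO.readAudioFile('example.wav')
feats, names = audioFeatureExtraction.stFeatureExtraction(
    x, fs, 0.050 * fs, 0.025 * fs)
# feats.shape == (34, num_windows): 8 time/spectral + 13 MFCC + 13 chroma
# names[0] == 'zcr'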
nakagami/pyfirebirdsql
firebirdsql/xsqlvar.py
https://github.com/nakagami/pyfirebirdsql/blob/5ce366c2fc8318510444f4a89801442f3e9e52ca/firebirdsql/xsqlvar.py#L118-L138
def _parse_date(self, raw_value): "Convert raw data to datetime.date" nday = bytes_to_bint(raw_value) + 678882 century = (4 * nday - 1) // 146097 nday = 4 * nday - 1 - 146097 * century day = nday // 4 nday = (4 * day + 3) // 1461 day = 4 * day + 3 - 1461 * nday day = (day + 4) // 4 month = (5 * day - 3) // 153 day = 5 * day - 3 - 153 * month day = (day + 5) // 5 year = 100 * century + nday if month < 10: month += 3 else: month -= 9 year += 1 return year, month, day
[ "def", "_parse_date", "(", "self", ",", "raw_value", ")", ":", "nday", "=", "bytes_to_bint", "(", "raw_value", ")", "+", "678882", "century", "=", "(", "4", "*", "nday", "-", "1", ")", "//", "146097", "nday", "=", "4", "*", "nday", "-", "1", "-", "146097", "*", "century", "day", "=", "nday", "//", "4", "nday", "=", "(", "4", "*", "day", "+", "3", ")", "//", "1461", "day", "=", "4", "*", "day", "+", "3", "-", "1461", "*", "nday", "day", "=", "(", "day", "+", "4", ")", "//", "4", "month", "=", "(", "5", "*", "day", "-", "3", ")", "//", "153", "day", "=", "5", "*", "day", "-", "3", "-", "153", "*", "month", "day", "=", "(", "day", "+", "5", ")", "//", "5", "year", "=", "100", "*", "century", "+", "nday", "if", "month", "<", "10", ":", "month", "+=", "3", "else", ":", "month", "-=", "9", "year", "+=", "1", "return", "year", ",", "month", ",", "day" ]
Convert raw data to datetime.date
[ "Convert", "raw", "data", "to", "datetime", ".", "date" ]
python
train
28.904762
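The arithmetic decodes a signed day count whose zero is 1858-11-17, the Modified Julian Date epoch Firebird uses. A standalone replication, checked against datetime (the bytes_to_bint step is skipped here; the raw integer is passed directly):

import datetime

def parse_firebird_date(raw):
    # same Gregorian-calendar decode as _parse_date above
    nday = raw + 678882
    century = (4 * nday - 1) // 146097
    nday = 4 * nday - 1 - 146097 * century
    day = nday // 4
    nday = (4 * day + 3) // 1461
    day = 4 * day + 3 - 1461 * nday
    day = (day + 4) // 4
    month = (5 * day - 3) // 153
    day = 5 * day - 3 - 153 * month
    day = (day + 5) // 5
    year = 100 * century + nday
    if month < 10:
        month += 3
    else:
        month -= 9
        year += 1
    return year, month, day

epoch = datetime.date(1858, 11, 17)
for raw in (0, 1, 45, 469):   # 469 lands on the leap day 1860-02-29
    y, m, d = parse_firebird_date(raw)
    assert datetime.date(y, m, d) == epoch + datetime.timedelta(days=raw)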
uw-it-aca/uw-restclients-canvas
uw_canvas/roles.py
https://github.com/uw-it-aca/uw-restclients-canvas/blob/9845faf33d49a8f06908efc22640c001116d6ea2/uw_canvas/roles.py#L50-L55
def get_role_by_account_sis_id(self, account_sis_id, role_id): """ Get information about a single role, for the passed account SIS ID. """ return self.get_role(self._sis_id(account_sis_id, sis_field="account"), role_id)
[ "def", "get_role_by_account_sis_id", "(", "self", ",", "account_sis_id", ",", "role_id", ")", ":", "return", "self", ".", "get_role", "(", "self", ".", "_sis_id", "(", "account_sis_id", ",", "sis_field", "=", "\"account\"", ")", ",", "role_id", ")" ]
Get information about a single role, for the passed account SIS ID.
[ "Get", "information", "about", "a", "single", "role", "for", "the", "passed", "account", "SIS", "ID", "." ]
python
test
45.833333
pypa/pipenv
pipenv/patched/notpip/_internal/wheel.py
https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/patched/notpip/_internal/wheel.py#L727-L733
def _contains_egg_info( s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): """Determine whether the string looks like an egg_info. :param s: The string to parse. E.g. foo-2.1 """ return bool(_egg_info_re.search(s))
[ "def", "_contains_egg_info", "(", "s", ",", "_egg_info_re", "=", "re", ".", "compile", "(", "r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)'", ",", "re", ".", "I", ")", ")", ":", "return", "bool", "(", "_egg_info_re", ".", "search", "(", "s", ")", ")" ]
Determine whether the string looks like an egg_info. :param s: The string to parse. E.g. foo-2.1
[ "Determine", "whether", "the", "string", "looks", "like", "an", "egg_info", "." ]
python
train
36
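A self-contained copy of the check, with a couple of illustrative strings:

import re

_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)

def contains_egg_info(s):
    # matches "<name>-<version>" shapes such as "foo-2.1"
    return bool(_egg_info_re.search(s))

assert contains_egg_info('foo-2.1')
assert not contains_egg_info('foo')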
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L154-L209
def declarative(member_class=None, parameter='members', add_init_kwargs=True, sort_key=default_sort_key, is_member=None): """ Class decorator to enable classes to be defined in the style of django models. That is, @declarative classes will get an additional argument to constructor, containing an OrderedDict with all class members matching the specified type. :param class member_class: Class(es) to collect :param is_member: Function to determine if an object should be collected :param str parameter: Name of constructor parameter to inject :param bool add_init_kwargs: If constructor parameter should be injected (Default: True) :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`) :type is_member: (object) -> bool :type sort_key: (object) -> object """ if member_class is None and is_member is None: raise TypeError("The @declarative decorator needs either a member_class parameter or an is_member check function (or both)") def decorator(class_to_decorate): class DeclarativeMeta(class_to_decorate.__class__): # noinspection PyTypeChecker def __init__(cls, name, bases, dict): members = get_members(cls, member_class=member_class, is_member=is_member, sort_key=sort_key, _parameter=parameter) set_declared(cls, members, parameter) super(DeclarativeMeta, cls).__init__(name, bases, dict) new_class = DeclarativeMeta(class_to_decorate.__name__, class_to_decorate.__bases__, {k: v for k, v in class_to_decorate.__dict__.items() if k not in ['__dict__', '__weakref__']}) def get_extra_args_function(self): declared = get_declared(self, parameter) def copy_declared(): for k, v in declared.items(): try: v = copy(v) except TypeError: pass # Not always possible to copy methods yield (k, v) copied_members = OrderedDict(copy_declared()) self.__dict__.update(copied_members) return {parameter: copied_members} if add_init_kwargs: add_args_to_init_call(new_class, get_extra_args_function) else: add_init_call_hook(new_class, get_extra_args_function) setattr(new_class, 'get_declared', classmethod(get_declared)) setattr(new_class, 'set_declared', classmethod(set_declared)) return new_class return decorator
[ "def", "declarative", "(", "member_class", "=", "None", ",", "parameter", "=", "'members'", ",", "add_init_kwargs", "=", "True", ",", "sort_key", "=", "default_sort_key", ",", "is_member", "=", "None", ")", ":", "if", "member_class", "is", "None", "and", "is_member", "is", "None", ":", "raise", "TypeError", "(", "\"The @declarative decorator needs either a member_class parameter or an is_member check function (or both)\"", ")", "def", "decorator", "(", "class_to_decorate", ")", ":", "class", "DeclarativeMeta", "(", "class_to_decorate", ".", "__class__", ")", ":", "# noinspection PyTypeChecker", "def", "__init__", "(", "cls", ",", "name", ",", "bases", ",", "dict", ")", ":", "members", "=", "get_members", "(", "cls", ",", "member_class", "=", "member_class", ",", "is_member", "=", "is_member", ",", "sort_key", "=", "sort_key", ",", "_parameter", "=", "parameter", ")", "set_declared", "(", "cls", ",", "members", ",", "parameter", ")", "super", "(", "DeclarativeMeta", ",", "cls", ")", ".", "__init__", "(", "name", ",", "bases", ",", "dict", ")", "new_class", "=", "DeclarativeMeta", "(", "class_to_decorate", ".", "__name__", ",", "class_to_decorate", ".", "__bases__", ",", "{", "k", ":", "v", "for", "k", ",", "v", "in", "class_to_decorate", ".", "__dict__", ".", "items", "(", ")", "if", "k", "not", "in", "[", "'__dict__'", ",", "'__weakref__'", "]", "}", ")", "def", "get_extra_args_function", "(", "self", ")", ":", "declared", "=", "get_declared", "(", "self", ",", "parameter", ")", "def", "copy_declared", "(", ")", ":", "for", "k", ",", "v", "in", "declared", ".", "items", "(", ")", ":", "try", ":", "v", "=", "copy", "(", "v", ")", "except", "TypeError", ":", "pass", "# Not always possible to copy methods", "yield", "(", "k", ",", "v", ")", "copied_members", "=", "OrderedDict", "(", "copy_declared", "(", ")", ")", "self", ".", "__dict__", ".", "update", "(", "copied_members", ")", "return", "{", "parameter", ":", "copied_members", "}", "if", "add_init_kwargs", ":", "add_args_to_init_call", "(", "new_class", ",", "get_extra_args_function", ")", "else", ":", "add_init_call_hook", "(", "new_class", ",", "get_extra_args_function", ")", "setattr", "(", "new_class", ",", "'get_declared'", ",", "classmethod", "(", "get_declared", ")", ")", "setattr", "(", "new_class", ",", "'set_declared'", ",", "classmethod", "(", "set_declared", ")", ")", "return", "new_class", "return", "decorator" ]
Class decorator to enable classes to be defined in the style of django models. That is, @declarative classes will get an additional argument to constructor, containing an OrderedDict with all class members matching the specified type. :param class member_class: Class(es) to collect :param is_member: Function to determine if an object should be collected :param str parameter: Name of constructor parameter to inject :param bool add_init_kwargs: If constructor parameter should be injected (Default: True) :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`) :type is_member: (object) -> bool :type sort_key: (object) -> object
[ "Class", "decorator", "to", "enable", "classes", "to", "be", "defined", "in", "the", "style", "of", "django", "models", ".", "That", "is", "@declarative", "classes", "will", "get", "an", "additional", "argument", "to", "constructor", "containing", "an", "OrderedDict", "with", "all", "class", "members", "matching", "the", "specified", "type", "." ]
python
train
47.375
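A minimal usage sketch of the decorator in the record above, based on its docstring and tri.declarative's documented behaviour; the Field and MyForm names are made up:

from collections import OrderedDict
from tri.declarative import creation_ordered, declarative

@creation_ordered          # gives Field the ordering default_sort_key expects
class Field(object):
    pass

@declarative(Field)
class MyForm(object):
    foo = Field()
    bar = Field()

    def __init__(self, members):
        # injected by @declarative: OrderedDict([('foo', ...), ('bar', ...)])
        assert isinstance(members, OrderedDict)
        self.members = members

print(list(MyForm().members))   # ['foo', 'bar'] (declaration order)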
quantumlib/Cirq
cirq/google/sim/mem_manager.py
https://github.com/quantumlib/Cirq/blob/0827da80dd7880e5b923eb69407e980ed9bc0bd2/cirq/google/sim/mem_manager.py#L53-L91
def _create_array(self, arr: np.ndarray) -> int: """Returns the handle of a RawArray created from the given numpy array. Args: arr: A numpy ndarray. Returns: The handle (int) of the array. Raises: ValueError: if arr is not a ndarray or of an unsupported dtype. If the array is of an unsupported type, using a view of the array to another dtype and then converting on get is often a work around. """ if not isinstance(arr, np.ndarray): raise ValueError('Array is not a numpy ndarray.') try: c_arr = np.ctypeslib.as_ctypes(arr) except (KeyError, NotImplementedError): raise ValueError( 'Array has unsupported dtype {}.'.format(arr.dtype)) # pylint: disable=protected-access raw_arr = RawArray(c_arr._type_, c_arr) with self._lock: if self._count >= len(self._arrays): self._arrays += len(self._arrays) * [None] self._get_next_free() # Note storing the shape is a workaround for an issue encountered # when upgrading to numpy 1.15. # See https://github.com/numpy/numpy/issues/11636 self._arrays[self._current] = (raw_arr, arr.shape) self._count += 1 return self._current
[ "def", "_create_array", "(", "self", ",", "arr", ":", "np", ".", "ndarray", ")", "->", "int", ":", "if", "not", "isinstance", "(", "arr", ",", "np", ".", "ndarray", ")", ":", "raise", "ValueError", "(", "'Array is not a numpy ndarray.'", ")", "try", ":", "c_arr", "=", "np", ".", "ctypeslib", ".", "as_ctypes", "(", "arr", ")", "except", "(", "KeyError", ",", "NotImplementedError", ")", ":", "raise", "ValueError", "(", "'Array has unsupported dtype {}.'", ".", "format", "(", "arr", ".", "dtype", ")", ")", "# pylint: disable=protected-access", "raw_arr", "=", "RawArray", "(", "c_arr", ".", "_type_", ",", "c_arr", ")", "with", "self", ".", "_lock", ":", "if", "self", ".", "_count", ">=", "len", "(", "self", ".", "_arrays", ")", ":", "self", ".", "_arrays", "+=", "len", "(", "self", ".", "_arrays", ")", "*", "[", "None", "]", "self", ".", "_get_next_free", "(", ")", "# Note storing the shape is a workaround for an issue encountered", "# when upgrading to numpy 1.15.", "# See https://github.com/numpy/numpy/issues/11636", "self", ".", "_arrays", "[", "self", ".", "_current", "]", "=", "(", "raw_arr", ",", "arr", ".", "shape", ")", "self", ".", "_count", "+=", "1", "return", "self", ".", "_current" ]
Returns the handle of a RawArray created from the given numpy array. Args: arr: A numpy ndarray. Returns: The handle (int) of the array. Raises: ValueError: if arr is not a ndarray or of an unsupported dtype. If the array is of an unsupported type, using a view of the array to another dtype and then converting on get is often a work around.
[ "Returns", "the", "handle", "of", "a", "RawArray", "created", "from", "the", "given", "numpy", "array", "." ]
python
train
34.358974
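The ctypes round trip underlying _create_array, in isolation (a generic sketch, not Cirq's shared-memory manager itself):

import numpy as np
from multiprocessing.sharedctypes import RawArray

arr = np.arange(8, dtype=np.float64)
c_arr = np.ctypeslib.as_ctypes(arr)     # ctypes view of the numpy buffer
raw = RawArray(c_arr._type_, c_arr)     # copy into shared memory
shared = np.ctypeslib.as_array(raw)     # ndarray view over the RawArray
shared[0] = 42.0                        # visible to forked worker processes
# The manager above also stores arr.shape separately, which is the
# numpy 1.15 workaround noted in its comments.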
python-bugzilla/python-bugzilla
bugzilla/bug.py
https://github.com/python-bugzilla/python-bugzilla/blob/7de8b225104f24a1eee3e837bf1e02d60aefe69f/bugzilla/bug.py#L294-L299
def getcomments(self): """ Returns an array of comment dictionaries for this bug """ comment_list = self.bugzilla.get_comments([self.bug_id]) return comment_list['bugs'][str(self.bug_id)]['comments']
[ "def", "getcomments", "(", "self", ")", ":", "comment_list", "=", "self", ".", "bugzilla", ".", "get_comments", "(", "[", "self", ".", "bug_id", "]", ")", "return", "comment_list", "[", "'bugs'", "]", "[", "str", "(", "self", ".", "bug_id", ")", "]", "[", "'comments'", "]" ]
Returns an array of comment dictionaries for this bug
[ "Returns", "an", "array", "of", "comment", "dictionaries", "for", "this", "bug" ]
python
train
39
jazzband/django-push-notifications
push_notifications/wns.py
https://github.com/jazzband/django-push-notifications/blob/c4a0d710711fa27bfb6533c0bf3468cb67a62679/push_notifications/wns.py#L325-L357
def _add_sub_elements_from_dict(parent, sub_dict): """ Add SubElements to the parent element. :param parent: ElementTree.Element: The parent element for the newly created SubElement. :param sub_dict: dict: Used to create a new SubElement. See `dict_to_xml_schema` method docstring for more information. e.g.: {"example": { "attrs": { "key1": "value1", ... }, ... }} """ for key, value in sub_dict.items(): if isinstance(value, list): for repeated_element in value: sub_element = ET.SubElement(parent, key) _add_element_attrs(sub_element, repeated_element.get("attrs", {})) children = repeated_element.get("children", None) if isinstance(children, dict): _add_sub_elements_from_dict(sub_element, children) elif isinstance(children, str): sub_element.text = children else: sub_element = ET.SubElement(parent, key) _add_element_attrs(sub_element, value.get("attrs", {})) children = value.get("children", None) if isinstance(children, dict): _add_sub_elements_from_dict(sub_element, children) elif isinstance(children, str): sub_element.text = children
[ "def", "_add_sub_elements_from_dict", "(", "parent", ",", "sub_dict", ")", ":", "for", "key", ",", "value", "in", "sub_dict", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "list", ")", ":", "for", "repeated_element", "in", "value", ":", "sub_element", "=", "ET", ".", "SubElement", "(", "parent", ",", "key", ")", "_add_element_attrs", "(", "sub_element", ",", "repeated_element", ".", "get", "(", "\"attrs\"", ",", "{", "}", ")", ")", "children", "=", "repeated_element", ".", "get", "(", "\"children\"", ",", "None", ")", "if", "isinstance", "(", "children", ",", "dict", ")", ":", "_add_sub_elements_from_dict", "(", "sub_element", ",", "children", ")", "elif", "isinstance", "(", "children", ",", "str", ")", ":", "sub_element", ".", "text", "=", "children", "else", ":", "sub_element", "=", "ET", ".", "SubElement", "(", "parent", ",", "key", ")", "_add_element_attrs", "(", "sub_element", ",", "value", ".", "get", "(", "\"attrs\"", ",", "{", "}", ")", ")", "children", "=", "value", ".", "get", "(", "\"children\"", ",", "None", ")", "if", "isinstance", "(", "children", ",", "dict", ")", ":", "_add_sub_elements_from_dict", "(", "sub_element", ",", "children", ")", "elif", "isinstance", "(", "children", ",", "str", ")", ":", "sub_element", ".", "text", "=", "children" ]
Add SubElements to the parent element. :param parent: ElementTree.Element: The parent element for the newly created SubElement. :param sub_dict: dict: Used to create a new SubElement. See `dict_to_xml_schema` method docstring for more information. e.g.: {"example": { "attrs": { "key1": "value1", ... }, ... }}
[ "Add", "SubElements", "to", "the", "parent", "element", "." ]
python
train
33.484848
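For intuition, a payload in the documented {"attrs": ..., "children": ...} shape and the XML it produces when applied to a <toast> parent (hand-worked; ToastText01 is a real WNS template name but the values are illustrative):

sub_dict = {
    'visual': {
        'children': {
            'binding': {
                'attrs': {'template': 'ToastText01'},
                'children': {
                    'text': {'attrs': {'id': '1'}, 'children': 'Hello'},
                },
            },
        },
    },
}
# _add_sub_elements_from_dict(toast, sub_dict) yields:
# <toast><visual><binding template="ToastText01">
#   <text id="1">Hello</text></binding></visual></toast>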
tgbugs/pyontutils
ilxutils/ilxutils/database_client.py
https://github.com/tgbugs/pyontutils/blob/3d913db29c177db39151592909a4f56170ef8b35/ilxutils/ilxutils/database_client.py#L53-L74
def create_df_file_with_query(self, query, output):
        """ Dumps df in chunks to avoid crashes.
        """
        chunk_size = 100000
        offset = 0
        with open(output, 'wb') as outfile:
            query = query.replace(';', '')
            query += """ LIMIT {chunk_size} OFFSET {offset};"""
            while True:
                print(offset)
                # format into a fresh variable so the template placeholders
                # survive for the next iteration
                chunk_query = query.format(
                    chunk_size=chunk_size,
                    offset=offset
                )
                df = pd.read_sql(chunk_query, self.engine)
                pickle.dump(df, outfile)
                offset += chunk_size
                if len(df) < chunk_size:
                    break
[ "def", "create_df_file_with_query", "(", "self", ",", "query", ",", "output", ")", ":", "chunk_size", "=", "100000", "offset", "=", "0", "data", "=", "defaultdict", "(", "lambda", ":", "defaultdict", "(", "list", ")", ")", "with", "open", "(", "output", ",", "'wb'", ")", "as", "outfile", ":", "query", "=", "query", ".", "replace", "(", "';'", ",", "''", ")", "query", "+=", "\"\"\" LIMIT {chunk_size} OFFSET {offset};\"\"\"", "while", "True", ":", "print", "(", "offset", ")", "query", "=", "query", ".", "format", "(", "chunk_size", "=", "chunk_size", ",", "offset", "=", "offset", ")", "df", "=", "pd", ".", "read_sql", "(", "query", ",", "self", ".", "engine", ")", "pickle", ".", "dump", "(", "df", ",", "outfile", ")", "offset", "+=", "chunk_size", "if", "len", "(", "df", ")", "<", "chunk_size", ":", "break", "outfile", ".", "close", "(", ")" ]
Dumps df in chunks to avoid crashes.
[ "Dumps", "in", "df", "in", "chunks", "to", "avoid", "crashes", "." ]
python
train
34.590909
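Because each chunk is a separate pickle.dump into one file, reading the dump back means looping pickle.load until EOF (a sketch assuming a file written by the method above; 'dump.pkl' stands for the output path):

import pickle
import pandas as pd

frames = []
with open('dump.pkl', 'rb') as infile:
    while True:
        try:
            frames.append(pickle.load(infile))   # one DataFrame per chunk
        except EOFError:
            break
df = pd.concat(frames, ignore_index=True)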
saltstack/salt
salt/modules/gpg.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/gpg.py#L1226-L1311
def decrypt(user=None, text=None, filename=None, output=None, use_passphrase=False, gnupghome=None, bare=False): ''' Decrypt a message or file user Which user's keychain to access, defaults to user Salt is running as. Passing the user as ``salt`` will set the GnuPG home directory to the ``/etc/salt/gpgkeys``. text The encrypted text to decrypt. filename The encrypted filename to decrypt. output The filename where the decrypted data will be written, default is standard out. use_passphrase Whether to use a passphrase with the signing key. Passphrase is received from Pillar. gnupghome Specify the location where GPG keyring and related files are stored. bare If ``True``, return the (armored) decrypted block as a string without the standard comment/res dict. CLI Example: .. code-block:: bash salt '*' gpg.decrypt filename='/path/to/important.file.gpg' salt '*' gpg.decrypt filename='/path/to/important.file.gpg' use_passphrase=True ''' ret = { 'res': True, 'comment': '' } gpg = _create_gpg(user, gnupghome) if use_passphrase: gpg_passphrase = __salt__['pillar.get']('gpg_passphrase') if not gpg_passphrase: raise SaltInvocationError('gpg_passphrase not available in pillar.') gpg_passphrase = gpg_passphrase['gpg_passphrase'] else: gpg_passphrase = None if text: result = gpg.decrypt(text, passphrase=gpg_passphrase) elif filename: with salt.utils.files.flopen(filename, 'rb') as _fp: if output: result = gpg.decrypt_file(_fp, passphrase=gpg_passphrase, output=output) else: result = gpg.decrypt_file(_fp, passphrase=gpg_passphrase) else: raise SaltInvocationError('filename or text must be passed.') if result.ok: if not bare: if output: ret['comment'] = 'Decrypted data has been written to {0}'.format(output) else: ret['comment'] = result.data else: ret = result.data else: if not bare: ret['res'] = False ret['comment'] = '{0}.\nPlease check the salt-minion log.'.format(result.status) else: ret = False log.error(result.stderr) return ret
[ "def", "decrypt", "(", "user", "=", "None", ",", "text", "=", "None", ",", "filename", "=", "None", ",", "output", "=", "None", ",", "use_passphrase", "=", "False", ",", "gnupghome", "=", "None", ",", "bare", "=", "False", ")", ":", "ret", "=", "{", "'res'", ":", "True", ",", "'comment'", ":", "''", "}", "gpg", "=", "_create_gpg", "(", "user", ",", "gnupghome", ")", "if", "use_passphrase", ":", "gpg_passphrase", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'gpg_passphrase'", ")", "if", "not", "gpg_passphrase", ":", "raise", "SaltInvocationError", "(", "'gpg_passphrase not available in pillar.'", ")", "gpg_passphrase", "=", "gpg_passphrase", "[", "'gpg_passphrase'", "]", "else", ":", "gpg_passphrase", "=", "None", "if", "text", ":", "result", "=", "gpg", ".", "decrypt", "(", "text", ",", "passphrase", "=", "gpg_passphrase", ")", "elif", "filename", ":", "with", "salt", ".", "utils", ".", "files", ".", "flopen", "(", "filename", ",", "'rb'", ")", "as", "_fp", ":", "if", "output", ":", "result", "=", "gpg", ".", "decrypt_file", "(", "_fp", ",", "passphrase", "=", "gpg_passphrase", ",", "output", "=", "output", ")", "else", ":", "result", "=", "gpg", ".", "decrypt_file", "(", "_fp", ",", "passphrase", "=", "gpg_passphrase", ")", "else", ":", "raise", "SaltInvocationError", "(", "'filename or text must be passed.'", ")", "if", "result", ".", "ok", ":", "if", "not", "bare", ":", "if", "output", ":", "ret", "[", "'comment'", "]", "=", "'Decrypted data has been written to {0}'", ".", "format", "(", "output", ")", "else", ":", "ret", "[", "'comment'", "]", "=", "result", ".", "data", "else", ":", "ret", "=", "result", ".", "data", "else", ":", "if", "not", "bare", ":", "ret", "[", "'res'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'{0}.\\nPlease check the salt-minion log.'", ".", "format", "(", "result", ".", "status", ")", "else", ":", "ret", "=", "False", "log", ".", "error", "(", "result", ".", "stderr", ")", "return", "ret" ]
Decrypt a message or file user Which user's keychain to access, defaults to user Salt is running as. Passing the user as ``salt`` will set the GnuPG home directory to the ``/etc/salt/gpgkeys``. text The encrypted text to decrypt. filename The encrypted filename to decrypt. output The filename where the decrypted data will be written, default is standard out. use_passphrase Whether to use a passphrase with the signing key. Passphrase is received from Pillar. gnupghome Specify the location where GPG keyring and related files are stored. bare If ``True``, return the (armored) decrypted block as a string without the standard comment/res dict. CLI Example: .. code-block:: bash salt '*' gpg.decrypt filename='/path/to/important.file.gpg' salt '*' gpg.decrypt filename='/path/to/important.file.gpg' use_passphrase=True
[ "Decrypt", "a", "message", "or", "file" ]
python
train
28.290698
tk0miya/tk.phpautodoc
src/phply/phpparse.py
https://github.com/tk0miya/tk.phpautodoc/blob/cf789f64abaf76351485cee231a075227e665fb6/src/phply/phpparse.py#L1207-L1212
def p_common_scalar_magic_dir(p): 'common_scalar : DIR' value = getattr(p.lexer, 'filename', None) if value is not None: value = os.path.dirname(value) p[0] = ast.MagicConstant(p[1].upper(), value, lineno=p.lineno(1))
[ "def", "p_common_scalar_magic_dir", "(", "p", ")", ":", "value", "=", "getattr", "(", "p", ".", "lexer", ",", "'filename'", ",", "None", ")", "if", "value", "is", "not", "None", ":", "value", "=", "os", ".", "path", ".", "dirname", "(", "value", ")", "p", "[", "0", "]", "=", "ast", ".", "MagicConstant", "(", "p", "[", "1", "]", ".", "upper", "(", ")", ",", "value", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")" ]
common_scalar : DIR
[ "common_scalar", ":", "DIR" ]
python
train
39.333333
nkavaldj/myhdl_lib
myhdl_lib/simulation/_DUTer.py
https://github.com/nkavaldj/myhdl_lib/blob/9902afd2031e7847373f692821b2135fd0810aa8/myhdl_lib/simulation/_DUTer.py#L66-L98
def _getCosimulation(self, func, **kwargs): ''' Returns a co-simulation instance of func. Uses the _simulator specified by self._simulator. Enables traces if self._trace is True func - MyHDL function to be simulated kwargs - dict of func interface assignments: for signals and parameters ''' vals = {} vals['topname'] = func.func_name vals['unitname'] = func.func_name.lower() hdlsim = self._simulator if not hdlsim: raise ValueError("No _simulator specified") if not self.sim_reg.has_key(hdlsim): raise ValueError("Simulator {} is not registered".format(hdlsim)) hdl, analyze_cmd, elaborate_cmd, simulate_cmd = self.sim_reg[hdlsim] # Convert to HDL if hdl == "verilog": toVerilog(func, **kwargs) if self._trace: self._enableTracesVerilog("./tb_{topname}.v".format(**vals)) elif hdl == "vhdl": toVHDL(func, **kwargs) # Analyze HDL os.system(analyze_cmd.format(**vals)) # Elaborate if elaborate_cmd: os.system(elaborate_cmd.format(**vals)) # Simulate return Cosimulation(simulate_cmd.format(**vals), **kwargs)
[ "def", "_getCosimulation", "(", "self", ",", "func", ",", "*", "*", "kwargs", ")", ":", "vals", "=", "{", "}", "vals", "[", "'topname'", "]", "=", "func", ".", "func_name", "vals", "[", "'unitname'", "]", "=", "func", ".", "func_name", ".", "lower", "(", ")", "hdlsim", "=", "self", ".", "_simulator", "if", "not", "hdlsim", ":", "raise", "ValueError", "(", "\"No _simulator specified\"", ")", "if", "not", "self", ".", "sim_reg", ".", "has_key", "(", "hdlsim", ")", ":", "raise", "ValueError", "(", "\"Simulator {} is not registered\"", ".", "format", "(", "hdlsim", ")", ")", "hdl", ",", "analyze_cmd", ",", "elaborate_cmd", ",", "simulate_cmd", "=", "self", ".", "sim_reg", "[", "hdlsim", "]", "# Convert to HDL", "if", "hdl", "==", "\"verilog\"", ":", "toVerilog", "(", "func", ",", "*", "*", "kwargs", ")", "if", "self", ".", "_trace", ":", "self", ".", "_enableTracesVerilog", "(", "\"./tb_{topname}.v\"", ".", "format", "(", "*", "*", "vals", ")", ")", "elif", "hdl", "==", "\"vhdl\"", ":", "toVHDL", "(", "func", ",", "*", "*", "kwargs", ")", "# Analyze HDL", "os", ".", "system", "(", "analyze_cmd", ".", "format", "(", "*", "*", "vals", ")", ")", "# Elaborate", "if", "elaborate_cmd", ":", "os", ".", "system", "(", "elaborate_cmd", ".", "format", "(", "*", "*", "vals", ")", ")", "# Simulate", "return", "Cosimulation", "(", "simulate_cmd", ".", "format", "(", "*", "*", "vals", ")", ",", "*", "*", "kwargs", ")" ]
Returns a co-simulation instance of func. Uses the _simulator specified by self._simulator. Enables traces if self._trace is True func - MyHDL function to be simulated kwargs - dict of func interface assignments: for signals and parameters
[ "Returns", "a", "co", "-", "simulation", "instance", "of", "func", ".", "Uses", "the", "_simulator", "specified", "by", "self", ".", "_simulator", ".", "Enables", "traces", "if", "self", ".", "_trace", "is", "True", "func", "-", "MyHDL", "function", "to", "be", "simulated", "kwargs", "-", "dict", "of", "func", "interface", "assignments", ":", "for", "signals", "and", "parameters" ]
python
train
38.454545
twilio/twilio-python
twilio/rest/proxy/v1/service/session/participant/message_interaction.py
https://github.com/twilio/twilio-python/blob/c867895f55dcc29f522e6e8b8868d0d18483132f/twilio/rest/proxy/v1/service/session/participant/message_interaction.py#L44-L68
def create(self, body=values.unset, media_url=values.unset): """ Create a new MessageInteractionInstance :param unicode body: Message body :param unicode media_url: Reserved :returns: Newly created MessageInteractionInstance :rtype: twilio.rest.proxy.v1.service.session.participant.message_interaction.MessageInteractionInstance """ data = values.of({'Body': body, 'MediaUrl': serialize.map(media_url, lambda e: e), }) payload = self._version.create( 'POST', self._uri, data=data, ) return MessageInteractionInstance( self._version, payload, service_sid=self._solution['service_sid'], session_sid=self._solution['session_sid'], participant_sid=self._solution['participant_sid'], )
[ "def", "create", "(", "self", ",", "body", "=", "values", ".", "unset", ",", "media_url", "=", "values", ".", "unset", ")", ":", "data", "=", "values", ".", "of", "(", "{", "'Body'", ":", "body", ",", "'MediaUrl'", ":", "serialize", ".", "map", "(", "media_url", ",", "lambda", "e", ":", "e", ")", ",", "}", ")", "payload", "=", "self", ".", "_version", ".", "create", "(", "'POST'", ",", "self", ".", "_uri", ",", "data", "=", "data", ",", ")", "return", "MessageInteractionInstance", "(", "self", ".", "_version", ",", "payload", ",", "service_sid", "=", "self", ".", "_solution", "[", "'service_sid'", "]", ",", "session_sid", "=", "self", ".", "_solution", "[", "'session_sid'", "]", ",", "participant_sid", "=", "self", ".", "_solution", "[", "'participant_sid'", "]", ",", ")" ]
Create a new MessageInteractionInstance :param unicode body: Message body :param unicode media_url: Reserved :returns: Newly created MessageInteractionInstance :rtype: twilio.rest.proxy.v1.service.session.participant.message_interaction.MessageInteractionInstance
[ "Create", "a", "new", "MessageInteractionInstance" ]
python
train
34.08
ContextLab/quail
quail/helpers.py
https://github.com/ContextLab/quail/blob/71dd53c792dd915dc84879d8237e3582dd68b7a4/quail/helpers.py#L263-L278
def merge_pres_feats(pres, features): """ Helper function to merge pres and features to support legacy features argument """ sub = [] for psub, fsub in zip(pres, features): exp = [] for pexp, fexp in zip(psub, fsub): lst = [] for p, f in zip(pexp, fexp): p.update(f) lst.append(p) exp.append(lst) sub.append(exp) return sub
[ "def", "merge_pres_feats", "(", "pres", ",", "features", ")", ":", "sub", "=", "[", "]", "for", "psub", ",", "fsub", "in", "zip", "(", "pres", ",", "features", ")", ":", "exp", "=", "[", "]", "for", "pexp", ",", "fexp", "in", "zip", "(", "psub", ",", "fsub", ")", ":", "lst", "=", "[", "]", "for", "p", ",", "f", "in", "zip", "(", "pexp", ",", "fexp", ")", ":", "p", ".", "update", "(", "f", ")", "lst", ".", "append", "(", "p", ")", "exp", ".", "append", "(", "lst", ")", "sub", ".", "append", "(", "exp", ")", "return", "sub" ]
Helper function to merge pres and features to support legacy features argument
[ "Helper", "function", "to", "merge", "pres", "and", "features", "to", "support", "legacy", "features", "argument" ]
python
train
26.5625
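A tiny worked case showing the subject, experiment, and list nesting the three zips walk (hypothetical data; note the presentation dicts are updated in place):

pres = [[[{'item': 'CAT'}, {'item': 'DOG'}]]]       # 1 subject, 1 experiment
features = [[[{'size': 'small'}, {'size': 'big'}]]]

merged = merge_pres_feats(pres, features)
# [[[{'item': 'CAT', 'size': 'small'},
#    {'item': 'DOG', 'size': 'big'}]]]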
sanger-pathogens/pymummer
pymummer/alignment.py
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L90-L93
def reverse_reference(self): '''Changes the coordinates as if the reference sequence has been reverse complemented''' self.ref_start = self.ref_length - self.ref_start - 1 self.ref_end = self.ref_length - self.ref_end - 1
[ "def", "reverse_reference", "(", "self", ")", ":", "self", ".", "ref_start", "=", "self", ".", "ref_length", "-", "self", ".", "ref_start", "-", "1", "self", ".", "ref_end", "=", "self", ".", "ref_length", "-", "self", ".", "ref_end", "-", "1" ]
Changes the coordinates as if the reference sequence has been reverse complemented
[ "Changes", "the", "coordinates", "as", "if", "the", "reference", "sequence", "has", "been", "reverse", "complemented" ]
python
train
60.5
JoelBender/bacpypes
py25/bacpypes/apdu.py
https://github.com/JoelBender/bacpypes/blob/4111b8604a16fa2b7f80d8104a43b9f3e28dfc78/py25/bacpypes/apdu.py#L719-L734
def apdu_contents(self, use_dict=None, as_class=dict): """Return the contents of an object as a dict.""" if _debug: APCISequence._debug("apdu_contents use_dict=%r as_class=%r", use_dict, as_class) # make/extend the dictionary of content if use_dict is None: use_dict = as_class() # set the function based on the class name use_dict.__setitem__('function', self.__class__.__name__) # fill in from the sequence contents Sequence.dict_contents(self, use_dict=use_dict, as_class=as_class) # return what we built/updated return use_dict
[ "def", "apdu_contents", "(", "self", ",", "use_dict", "=", "None", ",", "as_class", "=", "dict", ")", ":", "if", "_debug", ":", "APCISequence", ".", "_debug", "(", "\"apdu_contents use_dict=%r as_class=%r\"", ",", "use_dict", ",", "as_class", ")", "# make/extend the dictionary of content", "if", "use_dict", "is", "None", ":", "use_dict", "=", "as_class", "(", ")", "# set the function based on the class name", "use_dict", ".", "__setitem__", "(", "'function'", ",", "self", ".", "__class__", ".", "__name__", ")", "# fill in from the sequence contents", "Sequence", ".", "dict_contents", "(", "self", ",", "use_dict", "=", "use_dict", ",", "as_class", "=", "as_class", ")", "# return what we built/updated", "return", "use_dict" ]
Return the contents of an object as a dict.
[ "Return", "the", "contents", "of", "an", "object", "as", "a", "dict", "." ]
python
train
38.25
SAP/PyHDB
pyhdb/protocol/message.py
https://github.com/SAP/PyHDB/blob/826539d06b8bcef74fe755e7489b8a8255628f12/pyhdb/protocol/message.py#L47-L69
def pack(self):
        """ Pack message to binary stream. """
        payload = io.BytesIO()
        # Advance num bytes equal to header size - the header is written later
        # after the payload of all segments and parts has been written:
        payload.seek(self.header_size, io.SEEK_CUR)

        # Write out payload of segments and parts:
        self.build_payload(payload)
        packet_length = len(payload.getvalue()) - self.header_size

        self.header = MessageHeader(self.session_id, self.packet_count, packet_length,
                                    constants.MAX_SEGMENT_SIZE,
                                    num_segments=len(self.segments),
                                    packet_options=0)
        packed_header = self.header_struct.pack(*self.header)

        # Go back to beginning of payload for writing message header:
        payload.seek(0)
        payload.write(packed_header)
        payload.seek(0, io.SEEK_END)
        trace(self)
        return payload
[ "def", "pack", "(", "self", ")", ":", "payload", "=", "io", ".", "BytesIO", "(", ")", "# Advance num bytes equal to header size - the header is written later", "# after the payload of all segments and parts has been written:", "payload", ".", "seek", "(", "self", ".", "header_size", ",", "io", ".", "SEEK_CUR", ")", "# Write out payload of segments and parts:", "self", ".", "build_payload", "(", "payload", ")", "packet_length", "=", "len", "(", "payload", ".", "getvalue", "(", ")", ")", "-", "self", ".", "header_size", "self", ".", "header", "=", "MessageHeader", "(", "self", ".", "session_id", ",", "self", ".", "packet_count", ",", "packet_length", ",", "constants", ".", "MAX_SEGMENT_SIZE", ",", "num_segments", "=", "len", "(", "self", ".", "segments", ")", ",", "packet_options", "=", "0", ")", "packed_header", "=", "self", ".", "header_struct", ".", "pack", "(", "*", "self", ".", "header", ")", "# Go back to begining of payload for writing message header:", "payload", ".", "seek", "(", "0", ")", "payload", ".", "write", "(", "packed_header", ")", "payload", ".", "seek", "(", "0", ",", "io", ".", "SEEK_END", ")", "trace", "(", "self", ")", "return", "payload" ]
Pack message to binary stream.
[ "Pack", "message", "to", "binary", "stream", "." ]
python
train
39.434783
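The reserve-then-backfill pattern used by pack, in miniature (a generic sketch, not PyHDB's actual header layout):

import io
import struct

HEADER = struct.Struct('<I')           # stand-in: 4-byte payload length

buf = io.BytesIO()
buf.seek(HEADER.size, io.SEEK_CUR)     # leave room for the header
buf.write(b'segments and parts...')   # write the payload first
length = buf.tell() - HEADER.size
buf.seek(0)
buf.write(HEADER.pack(length))         # backfill the header at the front
buf.seek(0, io.SEEK_END)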
gem/oq-engine
openquake/hmtk/plotting/beachball.py
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/beachball.py#L769-L849
def TDL(AN, BN): """ Helper function for MT2Plane. Adapted from MATLAB script `bb.m <http://www.ceri.memphis.edu/people/olboyd/Software/Software.html>`_ written by Andy Michael and Oliver Boyd. """ XN = AN[0] YN = AN[1] ZN = AN[2] XE = BN[0] YE = BN[1] ZE = BN[2] AAA = 1.0 / (1000000) CON = 57.2957795 if np.fabs(ZN) < AAA: FD = 90. AXN = np.fabs(XN) if AXN > 1.0: AXN = 1.0 FT = np.arcsin(AXN) * CON ST = -XN CT = YN if ST >= 0. and CT < 0: FT = 180. - FT if ST < 0. and CT <= 0: FT = 180. + FT if ST < 0. and CT > 0: FT = 360. - FT FL = np.arcsin(abs(ZE)) * CON SL = -ZE if np.fabs(XN) < AAA: CL = XE / YN else: CL = -YE / XN if SL >= 0. and CL < 0: FL = 180. - FL if SL < 0. and CL <= 0: FL = FL - 180. if SL < 0. and CL > 0: FL = -FL else: if - ZN > 1.0: ZN = -1.0 FDH = np.arccos(-ZN) FD = FDH * CON SD = np.sin(FDH) if SD == 0: return ST = -XN / SD CT = YN / SD SX = np.fabs(ST) if SX > 1.0: SX = 1.0 FT = np.arcsin(SX) * CON if ST >= 0. and CT < 0: FT = 180. - FT if ST < 0. and CT <= 0: FT = 180. + FT if ST < 0. and CT > 0: FT = 360. - FT SL = -ZE / SD SX = np.fabs(SL) if SX > 1.0: SX = 1.0 FL = np.arcsin(SX) * CON if ST == 0: CL = XE / CT else: XXX = YN * ZN * ZE / SD / SD + YE CL = -SD * XXX / XN if CT == 0: CL = YE / ST if SL >= 0. and CL < 0: FL = 180. - FL if SL < 0. and CL <= 0: FL = FL - 180. if SL < 0. and CL > 0: FL = -FL return (FT, FD, FL)
[ "def", "TDL", "(", "AN", ",", "BN", ")", ":", "XN", "=", "AN", "[", "0", "]", "YN", "=", "AN", "[", "1", "]", "ZN", "=", "AN", "[", "2", "]", "XE", "=", "BN", "[", "0", "]", "YE", "=", "BN", "[", "1", "]", "ZE", "=", "BN", "[", "2", "]", "AAA", "=", "1.0", "/", "(", "1000000", ")", "CON", "=", "57.2957795", "if", "np", ".", "fabs", "(", "ZN", ")", "<", "AAA", ":", "FD", "=", "90.", "AXN", "=", "np", ".", "fabs", "(", "XN", ")", "if", "AXN", ">", "1.0", ":", "AXN", "=", "1.0", "FT", "=", "np", ".", "arcsin", "(", "AXN", ")", "*", "CON", "ST", "=", "-", "XN", "CT", "=", "YN", "if", "ST", ">=", "0.", "and", "CT", "<", "0", ":", "FT", "=", "180.", "-", "FT", "if", "ST", "<", "0.", "and", "CT", "<=", "0", ":", "FT", "=", "180.", "+", "FT", "if", "ST", "<", "0.", "and", "CT", ">", "0", ":", "FT", "=", "360.", "-", "FT", "FL", "=", "np", ".", "arcsin", "(", "abs", "(", "ZE", ")", ")", "*", "CON", "SL", "=", "-", "ZE", "if", "np", ".", "fabs", "(", "XN", ")", "<", "AAA", ":", "CL", "=", "XE", "/", "YN", "else", ":", "CL", "=", "-", "YE", "/", "XN", "if", "SL", ">=", "0.", "and", "CL", "<", "0", ":", "FL", "=", "180.", "-", "FL", "if", "SL", "<", "0.", "and", "CL", "<=", "0", ":", "FL", "=", "FL", "-", "180.", "if", "SL", "<", "0.", "and", "CL", ">", "0", ":", "FL", "=", "-", "FL", "else", ":", "if", "-", "ZN", ">", "1.0", ":", "ZN", "=", "-", "1.0", "FDH", "=", "np", ".", "arccos", "(", "-", "ZN", ")", "FD", "=", "FDH", "*", "CON", "SD", "=", "np", ".", "sin", "(", "FDH", ")", "if", "SD", "==", "0", ":", "return", "ST", "=", "-", "XN", "/", "SD", "CT", "=", "YN", "/", "SD", "SX", "=", "np", ".", "fabs", "(", "ST", ")", "if", "SX", ">", "1.0", ":", "SX", "=", "1.0", "FT", "=", "np", ".", "arcsin", "(", "SX", ")", "*", "CON", "if", "ST", ">=", "0.", "and", "CT", "<", "0", ":", "FT", "=", "180.", "-", "FT", "if", "ST", "<", "0.", "and", "CT", "<=", "0", ":", "FT", "=", "180.", "+", "FT", "if", "ST", "<", "0.", "and", "CT", ">", "0", ":", "FT", "=", "360.", "-", "FT", "SL", "=", "-", "ZE", "/", "SD", "SX", "=", "np", ".", "fabs", "(", "SL", ")", "if", "SX", ">", "1.0", ":", "SX", "=", "1.0", "FL", "=", "np", ".", "arcsin", "(", "SX", ")", "*", "CON", "if", "ST", "==", "0", ":", "CL", "=", "XE", "/", "CT", "else", ":", "XXX", "=", "YN", "*", "ZN", "*", "ZE", "/", "SD", "/", "SD", "+", "YE", "CL", "=", "-", "SD", "*", "XXX", "/", "XN", "if", "CT", "==", "0", ":", "CL", "=", "YE", "/", "ST", "if", "SL", ">=", "0.", "and", "CL", "<", "0", ":", "FL", "=", "180.", "-", "FL", "if", "SL", "<", "0.", "and", "CL", "<=", "0", ":", "FL", "=", "FL", "-", "180.", "if", "SL", "<", "0.", "and", "CL", ">", "0", ":", "FL", "=", "-", "FL", "return", "(", "FT", ",", "FD", ",", "FL", ")" ]
Helper function for MT2Plane. Adapted from MATLAB script `bb.m <http://www.ceri.memphis.edu/people/olboyd/Software/Software.html>`_ written by Andy Michael and Oliver Boyd.
[ "Helper", "function", "for", "MT2Plane", "." ]
python
train
24.08642
tensorflow/cleverhans
cleverhans/experimental/certification/dual_formulation.py
https://github.com/tensorflow/cleverhans/blob/97488e215760547b81afc53f5e5de8ba7da5bd98/cleverhans/experimental/certification/dual_formulation.py#L380-L423
def get_full_psd_matrix(self): """Function that returns the tf graph corresponding to the entire matrix M. Returns: matrix_h: unrolled version of tf matrix corresponding to H matrix_m: unrolled tf matrix corresponding to M """ if self.matrix_m is not None: return self.matrix_h, self.matrix_m # Computing the matrix term h_columns = [] for i in range(self.nn_params.num_hidden_layers + 1): current_col_elems = [] for j in range(i): current_col_elems.append( tf.zeros([self.nn_params.sizes[j], self.nn_params.sizes[i]])) # For the first layer, there is no relu constraint if i == 0: current_col_elems.append(utils.diag(self.lambda_lu[i])) else: current_col_elems.append( utils.diag(self.lambda_lu[i] + self.lambda_quad[i])) if i < self.nn_params.num_hidden_layers: current_col_elems.append(tf.matmul( utils.diag(-1 * self.lambda_quad[i + 1]), self.nn_params.weights[i])) for j in range(i + 2, self.nn_params.num_hidden_layers + 1): current_col_elems.append( tf.zeros([self.nn_params.sizes[j], self.nn_params.sizes[i]])) current_column = tf.concat(current_col_elems, 0) h_columns.append(current_column) self.matrix_h = tf.concat(h_columns, 1) self.matrix_h = (self.matrix_h + tf.transpose(self.matrix_h)) self.matrix_m = tf.concat( [ tf.concat([tf.reshape(self.nu, (1, 1)), tf.transpose(self.vector_g)], axis=1), tf.concat([self.vector_g, self.matrix_h], axis=1) ], axis=0) return self.matrix_h, self.matrix_m
[ "def", "get_full_psd_matrix", "(", "self", ")", ":", "if", "self", ".", "matrix_m", "is", "not", "None", ":", "return", "self", ".", "matrix_h", ",", "self", ".", "matrix_m", "# Computing the matrix term", "h_columns", "=", "[", "]", "for", "i", "in", "range", "(", "self", ".", "nn_params", ".", "num_hidden_layers", "+", "1", ")", ":", "current_col_elems", "=", "[", "]", "for", "j", "in", "range", "(", "i", ")", ":", "current_col_elems", ".", "append", "(", "tf", ".", "zeros", "(", "[", "self", ".", "nn_params", ".", "sizes", "[", "j", "]", ",", "self", ".", "nn_params", ".", "sizes", "[", "i", "]", "]", ")", ")", "# For the first layer, there is no relu constraint", "if", "i", "==", "0", ":", "current_col_elems", ".", "append", "(", "utils", ".", "diag", "(", "self", ".", "lambda_lu", "[", "i", "]", ")", ")", "else", ":", "current_col_elems", ".", "append", "(", "utils", ".", "diag", "(", "self", ".", "lambda_lu", "[", "i", "]", "+", "self", ".", "lambda_quad", "[", "i", "]", ")", ")", "if", "i", "<", "self", ".", "nn_params", ".", "num_hidden_layers", ":", "current_col_elems", ".", "append", "(", "tf", ".", "matmul", "(", "utils", ".", "diag", "(", "-", "1", "*", "self", ".", "lambda_quad", "[", "i", "+", "1", "]", ")", ",", "self", ".", "nn_params", ".", "weights", "[", "i", "]", ")", ")", "for", "j", "in", "range", "(", "i", "+", "2", ",", "self", ".", "nn_params", ".", "num_hidden_layers", "+", "1", ")", ":", "current_col_elems", ".", "append", "(", "tf", ".", "zeros", "(", "[", "self", ".", "nn_params", ".", "sizes", "[", "j", "]", ",", "self", ".", "nn_params", ".", "sizes", "[", "i", "]", "]", ")", ")", "current_column", "=", "tf", ".", "concat", "(", "current_col_elems", ",", "0", ")", "h_columns", ".", "append", "(", "current_column", ")", "self", ".", "matrix_h", "=", "tf", ".", "concat", "(", "h_columns", ",", "1", ")", "self", ".", "matrix_h", "=", "(", "self", ".", "matrix_h", "+", "tf", ".", "transpose", "(", "self", ".", "matrix_h", ")", ")", "self", ".", "matrix_m", "=", "tf", ".", "concat", "(", "[", "tf", ".", "concat", "(", "[", "tf", ".", "reshape", "(", "self", ".", "nu", ",", "(", "1", ",", "1", ")", ")", ",", "tf", ".", "transpose", "(", "self", ".", "vector_g", ")", "]", ",", "axis", "=", "1", ")", ",", "tf", ".", "concat", "(", "[", "self", ".", "vector_g", ",", "self", ".", "matrix_h", "]", ",", "axis", "=", "1", ")", "]", ",", "axis", "=", "0", ")", "return", "self", ".", "matrix_h", ",", "self", ".", "matrix_m" ]
Function that returns the tf graph corresponding to the entire matrix M. Returns: matrix_h: unrolled version of tf matrix corresponding to H matrix_m: unrolled tf matrix corresponding to M
[ "Function", "that", "returns", "the", "tf", "graph", "corresponding", "to", "the", "entire", "matrix", "M", "." ]
python
train
37.068182
jrief/django-websocket-redis
ws4redis/websocket.py
https://github.com/jrief/django-websocket-redis/blob/abcddaad2f579d71dbf375e5e34bc35eef795a81/ws4redis/websocket.py#L209-L226
def receive(self): """ Read and return a message from the stream. If `None` is returned, then the socket is considered closed/errored. """ if self._closed: raise WebSocketError("Connection is already closed") try: return self.read_message() except UnicodeError as e: logger.info('websocket.receive: UnicodeError {}'.format(e)) self.close(1007) except WebSocketError as e: logger.info('websocket.receive: WebSocketError {}'.format(e)) self.close(1002) except Exception as e: logger.info('websocket.receive: Unknown error {}'.format(e)) raise e
[ "def", "receive", "(", "self", ")", ":", "if", "self", ".", "_closed", ":", "raise", "WebSocketError", "(", "\"Connection is already closed\"", ")", "try", ":", "return", "self", ".", "read_message", "(", ")", "except", "UnicodeError", "as", "e", ":", "logger", ".", "info", "(", "'websocket.receive: UnicodeError {}'", ".", "format", "(", "e", ")", ")", "self", ".", "close", "(", "1007", ")", "except", "WebSocketError", "as", "e", ":", "logger", ".", "info", "(", "'websocket.receive: WebSocketError {}'", ".", "format", "(", "e", ")", ")", "self", ".", "close", "(", "1002", ")", "except", "Exception", "as", "e", ":", "logger", ".", "info", "(", "'websocket.receive: Unknown error {}'", ".", "format", "(", "e", ")", ")", "raise", "e" ]
Read and return a message from the stream. If `None` is returned, then the socket is considered closed/errored.
[ "Read", "and", "return", "a", "message", "from", "the", "stream", ".", "If", "None", "is", "returned", "then", "the", "socket", "is", "considered", "closed", "/", "errored", "." ]
python
train
38.5
ktbyers/netmiko
netmiko/base_connection.py
https://github.com/ktbyers/netmiko/blob/54e6116c0b4664de2123081937e0a9a27bdfdfea/netmiko/base_connection.py#L935-L948
def _build_ssh_client(self): """Prepare for Paramiko SSH connection.""" # Create instance of SSHClient object remote_conn_pre = paramiko.SSHClient() # Load host_keys for better SSH security if self.system_host_keys: remote_conn_pre.load_system_host_keys() if self.alt_host_keys and path.isfile(self.alt_key_file): remote_conn_pre.load_host_keys(self.alt_key_file) # Default is to automatically add untrusted hosts (make sure appropriate for your env) remote_conn_pre.set_missing_host_key_policy(self.key_policy) return remote_conn_pre
[ "def", "_build_ssh_client", "(", "self", ")", ":", "# Create instance of SSHClient object", "remote_conn_pre", "=", "paramiko", ".", "SSHClient", "(", ")", "# Load host_keys for better SSH security", "if", "self", ".", "system_host_keys", ":", "remote_conn_pre", ".", "load_system_host_keys", "(", ")", "if", "self", ".", "alt_host_keys", "and", "path", ".", "isfile", "(", "self", ".", "alt_key_file", ")", ":", "remote_conn_pre", ".", "load_host_keys", "(", "self", ".", "alt_key_file", ")", "# Default is to automatically add untrusted hosts (make sure appropriate for your env)", "remote_conn_pre", ".", "set_missing_host_key_policy", "(", "self", ".", "key_policy", ")", "return", "remote_conn_pre" ]
Prepare for Paramiko SSH connection.
[ "Prepare", "for", "Paramiko", "SSH", "connection", "." ]
python
train
44.214286
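The bare Paramiko calls this wrapper assembles (standard Paramiko API; the host and credentials are placeholders):

import paramiko

client = paramiko.SSHClient()
client.load_system_host_keys()                   # e.g. ~/.ssh/known_hosts
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# client.connect('device.example.com', username='admin', password='...')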
MrYsLab/pymata-aio
pymata_aio/pymata_iot.py
https://github.com/MrYsLab/pymata-aio/blob/015081a4628b9d47dfe3f8d6c698ff903f107810/pymata_aio/pymata_iot.py#L127-L140
async def digital_read(self, command):
        """
        This method reads and returns the last reported value for a digital pin.
        Normally not used since digital pin updates will be provided automatically
        as they occur with the digital_message_reply being sent to the client after
        set_pin_mode is called. (see enable_digital_reporting for message format)

        :param command: {"method": "digital_read", "params": [PIN]}

        :returns: {"method": "digital_read_reply", "params": [PIN, DIGITAL_DATA_VALUE]}
        """
        pin = int(command[0])
        data_val = await self.core.digital_read(pin)
        reply = json.dumps({"method": "digital_read_reply", "params": [pin, data_val]})
        await self.websocket.send(reply)
[ "async", "def", "digital_read", "(", "self", ",", "command", ")", ":", "pin", "=", "int", "(", "command", "[", "0", "]", ")", "data_val", "=", "await", "self", ".", "core", ".", "digital_read", "(", "pin", ")", "reply", "=", "json", ".", "dumps", "(", "{", "\"method\"", ":", "\"digital_read_reply\"", ",", "\"params\"", ":", "[", "pin", ",", "data_val", "]", "}", ")", "await", "self", ".", "websocket", ".", "send", "(", "reply", ")" ]
This method reads and returns the last reported value for a digital pin. Normally not used since digital pin updates will be provided automatically as they occur with the digital_message_reply being sent to the client after set_pin_mode is called. (see enable_digital_reporting for message format) :param command: {"method": "digital_read", "params": [PIN]} :returns: {"method": "digital_read_reply", "params": [PIN, DIGITAL_DATA_VALUE]}
[ "This", "method", "reads", "and", "returns", "the", "last", "reported", "value", "for", "a", "digital", "pin", ".", "Normally", "not", "used", "since", "digital", "pin", "updates", "will", "be", "provided", "automatically", "as", "they", "occur", "with", "the", "digital_message_reply", "being", "sent", "to", "the", "client", "after", "set_pin_mode", "is", "called", "..", "(", "see", "enable_digital_reporting", "for", "message", "format", ")" ]
python
train
53.5
Metatab/metapack
metapack/cli/index.py
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/cli/index.py#L223-L248
def dump_index(args, idx): """Create a metatab file for the index""" import csv import sys from metatab import MetatabDoc doc = MetatabDoc() pack_section = doc.new_section('Packages', ['Identifier', 'Name', 'Nvname', 'Version', 'Format']) r = doc['Root'] r.new_term('Root.Title', 'Package Index') for p in idx.list(): pack_section.new_term('Package', p['url'], identifier=p['ident'], name=p['name'], nvname=p['nvname'], version=p['version'], format=p['format']) doc.write_csv(args.dump)
[ "def", "dump_index", "(", "args", ",", "idx", ")", ":", "import", "csv", "import", "sys", "from", "metatab", "import", "MetatabDoc", "doc", "=", "MetatabDoc", "(", ")", "pack_section", "=", "doc", ".", "new_section", "(", "'Packages'", ",", "[", "'Identifier'", ",", "'Name'", ",", "'Nvname'", ",", "'Version'", ",", "'Format'", "]", ")", "r", "=", "doc", "[", "'Root'", "]", "r", ".", "new_term", "(", "'Root.Title'", ",", "'Package Index'", ")", "for", "p", "in", "idx", ".", "list", "(", ")", ":", "pack_section", ".", "new_term", "(", "'Package'", ",", "p", "[", "'url'", "]", ",", "identifier", "=", "p", "[", "'ident'", "]", ",", "name", "=", "p", "[", "'name'", "]", ",", "nvname", "=", "p", "[", "'nvname'", "]", ",", "version", "=", "p", "[", "'version'", "]", ",", "format", "=", "p", "[", "'format'", "]", ")", "doc", ".", "write_csv", "(", "args", ".", "dump", ")" ]
Create a metatab file for the index
[ "Create", "a", "metatab", "file", "for", "the", "index" ]
python
train
24.269231
anrosent/LT-code
lt/sampler.py
https://github.com/anrosent/LT-code/blob/e13a4c927effc90f9d41ab3884f9fcbd95b9450d/lt/sampler.py#L25-L32
def gen_tau(S, K, delta):
    """The Robust part of the RSD; we precompute an array for speed
    """
    pivot = floor(K/S)
    return [S/K * 1/d for d in range(1, pivot)] \
        + [S/K * log(S/delta)] \
        + [0 for d in range(pivot, K)]
[ "def", "gen_tau", "(", "S", ",", "K", ",", "delta", ")", ":", "pivot", "=", "floor", "(", "K", "/", "S", ")", "return", "[", "S", "/", "K", "*", "1", "/", "d", "for", "d", "in", "range", "(", "1", ",", "pivot", ")", "]", "+", "[", "S", "/", "K", "*", "log", "(", "S", "/", "delta", ")", "]", "+", "[", "0", "for", "d", "in", "range", "(", "pivot", ",", "K", ")", "]" ]
The Robust part of the RSD; we precompute an array for speed
[ "The", "Robust", "part", "of", "the", "RSD", "we", "precompute", "an", "array", "for", "speed" ]
python
train
31.375
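Plugging in small illustrative numbers (K=16, S=4, delta=0.5) shows the spike the robust part adds at degree d = K/S; log here is the natural log (a sketch, assuming gen_tau from above and its math imports are in scope):

from math import floor, log   # needed by gen_tau above

S, K, delta = 4, 16, 0.5
tau = gen_tau(S, K, delta)
print(len(tau))   # 16: one weight per degree d = 1..K
print(tau[:4])    # [0.25, 0.125, 0.0833..., 0.5199...]; the spike is at d = 4
# Added to the ideal soliton rho and renormalised, tau gives the RSD.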
docker/docker-py
docker/models/volumes.py
https://github.com/docker/docker-py/blob/613d6aad83acc9931ff2ecfd6a6c7bd8061dc125/docker/models/volumes.py#L14-L25
def remove(self, force=False): """ Remove this volume. Args: force (bool): Force removal of volumes that were already removed out of band by the volume driver plugin. Raises: :py:class:`docker.errors.APIError` If volume failed to remove. """ return self.client.api.remove_volume(self.id, force=force)
[ "def", "remove", "(", "self", ",", "force", "=", "False", ")", ":", "return", "self", ".", "client", ".", "api", ".", "remove_volume", "(", "self", ".", "id", ",", "force", "=", "force", ")" ]
Remove this volume. Args: force (bool): Force removal of volumes that were already removed out of band by the volume driver plugin. Raises: :py:class:`docker.errors.APIError` If volume failed to remove.
[ "Remove", "this", "volume", "." ]
python
train
32.833333
spyder-ide/spyder
spyder/config/gui.py
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/config/gui.py#L142-L147
def get_color_scheme(name): """Get syntax color scheme""" color_scheme = {} for key in sh.COLOR_SCHEME_KEYS: color_scheme[key] = CONF.get("appearance", "%s/%s" % (name, key)) return color_scheme
[ "def", "get_color_scheme", "(", "name", ")", ":", "color_scheme", "=", "{", "}", "for", "key", "in", "sh", ".", "COLOR_SCHEME_KEYS", ":", "color_scheme", "[", "key", "]", "=", "CONF", ".", "get", "(", "\"appearance\"", ",", "\"%s/%s\"", "%", "(", "name", ",", "key", ")", ")", "return", "color_scheme" ]
Get syntax color scheme
[ "Get", "syntax", "color", "scheme" ]
python
train
35.5
maas/python-libmaas
maas/client/viscera/bcaches.py
https://github.com/maas/python-libmaas/blob/4092c68ef7fb1753efc843569848e2bcc3415002/maas/client/viscera/bcaches.py#L62-L65
async def delete(self): """Delete this Bcache.""" await self._handler.delete( system_id=self.node.system_id, id=self.id)
[ "async", "def", "delete", "(", "self", ")", ":", "await", "self", ".", "_handler", ".", "delete", "(", "system_id", "=", "self", ".", "node", ".", "system_id", ",", "id", "=", "self", ".", "id", ")" ]
Delete this Bcache.
[ "Delete", "this", "Bcache", "." ]
python
train
36.25
vtkiorg/vtki
vtki/container.py
https://github.com/vtkiorg/vtki/blob/5ccad7ae6d64a03e9594c9c7474c8aab3eb22dd1/vtki/container.py#L211-L216
def get_index_by_name(self, name): """Find the index number by block name""" for i in range(self.n_blocks): if self.get_block_name(i) == name: return i raise KeyError('Block name ({}) not found'.format(name))
[ "def", "get_index_by_name", "(", "self", ",", "name", ")", ":", "for", "i", "in", "range", "(", "self", ".", "n_blocks", ")", ":", "if", "self", ".", "get_block_name", "(", "i", ")", "==", "name", ":", "return", "i", "raise", "KeyError", "(", "'Block name ({}) not found'", ".", "format", "(", "name", ")", ")" ]
Find the index number by block name
[ "Find", "the", "index", "number", "by", "block", "name" ]
python
train
42.5
pypa/pipenv
pipenv/vendor/requests/sessions.py
https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/vendor/requests/sessions.py#L719-L731
def get_adapter(self, url): """ Returns the appropriate connection adapter for the given URL. :rtype: requests.adapters.BaseAdapter """ for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix.lower()): return adapter # Nothing matches :-/ raise InvalidSchema("No connection adapters were found for '%s'" % url)
[ "def", "get_adapter", "(", "self", ",", "url", ")", ":", "for", "(", "prefix", ",", "adapter", ")", "in", "self", ".", "adapters", ".", "items", "(", ")", ":", "if", "url", ".", "lower", "(", ")", ".", "startswith", "(", "prefix", ".", "lower", "(", ")", ")", ":", "return", "adapter", "# Nothing matches :-/", "raise", "InvalidSchema", "(", "\"No connection adapters were found for '%s'\"", "%", "url", ")" ]
Returns the appropriate connection adapter for the given URL. :rtype: requests.adapters.BaseAdapter
[ "Returns", "the", "appropriate", "connection", "adapter", "for", "the", "given", "URL", "." ]
python
train
31.538462
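Standard requests usage that exercises this lookup (real requests API; the api.example.com prefix is illustrative):

import requests

s = requests.Session()
print(s.get_adapter('https://example.com'))    # the default HTTPAdapter

s.mount('https://api.example.com', requests.adapters.HTTPAdapter())
# mount() keeps the prefixes sorted longest-first, so the most specific
# prefix wins when get_adapter iterates over self.adapters.items().
print(s.get_adapter('https://api.example.com/v1'))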
ergoithz/unicategories
unicategories/tools.py
https://github.com/ergoithz/unicategories/blob/70ade9fa3662ac3fc62fb2648a29a360a4d82025/unicategories/tools.py#L66-L81
def has(self, character): ''' Get if character (or character code point) is contained by any range on this range group. :param character: character or unicode code point to look for :type character: str or int :returns: True if character is contained by any range, False otherwise :rtype: bool ''' if not self: return False character = character if isinstance(character, int) else ord(character) last = self[-1][-1] start, end = self[bisect.bisect_right(self, (character, last)) - 1] return start <= character < end
[ "def", "has", "(", "self", ",", "character", ")", ":", "if", "not", "self", ":", "return", "False", "character", "=", "character", "if", "isinstance", "(", "character", ",", "int", ")", "else", "ord", "(", "character", ")", "last", "=", "self", "[", "-", "1", "]", "[", "-", "1", "]", "start", ",", "end", "=", "self", "[", "bisect", ".", "bisect_right", "(", "self", ",", "(", "character", ",", "last", ")", ")", "-", "1", "]", "return", "start", "<=", "character", "<", "end" ]
Get if character (or character code point) is contained by any range on this range group. :param character: character or unicode code point to look for :type character: str or int :returns: True if character is contained by any range, False otherwise :rtype: bool
[ "Get", "if", "character", "(", "or", "character", "code", "point", ")", "is", "contained", "by", "any", "range", "on", "this", "range", "group", "." ]
python
train
38.5625
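The bisect trick in isolation, over a tuple of half-open ranges (a standalone sketch mirroring the method above):

import bisect

ranges = ((48, 58), (65, 91))   # ASCII digits and uppercase, half-open [start, end)
last = ranges[-1][-1]

def has(cp):
    start, end = ranges[bisect.bisect_right(ranges, (cp, last)) - 1]
    return start <= cp < end

print(has(ord('7')), has(ord('A')), has(ord('a')))   # True True False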
cloudant/python-cloudant
src/cloudant/_client_session.py
https://github.com/cloudant/python-cloudant/blob/e0ba190f6ba07fe3522a668747128214ad573c7e/src/cloudant/_client_session.py#L164-L184
def request(self, method, url, **kwargs): """ Overrides ``requests.Session.request`` to renew the cookie and then retry the original request (if required). """ resp = super(CookieSession, self).request(method, url, **kwargs) if not self._auto_renew: return resp is_expired = any(( resp.status_code == 403 and response_to_json_dict(resp).get('error') == 'credentials_expired', resp.status_code == 401 )) if is_expired: self.login() resp = super(CookieSession, self).request(method, url, **kwargs) return resp
[ "def", "request", "(", "self", ",", "method", ",", "url", ",", "*", "*", "kwargs", ")", ":", "resp", "=", "super", "(", "CookieSession", ",", "self", ")", ".", "request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", "if", "not", "self", ".", "_auto_renew", ":", "return", "resp", "is_expired", "=", "any", "(", "(", "resp", ".", "status_code", "==", "403", "and", "response_to_json_dict", "(", "resp", ")", ".", "get", "(", "'error'", ")", "==", "'credentials_expired'", ",", "resp", ".", "status_code", "==", "401", ")", ")", "if", "is_expired", ":", "self", ".", "login", "(", ")", "resp", "=", "super", "(", "CookieSession", ",", "self", ")", ".", "request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", "return", "resp" ]
Overrides ``requests.Session.request`` to renew the cookie and then retry the original request (if required).
[ "Overrides", "requests", ".", "Session", ".", "request", "to", "renew", "the", "cookie", "and", "then", "retry", "the", "original", "request", "(", "if", "required", ")", "." ]
python
train
30.619048
pytest-dev/pluggy
pluggy/manager.py
https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L158-L179
def add_hookspecs(self, module_or_class): """ add new hook specifications defined in the given module_or_class. Functions are recognized if they have been decorated accordingly. """ names = [] for name in dir(module_or_class): spec_opts = self.parse_hookspec_opts(module_or_class, name) if spec_opts is not None: hc = getattr(self.hook, name, None) if hc is None: hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts) setattr(self.hook, name, hc) else: # plugins registered this hook without knowing the spec hc.set_specification(module_or_class, spec_opts) for hookfunction in hc.get_hookimpls(): self._verify_hook(hc, hookfunction) names.append(name) if not names: raise ValueError( "did not find any %r hooks in %r" % (self.project_name, module_or_class) )
[ "def", "add_hookspecs", "(", "self", ",", "module_or_class", ")", ":", "names", "=", "[", "]", "for", "name", "in", "dir", "(", "module_or_class", ")", ":", "spec_opts", "=", "self", ".", "parse_hookspec_opts", "(", "module_or_class", ",", "name", ")", "if", "spec_opts", "is", "not", "None", ":", "hc", "=", "getattr", "(", "self", ".", "hook", ",", "name", ",", "None", ")", "if", "hc", "is", "None", ":", "hc", "=", "_HookCaller", "(", "name", ",", "self", ".", "_hookexec", ",", "module_or_class", ",", "spec_opts", ")", "setattr", "(", "self", ".", "hook", ",", "name", ",", "hc", ")", "else", ":", "# plugins registered this hook without knowing the spec", "hc", ".", "set_specification", "(", "module_or_class", ",", "spec_opts", ")", "for", "hookfunction", "in", "hc", ".", "get_hookimpls", "(", ")", ":", "self", ".", "_verify_hook", "(", "hc", ",", "hookfunction", ")", "names", ".", "append", "(", "name", ")", "if", "not", "names", ":", "raise", "ValueError", "(", "\"did not find any %r hooks in %r\"", "%", "(", "self", ".", "project_name", ",", "module_or_class", ")", ")" ]
add new hook specifications defined in the given module_or_class. Functions are recognized if they have been decorated accordingly.
[ "add", "new", "hook", "specifications", "defined", "in", "the", "given", "module_or_class", ".", "Functions", "are", "recognized", "if", "they", "have", "been", "decorated", "accordingly", "." ]
python
train
47.454545
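End-to-end pluggy usage that drives add_hookspecs (standard pluggy API; the project and hook names are made up):

import pluggy

hookspec = pluggy.HookspecMarker('myproject')
hookimpl = pluggy.HookimplMarker('myproject')

class MySpec(object):
    @hookspec
    def myhook(self, arg):
        """Spec: return something derived from arg."""

class Plugin(object):
    @hookimpl
    def myhook(self, arg):
        return arg + 1

pm = pluggy.PluginManager('myproject')
pm.add_hookspecs(MySpec)        # scans MySpec exactly as shown above
pm.register(Plugin())
print(pm.hook.myhook(arg=41))   # [42] (one result per registered impl)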
Qiskit/qiskit-terra
qiskit/result/postprocess.py
https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/result/postprocess.py#L67-L83
def _list_to_complex_array(complex_list):
    """Convert nested list of shape (..., 2) to complex numpy array with shape (...)

    Args:
        complex_list (list): List to convert.

    Returns:
        np.ndarray: Complex numpy array

    Raises:
        QiskitError: If inner most array of input nested list is not of length 2.
    """
    arr = np.asarray(complex_list, dtype=np.complex_)
    if not arr.shape[-1] == 2:
        raise QiskitError('Inner most nested list is not of length 2.')
    return arr[..., 0] + 1j*arr[..., 1]
[ "def", "_list_to_complex_array", "(", "complex_list", ")", ":", "arr", "=", "np", ".", "asarray", "(", "complex_list", ",", "dtype", "=", "np", ".", "complex_", ")", "if", "not", "arr", ".", "shape", "[", "-", "1", "]", "==", "2", ":", "raise", "QiskitError", "(", "'Inner most nested list is not of length 2.'", ")", "return", "arr", "[", "...", ",", "0", "]", "+", "1j", "*", "arr", "[", "...", ",", "1", "]" ]
Convert nested list of shape (..., 2) to complex numpy array with shape (...)

Args:
    complex_list (list): List to convert.

Returns:
    np.ndarray: Complex numpy array

Raises:
    QiskitError: If inner most array of input nested list is not of length 2.
[ "Convert", "nested", "list", "of", "shape", "(", "...", "2", ")", "to", "complex", "numpy", "array", "with", "shape", "(", "...", ")" ]
python
test
30.647059
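The helper is private, but the conversion it performs is easy to reproduce with numpy alone; a standalone sketch with toy values:

import numpy as np

pairs = [[1.0, 2.0], [0.0, -1.0]]            # two complex values as [re, im]
arr = np.asarray(pairs, dtype=np.complex_)   # np.complex_ as in the record; use complex on NumPy 2.x
result = arr[..., 0] + 1j * arr[..., 1]      # -> array([1.+2.j, 0.-1.j])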
user-cont/conu
conu/backend/origin/backend.py
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/origin/backend.py#L318-L334
def all_pods_are_ready(self, app_name): """ Check if all pods are ready for specific app :param app_name: str, name of the app :return: bool """ app_pod_exists = False for pod in self.list_pods(namespace=self.project): if app_name in pod.name and 'build' not in pod.name and 'deploy' not in pod.name: app_pod_exists = True if not pod.is_ready(): return False if app_pod_exists: logger.info("All pods are ready!") return True return False
[ "def", "all_pods_are_ready", "(", "self", ",", "app_name", ")", ":", "app_pod_exists", "=", "False", "for", "pod", "in", "self", ".", "list_pods", "(", "namespace", "=", "self", ".", "project", ")", ":", "if", "app_name", "in", "pod", ".", "name", "and", "'build'", "not", "in", "pod", ".", "name", "and", "'deploy'", "not", "in", "pod", ".", "name", ":", "app_pod_exists", "=", "True", "if", "not", "pod", ".", "is_ready", "(", ")", ":", "return", "False", "if", "app_pod_exists", ":", "logger", ".", "info", "(", "\"All pods are ready!\"", ")", "return", "True", "return", "False" ]
Check if all pods are ready for specific app :param app_name: str, name of the app :return: bool
[ "Check", "if", "all", "pods", "are", "ready", "for", "specific", "app", ":", "param", "app_name", ":", "str", "name", "of", "the", "app", ":", "return", ":", "bool" ]
python
train
34.235294
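A hedged polling sketch built on all_pods_are_ready; "backend" is assumed to be an already-configured conu OpenShift backend instance and the app name one deployed in its project:

import time

def wait_until_ready(backend, app_name, timeout=120, interval=5):
    # Poll the method from the record until all app pods report ready.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if backend.all_pods_are_ready(app_name):
            return True
        time.sleep(interval)
    return False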
spacetelescope/synphot_refactor
synphot/spectrum.py
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L579-L606
def force_extrapolation(self): """Force the underlying model to extrapolate. An example where this is useful: You create a source spectrum with non-default extrapolation behavior and you wish to force the underlying empirical model to extrapolate based on nearest point. .. note:: This is only applicable to `~synphot.models.Empirical1D` model and should still work even if the source spectrum has been redshifted. Returns ------- is_forced : bool `True` if the model is successfully forced to be extrapolated, else `False`. """ # We use _model here in case the spectrum is redshifted. if isinstance(self._model, Empirical1D): self._model.fill_value = np.nan is_forced = True else: is_forced = False return is_forced
[ "def", "force_extrapolation", "(", "self", ")", ":", "# We use _model here in case the spectrum is redshifted.", "if", "isinstance", "(", "self", ".", "_model", ",", "Empirical1D", ")", ":", "self", ".", "_model", ".", "fill_value", "=", "np", ".", "nan", "is_forced", "=", "True", "else", ":", "is_forced", "=", "False", "return", "is_forced" ]
Force the underlying model to extrapolate. An example where this is useful: You create a source spectrum with non-default extrapolation behavior and you wish to force the underlying empirical model to extrapolate based on nearest point. .. note:: This is only applicable to `~synphot.models.Empirical1D` model and should still work even if the source spectrum has been redshifted. Returns ------- is_forced : bool `True` if the model is successfully forced to be extrapolated, else `False`.
[ "Force", "the", "underlying", "model", "to", "extrapolate", "." ]
python
train
31.964286
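Usage sketch with synphot's documented Empirical1D construction (toy data; wavelengths in the package's default Angstrom):

from synphot import SourceSpectrum
from synphot.models import Empirical1D

sp = SourceSpectrum(Empirical1D,
                    points=[1000, 2000, 3000],     # toy wavelengths
                    lookup_table=[0.1, 0.5, 0.2])  # toy flux values
sp.force_extrapolation()  # -> True; the empirical model now extrapolates (nearest point)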
ic-labs/django-icekit
icekit_events/forms.py
https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/forms.py#L99-L125
def render(self, name, value, attrs=None): """ Render the ``icekit_events/recurrence_rule_widget/render.html`` template with the following context: rendered_widgets The rendered widgets. id The ``id`` attribute from the ``attrs`` keyword argument. recurrence_rules A JSON object mapping recurrence rules to their primary keys. The default template adds JavaScript event handlers that update the ``Textarea`` and ``Select`` widgets when they are updated. """ rendered_widgets = super(RecurrenceRuleWidget, self).render( name, value, attrs) template = loader.get_template( 'icekit_events/recurrence_rule_widget/render.html') recurrence_rules = json.dumps(dict( self.queryset.values_list('pk', 'recurrence_rule'))) context = Context({ 'rendered_widgets': rendered_widgets, 'id': attrs['id'], 'recurrence_rules': recurrence_rules, }) return template.render(context)
[ "def", "render", "(", "self", ",", "name", ",", "value", ",", "attrs", "=", "None", ")", ":", "rendered_widgets", "=", "super", "(", "RecurrenceRuleWidget", ",", "self", ")", ".", "render", "(", "name", ",", "value", ",", "attrs", ")", "template", "=", "loader", ".", "get_template", "(", "'icekit_events/recurrence_rule_widget/render.html'", ")", "recurrence_rules", "=", "json", ".", "dumps", "(", "dict", "(", "self", ".", "queryset", ".", "values_list", "(", "'pk'", ",", "'recurrence_rule'", ")", ")", ")", "context", "=", "Context", "(", "{", "'rendered_widgets'", ":", "rendered_widgets", ",", "'id'", ":", "attrs", "[", "'id'", "]", ",", "'recurrence_rules'", ":", "recurrence_rules", ",", "}", ")", "return", "template", ".", "render", "(", "context", ")" ]
Render the ``icekit_events/recurrence_rule_widget/render.html`` template with the following context: rendered_widgets The rendered widgets. id The ``id`` attribute from the ``attrs`` keyword argument. recurrence_rules A JSON object mapping recurrence rules to their primary keys. The default template adds JavaScript event handlers that update the ``Textarea`` and ``Select`` widgets when they are updated.
[ "Render", "the", "icekit_events", "/", "recurrence_rule_widget", "/", "render", ".", "html", "template", "with", "the", "following", "context", ":" ]
python
train
40.333333
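Two details worth noting: Django invokes render() when the form is displayed, and despite the attrs=None default the body requires attrs to carry an 'id' key (attrs['id'] would raise TypeError otherwise). A direct-call sketch, with widget construction elided since its signature is not shown in the record:

html = widget.render(
    name='recurrence_rule',
    value=None,
    attrs={'id': 'id_recurrence_rule'},  # 'id' is mandatory for this body
)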
specialunderwear/django-easymode
easymode/utils/languagecode.py
https://github.com/specialunderwear/django-easymode/blob/92f674b91fb8c54d6e379e2664e2000872d9c95e/easymode/utils/languagecode.py#L47-L65
def get_all_language_codes(): """ Returns all language codes defined in settings.LANGUAGES and also the settings.MSGID_LANGUAGE if defined. >>> from django.conf import settings >>> settings.MSGID_LANGUAGE = 'en-us' >>> settings.LANGUAGES = (('en','English'),('de','German'),('nl-be','Belgium dutch'),('fr-be','Belgium french'),) >>> get_all_language_codes() ['en-us', 'en', 'de', 'nl-be', 'fr-be'] :rtype: A :class:`list` of language codes. """ languages = get_language_codes() if hasattr(settings, 'MSGID_LANGUAGE'): if not settings.MSGID_LANGUAGE in languages: languages.insert(0, settings.MSGID_LANGUAGE) return languages
[ "def", "get_all_language_codes", "(", ")", ":", "languages", "=", "get_language_codes", "(", ")", "if", "hasattr", "(", "settings", ",", "'MSGID_LANGUAGE'", ")", ":", "if", "not", "settings", ".", "MSGID_LANGUAGE", "in", "languages", ":", "languages", ".", "insert", "(", "0", ",", "settings", ".", "MSGID_LANGUAGE", ")", "return", "languages" ]
Returns all language codes defined in settings.LANGUAGES and also the settings.MSGID_LANGUAGE if defined. >>> from django.conf import settings >>> settings.MSGID_LANGUAGE = 'en-us' >>> settings.LANGUAGES = (('en','English'),('de','German'),('nl-be','Belgium dutch'),('fr-be','Belgium french'),) >>> get_all_language_codes() ['en-us', 'en', 'de', 'nl-be', 'fr-be'] :rtype: A :class:`list` of language codes.
[ "Returns", "all", "language", "codes", "defined", "in", "settings", ".", "LANGUAGES", "and", "also", "the", "settings", ".", "MSGID_LANGUAGE", "if", "defined", ".", ">>>", "from", "django", ".", "conf", "import", "settings", ">>>", "settings", ".", "MSGID_LANGUAGE", "=", "en", "-", "us", ">>>", "settings", ".", "LANGUAGES", "=", "((", "en", "English", ")", "(", "de", "German", ")", "(", "nl", "-", "be", "Belgium", "dutch", ")", "(", "fr", "-", "be", "Belgium", "french", ")", ")", ">>>", "sorted", "(", "get_language_codes", "()", ")", "[", "en", "-", "us", "en", "de", "nl", "-", "be", "fr", "-", "be", "]", ":", "rtype", ":", "A", ":", "class", ":", "list", "of", "language", "codes", "." ]
python
train
37.105263
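The MSGID precedence logic in isolation, with a plain list standing in for Django settings:

def _with_msgid(languages, msgid=None):
    # Mirrors the record: prepend the msgid language when configured and absent.
    if msgid is not None and msgid not in languages:
        languages.insert(0, msgid)
    return languages

_with_msgid(['en', 'de', 'nl-be', 'fr-be'], 'en-us')
# -> ['en-us', 'en', 'de', 'nl-be', 'fr-be']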
ClericPy/torequests
torequests/utils.py
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L887-L894
def tick(self): """Return the time cost string as expected.""" string = self.passed if self.rounding: string = round(string) if self.readable: string = self.readable(string) return string
[ "def", "tick", "(", "self", ")", ":", "string", "=", "self", ".", "passed", "if", "self", ".", "rounding", ":", "string", "=", "round", "(", "string", ")", "if", "self", ".", "readable", ":", "string", "=", "self", ".", "readable", "(", "string", ")", "return", "string" ]
Return the time cost string as expected.
[ "Return", "the", "time", "cost", "string", "as", "expect", "." ]
python
train
30.25
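A self-contained stand-in showing the tick() contract; torequests' own timer classes supply .passed, .rounding and .readable, which this mimics:

import time

class MiniTimer:
    def __init__(self, rounding=True, readable=None):
        self._start = time.time()
        self.rounding = rounding
        self.readable = readable

    @property
    def passed(self):
        return time.time() - self._start

    def tick(self):  # same body as the record
        string = self.passed
        if self.rounding:
            string = round(string)
        if self.readable:
            string = self.readable(string)
        return string

MiniTimer(readable=lambda s: '%ss' % s).tick()  # -> '0s' right after construction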
galaxyproject/pulsar
pulsar/client/staging/__init__.py
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/staging/__init__.py#L185-L196
def output_extras(self, output_file): """ Returns dict mapping local path to remote name. """ output_directory = dirname(output_file) def local_path(name): return join(output_directory, self.path_helper.local_name(name)) files_directory = "%s_files%s" % (basename(output_file)[0:-len(".dat")], self.path_helper.separator) names = filter(lambda o: o.startswith(files_directory), self.output_directory_contents) return dict(map(lambda name: (local_path(name), name), names))
[ "def", "output_extras", "(", "self", ",", "output_file", ")", ":", "output_directory", "=", "dirname", "(", "output_file", ")", "def", "local_path", "(", "name", ")", ":", "return", "join", "(", "output_directory", ",", "self", ".", "path_helper", ".", "local_name", "(", "name", ")", ")", "files_directory", "=", "\"%s_files%s\"", "%", "(", "basename", "(", "output_file", ")", "[", "0", ":", "-", "len", "(", "\".dat\"", ")", "]", ",", "self", ".", "path_helper", ".", "separator", ")", "names", "=", "filter", "(", "lambda", "o", ":", "o", ".", "startswith", "(", "files_directory", ")", ",", "self", ".", "output_directory_contents", ")", "return", "dict", "(", "map", "(", "lambda", "name", ":", "(", "local_path", "(", "name", ")", ",", "name", ")", ",", "names", ")", ")" ]
Returns dict mapping local path to remote name.
[ "Returns", "dict", "mapping", "local", "path", "to", "remote", "name", "." ]
python
train
44.916667
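The same naming convention with plain os.path and no Pulsar objects (illustrative paths):

from os.path import basename, dirname, join

def output_extras(output_file, contents, sep='/'):
    # For "out.dat", collect entries under "out_files/" and map
    # local absolute path -> remote relative name.
    files_directory = '%s_files%s' % (basename(output_file)[:-len('.dat')], sep)
    names = [n for n in contents if n.startswith(files_directory)]
    return {join(dirname(output_file), n): n for n in names}

output_extras('/work/out.dat', ['out_files/extra.txt', 'other.log'])
# -> {'/work/out_files/extra.txt': 'out_files/extra.txt'}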
axialmarket/fsq
libexec/fsq/push.py
https://github.com/axialmarket/fsq/blob/43b84c292cb8a187599d86753b947cf73248f989/libexec/fsq/push.py#L26-L41
def usage(asked_for=0): '''Exit with a usage string, used for bad argument or with -h''' exit = fsq.const('FSQ_SUCCESS') if asked_for else\ fsq.const('FSQ_FAIL_PERM') f = sys.stdout if asked_for else sys.stderr shout('{0} [opts] src_queue trg_queue host item_id [item_id [...]]'.format( os.path.basename(_PROG)), f) if asked_for: shout('{0} [-p|--protocol=jsonrpc] [-L|--no-lock] [-t|--trigger] '\ '[-i|--ignore-listener] <proto>://<host>:<port>/url'\ .format(os.path.basename(_PROG)), f) shout('{0} [-p|--protocol=jsonrpc] [-L|--no-lock] [-t|--trigger]'\ '[-i|--ignore-listener] unix://var/sock/foo.sock'\ .format(os.path.basename(_PROG)), f) shout(' src_queue trg_queue host_queue item [item [...]]', f) return exit
[ "def", "usage", "(", "asked_for", "=", "0", ")", ":", "exit", "=", "fsq", ".", "const", "(", "'FSQ_SUCCESS'", ")", "if", "asked_for", "else", "fsq", ".", "const", "(", "'FSQ_FAIL_PERM'", ")", "f", "=", "sys", ".", "stdout", "if", "asked_for", "else", "sys", ".", "stderr", "shout", "(", "'{0} [opts] src_queue trg_queue host item_id [item_id [...]]'", ".", "format", "(", "os", ".", "path", ".", "basename", "(", "_PROG", ")", ")", ",", "f", ")", "if", "asked_for", ":", "shout", "(", "'{0} [-p|--protocol=jsonrpc] [-L|--no-lock] [-t|--trigger] '", "'[-i|--ignore-listener] <proto>://<host>:<port>/url'", ".", "format", "(", "os", ".", "path", ".", "basename", "(", "_PROG", ")", ")", ",", "f", ")", "shout", "(", "'{0} [-p|--protocol=jsonrpc] [-L|--no-lock] [-t|--trigger]'", "'[-i|--ignore-listener] unix://var/sock/foo.sock'", ".", "format", "(", "os", ".", "path", ".", "basename", "(", "_PROG", ")", ")", ",", "f", ")", "shout", "(", "' src_queue trg_queue host_queue item [item [...]]'", ",", "f", ")", "return", "exit" ]
Exit with a usage string, used for bad argument or with -h
[ "Exit", "with", "a", "usage", "string", "used", "for", "bad", "argument", "or", "with", "-", "h" ]
python
train
52.5
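Call sketch, assuming the module context above (shout, fsq, _PROG):

import sys
sys.exit(usage(asked_for=1))  # long help to stdout, exits FSQ_SUCCESS
# bad-argument path: sys.exit(usage())  -> short usage to stderr, FSQ_FAIL_PERM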
tanghaibao/goatools
goatools/obo_parser.py
https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/obo_parser.py#L237-L243
def get_all_parent_edges(self): """Return tuples for all parent GO IDs, containing current GO ID and parent GO ID.""" all_parent_edges = set() for parent in self.parents: all_parent_edges.add((self.item_id, parent.item_id)) all_parent_edges |= parent.get_all_parent_edges() return all_parent_edges
[ "def", "get_all_parent_edges", "(", "self", ")", ":", "all_parent_edges", "=", "set", "(", ")", "for", "parent", "in", "self", ".", "parents", ":", "all_parent_edges", ".", "add", "(", "(", "self", ".", "item_id", ",", "parent", ".", "item_id", ")", ")", "all_parent_edges", "|=", "parent", ".", "get_all_parent_edges", "(", ")", "return", "all_parent_edges" ]
Return tuples for all parent GO IDs, containing current GO ID and parent GO ID.
[ "Return", "tuples", "for", "all", "parent", "GO", "IDs", "containing", "current", "GO", "ID", "and", "parent", "GO", "ID", "." ]
python
train
49.571429
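The recursion on a toy DAG, with a stand-in class (real GOTerm objects come from parsing an OBO file; the GO IDs below are illustrative):

class Term:
    def __init__(self, item_id, parents=()):
        self.item_id = item_id
        self.parents = list(parents)

    def get_all_parent_edges(self):  # same logic as the record
        edges = set()
        for parent in self.parents:
            edges.add((self.item_id, parent.item_id))
            edges |= parent.get_all_parent_edges()
        return edges

root = Term('GO:0008150')
mid = Term('GO:0009987', [root])
Term('GO:0007049', [mid]).get_all_parent_edges()
# -> {('GO:0007049', 'GO:0009987'), ('GO:0009987', 'GO:0008150')}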
gitpython-developers/GitPython
git/util.py
https://github.com/gitpython-developers/GitPython/blob/1f66e25c25cde2423917ee18c4704fff83b837d1/git/util.py#L619-L627
def committer(cls, config_reader=None): """ :return: Actor instance corresponding to the configured committer. It behaves similar to the git implementation, such that the environment will override configuration values of config_reader. If no value is set at all, it will be generated :param config_reader: ConfigReader to use to retrieve the values from in case they are not set in the environment""" return cls._main_actor(cls.env_committer_name, cls.env_committer_email, config_reader)
[ "def", "committer", "(", "cls", ",", "config_reader", "=", "None", ")", ":", "return", "cls", ".", "_main_actor", "(", "cls", ".", "env_committer_name", ",", "cls", ".", "env_committer_email", ",", "config_reader", ")" ]
:return: Actor instance corresponding to the configured committer. It behaves similar to the git implementation, such that the environment will override configuration values of config_reader. If no value is set at all, it will be generated :param config_reader: ConfigReader to use to retrieve the values from in case they are not set in the environment
[ ":", "return", ":", "Actor", "instance", "corresponding", "to", "the", "configured", "committer", ".", "It", "behaves", "similar", "to", "the", "git", "implementation", "such", "that", "the", "environment", "will", "override", "configuration", "values", "of", "config_reader", ".", "If", "no", "value", "is", "set", "at", "all", "it", "will", "be", "generated", ":", "param", "config_reader", ":", "ConfigReader", "to", "use", "to", "retrieve", "the", "values", "from", "in", "case", "they", "are", "not", "set", "in", "the", "environment" ]
python
train
62.111111
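Usage sketch with GitPython's public API; with no reader the method falls back to the GIT_COMMITTER_* environment variables or a generated default:

from git import Actor, Repo

committer = Actor.committer()                      # env / generated fallback
repo = Repo('.')                                   # assumes cwd is inside a git repo
committer = Actor.committer(repo.config_reader())  # env still overrides git config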
saltstack/salt
salt/modules/salt_version.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/salt_version.py#L151-L170
def _check_release_cmp(name): ''' Helper function to compare release codename versions to the minion's current Salt version. If release codename isn't found, the function returns None. Otherwise, it returns the results of the version comparison as documented by the ``versions_cmp`` function in ``salt.utils.versions.py``. ''' map_version = get_release_number(name) if map_version is None: log.info('Release codename %s was not found.', name) return None current_version = six.text_type(salt.version.SaltStackVersion( *salt.version.__version_info__)) current_version = current_version.rsplit('.', 1)[0] version_cmp = salt.utils.versions.version_cmp(map_version, current_version) return version_cmp
[ "def", "_check_release_cmp", "(", "name", ")", ":", "map_version", "=", "get_release_number", "(", "name", ")", "if", "map_version", "is", "None", ":", "log", ".", "info", "(", "'Release codename %s was not found.'", ",", "name", ")", "return", "None", "current_version", "=", "six", ".", "text_type", "(", "salt", ".", "version", ".", "SaltStackVersion", "(", "*", "salt", ".", "version", ".", "__version_info__", ")", ")", "current_version", "=", "current_version", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "version_cmp", "=", "salt", ".", "utils", ".", "versions", ".", "version_cmp", "(", "map_version", ",", "current_version", ")", "return", "version_cmp" ]
Helper function to compare release codename versions to the minion's current Salt version. If release codename isn't found, the function returns None. Otherwise, it returns the results of the version comparison as documented by the ``versions_cmp`` function in ``salt.utils.versions.py``.
[ "Helper", "function", "to", "compare", "release", "codename", "versions", "to", "the", "minion", "s", "current", "Salt", "version", "." ]
python
train
37.85
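Interpreting the result (version_cmp returns -1, 0 or 1; the codename below is illustrative):

cmp_result = _check_release_cmp('neon')
if cmp_result is None:
    status = 'unknown codename'
elif cmp_result <= 0:  # named release is at or before the minion's version
    status = 'minion is at or beyond this release'
else:
    status = 'release is newer than this minion'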
cisco-sas/kitty
kitty/model/low_level/container.py
https://github.com/cisco-sas/kitty/blob/cb0760989dcdfe079e43ac574d872d0b18953a32/kitty/model/low_level/container.py#L964-L969
def reset(self): ''' Reset the state of the container and its internal fields ''' super(TakeFrom, self).reset() self.random.seed(self.seed * self.max_elements + self.min_elements)
[ "def", "reset", "(", "self", ")", ":", "super", "(", "TakeFrom", ",", "self", ")", ".", "reset", "(", ")", "self", ".", "random", ".", "seed", "(", "self", ".", "seed", "*", "self", ".", "max_elements", "+", "self", ".", "min_elements", ")" ]
Reset the state of the container and its internal fields
[ "Reset", "the", "state", "of", "the", "container", "and", "its", "internal", "fields" ]
python
train
35.666667
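The derived seed makes the shuffle order reproducible per configuration: two containers built with the same seed and element bounds replay the same sequence after reset(). The derivation in isolation (toy numbers):

import random

rng = random.Random()
rng.seed(1234 * 5 + 1)    # seed * max_elements + min_elements, as in reset()
rng.sample(range(10), 3)  # deterministic given the derived seed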
inveniosoftware/invenio-deposit
invenio_deposit/serializers.py
https://github.com/inveniosoftware/invenio-deposit/blob/f243ea1d01ab0a3bc92ade3262d1abdd2bc32447/invenio_deposit/serializers.py#L32-L48
def json_serializer(pid, data, *args): """Build a JSON Flask response using the given data. :param pid: The `invenio_pidstore.models.PersistentIdentifier` of the record. :param data: The record metadata. :returns: A Flask response with JSON data. :rtype: :py:class:`flask.Response`. """ if data is not None: response = Response( json.dumps(data.dumps()), mimetype='application/json' ) else: response = Response(mimetype='application/json') return response
[ "def", "json_serializer", "(", "pid", ",", "data", ",", "*", "args", ")", ":", "if", "data", "is", "not", "None", ":", "response", "=", "Response", "(", "json", ".", "dumps", "(", "data", ".", "dumps", "(", ")", ")", ",", "mimetype", "=", "'application/json'", ")", "else", ":", "response", "=", "Response", "(", "mimetype", "=", "'application/json'", ")", "return", "response" ]
Build a JSON Flask response using the given data. :param pid: The `invenio_pidstore.models.PersistentIdentifier` of the record. :param data: The record metadata. :returns: A Flask response with JSON data. :rtype: :py:class:`flask.Response`.
[ "Build", "a", "JSON", "Flask", "response", "using", "the", "given", "data", "." ]
python
valid
31.294118
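Call sketch; "record" is any object exposing .dumps() (as Invenio records do) and "pid" is accepted but unused by this body:

resp = json_serializer(pid, record)
resp.mimetype                      # 'application/json'
empty = json_serializer(pid, None)  # empty-body JSON response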
coded-by-hand/mass
env/lib/python2.7/site-packages/pip-1.0.2-py2.7.egg/pip/download.py
https://github.com/coded-by-hand/mass/blob/59005479efed3cd8598a8f0c66791a4482071899/env/lib/python2.7/site-packages/pip-1.0.2-py2.7.egg/pip/download.py#L255-L276
def geturl(urllib2_resp): """ Use instead of urllib.addinfourl.geturl(), which appears to have some issues with dropping the double slash for certain schemes (e.g. file://). This implementation is probably over-eager, as it always restores '://' if it is missing, and it appears some url schemata aren't always followed by '//' after the colon, but as far as I know pip doesn't need any of those. The URI RFC can be found at: http://tools.ietf.org/html/rfc1630 This function assumes that scheme:/foo/bar is the same as scheme:///foo/bar """ url = urllib2_resp.geturl() scheme, rest = url.split(':', 1) if rest.startswith('//'): return url else: # FIXME: write a good test to cover it return '%s://%s' % (scheme, rest)
[ "def", "geturl", "(", "urllib2_resp", ")", ":", "url", "=", "urllib2_resp", ".", "geturl", "(", ")", "scheme", ",", "rest", "=", "url", ".", "split", "(", "':'", ",", "1", ")", "if", "rest", ".", "startswith", "(", "'//'", ")", ":", "return", "url", "else", ":", "# FIXME: write a good test to cover it", "return", "'%s://%s'", "%", "(", "scheme", ",", "rest", ")" ]
Use instead of urllib.addinfourl.geturl(), which appears to have some issues with dropping the double slash for certain schemes (e.g. file://). This implementation is probably over-eager, as it always restores '://' if it is missing, and it appears some url schemata aren't always followed by '//' after the colon, but as far as I know pip doesn't need any of those. The URI RFC can be found at: http://tools.ietf.org/html/rfc1630 This function assumes that scheme:/foo/bar is the same as scheme:///foo/bar
[ "Use", "instead", "of", "urllib", ".", "addinfourl", ".", "geturl", "()", "which", "appears", "to", "have", "some", "issues", "with", "dropping", "the", "double", "slash", "for", "certain", "schemes", "(", "e", ".", "g", ".", "file", ":", "//", ")", ".", "This", "implementation", "is", "probably", "over", "-", "eager", "as", "it", "always", "restores", ":", "//", "if", "it", "is", "missing", "and", "it", "appears", "some", "url", "schemata", "aren", "t", "always", "followed", "by", "//", "after", "the", "colon", "but", "as", "far", "as", "I", "know", "pip", "doesn", "t", "need", "any", "of", "those", ".", "The", "URI", "RFC", "can", "be", "found", "at", ":", "http", ":", "//", "tools", ".", "ietf", ".", "org", "/", "html", "/", "rfc1630" ]
python
train
36.272727
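Behavior sketch with a minimal stand-in response, since urllib2 responses are awkward to fabricate directly:

class FakeResp:
    def __init__(self, url):
        self._url = url
    def geturl(self):
        return self._url

geturl(FakeResp('file:/tmp/pkg'))  # -> 'file:///tmp/pkg' ('//' restored)
geturl(FakeResp('https://x/y'))    # -> 'https://x/y' (unchanged)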
ray-project/ray
python/ray/rllib/agents/impala/vtrace.py
https://github.com/ray-project/ray/blob/4eade036a0505e244c976f36aaa2d64386b5129b/python/ray/rllib/agents/impala/vtrace.py#L389-L395
def get_log_rhos(target_action_log_probs, behaviour_action_log_probs): """With the selected log_probs for multi-discrete actions of behaviour and target policies we compute the log_rhos for calculating the vtrace.""" t = tf.stack(target_action_log_probs) b = tf.stack(behaviour_action_log_probs) log_rhos = tf.reduce_sum(t - b, axis=0) return log_rhos
[ "def", "get_log_rhos", "(", "target_action_log_probs", ",", "behaviour_action_log_probs", ")", ":", "t", "=", "tf", ".", "stack", "(", "target_action_log_probs", ")", "b", "=", "tf", ".", "stack", "(", "behaviour_action_log_probs", ")", "log_rhos", "=", "tf", ".", "reduce_sum", "(", "t", "-", "b", ",", "axis", "=", "0", ")", "return", "log_rhos" ]
With the selected log_probs for multi-discrete actions of behaviour and target policies we compute the log_rhos for calculating the vtrace.
[ "With", "the", "selected", "log_probs", "for", "multi", "-", "discrete", "actions", "of", "behaviour", "and", "target", "policies", "we", "compute", "the", "log_rhos", "for", "calculating", "the", "vtrace", "." ]
python
train
52.714286
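In math terms this computes log rho_t = log pi(a_t|s_t) - log mu(a_t|s_t), summed over the components of a multi-discrete action. A toy check, assuming this module's get_log_rhos is imported (TF1-style tensors; also runs eagerly on TF2):

import tensorflow as tf

target = [tf.constant([-0.1, -0.2]), tf.constant([-0.3, -0.1])]  # 2 components, T=2
behaviour = [tf.constant([-0.2, -0.2]), tf.constant([-0.3, -0.4])]
log_rhos = get_log_rhos(target, behaviour)  # elementwise sums: [0.1, 0.3]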
chrisspen/burlap
burlap/common.py
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/common.py#L981-L988
def local_renderer(self): """ Retrieves the cached local renderer. """ if not self._local_renderer: r = self.create_local_renderer() self._local_renderer = r return self._local_renderer
[ "def", "local_renderer", "(", "self", ")", ":", "if", "not", "self", ".", "_local_renderer", ":", "r", "=", "self", ".", "create_local_renderer", "(", ")", "self", ".", "_local_renderer", "=", "r", "return", "self", ".", "_local_renderer" ]
Retrieves the cached local renderer.
[ "Retrieves", "the", "cached", "local", "renderer", "." ]
python
valid
30.25
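One caveat worth noting: the truthiness test rebuilds a cached renderer that happens to be falsy. An identity-safe variant of the same property, assuming _local_renderer starts as None:

@property
def local_renderer(self):
    if self._local_renderer is None:  # identity check, not truthiness
        self._local_renderer = self.create_local_renderer()
    return self._local_renderer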
brocade/pynos
pynos/versions/base/yang/brocade_sflow.py
https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/base/yang/brocade_sflow.py#L83-L92
def sflow_profile_profile_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") sflow_profile = ET.SubElement(config, "sflow-profile", xmlns="urn:brocade.com:mgmt:brocade-sflow") profile_name = ET.SubElement(sflow_profile, "profile-name") profile_name.text = kwargs.pop('profile_name') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "sflow_profile_profile_name", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "sflow_profile", "=", "ET", ".", "SubElement", "(", "config", ",", "\"sflow-profile\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-sflow\"", ")", "profile_name", "=", "ET", ".", "SubElement", "(", "sflow_profile", ",", "\"profile-name\"", ")", "profile_name", ".", "text", "=", "kwargs", ".", "pop", "(", "'profile_name'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
Auto Generated Code
[ "Auto", "Generated", "Code" ]
python
train
44
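Call shape and the XML it assembles before handing off to the callback; "device" stands for whatever object exposes this auto-generated method, and the profile name is illustrative:

device.sflow_profile_profile_name(profile_name='sflow-1')
# builds and sends:
# <config>
#   <sflow-profile xmlns="urn:brocade.com:mgmt:brocade-sflow">
#     <profile-name>sflow-1</profile-name>
#   </sflow-profile>
# </config>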
Clinical-Genomics/scout
scout/adapter/mongo/query.py
https://github.com/Clinical-Genomics/scout/blob/90a551e2e1653a319e654c2405c2866f93d0ebb9/scout/adapter/mongo/query.py#L301-L331
def gene_filter(self, query, mongo_query): """ Adds gene-related filters to the query object Args: query(dict): a dictionary of query filters specified by the users mongo_query(dict): the query that is going to be submitted to the database Returns: gene_query(list): gene and panel-related filter clauses; these are also merged into mongo_query in place """ LOG.debug('Adding panel and genes-related parameters to the query') gene_query = [] if query.get('hgnc_symbols') and query.get('gene_panels'): gene_query.append({'hgnc_symbols': {'$in': query['hgnc_symbols']}}) gene_query.append({'panels': {'$in': query['gene_panels']}}) mongo_query['$or'] = gene_query else: if query.get('hgnc_symbols'): hgnc_symbols = query['hgnc_symbols'] mongo_query['hgnc_symbols'] = {'$in': hgnc_symbols} LOG.debug("Adding hgnc_symbols: %s to query" % ', '.join(hgnc_symbols)) if query.get('gene_panels'): gene_panels = query['gene_panels'] mongo_query['panels'] = {'$in': gene_panels} return gene_query
[ "def", "gene_filter", "(", "self", ",", "query", ",", "mongo_query", ")", ":", "LOG", ".", "debug", "(", "'Adding panel and genes-related parameters to the query'", ")", "gene_query", "=", "[", "]", "if", "query", ".", "get", "(", "'hgnc_symbols'", ")", "and", "query", ".", "get", "(", "'gene_panels'", ")", ":", "gene_query", ".", "append", "(", "{", "'hgnc_symbols'", ":", "{", "'$in'", ":", "query", "[", "'hgnc_symbols'", "]", "}", "}", ")", "gene_query", ".", "append", "(", "{", "'panels'", ":", "{", "'$in'", ":", "query", "[", "'gene_panels'", "]", "}", "}", ")", "mongo_query", "[", "'$or'", "]", "=", "gene_query", "else", ":", "if", "query", ".", "get", "(", "'hgnc_symbols'", ")", ":", "hgnc_symbols", "=", "query", "[", "'hgnc_symbols'", "]", "mongo_query", "[", "'hgnc_symbols'", "]", "=", "{", "'$in'", ":", "hgnc_symbols", "}", "LOG", ".", "debug", "(", "\"Adding hgnc_symbols: %s to query\"", "%", "', '", ".", "join", "(", "hgnc_symbols", ")", ")", "if", "query", ".", "get", "(", "'gene_panels'", ")", ":", "gene_panels", "=", "query", "[", "'gene_panels'", "]", "mongo_query", "[", "'panels'", "]", "=", "{", "'$in'", ":", "gene_panels", "}", "return", "gene_query" ]
Adds gene-related filters to the query object Args: query(dict): a dictionary of query filters specified by the users mongo_query(dict): the query that is going to be submitted to the database Returns: gene_query(list): gene and panel-related filter clauses; these are also merged into mongo_query in place
[ "Adds", "gene", "-", "related", "filters", "to", "the", "query", "object" ]
python
test
38.935484
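Worked example of the combined branch, with plain dicts and an assumed MongoAdapter instance:

query = {'hgnc_symbols': ['POT1'], 'gene_panels': ['cancer']}
mongo_query = {}
adapter.gene_filter(query, mongo_query)  # 'adapter': an assumed MongoAdapter
# mongo_query == {'$or': [{'hgnc_symbols': {'$in': ['POT1']}},
#                         {'panels': {'$in': ['cancer']}}]}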
dnanexus/dx-toolkit
src/python/dxpy/bindings/dxdataobject_functions.py
https://github.com/dnanexus/dx-toolkit/blob/74befb53ad90fcf902d8983ae6d74580f402d619/src/python/dxpy/bindings/dxdataobject_functions.py#L176-L229
def describe(id_or_link, **kwargs): ''' :param id_or_link: String containing an object ID or dict containing a DXLink, or a list of object IDs or dicts containing a DXLink. Given an object ID, calls :meth:`~dxpy.bindings.DXDataObject.describe` on the object. Example:: describe("file-1234") Given a list of object IDs, calls :meth:`~dxpy.api.system_describe_data_objects`. Example:: describe(["file-1234", "workflow-5678"]) Note: If id_or_link is a list and **kwargs contains a "fields" parameter, these fields will be returned in the response for each data object in addition to the fields included by default. Additionally, describe options can be provided for each data object class in the "classDescribeOptions" kwargs argument. See https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeDataObjects for input parameters used with the multiple object describe method. ''' # If this is a list, extract the ids. # TODO: modify the procedure to use project ID when possible if isinstance(id_or_link, basestring) or is_dxlink(id_or_link): handler = get_handler(id_or_link) return handler.describe(**kwargs) else: links = [] for link in id_or_link: # If this entry is a dxlink, then get the id. if is_dxlink(link): # Guaranteed by is_dxlink that one of the following will work if isinstance(link['$dnanexus_link'], basestring): link = link['$dnanexus_link'] else: link = link['$dnanexus_link']['id'] links.append(link) # Prepare input to system_describe_data_objects, the same fields will be passed # for all data object classes; if a class doesn't include a field in its describe # output, it will be ignored describe_input = \ dict([(field, True) for field in kwargs['fields']]) if kwargs.get('fields', []) else True describe_links_input = [{'id': link, 'describe': describe_input} for link in links] bulk_describe_input = {'objects': describe_links_input} if 'classDescribeOptions' in kwargs: bulk_describe_input['classDescribeOptions'] = kwargs['classDescribeOptions'] data_object_descriptions = dxpy.api.system_describe_data_objects(bulk_describe_input) return [desc['describe'] for desc in data_object_descriptions['results']]
[ "def", "describe", "(", "id_or_link", ",", "*", "*", "kwargs", ")", ":", "# If this is a list, extract the ids.", "# TODO: modify the procedure to use project ID when possible", "if", "isinstance", "(", "id_or_link", ",", "basestring", ")", "or", "is_dxlink", "(", "id_or_link", ")", ":", "handler", "=", "get_handler", "(", "id_or_link", ")", "return", "handler", ".", "describe", "(", "*", "*", "kwargs", ")", "else", ":", "links", "=", "[", "]", "for", "link", "in", "id_or_link", ":", "# If this entry is a dxlink, then get the id.", "if", "is_dxlink", "(", "link", ")", ":", "# Guaranteed by is_dxlink that one of the following will work", "if", "isinstance", "(", "link", "[", "'$dnanexus_link'", "]", ",", "basestring", ")", ":", "link", "=", "link", "[", "'$dnanexus_link'", "]", "else", ":", "link", "=", "link", "[", "'$dnanexus_link'", "]", "[", "'id'", "]", "links", ".", "append", "(", "link", ")", "# Prepare input to system_describe_data_objects, the same fields will be passed", "# for all data object classes; if a class doesn't include a field in its describe", "# output, it will be ignored", "describe_input", "=", "dict", "(", "[", "(", "field", ",", "True", ")", "for", "field", "in", "kwargs", "[", "'fields'", "]", "]", ")", "if", "kwargs", ".", "get", "(", "'fields'", ",", "[", "]", ")", "else", "True", "describe_links_input", "=", "[", "{", "'id'", ":", "link", ",", "'describe'", ":", "describe_input", "}", "for", "link", "in", "links", "]", "bulk_describe_input", "=", "{", "'objects'", ":", "describe_links_input", "}", "if", "'classDescribeOptions'", "in", "kwargs", ":", "bulk_describe_input", "[", "'classDescribeOptions'", "]", "=", "kwargs", "[", "'classDescribeOptions'", "]", "data_object_descriptions", "=", "dxpy", ".", "api", ".", "system_describe_data_objects", "(", "bulk_describe_input", ")", "return", "[", "desc", "[", "'describe'", "]", "for", "desc", "in", "data_object_descriptions", "[", "'results'", "]", "]" ]
:param id_or_link: String containing an object ID or dict containing a DXLink, or a list of object IDs or dicts containing a DXLink. Given an object ID, calls :meth:`~dxpy.bindings.DXDataObject.describe` on the object. Example:: describe("file-1234") Given a list of object IDs, calls :meth:`~dxpy.api.system_describe_data_objects`. Example:: describe(["file-1234", "workflow-5678"]) Note: If id_or_link is a list and **kwargs contains a "fields" parameter, these fields will be returned in the response for each data object in addition to the fields included by default. Additionally, describe options can be provided for each data object class in the "classDescribeOptions" kwargs argument. See https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeDataObjects for input parameters used with the multiple object describe method.
[ ":", "param", "id_or_link", ":", "String", "containing", "an", "object", "ID", "or", "dict", "containing", "a", "DXLink", "or", "a", "list", "of", "object", "IDs", "or", "dicts", "containing", "a", "DXLink", "." ]
python
train
46.037037
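Usage sketch; this function is exposed as dxpy.describe, the IDs are illustrative, and a configured DNAnexus session is assumed:

import dxpy

one = dxpy.describe('file-1234')                      # single object describe
many = dxpy.describe(['file-1234', 'workflow-5678'],  # bulk system describe
                     fields=['name', 'state'])        # extra fields per object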
PyGithub/PyGithub
github/Project.py
https://github.com/PyGithub/PyGithub/blob/f716df86bbe7dc276c6596699fa9712b61ef974c/github/Project.py#L160-L174
def create_column(self, name): """ calls: `POST https://developer.github.com/v3/projects/columns/#create-a-project-column>`_ :param name: string """ assert isinstance(name, (str, unicode)), name post_parameters = {"name": name} import_header = {"Accept": Consts.mediaTypeProjectsPreview} headers, data = self._requester.requestJsonAndCheck( "POST", self.url + "/columns", headers=import_header, input=post_parameters ) return github.ProjectColumn.ProjectColumn(self._requester, headers, data, completed=True)
[ "def", "create_column", "(", "self", ",", "name", ")", ":", "assert", "isinstance", "(", "name", ",", "(", "str", ",", "unicode", ")", ")", ",", "name", "post_parameters", "=", "{", "\"name\"", ":", "name", "}", "import_header", "=", "{", "\"Accept\"", ":", "Consts", ".", "mediaTypeProjectsPreview", "}", "headers", ",", "data", "=", "self", ".", "_requester", ".", "requestJsonAndCheck", "(", "\"POST\"", ",", "self", ".", "url", "+", "\"/columns\"", ",", "headers", "=", "import_header", ",", "input", "=", "post_parameters", ")", "return", "github", ".", "ProjectColumn", ".", "ProjectColumn", "(", "self", ".", "_requester", ",", "headers", ",", "data", ",", "completed", "=", "True", ")" ]
calls: `POST https://developer.github.com/v3/projects/columns/#create-a-project-column>`_ :param name: string
[ "calls", ":", "POST", "https", ":", "//", "developer", ".", "github", ".", "com", "/", "v3", "/", "projects", "/", "columns", "/", "#create", "-", "a", "-", "project", "-", "column", ">", "_", ":", "param", "name", ":", "string" ]
python
train
41.466667
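Usage sketch with PyGithub; the token, repository and column name are illustrative:

from github import Github

g = Github('access_token')
project = g.get_repo('owner/repo').get_projects()[0]  # assumes the repo has a project
column = project.create_column('To do')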
aws/aws-xray-sdk-python
aws_xray_sdk/core/recorder.py
https://github.com/aws/aws-xray-sdk-python/blob/707358cd3a516d51f2ebf71cf34f00e8d906a667/aws_xray_sdk/core/recorder.py#L383-L392
def stream_subsegments(self): """ Stream all closed subsegments to the daemon and remove reference to the parent segment. No-op for a not sampled segment. """ segment = self.current_segment() if self.streaming.is_eligible(segment): self.streaming.stream(segment, self._stream_subsegment_out)
[ "def", "stream_subsegments", "(", "self", ")", ":", "segment", "=", "self", ".", "current_segment", "(", ")", "if", "self", ".", "streaming", ".", "is_eligible", "(", "segment", ")", ":", "self", ".", "streaming", ".", "stream", "(", "segment", ",", "self", ".", "_stream_subsegment_out", ")" ]
Stream all closed subsegments to the daemon and remove reference to the parent segment. No-op for a not sampled segment.
[ "Stream", "all", "closed", "subsegments", "to", "the", "daemon", "and", "remove", "reference", "to", "the", "parent", "segment", ".", "No", "-", "op", "for", "a", "not", "sampled", "segment", "." ]
python
train
35.1
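Call sketch with the SDK's global recorder (segment name illustrative; streaming only happens once the segment crosses the configured eligibility threshold):

from aws_xray_sdk.core import xray_recorder

xray_recorder.begin_segment('my-service')
# ... open and close many subsegments here ...
xray_recorder.stream_subsegments()  # no-op for unsampled segments
xray_recorder.end_segment()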