Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
get_mx_hosts
(email)
Caching the result in MX_DNS_CACHE to improve performance.
Caching the result in MX_DNS_CACHE to improve performance.
async def get_mx_hosts(email):
    """Return the MX hosts for *email*'s domain.

    Results are cached in ``MX_DNS_CACHE`` so repeated lookups for the
    same domain skip the DNS query.
    """
    # Domain is everything after the first '@'.
    hostname = email[email.find('@') + 1:]
    if hostname in MX_DNS_CACHE:
        mx_hosts = MX_DNS_CACHE[hostname]
    else:
        mx_hosts = await get_mx_ip(hostname)
        # Fix: the docstring promises caching, but this function never
        # stored the result.  Populate the cache on a miss (get_mx_ip may
        # also populate it elsewhere — storing here is idempotent).
        MX_DNS_CACHE[hostname] = mx_hosts
    return mx_hosts
[ "async", "def", "get_mx_hosts", "(", "email", ")", ":", "hostname", "=", "email", "[", "email", ".", "find", "(", "'@'", ")", "+", "1", ":", "]", "if", "hostname", "in", "MX_DNS_CACHE", ":", "mx_hosts", "=", "MX_DNS_CACHE", "[", "hostname", "]", "else", ":", "mx_hosts", "=", "await", "get_mx_ip", "(", "hostname", ")", "return", "mx_hosts" ]
[ 50, 0 ]
[ 58, 19 ]
python
en
['en', 'en', 'en']
True
_verify_email
(email, timeout=None, verify=True)
Validate email by syntax check, domain check and handler check.
Validate email by syntax check, domain check and handler check.
async def _verify_email(email, timeout=None, verify=True):
    """Validate email by syntax check, domain check and handler check.

    :param email: address to validate.
    :param timeout: passed through to ``handler_verify``.
    :param verify: when False, only the syntax check is performed.
    :return: False on failure; True when only syntax was requested and it
        passed; otherwise the result of ``handler_verify``.
    """
    is_valid_syntax = await syntax_check(email)
    if not is_valid_syntax:
        return False
    if not verify:
        # Fix: the original fell off the end and returned None here even
        # though the requested (syntax-only) validation succeeded.
        return True
    mx_hosts = await get_mx_hosts(email)
    if mx_hosts is None:
        return False
    return await handler_verify(mx_hosts, email, timeout)
[ "async", "def", "_verify_email", "(", "email", ",", "timeout", "=", "None", ",", "verify", "=", "True", ")", ":", "is_valid_syntax", "=", "await", "syntax_check", "(", "email", ")", "if", "is_valid_syntax", ":", "if", "verify", ":", "mx_hosts", "=", "await", "get_mx_hosts", "(", "email", ")", "if", "mx_hosts", "is", "None", ":", "return", "False", "else", ":", "return", "await", "handler_verify", "(", "mx_hosts", ",", "email", ",", "timeout", ")", "else", ":", "return", "False" ]
[ 76, 0 ]
[ 88, 20 ]
python
en
['en', 'en', 'en']
True
getmode
(mode)
Gets a mode descriptor for the given mode.
Gets a mode descriptor for the given mode.
def getmode(mode):
    """Gets a mode descriptor for the given mode."""
    global _modes
    if not _modes:
        # Lazily build the descriptor table on first use.
        base_info = {
            # core modes
            "1": ("L", "L", ("1",)),
            "L": ("L", "L", ("L",)),
            "I": ("L", "I", ("I",)),
            "F": ("L", "F", ("F",)),
            "P": ("P", "L", ("P",)),
            "RGB": ("RGB", "L", ("R", "G", "B")),
            "RGBX": ("RGB", "L", ("R", "G", "B", "X")),
            "RGBA": ("RGB", "L", ("R", "G", "B", "A")),
            "CMYK": ("RGB", "L", ("C", "M", "Y", "K")),
            "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")),
            "LAB": ("RGB", "L", ("L", "A", "B")),
            "HSV": ("RGB", "L", ("H", "S", "V")),
            # extra experimental modes
            "RGBa": ("RGB", "L", ("R", "G", "B", "a")),
            "LA": ("L", "L", ("L", "A")),
            "La": ("L", "L", ("L", "a")),
            "PA": ("RGB", "L", ("P", "A")),
        }
        table = {}
        for name, (basemode, basetype, bands) in base_info.items():
            table[name] = ModeDescriptor(name, bands, basemode, basetype)
        # All 16-bit mapping modes share the same single-band layout.
        for i16mode in (
            "I;16",
            "I;16S",
            "I;16L",
            "I;16LS",
            "I;16B",
            "I;16BS",
            "I;16N",
            "I;16NS",
        ):
            table[i16mode] = ModeDescriptor(i16mode, ("I",), "L", "L")
        # Publish the fully built table in one assignment so concurrent
        # readers never observe a partially filled cache.
        _modes = table
    return _modes[mode]
[ "def", "getmode", "(", "mode", ")", ":", "global", "_modes", "if", "not", "_modes", ":", "# initialize mode cache", "modes", "=", "{", "}", "for", "m", ",", "(", "basemode", ",", "basetype", ",", "bands", ")", "in", "{", "# core modes", "\"1\"", ":", "(", "\"L\"", ",", "\"L\"", ",", "(", "\"1\"", ",", ")", ")", ",", "\"L\"", ":", "(", "\"L\"", ",", "\"L\"", ",", "(", "\"L\"", ",", ")", ")", ",", "\"I\"", ":", "(", "\"L\"", ",", "\"I\"", ",", "(", "\"I\"", ",", ")", ")", ",", "\"F\"", ":", "(", "\"L\"", ",", "\"F\"", ",", "(", "\"F\"", ",", ")", ")", ",", "\"P\"", ":", "(", "\"P\"", ",", "\"L\"", ",", "(", "\"P\"", ",", ")", ")", ",", "\"RGB\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"R\"", ",", "\"G\"", ",", "\"B\"", ")", ")", ",", "\"RGBX\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"R\"", ",", "\"G\"", ",", "\"B\"", ",", "\"X\"", ")", ")", ",", "\"RGBA\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"R\"", ",", "\"G\"", ",", "\"B\"", ",", "\"A\"", ")", ")", ",", "\"CMYK\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"C\"", ",", "\"M\"", ",", "\"Y\"", ",", "\"K\"", ")", ")", ",", "\"YCbCr\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"Y\"", ",", "\"Cb\"", ",", "\"Cr\"", ")", ")", ",", "\"LAB\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"L\"", ",", "\"A\"", ",", "\"B\"", ")", ")", ",", "\"HSV\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"H\"", ",", "\"S\"", ",", "\"V\"", ")", ")", ",", "# extra experimental modes", "\"RGBa\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"R\"", ",", "\"G\"", ",", "\"B\"", ",", "\"a\"", ")", ")", ",", "\"LA\"", ":", "(", "\"L\"", ",", "\"L\"", ",", "(", "\"L\"", ",", "\"A\"", ")", ")", ",", "\"La\"", ":", "(", "\"L\"", ",", "\"L\"", ",", "(", "\"L\"", ",", "\"a\"", ")", ")", ",", "\"PA\"", ":", "(", "\"RGB\"", ",", "\"L\"", ",", "(", "\"P\"", ",", "\"A\"", ")", ")", ",", "}", ".", "items", "(", ")", ":", "modes", "[", "m", "]", "=", "ModeDescriptor", "(", "m", ",", "bands", ",", "basemode", 
",", "basetype", ")", "# mapping modes", "for", "i16mode", "in", "(", "\"I;16\"", ",", "\"I;16S\"", ",", "\"I;16L\"", ",", "\"I;16LS\"", ",", "\"I;16B\"", ",", "\"I;16BS\"", ",", "\"I;16N\"", ",", "\"I;16NS\"", ",", ")", ":", "modes", "[", "i16mode", "]", "=", "ModeDescriptor", "(", "i16mode", ",", "(", "\"I\"", ",", ")", ",", "\"L\"", ",", "\"L\"", ")", "# set global mode cache atomically", "_modes", "=", "modes", "return", "_modes", "[", "mode", "]" ]
[ 32, 0 ]
[ 73, 23 ]
python
en
['en', 'gl', 'en']
True
_parse_codestream
(fp)
Parse the JPEG 2000 codestream to extract the size and component count from the SIZ marker segment, returning a PIL (size, mode) tuple.
Parse the JPEG 2000 codestream to extract the size and component count from the SIZ marker segment, returning a PIL (size, mode) tuple.
def _parse_codestream(fp): """Parse the JPEG 2000 codestream to extract the size and component count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" hdr = fp.read(2) lsiz = struct.unpack(">H", hdr)[0] siz = hdr + fp.read(lsiz - 2) lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from( ">HHIIIIIIIIH", siz ) ssiz = [None] * csiz xrsiz = [None] * csiz yrsiz = [None] * csiz for i in range(csiz): ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i) size = (xsiz - xosiz, ysiz - yosiz) if csiz == 1: if (yrsiz[0] & 0x7F) > 8: mode = "I;16" else: mode = "L" elif csiz == 2: mode = "LA" elif csiz == 3: mode = "RGB" elif csiz == 4: mode = "RGBA" else: mode = None return (size, mode)
[ "def", "_parse_codestream", "(", "fp", ")", ":", "hdr", "=", "fp", ".", "read", "(", "2", ")", "lsiz", "=", "struct", ".", "unpack", "(", "\">H\"", ",", "hdr", ")", "[", "0", "]", "siz", "=", "hdr", "+", "fp", ".", "read", "(", "lsiz", "-", "2", ")", "lsiz", ",", "rsiz", ",", "xsiz", ",", "ysiz", ",", "xosiz", ",", "yosiz", ",", "_", ",", "_", ",", "_", ",", "_", ",", "csiz", "=", "struct", ".", "unpack_from", "(", "\">HHIIIIIIIIH\"", ",", "siz", ")", "ssiz", "=", "[", "None", "]", "*", "csiz", "xrsiz", "=", "[", "None", "]", "*", "csiz", "yrsiz", "=", "[", "None", "]", "*", "csiz", "for", "i", "in", "range", "(", "csiz", ")", ":", "ssiz", "[", "i", "]", ",", "xrsiz", "[", "i", "]", ",", "yrsiz", "[", "i", "]", "=", "struct", ".", "unpack_from", "(", "\">BBB\"", ",", "siz", ",", "36", "+", "3", "*", "i", ")", "size", "=", "(", "xsiz", "-", "xosiz", ",", "ysiz", "-", "yosiz", ")", "if", "csiz", "==", "1", ":", "if", "(", "yrsiz", "[", "0", "]", "&", "0x7F", ")", ">", "8", ":", "mode", "=", "\"I;16\"", "else", ":", "mode", "=", "\"L\"", "elif", "csiz", "==", "2", ":", "mode", "=", "\"LA\"", "elif", "csiz", "==", "3", ":", "mode", "=", "\"RGB\"", "elif", "csiz", "==", "4", ":", "mode", "=", "\"RGBA\"", "else", ":", "mode", "=", "None", "return", "(", "size", ",", "mode", ")" ]
[ 21, 0 ]
[ 52, 23 ]
python
en
['en', 'en', 'en']
True
_parse_jp2_header
(fp)
Parse the JP2 header box to extract size, component count and color space information, returning a (size, mode, mimetype) tuple.
Parse the JP2 header box to extract size, component count and color space information, returning a (size, mode, mimetype) tuple.
def _parse_jp2_header(fp): """Parse the JP2 header box to extract size, component count and color space information, returning a (size, mode, mimetype) tuple.""" # Find the JP2 header box header = None mimetype = None while True: lbox, tbox = struct.unpack(">I4s", fp.read(8)) if lbox == 1: lbox = struct.unpack(">Q", fp.read(8))[0] hlen = 16 else: hlen = 8 if lbox < hlen: raise SyntaxError("Invalid JP2 header length") if tbox == b"jp2h": header = fp.read(lbox - hlen) break elif tbox == b"ftyp": if fp.read(4) == b"jpx ": mimetype = "image/jpx" fp.seek(lbox - hlen - 4, os.SEEK_CUR) else: fp.seek(lbox - hlen, os.SEEK_CUR) if header is None: raise SyntaxError("could not find JP2 header") size = None mode = None bpc = None nc = None hio = io.BytesIO(header) while True: lbox, tbox = struct.unpack(">I4s", hio.read(8)) if lbox == 1: lbox = struct.unpack(">Q", hio.read(8))[0] hlen = 16 else: hlen = 8 content = hio.read(lbox - hlen) if tbox == b"ihdr": height, width, nc, bpc, c, unkc, ipr = struct.unpack(">IIHBBBB", content) size = (width, height) if unkc: if nc == 1 and (bpc & 0x7F) > 8: mode = "I;16" elif nc == 1: mode = "L" elif nc == 2: mode = "LA" elif nc == 3: mode = "RGB" elif nc == 4: mode = "RGBA" break elif tbox == b"colr": meth, prec, approx = struct.unpack_from(">BBB", content) if meth == 1: cs = struct.unpack_from(">I", content, 3)[0] if cs == 16: # sRGB if nc == 1 and (bpc & 0x7F) > 8: mode = "I;16" elif nc == 1: mode = "L" elif nc == 3: mode = "RGB" elif nc == 4: mode = "RGBA" break elif cs == 17: # grayscale if nc == 1 and (bpc & 0x7F) > 8: mode = "I;16" elif nc == 1: mode = "L" elif nc == 2: mode = "LA" break elif cs == 18: # sYCC if nc == 3: mode = "RGB" elif nc == 4: mode = "RGBA" break if size is None or mode is None: raise SyntaxError("Malformed jp2 header") return (size, mode, mimetype)
[ "def", "_parse_jp2_header", "(", "fp", ")", ":", "# Find the JP2 header box", "header", "=", "None", "mimetype", "=", "None", "while", "True", ":", "lbox", ",", "tbox", "=", "struct", ".", "unpack", "(", "\">I4s\"", ",", "fp", ".", "read", "(", "8", ")", ")", "if", "lbox", "==", "1", ":", "lbox", "=", "struct", ".", "unpack", "(", "\">Q\"", ",", "fp", ".", "read", "(", "8", ")", ")", "[", "0", "]", "hlen", "=", "16", "else", ":", "hlen", "=", "8", "if", "lbox", "<", "hlen", ":", "raise", "SyntaxError", "(", "\"Invalid JP2 header length\"", ")", "if", "tbox", "==", "b\"jp2h\"", ":", "header", "=", "fp", ".", "read", "(", "lbox", "-", "hlen", ")", "break", "elif", "tbox", "==", "b\"ftyp\"", ":", "if", "fp", ".", "read", "(", "4", ")", "==", "b\"jpx \"", ":", "mimetype", "=", "\"image/jpx\"", "fp", ".", "seek", "(", "lbox", "-", "hlen", "-", "4", ",", "os", ".", "SEEK_CUR", ")", "else", ":", "fp", ".", "seek", "(", "lbox", "-", "hlen", ",", "os", ".", "SEEK_CUR", ")", "if", "header", "is", "None", ":", "raise", "SyntaxError", "(", "\"could not find JP2 header\"", ")", "size", "=", "None", "mode", "=", "None", "bpc", "=", "None", "nc", "=", "None", "hio", "=", "io", ".", "BytesIO", "(", "header", ")", "while", "True", ":", "lbox", ",", "tbox", "=", "struct", ".", "unpack", "(", "\">I4s\"", ",", "hio", ".", "read", "(", "8", ")", ")", "if", "lbox", "==", "1", ":", "lbox", "=", "struct", ".", "unpack", "(", "\">Q\"", ",", "hio", ".", "read", "(", "8", ")", ")", "[", "0", "]", "hlen", "=", "16", "else", ":", "hlen", "=", "8", "content", "=", "hio", ".", "read", "(", "lbox", "-", "hlen", ")", "if", "tbox", "==", "b\"ihdr\"", ":", "height", ",", "width", ",", "nc", ",", "bpc", ",", "c", ",", "unkc", ",", "ipr", "=", "struct", ".", "unpack", "(", "\">IIHBBBB\"", ",", "content", ")", "size", "=", "(", "width", ",", "height", ")", "if", "unkc", ":", "if", "nc", "==", "1", "and", "(", "bpc", "&", "0x7F", ")", ">", "8", ":", "mode", "=", "\"I;16\"", "elif", "nc", "==", "1", 
":", "mode", "=", "\"L\"", "elif", "nc", "==", "2", ":", "mode", "=", "\"LA\"", "elif", "nc", "==", "3", ":", "mode", "=", "\"RGB\"", "elif", "nc", "==", "4", ":", "mode", "=", "\"RGBA\"", "break", "elif", "tbox", "==", "b\"colr\"", ":", "meth", ",", "prec", ",", "approx", "=", "struct", ".", "unpack_from", "(", "\">BBB\"", ",", "content", ")", "if", "meth", "==", "1", ":", "cs", "=", "struct", ".", "unpack_from", "(", "\">I\"", ",", "content", ",", "3", ")", "[", "0", "]", "if", "cs", "==", "16", ":", "# sRGB", "if", "nc", "==", "1", "and", "(", "bpc", "&", "0x7F", ")", ">", "8", ":", "mode", "=", "\"I;16\"", "elif", "nc", "==", "1", ":", "mode", "=", "\"L\"", "elif", "nc", "==", "3", ":", "mode", "=", "\"RGB\"", "elif", "nc", "==", "4", ":", "mode", "=", "\"RGBA\"", "break", "elif", "cs", "==", "17", ":", "# grayscale", "if", "nc", "==", "1", "and", "(", "bpc", "&", "0x7F", ")", ">", "8", ":", "mode", "=", "\"I;16\"", "elif", "nc", "==", "1", ":", "mode", "=", "\"L\"", "elif", "nc", "==", "2", ":", "mode", "=", "\"LA\"", "break", "elif", "cs", "==", "18", ":", "# sYCC", "if", "nc", "==", "3", ":", "mode", "=", "\"RGB\"", "elif", "nc", "==", "4", ":", "mode", "=", "\"RGBA\"", "break", "if", "size", "is", "None", "or", "mode", "is", "None", ":", "raise", "SyntaxError", "(", "\"Malformed jp2 header\"", ")", "return", "(", "size", ",", "mode", ",", "mimetype", ")" ]
[ 55, 0 ]
[ 149, 33 ]
python
en
['en', 'en', 'en']
True
bool_output
(func, argtypes, errcheck=None)
Generate a ctypes function that returns a boolean value.
Generate a ctypes function that returns a boolean value.
def bool_output(func, argtypes, errcheck=None):
    """Configure *func* as a ctypes routine that returns a boolean.

    Sets the argument/return types in place, attaches the optional
    *errcheck* callable, and returns the configured function.
    """
    func.restype = c_bool
    func.argtypes = argtypes
    if errcheck:
        func.errcheck = errcheck
    return func
[ "def", "bool_output", "(", "func", ",", "argtypes", ",", "errcheck", "=", "None", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_bool", "if", "errcheck", ":", "func", ".", "errcheck", "=", "errcheck", "return", "func" ]
[ 19, 0 ]
[ 25, 15 ]
python
en
['en', 'en', 'en']
True
double_output
(func, argtypes, errcheck=False, strarg=False, cpl=False)
Generate a ctypes function that returns a double value.
Generate a ctypes function that returns a double value.
def double_output(func, argtypes, errcheck=False, strarg=False, cpl=False):
    """Configure *func* as a ctypes routine that returns a double."""
    func.restype = c_double
    func.argtypes = argtypes
    if errcheck:
        # Bind the CPL flag into the shared error-code checker.
        func.errcheck = partial(check_arg_errcode, cpl=cpl)
    if strarg:
        # NOTE(review): this replaces any errcheck assigned above when
        # both flags are passed — presumably callers never combine them.
        func.errcheck = check_str_arg
    return func
[ "def", "double_output", "(", "func", ",", "argtypes", ",", "errcheck", "=", "False", ",", "strarg", "=", "False", ",", "cpl", "=", "False", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_double", "if", "errcheck", ":", "func", ".", "errcheck", "=", "partial", "(", "check_arg_errcode", ",", "cpl", "=", "cpl", ")", "if", "strarg", ":", "func", ".", "errcheck", "=", "check_str_arg", "return", "func" ]
[ 28, 0 ]
[ 36, 15 ]
python
en
['en', 'en', 'en']
True
geom_output
(func, argtypes, offset=None)
Generate a function that returns a Geometry either by reference or directly (if the return_geom keyword is set to True).
Generate a function that returns a Geometry either by reference or directly (if the return_geom keyword is set to True).
def geom_output(func, argtypes, offset=None):
    """
    Generate a function that returns a Geometry either by reference or
    directly (if the return_geom keyword is set to True).
    """
    func.argtypes = argtypes
    if offset:
        # An error code is returned; the geometry comes back by reference
        # at the given argument offset.
        func.restype = c_int

        def _geom_errcheck(result, func, cargs):
            return check_geom_offset(result, func, cargs, offset)

        func.errcheck = _geom_errcheck
    else:
        # The geometry pointer is the direct return value.
        func.restype = c_void_p
        func.errcheck = check_geom
    return func
[ "def", "geom_output", "(", "func", ",", "argtypes", ",", "offset", "=", "None", ")", ":", "# Setting the argument types", "func", ".", "argtypes", "=", "argtypes", "if", "not", "offset", ":", "# When a geometry pointer is directly returned.", "func", ".", "restype", "=", "c_void_p", "func", ".", "errcheck", "=", "check_geom", "else", ":", "# Error code returned, geometry is returned by-reference.", "func", ".", "restype", "=", "c_int", "def", "geomerrcheck", "(", "result", ",", "func", ",", "cargs", ")", ":", "return", "check_geom_offset", "(", "result", ",", "func", ",", "cargs", ",", "offset", ")", "func", ".", "errcheck", "=", "geomerrcheck", "return", "func" ]
[ 39, 0 ]
[ 59, 15 ]
python
en
['en', 'error', 'th']
False
int_output
(func, argtypes, errcheck=None)
Generate a ctypes function that returns an integer value.
Generate a ctypes function that returns an integer value.
def int_output(func, argtypes, errcheck=None):
    """Configure *func* as a ctypes routine that returns an integer."""
    func.restype = c_int
    func.argtypes = argtypes
    if errcheck:
        func.errcheck = errcheck
    return func
[ "def", "int_output", "(", "func", ",", "argtypes", ",", "errcheck", "=", "None", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_int", "if", "errcheck", ":", "func", ".", "errcheck", "=", "errcheck", "return", "func" ]
[ 62, 0 ]
[ 68, 15 ]
python
en
['en', 'en', 'en']
True
int64_output
(func, argtypes)
Generate a ctypes function that returns a 64-bit integer value.
Generate a ctypes function that returns a 64-bit integer value.
def int64_output(func, argtypes):
    """Configure *func* as a ctypes routine returning a 64-bit integer."""
    func.restype = c_int64
    func.argtypes = argtypes
    return func
[ "def", "int64_output", "(", "func", ",", "argtypes", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_int64", "return", "func" ]
[ 71, 0 ]
[ 75, 15 ]
python
en
['en', 'en', 'en']
True
srs_output
(func, argtypes)
Generate a ctypes prototype for the given function with the given C arguments that returns a pointer to an OGR Spatial Reference System.
Generate a ctypes prototype for the given function with the given C arguments that returns a pointer to an OGR Spatial Reference System.
def srs_output(func, argtypes):
    """
    Generate a ctypes prototype for the given function with the given C
    arguments that returns a pointer to an OGR Spatial Reference System.
    """
    func.restype = c_void_p
    func.argtypes = argtypes
    # check_srs validates the returned SRS pointer.
    func.errcheck = check_srs
    return func
[ "def", "srs_output", "(", "func", ",", "argtypes", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_void_p", "func", ".", "errcheck", "=", "check_srs", "return", "func" ]
[ 78, 0 ]
[ 87, 15 ]
python
en
['en', 'error', 'th']
False
string_output
(func, argtypes, offset=-1, str_result=False, decoding=None)
Generate a ctypes prototype for the given function with the given argument types that returns a string from a GDAL pointer. The `const` flag indicates whether the allocated pointer should be freed via the GDAL library routine VSIFree -- but only applies when `str_result` is True.
Generate a ctypes prototype for the given function with the given argument types that returns a string from a GDAL pointer. The `const` flag indicates whether the allocated pointer should be freed via the GDAL library routine VSIFree -- but only applies when `str_result` is True.
def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
    """
    Generate a ctypes prototype for the given function with the given
    argument types that returns a string from a GDAL pointer.

    When `str_result` is True the restype is `gdal_char_p`, a c_char_p
    subclass whose error-checking routine can free the memory at the
    returned address; otherwise an error code is returned.
    """
    func.argtypes = argtypes
    func.restype = gdal_char_p if str_result else c_int

    def _check_str(result, func, cargs):
        # `offset`/`str_result` are captured from the enclosing call;
        # decode the raw bytes only when a decoding was requested.
        res = check_string(result, func, cargs, offset=offset, str_result=str_result)
        if res and decoding:
            res = res.decode(decoding)
        return res

    func.errcheck = _check_str
    return func
[ "def", "string_output", "(", "func", ",", "argtypes", ",", "offset", "=", "-", "1", ",", "str_result", "=", "False", ",", "decoding", "=", "None", ")", ":", "func", ".", "argtypes", "=", "argtypes", "if", "str_result", ":", "# Use subclass of c_char_p so the error checking routine", "# can free the memory at the pointer's address.", "func", ".", "restype", "=", "gdal_char_p", "else", ":", "# Error code is returned", "func", ".", "restype", "=", "c_int", "# Dynamically defining our error-checking function with the", "# given offset.", "def", "_check_str", "(", "result", ",", "func", ",", "cargs", ")", ":", "res", "=", "check_string", "(", "result", ",", "func", ",", "cargs", ",", "offset", "=", "offset", ",", "str_result", "=", "str_result", ")", "if", "res", "and", "decoding", ":", "res", "=", "res", ".", "decode", "(", "decoding", ")", "return", "res", "func", ".", "errcheck", "=", "_check_str", "return", "func" ]
[ 107, 0 ]
[ 132, 15 ]
python
en
['en', 'error', 'th']
False
void_output
(func, argtypes, errcheck=True, cpl=False)
For functions that don't only return an error code that needs to be examined.
For functions that don't only return an error code that needs to be examined.
def void_output(func, argtypes, errcheck=True, cpl=False):
    """
    For functions that don't only return an error code that needs to be
    examined.
    """
    if argtypes:
        func.argtypes = argtypes
    if errcheck:
        # The routine actually reports a status code; route it through
        # the shared error-code checker (optionally CPL-aware).
        func.restype = c_int
        func.errcheck = partial(check_errcode, cpl=cpl)
    else:
        # Genuine void return.
        func.restype = None
    return func
[ "def", "void_output", "(", "func", ",", "argtypes", ",", "errcheck", "=", "True", ",", "cpl", "=", "False", ")", ":", "if", "argtypes", ":", "func", ".", "argtypes", "=", "argtypes", "if", "errcheck", ":", "# `errcheck` keyword may be set to False for routines that", "# return void, rather than a status code.", "func", ".", "restype", "=", "c_int", "func", ".", "errcheck", "=", "partial", "(", "check_errcode", ",", "cpl", "=", "cpl", ")", "else", ":", "func", ".", "restype", "=", "None", "return", "func" ]
[ 135, 0 ]
[ 150, 15 ]
python
en
['en', 'error', 'th']
False
voidptr_output
(func, argtypes, errcheck=True)
For functions that return c_void_p.
For functions that return c_void_p.
def voidptr_output(func, argtypes, errcheck=True):
    """Configure *func* as a ctypes routine that returns c_void_p."""
    func.restype = c_void_p
    func.argtypes = argtypes
    if errcheck:
        # Reject NULL pointers via the shared pointer checker.
        func.errcheck = check_pointer
    return func
[ "def", "voidptr_output", "(", "func", ",", "argtypes", ",", "errcheck", "=", "True", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_void_p", "if", "errcheck", ":", "func", ".", "errcheck", "=", "check_pointer", "return", "func" ]
[ 153, 0 ]
[ 159, 15 ]
python
en
['en', 'en', 'en']
True
chararray_output
(func, argtypes, errcheck=True)
For functions that return a c_char_p array.
For functions that return a c_char_p array.
def chararray_output(func, argtypes, errcheck=True):
    """Configure *func* as a ctypes routine returning a c_char_p array."""
    func.restype = POINTER(c_char_p)
    func.argtypes = argtypes
    if errcheck:
        # Reject NULL results via the shared pointer checker.
        func.errcheck = check_pointer
    return func
[ "def", "chararray_output", "(", "func", ",", "argtypes", ",", "errcheck", "=", "True", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "POINTER", "(", "c_char_p", ")", "if", "errcheck", ":", "func", ".", "errcheck", "=", "check_pointer", "return", "func" ]
[ 162, 0 ]
[ 168, 15 ]
python
en
['en', 'en', 'en']
True
Corpus.tokenize
(self, path)
Tokenizes a text file.
Tokenizes a text file.
def tokenize(self, path):
    """Tokenizes a text file."""
    assert os.path.exists(path)
    # Pass 1: grow the dictionary and count how many ids we will need.
    token_count = 0
    with open(path, 'r') as source:
        for line in source:
            words = line.split() + ['<eos>']
            token_count += len(words)
            for word in words:
                self.dictionary.add_word(word)
    # Pass 2: map every word to its id in a pre-sized tensor.
    ids = torch.LongTensor(token_count)
    position = 0
    with open(path, 'r') as source:
        for line in source:
            for word in line.split() + ['<eos>']:
                ids[position] = self.dictionary.word2idx[word]
                position += 1
    return ids
[ "def", "tokenize", "(", "self", ",", "path", ")", ":", "assert", "os", ".", "path", ".", "exists", "(", "path", ")", "# Add words to the dictionary", "with", "open", "(", "path", ",", "'r'", ")", "as", "f", ":", "tokens", "=", "0", "for", "line", "in", "f", ":", "words", "=", "line", ".", "split", "(", ")", "+", "[", "'<eos>'", "]", "tokens", "+=", "len", "(", "words", ")", "for", "word", "in", "words", ":", "self", ".", "dictionary", ".", "add_word", "(", "word", ")", "# Tokenize file content", "with", "open", "(", "path", ",", "'r'", ")", "as", "f", ":", "ids", "=", "torch", ".", "LongTensor", "(", "tokens", ")", "token", "=", "0", "for", "line", "in", "f", ":", "words", "=", "line", ".", "split", "(", ")", "+", "[", "'<eos>'", "]", "for", "word", "in", "words", ":", "ids", "[", "token", "]", "=", "self", ".", "dictionary", ".", "word2idx", "[", "word", "]", "token", "+=", "1", "return", "ids" ]
[ 25, 4 ]
[ 47, 18 ]
python
bg
['en', 'el-Latn', 'bg']
False
Search.expression
(self, value)
Specify the search query expression.
Specify the search query expression.
def expression(self, value):
    """Set the search query expression; returns self for chaining."""
    self.query.update(expression=value)
    return self
[ "def", "expression", "(", "self", ",", "value", ")", ":", "self", ".", "query", "[", "\"expression\"", "]", "=", "value", "return", "self" ]
[ 11, 4 ]
[ 14, 19 ]
python
en
['en', 'en', 'en']
True
Search.max_results
(self, value)
Set the max results to return
Set the max results to return
def max_results(self, value):
    """Set the maximum number of results to return; returns self."""
    self.query.update(max_results=value)
    return self
[ "def", "max_results", "(", "self", ",", "value", ")", ":", "self", ".", "query", "[", "\"max_results\"", "]", "=", "value", "return", "self" ]
[ 16, 4 ]
[ 19, 19 ]
python
en
['en', 'en', 'en']
True
Search.next_cursor
(self, value)
Get next page in the query using the ``next_cursor`` value from a previous invocation.
Get next page in the query using the ``next_cursor`` value from a previous invocation.
def next_cursor(self, value):
    """Request the next page of results using the ``next_cursor`` value
    returned by a previous invocation; returns self."""
    self.query.update(next_cursor=value)
    return self
[ "def", "next_cursor", "(", "self", ",", "value", ")", ":", "self", ".", "query", "[", "\"next_cursor\"", "]", "=", "value", "return", "self" ]
[ 21, 4 ]
[ 24, 19 ]
python
en
['en', 'en', 'en']
True
Search.sort_by
(self, field_name, direction=None)
Add a field to sort results by. If not provided, direction is ``desc``.
Add a field to sort results by. If not provided, direction is ``desc``.
def sort_by(self, field_name, direction=None):
    """Add a field to sort results by.

    If *direction* is not provided it defaults to ``desc``.
    """
    self._add("sort_by", {field_name: 'desc' if direction is None else direction})
    return self
[ "def", "sort_by", "(", "self", ",", "field_name", ",", "direction", "=", "None", ")", ":", "if", "direction", "is", "None", ":", "direction", "=", "'desc'", "self", ".", "_add", "(", "\"sort_by\"", ",", "{", "field_name", ":", "direction", "}", ")", "return", "self" ]
[ 26, 4 ]
[ 31, 19 ]
python
en
['en', 'en', 'en']
True
Search.aggregate
(self, value)
Aggregate field.
Aggregate field.
def aggregate(self, value):
    """Add an aggregate field to the query; returns self."""
    self._add("aggregate", value)
    return self
[ "def", "aggregate", "(", "self", ",", "value", ")", ":", "self", ".", "_add", "(", "\"aggregate\"", ",", "value", ")", "return", "self" ]
[ 33, 4 ]
[ 36, 19 ]
python
en
['en', 'it', 'en']
False
Search.with_field
(self, value)
Request an additional field in the result set.
Request an additional field in the result set.
def with_field(self, value):
    """Request an additional field in the result set; returns self."""
    self._add("with_field", value)
    return self
[ "def", "with_field", "(", "self", ",", "value", ")", ":", "self", ".", "_add", "(", "\"with_field\"", ",", "value", ")", "return", "self" ]
[ 38, 4 ]
[ 41, 19 ]
python
en
['en', 'en', 'en']
True
Search.execute
(self, **options)
Execute the search and return results.
Execute the search and return results.
def execute(self, **options):
    """Execute the search and return results."""
    # The search endpoint only accepts a JSON body.
    options["content_type"] = 'application/json'
    return call_json_api('post', ['resources', 'search'], self.as_dict(), **options)
[ "def", "execute", "(", "self", ",", "*", "*", "options", ")", ":", "options", "[", "\"content_type\"", "]", "=", "'application/json'", "uri", "=", "[", "'resources'", ",", "'search'", "]", "return", "call_json_api", "(", "'post'", ",", "uri", ",", "self", ".", "as_dict", "(", ")", ",", "*", "*", "options", ")" ]
[ 46, 4 ]
[ 50, 68 ]
python
en
['en', 'en', 'en']
True
iter_multi_items
(mapping)
Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures.
Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures.
def iter_multi_items(mapping):
    """Iterates over the items of a mapping yielding keys and values
    without dropping any from more complex structures.
    """
    if isinstance(mapping, MultiDict):
        # MultiDict knows how to expand its own duplicate keys.
        for pair in iteritems(mapping, multi=True):
            yield pair
    elif isinstance(mapping, dict):
        for key, value in iteritems(mapping):
            if isinstance(value, (tuple, list)):
                # Expand sequence values into one pair per element.
                for element in value:
                    yield key, element
            else:
                yield key, value
    else:
        # Assume an iterable of (key, value) pairs.
        for pair in mapping:
            yield pair
[ "def", "iter_multi_items", "(", "mapping", ")", ":", "if", "isinstance", "(", "mapping", ",", "MultiDict", ")", ":", "for", "item", "in", "iteritems", "(", "mapping", ",", "multi", "=", "True", ")", ":", "yield", "item", "elif", "isinstance", "(", "mapping", ",", "dict", ")", ":", "for", "key", ",", "value", "in", "iteritems", "(", "mapping", ")", ":", "if", "isinstance", "(", "value", ",", "(", "tuple", ",", "list", ")", ")", ":", "for", "value", "in", "value", ":", "yield", "key", ",", "value", "else", ":", "yield", "key", ",", "value", "else", ":", "for", "item", "in", "mapping", ":", "yield", "item" ]
[ 39, 0 ]
[ 55, 22 ]
python
en
['en', 'en', 'en']
True
cache_property
(key, empty, type)
Return a new property object for a cache header. Useful if you want to add support for a cache extension in a subclass.
Return a new property object for a cache header. Useful if you want to add support for a cache extension in a subclass.
def cache_property(key, empty, type):
    """Return a new property object for a cache header.  Useful if you
    want to add support for a cache extension in a subclass."""
    # Named accessors instead of inline lambdas; each one forwards to the
    # owner's private cache-value protocol.
    def fget(x):
        return x._get_cache_value(key, empty, type)

    def fset(x, v):
        x._set_cache_value(key, v, type)

    def fdel(x):
        x._del_cache_value(key)

    return property(fget, fset, fdel, "accessor for %r" % key)
[ "def", "cache_property", "(", "key", ",", "empty", ",", "type", ")", ":", "return", "property", "(", "lambda", "x", ":", "x", ".", "_get_cache_value", "(", "key", ",", "empty", ",", "type", ")", ",", "lambda", "x", ",", "v", ":", "x", ".", "_set_cache_value", "(", "key", ",", "v", ",", "type", ")", ",", "lambda", "x", ":", "x", ".", "_del_cache_value", "(", "key", ")", ",", "\"accessor for %r\"", "%", "key", ",", ")" ]
[ 1876, 0 ]
[ 1884, 5 ]
python
en
['en', 'en', 'en']
True
TypeConversionDict.get
(self, key, default=None, type=None)
Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In this case the function will return the default as if the value was not found: >>> d = TypeConversionDict(foo='42', bar='blub') >>> d.get('foo', type=int) 42 >>> d.get('bar', -1, type=int) -1 :param key: The key to be looked up. :param default: The default value to be returned if the key can't be looked up. If not further specified `None` is returned. :param type: A callable that is used to cast the value in the :class:`MultiDict`. If a :exc:`ValueError` is raised by this callable the default value is returned.
Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In this case the function will return the default as if the value was not found:
def get(self, key, default=None, type=None): """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In this case the function will return the default as if the value was not found: >>> d = TypeConversionDict(foo='42', bar='blub') >>> d.get('foo', type=int) 42 >>> d.get('bar', -1, type=int) -1 :param key: The key to be looked up. :param default: The default value to be returned if the key can't be looked up. If not further specified `None` is returned. :param type: A callable that is used to cast the value in the :class:`MultiDict`. If a :exc:`ValueError` is raised by this callable the default value is returned. """ try: rv = self[key] except KeyError: return default if type is not None: try: rv = type(rv) except ValueError: rv = default return rv
[ "def", "get", "(", "self", ",", "key", ",", "default", "=", "None", ",", "type", "=", "None", ")", ":", "try", ":", "rv", "=", "self", "[", "key", "]", "except", "KeyError", ":", "return", "default", "if", "type", "is", "not", "None", ":", "try", ":", "rv", "=", "type", "(", "rv", ")", "except", "ValueError", ":", "rv", "=", "default", "return", "rv" ]
[ 292, 4 ]
[ 322, 17 ]
python
en
['en', 'en', 'en']
True
ImmutableTypeConversionDict.copy
(self)
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return TypeConversionDict(self)
[ "def", "copy", "(", "self", ")", ":", "return", "TypeConversionDict", "(", "self", ")" ]
[ 332, 4 ]
[ 337, 39 ]
python
en
['en', 'en', 'en']
True
FileMultiDict.add_file
(self, name, file, filename=None, content_type=None)
Adds a new file to the dict. `file` can be a file name or a :class:`file`-like or a :class:`FileStorage` object. :param name: the name of the field. :param file: a filename or :class:`file`-like object :param filename: an optional filename :param content_type: an optional content type
Adds a new file to the dict. `file` can be a file name or a :class:`file`-like or a :class:`FileStorage` object.
def add_file(self, name, file, filename=None, content_type=None): """Adds a new file to the dict. `file` can be a file name or a :class:`file`-like or a :class:`FileStorage` object. :param name: the name of the field. :param file: a filename or :class:`file`-like object :param filename: an optional filename :param content_type: an optional content type """ if isinstance(file, FileStorage): value = file else: if isinstance(file, string_types): if filename is None: filename = file file = open(file, "rb") if filename and content_type is None: content_type = ( mimetypes.guess_type(filename)[0] or "application/octet-stream" ) value = FileStorage(file, filename, name, content_type) self.add(name, value)
[ "def", "add_file", "(", "self", ",", "name", ",", "file", ",", "filename", "=", "None", ",", "content_type", "=", "None", ")", ":", "if", "isinstance", "(", "file", ",", "FileStorage", ")", ":", "value", "=", "file", "else", ":", "if", "isinstance", "(", "file", ",", "string_types", ")", ":", "if", "filename", "is", "None", ":", "filename", "=", "file", "file", "=", "open", "(", "file", ",", "\"rb\"", ")", "if", "filename", "and", "content_type", "is", "None", ":", "content_type", "=", "(", "mimetypes", ".", "guess_type", "(", "filename", ")", "[", "0", "]", "or", "\"application/octet-stream\"", ")", "value", "=", "FileStorage", "(", "file", ",", "filename", ",", "name", ",", "content_type", ")", "self", ".", "add", "(", "name", ",", "value", ")" ]
[ 1546, 4 ]
[ 1568, 29 ]
python
en
['en', 'en', 'en']
True
ImmutableDict.copy
(self)
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return dict(self)
[ "def", "copy", "(", "self", ")", ":", "return", "dict", "(", "self", ")" ]
[ 1580, 4 ]
[ 1585, 25 ]
python
en
['en', 'en', 'en']
True
ImmutableMultiDict.copy
(self)
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return MultiDict(self)
[ "def", "copy", "(", "self", ")", ":", "return", "MultiDict", "(", "self", ")" ]
[ 1597, 4 ]
[ 1602, 30 ]
python
en
['en', 'en', 'en']
True
ImmutableOrderedMultiDict.copy
(self)
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`).
def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return OrderedMultiDict(self)
[ "def", "copy", "(", "self", ")", ":", "return", "OrderedMultiDict", "(", "self", ")" ]
[ 1617, 4 ]
[ 1622, 37 ]
python
en
['en', 'en', 'en']
True
MIMEAccept.accept_html
(self)
True if this object accepts HTML.
True if this object accepts HTML.
def accept_html(self): """True if this object accepts HTML.""" return ( "text/html" in self or "application/xhtml+xml" in self or self.accept_xhtml )
[ "def", "accept_html", "(", "self", ")", ":", "return", "(", "\"text/html\"", "in", "self", "or", "\"application/xhtml+xml\"", "in", "self", "or", "self", ".", "accept_xhtml", ")" ]
[ 1836, 4 ]
[ 1840, 9 ]
python
en
['en', 'en', 'en']
True
MIMEAccept.accept_xhtml
(self)
True if this object accepts XHTML.
True if this object accepts XHTML.
def accept_xhtml(self): """True if this object accepts XHTML.""" return "application/xhtml+xml" in self or "application/xml" in self
[ "def", "accept_xhtml", "(", "self", ")", ":", "return", "\"application/xhtml+xml\"", "in", "self", "or", "\"application/xml\"", "in", "self" ]
[ 1843, 4 ]
[ 1845, 75 ]
python
en
['en', 'en', 'en']
True
MIMEAccept.accept_json
(self)
True if this object accepts JSON.
True if this object accepts JSON.
def accept_json(self): """True if this object accepts JSON.""" return "application/json" in self
[ "def", "accept_json", "(", "self", ")", ":", "return", "\"application/json\"", "in", "self" ]
[ 1848, 4 ]
[ 1850, 41 ]
python
en
['en', 'en', 'en']
True
_CacheControl._get_cache_value
(self, key, empty, type)
Used internally by the accessor properties.
Used internally by the accessor properties.
def _get_cache_value(self, key, empty, type): """Used internally by the accessor properties.""" if type is bool: return key in self if key in self: value = self[key] if value is None: return empty elif type is not None: try: value = type(value) except ValueError: pass return value
[ "def", "_get_cache_value", "(", "self", ",", "key", ",", "empty", ",", "type", ")", ":", "if", "type", "is", "bool", ":", "return", "key", "in", "self", "if", "key", "in", "self", ":", "value", "=", "self", "[", "key", "]", "if", "value", "is", "None", ":", "return", "empty", "elif", "type", "is", "not", "None", ":", "try", ":", "value", "=", "type", "(", "value", ")", "except", "ValueError", ":", "pass", "return", "value" ]
[ 1929, 4 ]
[ 1942, 24 ]
python
en
['en', 'en', 'en']
True
_CacheControl._set_cache_value
(self, key, value, type)
Used internally by the accessor properties.
Used internally by the accessor properties.
def _set_cache_value(self, key, value, type): """Used internally by the accessor properties.""" if type is bool: if value: self[key] = None else: self.pop(key, None) else: if value is None: self.pop(key) elif value is True: self[key] = None else: self[key] = value
[ "def", "_set_cache_value", "(", "self", ",", "key", ",", "value", ",", "type", ")", ":", "if", "type", "is", "bool", ":", "if", "value", ":", "self", "[", "key", "]", "=", "None", "else", ":", "self", ".", "pop", "(", "key", ",", "None", ")", "else", ":", "if", "value", "is", "None", ":", "self", ".", "pop", "(", "key", ")", "elif", "value", "is", "True", ":", "self", "[", "key", "]", "=", "None", "else", ":", "self", "[", "key", "]", "=", "value" ]
[ 1944, 4 ]
[ 1957, 33 ]
python
en
['en', 'en', 'en']
True
_CacheControl._del_cache_value
(self, key)
Used internally by the accessor properties.
Used internally by the accessor properties.
def _del_cache_value(self, key): """Used internally by the accessor properties.""" if key in self: del self[key]
[ "def", "_del_cache_value", "(", "self", ",", "key", ")", ":", "if", "key", "in", "self", ":", "del", "self", "[", "key", "]" ]
[ 1959, 4 ]
[ 1962, 25 ]
python
en
['en', 'en', 'en']
True
_CacheControl.to_header
(self)
Convert the stored values into a cache control header.
Convert the stored values into a cache control header.
def to_header(self): """Convert the stored values into a cache control header.""" return dump_header(self)
[ "def", "to_header", "(", "self", ")", ":", "return", "dump_header", "(", "self", ")" ]
[ 1964, 4 ]
[ 1966, 32 ]
python
en
['en', 'en', 'en']
True
HeaderSet.add
(self, header)
Add a new header to the set.
Add a new header to the set.
def add(self, header): """Add a new header to the set.""" self.update((header,))
[ "def", "add", "(", "self", ",", "header", ")", ":", "self", ".", "update", "(", "(", "header", ",", ")", ")" ]
[ 2056, 4 ]
[ 2058, 30 ]
python
en
['en', 'en', 'en']
True
HeaderSet.remove
(self, header)
Remove a header from the set. This raises an :exc:`KeyError` if the header is not in the set. .. versionchanged:: 0.5 In older versions a :exc:`IndexError` was raised instead of a :exc:`KeyError` if the object was missing. :param header: the header to be removed.
Remove a header from the set. This raises an :exc:`KeyError` if the header is not in the set.
def remove(self, header): """Remove a header from the set. This raises an :exc:`KeyError` if the header is not in the set. .. versionchanged:: 0.5 In older versions a :exc:`IndexError` was raised instead of a :exc:`KeyError` if the object was missing. :param header: the header to be removed. """ key = header.lower() if key not in self._set: raise KeyError(header) self._set.remove(key) for idx, key in enumerate(self._headers): if key.lower() == header: del self._headers[idx] break if self.on_update is not None: self.on_update(self)
[ "def", "remove", "(", "self", ",", "header", ")", ":", "key", "=", "header", ".", "lower", "(", ")", "if", "key", "not", "in", "self", ".", "_set", ":", "raise", "KeyError", "(", "header", ")", "self", ".", "_set", ".", "remove", "(", "key", ")", "for", "idx", ",", "key", "in", "enumerate", "(", "self", ".", "_headers", ")", ":", "if", "key", ".", "lower", "(", ")", "==", "header", ":", "del", "self", ".", "_headers", "[", "idx", "]", "break", "if", "self", ".", "on_update", "is", "not", "None", ":", "self", ".", "on_update", "(", "self", ")" ]
[ 2060, 4 ]
[ 2079, 32 ]
python
en
['en', 'en', 'en']
True
HeaderSet.update
(self, iterable)
Add all the headers from the iterable to the set. :param iterable: updates the set with the items from the iterable.
Add all the headers from the iterable to the set.
def update(self, iterable): """Add all the headers from the iterable to the set. :param iterable: updates the set with the items from the iterable. """ inserted_any = False for header in iterable: key = header.lower() if key not in self._set: self._headers.append(header) self._set.add(key) inserted_any = True if inserted_any and self.on_update is not None: self.on_update(self)
[ "def", "update", "(", "self", ",", "iterable", ")", ":", "inserted_any", "=", "False", "for", "header", "in", "iterable", ":", "key", "=", "header", ".", "lower", "(", ")", "if", "key", "not", "in", "self", ".", "_set", ":", "self", ".", "_headers", ".", "append", "(", "header", ")", "self", ".", "_set", ".", "add", "(", "key", ")", "inserted_any", "=", "True", "if", "inserted_any", "and", "self", ".", "on_update", "is", "not", "None", ":", "self", ".", "on_update", "(", "self", ")" ]
[ 2081, 4 ]
[ 2094, 32 ]
python
en
['en', 'en', 'en']
True
HeaderSet.discard
(self, header)
Like :meth:`remove` but ignores errors. :param header: the header to be discarded.
Like :meth:`remove` but ignores errors.
def discard(self, header): """Like :meth:`remove` but ignores errors. :param header: the header to be discarded. """ try: return self.remove(header) except KeyError: pass
[ "def", "discard", "(", "self", ",", "header", ")", ":", "try", ":", "return", "self", ".", "remove", "(", "header", ")", "except", "KeyError", ":", "pass" ]
[ 2096, 4 ]
[ 2104, 16 ]
python
en
['en', 'en', 'en']
True
HeaderSet.find
(self, header)
Return the index of the header in the set or return -1 if not found. :param header: the header to be looked up.
Return the index of the header in the set or return -1 if not found.
def find(self, header): """Return the index of the header in the set or return -1 if not found. :param header: the header to be looked up. """ header = header.lower() for idx, item in enumerate(self._headers): if item.lower() == header: return idx return -1
[ "def", "find", "(", "self", ",", "header", ")", ":", "header", "=", "header", ".", "lower", "(", ")", "for", "idx", ",", "item", "in", "enumerate", "(", "self", ".", "_headers", ")", ":", "if", "item", ".", "lower", "(", ")", "==", "header", ":", "return", "idx", "return", "-", "1" ]
[ 2106, 4 ]
[ 2115, 17 ]
python
en
['en', 'en', 'en']
True
HeaderSet.index
(self, header)
Return the index of the header in the set or raise an :exc:`IndexError`. :param header: the header to be looked up.
Return the index of the header in the set or raise an :exc:`IndexError`.
def index(self, header): """Return the index of the header in the set or raise an :exc:`IndexError`. :param header: the header to be looked up. """ rv = self.find(header) if rv < 0: raise IndexError(header) return rv
[ "def", "index", "(", "self", ",", "header", ")", ":", "rv", "=", "self", ".", "find", "(", "header", ")", "if", "rv", "<", "0", ":", "raise", "IndexError", "(", "header", ")", "return", "rv" ]
[ 2117, 4 ]
[ 2126, 17 ]
python
en
['en', 'en', 'en']
True
HeaderSet.clear
(self)
Clear the set.
Clear the set.
def clear(self): """Clear the set.""" self._set.clear() del self._headers[:] if self.on_update is not None: self.on_update(self)
[ "def", "clear", "(", "self", ")", ":", "self", ".", "_set", ".", "clear", "(", ")", "del", "self", ".", "_headers", "[", ":", "]", "if", "self", ".", "on_update", "is", "not", "None", ":", "self", ".", "on_update", "(", "self", ")" ]
[ 2128, 4 ]
[ 2133, 32 ]
python
en
['en', 'en', 'en']
True
HeaderSet.as_set
(self, preserve_casing=False)
Return the set as real python set type. When calling this, all the items are converted to lowercase and the ordering is lost. :param preserve_casing: if set to `True` the items in the set returned will have the original case like in the :class:`HeaderSet`, otherwise they will be lowercase.
Return the set as real python set type. When calling this, all the items are converted to lowercase and the ordering is lost.
def as_set(self, preserve_casing=False): """Return the set as real python set type. When calling this, all the items are converted to lowercase and the ordering is lost. :param preserve_casing: if set to `True` the items in the set returned will have the original case like in the :class:`HeaderSet`, otherwise they will be lowercase. """ if preserve_casing: return set(self._headers) return set(self._set)
[ "def", "as_set", "(", "self", ",", "preserve_casing", "=", "False", ")", ":", "if", "preserve_casing", ":", "return", "set", "(", "self", ".", "_headers", ")", "return", "set", "(", "self", ".", "_set", ")" ]
[ 2135, 4 ]
[ 2146, 29 ]
python
en
['en', 'en', 'en']
True
HeaderSet.to_header
(self)
Convert the header set into an HTTP header string.
Convert the header set into an HTTP header string.
def to_header(self): """Convert the header set into an HTTP header string.""" return ", ".join(map(quote_header_value, self._headers))
[ "def", "to_header", "(", "self", ")", ":", "return", "\", \"", ".", "join", "(", "map", "(", "quote_header_value", ",", "self", ".", "_headers", ")", ")" ]
[ 2148, 4 ]
[ 2150, 64 ]
python
en
['en', 'lb', 'en']
True
ETags.as_set
(self, include_weak=False)
Convert the `ETags` object into a python set. Per default all the weak etags are not part of this set.
Convert the `ETags` object into a python set. Per default all the weak etags are not part of this set.
def as_set(self, include_weak=False): """Convert the `ETags` object into a python set. Per default all the weak etags are not part of this set.""" rv = set(self._strong) if include_weak: rv.update(self._weak) return rv
[ "def", "as_set", "(", "self", ",", "include_weak", "=", "False", ")", ":", "rv", "=", "set", "(", "self", ".", "_strong", ")", "if", "include_weak", ":", "rv", ".", "update", "(", "self", ".", "_weak", ")", "return", "rv" ]
[ 2198, 4 ]
[ 2204, 17 ]
python
en
['en', 'en', 'en']
True
ETags.is_weak
(self, etag)
Check if an etag is weak.
Check if an etag is weak.
def is_weak(self, etag): """Check if an etag is weak.""" return etag in self._weak
[ "def", "is_weak", "(", "self", ",", "etag", ")", ":", "return", "etag", "in", "self", ".", "_weak" ]
[ 2206, 4 ]
[ 2208, 33 ]
python
en
['en', 'en', 'es']
True
ETags.is_strong
(self, etag)
Check if an etag is strong.
Check if an etag is strong.
def is_strong(self, etag): """Check if an etag is strong.""" return etag in self._strong
[ "def", "is_strong", "(", "self", ",", "etag", ")", ":", "return", "etag", "in", "self", ".", "_strong" ]
[ 2210, 4 ]
[ 2212, 35 ]
python
en
['en', 'en', 'en']
True
ETags.contains_weak
(self, etag)
Check if an etag is part of the set including weak and strong tags.
Check if an etag is part of the set including weak and strong tags.
def contains_weak(self, etag): """Check if an etag is part of the set including weak and strong tags.""" return self.is_weak(etag) or self.contains(etag)
[ "def", "contains_weak", "(", "self", ",", "etag", ")", ":", "return", "self", ".", "is_weak", "(", "etag", ")", "or", "self", ".", "contains", "(", "etag", ")" ]
[ 2214, 4 ]
[ 2216, 56 ]
python
en
['en', 'en', 'en']
True
ETags.contains
(self, etag)
Check if an etag is part of the set ignoring weak tags. It is also possible to use the ``in`` operator.
Check if an etag is part of the set ignoring weak tags. It is also possible to use the ``in`` operator.
def contains(self, etag): """Check if an etag is part of the set ignoring weak tags. It is also possible to use the ``in`` operator. """ if self.star_tag: return True return self.is_strong(etag)
[ "def", "contains", "(", "self", ",", "etag", ")", ":", "if", "self", ".", "star_tag", ":", "return", "True", "return", "self", ".", "is_strong", "(", "etag", ")" ]
[ 2218, 4 ]
[ 2224, 35 ]
python
en
['en', 'en', 'en']
True
ETags.contains_raw
(self, etag)
When passed a quoted tag it will check if this tag is part of the set. If the tag is weak it is checked against weak and strong tags, otherwise strong only.
When passed a quoted tag it will check if this tag is part of the set. If the tag is weak it is checked against weak and strong tags, otherwise strong only.
def contains_raw(self, etag): """When passed a quoted tag it will check if this tag is part of the set. If the tag is weak it is checked against weak and strong tags, otherwise strong only.""" etag, weak = unquote_etag(etag) if weak: return self.contains_weak(etag) return self.contains(etag)
[ "def", "contains_raw", "(", "self", ",", "etag", ")", ":", "etag", ",", "weak", "=", "unquote_etag", "(", "etag", ")", "if", "weak", ":", "return", "self", ".", "contains_weak", "(", "etag", ")", "return", "self", ".", "contains", "(", "etag", ")" ]
[ 2226, 4 ]
[ 2233, 34 ]
python
en
['en', 'en', 'en']
True
ETags.to_header
(self)
Convert the etags set into a HTTP header string.
Convert the etags set into a HTTP header string.
def to_header(self): """Convert the etags set into a HTTP header string.""" if self.star_tag: return "*" return ", ".join( ['"%s"' % x for x in self._strong] + ['W/"%s"' % x for x in self._weak] )
[ "def", "to_header", "(", "self", ")", ":", "if", "self", ".", "star_tag", ":", "return", "\"*\"", "return", "\", \"", ".", "join", "(", "[", "'\"%s\"'", "%", "x", "for", "x", "in", "self", ".", "_strong", "]", "+", "[", "'W/\"%s\"'", "%", "x", "for", "x", "in", "self", ".", "_weak", "]", ")" ]
[ 2235, 4 ]
[ 2241, 9 ]
python
en
['en', 'gl', 'en']
True
IfRange.to_header
(self)
Converts the object back into an HTTP header.
Converts the object back into an HTTP header.
def to_header(self): """Converts the object back into an HTTP header.""" if self.date is not None: return http_date(self.date) if self.etag is not None: return quote_etag(self.etag) return ""
[ "def", "to_header", "(", "self", ")", ":", "if", "self", ".", "date", "is", "not", "None", ":", "return", "http_date", "(", "self", ".", "date", ")", "if", "self", ".", "etag", "is", "not", "None", ":", "return", "quote_etag", "(", "self", ".", "etag", ")", "return", "\"\"" ]
[ 2286, 4 ]
[ 2292, 17 ]
python
en
['en', 'en', 'en']
True
Range.range_for_length
(self, length)
If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`.
If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`.
def range_for_length(self, length): """If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`. """ if self.units != "bytes" or length is None or len(self.ranges) != 1: return None start, end = self.ranges[0] if end is None: end = length if start < 0: start += length if is_byte_range_valid(start, end, length): return start, min(end, length)
[ "def", "range_for_length", "(", "self", ",", "length", ")", ":", "if", "self", ".", "units", "!=", "\"bytes\"", "or", "length", "is", "None", "or", "len", "(", "self", ".", "ranges", ")", "!=", "1", ":", "return", "None", "start", ",", "end", "=", "self", ".", "ranges", "[", "0", "]", "if", "end", "is", "None", ":", "end", "=", "length", "if", "start", "<", "0", ":", "start", "+=", "length", "if", "is_byte_range_valid", "(", "start", ",", "end", ",", "length", ")", ":", "return", "start", ",", "min", "(", "end", ",", "length", ")" ]
[ 2325, 4 ]
[ 2338, 42 ]
python
en
['en', 'en', 'en']
True
Range.make_content_range
(self, length)
Creates a :class:`~werkzeug.datastructures.ContentRange` object from the current range and given content length.
Creates a :class:`~werkzeug.datastructures.ContentRange` object from the current range and given content length.
def make_content_range(self, length): """Creates a :class:`~werkzeug.datastructures.ContentRange` object from the current range and given content length. """ rng = self.range_for_length(length) if rng is not None: return ContentRange(self.units, rng[0], rng[1], length)
[ "def", "make_content_range", "(", "self", ",", "length", ")", ":", "rng", "=", "self", ".", "range_for_length", "(", "length", ")", "if", "rng", "is", "not", "None", ":", "return", "ContentRange", "(", "self", ".", "units", ",", "rng", "[", "0", "]", ",", "rng", "[", "1", "]", ",", "length", ")" ]
[ 2340, 4 ]
[ 2346, 67 ]
python
de
['en', 'fr', 'de']
False
Range.to_header
(self)
Converts the object back into an HTTP header.
Converts the object back into an HTTP header.
def to_header(self): """Converts the object back into an HTTP header.""" ranges = [] for begin, end in self.ranges: if end is None: ranges.append("%s-" % begin if begin >= 0 else str(begin)) else: ranges.append("%s-%s" % (begin, end - 1)) return "%s=%s" % (self.units, ",".join(ranges))
[ "def", "to_header", "(", "self", ")", ":", "ranges", "=", "[", "]", "for", "begin", ",", "end", "in", "self", ".", "ranges", ":", "if", "end", "is", "None", ":", "ranges", ".", "append", "(", "\"%s-\"", "%", "begin", "if", "begin", ">=", "0", "else", "str", "(", "begin", ")", ")", "else", ":", "ranges", ".", "append", "(", "\"%s-%s\"", "%", "(", "begin", ",", "end", "-", "1", ")", ")", "return", "\"%s=%s\"", "%", "(", "self", ".", "units", ",", "\",\"", ".", "join", "(", "ranges", ")", ")" ]
[ 2348, 4 ]
[ 2356, 55 ]
python
en
['en', 'en', 'en']
True
Range.to_content_range_header
(self, length)
Converts the object into `Content-Range` HTTP header, based on given length
Converts the object into `Content-Range` HTTP header, based on given length
def to_content_range_header(self, length): """Converts the object into `Content-Range` HTTP header, based on given length """ range_for_length = self.range_for_length(length) if range_for_length is not None: return "%s %d-%d/%d" % ( self.units, range_for_length[0], range_for_length[1] - 1, length, ) return None
[ "def", "to_content_range_header", "(", "self", ",", "length", ")", ":", "range_for_length", "=", "self", ".", "range_for_length", "(", "length", ")", "if", "range_for_length", "is", "not", "None", ":", "return", "\"%s %d-%d/%d\"", "%", "(", "self", ".", "units", ",", "range_for_length", "[", "0", "]", ",", "range_for_length", "[", "1", "]", "-", "1", ",", "length", ",", ")", "return", "None" ]
[ 2358, 4 ]
[ 2370, 19 ]
python
en
['en', 'en', 'en']
True
ContentRange.set
(self, start, stop, length=None, units="bytes")
Simple method to update the ranges.
Simple method to update the ranges.
def set(self, start, stop, length=None, units="bytes"): """Simple method to update the ranges.""" assert is_byte_range_valid(start, stop, length), "Bad range provided" self._units = units self._start = start self._stop = stop self._length = length if self.on_update is not None: self.on_update(self)
[ "def", "set", "(", "self", ",", "start", ",", "stop", ",", "length", "=", "None", ",", "units", "=", "\"bytes\"", ")", ":", "assert", "is_byte_range_valid", "(", "start", ",", "stop", ",", "length", ")", ",", "\"Bad range provided\"", "self", ".", "_units", "=", "units", "self", ".", "_start", "=", "start", "self", ".", "_stop", "=", "stop", "self", ".", "_length", "=", "length", "if", "self", ".", "on_update", "is", "not", "None", ":", "self", ".", "on_update", "(", "self", ")" ]
[ 2412, 4 ]
[ 2420, 32 ]
python
en
['en', 'en', 'en']
True
ContentRange.unset
(self)
Sets the units to `None` which indicates that the header should no longer be used.
Sets the units to `None` which indicates that the header should no longer be used.
def unset(self): """Sets the units to `None` which indicates that the header should no longer be used. """ self.set(None, None, units=None)
[ "def", "unset", "(", "self", ")", ":", "self", ".", "set", "(", "None", ",", "None", ",", "units", "=", "None", ")" ]
[ 2422, 4 ]
[ 2426, 40 ]
python
en
['en', 'en', 'en']
True
WWWAuthenticate.set_basic
(self, realm="authentication required")
Clear the auth info and enable basic auth.
Clear the auth info and enable basic auth.
def set_basic(self, realm="authentication required"): """Clear the auth info and enable basic auth.""" dict.clear(self) dict.update(self, {"__auth_type__": "basic", "realm": realm}) if self.on_update: self.on_update(self)
[ "def", "set_basic", "(", "self", ",", "realm", "=", "\"authentication required\"", ")", ":", "dict", ".", "clear", "(", "self", ")", "dict", ".", "update", "(", "self", ",", "{", "\"__auth_type__\"", ":", "\"basic\"", ",", "\"realm\"", ":", "realm", "}", ")", "if", "self", ".", "on_update", ":", "self", ".", "on_update", "(", "self", ")" ]
[ 2546, 4 ]
[ 2551, 32 ]
python
en
['en', 'en', 'en']
True
WWWAuthenticate.set_digest
( self, realm, nonce, qop=("auth",), opaque=None, algorithm=None, stale=False )
Clear the auth info and enable digest auth.
Clear the auth info and enable digest auth.
def set_digest( self, realm, nonce, qop=("auth",), opaque=None, algorithm=None, stale=False ): """Clear the auth info and enable digest auth.""" d = { "__auth_type__": "digest", "realm": realm, "nonce": nonce, "qop": dump_header(qop), } if stale: d["stale"] = "TRUE" if opaque is not None: d["opaque"] = opaque if algorithm is not None: d["algorithm"] = algorithm dict.clear(self) dict.update(self, d) if self.on_update: self.on_update(self)
[ "def", "set_digest", "(", "self", ",", "realm", ",", "nonce", ",", "qop", "=", "(", "\"auth\"", ",", ")", ",", "opaque", "=", "None", ",", "algorithm", "=", "None", ",", "stale", "=", "False", ")", ":", "d", "=", "{", "\"__auth_type__\"", ":", "\"digest\"", ",", "\"realm\"", ":", "realm", ",", "\"nonce\"", ":", "nonce", ",", "\"qop\"", ":", "dump_header", "(", "qop", ")", ",", "}", "if", "stale", ":", "d", "[", "\"stale\"", "]", "=", "\"TRUE\"", "if", "opaque", "is", "not", "None", ":", "d", "[", "\"opaque\"", "]", "=", "opaque", "if", "algorithm", "is", "not", "None", ":", "d", "[", "\"algorithm\"", "]", "=", "algorithm", "dict", ".", "clear", "(", "self", ")", "dict", ".", "update", "(", "self", ",", "d", ")", "if", "self", ".", "on_update", ":", "self", ".", "on_update", "(", "self", ")" ]
[ 2553, 4 ]
[ 2572, 32 ]
python
en
['en', 'en', 'en']
True
WWWAuthenticate.to_header
(self)
Convert the stored values into a WWW-Authenticate header.
Convert the stored values into a WWW-Authenticate header.
def to_header(self): """Convert the stored values into a WWW-Authenticate header.""" d = dict(self) auth_type = d.pop("__auth_type__", None) or "basic" return "%s %s" % ( auth_type.title(), ", ".join( [ "%s=%s" % ( key, quote_header_value( value, allow_token=key not in self._require_quoting ), ) for key, value in iteritems(d) ] ), )
[ "def", "to_header", "(", "self", ")", ":", "d", "=", "dict", "(", "self", ")", "auth_type", "=", "d", ".", "pop", "(", "\"__auth_type__\"", ",", "None", ")", "or", "\"basic\"", "return", "\"%s %s\"", "%", "(", "auth_type", ".", "title", "(", ")", ",", "\", \"", ".", "join", "(", "[", "\"%s=%s\"", "%", "(", "key", ",", "quote_header_value", "(", "value", ",", "allow_token", "=", "key", "not", "in", "self", ".", "_require_quoting", ")", ",", ")", "for", "key", ",", "value", "in", "iteritems", "(", "d", ")", "]", ")", ",", ")" ]
[ 2574, 4 ]
[ 2592, 9 ]
python
en
['en', 'en', 'en']
True
WWWAuthenticate.auth_property
(name, doc=None)
A static helper function for subclasses to add extra authentication system properties onto a class:: class FooAuthenticate(WWWAuthenticate): special_realm = auth_property('special_realm') For more information have a look at the sourcecode to see how the regular properties (:attr:`realm` etc.) are implemented.
A static helper function for subclasses to add extra authentication system properties onto a class::
def auth_property(name, doc=None): # noqa: B902 """A static helper function for subclasses to add extra authentication system properties onto a class:: class FooAuthenticate(WWWAuthenticate): special_realm = auth_property('special_realm') For more information have a look at the sourcecode to see how the regular properties (:attr:`realm` etc.) are implemented. """ def _set_value(self, value): if value is None: self.pop(name, None) else: self[name] = str(value) return property(lambda x: x.get(name), _set_value, doc=doc)
[ "def", "auth_property", "(", "name", ",", "doc", "=", "None", ")", ":", "# noqa: B902", "def", "_set_value", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "self", ".", "pop", "(", "name", ",", "None", ")", "else", ":", "self", "[", "name", "]", "=", "str", "(", "value", ")", "return", "property", "(", "lambda", "x", ":", "x", ".", "get", "(", "name", ")", ",", "_set_value", ",", "doc", "=", "doc", ")" ]
[ 2600, 4 ]
[ 2617, 67 ]
python
en
['en', 'en', 'en']
True
WWWAuthenticate.stale
(self)
A flag, indicating that the previous request from the client was rejected because the nonce value was stale.
A flag, indicating that the previous request from the client was rejected because the nonce value was stale.
def stale(self): """A flag, indicating that the previous request from the client was rejected because the nonce value was stale. """ val = self.get("stale") if val is not None: return val.lower() == "true"
[ "def", "stale", "(", "self", ")", ":", "val", "=", "self", ".", "get", "(", "\"stale\"", ")", "if", "val", "is", "not", "None", ":", "return", "val", ".", "lower", "(", ")", "==", "\"true\"" ]
[ 2680, 4 ]
[ 2686, 40 ]
python
en
['en', 'en', 'en']
True
FileStorage.content_type
(self)
The content-type sent in the header. Usually not available
The content-type sent in the header. Usually not available
def content_type(self): """The content-type sent in the header. Usually not available""" return self.headers.get("content-type")
[ "def", "content_type", "(", "self", ")", ":", "return", "self", ".", "headers", ".", "get", "(", "\"content-type\"", ")" ]
[ 2749, 4 ]
[ 2751, 47 ]
python
en
['en', 'en', 'en']
True
FileStorage.content_length
(self)
The content-length sent in the header. Usually not available
The content-length sent in the header. Usually not available
def content_length(self): """The content-length sent in the header. Usually not available""" return int(self.headers.get("content-length") or 0)
[ "def", "content_length", "(", "self", ")", ":", "return", "int", "(", "self", ".", "headers", ".", "get", "(", "\"content-length\"", ")", "or", "0", ")" ]
[ 2754, 4 ]
[ 2756, 59 ]
python
en
['en', 'en', 'en']
True
FileStorage.mimetype
(self)
Like :attr:`content_type`, but without parameters (eg, without charset, type etc.) and always lowercase. For example if the content type is ``text/HTML; charset=utf-8`` the mimetype would be ``'text/html'``. .. versionadded:: 0.7
Like :attr:`content_type`, but without parameters (eg, without charset, type etc.) and always lowercase. For example if the content type is ``text/HTML; charset=utf-8`` the mimetype would be ``'text/html'``.
def mimetype(self): """Like :attr:`content_type`, but without parameters (eg, without charset, type etc.) and always lowercase. For example if the content type is ``text/HTML; charset=utf-8`` the mimetype would be ``'text/html'``. .. versionadded:: 0.7 """ self._parse_content_type() return self._parsed_content_type[0].lower()
[ "def", "mimetype", "(", "self", ")", ":", "self", ".", "_parse_content_type", "(", ")", "return", "self", ".", "_parsed_content_type", "[", "0", "]", ".", "lower", "(", ")" ]
[ 2759, 4 ]
[ 2768, 51 ]
python
en
['en', 'en', 'en']
True
FileStorage.mimetype_params
(self)
The mimetype parameters as dict. For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``. .. versionadded:: 0.7
The mimetype parameters as dict. For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``.
def mimetype_params(self): """The mimetype parameters as dict. For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``. .. versionadded:: 0.7 """ self._parse_content_type() return self._parsed_content_type[1]
[ "def", "mimetype_params", "(", "self", ")", ":", "self", ".", "_parse_content_type", "(", ")", "return", "self", ".", "_parsed_content_type", "[", "1", "]" ]
[ 2771, 4 ]
[ 2779, 43 ]
python
en
['en', 'en', 'en']
True
FileStorage.save
(self, dst, buffer_size=16384)
Save the file to a destination path or file object. If the destination is a file object you have to close it yourself after the call. The buffer size is the number of bytes held in memory during the copy process. It defaults to 16KB. For secure file saving also have a look at :func:`secure_filename`. :param dst: a filename or open file object the uploaded file is saved to. :param buffer_size: the size of the buffer. This works the same as the `length` parameter of :func:`shutil.copyfileobj`.
Save the file to a destination path or file object. If the destination is a file object you have to close it yourself after the call. The buffer size is the number of bytes held in memory during the copy process. It defaults to 16KB.
def save(self, dst, buffer_size=16384): """Save the file to a destination path or file object. If the destination is a file object you have to close it yourself after the call. The buffer size is the number of bytes held in memory during the copy process. It defaults to 16KB. For secure file saving also have a look at :func:`secure_filename`. :param dst: a filename or open file object the uploaded file is saved to. :param buffer_size: the size of the buffer. This works the same as the `length` parameter of :func:`shutil.copyfileobj`. """ from shutil import copyfileobj close_dst = False if isinstance(dst, string_types): dst = open(dst, "wb") close_dst = True try: copyfileobj(self.stream, dst, buffer_size) finally: if close_dst: dst.close()
[ "def", "save", "(", "self", ",", "dst", ",", "buffer_size", "=", "16384", ")", ":", "from", "shutil", "import", "copyfileobj", "close_dst", "=", "False", "if", "isinstance", "(", "dst", ",", "string_types", ")", ":", "dst", "=", "open", "(", "dst", ",", "\"wb\"", ")", "close_dst", "=", "True", "try", ":", "copyfileobj", "(", "self", ".", "stream", ",", "dst", ",", "buffer_size", ")", "finally", ":", "if", "close_dst", ":", "dst", ".", "close", "(", ")" ]
[ 2781, 4 ]
[ 2805, 27 ]
python
en
['en', 'en', 'en']
True
FileStorage.close
(self)
Close the underlying file if possible.
Close the underlying file if possible.
def close(self): """Close the underlying file if possible.""" try: self.stream.close() except Exception: pass
[ "def", "close", "(", "self", ")", ":", "try", ":", "self", ".", "stream", ".", "close", "(", ")", "except", "Exception", ":", "pass" ]
[ 2807, 4 ]
[ 2812, 16 ]
python
en
['en', 'en', 'en']
True
getiptcinfo
(im)
Get IPTC information from TIFF, JPEG, or IPTC file. :param im: An image containing IPTC data. :returns: A dictionary containing IPTC information, or None if no IPTC information block was found.
Get IPTC information from TIFF, JPEG, or IPTC file.
def getiptcinfo(im): """ Get IPTC information from TIFF, JPEG, or IPTC file. :param im: An image containing IPTC data. :returns: A dictionary containing IPTC information, or None if no IPTC information block was found. """ import io from . import JpegImagePlugin, TiffImagePlugin data = None if isinstance(im, IptcImageFile): # return info dictionary right away return im.info elif isinstance(im, JpegImagePlugin.JpegImageFile): # extract the IPTC/NAA resource photoshop = im.info.get("photoshop") if photoshop: data = photoshop.get(0x0404) elif isinstance(im, TiffImagePlugin.TiffImageFile): # get raw data from the IPTC/NAA tag (PhotoShop tags the data # as 4-byte integers, so we cannot use the get method...) try: data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] except (AttributeError, KeyError): pass if data is None: return None # no properties # create an IptcImagePlugin object without initializing it class FakeImage: pass im = FakeImage() im.__class__ = IptcImageFile # parse the IPTC information chunk im.info = {} im.fp = io.BytesIO(data) try: im._open() except (IndexError, KeyError): pass # expected failure return im.info
[ "def", "getiptcinfo", "(", "im", ")", ":", "import", "io", "from", ".", "import", "JpegImagePlugin", ",", "TiffImagePlugin", "data", "=", "None", "if", "isinstance", "(", "im", ",", "IptcImageFile", ")", ":", "# return info dictionary right away", "return", "im", ".", "info", "elif", "isinstance", "(", "im", ",", "JpegImagePlugin", ".", "JpegImageFile", ")", ":", "# extract the IPTC/NAA resource", "photoshop", "=", "im", ".", "info", ".", "get", "(", "\"photoshop\"", ")", "if", "photoshop", ":", "data", "=", "photoshop", ".", "get", "(", "0x0404", ")", "elif", "isinstance", "(", "im", ",", "TiffImagePlugin", ".", "TiffImageFile", ")", ":", "# get raw data from the IPTC/NAA tag (PhotoShop tags the data", "# as 4-byte integers, so we cannot use the get method...)", "try", ":", "data", "=", "im", ".", "tag", ".", "tagdata", "[", "TiffImagePlugin", ".", "IPTC_NAA_CHUNK", "]", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "pass", "if", "data", "is", "None", ":", "return", "None", "# no properties", "# create an IptcImagePlugin object without initializing it", "class", "FakeImage", ":", "pass", "im", "=", "FakeImage", "(", ")", "im", ".", "__class__", "=", "IptcImageFile", "# parse the IPTC information chunk", "im", ".", "info", "=", "{", "}", "im", ".", "fp", "=", "io", ".", "BytesIO", "(", "data", ")", "try", ":", "im", ".", "_open", "(", ")", "except", "(", "IndexError", ",", "KeyError", ")", ":", "pass", "# expected failure", "return", "im", ".", "info" ]
[ 178, 0 ]
[ 229, 18 ]
python
en
['en', 'error', 'th']
False
_get_project_id
()
Get project ID from default GCP connection.
Get project ID from default GCP connection.
def _get_project_id(): """Get project ID from default GCP connection.""" extras = BaseHook.get_connection('google_cloud_default').extra_dejson key = 'extra__google_cloud_platform__project' if key in extras: project_id = extras[key] else: raise ('Must configure project_id in google_cloud_default ' 'connection from Airflow Console') return project_id
[ "def", "_get_project_id", "(", ")", ":", "extras", "=", "BaseHook", ".", "get_connection", "(", "'google_cloud_default'", ")", ".", "extra_dejson", "key", "=", "'extra__google_cloud_platform__project'", "if", "key", "in", "extras", ":", "project_id", "=", "extras", "[", "key", "]", "else", ":", "raise", "(", "'Must configure project_id in google_cloud_default '", "'connection from Airflow Console'", ")", "return", "project_id" ]
[ 34, 0 ]
[ 44, 19 ]
python
en
['en', 'en', 'en']
True
unpackb
(packed, **kwargs)
Unpack an object from `packed`. Raises ``ExtraData`` when *packed* contains extra bytes. Raises ``ValueError`` when *packed* is incomplete. Raises ``FormatError`` when *packed* is not valid msgpack. Raises ``StackError`` when *packed* contains too nested. Other exceptions can be raised during unpacking. See :class:`Unpacker` for options.
Unpack an object from `packed`.
def unpackb(packed, **kwargs): """ Unpack an object from `packed`. Raises ``ExtraData`` when *packed* contains extra bytes. Raises ``ValueError`` when *packed* is incomplete. Raises ``FormatError`` when *packed* is not valid msgpack. Raises ``StackError`` when *packed* contains too nested. Other exceptions can be raised during unpacking. See :class:`Unpacker` for options. """ unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) unpacker.feed(packed) try: ret = unpacker._unpack() except OutOfData: raise ValueError("Unpack failed: incomplete input") except RecursionError as e: if _is_recursionerror(e): raise StackError raise if unpacker._got_extradata(): raise ExtraData(ret, unpacker._get_extradata()) return ret
[ "def", "unpackb", "(", "packed", ",", "*", "*", "kwargs", ")", ":", "unpacker", "=", "Unpacker", "(", "None", ",", "max_buffer_size", "=", "len", "(", "packed", ")", ",", "*", "*", "kwargs", ")", "unpacker", ".", "feed", "(", "packed", ")", "try", ":", "ret", "=", "unpacker", ".", "_unpack", "(", ")", "except", "OutOfData", ":", "raise", "ValueError", "(", "\"Unpack failed: incomplete input\"", ")", "except", "RecursionError", "as", "e", ":", "if", "_is_recursionerror", "(", "e", ")", ":", "raise", "StackError", "raise", "if", "unpacker", ".", "_got_extradata", "(", ")", ":", "raise", "ExtraData", "(", "ret", ",", "unpacker", ".", "_get_extradata", "(", ")", ")", "return", "ret" ]
[ 113, 0 ]
[ 137, 14 ]
python
en
['en', 'error', 'th']
False
Unpacker._consume
(self)
Gets rid of the used parts of the buffer.
Gets rid of the used parts of the buffer.
def _consume(self): """ Gets rid of the used parts of the buffer. """ self._stream_offset += self._buff_i - self._buf_checkpoint self._buf_checkpoint = self._buff_i
[ "def", "_consume", "(", "self", ")", ":", "self", ".", "_stream_offset", "+=", "self", ".", "_buff_i", "-", "self", ".", "_buf_checkpoint", "self", ".", "_buf_checkpoint", "=", "self", ".", "_buff_i" ]
[ 355, 4 ]
[ 358, 43 ]
python
en
['en', 'en', 'en']
True
Packer.bytes
(self)
Return internal buffer contents as bytes object
Return internal buffer contents as bytes object
def bytes(self): """Return internal buffer contents as bytes object""" return self._buffer.getvalue()
[ "def", "bytes", "(", "self", ")", ":", "return", "self", ".", "_buffer", ".", "getvalue", "(", ")" ]
[ 1070, 4 ]
[ 1072, 38 ]
python
en
['en', 'lb', 'en']
True
Packer.reset
(self)
Reset internal buffer. This method is useful only when autoreset=False.
Reset internal buffer.
def reset(self): """Reset internal buffer. This method is useful only when autoreset=False. """ self._buffer = StringIO()
[ "def", "reset", "(", "self", ")", ":", "self", ".", "_buffer", "=", "StringIO", "(", ")" ]
[ 1074, 4 ]
[ 1079, 33 ]
python
en
['en', 'lb', 'en']
True
Packer.getbuffer
(self)
Return view of internal buffer.
Return view of internal buffer.
def getbuffer(self): """Return view of internal buffer.""" if USING_STRINGBUILDER or PY2: return memoryview(self.bytes()) else: return self._buffer.getbuffer()
[ "def", "getbuffer", "(", "self", ")", ":", "if", "USING_STRINGBUILDER", "or", "PY2", ":", "return", "memoryview", "(", "self", ".", "bytes", "(", ")", ")", "else", ":", "return", "self", ".", "_buffer", ".", "getbuffer", "(", ")" ]
[ 1081, 4 ]
[ 1086, 43 ]
python
en
['en', 'lb', 'en']
True
dense_encoder
(X, params)
Dense model encoder subgraph that produces latent matrix. Given data matrix tensor X and dictionary of parameters, process through dense model encoder subgraph and return encoder latent vector for each example in batch. Args: X: tf.float64 matrix tensor of input data. params: Dictionary of parameters. Returns: tf.float64 matrix tensor encoder latent vector for each example in batch.
Dense model encoder subgraph that produces latent matrix.
def dense_encoder(X, params): """Dense model encoder subgraph that produces latent matrix. Given data matrix tensor X and dictionary of parameters, process through dense model encoder subgraph and return encoder latent vector for each example in batch. Args: X: tf.float64 matrix tensor of input data. params: Dictionary of parameters. Returns: tf.float64 matrix tensor encoder latent vector for each example in batch. """ # Create the input layer to our DNN network = X # Add hidden layers with the given number of units/neurons per layer for units in params["enc_dnn_hidden_units"]: network = tf.layers.dense( inputs=network, units=units, activation=tf.nn.relu) latent_matrix = tf.layers.dense( inputs=network, units=params["latent_vector_size"], activation=tf.nn.relu) return latent_matrix
[ "def", "dense_encoder", "(", "X", ",", "params", ")", ":", "# Create the input layer to our DNN", "network", "=", "X", "# Add hidden layers with the given number of units/neurons per layer", "for", "units", "in", "params", "[", "\"enc_dnn_hidden_units\"", "]", ":", "network", "=", "tf", ".", "layers", ".", "dense", "(", "inputs", "=", "network", ",", "units", "=", "units", ",", "activation", "=", "tf", ".", "nn", ".", "relu", ")", "latent_matrix", "=", "tf", ".", "layers", ".", "dense", "(", "inputs", "=", "network", ",", "units", "=", "params", "[", "\"latent_vector_size\"", "]", ",", "activation", "=", "tf", ".", "nn", ".", "relu", ")", "return", "latent_matrix" ]
[ 4, 0 ]
[ 33, 22 ]
python
en
['en', 'fr', 'en']
True
dense_decoder
(latent_matrix, orig_dims, params)
Dense model decoder subgraph that produces output matrix. Given encoder latent matrix tensor, the original dimensions of the input, and dictionary of parameters, process through dense model decoder subgraph and return decoder output matrix. Args: latent_matrix: tf.float64 matrix tensor of encoder latent matrix. orig_dims: Original dimensions of input data. params: Dictionary of parameters. Returns: tf.float64 matrix tensor decoder output vector for each example in batch.
Dense model decoder subgraph that produces output matrix.
def dense_decoder(latent_matrix, orig_dims, params): """Dense model decoder subgraph that produces output matrix. Given encoder latent matrix tensor, the original dimensions of the input, and dictionary of parameters, process through dense model decoder subgraph and return decoder output matrix. Args: latent_matrix: tf.float64 matrix tensor of encoder latent matrix. orig_dims: Original dimensions of input data. params: Dictionary of parameters. Returns: tf.float64 matrix tensor decoder output vector for each example in batch. """ # Create the input layer to our DNN network = latent_matrix # Add hidden layers with the given number of units/neurons per layer for units in params["dec_dnn_hidden_units"][::-1]: network = tf.layers.dense( inputs=network, units=units, activation=tf.nn.relu) output_matrix = tf.layers.dense( inputs=network, units=orig_dims, activation=tf.nn.relu) return output_matrix
[ "def", "dense_decoder", "(", "latent_matrix", ",", "orig_dims", ",", "params", ")", ":", "# Create the input layer to our DNN", "network", "=", "latent_matrix", "# Add hidden layers with the given number of units/neurons per layer", "for", "units", "in", "params", "[", "\"dec_dnn_hidden_units\"", "]", "[", ":", ":", "-", "1", "]", ":", "network", "=", "tf", ".", "layers", ".", "dense", "(", "inputs", "=", "network", ",", "units", "=", "units", ",", "activation", "=", "tf", ".", "nn", ".", "relu", ")", "output_matrix", "=", "tf", ".", "layers", ".", "dense", "(", "inputs", "=", "network", ",", "units", "=", "orig_dims", ",", "activation", "=", "tf", ".", "nn", ".", "relu", ")", "return", "output_matrix" ]
[ 36, 0 ]
[ 66, 22 ]
python
en
['en', 'fr', 'en']
True
dense_autoencoder
(X, orig_dims, params)
Dense model autoencoder using dense encoder and decoder networks. Given data matrix tensor X, the original dimensions of the input, and dictionary of parameters, process through dense model encoder and decoder subgraphs and return reconstructed inputs as output. Args: X: tf.float64 matrix tensor of input data. orig_dims: Original dimensions of input data. params: Dictionary of parameters. Returns: tf.float64 matrix tensor decoder output vector for each example in batch that is the reconstructed inputs.
Dense model autoencoder using dense encoder and decoder networks.
def dense_autoencoder(X, orig_dims, params): """Dense model autoencoder using dense encoder and decoder networks. Given data matrix tensor X, the original dimensions of the input, and dictionary of parameters, process through dense model encoder and decoder subgraphs and return reconstructed inputs as output. Args: X: tf.float64 matrix tensor of input data. orig_dims: Original dimensions of input data. params: Dictionary of parameters. Returns: tf.float64 matrix tensor decoder output vector for each example in batch that is the reconstructed inputs. """ latent_matrix = dense_encoder(X, params) output_matrix = dense_decoder(latent_matrix, orig_dims, params) return output_matrix
[ "def", "dense_autoencoder", "(", "X", ",", "orig_dims", ",", "params", ")", ":", "latent_matrix", "=", "dense_encoder", "(", "X", ",", "params", ")", "output_matrix", "=", "dense_decoder", "(", "latent_matrix", ",", "orig_dims", ",", "params", ")", "return", "output_matrix" ]
[ 69, 0 ]
[ 88, 22 ]
python
da
['da', 'no', 'en']
False
dense_autoencoder_model
( X, mode, params, cur_batch_size, dummy_var)
Dense autoencoder to reconstruct inputs and minimize reconstruction error. Given data matrix tensor X, the current Estimator mode, the dictionary of parameters, and the current batch size, process through dense model encoder and decoder subgraphs and return reconstructed inputs as output. Args: X: tf.float64 matrix tensor of input data. mode: Estimator ModeKeys. Can take values of TRAIN, EVAL, and PREDICT. params: Dictionary of parameters. cur_batch_size: Current batch size, could be partially filled. dummy_var: Dummy variable used to allow training mode to happen since it requires a gradient to tie back to the graph dependency. Returns: loss: Reconstruction loss. train_op: Train operation so that Estimator can correctly add to dependency graph. X_time: 2D tensor representation of time major input data. X_time_recon: 2D tensor representation of time major input data. X_feat: 2D tensor representation of feature major input data. X_feat_recon: 2D tensor representation of feature major input data.
Dense autoencoder to reconstruct inputs and minimize reconstruction error.
def dense_autoencoder_model( X, mode, params, cur_batch_size, dummy_var): """Dense autoencoder to reconstruct inputs and minimize reconstruction error. Given data matrix tensor X, the current Estimator mode, the dictionary of parameters, and the current batch size, process through dense model encoder and decoder subgraphs and return reconstructed inputs as output. Args: X: tf.float64 matrix tensor of input data. mode: Estimator ModeKeys. Can take values of TRAIN, EVAL, and PREDICT. params: Dictionary of parameters. cur_batch_size: Current batch size, could be partially filled. dummy_var: Dummy variable used to allow training mode to happen since it requires a gradient to tie back to the graph dependency. Returns: loss: Reconstruction loss. train_op: Train operation so that Estimator can correctly add to dependency graph. X_time: 2D tensor representation of time major input data. X_time_recon: 2D tensor representation of time major input data. X_feat: 2D tensor representation of feature major input data. X_feat_recon: 2D tensor representation of feature major input data. 
""" # Reshape into 2-D tensors # Time based # shape = (cur_batch_size * seq_len, num_feat) X_time = tf.reshape( tensor=X, shape=[cur_batch_size * params["seq_len"], params["num_feat"]]) # shape = (cur_batch_size * seq_len, num_feat) X_time_recon = dense_autoencoder(X_time, params["num_feat"], params) # Features based # shape = (cur_batch_size, num_feat, seq_len) X_transposed = tf.transpose(a=X, perm=[0, 2, 1]) # shape = (cur_batch_size * num_feat, seq_len) X_feat = tf.reshape( tensor=X_transposed, shape=[cur_batch_size * params["num_feat"], params["seq_len"]]) # shape = (cur_batch_size * num_feat, seq_len) X_feat_recon = dense_autoencoder(X_feat, params["seq_len"], params) if (mode == tf.estimator.ModeKeys.TRAIN and params["training_mode"] == "reconstruction"): X_time_recon_3d = tf.reshape( tensor=X_time_recon, shape=[cur_batch_size, params["seq_len"], params["num_feat"]]) X_feat_recon_3d = tf.transpose( a=tf.reshape( tensor=X_feat_recon, shape=[cur_batch_size, params["num_feat"], params["seq_len"]]), perm=[0, 2, 1]) X_time_recon_3d_weighted = X_time_recon_3d * params["time_loss_weight"] X_feat_recon_3d_weighted = X_feat_recon_3d * params["feat_loss_weight"] predictions = (X_time_recon_3d_weighted + X_feat_recon_3d_weighted) \ / (params["time_loss_weight"] + params["feat_loss_weight"]) loss = tf.losses.mean_squared_error(labels=X, predictions=predictions) train_op = tf.contrib.layers.optimize_loss( loss=loss, global_step=tf.train.get_global_step(), learning_rate=params["learning_rate"], optimizer="Adam") return loss, train_op, None, None, None, None else: return None, None, X_time, X_time_recon, X_feat, X_feat_recon
[ "def", "dense_autoencoder_model", "(", "X", ",", "mode", ",", "params", ",", "cur_batch_size", ",", "dummy_var", ")", ":", "# Reshape into 2-D tensors", "# Time based", "# shape = (cur_batch_size * seq_len, num_feat)", "X_time", "=", "tf", ".", "reshape", "(", "tensor", "=", "X", ",", "shape", "=", "[", "cur_batch_size", "*", "params", "[", "\"seq_len\"", "]", ",", "params", "[", "\"num_feat\"", "]", "]", ")", "# shape = (cur_batch_size * seq_len, num_feat)", "X_time_recon", "=", "dense_autoencoder", "(", "X_time", ",", "params", "[", "\"num_feat\"", "]", ",", "params", ")", "# Features based", "# shape = (cur_batch_size, num_feat, seq_len)", "X_transposed", "=", "tf", ".", "transpose", "(", "a", "=", "X", ",", "perm", "=", "[", "0", ",", "2", ",", "1", "]", ")", "# shape = (cur_batch_size * num_feat, seq_len)", "X_feat", "=", "tf", ".", "reshape", "(", "tensor", "=", "X_transposed", ",", "shape", "=", "[", "cur_batch_size", "*", "params", "[", "\"num_feat\"", "]", ",", "params", "[", "\"seq_len\"", "]", "]", ")", "# shape = (cur_batch_size * num_feat, seq_len)", "X_feat_recon", "=", "dense_autoencoder", "(", "X_feat", ",", "params", "[", "\"seq_len\"", "]", ",", "params", ")", "if", "(", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "TRAIN", "and", "params", "[", "\"training_mode\"", "]", "==", "\"reconstruction\"", ")", ":", "X_time_recon_3d", "=", "tf", ".", "reshape", "(", "tensor", "=", "X_time_recon", ",", "shape", "=", "[", "cur_batch_size", ",", "params", "[", "\"seq_len\"", "]", ",", "params", "[", "\"num_feat\"", "]", "]", ")", "X_feat_recon_3d", "=", "tf", ".", "transpose", "(", "a", "=", "tf", ".", "reshape", "(", "tensor", "=", "X_feat_recon", ",", "shape", "=", "[", "cur_batch_size", ",", "params", "[", "\"num_feat\"", "]", ",", "params", "[", "\"seq_len\"", "]", "]", ")", ",", "perm", "=", "[", "0", ",", "2", ",", "1", "]", ")", "X_time_recon_3d_weighted", "=", "X_time_recon_3d", "*", "params", "[", "\"time_loss_weight\"", "]", 
"X_feat_recon_3d_weighted", "=", "X_feat_recon_3d", "*", "params", "[", "\"feat_loss_weight\"", "]", "predictions", "=", "(", "X_time_recon_3d_weighted", "+", "X_feat_recon_3d_weighted", ")", "/", "(", "params", "[", "\"time_loss_weight\"", "]", "+", "params", "[", "\"feat_loss_weight\"", "]", ")", "loss", "=", "tf", ".", "losses", ".", "mean_squared_error", "(", "labels", "=", "X", ",", "predictions", "=", "predictions", ")", "train_op", "=", "tf", ".", "contrib", ".", "layers", ".", "optimize_loss", "(", "loss", "=", "loss", ",", "global_step", "=", "tf", ".", "train", ".", "get_global_step", "(", ")", ",", "learning_rate", "=", "params", "[", "\"learning_rate\"", "]", ",", "optimizer", "=", "\"Adam\"", ")", "return", "loss", ",", "train_op", ",", "None", ",", "None", ",", "None", ",", "None", "else", ":", "return", "None", ",", "None", ",", "X_time", ",", "X_time_recon", ",", "X_feat", ",", "X_feat_recon" ]
[ 91, 0 ]
[ 165, 65 ]
python
en
['en', 'en', 'en']
True
_script_names
(dist: Distribution, script_name: str, is_gui: bool)
Create the fully qualified name of the files created by {console,gui}_scripts for the given ``dist``. Returns the list of file names
Create the fully qualified name of the files created by {console,gui}_scripts for the given ``dist``. Returns the list of file names
def _script_names(dist: Distribution, script_name: str, is_gui: bool) -> List[str]: """Create the fully qualified name of the files created by {console,gui}_scripts for the given ``dist``. Returns the list of file names """ if dist_in_usersite(dist): bin_dir = get_bin_user() else: bin_dir = get_bin_prefix() exe_name = os.path.join(bin_dir, script_name) paths_to_remove = [exe_name] if WINDOWS: paths_to_remove.append(exe_name + '.exe') paths_to_remove.append(exe_name + '.exe.manifest') if is_gui: paths_to_remove.append(exe_name + '-script.pyw') else: paths_to_remove.append(exe_name + '-script.py') return paths_to_remove
[ "def", "_script_names", "(", "dist", ":", "Distribution", ",", "script_name", ":", "str", ",", "is_gui", ":", "bool", ")", "->", "List", "[", "str", "]", ":", "if", "dist_in_usersite", "(", "dist", ")", ":", "bin_dir", "=", "get_bin_user", "(", ")", "else", ":", "bin_dir", "=", "get_bin_prefix", "(", ")", "exe_name", "=", "os", ".", "path", ".", "join", "(", "bin_dir", ",", "script_name", ")", "paths_to_remove", "=", "[", "exe_name", "]", "if", "WINDOWS", ":", "paths_to_remove", ".", "append", "(", "exe_name", "+", "'.exe'", ")", "paths_to_remove", ".", "append", "(", "exe_name", "+", "'.exe.manifest'", ")", "if", "is_gui", ":", "paths_to_remove", ".", "append", "(", "exe_name", "+", "'-script.pyw'", ")", "else", ":", "paths_to_remove", ".", "append", "(", "exe_name", "+", "'-script.py'", ")", "return", "paths_to_remove" ]
[ 30, 0 ]
[ 48, 26 ]
python
en
['en', 'en', 'en']
True
uninstallation_paths
(dist: Distribution)
Yield all the uninstallation paths for dist based on RECORD-without-.py[co] Yield paths to all the files in RECORD. For each .py file in RECORD, add the .pyc and .pyo in the same directory. UninstallPathSet.add() takes care of the __pycache__ .py[co]. If RECORD is not found, raises UninstallationError, with possible information from the INSTALLER file. https://packaging.python.org/specifications/recording-installed-packages/
Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
def uninstallation_paths(dist: Distribution) -> Iterator[str]: """ Yield all the uninstallation paths for dist based on RECORD-without-.py[co] Yield paths to all the files in RECORD. For each .py file in RECORD, add the .pyc and .pyo in the same directory. UninstallPathSet.add() takes care of the __pycache__ .py[co]. If RECORD is not found, raises UninstallationError, with possible information from the INSTALLER file. https://packaging.python.org/specifications/recording-installed-packages/ """ try: r = csv.reader(dist.get_metadata_lines('RECORD')) except FileNotFoundError as missing_record_exception: msg = 'Cannot uninstall {dist}, RECORD file not found.'.format(dist=dist) try: installer = next(dist.get_metadata_lines('INSTALLER')) if not installer or installer == 'pip': raise ValueError() except (OSError, StopIteration, ValueError): dep = '{}=={}'.format(dist.project_name, dist.version) msg += (" You might be able to recover from this via: " "'pip install --force-reinstall --no-deps {}'.".format(dep)) else: msg += ' Hint: The package was installed by {}.'.format(installer) raise UninstallationError(msg) from missing_record_exception for row in r: path = os.path.join(dist.location, row[0]) yield path if path.endswith('.py'): dn, fn = os.path.split(path) base = fn[:-3] path = os.path.join(dn, base + '.pyc') yield path path = os.path.join(dn, base + '.pyo') yield path
[ "def", "uninstallation_paths", "(", "dist", ":", "Distribution", ")", "->", "Iterator", "[", "str", "]", ":", "try", ":", "r", "=", "csv", ".", "reader", "(", "dist", ".", "get_metadata_lines", "(", "'RECORD'", ")", ")", "except", "FileNotFoundError", "as", "missing_record_exception", ":", "msg", "=", "'Cannot uninstall {dist}, RECORD file not found.'", ".", "format", "(", "dist", "=", "dist", ")", "try", ":", "installer", "=", "next", "(", "dist", ".", "get_metadata_lines", "(", "'INSTALLER'", ")", ")", "if", "not", "installer", "or", "installer", "==", "'pip'", ":", "raise", "ValueError", "(", ")", "except", "(", "OSError", ",", "StopIteration", ",", "ValueError", ")", ":", "dep", "=", "'{}=={}'", ".", "format", "(", "dist", ".", "project_name", ",", "dist", ".", "version", ")", "msg", "+=", "(", "\" You might be able to recover from this via: \"", "\"'pip install --force-reinstall --no-deps {}'.\"", ".", "format", "(", "dep", ")", ")", "else", ":", "msg", "+=", "' Hint: The package was installed by {}.'", ".", "format", "(", "installer", ")", "raise", "UninstallationError", "(", "msg", ")", "from", "missing_record_exception", "for", "row", "in", "r", ":", "path", "=", "os", ".", "path", ".", "join", "(", "dist", ".", "location", ",", "row", "[", "0", "]", ")", "yield", "path", "if", "path", ".", "endswith", "(", "'.py'", ")", ":", "dn", ",", "fn", "=", "os", ".", "path", ".", "split", "(", "path", ")", "base", "=", "fn", "[", ":", "-", "3", "]", "path", "=", "os", ".", "path", ".", "join", "(", "dn", ",", "base", "+", "'.pyc'", ")", "yield", "path", "path", "=", "os", ".", "path", ".", "join", "(", "dn", ",", "base", "+", "'.pyo'", ")", "yield", "path" ]
[ 63, 0 ]
[ 101, 22 ]
python
en
['en', 'error', 'th']
False
compact
(paths: Iterable[str])
Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.
Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.
def compact(paths: Iterable[str]) -> Set[str]: """Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.""" sep = os.path.sep short_paths: Set[str] = set() for path in sorted(paths, key=len): should_skip = any( path.startswith(shortpath.rstrip("*")) and path[len(shortpath.rstrip("*").rstrip(sep))] == sep for shortpath in short_paths ) if not should_skip: short_paths.add(path) return short_paths
[ "def", "compact", "(", "paths", ":", "Iterable", "[", "str", "]", ")", "->", "Set", "[", "str", "]", ":", "sep", "=", "os", ".", "path", ".", "sep", "short_paths", ":", "Set", "[", "str", "]", "=", "set", "(", ")", "for", "path", "in", "sorted", "(", "paths", ",", "key", "=", "len", ")", ":", "should_skip", "=", "any", "(", "path", ".", "startswith", "(", "shortpath", ".", "rstrip", "(", "\"*\"", ")", ")", "and", "path", "[", "len", "(", "shortpath", ".", "rstrip", "(", "\"*\"", ")", ".", "rstrip", "(", "sep", ")", ")", "]", "==", "sep", "for", "shortpath", "in", "short_paths", ")", "if", "not", "should_skip", ":", "short_paths", ".", "add", "(", "path", ")", "return", "short_paths" ]
[ 104, 0 ]
[ 120, 22 ]
python
en
['en', 'en', 'en']
True
compress_for_rename
(paths: Iterable[str])
Returns a set containing the paths that need to be renamed. This set may include directories when the original sequence of paths included every file on disk.
Returns a set containing the paths that need to be renamed.
def compress_for_rename(paths: Iterable[str]) -> Set[str]: """Returns a set containing the paths that need to be renamed. This set may include directories when the original sequence of paths included every file on disk. """ case_map = {os.path.normcase(p): p for p in paths} remaining = set(case_map) unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len) wildcards: Set[str] = set() def norm_join(*a: str) -> str: return os.path.normcase(os.path.join(*a)) for root in unchecked: if any(os.path.normcase(root).startswith(w) for w in wildcards): # This directory has already been handled. continue all_files: Set[str] = set() all_subdirs: Set[str] = set() for dirname, subdirs, files in os.walk(root): all_subdirs.update(norm_join(root, dirname, d) for d in subdirs) all_files.update(norm_join(root, dirname, f) for f in files) # If all the files we found are in our remaining set of files to # remove, then remove them from the latter set and add a wildcard # for the directory. if not (all_files - remaining): remaining.difference_update(all_files) wildcards.add(root + os.sep) return set(map(case_map.__getitem__, remaining)) | wildcards
[ "def", "compress_for_rename", "(", "paths", ":", "Iterable", "[", "str", "]", ")", "->", "Set", "[", "str", "]", ":", "case_map", "=", "{", "os", ".", "path", ".", "normcase", "(", "p", ")", ":", "p", "for", "p", "in", "paths", "}", "remaining", "=", "set", "(", "case_map", ")", "unchecked", "=", "sorted", "(", "{", "os", ".", "path", ".", "split", "(", "p", ")", "[", "0", "]", "for", "p", "in", "case_map", ".", "values", "(", ")", "}", ",", "key", "=", "len", ")", "wildcards", ":", "Set", "[", "str", "]", "=", "set", "(", ")", "def", "norm_join", "(", "*", "a", ":", "str", ")", "->", "str", ":", "return", "os", ".", "path", ".", "normcase", "(", "os", ".", "path", ".", "join", "(", "*", "a", ")", ")", "for", "root", "in", "unchecked", ":", "if", "any", "(", "os", ".", "path", ".", "normcase", "(", "root", ")", ".", "startswith", "(", "w", ")", "for", "w", "in", "wildcards", ")", ":", "# This directory has already been handled.", "continue", "all_files", ":", "Set", "[", "str", "]", "=", "set", "(", ")", "all_subdirs", ":", "Set", "[", "str", "]", "=", "set", "(", ")", "for", "dirname", ",", "subdirs", ",", "files", "in", "os", ".", "walk", "(", "root", ")", ":", "all_subdirs", ".", "update", "(", "norm_join", "(", "root", ",", "dirname", ",", "d", ")", "for", "d", "in", "subdirs", ")", "all_files", ".", "update", "(", "norm_join", "(", "root", ",", "dirname", ",", "f", ")", "for", "f", "in", "files", ")", "# If all the files we found are in our remaining set of files to", "# remove, then remove them from the latter set and add a wildcard", "# for the directory.", "if", "not", "(", "all_files", "-", "remaining", ")", ":", "remaining", ".", "difference_update", "(", "all_files", ")", "wildcards", ".", "add", "(", "root", "+", "os", ".", "sep", ")", "return", "set", "(", "map", "(", "case_map", ".", "__getitem__", ",", "remaining", ")", ")", "|", "wildcards" ]
[ 123, 0 ]
[ 157, 64 ]
python
en
['en', 'en', 'en']
True
compress_for_output_listing
(paths: Iterable[str])
Returns a tuple of 2 sets of which paths to display to user The first set contains paths that would be deleted. Files of a package are not added and the top-level directory of the package has a '*' added at the end - to signify that all it's contents are removed. The second set contains files that would have been skipped in the above folders.
Returns a tuple of 2 sets of which paths to display to user
def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]: """Returns a tuple of 2 sets of which paths to display to user The first set contains paths that would be deleted. Files of a package are not added and the top-level directory of the package has a '*' added at the end - to signify that all it's contents are removed. The second set contains files that would have been skipped in the above folders. """ will_remove = set(paths) will_skip = set() # Determine folders and files folders = set() files = set() for path in will_remove: if path.endswith(".pyc"): continue if path.endswith("__init__.py") or ".dist-info" in path: folders.add(os.path.dirname(path)) files.add(path) # probably this one https://github.com/python/mypy/issues/390 _normcased_files = set(map(os.path.normcase, files)) # type: ignore folders = compact(folders) # This walks the tree using os.walk to not miss extra folders # that might get added. for folder in folders: for dirpath, _, dirfiles in os.walk(folder): for fname in dirfiles: if fname.endswith(".pyc"): continue file_ = os.path.join(dirpath, fname) if (os.path.isfile(file_) and os.path.normcase(file_) not in _normcased_files): # We are skipping this file. Add it to the set. will_skip.add(file_) will_remove = files | { os.path.join(folder, "*") for folder in folders } return will_remove, will_skip
[ "def", "compress_for_output_listing", "(", "paths", ":", "Iterable", "[", "str", "]", ")", "->", "Tuple", "[", "Set", "[", "str", "]", ",", "Set", "[", "str", "]", "]", ":", "will_remove", "=", "set", "(", "paths", ")", "will_skip", "=", "set", "(", ")", "# Determine folders and files", "folders", "=", "set", "(", ")", "files", "=", "set", "(", ")", "for", "path", "in", "will_remove", ":", "if", "path", ".", "endswith", "(", "\".pyc\"", ")", ":", "continue", "if", "path", ".", "endswith", "(", "\"__init__.py\"", ")", "or", "\".dist-info\"", "in", "path", ":", "folders", ".", "add", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ")", "files", ".", "add", "(", "path", ")", "# probably this one https://github.com/python/mypy/issues/390", "_normcased_files", "=", "set", "(", "map", "(", "os", ".", "path", ".", "normcase", ",", "files", ")", ")", "# type: ignore", "folders", "=", "compact", "(", "folders", ")", "# This walks the tree using os.walk to not miss extra folders", "# that might get added.", "for", "folder", "in", "folders", ":", "for", "dirpath", ",", "_", ",", "dirfiles", "in", "os", ".", "walk", "(", "folder", ")", ":", "for", "fname", "in", "dirfiles", ":", "if", "fname", ".", "endswith", "(", "\".pyc\"", ")", ":", "continue", "file_", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "fname", ")", "if", "(", "os", ".", "path", ".", "isfile", "(", "file_", ")", "and", "os", ".", "path", ".", "normcase", "(", "file_", ")", "not", "in", "_normcased_files", ")", ":", "# We are skipping this file. Add it to the set.", "will_skip", ".", "add", "(", "file_", ")", "will_remove", "=", "files", "|", "{", "os", ".", "path", ".", "join", "(", "folder", ",", "\"*\"", ")", "for", "folder", "in", "folders", "}", "return", "will_remove", ",", "will_skip" ]
[ 160, 0 ]
[ 207, 33 ]
python
en
['en', 'en', 'en']
True
StashedUninstallPathSet._get_directory_stash
(self, path: str)
Stashes a directory. Directories are stashed adjacent to their original location if possible, or else moved/copied into the user's temp dir.
Stashes a directory.
def _get_directory_stash(self, path: str) -> str: """Stashes a directory. Directories are stashed adjacent to their original location if possible, or else moved/copied into the user's temp dir.""" try: save_dir: TempDirectory = AdjacentTempDirectory(path) except OSError: save_dir = TempDirectory(kind="uninstall") self._save_dirs[os.path.normcase(path)] = save_dir return save_dir.path
[ "def", "_get_directory_stash", "(", "self", ",", "path", ":", "str", ")", "->", "str", ":", "try", ":", "save_dir", ":", "TempDirectory", "=", "AdjacentTempDirectory", "(", "path", ")", "except", "OSError", ":", "save_dir", "=", "TempDirectory", "(", "kind", "=", "\"uninstall\"", ")", "self", ".", "_save_dirs", "[", "os", ".", "path", ".", "normcase", "(", "path", ")", "]", "=", "save_dir", "return", "save_dir", ".", "path" ]
[ 221, 4 ]
[ 233, 28 ]
python
en
['en', 'en', 'en']
True
StashedUninstallPathSet._get_file_stash
(self, path: str)
Stashes a file. If no root has been provided, one will be created for the directory in the user's temp directory.
Stashes a file.
def _get_file_stash(self, path: str) -> str: """Stashes a file. If no root has been provided, one will be created for the directory in the user's temp directory.""" path = os.path.normcase(path) head, old_head = os.path.dirname(path), None save_dir = None while head != old_head: try: save_dir = self._save_dirs[head] break except KeyError: pass head, old_head = os.path.dirname(head), head else: # Did not find any suitable root head = os.path.dirname(path) save_dir = TempDirectory(kind='uninstall') self._save_dirs[head] = save_dir relpath = os.path.relpath(path, head) if relpath and relpath != os.path.curdir: return os.path.join(save_dir.path, relpath) return save_dir.path
[ "def", "_get_file_stash", "(", "self", ",", "path", ":", "str", ")", "->", "str", ":", "path", "=", "os", ".", "path", ".", "normcase", "(", "path", ")", "head", ",", "old_head", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", ",", "None", "save_dir", "=", "None", "while", "head", "!=", "old_head", ":", "try", ":", "save_dir", "=", "self", ".", "_save_dirs", "[", "head", "]", "break", "except", "KeyError", ":", "pass", "head", ",", "old_head", "=", "os", ".", "path", ".", "dirname", "(", "head", ")", ",", "head", "else", ":", "# Did not find any suitable root", "head", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "save_dir", "=", "TempDirectory", "(", "kind", "=", "'uninstall'", ")", "self", ".", "_save_dirs", "[", "head", "]", "=", "save_dir", "relpath", "=", "os", ".", "path", ".", "relpath", "(", "path", ",", "head", ")", "if", "relpath", "and", "relpath", "!=", "os", ".", "path", ".", "curdir", ":", "return", "os", ".", "path", ".", "join", "(", "save_dir", ".", "path", ",", "relpath", ")", "return", "save_dir", ".", "path" ]
[ 235, 4 ]
[ 260, 28 ]
python
en
['en', 'en', 'en']
True
StashedUninstallPathSet.stash
(self, path: str)
Stashes the directory or file and returns its new location. Handle symlinks as files to avoid modifying the symlink targets.
Stashes the directory or file and returns its new location. Handle symlinks as files to avoid modifying the symlink targets.
def stash(self, path: str) -> str: """Stashes the directory or file and returns its new location. Handle symlinks as files to avoid modifying the symlink targets. """ path_is_dir = os.path.isdir(path) and not os.path.islink(path) if path_is_dir: new_path = self._get_directory_stash(path) else: new_path = self._get_file_stash(path) self._moves.append((path, new_path)) if (path_is_dir and os.path.isdir(new_path)): # If we're moving a directory, we need to # remove the destination first or else it will be # moved to inside the existing directory. # We just created new_path ourselves, so it will # be removable. os.rmdir(new_path) renames(path, new_path) return new_path
[ "def", "stash", "(", "self", ",", "path", ":", "str", ")", "->", "str", ":", "path_is_dir", "=", "os", ".", "path", ".", "isdir", "(", "path", ")", "and", "not", "os", ".", "path", ".", "islink", "(", "path", ")", "if", "path_is_dir", ":", "new_path", "=", "self", ".", "_get_directory_stash", "(", "path", ")", "else", ":", "new_path", "=", "self", ".", "_get_file_stash", "(", "path", ")", "self", ".", "_moves", ".", "append", "(", "(", "path", ",", "new_path", ")", ")", "if", "(", "path_is_dir", "and", "os", ".", "path", ".", "isdir", "(", "new_path", ")", ")", ":", "# If we're moving a directory, we need to", "# remove the destination first or else it will be", "# moved to inside the existing directory.", "# We just created new_path ourselves, so it will", "# be removable.", "os", ".", "rmdir", "(", "new_path", ")", "renames", "(", "path", ",", "new_path", ")", "return", "new_path" ]
[ 262, 4 ]
[ 281, 23 ]
python
en
['en', 'en', 'en']
True
StashedUninstallPathSet.commit
(self)
Commits the uninstall by removing stashed files.
Commits the uninstall by removing stashed files.
def commit(self) -> None: """Commits the uninstall by removing stashed files.""" for _, save_dir in self._save_dirs.items(): save_dir.cleanup() self._moves = [] self._save_dirs = {}
[ "def", "commit", "(", "self", ")", "->", "None", ":", "for", "_", ",", "save_dir", "in", "self", ".", "_save_dirs", ".", "items", "(", ")", ":", "save_dir", ".", "cleanup", "(", ")", "self", ".", "_moves", "=", "[", "]", "self", ".", "_save_dirs", "=", "{", "}" ]
[ 283, 4 ]
[ 288, 28 ]
python
en
['en', 'en', 'en']
True
StashedUninstallPathSet.rollback
(self)
Undoes the uninstall by moving stashed files back.
Undoes the uninstall by moving stashed files back.
def rollback(self) -> None: """Undoes the uninstall by moving stashed files back.""" for p in self._moves: logger.info("Moving to %s\n from %s", *p) for new_path, path in self._moves: try: logger.debug('Replacing %s from %s', new_path, path) if os.path.isfile(new_path) or os.path.islink(new_path): os.unlink(new_path) elif os.path.isdir(new_path): rmtree(new_path) renames(path, new_path) except OSError as ex: logger.error("Failed to restore %s", new_path) logger.debug("Exception: %s", ex) self.commit()
[ "def", "rollback", "(", "self", ")", "->", "None", ":", "for", "p", "in", "self", ".", "_moves", ":", "logger", ".", "info", "(", "\"Moving to %s\\n from %s\"", ",", "*", "p", ")", "for", "new_path", ",", "path", "in", "self", ".", "_moves", ":", "try", ":", "logger", ".", "debug", "(", "'Replacing %s from %s'", ",", "new_path", ",", "path", ")", "if", "os", ".", "path", ".", "isfile", "(", "new_path", ")", "or", "os", ".", "path", ".", "islink", "(", "new_path", ")", ":", "os", ".", "unlink", "(", "new_path", ")", "elif", "os", ".", "path", ".", "isdir", "(", "new_path", ")", ":", "rmtree", "(", "new_path", ")", "renames", "(", "path", ",", "new_path", ")", "except", "OSError", "as", "ex", ":", "logger", ".", "error", "(", "\"Failed to restore %s\"", ",", "new_path", ")", "logger", ".", "debug", "(", "\"Exception: %s\"", ",", "ex", ")", "self", ".", "commit", "(", ")" ]
[ 290, 4 ]
[ 307, 21 ]
python
en
['en', 'en', 'en']
True
UninstallPathSet._permitted
(self, path: str)
Return True if the given path is one we are permitted to remove/modify, False otherwise.
Return True if the given path is one we are permitted to remove/modify, False otherwise.
def _permitted(self, path: str) -> bool: """ Return True if the given path is one we are permitted to remove/modify, False otherwise. """ return is_local(path)
[ "def", "_permitted", "(", "self", ",", "path", ":", "str", ")", "->", "bool", ":", "return", "is_local", "(", "path", ")" ]
[ 324, 4 ]
[ 330, 29 ]
python
en
['en', 'error', 'th']
False
UninstallPathSet.remove
(self, auto_confirm: bool = False, verbose: bool = False)
Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True).
Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True).
def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None: """Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True).""" if not self.paths: logger.info( "Can't uninstall '%s'. No files were found to uninstall.", self.dist.project_name, ) return dist_name_version = ( self.dist.project_name + "-" + self.dist.version ) logger.info('Uninstalling %s:', dist_name_version) with indent_log(): if auto_confirm or self._allowed_to_proceed(verbose): moved = self._moved_paths for_rename = compress_for_rename(self.paths) for path in sorted(compact(for_rename)): moved.stash(path) logger.verbose('Removing file or directory %s', path) for pth in self.pth.values(): pth.remove() logger.info('Successfully uninstalled %s', dist_name_version)
[ "def", "remove", "(", "self", ",", "auto_confirm", ":", "bool", "=", "False", ",", "verbose", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "not", "self", ".", "paths", ":", "logger", ".", "info", "(", "\"Can't uninstall '%s'. No files were found to uninstall.\"", ",", "self", ".", "dist", ".", "project_name", ",", ")", "return", "dist_name_version", "=", "(", "self", ".", "dist", ".", "project_name", "+", "\"-\"", "+", "self", ".", "dist", ".", "version", ")", "logger", ".", "info", "(", "'Uninstalling %s:'", ",", "dist_name_version", ")", "with", "indent_log", "(", ")", ":", "if", "auto_confirm", "or", "self", ".", "_allowed_to_proceed", "(", "verbose", ")", ":", "moved", "=", "self", ".", "_moved_paths", "for_rename", "=", "compress_for_rename", "(", "self", ".", "paths", ")", "for", "path", "in", "sorted", "(", "compact", "(", "for_rename", ")", ")", ":", "moved", ".", "stash", "(", "path", ")", "logger", ".", "verbose", "(", "'Removing file or directory %s'", ",", "path", ")", "for", "pth", "in", "self", ".", "pth", ".", "values", "(", ")", ":", "pth", ".", "remove", "(", ")", "logger", ".", "info", "(", "'Successfully uninstalled %s'", ",", "dist_name_version", ")" ]
[ 360, 4 ]
[ 389, 77 ]
python
en
['en', 'en', 'en']
True
UninstallPathSet._allowed_to_proceed
(self, verbose: bool)
Display which files would be deleted and prompt for confirmation
Display which files would be deleted and prompt for confirmation
def _allowed_to_proceed(self, verbose: bool) -> bool: """Display which files would be deleted and prompt for confirmation """ def _display(msg: str, paths: Iterable[str]) -> None: if not paths: return logger.info(msg) with indent_log(): for path in sorted(compact(paths)): logger.info(path) if not verbose: will_remove, will_skip = compress_for_output_listing(self.paths) else: # In verbose mode, display all the files that are going to be # deleted. will_remove = set(self.paths) will_skip = set() _display('Would remove:', will_remove) _display('Would not remove (might be manually added):', will_skip) _display('Would not remove (outside of prefix):', self._refuse) if verbose: _display('Will actually move:', compress_for_rename(self.paths)) return ask('Proceed (Y/n)? ', ('y', 'n', '')) != 'n'
[ "def", "_allowed_to_proceed", "(", "self", ",", "verbose", ":", "bool", ")", "->", "bool", ":", "def", "_display", "(", "msg", ":", "str", ",", "paths", ":", "Iterable", "[", "str", "]", ")", "->", "None", ":", "if", "not", "paths", ":", "return", "logger", ".", "info", "(", "msg", ")", "with", "indent_log", "(", ")", ":", "for", "path", "in", "sorted", "(", "compact", "(", "paths", ")", ")", ":", "logger", ".", "info", "(", "path", ")", "if", "not", "verbose", ":", "will_remove", ",", "will_skip", "=", "compress_for_output_listing", "(", "self", ".", "paths", ")", "else", ":", "# In verbose mode, display all the files that are going to be", "# deleted.", "will_remove", "=", "set", "(", "self", ".", "paths", ")", "will_skip", "=", "set", "(", ")", "_display", "(", "'Would remove:'", ",", "will_remove", ")", "_display", "(", "'Would not remove (might be manually added):'", ",", "will_skip", ")", "_display", "(", "'Would not remove (outside of prefix):'", ",", "self", ".", "_refuse", ")", "if", "verbose", ":", "_display", "(", "'Will actually move:'", ",", "compress_for_rename", "(", "self", ".", "paths", ")", ")", "return", "ask", "(", "'Proceed (Y/n)? '", ",", "(", "'y'", ",", "'n'", ",", "''", ")", ")", "!=", "'n'" ]
[ 391, 4 ]
[ 418, 60 ]
python
en
['en', 'en', 'en']
True
UninstallPathSet.rollback
(self)
Rollback the changes previously made by remove().
Rollback the changes previously made by remove().
def rollback(self) -> None: """Rollback the changes previously made by remove().""" if not self._moved_paths.can_rollback: logger.error( "Can't roll back %s; was not uninstalled", self.dist.project_name, ) return logger.info('Rolling back uninstall of %s', self.dist.project_name) self._moved_paths.rollback() for pth in self.pth.values(): pth.rollback()
[ "def", "rollback", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "_moved_paths", ".", "can_rollback", ":", "logger", ".", "error", "(", "\"Can't roll back %s; was not uninstalled\"", ",", "self", ".", "dist", ".", "project_name", ",", ")", "return", "logger", ".", "info", "(", "'Rolling back uninstall of %s'", ",", "self", ".", "dist", ".", "project_name", ")", "self", ".", "_moved_paths", ".", "rollback", "(", ")", "for", "pth", "in", "self", ".", "pth", ".", "values", "(", ")", ":", "pth", ".", "rollback", "(", ")" ]
[ 420, 4 ]
[ 431, 26 ]
python
en
['en', 'en', 'en']
True
UninstallPathSet.commit
(self)
Remove temporary save dir: rollback will no longer be possible.
Remove temporary save dir: rollback will no longer be possible.
def commit(self) -> None: """Remove temporary save dir: rollback will no longer be possible.""" self._moved_paths.commit()
[ "def", "commit", "(", "self", ")", "->", "None", ":", "self", ".", "_moved_paths", ".", "commit", "(", ")" ]
[ 433, 4 ]
[ 435, 34 ]
python
en
['en', 'en', 'en']
True
generate_user
()
Returns a randomly generate dictionary representing a user, where each user is described by a user agent string, an ID, a latlng, an IP, an age_bracket, whether they've oped into marketing and the :return:
Returns a randomly generate dictionary representing a user, where each user is described by a user agent string, an ID, a latlng, an IP, an age_bracket, whether they've oped into marketing and the :return:
def generate_user(): """ Returns a randomly generate dictionary representing a user, where each user is described by a user agent string, an ID, a latlng, an IP, an age_bracket, whether they've oped into marketing and the :return: """ user = {} user['lat'] = "" user['lng'] = "" while user['lat'] == "" or user['lng'] == "": user['ip'] = faker.ipv4() g = geocoder.ip(user['ip']) latlng = list(map(str, g.latlng)) if len(latlng) == 2: user['lat'] = latlng[0] user['lng'] = latlng[1] user['user_agent'] = choice(ualist)() user['age_bracket'] = choice(['18-25', '26-40', '41-55', '55+']) user['opted_into_marketing'] = choice([True, False]) user['id'] = hash(str(user['ip']) + str(user['lat'] + str(user['lng']))) return user
[ "def", "generate_user", "(", ")", ":", "user", "=", "{", "}", "user", "[", "'lat'", "]", "=", "\"\"", "user", "[", "'lng'", "]", "=", "\"\"", "while", "user", "[", "'lat'", "]", "==", "\"\"", "or", "user", "[", "'lng'", "]", "==", "\"\"", ":", "user", "[", "'ip'", "]", "=", "faker", ".", "ipv4", "(", ")", "g", "=", "geocoder", ".", "ip", "(", "user", "[", "'ip'", "]", ")", "latlng", "=", "list", "(", "map", "(", "str", ",", "g", ".", "latlng", ")", ")", "if", "len", "(", "latlng", ")", "==", "2", ":", "user", "[", "'lat'", "]", "=", "latlng", "[", "0", "]", "user", "[", "'lng'", "]", "=", "latlng", "[", "1", "]", "user", "[", "'user_agent'", "]", "=", "choice", "(", "ualist", ")", "(", ")", "user", "[", "'age_bracket'", "]", "=", "choice", "(", "[", "'18-25'", ",", "'26-40'", ",", "'41-55'", ",", "'55+'", "]", ")", "user", "[", "'opted_into_marketing'", "]", "=", "choice", "(", "[", "True", ",", "False", "]", ")", "user", "[", "'id'", "]", "=", "hash", "(", "str", "(", "user", "[", "'ip'", "]", ")", "+", "str", "(", "user", "[", "'lat'", "]", "+", "str", "(", "user", "[", "'lng'", "]", ")", ")", ")", "return", "user" ]
[ 20, 0 ]
[ 41, 15 ]
python
en
['en', 'error', 'th']
False
write_csvs
(users)
Writes two .csv files, one for ingestiong by an event generator, the other formatted to be uploaded to BigQuery :param users: :return:
Writes two .csv files, one for ingestiong by an event generator, the other formatted to be uploaded to BigQuery :param users: :return:
def write_csvs(users): """ Writes two .csv files, one for ingestiong by an event generator, the other formatted to be uploaded to BigQuery :param users: :return: """ with open("users.csv", 'w') as event_out, open("users_bq.txt", 'w') as bq_out: cols = list(users[0].keys()) cols.sort() bq_cols = cols.copy() [bq_cols.remove(s) for s in sensitive_fields] event_out.write(",".join(cols) + '\n') for user in users: event_vals = [str(user[key]) for key in cols] event_out.write(",".join(event_vals) + '\n') bq_vals = [str(user[key]) for key in bq_cols] bq_out.write(",".join(bq_vals) + '\n')
[ "def", "write_csvs", "(", "users", ")", ":", "with", "open", "(", "\"users.csv\"", ",", "'w'", ")", "as", "event_out", ",", "open", "(", "\"users_bq.txt\"", ",", "'w'", ")", "as", "bq_out", ":", "cols", "=", "list", "(", "users", "[", "0", "]", ".", "keys", "(", ")", ")", "cols", ".", "sort", "(", ")", "bq_cols", "=", "cols", ".", "copy", "(", ")", "[", "bq_cols", ".", "remove", "(", "s", ")", "for", "s", "in", "sensitive_fields", "]", "event_out", ".", "write", "(", "\",\"", ".", "join", "(", "cols", ")", "+", "'\\n'", ")", "for", "user", "in", "users", ":", "event_vals", "=", "[", "str", "(", "user", "[", "key", "]", ")", "for", "key", "in", "cols", "]", "event_out", ".", "write", "(", "\",\"", ".", "join", "(", "event_vals", ")", "+", "'\\n'", ")", "bq_vals", "=", "[", "str", "(", "user", "[", "key", "]", ")", "for", "key", "in", "bq_cols", "]", "bq_out", ".", "write", "(", "\",\"", ".", "join", "(", "bq_vals", ")", "+", "'\\n'", ")" ]
[ 43, 0 ]
[ 59, 54 ]
python
en
['en', 'error', 'th']
False