id                int32          0 – 252k
repo              stringlengths  7 – 55
path              stringlengths  4 – 127
func_name         stringlengths  1 – 88
original_string   stringlengths  75 – 19.8k
language          stringclasses  1 value
code              stringlengths  75 – 19.8k
code_tokens       sequence
docstring         stringlengths  3 – 17.3k
docstring_tokens  sequence
sha               stringlengths  40 – 40
url               stringlengths  87 – 242
246,600
EdwinvO/pyutillib
pyutillib/date_utils.py
previous_weekday
def previous_weekday(date):
    '''
    Returns the last weekday before date

    Args:
        date (datetime or datetime.date)
    Returns:
        (datetime or datetime.date)
    Raises:
        -
    '''
    weekday = date.weekday()
    if weekday == 0:
        n_days = 3
    elif weekday == 6:
        n_days = 2
    else:
        n_days = 1
    return date - datetime.timedelta(days=n_days)
python
def previous_weekday(date): ''' Returns the last weekday before date Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: - ''' weekday = date.weekday() if weekday == 0: n_days = 3 elif weekday == 6: n_days = 2 else: n_days = 1 return date - datetime.timedelta(days=n_days)
[ "def", "previous_weekday", "(", "date", ")", ":", "weekday", "=", "date", ".", "weekday", "(", ")", "if", "weekday", "==", "0", ":", "n_days", "=", "3", "elif", "weekday", "==", "6", ":", "n_days", "=", "2", "else", ":", "n_days", "=", "1", "return", "date", "-", "datetime", ".", "timedelta", "(", "days", "=", "n_days", ")" ]
Returns the last weekday before date Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: -
[ "Returns", "the", "last", "weekday", "before", "date" ]
6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5
https://github.com/EdwinvO/pyutillib/blob/6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5/pyutillib/date_utils.py#L172-L190
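A minimal usage sketch for the row above (not part of the dataset row), assuming pyutillib is installed and the function is importable from pyutillib.date_utils as the path field suggests; the expected dates follow from the code shown, with 2024-01-01 falling on a Monday:

    import datetime
    from pyutillib.date_utils import previous_weekday  # import path taken from the row's path field

    # Monday 2024-01-01 -> three days back to Friday 2023-12-29
    print(previous_weekday(datetime.date(2024, 1, 1)))  # 2023-12-29
    # Thursday 2024-01-04 -> just one day back
    print(previous_weekday(datetime.date(2024, 1, 4)))  # 2024-01-03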
246,601
EdwinvO/pyutillib
pyutillib/date_utils.py
next_weekday
def next_weekday(date):
    '''
    Return the first weekday after date

    Args:
        date (datetime or datetime.date)
    Returns:
        (datetime or datetime.date)
    Raises:
        -
    '''
    n_days = 7 - date.weekday()
    if n_days > 3:
        n_days = 1
    return date + datetime.timedelta(days=n_days)
python
def next_weekday(date): ''' Return the first weekday after date Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: - ''' n_days = 7 - date.weekday() if n_days > 3: n_days = 1 return date + datetime.timedelta(days=n_days)
[ "def", "next_weekday", "(", "date", ")", ":", "n_days", "=", "7", "-", "date", ".", "weekday", "(", ")", "if", "n_days", ">", "3", ":", "n_days", "=", "1", "return", "date", "+", "datetime", ".", "timedelta", "(", "days", "=", "n_days", ")" ]
Return the first weekday after date Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: -
[ "Return", "the", "first", "weekday", "after", "date" ]
6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5
https://github.com/EdwinvO/pyutillib/blob/6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5/pyutillib/date_utils.py#L193-L207
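A matching sketch for next_weekday (same assumption that pyutillib.date_utils is importable); per the code, Friday, Saturday and Sunday all roll forward to the following Monday:

    import datetime
    from pyutillib.date_utils import next_weekday  # import path assumed from the row's path field

    print(next_weekday(datetime.date(2024, 1, 5)))  # Friday   -> 2024-01-08 (Monday)
    print(next_weekday(datetime.date(2024, 1, 6)))  # Saturday -> 2024-01-08 (Monday)
    print(next_weekday(datetime.date(2024, 1, 8)))  # Monday   -> 2024-01-09 (Tuesday)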
246,602
EdwinvO/pyutillib
pyutillib/date_utils.py
last_year
def last_year(date_):
    '''
    Returns the same date 1 year ago.

    Args:
        date (datetime or datetime.date)
    Returns:
        (datetime or datetime.date)
    Raises:
        -
    '''
    day = 28 if date_.day == 29 and date_.month == 2 else date_.day
    return datetime.date(date_.year-1, date_.month, day)
python
def last_year(date_): ''' Returns the same date 1 year ago. Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: - ''' day = 28 if date_.day == 29 and date_.month == 2 else date_.day return datetime.date(date_.year-1, date_.month, day)
[ "def", "last_year", "(", "date_", ")", ":", "day", "=", "28", "if", "date_", ".", "day", "==", "29", "and", "date_", ".", "month", "==", "2", "else", "date_", ".", "day", "return", "datetime", ".", "date", "(", "date_", ".", "year", "-", "1", ",", "date_", ".", "month", ",", "day", ")" ]
Returns the same date 1 year ago. Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: -
[ "Returns", "the", "same", "date", "1", "year", "ago", "." ]
6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5
https://github.com/EdwinvO/pyutillib/blob/6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5/pyutillib/date_utils.py#L210-L222
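A short sketch (same importability assumption as above) showing the one special case the code handles: a leap-day input is clamped to Feb 28 of the previous year, and the result is always a datetime.date:

    import datetime
    from pyutillib.date_utils import last_year  # assumed importable

    print(last_year(datetime.date(2016, 2, 29)))  # 2015-02-28 (leap day clamped)
    print(last_year(datetime.date(2016, 3, 15)))  # 2015-03-15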
246,603
EdwinvO/pyutillib
pyutillib/date_utils.py
timestr2time
def timestr2time(time_str):
    '''
    Turns a string into a datetime.time object. This will only work if the
    format can be "guessed", so the string must have one of the formats from
    VALID_TIME_FORMATS_TEXT.

    Args:
        time_str (str) a string that represents a date
    Returns:
        datetime.time object
    Raises:
        ValueError if the input string does not have a valid format.
    '''
    if any(c not in '0123456789:' for c in time_str):
        raise ValueError('Illegal character in time string')
    if time_str.count(':') == 2:
        h, m, s = time_str.split(':')
    elif time_str.count(':') == 1:
        h, m = time_str.split(':')
        s = '00'
    elif len(time_str) == 6:
        h = time_str[:2]
        m = time_str[2:4]
        s = time_str[4:]
    else:
        raise ValueError('Time format not recognised. {}'.format(
                VALID_TIME_FORMATS_TEXT))
    if len(m) == 2 and len(s) == 2:
        mins = int(m)
        sec = int(s)
    else:
        raise ValueError('m and s must be 2 digits')
    try:
        return datetime.time(int(h), mins, sec)
    except ValueError:
        raise ValueError('Invalid time {}. {}'.format(time_str,
                VALID_TIME_FORMATS_TEXT))
python
def timestr2time(time_str): ''' Turns a string into a datetime.time object. This will only work if the format can be "guessed", so the string must have one of the formats from VALID_TIME_FORMATS_TEXT. Args: time_str (str) a string that represents a date Returns: datetime.time object Raises: ValueError if the input string does not have a valid format. ''' if any(c not in '0123456789:' for c in time_str): raise ValueError('Illegal character in time string') if time_str.count(':') == 2: h, m, s = time_str.split(':') elif time_str.count(':') == 1: h, m = time_str.split(':') s = '00' elif len(time_str) == 6: h = time_str[:2] m = time_str[2:4] s = time_str[4:] else: raise ValueError('Time format not recognised. {}'.format( VALID_TIME_FORMATS_TEXT)) if len(m) == 2 and len(s) == 2: mins = int(m) sec = int(s) else: raise ValueError('m and s must be 2 digits') try: return datetime.time(int(h), mins, sec) except ValueError: raise ValueError('Invalid time {}. {}'.format(time_str, VALID_TIME_FORMATS_TEXT))
[ "def", "timestr2time", "(", "time_str", ")", ":", "if", "any", "(", "c", "not", "in", "'0123456789:'", "for", "c", "in", "time_str", ")", ":", "raise", "ValueError", "(", "'Illegal character in time string'", ")", "if", "time_str", ".", "count", "(", "':'", ")", "==", "2", ":", "h", ",", "m", ",", "s", "=", "time_str", ".", "split", "(", "':'", ")", "elif", "time_str", ".", "count", "(", "':'", ")", "==", "1", ":", "h", ",", "m", "=", "time_str", ".", "split", "(", "':'", ")", "s", "=", "'00'", "elif", "len", "(", "time_str", ")", "==", "6", ":", "h", "=", "time_str", "[", ":", "2", "]", "m", "=", "time_str", "[", "2", ":", "4", "]", "s", "=", "time_str", "[", "4", ":", "]", "else", ":", "raise", "ValueError", "(", "'Time format not recognised. {}'", ".", "format", "(", "VALID_TIME_FORMATS_TEXT", ")", ")", "if", "len", "(", "m", ")", "==", "2", "and", "len", "(", "s", ")", "==", "2", ":", "mins", "=", "int", "(", "m", ")", "sec", "=", "int", "(", "s", ")", "else", ":", "raise", "ValueError", "(", "'m and s must be 2 digits'", ")", "try", ":", "return", "datetime", ".", "time", "(", "int", "(", "h", ")", ",", "mins", ",", "sec", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Invalid time {}. {}'", ".", "format", "(", "time_str", ",", "VALID_TIME_FORMATS_TEXT", ")", ")" ]
Turns a string into a datetime.time object. This will only work if the format can be "guessed", so the string must have one of the formats from VALID_TIME_FORMATS_TEXT. Args: time_str (str) a string that represents a date Returns: datetime.time object Raises: ValueError if the input string does not have a valid format.
[ "Turns", "a", "string", "into", "a", "datetime", ".", "time", "object", ".", "This", "will", "only", "work", "if", "the", "format", "can", "be", "guessed", "so", "the", "string", "must", "have", "one", "of", "the", "formats", "from", "VALID_TIME_FORMATS_TEXT", "." ]
6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5
https://github.com/EdwinvO/pyutillib/blob/6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5/pyutillib/date_utils.py#L317-L353
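A usage sketch (not part of the dataset row, pyutillib assumed installed) showing the two accepted shapes and the character check in the code above:

    from pyutillib.date_utils import timestr2time  # assumed importable

    print(timestr2time('09:30'))   # 09:30:00  (seconds default to '00')
    print(timestr2time('093015'))  # 09:30:15  (6-digit colon-free form)
    timestr2time('9h30')           # raises ValueError: illegal character in time string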
246,604
EdwinvO/pyutillib
pyutillib/date_utils.py
time2timestr
def time2timestr(time, fmt='hhmmss'):
    '''
    Turns a datetime.time object into a string. The string must have one of
    the formats from VALID_TIME_FORMATS_TEXT to make it compatible with
    timestr2time.

    Args:
        time (datetime.time) the time to be translated
        fmt (str) a format string.
    Returns:
        (str) that represents a time.
    Raises:
        ValueError if the format is not valid.
    '''
    if fmt.count(':') == 2:
        if not fmt.index('h') < fmt.index('m') < fmt.index('s'):
            raise ValueError('Invalid format string. {}'.format(
                    VALID_TIME_FORMATS_TEXT))
        h, m, s = fmt.split(':')
    elif fmt.count(':') == 1:
        if not fmt.index('h') < fmt.index('m'):
            raise ValueError('Invalid format string. {}'.format(
                    VALID_TIME_FORMATS_TEXT))
        h, m = fmt.split(':')
        s = None
    elif any(c not in 'hms' for c in fmt) or len(fmt) != 6:
        raise ValueError('Invalid character in format string. {}'.format(
                VALID_TIME_FORMATS_TEXT))
    else:
        if not fmt.index('h') < fmt.index('m') < fmt.index('s'):
            raise ValueError('Invalid format string. {}'.format(
                    VALID_TIME_FORMATS_TEXT))
        h, m, s = fmt[:-4], fmt[-4:-2], fmt[-2:]
    for string, char in ((h, 'h'), (m, 'm'), (s, 's')):
        if string is not None and any(c != char for c in string):
            raise ValueError('Invalid date format: {} is not {}'.\
                    format(char, string))
    if len(h) == 2:
        fmt = fmt.replace('hh', '%H', 1)
    elif len(h) == 1:
        fmt = fmt.replace('h', 'X%H', 1)
    else:
        raise ValueError('Invalid format string, hour must have 1 or 2 digits')
    if len(m) == 2:
        fmt = fmt.replace('mm', '%M', 1)
    else:
        raise ValueError('Invalid format string, minutes must have 2 digits')
    if s is not None and len(s) == 2:
        fmt = fmt.replace('ss', '%S', 1)
    elif s is not None:
        raise ValueError('Invalid format string, seconds must have 2 digits')
    return time.strftime(fmt).replace('X0','X').replace('X','')
python
def time2timestr(time, fmt='hhmmss'): ''' Turns a datetime.time object into a string. The string must have one of the formats from VALID_TIME_FORMATS_TEXT to make it compatible with timestr2time. Args: time (datetime.time) the time to be translated fmt (str) a format string. Returns: (str) that represents a time. Raises: ValueError if the format is not valid. ''' if fmt.count(':') == 2: if not fmt.index('h') < fmt.index('m') < fmt.index('s'): raise ValueError('Invalid format string. {}'.format( VALID_TIME_FORMATS_TEXT)) h, m, s = fmt.split(':') elif fmt.count(':') == 1: if not fmt.index('h') < fmt.index('m'): raise ValueError('Invalid format string. {}'.format( VALID_TIME_FORMATS_TEXT)) h, m = fmt.split(':') s = None elif any(c not in 'hms' for c in fmt) or len(fmt) != 6: raise ValueError('Invalid character in format string. {}'.format( VALID_TIME_FORMATS_TEXT)) else: if not fmt.index('h') < fmt.index('m') < fmt.index('s'): raise ValueError('Invalid format string. {}'.format( VALID_TIME_FORMATS_TEXT)) h, m, s = fmt[:-4], fmt[-4:-2], fmt[-2:] for string, char in ((h, 'h'), (m, 'm'), (s, 's')): if string is not None and any(c != char for c in string): raise ValueError('Invalid date format: {} is not {}'.\ format(char, string)) if len(h) == 2: fmt = fmt.replace('hh', '%H', 1) elif len(h) == 1: fmt = fmt.replace('h', 'X%H', 1) else: raise ValueError('Invalid format string, hour must have 1 or 2 digits') if len(m) == 2: fmt = fmt.replace('mm', '%M', 1) else: raise ValueError('Invalid format string, minutes must have 2 digits') if s is not None and len(s) == 2: fmt = fmt. replace('ss', '%S', 1) elif s is not None: raise ValueError('Invalid format string, seconds must have 2 digits') return time.strftime(fmt).replace('X0','X').replace('X','')
[ "def", "time2timestr", "(", "time", ",", "fmt", "=", "'hhmmss'", ")", ":", "if", "fmt", ".", "count", "(", "':'", ")", "==", "2", ":", "if", "not", "fmt", ".", "index", "(", "'h'", ")", "<", "fmt", ".", "index", "(", "'m'", ")", "<", "fmt", ".", "index", "(", "'s'", ")", ":", "raise", "ValueError", "(", "'Invalid format string. {}'", ".", "format", "(", "VALID_TIME_FORMATS_TEXT", ")", ")", "h", ",", "m", ",", "s", "=", "fmt", ".", "split", "(", "':'", ")", "elif", "fmt", ".", "count", "(", "':'", ")", "==", "1", ":", "if", "not", "fmt", ".", "index", "(", "'h'", ")", "<", "fmt", ".", "index", "(", "'m'", ")", ":", "raise", "ValueError", "(", "'Invalid format string. {}'", ".", "format", "(", "VALID_TIME_FORMATS_TEXT", ")", ")", "h", ",", "m", "=", "fmt", ".", "split", "(", "':'", ")", "s", "=", "None", "elif", "any", "(", "c", "not", "in", "'hms'", "for", "c", "in", "fmt", ")", "or", "len", "(", "fmt", ")", "!=", "6", ":", "raise", "ValueError", "(", "'Invalid character in format string. {}'", ".", "format", "(", "VALID_TIME_FORMATS_TEXT", ")", ")", "else", ":", "if", "not", "fmt", ".", "index", "(", "'h'", ")", "<", "fmt", ".", "index", "(", "'m'", ")", "<", "fmt", ".", "index", "(", "'s'", ")", ":", "raise", "ValueError", "(", "'Invalid format string. {}'", ".", "format", "(", "VALID_TIME_FORMATS_TEXT", ")", ")", "h", ",", "m", ",", "s", "=", "fmt", "[", ":", "-", "4", "]", ",", "fmt", "[", "-", "4", ":", "-", "2", "]", ",", "fmt", "[", "-", "2", ":", "]", "for", "string", ",", "char", "in", "(", "(", "h", ",", "'h'", ")", ",", "(", "m", ",", "'m'", ")", ",", "(", "s", ",", "'s'", ")", ")", ":", "if", "string", "is", "not", "None", "and", "any", "(", "c", "!=", "char", "for", "c", "in", "string", ")", ":", "raise", "ValueError", "(", "'Invalid date format: {} is not {}'", ".", "format", "(", "char", ",", "string", ")", ")", "if", "len", "(", "h", ")", "==", "2", ":", "fmt", "=", "fmt", ".", "replace", "(", "'hh'", ",", "'%H'", ",", "1", ")", "elif", "len", "(", "h", ")", "==", "1", ":", "fmt", "=", "fmt", ".", "replace", "(", "'h'", ",", "'X%H'", ",", "1", ")", "else", ":", "raise", "ValueError", "(", "'Invalid format string, hour must have 1 or 2 digits'", ")", "if", "len", "(", "m", ")", "==", "2", ":", "fmt", "=", "fmt", ".", "replace", "(", "'mm'", ",", "'%M'", ",", "1", ")", "else", ":", "raise", "ValueError", "(", "'Invalid format string, minutes must have 2 digits'", ")", "if", "s", "is", "not", "None", "and", "len", "(", "s", ")", "==", "2", ":", "fmt", "=", "fmt", ".", "replace", "(", "'ss'", ",", "'%S'", ",", "1", ")", "elif", "s", "is", "not", "None", ":", "raise", "ValueError", "(", "'Invalid format string, seconds must have 2 digits'", ")", "return", "time", ".", "strftime", "(", "fmt", ")", ".", "replace", "(", "'X0'", ",", "'X'", ")", ".", "replace", "(", "'X'", ",", "''", ")" ]
Turns a datetime.time object into a string. The string must have one of the formats from VALID_TIME_FORMATS_TEXT to make it compatible with timestr2time. Args: time (datetime.time) the time to be translated fmt (str) a format string. Returns: (str) that represents a time. Raises: ValueError if the format is not valid.
[ "Turns", "a", "datetime", ".", "time", "object", "into", "a", "string", ".", "The", "string", "must", "have", "one", "of", "the", "formats", "from", "VALID_TIME_FORMATS_TEXT", "to", "make", "it", "compatible", "with", "timestr2time", "." ]
6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5
https://github.com/EdwinvO/pyutillib/blob/6d773c31d1f27cc5256d47feb8afb5c3ae5f0db5/pyutillib/date_utils.py#L355-L406
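A sketch of the counterpart direction (same pyutillib assumption); the expected strings follow from tracing the format handling above, including the 'X' placeholder trick that strips a leading zero for single-digit hour formats:

    import datetime
    from pyutillib.date_utils import time2timestr  # assumed importable

    t = datetime.time(9, 5, 3)
    print(time2timestr(t))              # '090503'   (default 'hhmmss')
    print(time2timestr(t, 'hh:mm:ss'))  # '09:05:03'
    print(time2timestr(t, 'h:mm:ss'))   # '9:05:03'  (leading zero removed via X%H)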
246,605
cogniteev/docido-python-sdk
docido_sdk/crawler/tasks.py
check_custom_concurrency
def check_custom_concurrency(default, forced, logger=None):
    """ Get the proper concurrency value according to the default one
    and the one specified by the crawler.

    :param int default:
      default tasks concurrency
    :param forced:
      concurrency asked by crawler
    :return: concurrency to use.
    :rtype: int
    """
    logger = logger or LOGGER
    cmc_msg = 'Invalid "max_concurrent_tasks: '
    if not isinstance(forced, int):
        logger.warn(cmc_msg + 'expecting int')
    elif forced > default:
        msg = 'may not be greater than: %s' % default
        logger.warn(cmc_msg + msg)
    elif forced < 1:
        msg = 'may not be less than 1'
        logger.warn(cmc_msg + msg)
    else:
        default = forced
    return default
python
def check_custom_concurrency(default, forced, logger=None): """ Get the proper concurrency value according to the default one and the one specified by the crawler. :param int default: default tasks concurrency :param forced: concurrency asked by crawler :return: concurrency to use. :rtype: int """ logger = logger or LOGGER cmc_msg = 'Invalid "max_concurrent_tasks: ' if not isinstance(forced, int): logger.warn(cmc_msg + 'expecting int') elif forced > default: msg = 'may not be greater than: %s' % default logger.warn(cmc_msg + msg) elif forced < 1: msg = 'may not be less than 1' logger.warn(cmc_msg + msg) else: default = forced return default
[ "def", "check_custom_concurrency", "(", "default", ",", "forced", ",", "logger", "=", "None", ")", ":", "logger", "=", "logger", "or", "LOGGER", "cmc_msg", "=", "'Invalid \"max_concurrent_tasks: '", "if", "not", "isinstance", "(", "forced", ",", "int", ")", ":", "logger", ".", "warn", "(", "cmc_msg", "+", "'expecting int'", ")", "elif", "forced", ">", "default", ":", "msg", "=", "'may not be greater than: %s'", "%", "default", "logger", ".", "warn", "(", "cmc_msg", "+", "msg", ")", "elif", "forced", "<", "1", ":", "msg", "=", "'may not be less than 1'", "logger", ".", "warn", "(", "cmc_msg", "+", "msg", ")", "else", ":", "default", "=", "forced", "return", "default" ]
Get the proper concurrency value according to the default one and the one specified by the crawler. :param int default: default tasks concurrency :param forced: concurrency asked by crawler :return: concurrency to use. :rtype: int
[ "Get", "the", "proper", "concurrency", "value", "according", "to", "the", "default", "one", "and", "the", "one", "specified", "by", "the", "crawler", "." ]
58ecb6c6f5757fd40c0601657ab18368da7ddf33
https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/crawler/tasks.py#L9-L35
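A small sketch of the clamping behaviour (not part of the dataset row; docido_sdk assumed installed, and a standard logger is passed so the module-level LOGGER is not needed):

    import logging
    from docido_sdk.crawler.tasks import check_custom_concurrency  # path taken from the row

    log = logging.getLogger('demo')
    print(check_custom_concurrency(5, 3, logger=log))    # 3  (valid override accepted)
    print(check_custom_concurrency(5, 10, logger=log))   # 5  (too high: warning, default kept)
    print(check_custom_concurrency(5, 'x', logger=log))  # 5  (not an int: warning, default kept)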
246,606
cogniteev/docido-python-sdk
docido_sdk/crawler/tasks.py
reorg_crawl_tasks
def reorg_crawl_tasks(tasks, concurrency, logger=None):
    """ Extract content returned by the crawler `iter_crawl_tasks`
    member method.

    :return:
      tuple made of the sub-tasks to executed, the epilogue task to execute
      or `None` is none was specified by the crawler, and the proper tasks
      concurrency level.
    :rtype:
      tuple (sub-tasks, epilogue, concurrent)
    """
    futures = tasks['tasks']
    epilogue = tasks.get('epilogue')
    custom_concurrency = tasks.get('max_concurrent_tasks', concurrency)
    check_custom_concurrency(concurrency, custom_concurrency, logger)
    futures = list(futures)
    return futures, epilogue, concurrency
python
def reorg_crawl_tasks(tasks, concurrency, logger=None): """ Extract content returned by the crawler `iter_crawl_tasks` member method. :return: tuple made of the sub-tasks to executed, the epilogue task to execute or `None` is none was specified by the crawler, and the proper tasks concurrency level. :rtype: tuple (sub-tasks, epilogue, concurrent) """ futures = tasks['tasks'] epilogue = tasks.get('epilogue') custom_concurrency = tasks.get('max_concurrent_tasks', concurrency) check_custom_concurrency(concurrency, custom_concurrency, logger) futures = list(futures) return futures, epilogue, concurrency
[ "def", "reorg_crawl_tasks", "(", "tasks", ",", "concurrency", ",", "logger", "=", "None", ")", ":", "futures", "=", "tasks", "[", "'tasks'", "]", "epilogue", "=", "tasks", ".", "get", "(", "'epilogue'", ")", "custom_concurrency", "=", "tasks", ".", "get", "(", "'max_concurrent_tasks'", ",", "concurrency", ")", "check_custom_concurrency", "(", "concurrency", ",", "custom_concurrency", ",", "logger", ")", "futures", "=", "list", "(", "futures", ")", "return", "futures", ",", "epilogue", ",", "concurrency" ]
Extract content returned by the crawler `iter_crawl_tasks` member method. :return: tuple made of the sub-tasks to executed, the epilogue task to execute or `None` is none was specified by the crawler, and the proper tasks concurrency level. :rtype: tuple (sub-tasks, epilogue, concurrent)
[ "Extract", "content", "returned", "by", "the", "crawler", "iter_crawl_tasks", "member", "method", "." ]
58ecb6c6f5757fd40c0601657ab18368da7ddf33
https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/crawler/tasks.py#L38-L53
246,607
cogniteev/docido-python-sdk
docido_sdk/crawler/tasks.py
split_crawl_tasks
def split_crawl_tasks(tasks, concurrency):
    """ Reorganize tasks according to the tasks max concurrency value.

    :param tasks:
      sub-tasks to execute, can be either a list of tasks of
      a list of list of tasks
    :param int concurrency:
      Maximum number of tasks that might be executed in parallel.
    :return:
      list of list of tasks.
    """
    if any(tasks) and isinstance(tasks[0], list):
        for seq in tasks:
            if not isinstance(seq, list):
                raise Exception("Expected a list of tasks")
    else:
        if concurrency > 1:
            chain_size = int(ceil(float(len(tasks)) / concurrency))
            tasks = [
                chunk for chunk in chunks(
                    iter(tasks),
                    max(1, chain_size)
                )
            ]
        else:
            tasks = [tasks]
    return tasks
python
def split_crawl_tasks(tasks, concurrency): """ Reorganize tasks according to the tasks max concurrency value. :param tasks: sub-tasks to execute, can be either a list of tasks of a list of list of tasks :param int concurrency: Maximum number of tasks that might be executed in parallel. :return: list of list of tasks. """ if any(tasks) and isinstance(tasks[0], list): for seq in tasks: if not isinstance(seq, list): raise Exception("Expected a list of tasks") else: if concurrency > 1: chain_size = int(ceil(float(len(tasks)) / concurrency)) tasks = [ chunk for chunk in chunks( iter(tasks), max(1, chain_size) ) ] else: tasks = [tasks] return tasks
[ "def", "split_crawl_tasks", "(", "tasks", ",", "concurrency", ")", ":", "if", "any", "(", "tasks", ")", "and", "isinstance", "(", "tasks", "[", "0", "]", ",", "list", ")", ":", "for", "seq", "in", "tasks", ":", "if", "not", "isinstance", "(", "seq", ",", "list", ")", ":", "raise", "Exception", "(", "\"Expected a list of tasks\"", ")", "else", ":", "if", "concurrency", ">", "1", ":", "chain_size", "=", "int", "(", "ceil", "(", "float", "(", "len", "(", "tasks", ")", ")", "/", "concurrency", ")", ")", "tasks", "=", "[", "chunk", "for", "chunk", "in", "chunks", "(", "iter", "(", "tasks", ")", ",", "max", "(", "1", ",", "chain_size", ")", ")", "]", "else", ":", "tasks", "=", "[", "tasks", "]", "return", "tasks" ]
Reorganize tasks according to the tasks max concurrency value. :param tasks: sub-tasks to execute, can be either a list of tasks of a list of list of tasks :param int concurrency: Maximum number of tasks that might be executed in parallel. :return: list of list of tasks.
[ "Reorganize", "tasks", "according", "to", "the", "tasks", "max", "concurrency", "value", "." ]
58ecb6c6f5757fd40c0601657ab18368da7ddf33
https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/crawler/tasks.py#L56-L84
246,608
jeff-regier/authortoolkit
authortoolkit/agglomerator.py
Agglomerator.do_static_merge
def do_static_merge(cls, c_source, c_target):
    """By the time we're just folding in clusters, there's no need to
    maintain self.INSTANCES and self.clusters, so we just call this method
    """
    c_target.extend(c_source)
    c_source.parent = c_target.parent
    cls.CLUSTERS.remove(c_source)
    for m in c_source.mentions:
        cls.MENTION_TO_CLUSTER[m] = c_target
python
def do_static_merge(cls, c_source, c_target): """By the time we're just folding in clusters, there's no need to maintain self.INSTANCES and self.clusters, so we just call this method """ c_target.extend(c_source) c_source.parent = c_target.parent cls.CLUSTERS.remove(c_source) for m in c_source.mentions: cls.MENTION_TO_CLUSTER[m] = c_target
[ "def", "do_static_merge", "(", "cls", ",", "c_source", ",", "c_target", ")", ":", "c_target", ".", "extend", "(", "c_source", ")", "c_source", ".", "parent", "=", "c_target", ".", "parent", "cls", ".", "CLUSTERS", ".", "remove", "(", "c_source", ")", "for", "m", "in", "c_source", ".", "mentions", ":", "cls", ".", "MENTION_TO_CLUSTER", "[", "m", "]", "=", "c_target" ]
By the time we're just folding in clusters, there's no need to maintain self.INSTANCES and self.clusters, so we just call this method
[ "By", "the", "time", "we", "re", "just", "folding", "in", "clusters", "there", "s", "no", "need", "to", "maintain", "self", ".", "INSTANCES", "and", "self", ".", "clusters", "so", "we", "just", "call", "this", "method" ]
b119a953d46b2e23ad346927a0fb5c5047f28b9b
https://github.com/jeff-regier/authortoolkit/blob/b119a953d46b2e23ad346927a0fb5c5047f28b9b/authortoolkit/agglomerator.py#L67-L75
246,609
seangilleran/py_enigma_operator
enigma_operator/enigma_operator.py
random_letters
def random_letters(count):
    """Get a series of pseudo-random letters with no repeats."""
    rv = random.choice(string.ascii_uppercase)
    while len(rv) < count:
        l = random.choice(string.ascii_uppercase)
        if not l in rv:
            rv += l
    return rv
python
def random_letters(count): """Get a series of pseudo-random letters with no repeats.""" rv = random.choice(string.ascii_uppercase) while len(rv) < count: l = random.choice(string.ascii_uppercase) if not l in rv: rv += l return rv
[ "def", "random_letters", "(", "count", ")", ":", "rv", "=", "random", ".", "choice", "(", "string", ".", "ascii_uppercase", ")", "while", "len", "(", "rv", ")", "<", "count", ":", "l", "=", "random", ".", "choice", "(", "string", ".", "ascii_uppercase", ")", "if", "not", "l", "in", "rv", ":", "rv", "+=", "l", "return", "rv" ]
Get a series of pseudo-random letters with no repeats.
[ "Get", "a", "series", "of", "pseudo", "-", "random", "letters", "with", "no", "repeats", "." ]
b37a29260faa2de024999a0fe6302c6e70b2b0e3
https://github.com/seangilleran/py_enigma_operator/blob/b37a29260faa2de024999a0fe6302c6e70b2b0e3/enigma_operator/enigma_operator.py#L9-L16
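A quick sketch of what the function returns; the import path is an assumption based on the row's path field and may differ in the packaged module:

    from enigma_operator.enigma_operator import random_letters  # assumed import path

    key = random_letters(3)
    print(key)  # e.g. 'QKZ': three distinct uppercase letters
    assert len(key) == 3 and len(set(key)) == 3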
246,610
seangilleran/py_enigma_operator
enigma_operator/enigma_operator.py
random_codebuch
def random_codebuch(path):
    """Generate a month-long codebuch and save it to a file."""
    lines = []
    for i in range(31):
        line = str(i+1) + " "

        # Pick rotors
        all_rotors = ['I', 'II', 'III', 'IV', 'V']
        rotors = [random.choice(all_rotors)]
        while len(rotors) < 3:
            r = random.choice(all_rotors)
            if not r in rotors:
                rotors.append(r)
                line += r + ' '

        # Pick rotor settings.
        settings = [str(random.randint(1, 26))]
        while len(settings) < 3:
            s = str(random.randint(1, 26))
            if not s in settings:
                settings.append(s)
                line += s + ' '

        # Pick plugboard settings.
        plugboard = []
        while len(plugboard) < 20:
            p1 = random_letters(1)
            p2 = random_letters(1)
            if (not p1 == p2 and
                    not p1 in plugboard and not p2 in plugboard):
                plugboard.extend([p1, p2])
                line += p1 + p2 + ' '

        # Pick a reflector.
        reflector = random.choice(['B', 'C'])
        line += reflector

        line += os.linesep
        lines.append(line)

    with open(path, 'w') as f:
        f.writelines(lines)

    return lines
python
def random_codebuch(path): """Generate a month-long codebuch and save it to a file.""" lines = [] for i in range(31): line = str(i+1) + " " # Pick rotors all_rotors = ['I', 'II', 'III', 'IV', 'V'] rotors = [random.choice(all_rotors)] while len(rotors) < 3: r = random.choice(all_rotors) if not r in rotors: rotors.append(r) line += r + ' ' # Pick rotor settings. settings = [str(random.randint(1, 26))] while len(settings) < 3: s = str(random.randint(1, 26)) if not s in settings: settings.append(s) line += s + ' ' # Pick plugboard settings. plugboard = [] while len(plugboard) < 20: p1 = random_letters(1) p2 = random_letters(1) if (not p1 == p2 and not p1 in plugboard and not p2 in plugboard): plugboard.extend([p1, p2]) line += p1 + p2 + ' ' # Pick a reflector. reflector = random.choice(['B', 'C']) line += reflector line += os.linesep lines.append(line) with open(path, 'w') as f: f.writelines(lines) return lines
[ "def", "random_codebuch", "(", "path", ")", ":", "lines", "=", "[", "]", "for", "i", "in", "range", "(", "31", ")", ":", "line", "=", "str", "(", "i", "+", "1", ")", "+", "\" \"", "# Pick rotors", "all_rotors", "=", "[", "'I'", ",", "'II'", ",", "'III'", ",", "'IV'", ",", "'V'", "]", "rotors", "=", "[", "random", ".", "choice", "(", "all_rotors", ")", "]", "while", "len", "(", "rotors", ")", "<", "3", ":", "r", "=", "random", ".", "choice", "(", "all_rotors", ")", "if", "not", "r", "in", "rotors", ":", "rotors", ".", "append", "(", "r", ")", "line", "+=", "r", "+", "' '", "# Pick rotor settings.", "settings", "=", "[", "str", "(", "random", ".", "randint", "(", "1", ",", "26", ")", ")", "]", "while", "len", "(", "settings", ")", "<", "3", ":", "s", "=", "str", "(", "random", ".", "randint", "(", "1", ",", "26", ")", ")", "if", "not", "s", "in", "settings", ":", "settings", ".", "append", "(", "s", ")", "line", "+=", "s", "+", "' '", "# Pick plugboard settings.", "plugboard", "=", "[", "]", "while", "len", "(", "plugboard", ")", "<", "20", ":", "p1", "=", "random_letters", "(", "1", ")", "p2", "=", "random_letters", "(", "1", ")", "if", "(", "not", "p1", "==", "p2", "and", "not", "p1", "in", "plugboard", "and", "not", "p2", "in", "plugboard", ")", ":", "plugboard", ".", "extend", "(", "[", "p1", ",", "p2", "]", ")", "line", "+=", "p1", "+", "p2", "+", "' '", "# Pick a reflector.", "reflector", "=", "random", ".", "choice", "(", "[", "'B'", ",", "'C'", "]", ")", "line", "+=", "reflector", "line", "+=", "os", ".", "linesep", "lines", ".", "append", "(", "line", ")", "with", "open", "(", "path", ",", "'w'", ")", "as", "f", ":", "f", ".", "writelines", "(", "lines", ")", "return", "lines" ]
Generate a month-long codebuch and save it to a file.
[ "Generate", "a", "month", "-", "long", "codebuch", "and", "save", "it", "to", "a", "file", "." ]
b37a29260faa2de024999a0fe6302c6e70b2b0e3
https://github.com/seangilleran/py_enigma_operator/blob/b37a29260faa2de024999a0fe6302c6e70b2b0e3/enigma_operator/enigma_operator.py#L19-L63
246,611
seangilleran/py_enigma_operator
enigma_operator/enigma_operator.py
EnigmaOperator.encrypt
def encrypt(self, plaintext):
    """Have the operator encrypt a message."""

    # Encrpyt message key.
    msg_key = random_letters(3)
    while msg_key == self.grundstellung:
        msg_key = random_letters(3)
    self.machine.set_display(self.grundstellung)
    enc_key = self.machine.process_text(msg_key)

    # Encrpyt message.
    self.machine.set_display(msg_key)
    ciphertext = self.machine.process_text(plaintext)

    # Encode message with keys.
    return "{enc_key}{ciphertext}{grundstellung}".format(
        enc_key=enc_key,
        ciphertext=ciphertext,
        grundstellung=self.grundstellung
    )
python
def encrypt(self, plaintext): """Have the operator encrypt a message.""" # Encrpyt message key. msg_key = random_letters(3) while msg_key == self.grundstellung: msg_key = random_letters(3) self.machine.set_display(self.grundstellung) enc_key = self.machine.process_text(msg_key) # Encrpyt message. self.machine.set_display(msg_key) ciphertext = self.machine.process_text(plaintext) # Encode message with keys. return "{enc_key}{ciphertext}{grundstellung}".format( enc_key=enc_key, ciphertext=ciphertext, grundstellung=self.grundstellung )
[ "def", "encrypt", "(", "self", ",", "plaintext", ")", ":", "# Encrpyt message key.", "msg_key", "=", "random_letters", "(", "3", ")", "while", "msg_key", "==", "self", ".", "grundstellung", ":", "msg_key", "=", "random_letters", "(", "3", ")", "self", ".", "machine", ".", "set_display", "(", "self", ".", "grundstellung", ")", "enc_key", "=", "self", ".", "machine", ".", "process_text", "(", "msg_key", ")", "# Encrpyt message.", "self", ".", "machine", ".", "set_display", "(", "msg_key", ")", "ciphertext", "=", "self", ".", "machine", ".", "process_text", "(", "plaintext", ")", "# Encode message with keys.", "return", "\"{enc_key}{ciphertext}{grundstellung}\"", ".", "format", "(", "enc_key", "=", "enc_key", ",", "ciphertext", "=", "ciphertext", ",", "grundstellung", "=", "self", ".", "grundstellung", ")" ]
Have the operator encrypt a message.
[ "Have", "the", "operator", "encrypt", "a", "message", "." ]
b37a29260faa2de024999a0fe6302c6e70b2b0e3
https://github.com/seangilleran/py_enigma_operator/blob/b37a29260faa2de024999a0fe6302c6e70b2b0e3/enigma_operator/enigma_operator.py#L81-L100
246,612
seangilleran/py_enigma_operator
enigma_operator/enigma_operator.py
EnigmaOperator.decrypt
def decrypt(self, ciphertext):
    """Have the operator decrypt a message."""

    # Separate keys from message.
    enc_key = ciphertext[:3]
    message = ciphertext[3:-3]
    grundstellung = ciphertext[-3:]

    # Decrypt message key.
    self.machine.set_display(grundstellung)
    msg_key = self.machine.process_text(enc_key)

    # Decrpyt message.
    self.machine.set_display(msg_key)
    return self.machine.process_text(message)
python
def decrypt(self, ciphertext): """Have the operator decrypt a message.""" # Separate keys from message. enc_key = ciphertext[:3] message = ciphertext[3:-3] grundstellung = ciphertext[-3:] # Decrypt message key. self.machine.set_display(grundstellung) msg_key = self.machine.process_text(enc_key) # Decrpyt message. self.machine.set_display(msg_key) return self.machine.process_text(message)
[ "def", "decrypt", "(", "self", ",", "ciphertext", ")", ":", "# Separate keys from message.", "enc_key", "=", "ciphertext", "[", ":", "3", "]", "message", "=", "ciphertext", "[", "3", ":", "-", "3", "]", "grundstellung", "=", "ciphertext", "[", "-", "3", ":", "]", "# Decrypt message key.", "self", ".", "machine", ".", "set_display", "(", "grundstellung", ")", "msg_key", "=", "self", ".", "machine", ".", "process_text", "(", "enc_key", ")", "# Decrpyt message.", "self", ".", "machine", ".", "set_display", "(", "msg_key", ")", "return", "self", ".", "machine", ".", "process_text", "(", "message", ")" ]
Have the operator decrypt a message.
[ "Have", "the", "operator", "decrypt", "a", "message", "." ]
b37a29260faa2de024999a0fe6302c6e70b2b0e3
https://github.com/seangilleran/py_enigma_operator/blob/b37a29260faa2de024999a0fe6302c6e70b2b0e3/enigma_operator/enigma_operator.py#L102-L115
246,613
henrysher/kotocore
kotocore/__init__.py
get_version
def get_version(full=False):
    """
    Returns a string-ified version number.

    Optionally accepts a ``full`` parameter, which if ``True``, will include
    any pre-release information. (Default: ``False``)
    """
    version = '.'.join([str(bit) for bit in __version__[:3]])

    if full:
        version = '-'.join([version] + list(__version__[3:]))

    return version
python
def get_version(full=False): """ Returns a string-ified version number. Optionally accepts a ``full`` parameter, which if ``True``, will include any pre-release information. (Default: ``False``) """ version = '.'.join([str(bit) for bit in __version__[:3]]) if full: version = '-'.join([version] + list(__version__[3:])) return version
[ "def", "get_version", "(", "full", "=", "False", ")", ":", "version", "=", "'.'", ".", "join", "(", "[", "str", "(", "bit", ")", "for", "bit", "in", "__version__", "[", ":", "3", "]", "]", ")", "if", "full", ":", "version", "=", "'-'", ".", "join", "(", "[", "version", "]", "+", "list", "(", "__version__", "[", "3", ":", "]", ")", ")", "return", "version" ]
Returns a string-ified version number. Optionally accepts a ``full`` parameter, which if ``True``, will include any pre-release information. (Default: ``False``)
[ "Returns", "a", "string", "-", "ified", "version", "number", "." ]
c52d2f3878b924ceabca07f61c91abcb1b230ecc
https://github.com/henrysher/kotocore/blob/c52d2f3878b924ceabca07f61c91abcb1b230ecc/kotocore/__init__.py#L8-L20
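Rather than importing kotocore, this sketch (not part of the dataset row) just evaluates the same two expressions from the function above on a hypothetical __version__ tuple; the pre-release parts must already be strings for the join to work:

    __version__ = (0, 1, 0, 'alpha', '2')  # hypothetical version tuple

    version = '.'.join([str(bit) for bit in __version__[:3]])   # '0.1.0'
    full_version = '-'.join([version] + list(__version__[3:]))  # '0.1.0-alpha-2'
    print(version, full_version)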
246,614
koheimiya/extheano
extheano/jit.py
FuncInfo.parse_args_kwargs
def parse_args_kwargs(self, *args, **kwargs):
    '''Parse the arguments with keywords.'''
    # unpack the arginfo
    keys, defdict = self.arginfo
    assigned = keys[:len(args)]
    not_assigned = keys[len(args):]

    # validate kwargs
    for key in kwargs:
        assert key not in assigned
        assert key in keys

    # integrate args and kwargs
    knowns = dict(defdict, **kwargs)
    parsed_args = args + tuple([knowns[key] for key in not_assigned])
    return parsed_args
python
def parse_args_kwargs(self, *args, **kwargs): '''Parse the arguments with keywords.''' # unpack the arginfo keys, defdict = self.arginfo assigned = keys[:len(args)] not_assigned = keys[len(args):] # validate kwargs for key in kwargs: assert key not in assigned assert key in keys # integrate args and kwargs knowns = dict(defdict, **kwargs) parsed_args = args + tuple([knowns[key] for key in not_assigned]) return parsed_args
[ "def", "parse_args_kwargs", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# unpack the arginfo", "keys", ",", "defdict", "=", "self", ".", "arginfo", "assigned", "=", "keys", "[", ":", "len", "(", "args", ")", "]", "not_assigned", "=", "keys", "[", "len", "(", "args", ")", ":", "]", "# validate kwargs", "for", "key", "in", "kwargs", ":", "assert", "key", "not", "in", "assigned", "assert", "key", "in", "keys", "# integrate args and kwargs", "knowns", "=", "dict", "(", "defdict", ",", "*", "*", "kwargs", ")", "parsed_args", "=", "args", "+", "tuple", "(", "[", "knowns", "[", "key", "]", "for", "key", "in", "not_assigned", "]", ")", "return", "parsed_args" ]
Parse the arguments with keywords.
[ "Parse", "the", "arguments", "with", "keywords", "." ]
ea099a6395ca8772660b2c715fb26cde12738181
https://github.com/koheimiya/extheano/blob/ea099a6395ca8772660b2c715fb26cde12738181/extheano/jit.py#L139-L154
246,615
koheimiya/extheano
extheano/jit.py
FuncInfo._get_keys_defdict
def _get_keys_defdict(self):
    '''Get the keys and the default dictionary of the given function's
    arguments
    '''
    # inspect argspecs
    argspec = inspect.getargspec(self.func)
    keys, defvals = argspec.args, argspec.defaults

    # convert to (list_of_argkeys, dict_of_default_keys)
    if defvals is None:
        return keys, None
    else:
        defvals = list(defvals)
        keys.reverse()
        defvals.reverse()
        defdict = dict(zip(keys, defvals))
        keys.reverse()
        return keys, defdict
python
def _get_keys_defdict(self): '''Get the keys and the default dictionary of the given function's arguments ''' # inspect argspecs argspec = inspect.getargspec(self.func) keys, defvals = argspec.args, argspec.defaults # convert to (list_of_argkeys, dict_of_default_keys) if defvals is None: return keys, None else: defvals = list(defvals) keys.reverse() defvals.reverse() defdict = dict(zip(keys, defvals)) keys.reverse() return keys, defdict
[ "def", "_get_keys_defdict", "(", "self", ")", ":", "# inspect argspecs", "argspec", "=", "inspect", ".", "getargspec", "(", "self", ".", "func", ")", "keys", ",", "defvals", "=", "argspec", ".", "args", ",", "argspec", ".", "defaults", "# convert to (list_of_argkeys, dict_of_default_keys)", "if", "defvals", "is", "None", ":", "return", "keys", ",", "None", "else", ":", "defvals", "=", "list", "(", "defvals", ")", "keys", ".", "reverse", "(", ")", "defvals", ".", "reverse", "(", ")", "defdict", "=", "dict", "(", "zip", "(", "keys", ",", "defvals", ")", ")", "keys", ".", "reverse", "(", ")", "return", "keys", ",", "defdict" ]
Get the keys and the default dictionary of the given function's arguments
[ "Get", "the", "keys", "and", "the", "default", "dictionary", "of", "the", "given", "function", "s", "arguments" ]
ea099a6395ca8772660b2c715fb26cde12738181
https://github.com/koheimiya/extheano/blob/ea099a6395ca8772660b2c715fb26cde12738181/extheano/jit.py#L156-L173
246,616
koheimiya/extheano
extheano/jit.py
Compiler.compile_with_value
def compile_with_value(self, func, args=None, owner=None):
    '''Compile the function with array-like objects'''
    # format args
    if args is None:
        args = []

    # cast numpy.ndarray into theano.tensor
    theano_args = [self.cast2theano_var(a, 'extheano.jit.Compiler-arg-%d' % i)
                   for a, i in zip(args, range(len(args)))]

    # compiled value with symbol
    return self.compile_with_symbol(func, theano_args, owner)
python
def compile_with_value(self, func, args=None, owner=None): '''Compile the function with array-like objects''' # format args if args is None: args = [] # cast numpy.ndarray into theano.tensor theano_args = [self.cast2theano_var(a, 'extheano.jit.Compiler-arg-%d' % i) for a, i in zip(args, range(len(args)))] # compiled value with symbol return self.compile_with_symbol(func, theano_args, owner)
[ "def", "compile_with_value", "(", "self", ",", "func", ",", "args", "=", "None", ",", "owner", "=", "None", ")", ":", "# format args", "if", "args", "is", "None", ":", "args", "=", "[", "]", "# cast numpy.ndarray into theano.tensor", "theano_args", "=", "[", "self", ".", "cast2theano_var", "(", "a", ",", "'extheano.jit.Compiler-arg-%d'", "%", "i", ")", "for", "a", ",", "i", "in", "zip", "(", "args", ",", "range", "(", "len", "(", "args", ")", ")", ")", "]", "# compiled value with symbol", "return", "self", ".", "compile_with_symbol", "(", "func", ",", "theano_args", ",", "owner", ")" ]
Compile the function with array-like objects
[ "Compile", "the", "function", "with", "array", "-", "like", "objects" ]
ea099a6395ca8772660b2c715fb26cde12738181
https://github.com/koheimiya/extheano/blob/ea099a6395ca8772660b2c715fb26cde12738181/extheano/jit.py#L184-L195
246,617
koheimiya/extheano
extheano/jit.py
Compiler.compile_with_symbol
def compile_with_symbol(self, func, theano_args=None, owner=None):
    '''Compile the function with theano symbols'''
    if theano_args is None:
        theano_args = []

    # initialize the shared buffers
    upc = UpdateCollector()

    # get the output symbols and other Theano options
    theano_ret = func(*theano_args) if owner is None \
        else func(owner, *theano_args)

    # integrate the information of updates, givens and the other options
    out = copy.copy(self.default_options)
    out['outputs'] = theano_ret
    out['updates'] = upc.extract_updates()

    # compile the function
    return theano.function(theano_args, **out)
python
def compile_with_symbol(self, func, theano_args=None, owner=None): '''Compile the function with theano symbols''' if theano_args is None: theano_args = [] # initialize the shared buffers upc = UpdateCollector() # get the output symbols and other Theano options theano_ret = func(*theano_args) if owner is None \ else func(owner, *theano_args) # integrate the information of updates, givens and the other options out = copy.copy(self.default_options) out['outputs'] = theano_ret out['updates'] = upc.extract_updates() # compile the function return theano.function(theano_args, **out)
[ "def", "compile_with_symbol", "(", "self", ",", "func", ",", "theano_args", "=", "None", ",", "owner", "=", "None", ")", ":", "if", "theano_args", "is", "None", ":", "theano_args", "=", "[", "]", "# initialize the shared buffers", "upc", "=", "UpdateCollector", "(", ")", "# get the output symbols and other Theano options", "theano_ret", "=", "func", "(", "*", "theano_args", ")", "if", "owner", "is", "None", "else", "func", "(", "owner", ",", "*", "theano_args", ")", "# integrate the information of updates, givens and the other options", "out", "=", "copy", ".", "copy", "(", "self", ".", "default_options", ")", "out", "[", "'outputs'", "]", "=", "theano_ret", "out", "[", "'updates'", "]", "=", "upc", ".", "extract_updates", "(", ")", "# compile the function", "return", "theano", ".", "function", "(", "theano_args", ",", "*", "*", "out", ")" ]
Compile the function with theano symbols
[ "Compile", "the", "function", "with", "theano", "symbols" ]
ea099a6395ca8772660b2c715fb26cde12738181
https://github.com/koheimiya/extheano/blob/ea099a6395ca8772660b2c715fb26cde12738181/extheano/jit.py#L197-L215
246,618
koheimiya/extheano
extheano/jit.py
Compiler.cast2theano_var
def cast2theano_var(self, array_like, name=None):
    '''Cast `numpy.ndarray` into `theano.tensor` keeping `dtype` and `ndim`
    compatible
    '''
    # extract the information of the input value
    array = np.asarray(array_like)
    args = (name, array.dtype)
    ndim = array.ndim

    # cast with the information above
    if ndim == 0:
        return T.scalar(*args)
    elif ndim == 1:
        return T.vector(*args)
    elif ndim == 2:
        return T.matrix(*args)
    elif ndim == 3:
        return T.tensor3(*args)
    elif ndim == 4:
        return T.tensor4(*args)
    else:
        raise ValueError('extheano.jit.Compiler: Unsupported type or shape')
python
def cast2theano_var(self, array_like, name=None): '''Cast `numpy.ndarray` into `theano.tensor` keeping `dtype` and `ndim` compatible ''' # extract the information of the input value array = np.asarray(array_like) args = (name, array.dtype) ndim = array.ndim # cast with the information above if ndim == 0: return T.scalar(*args) elif ndim == 1: return T.vector(*args) elif ndim == 2: return T.matrix(*args) elif ndim == 3: return T.tensor3(*args) elif ndim == 4: return T.tensor4(*args) else: raise ValueError('extheano.jit.Compiler: Unsupported type or shape')
[ "def", "cast2theano_var", "(", "self", ",", "array_like", ",", "name", "=", "None", ")", ":", "# extract the information of the input value", "array", "=", "np", ".", "asarray", "(", "array_like", ")", "args", "=", "(", "name", ",", "array", ".", "dtype", ")", "ndim", "=", "array", ".", "ndim", "# cast with the information above", "if", "ndim", "==", "0", ":", "return", "T", ".", "scalar", "(", "*", "args", ")", "elif", "ndim", "==", "1", ":", "return", "T", ".", "vector", "(", "*", "args", ")", "elif", "ndim", "==", "2", ":", "return", "T", ".", "matrix", "(", "*", "args", ")", "elif", "ndim", "==", "3", ":", "return", "T", ".", "tensor3", "(", "*", "args", ")", "elif", "ndim", "==", "4", ":", "return", "T", ".", "tensor4", "(", "*", "args", ")", "else", ":", "raise", "ValueError", "(", "'extheano.jit.Compiler: Unsupported type or shape'", ")" ]
Cast `numpy.ndarray` into `theano.tensor` keeping `dtype` and `ndim` compatible
[ "Cast", "numpy", ".", "ndarray", "into", "theano", ".", "tensor", "keeping", "dtype", "and", "ndim", "compatible" ]
ea099a6395ca8772660b2c715fb26cde12738181
https://github.com/koheimiya/extheano/blob/ea099a6395ca8772660b2c715fb26cde12738181/extheano/jit.py#L217-L238
246,619
tlevine/vlermv
vlermv/_fs.py
_get_fn
def _get_fn(fn, mode, load):
    '''
    Load a contents, checking that the file was not modified during the read.
    '''
    try:
        mtime_before = os.path.getmtime(fn)
    except OSError:
        mtime_before = None

    try:
        with open(fn, mode) as fp:
            item = load(fp)
    except OpenError:
        raise
    else:
        mtime_after = os.path.getmtime(fn)
        if mtime_before in {None, mtime_after}:
            return item
        else:
            raise EnvironmentError('File was edited during read: %s' % fn)
python
def _get_fn(fn, mode, load): ''' Load a contents, checking that the file was not modified during the read. ''' try: mtime_before = os.path.getmtime(fn) except OSError: mtime_before = None try: with open(fn, mode) as fp: item = load(fp) except OpenError: raise else: mtime_after = os.path.getmtime(fn) if mtime_before in {None, mtime_after}: return item else: raise EnvironmentError('File was edited during read: %s' % fn)
[ "def", "_get_fn", "(", "fn", ",", "mode", ",", "load", ")", ":", "try", ":", "mtime_before", "=", "os", ".", "path", ".", "getmtime", "(", "fn", ")", "except", "OSError", ":", "mtime_before", "=", "None", "try", ":", "with", "open", "(", "fn", ",", "mode", ")", "as", "fp", ":", "item", "=", "load", "(", "fp", ")", "except", "OpenError", ":", "raise", "else", ":", "mtime_after", "=", "os", ".", "path", ".", "getmtime", "(", "fn", ")", "if", "mtime_before", "in", "{", "None", ",", "mtime_after", "}", ":", "return", "item", "else", ":", "raise", "EnvironmentError", "(", "'File was edited during read: %s'", "%", "fn", ")" ]
Load a contents, checking that the file was not modified during the read.
[ "Load", "a", "contents", "checking", "that", "the", "file", "was", "not", "modified", "during", "the", "read", "." ]
0b332ea1c20e4065b30f5e3ec0c1d0fffbce6b20
https://github.com/tlevine/vlermv/blob/0b332ea1c20e4065b30f5e3ec0c1d0fffbce6b20/vlermv/_fs.py#L9-L28
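A usage sketch (not part of the dataset row): _get_fn is a private vlermv helper and OpenError is defined elsewhere in that package, so both the import and the file name below are assumptions; it simply mirrors the call shape with json.load as the loader:

    import json
    from vlermv._fs import _get_fn  # private helper; import path taken from the row's path field

    # Read a (hypothetical) JSON file, re-checking the mtime afterwards to detect concurrent edits.
    data = _get_fn('config.json', 'r', json.load)
    print(data)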
246,620
tylerbutler/propane
propane/collections.py
flatten_iterable
def flatten_iterable(iterable):
    """
    Flattens a nested iterable into a single layer. Generator.

    If you only want to flatten a single level, use more_itertools.flatten.

    Example::

        >>> nested_iterable = (('t1', 't2'), ['l1', 'l2', ('l1', 'l2')])
        >>> list(flatten_iterable(nested_iterable))
        ['t1', 't2', 'l1', 'l2', 'l1', 'l2']
        >>> set(flatten_iterable(nested_iterable)) == {'t1', 't2', 'l1', 'l2'}
        True
    """
    for item in iterable:
        if isinstance(item, Iterable) and not isinstance(item, string_types):
            for sub in flatten_iterable(item):
                yield sub
        else:
            yield item
python
def flatten_iterable(iterable): """ Flattens a nested iterable into a single layer. Generator. If you only want to flatten a single level, use more_itertools.flatten. Example:: >>> nested_iterable = (('t1', 't2'), ['l1', 'l2', ('l1', 'l2')]) >>> list(flatten_iterable(nested_iterable)) ['t1', 't2', 'l1', 'l2', 'l1', 'l2'] >>> set(flatten_iterable(nested_iterable)) == {'t1', 't2', 'l1', 'l2'} True """ for item in iterable: if isinstance(item, Iterable) and not isinstance(item, string_types): for sub in flatten_iterable(item): yield sub else: yield item
[ "def", "flatten_iterable", "(", "iterable", ")", ":", "for", "item", "in", "iterable", ":", "if", "isinstance", "(", "item", ",", "Iterable", ")", "and", "not", "isinstance", "(", "item", ",", "string_types", ")", ":", "for", "sub", "in", "flatten_iterable", "(", "item", ")", ":", "yield", "sub", "else", ":", "yield", "item" ]
Flattens a nested iterable into a single layer. Generator. If you only want to flatten a single level, use more_itertools.flatten. Example:: >>> nested_iterable = (('t1', 't2'), ['l1', 'l2', ('l1', 'l2')]) >>> list(flatten_iterable(nested_iterable)) ['t1', 't2', 'l1', 'l2', 'l1', 'l2'] >>> set(flatten_iterable(nested_iterable)) == {'t1', 't2', 'l1', 'l2'} True
[ "Flattens", "a", "nested", "iterable", "into", "a", "single", "layer", ".", "Generator", "." ]
6c404285ab8d78865b7175a5c8adf8fae12d6be5
https://github.com/tylerbutler/propane/blob/6c404285ab8d78865b7175a5c8adf8fae12d6be5/propane/collections.py#L86-L105
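The docstring's doctest can be reproduced directly, assuming the propane package is installed and that Iterable and string_types come from collections.abc and six in the surrounding module:

    from propane.collections import flatten_iterable  # path taken from the row

    nested = (('t1', 't2'), ['l1', 'l2', ('l1', 'l2')])
    print(list(flatten_iterable(nested)))                             # ['t1', 't2', 'l1', 'l2', 'l1', 'l2']
    print(set(flatten_iterable(nested)) == {'t1', 't2', 'l1', 'l2'})  # True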
246,621
knagra/farnsworth
events/views.py
list_events_view
def list_events_view(request):
    ''' A list view of upcoming events. '''
    page_name = "Upcoming Events"
    profile = UserProfile.objects.get(user=request.user)
    event_form = EventForm(
        request.POST if 'post_event' in request.POST else None,
        profile=profile,
    )
    if event_form.is_valid():
        event_form.save()
        return HttpResponseRedirect(reverse('events:list'))
    # a pseudo-dictionary, actually a list with items of form (event, ongoing,
    # rsvpd, rsvp_form), where ongoing is a boolean of whether the event is
    # currently ongoing, rsvpd is a boolean of whether the user has rsvp'd to
    # the event
    events_dict = list()
    for event in Event.objects.filter(end_time__gte=now()):
        rsvp_form = RsvpForm(
            request.POST if "rsvp-{0}".format(event.pk) in request.POST else None,
            instance=event,
            profile=profile,
        )
        if rsvp_form.is_valid():
            rsvpd = rsvp_form.save()
            if rsvpd:
                message = MESSAGES['RSVP_ADD'].format(event=event.title)
            else:
                message = MESSAGES['RSVP_REMOVE'].format(event=event.title)
            messages.add_message(request, messages.SUCCESS, message)
            return HttpResponseRedirect(reverse('events:list'))
        ongoing = ((event.start_time <= now()) and (event.end_time >= now()))
        rsvpd = (profile in event.rsvps.all())
        events_dict.append((event, ongoing, rsvpd, rsvp_form))
    if request.method == "POST":
        messages.add_message(request, messages.ERROR, MESSAGES["EVENT_ERROR"])
    return render_to_response('list_events.html', {
        'page_name': page_name,
        'events_dict': events_dict,
        'now': now(),
        'event_form': event_form,
    }, context_instance=RequestContext(request))
python
def list_events_view(request): ''' A list view of upcoming events. ''' page_name = "Upcoming Events" profile = UserProfile.objects.get(user=request.user) event_form = EventForm( request.POST if 'post_event' in request.POST else None, profile=profile, ) if event_form.is_valid(): event_form.save() return HttpResponseRedirect(reverse('events:list')) # a pseudo-dictionary, actually a list with items of form (event, ongoing, # rsvpd, rsvp_form), where ongoing is a boolean of whether the event is # currently ongoing, rsvpd is a boolean of whether the user has rsvp'd to # the event events_dict = list() for event in Event.objects.filter(end_time__gte=now()): rsvp_form = RsvpForm( request.POST if "rsvp-{0}".format(event.pk) in request.POST else None, instance=event, profile=profile, ) if rsvp_form.is_valid(): rsvpd = rsvp_form.save() if rsvpd: message = MESSAGES['RSVP_ADD'].format(event=event.title) else: message = MESSAGES['RSVP_REMOVE'].format(event=event.title) messages.add_message(request, messages.SUCCESS, message) return HttpResponseRedirect(reverse('events:list')) ongoing = ((event.start_time <= now()) and (event.end_time >= now())) rsvpd = (profile in event.rsvps.all()) events_dict.append((event, ongoing, rsvpd, rsvp_form)) if request.method == "POST": messages.add_message(request, messages.ERROR, MESSAGES["EVENT_ERROR"]) return render_to_response('list_events.html', { 'page_name': page_name, 'events_dict': events_dict, 'now': now(), 'event_form': event_form, }, context_instance=RequestContext(request))
[ "def", "list_events_view", "(", "request", ")", ":", "page_name", "=", "\"Upcoming Events\"", "profile", "=", "UserProfile", ".", "objects", ".", "get", "(", "user", "=", "request", ".", "user", ")", "event_form", "=", "EventForm", "(", "request", ".", "POST", "if", "'post_event'", "in", "request", ".", "POST", "else", "None", ",", "profile", "=", "profile", ",", ")", "if", "event_form", ".", "is_valid", "(", ")", ":", "event_form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'events:list'", ")", ")", "# a pseudo-dictionary, actually a list with items of form (event, ongoing,", "# rsvpd, rsvp_form), where ongoing is a boolean of whether the event is", "# currently ongoing, rsvpd is a boolean of whether the user has rsvp'd to", "# the event", "events_dict", "=", "list", "(", ")", "for", "event", "in", "Event", ".", "objects", ".", "filter", "(", "end_time__gte", "=", "now", "(", ")", ")", ":", "rsvp_form", "=", "RsvpForm", "(", "request", ".", "POST", "if", "\"rsvp-{0}\"", ".", "format", "(", "event", ".", "pk", ")", "in", "request", ".", "POST", "else", "None", ",", "instance", "=", "event", ",", "profile", "=", "profile", ",", ")", "if", "rsvp_form", ".", "is_valid", "(", ")", ":", "rsvpd", "=", "rsvp_form", ".", "save", "(", ")", "if", "rsvpd", ":", "message", "=", "MESSAGES", "[", "'RSVP_ADD'", "]", ".", "format", "(", "event", "=", "event", ".", "title", ")", "else", ":", "message", "=", "MESSAGES", "[", "'RSVP_REMOVE'", "]", ".", "format", "(", "event", "=", "event", ".", "title", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "SUCCESS", ",", "message", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'events:list'", ")", ")", "ongoing", "=", "(", "(", "event", ".", "start_time", "<=", "now", "(", ")", ")", "and", "(", "event", ".", "end_time", ">=", "now", "(", ")", ")", ")", "rsvpd", "=", "(", "profile", "in", "event", ".", "rsvps", ".", "all", "(", ")", ")", "events_dict", ".", "append", "(", "(", "event", ",", "ongoing", ",", "rsvpd", ",", "rsvp_form", ")", ")", "if", "request", ".", "method", "==", "\"POST\"", ":", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "ERROR", ",", "MESSAGES", "[", "\"EVENT_ERROR\"", "]", ")", "return", "render_to_response", "(", "'list_events.html'", ",", "{", "'page_name'", ":", "page_name", ",", "'events_dict'", ":", "events_dict", ",", "'now'", ":", "now", "(", ")", ",", "'event_form'", ":", "event_form", ",", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")" ]
A list view of upcoming events.
[ "A", "list", "view", "of", "upcoming", "events", "." ]
1b6589f0d9fea154f0a1e2231ed906764ed26d26
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/events/views.py#L42-L86
246,622
knagra/farnsworth
events/views.py
edit_event_view
def edit_event_view(request, event_pk):
    ''' The view to edit an event. '''
    page_name = "Edit Event"
    profile = UserProfile.objects.get(user=request.user)
    event = get_object_or_404(Event, pk=event_pk)
    if event.owner != profile and not request.user.is_superuser:
        return HttpResponseRedirect(
            reverse('events:view', kwargs={"event_pk": event_pk}),
        )
    event_form = EventForm(
        request.POST or None,
        profile=profile,
        instance=event,
    )
    if event_form.is_valid():
        event = event_form.save()
        messages.add_message(
            request, messages.SUCCESS,
            MESSAGES['EVENT_UPDATED'].format(event=event.title),
        )
        return HttpResponseRedirect(
            reverse('events:view', kwargs={"event_pk": event_pk}),
        )
    return render_to_response('edit_event.html', {
        'page_name': page_name,
        'event_form': event_form,
    }, context_instance=RequestContext(request))
python
def edit_event_view(request, event_pk): ''' The view to edit an event. ''' page_name = "Edit Event" profile = UserProfile.objects.get(user=request.user) event = get_object_or_404(Event, pk=event_pk) if event.owner != profile and not request.user.is_superuser: return HttpResponseRedirect( reverse('events:view', kwargs={"event_pk": event_pk}), ) event_form = EventForm( request.POST or None, profile=profile, instance=event, ) if event_form.is_valid(): event = event_form.save() messages.add_message( request, messages.SUCCESS, MESSAGES['EVENT_UPDATED'].format(event=event.title), ) return HttpResponseRedirect( reverse('events:view', kwargs={"event_pk": event_pk}), ) return render_to_response('edit_event.html', { 'page_name': page_name, 'event_form': event_form, }, context_instance=RequestContext(request))
[ "def", "edit_event_view", "(", "request", ",", "event_pk", ")", ":", "page_name", "=", "\"Edit Event\"", "profile", "=", "UserProfile", ".", "objects", ".", "get", "(", "user", "=", "request", ".", "user", ")", "event", "=", "get_object_or_404", "(", "Event", ",", "pk", "=", "event_pk", ")", "if", "event", ".", "owner", "!=", "profile", "and", "not", "request", ".", "user", ".", "is_superuser", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'events:view'", ",", "kwargs", "=", "{", "\"event_pk\"", ":", "event_pk", "}", ")", ",", ")", "event_form", "=", "EventForm", "(", "request", ".", "POST", "or", "None", ",", "profile", "=", "profile", ",", "instance", "=", "event", ",", ")", "if", "event_form", ".", "is_valid", "(", ")", ":", "event", "=", "event_form", ".", "save", "(", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "SUCCESS", ",", "MESSAGES", "[", "'EVENT_UPDATED'", "]", ".", "format", "(", "event", "=", "event", ".", "title", ")", ",", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'events:view'", ",", "kwargs", "=", "{", "\"event_pk\"", ":", "event_pk", "}", ")", ",", ")", "return", "render_to_response", "(", "'edit_event.html'", ",", "{", "'page_name'", ":", "page_name", ",", "'event_form'", ":", "event_form", ",", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")" ]
The view to edit an event.
[ "The", "view", "to", "edit", "an", "event", "." ]
1b6589f0d9fea154f0a1e2231ed906764ed26d26
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/events/views.py#L201-L230
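A hypothetical URLconf entry for the edit view in the record above, written in the Django 1.x style that matches the render_to_response/RequestContext idioms in the code. The regex, module path, and URL name are illustrative assumptions; in the real project the pattern sits under an "events" namespace, as the reverse('events:view', ...) calls imply.

# Assumed wiring, not taken from the farnsworth repository.
from django.conf.urls import url

from events.views import edit_event_view

urlpatterns = [
    # event_pk is captured from the URL and passed as a keyword argument
    url(r'^events/(?P<event_pk>\d+)/edit/$', edit_event_view, name='edit'),
]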
246,623
obiwanus/django-qurl
qurl/templatetags/qurl.py
_get_url_node
def _get_url_node(parser, bits): """ Parses the expression as if it was a normal url tag. Was copied from the original function django.template.defaulttags.url, but unnecessary pieces were removed. """ viewname = parser.compile_filter(bits[1]) args = [] kwargs = {} bits = bits[2:] if len(bits): kwarg_re = re.compile(r"(?:(\w+)=)?(.+)") for bit in bits: match = kwarg_re.match(bit) if not match: raise TemplateSyntaxError("Malformed arguments to url tag") name, value = match.groups() if name: kwargs[name] = parser.compile_filter(value) else: args.append(parser.compile_filter(value)) return URLNode(viewname, args, kwargs, asvar=None)
python
def _get_url_node(parser, bits): """ Parses the expression as if it was a normal url tag. Was copied from the original function django.template.defaulttags.url, but unnecessary pieces were removed. """ viewname = parser.compile_filter(bits[1]) args = [] kwargs = {} bits = bits[2:] if len(bits): kwarg_re = re.compile(r"(?:(\w+)=)?(.+)") for bit in bits: match = kwarg_re.match(bit) if not match: raise TemplateSyntaxError("Malformed arguments to url tag") name, value = match.groups() if name: kwargs[name] = parser.compile_filter(value) else: args.append(parser.compile_filter(value)) return URLNode(viewname, args, kwargs, asvar=None)
[ "def", "_get_url_node", "(", "parser", ",", "bits", ")", ":", "viewname", "=", "parser", ".", "compile_filter", "(", "bits", "[", "1", "]", ")", "args", "=", "[", "]", "kwargs", "=", "{", "}", "bits", "=", "bits", "[", "2", ":", "]", "if", "len", "(", "bits", ")", ":", "kwarg_re", "=", "re", ".", "compile", "(", "r\"(?:(\\w+)=)?(.+)\"", ")", "for", "bit", "in", "bits", ":", "match", "=", "kwarg_re", ".", "match", "(", "bit", ")", "if", "not", "match", ":", "raise", "TemplateSyntaxError", "(", "\"Malformed arguments to url tag\"", ")", "name", ",", "value", "=", "match", ".", "groups", "(", ")", "if", "name", ":", "kwargs", "[", "name", "]", "=", "parser", ".", "compile_filter", "(", "value", ")", "else", ":", "args", ".", "append", "(", "parser", ".", "compile_filter", "(", "value", ")", ")", "return", "URLNode", "(", "viewname", ",", "args", ",", "kwargs", ",", "asvar", "=", "None", ")" ]
Parses the expression as if it was a normal url tag. Was copied from the original function django.template.defaulttags.url, but unnecessary pieces were removed.
[ "Parses", "the", "expression", "as", "if", "it", "was", "a", "normal", "url", "tag", ".", "Was", "copied", "from", "the", "original", "function", "django", ".", "template", ".", "defaulttags", ".", "url", "but", "unnecessary", "pieces", "were", "removed", "." ]
745992fc4241fd7a2f034c202f6fe05da7437683
https://github.com/obiwanus/django-qurl/blob/745992fc4241fd7a2f034c202f6fe05da7437683/qurl/templatetags/qurl.py#L23-L46
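The argument-splitting behaviour of _get_url_node in the record above comes down to a single regular expression. A small, dependency-free sketch of how that regex separates keyword from positional bits (the sample bits are made up):

import re

# Same pattern as in _get_url_node: an optional "name=" prefix, then the value.
kwarg_re = re.compile(r"(?:(\w+)=)?(.+)")

for bit in ["'news:detail'", "pk=article.pk", "article.slug"]:
    name, value = kwarg_re.match(bit).groups()
    print(name, value)
# None 'news:detail'
# pk article.pk
# None article.slug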
246,624
fred49/linshare-api
linshareapi/admin/upgradetasks.py
UpgradeTasks.trigger
def trigger(self, identifier, force=True): """Trigger an upgrade task.""" self.debug(identifier) url = "{base}/{identifier}".format( base=self.local_base_url, identifier=identifier ) param = {} if force: param['force'] = force encode = urllib.urlencode(param) if encode: url += "?" url += encode return self.core.update(url, {})
python
def trigger(self, identifier, force=True): """Trigger an upgrade task.""" self.debug(identifier) url = "{base}/{identifier}".format( base=self.local_base_url, identifier=identifier ) param = {} if force: param['force'] = force encode = urllib.urlencode(param) if encode: url += "?" url += encode return self.core.update(url, {})
[ "def", "trigger", "(", "self", ",", "identifier", ",", "force", "=", "True", ")", ":", "self", ".", "debug", "(", "identifier", ")", "url", "=", "\"{base}/{identifier}\"", ".", "format", "(", "base", "=", "self", ".", "local_base_url", ",", "identifier", "=", "identifier", ")", "param", "=", "{", "}", "if", "force", ":", "param", "[", "'force'", "]", "=", "force", "encode", "=", "urllib", ".", "urlencode", "(", "param", ")", "if", "encode", ":", "url", "+=", "\"?\"", "url", "+=", "encode", "return", "self", ".", "core", ".", "update", "(", "url", ",", "{", "}", ")" ]
Trigger an upgrade task.
[ "Trigger", "an", "upgrade", "task", "." ]
be646c25aa8ba3718abb6869c620b157d53d6e41
https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/admin/upgradetasks.py#L177-L191
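The trigger() method in the record above only builds a URL with an optional ?force=... query string before delegating to self.core.update. A stand-alone sketch of that URL construction, using the Python 3 spelling urllib.parse.urlencode instead of the Python 2 urllib.urlencode seen in the code (the base path below is an assumption):

from urllib.parse import urlencode

def build_trigger_url(base, identifier, force=True):
    # Mirrors the query-string handling in UpgradeTasks.trigger.
    url = "{base}/{identifier}".format(base=base, identifier=identifier)
    params = {}
    if force:
        params['force'] = force
    if params:
        url += "?" + urlencode(params)
    return url

print(build_trigger_url("upgrade_tasks", "UPGRADE_2_2"))
# upgrade_tasks/UPGRADE_2_2?force=True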
246,625
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_negate_compare_text
def _negate_compare_text(atok: asttokens.ASTTokens, node: ast.Compare) -> str: """ Generate the text representing the negation of the comparison node. :param atok: parsing obtained with ``asttokens`` so that we can access the last tokens of a node. The standard ``ast`` module provides only the first token of an AST node. In lack of concrete syntax tree, getting text from first to last token is currently the simplest approach. :param node: AST node representing the comparison in a condition :return: text representation of the node's negation """ assert len(node.ops) == 1, "A single comparison expected, but got: {}".format(len(node.ops)) assert len(node.comparators) == 1, "A single comparator expected, but got: {}".format(len(node.comparators)) operator = node.ops[0] left = node.left right = node.comparators[0] left_text = atok.get_text(node=left) right_text = atok.get_text(node=right) text = '' if isinstance(operator, ast.Eq): text = '{} != {}'.format(left_text, right_text) elif isinstance(operator, ast.NotEq): text = '{} == {}'.format(left_text, right_text) elif isinstance(operator, ast.Lt): text = '{} >= {}'.format(left_text, right_text) elif isinstance(operator, ast.LtE): text = '{} > {}'.format(left_text, right_text) elif isinstance(operator, ast.Gt): text = '{} <= {}'.format(left_text, right_text) elif isinstance(operator, ast.GtE): text = '{} < {}'.format(left_text, right_text) elif isinstance(operator, ast.Is): text = '{} is not {}'.format(left_text, right_text) elif isinstance(operator, ast.IsNot): text = '{} is {}'.format(left_text, right_text) elif isinstance(operator, ast.In): text = '{} not in {}'.format(left_text, right_text) elif isinstance(operator, ast.NotIn): text = '{} in {}'.format(left_text, right_text) else: raise NotImplementedError("Unhandled comparison operator: {}".format(operator)) return text
python
def _negate_compare_text(atok: asttokens.ASTTokens, node: ast.Compare) -> str: """ Generate the text representing the negation of the comparison node. :param atok: parsing obtained with ``asttokens`` so that we can access the last tokens of a node. The standard ``ast`` module provides only the first token of an AST node. In lack of concrete syntax tree, getting text from first to last token is currently the simplest approach. :param node: AST node representing the comparison in a condition :return: text representation of the node's negation """ assert len(node.ops) == 1, "A single comparison expected, but got: {}".format(len(node.ops)) assert len(node.comparators) == 1, "A single comparator expected, but got: {}".format(len(node.comparators)) operator = node.ops[0] left = node.left right = node.comparators[0] left_text = atok.get_text(node=left) right_text = atok.get_text(node=right) text = '' if isinstance(operator, ast.Eq): text = '{} != {}'.format(left_text, right_text) elif isinstance(operator, ast.NotEq): text = '{} == {}'.format(left_text, right_text) elif isinstance(operator, ast.Lt): text = '{} >= {}'.format(left_text, right_text) elif isinstance(operator, ast.LtE): text = '{} > {}'.format(left_text, right_text) elif isinstance(operator, ast.Gt): text = '{} <= {}'.format(left_text, right_text) elif isinstance(operator, ast.GtE): text = '{} < {}'.format(left_text, right_text) elif isinstance(operator, ast.Is): text = '{} is not {}'.format(left_text, right_text) elif isinstance(operator, ast.IsNot): text = '{} is {}'.format(left_text, right_text) elif isinstance(operator, ast.In): text = '{} not in {}'.format(left_text, right_text) elif isinstance(operator, ast.NotIn): text = '{} in {}'.format(left_text, right_text) else: raise NotImplementedError("Unhandled comparison operator: {}".format(operator)) return text
[ "def", "_negate_compare_text", "(", "atok", ":", "asttokens", ".", "ASTTokens", ",", "node", ":", "ast", ".", "Compare", ")", "->", "str", ":", "assert", "len", "(", "node", ".", "ops", ")", "==", "1", ",", "\"A single comparison expected, but got: {}\"", ".", "format", "(", "len", "(", "node", ".", "ops", ")", ")", "assert", "len", "(", "node", ".", "comparators", ")", "==", "1", ",", "\"A single comparator expected, but got: {}\"", ".", "format", "(", "len", "(", "node", ".", "comparators", ")", ")", "operator", "=", "node", ".", "ops", "[", "0", "]", "left", "=", "node", ".", "left", "right", "=", "node", ".", "comparators", "[", "0", "]", "left_text", "=", "atok", ".", "get_text", "(", "node", "=", "left", ")", "right_text", "=", "atok", ".", "get_text", "(", "node", "=", "right", ")", "text", "=", "''", "if", "isinstance", "(", "operator", ",", "ast", ".", "Eq", ")", ":", "text", "=", "'{} != {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "NotEq", ")", ":", "text", "=", "'{} == {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "Lt", ")", ":", "text", "=", "'{} >= {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "LtE", ")", ":", "text", "=", "'{} > {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "Gt", ")", ":", "text", "=", "'{} <= {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "GtE", ")", ":", "text", "=", "'{} < {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "Is", ")", ":", "text", "=", "'{} is not {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "IsNot", ")", ":", "text", "=", "'{} is {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "In", ")", ":", "text", "=", "'{} not in {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "elif", "isinstance", "(", "operator", ",", "ast", ".", "NotIn", ")", ":", "text", "=", "'{} in {}'", ".", "format", "(", "left_text", ",", "right_text", ")", "else", ":", "raise", "NotImplementedError", "(", "\"Unhandled comparison operator: {}\"", ".", "format", "(", "operator", ")", ")", "return", "text" ]
Generate the text representing the negation of the comparison node. :param atok: parsing obtained with ``asttokens`` so that we can access the last tokens of a node. The standard ``ast`` module provides only the first token of an AST node. In lack of concrete syntax tree, getting text from first to last token is currently the simplest approach. :param node: AST node representing the comparison in a condition :return: text representation of the node's negation
[ "Generate", "the", "text", "representing", "the", "negation", "of", "the", "comparison", "node", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L27-L84
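The core of _negate_compare_text above is a fixed mapping from comparison operators to their negations. A stand-alone approximation using only the standard library (ast.get_source_segment, Python 3.8+, stands in for asttokens here):

import ast

# Negation table equivalent to the if/elif chain in _negate_compare_text.
NEGATED = {
    ast.Eq: "!=", ast.NotEq: "==", ast.Lt: ">=", ast.LtE: ">",
    ast.Gt: "<=", ast.GtE: "<", ast.Is: "is not", ast.IsNot: "is",
    ast.In: "not in", ast.NotIn: "in",
}

def negate_compare(source: str) -> str:
    node = ast.parse(source, mode="eval").body
    assert isinstance(node, ast.Compare) and len(node.ops) == 1
    left = ast.get_source_segment(source, node.left)
    right = ast.get_source_segment(source, node.comparators[0])
    return "{} {} {}".format(left, NEGATED[type(node.ops[0])], right)

print(negate_compare("x < 0"))         # x >= 0
print(negate_compare("item in seen"))  # item not in seen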
246,626
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_error_type_and_message
def _error_type_and_message( decorator_inspection: icontract._represent.DecoratorInspection) -> Tuple[Optional[str], Optional[str]]: """ Inspect the error argument of a contract and infer the error type and the message if the error is given as a lambda. If the error argument is not given or if it is not given as a lambda function, return immediately. The error message is inferred as the single string-literal argument to a single call in the lambda body. :param decorator_inspection: inspection of a contract decorator :return: error type (None if not inferrable), error message (None if not inferrable) """ call_node = decorator_inspection.node error_arg_node = None # type: Optional[ast.AST] for keyword in call_node.keywords: if keyword.arg == 'error': error_arg_node = keyword.value if error_arg_node is None and len(call_node.args) == 5: error_arg_node = call_node.args[4] if not isinstance(error_arg_node, ast.Lambda): return None, None body_node = error_arg_node.body # The body of the error lambda needs to be a callable, since it needs to return an instance of Exception if not isinstance(body_node, ast.Call): return None, None error_type = decorator_inspection.atok.get_text(node=body_node.func) error_message = None # type: Optional[str] if len(body_node.args) == 1 and len(body_node.keywords) == 0: if isinstance(body_node.args[0], ast.Str): error_message = body_node.args[0].s elif len(body_node.args) == 0 and len(body_node.keywords) == 1: if isinstance(body_node.keywords[0].value, ast.Str): error_message = body_node.keywords[0].value.s else: # The error message could not be inferred. pass return error_type, error_message
python
def _error_type_and_message( decorator_inspection: icontract._represent.DecoratorInspection) -> Tuple[Optional[str], Optional[str]]: """ Inspect the error argument of a contract and infer the error type and the message if the error is given as a lambda. If the error argument is not given or if it is not given as a lambda function, return immediately. The error message is inferred as the single string-literal argument to a single call in the lambda body. :param decorator_inspection: inspection of a contract decorator :return: error type (None if not inferrable), error message (None if not inferrable) """ call_node = decorator_inspection.node error_arg_node = None # type: Optional[ast.AST] for keyword in call_node.keywords: if keyword.arg == 'error': error_arg_node = keyword.value if error_arg_node is None and len(call_node.args) == 5: error_arg_node = call_node.args[4] if not isinstance(error_arg_node, ast.Lambda): return None, None body_node = error_arg_node.body # The body of the error lambda needs to be a callable, since it needs to return an instance of Exception if not isinstance(body_node, ast.Call): return None, None error_type = decorator_inspection.atok.get_text(node=body_node.func) error_message = None # type: Optional[str] if len(body_node.args) == 1 and len(body_node.keywords) == 0: if isinstance(body_node.args[0], ast.Str): error_message = body_node.args[0].s elif len(body_node.args) == 0 and len(body_node.keywords) == 1: if isinstance(body_node.keywords[0].value, ast.Str): error_message = body_node.keywords[0].value.s else: # The error message could not be inferred. pass return error_type, error_message
[ "def", "_error_type_and_message", "(", "decorator_inspection", ":", "icontract", ".", "_represent", ".", "DecoratorInspection", ")", "->", "Tuple", "[", "Optional", "[", "str", "]", ",", "Optional", "[", "str", "]", "]", ":", "call_node", "=", "decorator_inspection", ".", "node", "error_arg_node", "=", "None", "# type: Optional[ast.AST]", "for", "keyword", "in", "call_node", ".", "keywords", ":", "if", "keyword", ".", "arg", "==", "'error'", ":", "error_arg_node", "=", "keyword", ".", "value", "if", "error_arg_node", "is", "None", "and", "len", "(", "call_node", ".", "args", ")", "==", "5", ":", "error_arg_node", "=", "call_node", ".", "args", "[", "4", "]", "if", "not", "isinstance", "(", "error_arg_node", ",", "ast", ".", "Lambda", ")", ":", "return", "None", ",", "None", "body_node", "=", "error_arg_node", ".", "body", "# The body of the error lambda needs to be a callable, since it needs to return an instance of Exception", "if", "not", "isinstance", "(", "body_node", ",", "ast", ".", "Call", ")", ":", "return", "None", ",", "None", "error_type", "=", "decorator_inspection", ".", "atok", ".", "get_text", "(", "node", "=", "body_node", ".", "func", ")", "error_message", "=", "None", "# type: Optional[str]", "if", "len", "(", "body_node", ".", "args", ")", "==", "1", "and", "len", "(", "body_node", ".", "keywords", ")", "==", "0", ":", "if", "isinstance", "(", "body_node", ".", "args", "[", "0", "]", ",", "ast", ".", "Str", ")", ":", "error_message", "=", "body_node", ".", "args", "[", "0", "]", ".", "s", "elif", "len", "(", "body_node", ".", "args", ")", "==", "0", "and", "len", "(", "body_node", ".", "keywords", ")", "==", "1", ":", "if", "isinstance", "(", "body_node", ".", "keywords", "[", "0", "]", ".", "value", ",", "ast", ".", "Str", ")", ":", "error_message", "=", "body_node", ".", "keywords", "[", "0", "]", ".", "value", ".", "s", "else", ":", "# The error message could not be inferred.", "pass", "return", "error_type", ",", "error_message" ]
Inspect the error argument of a contract and infer the error type and the message if the error is given as a lambda. If the error argument is not given or if it is not given as a lambda function, return immediately. The error message is inferred as the single string-literal argument to a single call in the lambda body. :param decorator_inspection: inspection of a contract decorator :return: error type (None if not inferrable), error message (None if not inferrable)
[ "Inspect", "the", "error", "argument", "of", "a", "contract", "and", "infer", "the", "error", "type", "and", "the", "message", "if", "the", "error", "is", "given", "as", "a", "lambda", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L128-L174
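_error_type_and_message above walks the AST of an error lambda such as error=lambda: ValueError("x must be positive"). A simplified stand-alone sketch of the same inference, parsing the lambda source directly instead of going through icontract's DecoratorInspection (ast.unparse needs Python 3.9+):

import ast

def error_type_and_message(lambda_source):
    # Approximation of the record above: expect a lambda whose body is a call
    # with a single string-literal argument.
    node = ast.parse(lambda_source, mode="eval").body
    if not isinstance(node, ast.Lambda) or not isinstance(node.body, ast.Call):
        return None, None
    call = node.body
    error_type = ast.unparse(call.func)
    message = None
    if len(call.args) == 1 and isinstance(call.args[0], ast.Constant) \
            and isinstance(call.args[0].value, str):
        message = call.args[0].value
    return error_type, message

print(error_type_and_message('lambda: ValueError("x must be positive")'))
# ('ValueError', 'x must be positive')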
246,627
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_contract
def _format_contract(contract: icontract._Contract) -> str: """Format the contract as reST.""" # pylint: disable=too-many-branches decorator_inspection = None # type: Optional[icontract._represent.DecoratorInspection] ## # Parse condition ## if not icontract._represent._is_lambda(a_function=contract.condition): condition_text = ':py:func:`{}`'.format(contract.condition.__name__) else: # We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with # lambdas. # Find the line corresponding to the condition lambda lines, condition_lineno = inspect.findsource(contract.condition) filename = inspect.getsourcefile(contract.condition) decorator_inspection = icontract._represent.inspect_decorator( lines=lines, lineno=condition_lineno, filename=filename) lambda_inspection = icontract._represent.find_lambda_condition(decorator_inspection=decorator_inspection) assert lambda_inspection is not None, \ "Expected non-None lambda inspection with the condition: {}".format(contract.condition) condition_text = _condition_as_text(lambda_inspection=lambda_inspection) ## # Parse error ## error_type = None # type: Optional[str] # Error message is set only for an error of a contract given as a lambda that takes no arguments and returns # a result of a call on a string literal (*e.g.*, ``error=ValueError("some message")``. error_msg = None # type: Optional[str] if contract.error is not None: if isinstance(contract.error, type): error_type = contract.error.__qualname__ elif inspect.isfunction(contract.error) and icontract._represent._is_lambda(a_function=contract.error): if decorator_inspection is None: lines, condition_lineno = inspect.findsource(contract.error) filename = inspect.getsourcefile(contract.error) decorator_inspection = icontract._represent.inspect_decorator( lines=lines, lineno=condition_lineno, filename=filename) error_type, error_msg = _error_type_and_message(decorator_inspection=decorator_inspection) else: # Error type could not be inferred pass ## # Format ## description = None # type: Optional[str] if contract.description: description = contract.description elif error_msg is not None: description = error_msg else: # Description could not be inferred. pass doc = None # type: Optional[str] if description and error_type: if description.strip()[-1] in [".", "!", "?"]: doc = "{} Raise :py:class:`{}`".format(description, error_type) elif description.strip()[-1] in [",", ";"]: doc = "{} raise :py:class:`{}`".format(description, error_type) else: doc = "{}; raise :py:class:`{}`".format(description, error_type) elif not description and error_type: doc = "Raise :py:class:`{}`".format(error_type) elif description and not error_type: doc = description else: # No extra documentation can be generated since the error type could not be inferred and # no contract description was given. doc = None if doc is not None: return "{} ({})".format(condition_text, doc) return condition_text
python
def _format_contract(contract: icontract._Contract) -> str: """Format the contract as reST.""" # pylint: disable=too-many-branches decorator_inspection = None # type: Optional[icontract._represent.DecoratorInspection] ## # Parse condition ## if not icontract._represent._is_lambda(a_function=contract.condition): condition_text = ':py:func:`{}`'.format(contract.condition.__name__) else: # We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with # lambdas. # Find the line corresponding to the condition lambda lines, condition_lineno = inspect.findsource(contract.condition) filename = inspect.getsourcefile(contract.condition) decorator_inspection = icontract._represent.inspect_decorator( lines=lines, lineno=condition_lineno, filename=filename) lambda_inspection = icontract._represent.find_lambda_condition(decorator_inspection=decorator_inspection) assert lambda_inspection is not None, \ "Expected non-None lambda inspection with the condition: {}".format(contract.condition) condition_text = _condition_as_text(lambda_inspection=lambda_inspection) ## # Parse error ## error_type = None # type: Optional[str] # Error message is set only for an error of a contract given as a lambda that takes no arguments and returns # a result of a call on a string literal (*e.g.*, ``error=ValueError("some message")``. error_msg = None # type: Optional[str] if contract.error is not None: if isinstance(contract.error, type): error_type = contract.error.__qualname__ elif inspect.isfunction(contract.error) and icontract._represent._is_lambda(a_function=contract.error): if decorator_inspection is None: lines, condition_lineno = inspect.findsource(contract.error) filename = inspect.getsourcefile(contract.error) decorator_inspection = icontract._represent.inspect_decorator( lines=lines, lineno=condition_lineno, filename=filename) error_type, error_msg = _error_type_and_message(decorator_inspection=decorator_inspection) else: # Error type could not be inferred pass ## # Format ## description = None # type: Optional[str] if contract.description: description = contract.description elif error_msg is not None: description = error_msg else: # Description could not be inferred. pass doc = None # type: Optional[str] if description and error_type: if description.strip()[-1] in [".", "!", "?"]: doc = "{} Raise :py:class:`{}`".format(description, error_type) elif description.strip()[-1] in [",", ";"]: doc = "{} raise :py:class:`{}`".format(description, error_type) else: doc = "{}; raise :py:class:`{}`".format(description, error_type) elif not description and error_type: doc = "Raise :py:class:`{}`".format(error_type) elif description and not error_type: doc = description else: # No extra documentation can be generated since the error type could not be inferred and # no contract description was given. doc = None if doc is not None: return "{} ({})".format(condition_text, doc) return condition_text
[ "def", "_format_contract", "(", "contract", ":", "icontract", ".", "_Contract", ")", "->", "str", ":", "# pylint: disable=too-many-branches", "decorator_inspection", "=", "None", "# type: Optional[icontract._represent.DecoratorInspection]", "##", "# Parse condition", "##", "if", "not", "icontract", ".", "_represent", ".", "_is_lambda", "(", "a_function", "=", "contract", ".", "condition", ")", ":", "condition_text", "=", "':py:func:`{}`'", ".", "format", "(", "contract", ".", "condition", ".", "__name__", ")", "else", ":", "# We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with", "# lambdas.", "# Find the line corresponding to the condition lambda", "lines", ",", "condition_lineno", "=", "inspect", ".", "findsource", "(", "contract", ".", "condition", ")", "filename", "=", "inspect", ".", "getsourcefile", "(", "contract", ".", "condition", ")", "decorator_inspection", "=", "icontract", ".", "_represent", ".", "inspect_decorator", "(", "lines", "=", "lines", ",", "lineno", "=", "condition_lineno", ",", "filename", "=", "filename", ")", "lambda_inspection", "=", "icontract", ".", "_represent", ".", "find_lambda_condition", "(", "decorator_inspection", "=", "decorator_inspection", ")", "assert", "lambda_inspection", "is", "not", "None", ",", "\"Expected non-None lambda inspection with the condition: {}\"", ".", "format", "(", "contract", ".", "condition", ")", "condition_text", "=", "_condition_as_text", "(", "lambda_inspection", "=", "lambda_inspection", ")", "##", "# Parse error", "##", "error_type", "=", "None", "# type: Optional[str]", "# Error message is set only for an error of a contract given as a lambda that takes no arguments and returns", "# a result of a call on a string literal (*e.g.*, ``error=ValueError(\"some message\")``.", "error_msg", "=", "None", "# type: Optional[str]", "if", "contract", ".", "error", "is", "not", "None", ":", "if", "isinstance", "(", "contract", ".", "error", ",", "type", ")", ":", "error_type", "=", "contract", ".", "error", ".", "__qualname__", "elif", "inspect", ".", "isfunction", "(", "contract", ".", "error", ")", "and", "icontract", ".", "_represent", ".", "_is_lambda", "(", "a_function", "=", "contract", ".", "error", ")", ":", "if", "decorator_inspection", "is", "None", ":", "lines", ",", "condition_lineno", "=", "inspect", ".", "findsource", "(", "contract", ".", "error", ")", "filename", "=", "inspect", ".", "getsourcefile", "(", "contract", ".", "error", ")", "decorator_inspection", "=", "icontract", ".", "_represent", ".", "inspect_decorator", "(", "lines", "=", "lines", ",", "lineno", "=", "condition_lineno", ",", "filename", "=", "filename", ")", "error_type", ",", "error_msg", "=", "_error_type_and_message", "(", "decorator_inspection", "=", "decorator_inspection", ")", "else", ":", "# Error type could not be inferred", "pass", "##", "# Format", "##", "description", "=", "None", "# type: Optional[str]", "if", "contract", ".", "description", ":", "description", "=", "contract", ".", "description", "elif", "error_msg", "is", "not", "None", ":", "description", "=", "error_msg", "else", ":", "# Description could not be inferred.", "pass", "doc", "=", "None", "# type: Optional[str]", "if", "description", "and", "error_type", ":", "if", "description", ".", "strip", "(", ")", "[", "-", "1", "]", "in", "[", "\".\"", ",", "\"!\"", ",", "\"?\"", "]", ":", "doc", "=", "\"{} Raise :py:class:`{}`\"", ".", "format", "(", "description", ",", "error_type", ")", "elif", "description", ".", "strip", 
"(", ")", "[", "-", "1", "]", "in", "[", "\",\"", ",", "\";\"", "]", ":", "doc", "=", "\"{} raise :py:class:`{}`\"", ".", "format", "(", "description", ",", "error_type", ")", "else", ":", "doc", "=", "\"{}; raise :py:class:`{}`\"", ".", "format", "(", "description", ",", "error_type", ")", "elif", "not", "description", "and", "error_type", ":", "doc", "=", "\"Raise :py:class:`{}`\"", ".", "format", "(", "error_type", ")", "elif", "description", "and", "not", "error_type", ":", "doc", "=", "description", "else", ":", "# No extra documentation can be generated since the error type could not be inferred and", "# no contract description was given.", "doc", "=", "None", "if", "doc", "is", "not", "None", ":", "return", "\"{} ({})\"", ".", "format", "(", "condition_text", ",", "doc", ")", "return", "condition_text" ]
Format the contract as reST.
[ "Format", "the", "contract", "as", "reST", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L177-L267
246,628
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_preconditions
def _format_preconditions(preconditions: List[List[icontract._Contract]], prefix: Optional[str] = None) -> List[str]: """ Format preconditions as reST. :param preconditions: preconditions of a function :param prefix: prefix of the ``:requires:`` and ``:requires else:`` directive :return: list of lines """ if not preconditions: return [] result = [] # type: List[str] for i, group in enumerate(preconditions): if i == 0: if prefix is not None: result.append(":{} requires:".format(prefix)) else: result.append(":requires:") else: if prefix is not None: result.append(":{} requires else:".format(prefix)) else: result.append(":requires else:") for precondition in group: result.append(" * {}".format(_format_contract(contract=precondition))) return result
python
def _format_preconditions(preconditions: List[List[icontract._Contract]], prefix: Optional[str] = None) -> List[str]: """ Format preconditions as reST. :param preconditions: preconditions of a function :param prefix: prefix of the ``:requires:`` and ``:requires else:`` directive :return: list of lines """ if not preconditions: return [] result = [] # type: List[str] for i, group in enumerate(preconditions): if i == 0: if prefix is not None: result.append(":{} requires:".format(prefix)) else: result.append(":requires:") else: if prefix is not None: result.append(":{} requires else:".format(prefix)) else: result.append(":requires else:") for precondition in group: result.append(" * {}".format(_format_contract(contract=precondition))) return result
[ "def", "_format_preconditions", "(", "preconditions", ":", "List", "[", "List", "[", "icontract", ".", "_Contract", "]", "]", ",", "prefix", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "List", "[", "str", "]", ":", "if", "not", "preconditions", ":", "return", "[", "]", "result", "=", "[", "]", "# type: List[str]", "for", "i", ",", "group", "in", "enumerate", "(", "preconditions", ")", ":", "if", "i", "==", "0", ":", "if", "prefix", "is", "not", "None", ":", "result", ".", "append", "(", "\":{} requires:\"", ".", "format", "(", "prefix", ")", ")", "else", ":", "result", ".", "append", "(", "\":requires:\"", ")", "else", ":", "if", "prefix", "is", "not", "None", ":", "result", ".", "append", "(", "\":{} requires else:\"", ".", "format", "(", "prefix", ")", ")", "else", ":", "result", ".", "append", "(", "\":requires else:\"", ")", "for", "precondition", "in", "group", ":", "result", ".", "append", "(", "\" * {}\"", ".", "format", "(", "_format_contract", "(", "contract", "=", "precondition", ")", ")", ")", "return", "result" ]
Format preconditions as reST. :param preconditions: preconditions of a function :param prefix: prefix of the ``:requires:`` and ``:requires else:`` directive :return: list of lines
[ "Format", "preconditions", "as", "reST", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L273-L300
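The layout produced by _format_preconditions above, reduced to plain strings so it runs without icontract: the first group gets a :requires: directive, every later group a :requires else: directive. The condition texts and the exact indentation below are illustrative.

def format_precondition_groups(groups, prefix=None):
    if not groups:
        return []
    lines = []
    for i, group in enumerate(groups):
        label = "requires" if i == 0 else "requires else"
        lines.append(":{} {}:".format(prefix, label) if prefix else ":{}:".format(label))
        lines.extend("    * {}".format(text) for text in group)
    return lines

for line in format_precondition_groups([["x > 0"], ["y > 0", "x == 0"]]):
    print(line)
# :requires:
#     * x > 0
# :requires else:
#     * y > 0
#     * x == 0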
246,629
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_capture_as_text
def _capture_as_text(capture: Callable[..., Any]) -> str: """Convert the capture function into its text representation by parsing the source code of the decorator.""" if not icontract._represent._is_lambda(a_function=capture): signature = inspect.signature(capture) param_names = list(signature.parameters.keys()) return "{}({})".format(capture.__qualname__, ", ".join(param_names)) lines, lineno = inspect.findsource(capture) filename = inspect.getsourcefile(capture) decorator_inspection = icontract._represent.inspect_decorator(lines=lines, lineno=lineno, filename=filename) call_node = decorator_inspection.node capture_node = None # type: Optional[ast.Lambda] if len(call_node.args) > 0: assert isinstance(call_node.args[0], ast.Lambda), \ ("Expected the first argument to the snapshot decorator to be a condition as lambda AST node, " "but got: {}").format(type(call_node.args[0])) capture_node = call_node.args[0] elif len(call_node.keywords) > 0: for keyword in call_node.keywords: if keyword.arg == "capture": assert isinstance(keyword.value, ast.Lambda), \ "Expected lambda node as value of the 'capture' argument to the decorator." capture_node = keyword.value break assert capture_node is not None, "Expected to find a keyword AST node with 'capture' arg, but found none" else: raise AssertionError( "Expected a call AST node of a snapshot decorator to have either args or keywords, but got: {}".format( ast.dump(call_node))) capture_text = decorator_inspection.atok.get_text(capture_node.body) return capture_text
python
def _capture_as_text(capture: Callable[..., Any]) -> str: """Convert the capture function into its text representation by parsing the source code of the decorator.""" if not icontract._represent._is_lambda(a_function=capture): signature = inspect.signature(capture) param_names = list(signature.parameters.keys()) return "{}({})".format(capture.__qualname__, ", ".join(param_names)) lines, lineno = inspect.findsource(capture) filename = inspect.getsourcefile(capture) decorator_inspection = icontract._represent.inspect_decorator(lines=lines, lineno=lineno, filename=filename) call_node = decorator_inspection.node capture_node = None # type: Optional[ast.Lambda] if len(call_node.args) > 0: assert isinstance(call_node.args[0], ast.Lambda), \ ("Expected the first argument to the snapshot decorator to be a condition as lambda AST node, " "but got: {}").format(type(call_node.args[0])) capture_node = call_node.args[0] elif len(call_node.keywords) > 0: for keyword in call_node.keywords: if keyword.arg == "capture": assert isinstance(keyword.value, ast.Lambda), \ "Expected lambda node as value of the 'capture' argument to the decorator." capture_node = keyword.value break assert capture_node is not None, "Expected to find a keyword AST node with 'capture' arg, but found none" else: raise AssertionError( "Expected a call AST node of a snapshot decorator to have either args or keywords, but got: {}".format( ast.dump(call_node))) capture_text = decorator_inspection.atok.get_text(capture_node.body) return capture_text
[ "def", "_capture_as_text", "(", "capture", ":", "Callable", "[", "...", ",", "Any", "]", ")", "->", "str", ":", "if", "not", "icontract", ".", "_represent", ".", "_is_lambda", "(", "a_function", "=", "capture", ")", ":", "signature", "=", "inspect", ".", "signature", "(", "capture", ")", "param_names", "=", "list", "(", "signature", ".", "parameters", ".", "keys", "(", ")", ")", "return", "\"{}({})\"", ".", "format", "(", "capture", ".", "__qualname__", ",", "\", \"", ".", "join", "(", "param_names", ")", ")", "lines", ",", "lineno", "=", "inspect", ".", "findsource", "(", "capture", ")", "filename", "=", "inspect", ".", "getsourcefile", "(", "capture", ")", "decorator_inspection", "=", "icontract", ".", "_represent", ".", "inspect_decorator", "(", "lines", "=", "lines", ",", "lineno", "=", "lineno", ",", "filename", "=", "filename", ")", "call_node", "=", "decorator_inspection", ".", "node", "capture_node", "=", "None", "# type: Optional[ast.Lambda]", "if", "len", "(", "call_node", ".", "args", ")", ">", "0", ":", "assert", "isinstance", "(", "call_node", ".", "args", "[", "0", "]", ",", "ast", ".", "Lambda", ")", ",", "(", "\"Expected the first argument to the snapshot decorator to be a condition as lambda AST node, \"", "\"but got: {}\"", ")", ".", "format", "(", "type", "(", "call_node", ".", "args", "[", "0", "]", ")", ")", "capture_node", "=", "call_node", ".", "args", "[", "0", "]", "elif", "len", "(", "call_node", ".", "keywords", ")", ">", "0", ":", "for", "keyword", "in", "call_node", ".", "keywords", ":", "if", "keyword", ".", "arg", "==", "\"capture\"", ":", "assert", "isinstance", "(", "keyword", ".", "value", ",", "ast", ".", "Lambda", ")", ",", "\"Expected lambda node as value of the 'capture' argument to the decorator.\"", "capture_node", "=", "keyword", ".", "value", "break", "assert", "capture_node", "is", "not", "None", ",", "\"Expected to find a keyword AST node with 'capture' arg, but found none\"", "else", ":", "raise", "AssertionError", "(", "\"Expected a call AST node of a snapshot decorator to have either args or keywords, but got: {}\"", ".", "format", "(", "ast", ".", "dump", "(", "call_node", ")", ")", ")", "capture_text", "=", "decorator_inspection", ".", "atok", ".", "get_text", "(", "capture_node", ".", "body", ")", "return", "capture_text" ]
Convert the capture function into its text representation by parsing the source code of the decorator.
[ "Convert", "the", "capture", "function", "into", "its", "text", "representation", "by", "parsing", "the", "source", "code", "of", "the", "decorator", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L303-L343
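For a capture given as a named function rather than a lambda, _capture_as_text above simply renders "qualname(param, ...)" from the signature. A tiny illustration of that branch (the capture function is made up):

import inspect

def capture_lengths(lines):
    return [len(line) for line in lines]

signature = inspect.signature(capture_lengths)
print("{}({})".format(capture_lengths.__qualname__, ", ".join(signature.parameters)))
# capture_lengths(lines)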
246,630
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_snapshots
def _format_snapshots(snapshots: List[icontract._Snapshot], prefix: Optional[str] = None) -> List[str]: """ Format snapshots as reST. :param snapshots: snapshots defined to capture the argument values of a function before the invocation :param prefix: prefix to be prepended to ``:OLD:`` directive :return: list of lines describing the snapshots """ if not snapshots: return [] result = [] # type: List[str] if prefix is not None: result.append(":{} OLD:".format(prefix)) else: result.append(":OLD:") for snapshot in snapshots: text = _capture_as_text(capture=snapshot.capture) result.append(" * :code:`.{}` = :code:`{}`".format(snapshot.name, text)) return result
python
def _format_snapshots(snapshots: List[icontract._Snapshot], prefix: Optional[str] = None) -> List[str]: """ Format snapshots as reST. :param snapshots: snapshots defined to capture the argument values of a function before the invocation :param prefix: prefix to be prepended to ``:OLD:`` directive :return: list of lines describing the snapshots """ if not snapshots: return [] result = [] # type: List[str] if prefix is not None: result.append(":{} OLD:".format(prefix)) else: result.append(":OLD:") for snapshot in snapshots: text = _capture_as_text(capture=snapshot.capture) result.append(" * :code:`.{}` = :code:`{}`".format(snapshot.name, text)) return result
[ "def", "_format_snapshots", "(", "snapshots", ":", "List", "[", "icontract", ".", "_Snapshot", "]", ",", "prefix", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "List", "[", "str", "]", ":", "if", "not", "snapshots", ":", "return", "[", "]", "result", "=", "[", "]", "# type: List[str]", "if", "prefix", "is", "not", "None", ":", "result", ".", "append", "(", "\":{} OLD:\"", ".", "format", "(", "prefix", ")", ")", "else", ":", "result", ".", "append", "(", "\":OLD:\"", ")", "for", "snapshot", "in", "snapshots", ":", "text", "=", "_capture_as_text", "(", "capture", "=", "snapshot", ".", "capture", ")", "result", ".", "append", "(", "\" * :code:`.{}` = :code:`{}`\"", ".", "format", "(", "snapshot", ".", "name", ",", "text", ")", ")", "return", "result" ]
Format snapshots as reST. :param snapshots: snapshots defined to capture the argument values of a function before the invocation :param prefix: prefix to be prepended to ``:OLD:`` directive :return: list of lines describing the snapshots
[ "Format", "snapshots", "as", "reST", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L349-L371
246,631
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_postconditions
def _format_postconditions(postconditions: List[icontract._Contract], prefix: Optional[str] = None) -> List[str]: """ Format postconditions as reST. :param postconditions: postconditions of a function :param prefix: prefix to be prepended to ``:ensures:`` directive :return: list of lines describing the postconditions """ if not postconditions: return [] result = [] # type: List[str] if prefix is not None: result.append(":{} ensures:".format(prefix)) else: result.append(":ensures:") for postcondition in postconditions: result.append(" * {}".format(_format_contract(contract=postcondition))) return result
python
def _format_postconditions(postconditions: List[icontract._Contract], prefix: Optional[str] = None) -> List[str]: """ Format postconditions as reST. :param postconditions: postconditions of a function :param prefix: prefix to be prepended to ``:ensures:`` directive :return: list of lines describing the postconditions """ if not postconditions: return [] result = [] # type: List[str] if prefix is not None: result.append(":{} ensures:".format(prefix)) else: result.append(":ensures:") for postcondition in postconditions: result.append(" * {}".format(_format_contract(contract=postcondition))) return result
[ "def", "_format_postconditions", "(", "postconditions", ":", "List", "[", "icontract", ".", "_Contract", "]", ",", "prefix", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "List", "[", "str", "]", ":", "if", "not", "postconditions", ":", "return", "[", "]", "result", "=", "[", "]", "# type: List[str]", "if", "prefix", "is", "not", "None", ":", "result", ".", "append", "(", "\":{} ensures:\"", ".", "format", "(", "prefix", ")", ")", "else", ":", "result", ".", "append", "(", "\":ensures:\"", ")", "for", "postcondition", "in", "postconditions", ":", "result", ".", "append", "(", "\" * {}\"", ".", "format", "(", "_format_contract", "(", "contract", "=", "postcondition", ")", ")", ")", "return", "result" ]
Format postconditions as reST. :param postconditions: postconditions of a function :param prefix: prefix to be prepended to ``:ensures:`` directive :return: list of lines describing the postconditions
[ "Format", "postconditions", "as", "reST", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L377-L398
246,632
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_invariants
def _format_invariants(invariants: List[icontract._Contract]) -> List[str]: """Format invariants as reST.""" if not invariants: return [] result = [":establishes:"] # type: List[str] for invariant in invariants: result.append(" * {}".format(_format_contract(contract=invariant))) return result
python
def _format_invariants(invariants: List[icontract._Contract]) -> List[str]: """Format invariants as reST.""" if not invariants: return [] result = [":establishes:"] # type: List[str] for invariant in invariants: result.append(" * {}".format(_format_contract(contract=invariant))) return result
[ "def", "_format_invariants", "(", "invariants", ":", "List", "[", "icontract", ".", "_Contract", "]", ")", "->", "List", "[", "str", "]", ":", "if", "not", "invariants", ":", "return", "[", "]", "result", "=", "[", "\":establishes:\"", "]", "# type: List[str]", "for", "invariant", "in", "invariants", ":", "result", ".", "append", "(", "\" * {}\"", ".", "format", "(", "_format_contract", "(", "contract", "=", "invariant", ")", ")", ")", "return", "result" ]
Format invariants as reST.
[ "Format", "invariants", "as", "reST", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L403-L412
246,633
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_preconditions_snapshots_postconditions
def _preconditions_snapshots_postconditions(checker: Callable) -> _PrePostSnaps: """Collect the preconditions, snapshots and postconditions from a contract checker of a function.""" preconditions = getattr(checker, "__preconditions__", []) # type: List[List[icontract._Contract]] assert all(isinstance(precondition_group, list) for precondition_group in preconditions) assert (all( isinstance(precondition, icontract._Contract) for precondition_group in preconditions for precondition in precondition_group)) # Filter empty precondition groups ("require else" blocks) preconditions = [group for group in preconditions if len(group) > 0] snapshots = getattr(checker, "__postcondition_snapshots__", []) # type: List[icontract._Snapshot] assert all(isinstance(snap, icontract._Snapshot) for snap in snapshots) postconditions = getattr(checker, "__postconditions__", []) # type: List[icontract._Contract] assert all(isinstance(postcondition, icontract._Contract) for postcondition in postconditions) return _PrePostSnaps(preconditions=preconditions, snapshots=snapshots, postconditions=postconditions)
python
def _preconditions_snapshots_postconditions(checker: Callable) -> _PrePostSnaps: """Collect the preconditions, snapshots and postconditions from a contract checker of a function.""" preconditions = getattr(checker, "__preconditions__", []) # type: List[List[icontract._Contract]] assert all(isinstance(precondition_group, list) for precondition_group in preconditions) assert (all( isinstance(precondition, icontract._Contract) for precondition_group in preconditions for precondition in precondition_group)) # Filter empty precondition groups ("require else" blocks) preconditions = [group for group in preconditions if len(group) > 0] snapshots = getattr(checker, "__postcondition_snapshots__", []) # type: List[icontract._Snapshot] assert all(isinstance(snap, icontract._Snapshot) for snap in snapshots) postconditions = getattr(checker, "__postconditions__", []) # type: List[icontract._Contract] assert all(isinstance(postcondition, icontract._Contract) for postcondition in postconditions) return _PrePostSnaps(preconditions=preconditions, snapshots=snapshots, postconditions=postconditions)
[ "def", "_preconditions_snapshots_postconditions", "(", "checker", ":", "Callable", ")", "->", "_PrePostSnaps", ":", "preconditions", "=", "getattr", "(", "checker", ",", "\"__preconditions__\"", ",", "[", "]", ")", "# type: List[List[icontract._Contract]]", "assert", "all", "(", "isinstance", "(", "precondition_group", ",", "list", ")", "for", "precondition_group", "in", "preconditions", ")", "assert", "(", "all", "(", "isinstance", "(", "precondition", ",", "icontract", ".", "_Contract", ")", "for", "precondition_group", "in", "preconditions", "for", "precondition", "in", "precondition_group", ")", ")", "# Filter empty precondition groups (\"require else\" blocks)", "preconditions", "=", "[", "group", "for", "group", "in", "preconditions", "if", "len", "(", "group", ")", ">", "0", "]", "snapshots", "=", "getattr", "(", "checker", ",", "\"__postcondition_snapshots__\"", ",", "[", "]", ")", "# type: List[icontract._Snapshot]", "assert", "all", "(", "isinstance", "(", "snap", ",", "icontract", ".", "_Snapshot", ")", "for", "snap", "in", "snapshots", ")", "postconditions", "=", "getattr", "(", "checker", ",", "\"__postconditions__\"", ",", "[", "]", ")", "# type: List[icontract._Contract]", "assert", "all", "(", "isinstance", "(", "postcondition", ",", "icontract", ".", "_Contract", ")", "for", "postcondition", "in", "postconditions", ")", "return", "_PrePostSnaps", "(", "preconditions", "=", "preconditions", ",", "snapshots", "=", "snapshots", ",", "postconditions", "=", "postconditions", ")" ]
Collect the preconditions, snapshots and postconditions from a contract checker of a function.
[ "Collect", "the", "preconditions", "snapshots", "and", "postconditions", "from", "a", "contract", "checker", "of", "a", "function", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L426-L445
246,634
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_function_contracts
def _format_function_contracts(func: Callable, prefix: Optional[str] = None) -> List[str]: """ Format the preconditions and postconditions of a function given its checker decorator. :param func: function whose contracts we are describing :param prefix: prefix to be prepended to the contract directives such as ``get`` or ``set`` :return: list of lines """ checker = icontract._checkers.find_checker(func=func) if checker is None: return [] pps = _preconditions_snapshots_postconditions(checker=checker) pre_block = _format_preconditions(preconditions=pps.preconditions, prefix=prefix) old_block = _format_snapshots(snapshots=pps.snapshots, prefix=prefix) post_block = _format_postconditions(postconditions=pps.postconditions, prefix=prefix) return pre_block + old_block + post_block
python
def _format_function_contracts(func: Callable, prefix: Optional[str] = None) -> List[str]: """ Format the preconditions and postconditions of a function given its checker decorator. :param func: function whose contracts we are describing :param prefix: prefix to be prepended to the contract directives such as ``get`` or ``set`` :return: list of lines """ checker = icontract._checkers.find_checker(func=func) if checker is None: return [] pps = _preconditions_snapshots_postconditions(checker=checker) pre_block = _format_preconditions(preconditions=pps.preconditions, prefix=prefix) old_block = _format_snapshots(snapshots=pps.snapshots, prefix=prefix) post_block = _format_postconditions(postconditions=pps.postconditions, prefix=prefix) return pre_block + old_block + post_block
[ "def", "_format_function_contracts", "(", "func", ":", "Callable", ",", "prefix", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "List", "[", "str", "]", ":", "checker", "=", "icontract", ".", "_checkers", ".", "find_checker", "(", "func", "=", "func", ")", "if", "checker", "is", "None", ":", "return", "[", "]", "pps", "=", "_preconditions_snapshots_postconditions", "(", "checker", "=", "checker", ")", "pre_block", "=", "_format_preconditions", "(", "preconditions", "=", "pps", ".", "preconditions", ",", "prefix", "=", "prefix", ")", "old_block", "=", "_format_snapshots", "(", "snapshots", "=", "pps", ".", "snapshots", ",", "prefix", "=", "prefix", ")", "post_block", "=", "_format_postconditions", "(", "postconditions", "=", "pps", ".", "postconditions", ",", "prefix", "=", "prefix", ")", "return", "pre_block", "+", "old_block", "+", "post_block" ]
Format the preconditions and postconditions of a function given its checker decorator. :param func: function whose contracts we are describing :param prefix: prefix to be prepended to the contract directives such as ``get`` or ``set`` :return: list of lines
[ "Format", "the", "preconditions", "and", "postconditions", "of", "a", "function", "given", "its", "checker", "decorator", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L449-L467
246,635
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
_format_contracts
def _format_contracts(what: str, obj: Any) -> List[str]: """Format the contracts as reST.""" if what in ['function', 'method', 'attribute']: if what == 'attribute': if not isinstance(obj, property): return [] return _format_property_contracts(prop=obj) if what in ['function', 'method']: return _format_function_contracts(func=obj) raise NotImplementedError("Unhandled what: {}".format(what)) elif what == 'class': invariants = getattr(obj, "__invariants__", []) # type: List[icontract._Contract] assert isinstance(invariants, list) assert all(isinstance(inv, icontract._Contract) for inv in invariants) return _format_invariants(invariants=invariants) # Only properties, functions and classes have contracts. return []
python
def _format_contracts(what: str, obj: Any) -> List[str]: """Format the contracts as reST.""" if what in ['function', 'method', 'attribute']: if what == 'attribute': if not isinstance(obj, property): return [] return _format_property_contracts(prop=obj) if what in ['function', 'method']: return _format_function_contracts(func=obj) raise NotImplementedError("Unhandled what: {}".format(what)) elif what == 'class': invariants = getattr(obj, "__invariants__", []) # type: List[icontract._Contract] assert isinstance(invariants, list) assert all(isinstance(inv, icontract._Contract) for inv in invariants) return _format_invariants(invariants=invariants) # Only properties, functions and classes have contracts. return []
[ "def", "_format_contracts", "(", "what", ":", "str", ",", "obj", ":", "Any", ")", "->", "List", "[", "str", "]", ":", "if", "what", "in", "[", "'function'", ",", "'method'", ",", "'attribute'", "]", ":", "if", "what", "==", "'attribute'", ":", "if", "not", "isinstance", "(", "obj", ",", "property", ")", ":", "return", "[", "]", "return", "_format_property_contracts", "(", "prop", "=", "obj", ")", "if", "what", "in", "[", "'function'", ",", "'method'", "]", ":", "return", "_format_function_contracts", "(", "func", "=", "obj", ")", "raise", "NotImplementedError", "(", "\"Unhandled what: {}\"", ".", "format", "(", "what", ")", ")", "elif", "what", "==", "'class'", ":", "invariants", "=", "getattr", "(", "obj", ",", "\"__invariants__\"", ",", "[", "]", ")", "# type: List[icontract._Contract]", "assert", "isinstance", "(", "invariants", ",", "list", ")", "assert", "all", "(", "isinstance", "(", "inv", ",", "icontract", ".", "_Contract", ")", "for", "inv", "in", "invariants", ")", "return", "_format_invariants", "(", "invariants", "=", "invariants", ")", "# Only properties, functions and classes have contracts.", "return", "[", "]" ]
Format the contracts as reST.
[ "Format", "the", "contracts", "as", "reST", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L479-L501
246,636
Parquery/sphinx-icontract
sphinx_icontract/__init__.py
process_docstring
def process_docstring(app, what, name, obj, options, lines): """React to a docstring event and append contracts to it.""" # pylint: disable=unused-argument # pylint: disable=too-many-arguments lines.extend(_format_contracts(what=what, obj=obj))
python
def process_docstring(app, what, name, obj, options, lines): """React to a docstring event and append contracts to it.""" # pylint: disable=unused-argument # pylint: disable=too-many-arguments lines.extend(_format_contracts(what=what, obj=obj))
[ "def", "process_docstring", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ",", "lines", ")", ":", "# pylint: disable=unused-argument", "# pylint: disable=too-many-arguments", "lines", ".", "extend", "(", "_format_contracts", "(", "what", "=", "what", ",", "obj", "=", "obj", ")", ")" ]
React to a docstring event and append contracts to it.
[ "React", "to", "a", "docstring", "event", "and", "append", "contracts", "to", "it", "." ]
92918f23a8ea1873112e9b7446c64cd6f12ee04b
https://github.com/Parquery/sphinx-icontract/blob/92918f23a8ea1873112e9b7446c64cd6f12ee04b/sphinx_icontract/__init__.py#L504-L508
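Since app and options are unused in process_docstring above, the whole pipeline can be exercised directly on a decorated function. A hedged end-to-end sketch, assuming icontract 2.x (require/ensure) and sphinx-icontract are installed; the exact rendered text depends on the installed versions:

import icontract
import sphinx_icontract

@icontract.require(lambda x: x > 0, "x must be positive")
@icontract.ensure(lambda result, x: result >= x)
def double(x: int) -> int:
    return 2 * x

lines = []
sphinx_icontract.process_docstring(
    app=None, what='function', name='double', obj=double, options=None, lines=lines)
print("\n".join(lines))
# Expected shape:
# :requires:
#     * x > 0 (x must be positive)
# :ensures:
#     * result >= x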
246,637
sarugaku/packagebuilder
src/packagebuilder/_pip.py
build_wheel
def build_wheel(ireq, sources, hashes=None, cache_dir=None): """Build a wheel file for the InstallRequirement object. An artifact is downloaded (or read from cache). If the artifact is not a wheel, build one out of it. The dynamically built wheel is ephemeral; do not depend on its existence after the returned wheel goes out of scope. If `hashes` is truthy, it is assumed to be a list of hashes (as formatted in Pipfile.lock) to be checked against the download. Returns a `distlib.wheel.Wheel` instance. Raises a `WheelBuildError` (a `RuntimeError` subclass) if the wheel cannot be built. """ kwargs = _prepare_wheel_building_kwargs(ireq) finder = _get_finder(sources, cache_dir=cache_dir) # Not for upgrade, hash not required. Hashes are not required here even # when we provide them, because pip skips local wheel cache if we set it # to True. Hashes are checked later if we need to download the file. ireq.populate_link(finder, False, False) # Ensure ireq.source_dir is set. # This is intentionally set to build_dir, not src_dir. Comments from pip: # [...] if filesystem packages are not marked editable in a req, a non # deterministic error occurs when the script attempts to unpack the # build directory. # Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq # is editable, build_dir is actually src_dir, making the build in-place. ireq.ensure_has_source_dir(kwargs["build_dir"]) # Ensure the source is fetched. For wheels, it is enough to just download # because we'll use them directly. For an sdist, we need to unpack so we # can build it. if not ireq.editable or not pip_shims.is_file_url(ireq.link): if ireq.is_wheel: only_download = True download_dir = kwargs["wheel_download_dir"] else: only_download = False download_dir = kwargs["download_dir"] ireq.options["hashes"] = _convert_hashes(hashes) unpack_url( ireq.link, ireq.source_dir, download_dir, only_download=only_download, session=finder.session, hashes=ireq.hashes(False), progress_bar="off", ) if ireq.is_wheel: # If this is a wheel, use the downloaded thing. output_dir = kwargs["wheel_download_dir"] wheel_path = os.path.join(output_dir, ireq.link.filename) else: # Othereise we need to build an ephemeral wheel. wheel_path = _build_wheel( ireq, vistir.path.create_tracked_tempdir(prefix="ephem"), finder, _get_wheel_cache(cache_dir=cache_dir), kwargs, ) if wheel_path is None or not os.path.exists(wheel_path): raise WheelBuildError return distlib.wheel.Wheel(wheel_path)
python
def build_wheel(ireq, sources, hashes=None, cache_dir=None): """Build a wheel file for the InstallRequirement object. An artifact is downloaded (or read from cache). If the artifact is not a wheel, build one out of it. The dynamically built wheel is ephemeral; do not depend on its existence after the returned wheel goes out of scope. If `hashes` is truthy, it is assumed to be a list of hashes (as formatted in Pipfile.lock) to be checked against the download. Returns a `distlib.wheel.Wheel` instance. Raises a `WheelBuildError` (a `RuntimeError` subclass) if the wheel cannot be built. """ kwargs = _prepare_wheel_building_kwargs(ireq) finder = _get_finder(sources, cache_dir=cache_dir) # Not for upgrade, hash not required. Hashes are not required here even # when we provide them, because pip skips local wheel cache if we set it # to True. Hashes are checked later if we need to download the file. ireq.populate_link(finder, False, False) # Ensure ireq.source_dir is set. # This is intentionally set to build_dir, not src_dir. Comments from pip: # [...] if filesystem packages are not marked editable in a req, a non # deterministic error occurs when the script attempts to unpack the # build directory. # Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq # is editable, build_dir is actually src_dir, making the build in-place. ireq.ensure_has_source_dir(kwargs["build_dir"]) # Ensure the source is fetched. For wheels, it is enough to just download # because we'll use them directly. For an sdist, we need to unpack so we # can build it. if not ireq.editable or not pip_shims.is_file_url(ireq.link): if ireq.is_wheel: only_download = True download_dir = kwargs["wheel_download_dir"] else: only_download = False download_dir = kwargs["download_dir"] ireq.options["hashes"] = _convert_hashes(hashes) unpack_url( ireq.link, ireq.source_dir, download_dir, only_download=only_download, session=finder.session, hashes=ireq.hashes(False), progress_bar="off", ) if ireq.is_wheel: # If this is a wheel, use the downloaded thing. output_dir = kwargs["wheel_download_dir"] wheel_path = os.path.join(output_dir, ireq.link.filename) else: # Othereise we need to build an ephemeral wheel. wheel_path = _build_wheel( ireq, vistir.path.create_tracked_tempdir(prefix="ephem"), finder, _get_wheel_cache(cache_dir=cache_dir), kwargs, ) if wheel_path is None or not os.path.exists(wheel_path): raise WheelBuildError return distlib.wheel.Wheel(wheel_path)
[ "def", "build_wheel", "(", "ireq", ",", "sources", ",", "hashes", "=", "None", ",", "cache_dir", "=", "None", ")", ":", "kwargs", "=", "_prepare_wheel_building_kwargs", "(", "ireq", ")", "finder", "=", "_get_finder", "(", "sources", ",", "cache_dir", "=", "cache_dir", ")", "# Not for upgrade, hash not required. Hashes are not required here even", "# when we provide them, because pip skips local wheel cache if we set it", "# to True. Hashes are checked later if we need to download the file.", "ireq", ".", "populate_link", "(", "finder", ",", "False", ",", "False", ")", "# Ensure ireq.source_dir is set.", "# This is intentionally set to build_dir, not src_dir. Comments from pip:", "# [...] if filesystem packages are not marked editable in a req, a non", "# deterministic error occurs when the script attempts to unpack the", "# build directory.", "# Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq", "# is editable, build_dir is actually src_dir, making the build in-place.", "ireq", ".", "ensure_has_source_dir", "(", "kwargs", "[", "\"build_dir\"", "]", ")", "# Ensure the source is fetched. For wheels, it is enough to just download", "# because we'll use them directly. For an sdist, we need to unpack so we", "# can build it.", "if", "not", "ireq", ".", "editable", "or", "not", "pip_shims", ".", "is_file_url", "(", "ireq", ".", "link", ")", ":", "if", "ireq", ".", "is_wheel", ":", "only_download", "=", "True", "download_dir", "=", "kwargs", "[", "\"wheel_download_dir\"", "]", "else", ":", "only_download", "=", "False", "download_dir", "=", "kwargs", "[", "\"download_dir\"", "]", "ireq", ".", "options", "[", "\"hashes\"", "]", "=", "_convert_hashes", "(", "hashes", ")", "unpack_url", "(", "ireq", ".", "link", ",", "ireq", ".", "source_dir", ",", "download_dir", ",", "only_download", "=", "only_download", ",", "session", "=", "finder", ".", "session", ",", "hashes", "=", "ireq", ".", "hashes", "(", "False", ")", ",", "progress_bar", "=", "\"off\"", ",", ")", "if", "ireq", ".", "is_wheel", ":", "# If this is a wheel, use the downloaded thing.", "output_dir", "=", "kwargs", "[", "\"wheel_download_dir\"", "]", "wheel_path", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "ireq", ".", "link", ".", "filename", ")", "else", ":", "# Othereise we need to build an ephemeral wheel.", "wheel_path", "=", "_build_wheel", "(", "ireq", ",", "vistir", ".", "path", ".", "create_tracked_tempdir", "(", "prefix", "=", "\"ephem\"", ")", ",", "finder", ",", "_get_wheel_cache", "(", "cache_dir", "=", "cache_dir", ")", ",", "kwargs", ",", ")", "if", "wheel_path", "is", "None", "or", "not", "os", ".", "path", ".", "exists", "(", "wheel_path", ")", ":", "raise", "WheelBuildError", "return", "distlib", ".", "wheel", ".", "Wheel", "(", "wheel_path", ")" ]
Build a wheel file for the InstallRequirement object. An artifact is downloaded (or read from cache). If the artifact is not a wheel, build one out of it. The dynamically built wheel is ephemeral; do not depend on its existence after the returned wheel goes out of scope. If `hashes` is truthy, it is assumed to be a list of hashes (as formatted in Pipfile.lock) to be checked against the download. Returns a `distlib.wheel.Wheel` instance. Raises a `WheelBuildError` (a `RuntimeError` subclass) if the wheel cannot be built.
[ "Build", "a", "wheel", "file", "for", "the", "InstallRequirement", "object", "." ]
1b751f6e332962ef15e5827f418657c2cba60563
https://github.com/sarugaku/packagebuilder/blob/1b751f6e332962ef15e5827f418657c2cba60563/src/packagebuilder/_pip.py#L162-L221
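A rough sketch of driving build_wheel for one pinned requirement. Treat the InstallRequirement constructor (install_req_from_line via pip_shims), the Pipfile-style shape of the sources list, and the WheelBuildError import location as assumptions based on how pipenv-family tooling usually looks, not as facts from this excerpt; network access and a reachable index are required.

import pip_shims.shims
from packagebuilder._pip import build_wheel, WheelBuildError

sources = [{'name': 'pypi', 'url': 'https://pypi.org/simple', 'verify_ssl': True}]
ireq = pip_shims.shims.install_req_from_line('requests==2.20.0')

try:
    wheel = build_wheel(ireq, sources)           # distlib.wheel.Wheel on success
    print(wheel.name, wheel.version)
except WheelBuildError:
    print('could not produce a wheel for', ireq)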
246,638
exekias/droplet
droplet/sudo.py
set_euid
def set_euid(): """ Set settings.DROPLET_USER effective UID for the current process This adds some security, but nothing magic, an attacker can still gain root access, but at least we only elevate privileges when needed See root context manager """ current = os.geteuid() logger.debug("Current EUID is %s" % current) if settings.DROPLET_USER is None: logger.info("Not changing EUID, DROPLET_USER is None") return uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid) if current != uid: try: os.seteuid(uid) logger.info("Set EUID to %s (%s)" % (settings.DROPLET_USER, os.geteuid())) except: current_user = pwd.getpwuid(os.getuid()).pw_name logger.error("Failed to set '%s' EUID, running as '%s'" % (settings.DROPLET_USER, current_user)) else: logger.debug("Didn't set EUID, it was already correct")
python
def set_euid(): """ Set settings.DROPLET_USER effective UID for the current process This adds some security, but nothing magic, an attacker can still gain root access, but at least we only elevate privileges when needed See root context manager """ current = os.geteuid() logger.debug("Current EUID is %s" % current) if settings.DROPLET_USER is None: logger.info("Not changing EUID, DROPLET_USER is None") return uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid) if current != uid: try: os.seteuid(uid) logger.info("Set EUID to %s (%s)" % (settings.DROPLET_USER, os.geteuid())) except: current_user = pwd.getpwuid(os.getuid()).pw_name logger.error("Failed to set '%s' EUID, running as '%s'" % (settings.DROPLET_USER, current_user)) else: logger.debug("Didn't set EUID, it was already correct")
[ "def", "set_euid", "(", ")", ":", "current", "=", "os", ".", "geteuid", "(", ")", "logger", ".", "debug", "(", "\"Current EUID is %s\"", "%", "current", ")", "if", "settings", ".", "DROPLET_USER", "is", "None", ":", "logger", ".", "info", "(", "\"Not changing EUID, DROPLET_USER is None\"", ")", "return", "uid", "=", "int", "(", "pwd", ".", "getpwnam", "(", "settings", ".", "DROPLET_USER", ")", ".", "pw_uid", ")", "if", "current", "!=", "uid", ":", "try", ":", "os", ".", "seteuid", "(", "uid", ")", "logger", ".", "info", "(", "\"Set EUID to %s (%s)\"", "%", "(", "settings", ".", "DROPLET_USER", ",", "os", ".", "geteuid", "(", ")", ")", ")", "except", ":", "current_user", "=", "pwd", ".", "getpwuid", "(", "os", ".", "getuid", "(", ")", ")", ".", "pw_name", "logger", ".", "error", "(", "\"Failed to set '%s' EUID, running as '%s'\"", "%", "(", "settings", ".", "DROPLET_USER", ",", "current_user", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"Didn't set EUID, it was already correct\"", ")" ]
Set settings.DROPLET_USER effective UID for the current process This adds some security, but nothing magic, an attacker can still gain root access, but at least we only elevate privileges when needed See root context manager
[ "Set", "settings", ".", "DROPLET_USER", "effective", "UID", "for", "the", "current", "process" ]
aeac573a2c1c4b774e99d5414a1c79b1bb734941
https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/sudo.py#L29-L56
246,639
exekias/droplet
droplet/sudo.py
drop_privileges
def drop_privileges(): """ Set settings.DROPLET_USER UID for the current process After calling this, root operation will be impossible to execute See root context manager """ uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid) os.setuid(uid)
python
def drop_privileges(): """ Set settings.DROPLET_USER UID for the current process After calling this, root operation will be impossible to execute See root context manager """ uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid) os.setuid(uid)
[ "def", "drop_privileges", "(", ")", ":", "uid", "=", "int", "(", "pwd", ".", "getpwnam", "(", "settings", ".", "DROPLET_USER", ")", ".", "pw_uid", ")", "os", ".", "setuid", "(", "uid", ")" ]
Set settings.DROPLET_USER UID for the current process After calling this, root operation will be impossible to execute See root context manager
[ "Set", "settings", ".", "DROPLET_USER", "UID", "for", "the", "current", "process" ]
aeac573a2c1c4b774e99d5414a1c79b1bb734941
https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/sudo.py#L59-L68
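Together the two helpers above give a root-started service a reversible and an irreversible way to shed privileges. A sketch of typical use, assuming a Django-style settings module with DROPLET_USER set (e.g. 'droplet') and a process launched as root:

from droplet.sudo import set_euid, drop_privileges

# Reversible: only the effective UID changes, so code inside the package's
# root context manager can still re-elevate when it needs to.
set_euid()

# Irreversible: the real UID changes too, and root operations become
# impossible for the rest of the process lifetime.
drop_privileges()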
246,640
oblalex/candv
candv/__init__.py
Values.get_by_value
def get_by_value(cls, value): """ Get constant by its value. :param value: value of the constant to look for :returns: first found constant with given value :raises ValueError: if no constant in container has given value """ for constant in cls.iterconstants(): if constant.value == value: return constant raise ValueError( "Constant with value \"{0}\" is not present in \"{1}\"" .format(value, cls) )
python
def get_by_value(cls, value): """ Get constant by its value. :param value: value of the constant to look for :returns: first found constant with given value :raises ValueError: if no constant in container has given value """ for constant in cls.iterconstants(): if constant.value == value: return constant raise ValueError( "Constant with value \"{0}\" is not present in \"{1}\"" .format(value, cls) )
[ "def", "get_by_value", "(", "cls", ",", "value", ")", ":", "for", "constant", "in", "cls", ".", "iterconstants", "(", ")", ":", "if", "constant", ".", "value", "==", "value", ":", "return", "constant", "raise", "ValueError", "(", "\"Constant with value \\\"{0}\\\" is not present in \\\"{1}\\\"\"", ".", "format", "(", "value", ",", "cls", ")", ")" ]
Get constant by its value. :param value: value of the constant to look for :returns: first found constant with given value :raises ValueError: if no constant in container has given value
[ "Get", "constant", "by", "its", "value", "." ]
0b522bd24f4045844a04793b456f1135d093f280
https://github.com/oblalex/candv/blob/0b522bd24f4045844a04793b456f1135d093f280/candv/__init__.py#L141-L155
246,641
oblalex/candv
candv/__init__.py
Values.filter_by_value
def filter_by_value(cls, value): """ Get all constants which have given value. :param value: value of the constants to look for :returns: list of all found constants with given value """ constants = [] for constant in cls.iterconstants(): if constant.value == value: constants.append(constant) return constants
python
def filter_by_value(cls, value): """ Get all constants which have given value. :param value: value of the constants to look for :returns: list of all found constants with given value """ constants = [] for constant in cls.iterconstants(): if constant.value == value: constants.append(constant) return constants
[ "def", "filter_by_value", "(", "cls", ",", "value", ")", ":", "constants", "=", "[", "]", "for", "constant", "in", "cls", ".", "iterconstants", "(", ")", ":", "if", "constant", ".", "value", "==", "value", ":", "constants", ".", "append", "(", "constant", ")", "return", "constants" ]
Get all constants which have given value. :param value: value of the constants to look for :returns: list of all found constants with given value
[ "Get", "all", "constants", "which", "have", "given", "value", "." ]
0b522bd24f4045844a04793b456f1135d093f280
https://github.com/oblalex/candv/blob/0b522bd24f4045844a04793b456f1135d093f280/candv/__init__.py#L158-L169
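Both lookups just walk iterconstants() and compare on .value, so the behaviour is easiest to see on a small container. A sketch assuming the usual candv building blocks, Values plus ValueConstant:

from candv import Values, ValueConstant

class STATUS(Values):
    OPEN = ValueConstant('o')
    CLOSED = ValueConstant('c')
    ARCHIVED = ValueConstant('c')    # deliberately shares a value with CLOSED

STATUS.get_by_value('o')       # -> STATUS.OPEN (first match wins)
STATUS.filter_by_value('c')    # -> [STATUS.CLOSED, STATUS.ARCHIVED]
# STATUS.get_by_value('x')     # would raise ValueError: no constant has that value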
246,642
dr4ke616/pinky
pinky/core/hash.py
ConsistentHash.get_machine
def get_machine(self, key): """ Returns the number of the machine which key gets sent to. """ h = self.hash(key) # edge case where we cycle past hash value of 1 and back to 0. if h > self.hash_tuples[-1][2]: return self.hash_tuples[0][0] hash_values = map(lambda x: x[2], self.hash_tuples) index = bisect.bisect_left(hash_values, h) return self.hash_tuples[index][0]
python
def get_machine(self, key): """ Returns the number of the machine which key gets sent to. """ h = self.hash(key) # edge case where we cycle past hash value of 1 and back to 0. if h > self.hash_tuples[-1][2]: return self.hash_tuples[0][0] hash_values = map(lambda x: x[2], self.hash_tuples) index = bisect.bisect_left(hash_values, h) return self.hash_tuples[index][0]
[ "def", "get_machine", "(", "self", ",", "key", ")", ":", "h", "=", "self", ".", "hash", "(", "key", ")", "# edge case where we cycle past hash value of 1 and back to 0.", "if", "h", ">", "self", ".", "hash_tuples", "[", "-", "1", "]", "[", "2", "]", ":", "return", "self", ".", "hash_tuples", "[", "0", "]", "[", "0", "]", "hash_values", "=", "map", "(", "lambda", "x", ":", "x", "[", "2", "]", ",", "self", ".", "hash_tuples", ")", "index", "=", "bisect", ".", "bisect_left", "(", "hash_values", ",", "h", ")", "return", "self", ".", "hash_tuples", "[", "index", "]", "[", "0", "]" ]
Returns the number of the machine which key gets sent to.
[ "Returns", "the", "number", "of", "the", "machine", "which", "key", "gets", "sent", "to", "." ]
35c165f5a1d410be467621f3152df1dbf458622a
https://github.com/dr4ke616/pinky/blob/35c165f5a1d410be467621f3152df1dbf458622a/pinky/core/hash.py#L34-L45
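get_machine is a standard hash-ring lookup: take the key's position on the ring, pick the first machine placed at or after it, and wrap to the first machine when the position falls past the last one. The ConsistentHash constructor is not part of this excerpt, so here is a standalone sketch of the same bisect logic over (machine, replica, position) triples, which is the layout the method above assumes:

import bisect

# (machine id, replica index, position on the [0, 1) ring), sorted by position.
hash_tuples = [(0, 0, 0.12), (1, 0, 0.40), (2, 0, 0.77)]
positions = [t[2] for t in hash_tuples]

def get_machine(h):
    # Wrap-around: anything past the last position belongs to the first machine.
    if h > positions[-1]:
        return hash_tuples[0][0]
    return hash_tuples[bisect.bisect_left(positions, h)][0]

print(get_machine(0.30))   # -> 1
print(get_machine(0.90))   # -> 0 (wrapped past the top of the ring)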
246,643
jut-io/jut-python-tools
jut/api/accounts.py
create_user
def create_user(name, username, email, password, token_manager=None, app_url=defaults.APP_URL): """ create a new user with the specified name, username email and password """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = "%s/api/v1/accounts" % auth_url payload = { 'name': name, 'username': username, 'email': email, 'password': password } response = requests.post(url, data=json.dumps(payload), headers=headers) if response.status_code == 201: return response.json() else: raise JutException('Error %s: %s' % (response.status_code, response.text))
python
def create_user(name, username, email, password, token_manager=None, app_url=defaults.APP_URL): """ create a new user with the specified name, username email and password """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = "%s/api/v1/accounts" % auth_url payload = { 'name': name, 'username': username, 'email': email, 'password': password } response = requests.post(url, data=json.dumps(payload), headers=headers) if response.status_code == 201: return response.json() else: raise JutException('Error %s: %s' % (response.status_code, response.text))
[ "def", "create_user", "(", "name", ",", "username", ",", "email", ",", "password", ",", "token_manager", "=", "None", ",", "app_url", "=", "defaults", ".", "APP_URL", ")", ":", "headers", "=", "token_manager", ".", "get_access_token_headers", "(", ")", "auth_url", "=", "environment", ".", "get_auth_url", "(", "app_url", "=", "app_url", ")", "url", "=", "\"%s/api/v1/accounts\"", "%", "auth_url", "payload", "=", "{", "'name'", ":", "name", ",", "'username'", ":", "username", ",", "'email'", ":", "email", ",", "'password'", ":", "password", "}", "response", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ",", "headers", "=", "headers", ")", "if", "response", ".", "status_code", "==", "201", ":", "return", "response", ".", "json", "(", ")", "else", ":", "raise", "JutException", "(", "'Error %s: %s'", "%", "(", "response", ".", "status_code", ",", "response", ".", "text", ")", ")" ]
create a new user with the specified name, username email and password
[ "create", "a", "new", "user", "with", "the", "specified", "name", "username", "email", "and", "password" ]
65574d23f51a7bbced9bb25010d02da5ca5d906f
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/api/accounts.py#L13-L42
246,644
jut-io/jut-python-tools
jut/api/accounts.py
get_logged_in_account
def get_logged_in_account(token_manager=None, app_url=defaults.APP_URL): """ get the account details for logged in account of the auth token_manager """ return get_logged_in_account(token_manager=token_manager, app_url=app_url)['id']
python
def get_logged_in_account(token_manager=None, app_url=defaults.APP_URL): """ get the account details for logged in account of the auth token_manager """ return get_logged_in_account(token_manager=token_manager, app_url=app_url)['id']
[ "def", "get_logged_in_account", "(", "token_manager", "=", "None", ",", "app_url", "=", "defaults", ".", "APP_URL", ")", ":", "return", "get_logged_in_account", "(", "token_manager", "=", "token_manager", ",", "app_url", "=", "app_url", ")", "[", "'id'", "]" ]
get the account details for logged in account of the auth token_manager
[ "get", "the", "account", "details", "for", "logged", "in", "account", "of", "the", "auth", "token_manager" ]
65574d23f51a7bbced9bb25010d02da5ca5d906f
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/api/accounts.py#L90-L97
246,645
jut-io/jut-python-tools
jut/api/accounts.py
get_logged_in_account
def get_logged_in_account(token_manager=None, app_url=defaults.APP_URL): """ get the account details for credentials provided """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = "%s/api/v1/account" % auth_url response = requests.get(url, headers=headers) if response.status_code == 200: return response.json() else: raise JutException('Error %s; %s' % (response.status_code, response.text))
python
def get_logged_in_account(token_manager=None, app_url=defaults.APP_URL): """ get the account details for credentials provided """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = "%s/api/v1/account" % auth_url response = requests.get(url, headers=headers) if response.status_code == 200: return response.json() else: raise JutException('Error %s; %s' % (response.status_code, response.text))
[ "def", "get_logged_in_account", "(", "token_manager", "=", "None", ",", "app_url", "=", "defaults", ".", "APP_URL", ")", ":", "headers", "=", "token_manager", ".", "get_access_token_headers", "(", ")", "auth_url", "=", "environment", ".", "get_auth_url", "(", "app_url", "=", "app_url", ")", "url", "=", "\"%s/api/v1/account\"", "%", "auth_url", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ")", "if", "response", ".", "status_code", "==", "200", ":", "return", "response", ".", "json", "(", ")", "else", ":", "raise", "JutException", "(", "'Error %s; %s'", "%", "(", "response", ".", "status_code", ",", "response", ".", "text", ")", ")" ]
get the account details for credentials provided
[ "get", "the", "account", "details", "for", "credentials", "provided" ]
65574d23f51a7bbced9bb25010d02da5ca5d906f
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/api/accounts.py#L100-L117
246,646
jut-io/jut-python-tools
jut/api/accounts.py
user_exists
def user_exists(username, token_manager=None, app_url=defaults.APP_URL): """ check if the user exists with the specified username """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = "%s/api/v1/accounts?username=%s" % (auth_url, username) response = requests.get(url, headers=headers) if response.status_code == 404: return False elif response.status_code == 200: return True else: raise JutException('Error %s: %s' % (response.status_code, response.text))
python
def user_exists(username, token_manager=None, app_url=defaults.APP_URL): """ check if the user exists with the specified username """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = "%s/api/v1/accounts?username=%s" % (auth_url, username) response = requests.get(url, headers=headers) if response.status_code == 404: return False elif response.status_code == 200: return True else: raise JutException('Error %s: %s' % (response.status_code, response.text))
[ "def", "user_exists", "(", "username", ",", "token_manager", "=", "None", ",", "app_url", "=", "defaults", ".", "APP_URL", ")", ":", "headers", "=", "token_manager", ".", "get_access_token_headers", "(", ")", "auth_url", "=", "environment", ".", "get_auth_url", "(", "app_url", "=", "app_url", ")", "url", "=", "\"%s/api/v1/accounts?username=%s\"", "%", "(", "auth_url", ",", "username", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ")", "if", "response", ".", "status_code", "==", "404", ":", "return", "False", "elif", "response", ".", "status_code", "==", "200", ":", "return", "True", "else", ":", "raise", "JutException", "(", "'Error %s: %s'", "%", "(", "response", ".", "status_code", ",", "response", ".", "text", ")", ")" ]
check if the user exists with the specified username
[ "check", "if", "the", "user", "exists", "with", "the", "specified", "username" ]
65574d23f51a7bbced9bb25010d02da5ca5d906f
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/api/accounts.py#L130-L147
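These account helpers only ever call get_access_token_headers() on the token manager, so a stand-in object is enough to sketch the flow; the real token manager comes from the package's authentication layer, and the bearer token below is a placeholder.

from jut.api import accounts

class StubTokenManager(object):
    """Stand-in for the real token manager; only the one method these helpers use."""
    def get_access_token_headers(self):
        return {'Authorization': 'Bearer <access-token>'}

tm = StubTokenManager()

if not accounts.user_exists('alice', token_manager=tm):
    accounts.create_user('Alice Example', 'alice', 'alice@example.com',
                         's3cret-password', token_manager=tm)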
246,647
kodexlab/reliure
reliure/schema.py
DocField.FromType
def FromType(ftype): """ DocField subclasses factory, creates a convenient field to store data from a given Type. attribute precedence : * ``|attrs| > 0`` (``multi`` and ``uniq`` are implicit) => VectorField * ``uniq`` (``multi`` is implicit) => SetField * ``multi`` and ``not uniq`` => ListField * ``not multi`` => ValueField :param ftype: the desired type of field :type ftype: subclass of :class:`.GenericType` """ if ftype.attrs is not None and len(ftype.attrs): return VectorField(ftype) elif ftype.uniq: return SetField(ftype) elif ftype.multi: return ListField(ftype) else: return ValueField(ftype)
python
def FromType(ftype): """ DocField subclasses factory, creates a convenient field to store data from a given Type. attribute precedence : * ``|attrs| > 0`` (``multi`` and ``uniq`` are implicit) => VectorField * ``uniq`` (``multi`` is implicit) => SetField * ``multi`` and ``not uniq`` => ListField * ``not multi`` => ValueField :param ftype: the desired type of field :type ftype: subclass of :class:`.GenericType` """ if ftype.attrs is not None and len(ftype.attrs): return VectorField(ftype) elif ftype.uniq: return SetField(ftype) elif ftype.multi: return ListField(ftype) else: return ValueField(ftype)
[ "def", "FromType", "(", "ftype", ")", ":", "if", "ftype", ".", "attrs", "is", "not", "None", "and", "len", "(", "ftype", ".", "attrs", ")", ":", "return", "VectorField", "(", "ftype", ")", "elif", "ftype", ".", "uniq", ":", "return", "SetField", "(", "ftype", ")", "elif", "ftype", ".", "multi", ":", "return", "ListField", "(", "ftype", ")", "else", ":", "return", "ValueField", "(", "ftype", ")" ]
DocField subclasses factory, creates a convenient field to store data from a given Type. attribute precedence : * ``|attrs| > 0`` (``multi`` and ``uniq`` are implicit) => VectorField * ``uniq`` (``multi`` is implicit) => SetField * ``multi`` and ``not uniq`` => ListField * ``not multi`` => ValueField :param ftype: the desired type of field :type ftype: subclass of :class:`.GenericType`
[ "DocField", "subclasses", "factory", "creates", "a", "convenient", "field", "to", "store", "data", "from", "a", "given", "Type", "." ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/schema.py#L177-L198
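The precedence rules are easiest to read off with concrete types. A sketch assuming the usual reliure.types constructors (Text, Numeric) and their multi/uniq/attrs keywords, as used in the package's own schema examples:

from reliure.schema import DocField
from reliure.types import Numeric, Text

DocField.FromType(Numeric())                        # plain type       -> ValueField
DocField.FromType(Text(multi=True))                 # multi, not uniq  -> ListField
DocField.FromType(Text(uniq=True))                  # uniq             -> SetField
DocField.FromType(Text(attrs={'tf': Numeric()}))    # has attributes   -> VectorField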
246,648
kodexlab/reliure
reliure/schema.py
VectorField.clear_attributes
def clear_attributes(self): """ removes all attributes """ self._attrs = {} # removes all attr for name, attr_field in six.iteritems(self._ftype.attrs): self._attrs[name] = []
python
def clear_attributes(self): """ removes all attributes """ self._attrs = {} # removes all attr for name, attr_field in six.iteritems(self._ftype.attrs): self._attrs[name] = []
[ "def", "clear_attributes", "(", "self", ")", ":", "self", ".", "_attrs", "=", "{", "}", "# removes all attr", "for", "name", ",", "attr_field", "in", "six", ".", "iteritems", "(", "self", ".", "_ftype", ".", "attrs", ")", ":", "self", ".", "_attrs", "[", "name", "]", "=", "[", "]" ]
removes all attributes
[ "removes", "all", "attributes" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/schema.py#L441-L446
246,649
kodexlab/reliure
reliure/schema.py
VectorField.set_attr_value
def set_attr_value(self, key, attr, value): """ set the value of a given attribute for a given key """ idx = self._keys[key] self._attrs[attr][idx].set(value)
python
def set_attr_value(self, key, attr, value): """ set the value of a given attribute for a given key """ idx = self._keys[key] self._attrs[attr][idx].set(value)
[ "def", "set_attr_value", "(", "self", ",", "key", ",", "attr", ",", "value", ")", ":", "idx", "=", "self", ".", "_keys", "[", "key", "]", "self", ".", "_attrs", "[", "attr", "]", "[", "idx", "]", ".", "set", "(", "value", ")" ]
set the value of a given attribute for a given key
[ "set", "the", "value", "of", "a", "given", "attribute", "for", "a", "given", "key" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/schema.py#L619-L623
246,650
kodexlab/reliure
reliure/schema.py
Doc.set_field
def set_field(self, name, value, parse=False): """ Set the value of a field """ # explicit getitem needed for ValueField try: item = dict.__getitem__(self, name) item.set( item.parse(value) if parse else value ) except ValidationError as err: raise FieldValidationError(name, value, list(err))
python
def set_field(self, name, value, parse=False): """ Set the value of a field """ # explicit getitem needed for ValueField try: item = dict.__getitem__(self, name) item.set( item.parse(value) if parse else value ) except ValidationError as err: raise FieldValidationError(name, value, list(err))
[ "def", "set_field", "(", "self", ",", "name", ",", "value", ",", "parse", "=", "False", ")", ":", "# explicit getitem needed for ValueField", "try", ":", "item", "=", "dict", ".", "__getitem__", "(", "self", ",", "name", ")", "item", ".", "set", "(", "item", ".", "parse", "(", "value", ")", "if", "parse", "else", "value", ")", "except", "ValidationError", "as", "err", ":", "raise", "FieldValidationError", "(", "name", ",", "value", ",", "list", "(", "err", ")", ")" ]
Set the value of a field
[ "Set", "the", "value", "of", "a", "field" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/schema.py#L861-L869
246,651
kodexlab/reliure
reliure/schema.py
Doc.export
def export(self, exclude=[]): """ returns a dictionary representation of the document """ fields = ( (key, self.get_field(key)) for key in self.schema if not key.startswith("_") and key not in exclude ) doc = {name: field.export() for name, field in fields} return doc
python
def export(self, exclude=[]): """ returns a dictionary representation of the document """ fields = ( (key, self.get_field(key)) for key in self.schema if not key.startswith("_") and key not in exclude ) doc = {name: field.export() for name, field in fields} return doc
[ "def", "export", "(", "self", ",", "exclude", "=", "[", "]", ")", ":", "fields", "=", "(", "(", "key", ",", "self", ".", "get_field", "(", "key", ")", ")", "for", "key", "in", "self", ".", "schema", "if", "not", "key", ".", "startswith", "(", "\"_\"", ")", "and", "key", "not", "in", "exclude", ")", "doc", "=", "{", "name", ":", "field", ".", "export", "(", ")", "for", "name", ",", "field", "in", "fields", "}", "return", "doc" ]
returns a dictionary representation of the document
[ "returns", "a", "dictionary", "representation", "of", "the", "document" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/schema.py#L905-L912
246,652
awacha/credolib
credolib/initialization.py
init_dirs
def init_dirs(rootdir_or_loader, outputpath, saveto_dir='data', auximages_dir='auximages', prefix='crd'): """Initialize the directiories. Inputs: rootdir_or_loader: depends on the type: str: the root directory of the SAXSCtrl/CCT software, i.e. where the subfolders ``eval2d``, ``param``, ``images``, ``mask`` etc. reside. sastool.classes2.Loader instance: a fully initialized loader, which will be used to acquire headers and exposures. list: a list of sastool.classes2.Loader instances, which will be used to open headers and exposures. When opening something, always the first item will be tried first, and if it fails with FileNotFoundError, the second, third, etc. will be tried until either the file can be opened or the last one fails. outputpath: the directory where the produced files are written. This is usually the working directory of the IPython notebook. saveto_dir: the subdirectory where averaged, united, subtracted etc. datasets are written. auximages_dir: the subdirectory where automatically produced images reside. Remarks: If a single root directory is given, a list of four loaders will be constructed in this order: CCT (processed), CCT (raw), SAXSCtrl (processed), SAXSCtrl (raw). Raw and processed loaders are handled separately. """ ip = get_ipython() if isinstance(rootdir_or_loader, str): print("Initializing loaders for SAXSCtrl and CCT.", flush=True) ip.user_ns['_loaders'] = [ credo_cct.Loader(rootdir_or_loader, processed=True, exposureclass=prefix), credo_saxsctrl.Loader(rootdir_or_loader, processed=True, exposureclass=prefix), credo_cct.Loader(rootdir_or_loader, processed=False, exposureclass=prefix), credo_saxsctrl.Loader(rootdir_or_loader, processed=False, exposureclass=prefix), ] print("Loaders initialized.", flush=True) elif isinstance(rootdir_or_loader, Loader): ip.user_ns['_loaders'] = [rootdir_or_loader] elif isinstance(rootdir_or_loader, list) and all([isinstance(l, Loader) for l in rootdir_or_loader]): ip.user_ns['_loaders'] = rootdir_or_loader[:] else: raise TypeError(rootdir_or_loader) if not os.path.isdir(outputpath): os.makedirs(outputpath) print("Output files will be written to:", outputpath) os.chdir(outputpath) ip.user_ns['outputpath'] = outputpath if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)): os.mkdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)) if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)): os.mkdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)) ip.user_ns['auximages_dir'] = os.path.join(outputpath, auximages_dir) ip.user_ns['saveto_dir'] = os.path.join(outputpath, saveto_dir) ip.user_ns['saveto_dir_rel'] = saveto_dir ip.user_ns['auximages_dir_rel'] = auximages_dir ip.user_ns['crd_prefix']=prefix set_length_units('nm')
python
def init_dirs(rootdir_or_loader, outputpath, saveto_dir='data', auximages_dir='auximages', prefix='crd'): """Initialize the directiories. Inputs: rootdir_or_loader: depends on the type: str: the root directory of the SAXSCtrl/CCT software, i.e. where the subfolders ``eval2d``, ``param``, ``images``, ``mask`` etc. reside. sastool.classes2.Loader instance: a fully initialized loader, which will be used to acquire headers and exposures. list: a list of sastool.classes2.Loader instances, which will be used to open headers and exposures. When opening something, always the first item will be tried first, and if it fails with FileNotFoundError, the second, third, etc. will be tried until either the file can be opened or the last one fails. outputpath: the directory where the produced files are written. This is usually the working directory of the IPython notebook. saveto_dir: the subdirectory where averaged, united, subtracted etc. datasets are written. auximages_dir: the subdirectory where automatically produced images reside. Remarks: If a single root directory is given, a list of four loaders will be constructed in this order: CCT (processed), CCT (raw), SAXSCtrl (processed), SAXSCtrl (raw). Raw and processed loaders are handled separately. """ ip = get_ipython() if isinstance(rootdir_or_loader, str): print("Initializing loaders for SAXSCtrl and CCT.", flush=True) ip.user_ns['_loaders'] = [ credo_cct.Loader(rootdir_or_loader, processed=True, exposureclass=prefix), credo_saxsctrl.Loader(rootdir_or_loader, processed=True, exposureclass=prefix), credo_cct.Loader(rootdir_or_loader, processed=False, exposureclass=prefix), credo_saxsctrl.Loader(rootdir_or_loader, processed=False, exposureclass=prefix), ] print("Loaders initialized.", flush=True) elif isinstance(rootdir_or_loader, Loader): ip.user_ns['_loaders'] = [rootdir_or_loader] elif isinstance(rootdir_or_loader, list) and all([isinstance(l, Loader) for l in rootdir_or_loader]): ip.user_ns['_loaders'] = rootdir_or_loader[:] else: raise TypeError(rootdir_or_loader) if not os.path.isdir(outputpath): os.makedirs(outputpath) print("Output files will be written to:", outputpath) os.chdir(outputpath) ip.user_ns['outputpath'] = outputpath if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)): os.mkdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)) if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)): os.mkdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)) ip.user_ns['auximages_dir'] = os.path.join(outputpath, auximages_dir) ip.user_ns['saveto_dir'] = os.path.join(outputpath, saveto_dir) ip.user_ns['saveto_dir_rel'] = saveto_dir ip.user_ns['auximages_dir_rel'] = auximages_dir ip.user_ns['crd_prefix']=prefix set_length_units('nm')
[ "def", "init_dirs", "(", "rootdir_or_loader", ",", "outputpath", ",", "saveto_dir", "=", "'data'", ",", "auximages_dir", "=", "'auximages'", ",", "prefix", "=", "'crd'", ")", ":", "ip", "=", "get_ipython", "(", ")", "if", "isinstance", "(", "rootdir_or_loader", ",", "str", ")", ":", "print", "(", "\"Initializing loaders for SAXSCtrl and CCT.\"", ",", "flush", "=", "True", ")", "ip", ".", "user_ns", "[", "'_loaders'", "]", "=", "[", "credo_cct", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "True", ",", "exposureclass", "=", "prefix", ")", ",", "credo_saxsctrl", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "True", ",", "exposureclass", "=", "prefix", ")", ",", "credo_cct", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "False", ",", "exposureclass", "=", "prefix", ")", ",", "credo_saxsctrl", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "False", ",", "exposureclass", "=", "prefix", ")", ",", "]", "print", "(", "\"Loaders initialized.\"", ",", "flush", "=", "True", ")", "elif", "isinstance", "(", "rootdir_or_loader", ",", "Loader", ")", ":", "ip", ".", "user_ns", "[", "'_loaders'", "]", "=", "[", "rootdir_or_loader", "]", "elif", "isinstance", "(", "rootdir_or_loader", ",", "list", ")", "and", "all", "(", "[", "isinstance", "(", "l", ",", "Loader", ")", "for", "l", "in", "rootdir_or_loader", "]", ")", ":", "ip", ".", "user_ns", "[", "'_loaders'", "]", "=", "rootdir_or_loader", "[", ":", "]", "else", ":", "raise", "TypeError", "(", "rootdir_or_loader", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "outputpath", ")", ":", "os", ".", "makedirs", "(", "outputpath", ")", "print", "(", "\"Output files will be written to:\"", ",", "outputpath", ")", "os", ".", "chdir", "(", "outputpath", ")", "ip", ".", "user_ns", "[", "'outputpath'", "]", "=", "outputpath", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "saveto_dir", ")", ")", ":", "os", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "saveto_dir", ")", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "auximages_dir", ")", ")", ":", "os", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "auximages_dir", ")", ")", "ip", ".", "user_ns", "[", "'auximages_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "outputpath", ",", "auximages_dir", ")", "ip", ".", "user_ns", "[", "'saveto_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "outputpath", ",", "saveto_dir", ")", "ip", ".", "user_ns", "[", "'saveto_dir_rel'", "]", "=", "saveto_dir", "ip", ".", "user_ns", "[", "'auximages_dir_rel'", "]", "=", "auximages_dir", "ip", ".", "user_ns", "[", "'crd_prefix'", "]", "=", "prefix", "set_length_units", "(", "'nm'", ")" ]
Initialize the directiories. Inputs: rootdir_or_loader: depends on the type: str: the root directory of the SAXSCtrl/CCT software, i.e. where the subfolders ``eval2d``, ``param``, ``images``, ``mask`` etc. reside. sastool.classes2.Loader instance: a fully initialized loader, which will be used to acquire headers and exposures. list: a list of sastool.classes2.Loader instances, which will be used to open headers and exposures. When opening something, always the first item will be tried first, and if it fails with FileNotFoundError, the second, third, etc. will be tried until either the file can be opened or the last one fails. outputpath: the directory where the produced files are written. This is usually the working directory of the IPython notebook. saveto_dir: the subdirectory where averaged, united, subtracted etc. datasets are written. auximages_dir: the subdirectory where automatically produced images reside. Remarks: If a single root directory is given, a list of four loaders will be constructed in this order: CCT (processed), CCT (raw), SAXSCtrl (processed), SAXSCtrl (raw). Raw and processed loaders are handled separately.
[ "Initialize", "the", "directiories", "." ]
11c0be3eea7257d3d6e13697d3e76ce538f2f1b2
https://github.com/awacha/credolib/blob/11c0be3eea7257d3d6e13697d3e76ce538f2f1b2/credolib/initialization.py#L17-L82
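init_dirs is meant to run at the top of an IPython notebook, since it stores the loaders and paths in the user namespace via get_ipython(). A minimal sketch with placeholder paths; passing a single root directory triggers the four-loader setup described in the docstring:

from credolib.initialization import init_dirs

# Root of the SAXSCtrl/CCT data tree and the notebook's output directory
# (both paths are placeholders).
init_dirs('/data/credo', '/home/user/notebooks/2019-03-measurement',
          saveto_dir='data', auximages_dir='auximages', prefix='crd')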
246,653
treycucco/bidon
bidon/db/core/data_access_core.py
get_pg_core
def get_pg_core(connection_string, *, cursor_factory=None, edit_connection=None): """Creates a simple PostgreSQL core. Requires the psycopg2 library.""" import psycopg2 as pq from psycopg2.extras import NamedTupleCursor def opener(): """Opens a single PostgreSQL connection with the scope-captured connection string.""" cn = pq.connect(connection_string) cn.cursor_factory = cursor_factory or NamedTupleCursor if edit_connection: edit_connection(cn) return cn return InjectedDataAccessCore( opener, default_connection_closer, ("%({0})s", "%s", "{0}::{1}"), empty_params=None, supports_timezones=True, supports_returning_syntax=True, get_autocommit=get_pg_autocommit, set_autocommit=set_pg_autocommit)
python
def get_pg_core(connection_string, *, cursor_factory=None, edit_connection=None): """Creates a simple PostgreSQL core. Requires the psycopg2 library.""" import psycopg2 as pq from psycopg2.extras import NamedTupleCursor def opener(): """Opens a single PostgreSQL connection with the scope-captured connection string.""" cn = pq.connect(connection_string) cn.cursor_factory = cursor_factory or NamedTupleCursor if edit_connection: edit_connection(cn) return cn return InjectedDataAccessCore( opener, default_connection_closer, ("%({0})s", "%s", "{0}::{1}"), empty_params=None, supports_timezones=True, supports_returning_syntax=True, get_autocommit=get_pg_autocommit, set_autocommit=set_pg_autocommit)
[ "def", "get_pg_core", "(", "connection_string", ",", "*", ",", "cursor_factory", "=", "None", ",", "edit_connection", "=", "None", ")", ":", "import", "psycopg2", "as", "pq", "from", "psycopg2", ".", "extras", "import", "NamedTupleCursor", "def", "opener", "(", ")", ":", "\"\"\"Opens a single PostgreSQL connection with the scope-captured connection string.\"\"\"", "cn", "=", "pq", ".", "connect", "(", "connection_string", ")", "cn", ".", "cursor_factory", "=", "cursor_factory", "or", "NamedTupleCursor", "if", "edit_connection", ":", "edit_connection", "(", "cn", ")", "return", "cn", "return", "InjectedDataAccessCore", "(", "opener", ",", "default_connection_closer", ",", "(", "\"%({0})s\"", ",", "\"%s\"", ",", "\"{0}::{1}\"", ")", ",", "empty_params", "=", "None", ",", "supports_timezones", "=", "True", ",", "supports_returning_syntax", "=", "True", ",", "get_autocommit", "=", "get_pg_autocommit", ",", "set_autocommit", "=", "set_pg_autocommit", ")" ]
Creates a simple PostgreSQL core. Requires the psycopg2 library.
[ "Creates", "a", "simple", "PostgreSQL", "core", ".", "Requires", "the", "psycopg2", "library", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/core/data_access_core.py#L118-L139
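A sketch of building a PostgreSQL core with both hooks exercised; the DSN is a placeholder, and psycopg2 plus a reachable server are required. The object that comes back is the InjectedDataAccessCore whose close() method is shown above, so close(cn, commit=False) would roll back before handing the connection to the injected closer.

from psycopg2.extras import RealDictCursor
from bidon.db.core.data_access_core import get_pg_core

core = get_pg_core(
    'host=localhost dbname=app user=app password=secret',        # placeholder DSN
    cursor_factory=RealDictCursor,                                # dict rows instead of namedtuples
    edit_connection=lambda cn: cn.set_client_encoding('UTF8'),    # runs once per new connection
)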
246,654
treycucco/bidon
bidon/db/core/data_access_core.py
get_pooled_pg_core
def get_pooled_pg_core(connection_string, pool_size=None, *, cursor_factory=None, edit_connection=None, threaded=True): """Creates a pooled PostgreSQL core. Requires the psycopg2 library. :pool_size: must be a 2-tuple in the form (min_connections, max_connections). """ from psycopg2.extras import NamedTupleCursor from psycopg2.pool import ThreadedConnectionPool as TPool, SimpleConnectionPool as SPool if not pool_size: pool_size = (5, 10) if threaded: pool = TPool(pool_size[0], pool_size[1], connection_string) else: pool = SPool(pool_size[0], pool_size[1], connection_string) def opener(): """Gets a PostgreSQL connection from the scope-captured connection pool.""" cn = pool.getconn() cn.cursor_factory = cursor_factory or NamedTupleCursor if edit_connection: edit_connection(cn) return cn def closer(connection): """Returns a connection to the scope-captured connection pool.""" pool.putconn(connection) return InjectedDataAccessCore( opener, closer, ("%({0})s", "%s", "{0}::{1}"), empty_params=None, supports_timezones=True, supports_returning_syntax=True, get_autocommit=get_pg_autocommit, set_autocommit=set_pg_autocommit)
python
def get_pooled_pg_core(connection_string, pool_size=None, *, cursor_factory=None, edit_connection=None, threaded=True): """Creates a pooled PostgreSQL core. Requires the psycopg2 library. :pool_size: must be a 2-tuple in the form (min_connections, max_connections). """ from psycopg2.extras import NamedTupleCursor from psycopg2.pool import ThreadedConnectionPool as TPool, SimpleConnectionPool as SPool if not pool_size: pool_size = (5, 10) if threaded: pool = TPool(pool_size[0], pool_size[1], connection_string) else: pool = SPool(pool_size[0], pool_size[1], connection_string) def opener(): """Gets a PostgreSQL connection from the scope-captured connection pool.""" cn = pool.getconn() cn.cursor_factory = cursor_factory or NamedTupleCursor if edit_connection: edit_connection(cn) return cn def closer(connection): """Returns a connection to the scope-captured connection pool.""" pool.putconn(connection) return InjectedDataAccessCore( opener, closer, ("%({0})s", "%s", "{0}::{1}"), empty_params=None, supports_timezones=True, supports_returning_syntax=True, get_autocommit=get_pg_autocommit, set_autocommit=set_pg_autocommit)
[ "def", "get_pooled_pg_core", "(", "connection_string", ",", "pool_size", "=", "None", ",", "*", ",", "cursor_factory", "=", "None", ",", "edit_connection", "=", "None", ",", "threaded", "=", "True", ")", ":", "from", "psycopg2", ".", "extras", "import", "NamedTupleCursor", "from", "psycopg2", ".", "pool", "import", "ThreadedConnectionPool", "as", "TPool", ",", "SimpleConnectionPool", "as", "SPool", "if", "not", "pool_size", ":", "pool_size", "=", "(", "5", ",", "10", ")", "if", "threaded", ":", "pool", "=", "TPool", "(", "pool_size", "[", "0", "]", ",", "pool_size", "[", "1", "]", ",", "connection_string", ")", "else", ":", "pool", "=", "SPool", "(", "pool_size", "[", "0", "]", ",", "pool_size", "[", "1", "]", ",", "connection_string", ")", "def", "opener", "(", ")", ":", "\"\"\"Gets a PostgreSQL connection from the scope-captured connection pool.\"\"\"", "cn", "=", "pool", ".", "getconn", "(", ")", "cn", ".", "cursor_factory", "=", "cursor_factory", "or", "NamedTupleCursor", "if", "edit_connection", ":", "edit_connection", "(", "cn", ")", "return", "cn", "def", "closer", "(", "connection", ")", ":", "\"\"\"Returns a connection to the scope-captured connection pool.\"\"\"", "pool", ".", "putconn", "(", "connection", ")", "return", "InjectedDataAccessCore", "(", "opener", ",", "closer", ",", "(", "\"%({0})s\"", ",", "\"%s\"", ",", "\"{0}::{1}\"", ")", ",", "empty_params", "=", "None", ",", "supports_timezones", "=", "True", ",", "supports_returning_syntax", "=", "True", ",", "get_autocommit", "=", "get_pg_autocommit", ",", "set_autocommit", "=", "set_pg_autocommit", ")" ]
Creates a pooled PostgreSQL core. Requires the psycopg2 library. :pool_size: must be a 2-tuple in the form (min_connections, max_connections).
[ "Creates", "a", "pooled", "PostgreSQL", "core", ".", "Requires", "the", "psycopg2", "library", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/core/data_access_core.py#L142-L177
246,655
treycucco/bidon
bidon/db/core/data_access_core.py
set_sqlite_autocommit
def set_sqlite_autocommit(cn, autocommit): """SQLite autocommit setter for core.""" if isinstance(autocommit, bool): cn.isolation_level = None if autocommit else "" else: cn.isolation_level = autocommit
python
def set_sqlite_autocommit(cn, autocommit): """SQLite autocommit setter for core.""" if isinstance(autocommit, bool): cn.isolation_level = None if autocommit else "" else: cn.isolation_level = autocommit
[ "def", "set_sqlite_autocommit", "(", "cn", ",", "autocommit", ")", ":", "if", "isinstance", "(", "autocommit", ",", "bool", ")", ":", "cn", ".", "isolation_level", "=", "None", "if", "autocommit", "else", "\"\"", "else", ":", "cn", ".", "isolation_level", "=", "autocommit" ]
SQLite autocommit setter for core.
[ "SQLite", "autocommit", "setter", "for", "core", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/core/data_access_core.py#L185-L190
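The boolean path maps directly onto sqlite3's isolation_level convention (None means autocommit, the empty string means implicit deferred transactions), and any non-boolean value is passed through untouched. A quick sketch against an in-memory database:

import sqlite3
from bidon.db.core.data_access_core import set_sqlite_autocommit

cn = sqlite3.connect(':memory:')

set_sqlite_autocommit(cn, True)          # isolation_level = None -> autocommit
set_sqlite_autocommit(cn, False)         # isolation_level = ""   -> implicit deferred transactions
set_sqlite_autocommit(cn, 'IMMEDIATE')   # pass-through of an explicit isolation level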
246,656
treycucco/bidon
bidon/db/core/data_access_core.py
get_sqlite_core
def get_sqlite_core(connection_string, *, cursor_factory=None, edit_connection=None): """Creates a simple SQLite3 core.""" import sqlite3 as sqlite def opener(): """Opens a single connection with the scope-captured connection string.""" cn = sqlite.connect(connection_string) if cursor_factory: cn.row_factory = cursor_factory if edit_connection: edit_connection(cn) return cn return InjectedDataAccessCore( opener, default_connection_closer, (":{0}", "?", SQL_CAST), empty_params=[], supports_timezones=True, supports_returning_syntax=False, get_autocommit=get_sqlite_autocommit, set_autocommit=set_sqlite_autocommit)
python
def get_sqlite_core(connection_string, *, cursor_factory=None, edit_connection=None): """Creates a simple SQLite3 core.""" import sqlite3 as sqlite def opener(): """Opens a single connection with the scope-captured connection string.""" cn = sqlite.connect(connection_string) if cursor_factory: cn.row_factory = cursor_factory if edit_connection: edit_connection(cn) return cn return InjectedDataAccessCore( opener, default_connection_closer, (":{0}", "?", SQL_CAST), empty_params=[], supports_timezones=True, supports_returning_syntax=False, get_autocommit=get_sqlite_autocommit, set_autocommit=set_sqlite_autocommit)
[ "def", "get_sqlite_core", "(", "connection_string", ",", "*", ",", "cursor_factory", "=", "None", ",", "edit_connection", "=", "None", ")", ":", "import", "sqlite3", "as", "sqlite", "def", "opener", "(", ")", ":", "\"\"\"Opens a single connection with the scope-captured connection string.\"\"\"", "cn", "=", "sqlite", ".", "connect", "(", "connection_string", ")", "if", "cursor_factory", ":", "cn", ".", "row_factory", "=", "cursor_factory", "if", "edit_connection", ":", "edit_connection", "(", "cn", ")", "return", "cn", "return", "InjectedDataAccessCore", "(", "opener", ",", "default_connection_closer", ",", "(", "\":{0}\"", ",", "\"?\"", ",", "SQL_CAST", ")", ",", "empty_params", "=", "[", "]", ",", "supports_timezones", "=", "True", ",", "supports_returning_syntax", "=", "False", ",", "get_autocommit", "=", "get_sqlite_autocommit", ",", "set_autocommit", "=", "set_sqlite_autocommit", ")" ]
Creates a simple SQLite3 core.
[ "Creates", "a", "simple", "SQLite3", "core", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/core/data_access_core.py#L193-L213
246,657
treycucco/bidon
bidon/db/core/data_access_core.py
get_mysql_core
def get_mysql_core(connection_args, *, cursor_factory=None, edit_connection=None): """Creates a simple MySQL core. Requires the pymysql library.""" import pymysql def opener(): """Opens a single connection with the scope-captured connection string.""" cn = pymysql.connect(**connection_args) if cursor_factory: cn.cursorclass = cursor_factory if edit_connection: edit_connection(cn) return cn return InjectedDataAccessCore( opener, default_connection_closer, ("%({0})s", "%s", SQL_CAST), empty_params=None, supports_timezones=False, supports_returning_syntax=False, get_autocommit=get_mysql_autocommit, set_autocommit=set_mysql_autocommit)
python
def get_mysql_core(connection_args, *, cursor_factory=None, edit_connection=None): """Creates a simple MySQL core. Requires the pymysql library.""" import pymysql def opener(): """Opens a single connection with the scope-captured connection string.""" cn = pymysql.connect(**connection_args) if cursor_factory: cn.cursorclass = cursor_factory if edit_connection: edit_connection(cn) return cn return InjectedDataAccessCore( opener, default_connection_closer, ("%({0})s", "%s", SQL_CAST), empty_params=None, supports_timezones=False, supports_returning_syntax=False, get_autocommit=get_mysql_autocommit, set_autocommit=set_mysql_autocommit)
[ "def", "get_mysql_core", "(", "connection_args", ",", "*", ",", "cursor_factory", "=", "None", ",", "edit_connection", "=", "None", ")", ":", "import", "pymysql", "def", "opener", "(", ")", ":", "\"\"\"Opens a single connection with the scope-captured connection string.\"\"\"", "cn", "=", "pymysql", ".", "connect", "(", "*", "*", "connection_args", ")", "if", "cursor_factory", ":", "cn", ".", "cursorclass", "=", "cursor_factory", "if", "edit_connection", ":", "edit_connection", "(", "cn", ")", "return", "cn", "return", "InjectedDataAccessCore", "(", "opener", ",", "default_connection_closer", ",", "(", "\"%({0})s\"", ",", "\"%s\"", ",", "SQL_CAST", ")", ",", "empty_params", "=", "None", ",", "supports_timezones", "=", "False", ",", "supports_returning_syntax", "=", "False", ",", "get_autocommit", "=", "get_mysql_autocommit", ",", "set_autocommit", "=", "set_mysql_autocommit", ")" ]
Creates a simple MySQL core. Requires the pymysql library.
[ "Creates", "a", "simple", "MySQL", "core", ".", "Requires", "the", "pymysql", "library", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/core/data_access_core.py#L226-L247
246,658
treycucco/bidon
bidon/db/core/data_access_core.py
InjectedDataAccessCore.close
def close(self, connection, *, commit=True): """Close the connection using the closer method passed to the constructor.""" if commit: connection.commit() else: connection.rollback() self.closer(connection)
python
def close(self, connection, *, commit=True): """Close the connection using the closer method passed to the constructor.""" if commit: connection.commit() else: connection.rollback() self.closer(connection)
[ "def", "close", "(", "self", ",", "connection", ",", "*", ",", "commit", "=", "True", ")", ":", "if", "commit", ":", "connection", ".", "commit", "(", ")", "else", ":", "connection", ".", "rollback", "(", ")", "self", ".", "closer", "(", "connection", ")" ]
Close the connection using the closer method passed to the constructor.
[ "Close", "the", "connection", "using", "the", "closer", "method", "passed", "to", "the", "constructor", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/core/data_access_core.py#L86-L92
246,659
MartinThoma/geocodertools
geocodertools/reverse.py
download
def download(): """Download cities database.""" url = "http://download.geonames.org/export/dump/cities1000.zip" logging.info("Download cities from %s", url) if not os.path.exists(MISC_PATH): os.makedirs(MISC_PATH) zip_path = os.path.join(MISC_PATH, "cities1000.zip") urlretrieve(url, zip_path) with zipfile.ZipFile(zip_path, "r") as z: z.extractall(MISC_PATH)
python
def download(): """Download cities database.""" url = "http://download.geonames.org/export/dump/cities1000.zip" logging.info("Download cities from %s", url) if not os.path.exists(MISC_PATH): os.makedirs(MISC_PATH) zip_path = os.path.join(MISC_PATH, "cities1000.zip") urlretrieve(url, zip_path) with zipfile.ZipFile(zip_path, "r") as z: z.extractall(MISC_PATH)
[ "def", "download", "(", ")", ":", "url", "=", "\"http://download.geonames.org/export/dump/cities1000.zip\"", "logging", ".", "info", "(", "\"Download cities from %s\"", ",", "url", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "MISC_PATH", ")", ":", "os", ".", "makedirs", "(", "MISC_PATH", ")", "zip_path", "=", "os", ".", "path", ".", "join", "(", "MISC_PATH", ",", "\"cities1000.zip\"", ")", "urlretrieve", "(", "url", ",", "zip_path", ")", "with", "zipfile", ".", "ZipFile", "(", "zip_path", ",", "\"r\"", ")", "as", "z", ":", "z", ".", "extractall", "(", "MISC_PATH", ")" ]
Download cities database.
[ "Download", "cities", "database", "." ]
d590dbde7b03497df713bd9ae17d739319e87096
https://github.com/MartinThoma/geocodertools/blob/d590dbde7b03497df713bd9ae17d739319e87096/geocodertools/reverse.py#L32-L42
246,660
MartinThoma/geocodertools
geocodertools/reverse.py
BinClassifier.get
def get(self, pos): """Get the closest dataset.""" latitude = int(round(pos['latitude'])) search_set = self.bins[latitude] i = 1 if latitude - i >= -90: search_set += self.bins[latitude-i] if latitude + i <= 90: search_set += self.bins[latitude+i] while len(search_set) == 0 and i <= 200: if latitude - i >= -90: search_set += self.bins[latitude-i] if latitude + i <= 90: search_set += self.bins[latitude+i] i += 1 return find_closest(search_set, pos)
python
def get(self, pos): """Get the closest dataset.""" latitude = int(round(pos['latitude'])) search_set = self.bins[latitude] i = 1 if latitude - i >= -90: search_set += self.bins[latitude-i] if latitude + i <= 90: search_set += self.bins[latitude+i] while len(search_set) == 0 and i <= 200: if latitude - i >= -90: search_set += self.bins[latitude-i] if latitude + i <= 90: search_set += self.bins[latitude+i] i += 1 return find_closest(search_set, pos)
[ "def", "get", "(", "self", ",", "pos", ")", ":", "latitude", "=", "int", "(", "round", "(", "pos", "[", "'latitude'", "]", ")", ")", "search_set", "=", "self", ".", "bins", "[", "latitude", "]", "i", "=", "1", "if", "latitude", "-", "i", ">=", "-", "90", ":", "search_set", "+=", "self", ".", "bins", "[", "latitude", "-", "i", "]", "if", "latitude", "+", "i", "<=", "90", ":", "search_set", "+=", "self", ".", "bins", "[", "latitude", "+", "i", "]", "while", "len", "(", "search_set", ")", "==", "0", "and", "i", "<=", "200", ":", "if", "latitude", "-", "i", ">=", "-", "90", ":", "search_set", "+=", "self", ".", "bins", "[", "latitude", "-", "i", "]", "if", "latitude", "+", "i", "<=", "90", ":", "search_set", "+=", "self", ".", "bins", "[", "latitude", "+", "i", "]", "i", "+=", "1", "return", "find_closest", "(", "search_set", ",", "pos", ")" ]
Get the closest dataset.
[ "Get", "the", "closest", "dataset", "." ]
d590dbde7b03497df713bd9ae17d739319e87096
https://github.com/MartinThoma/geocodertools/blob/d590dbde7b03497df713bd9ae17d739319e87096/geocodertools/reverse.py#L141-L156
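The widening latitude-band search in BinClassifier.get can be illustrated with a self-contained sketch; the bin layout and the closest-match stand-in below are simplified assumptions, not geocodertools' actual data structures.

import collections

def nearest_in_bins(bins, pos):
    """Widen the latitude window one degree per step until candidates appear."""
    lat = int(round(pos["latitude"]))
    candidates = list(bins.get(lat, []))
    i = 1
    while not candidates and i <= 200:
        if lat - i >= -90:
            candidates += bins.get(lat - i, [])
        if lat + i <= 90:
            candidates += bins.get(lat + i, [])
        i += 1
    # Stand-in for find_closest(): the smallest latitude gap wins.
    return min(candidates, key=lambda c: abs(c["latitude"] - pos["latitude"]), default=None)

bins = collections.defaultdict(list)
bins[52].append({"latitude": 52.52, "name": "Berlin"})
print(nearest_in_bins(bins, {"latitude": 50.1}))  # widens the window until the 52-degree band is reached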
246,661
westerncapelabs/django-snappy-vumi-bouncer
snappybouncer/api.py
urlencodeSerializer.from_urlencode
def from_urlencode(self, data, options=None): """ handles basic formencoded url posts """ qs = dict((k, v if len(v) > 1 else v[0]) for k, v in urlparse.parse_qs(data).iteritems()) return qs
python
def from_urlencode(self, data, options=None): """ handles basic formencoded url posts """ qs = dict((k, v if len(v) > 1 else v[0]) for k, v in urlparse.parse_qs(data).iteritems()) return qs
[ "def", "from_urlencode", "(", "self", ",", "data", ",", "options", "=", "None", ")", ":", "qs", "=", "dict", "(", "(", "k", ",", "v", "if", "len", "(", "v", ")", ">", "1", "else", "v", "[", "0", "]", ")", "for", "k", ",", "v", "in", "urlparse", ".", "parse_qs", "(", "data", ")", ".", "iteritems", "(", ")", ")", "return", "qs" ]
handles basic formencoded url posts
[ "handles", "basic", "formencoded", "url", "posts" ]
5750827020aa83f0f5eecee87a2fe8f19dfaac16
https://github.com/westerncapelabs/django-snappy-vumi-bouncer/blob/5750827020aa83f0f5eecee87a2fe8f19dfaac16/snappybouncer/api.py#L94-L98
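The flattening behaviour of from_urlencode (single-element lists collapse to scalars, repeated keys stay as lists) can be reproduced with the Python 3 standard library; this is an independent sketch, not the serializer class itself.

from urllib.parse import parse_qs

def flatten_qs(data):
    # Keep a list only when a key is repeated, mirroring the serializer above.
    return {k: v if len(v) > 1 else v[0] for k, v in parse_qs(data).items()}

print(flatten_qs("name=alice&tag=a&tag=b"))
# {'name': 'alice', 'tag': ['a', 'b']}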
246,662
Jayin/ETipsService
service/lifeService.py
LifeService.get_electricity_info
def get_electricity_info(self, apart_id, meter_room):
        """get electricity info

        :param apart_id: building number
        :param meter_room: dormitory room number
        """
        apart_id = str(apart_id)
        meter_room = str(meter_room)
        try:
            content = LifeService._get_electricity_info_html(apart_id, meter_room)
        except KeyError as e:
            _.d(e.message)
            result = {
                'response': None
            }
            return _.to_json_string(result)
        soup = BeautifulSoup(content)
        tags = soup.find_all(name='span', class_='STYLE7')
        result = {
            'response': {
                'apart': _.trim(tags[0].string),
                'apart_id': _.trim(tags[1].string),
                'used': _.trim(tags[2].string),
                'left': _.trim(tags[3].string),
                'update_time': _.trim(tags[4].string)
            }
        }
        return _.to_json_string(result)

python
def get_electricity_info(self, apart_id, meter_room):
        """get electricity info

        :param apart_id: building number
        :param meter_room: dormitory room number
        """
        apart_id = str(apart_id)
        meter_room = str(meter_room)
        try:
            content = LifeService._get_electricity_info_html(apart_id, meter_room)
        except KeyError as e:
            _.d(e.message)
            result = {
                'response': None
            }
            return _.to_json_string(result)
        soup = BeautifulSoup(content)
        tags = soup.find_all(name='span', class_='STYLE7')
        result = {
            'response': {
                'apart': _.trim(tags[0].string),
                'apart_id': _.trim(tags[1].string),
                'used': _.trim(tags[2].string),
                'left': _.trim(tags[3].string),
                'update_time': _.trim(tags[4].string)
            }
        }
        return _.to_json_string(result)
[ "def", "get_electricity_info", "(", "self", ",", "apart_id", ",", "meter_room", ")", ":", "apart_id", "=", "str", "(", "apart_id", ")", "meter_room", "=", "str", "(", "meter_room", ")", "try", ":", "content", "=", "LifeService", ".", "_get_electricity_info_html", "(", "apart_id", ",", "meter_room", ")", "except", "KeyError", "as", "e", ":", "_", ".", "d", "(", "e", ".", "message", ")", "result", "=", "{", "'response'", ":", "None", "}", "return", "_", ".", "to_json_string", "(", "result", ")", "soup", "=", "BeautifulSoup", "(", "content", ")", "tags", "=", "soup", ".", "find_all", "(", "name", "=", "'span'", ",", "class_", "=", "'STYLE7'", ")", "result", "=", "{", "'response'", ":", "{", "'apart'", ":", "_", ".", "trim", "(", "tags", "[", "0", "]", ".", "string", ")", ",", "'apart_id'", ":", "_", ".", "trim", "(", "tags", "[", "1", "]", ".", "string", ")", ",", "'used'", ":", "_", ".", "trim", "(", "tags", "[", "2", "]", ".", "string", ")", ",", "'left'", ":", "_", ".", "trim", "(", "tags", "[", "3", "]", ".", "string", ")", ",", "'update_time'", ":", "_", ".", "trim", "(", "tags", "[", "4", "]", ".", "string", ")", "}", "}", "return", "_", ".", "to_json_string", "(", "result", ")" ]
get electricity info

    :param apart_id: building number
    :param meter_room: dormitory room number
[ "get", "electricity", "info" ]
1a42612a5e5d11bec0ec1a26c99dec6fe216fca4
https://github.com/Jayin/ETipsService/blob/1a42612a5e5d11bec0ec1a26c99dec6fe216fca4/service/lifeService.py#L67-L94
246,663
kervi/kervi-core
kervi/hal/gpio.py
IGPIODeviceDriver.set_channels
def set_channels(self, channels): """Sets the state of multiple channels in one operation. :param channels: A dictionary where keys are channels and values the value to set for each channel. :type channels: ``dict`` """ for key in channels: self.set(key, channels[key])
python
def set_channels(self, channels): """Sets the state of multiple channels in one operation. :param channels: A dictionary where keys are channels and values the value to set for each channel. :type channels: ``dict`` """ for key in channels: self.set(key, channels[key])
[ "def", "set_channels", "(", "self", ",", "channels", ")", ":", "for", "key", "in", "channels", ":", "self", ".", "set", "(", "key", ",", "channels", "[", "key", "]", ")" ]
Sets the state of multiple channels in one operation. :param channels: A dictionary where keys are channels and values the value to set for each channel. :type channels: ``dict``
[ "Sets", "the", "state", "of", "multiple", "channels", "in", "one", "operation", "." ]
3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23
https://github.com/kervi/kervi-core/blob/3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23/kervi/hal/gpio.py#L199-L209
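A toy driver showing how set_channels dispatches to set() once per key; the channel numbers and the concrete set() implementation are invented for the example, since IGPIODeviceDriver only defines the interface.

class FakeGPIODriver:
    def __init__(self):
        self.state = {}

    def set(self, channel, value):
        # A real driver would drive hardware here; this one just records the value.
        self.state[channel] = value

    def set_channels(self, channels):
        for key in channels:
            self.set(key, channels[key])

driver = FakeGPIODriver()
driver.set_channels({17: True, 27: False})
print(driver.state)  # {17: True, 27: False}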
246,664
slarse/pdfebc-core
pdfebc_core/compress.py
_get_pdf_filenames_at
def _get_pdf_filenames_at(source_directory): """Find all PDF files in the specified directory. Args: source_directory (str): The source directory. Returns: list(str): Filepaths to all PDF files in the specified directory. Raises: ValueError """ if not os.path.isdir(source_directory): raise ValueError("%s is not a directory!" % source_directory) return [os.path.join(source_directory, filename) for filename in os.listdir(source_directory) if filename.endswith(PDF_EXTENSION)]
python
def _get_pdf_filenames_at(source_directory): """Find all PDF files in the specified directory. Args: source_directory (str): The source directory. Returns: list(str): Filepaths to all PDF files in the specified directory. Raises: ValueError """ if not os.path.isdir(source_directory): raise ValueError("%s is not a directory!" % source_directory) return [os.path.join(source_directory, filename) for filename in os.listdir(source_directory) if filename.endswith(PDF_EXTENSION)]
[ "def", "_get_pdf_filenames_at", "(", "source_directory", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "source_directory", ")", ":", "raise", "ValueError", "(", "\"%s is not a directory!\"", "%", "source_directory", ")", "return", "[", "os", ".", "path", ".", "join", "(", "source_directory", ",", "filename", ")", "for", "filename", "in", "os", ".", "listdir", "(", "source_directory", ")", "if", "filename", ".", "endswith", "(", "PDF_EXTENSION", ")", "]" ]
Find all PDF files in the specified directory. Args: source_directory (str): The source directory. Returns: list(str): Filepaths to all PDF files in the specified directory. Raises: ValueError
[ "Find", "all", "PDF", "files", "in", "the", "specified", "directory", "." ]
fc40857bc42365b7434714333e37d7a3487603a0
https://github.com/slarse/pdfebc-core/blob/fc40857bc42365b7434714333e37d7a3487603a0/pdfebc_core/compress.py#L36-L52
246,665
slarse/pdfebc-core
pdfebc_core/compress.py
compress_pdf
def compress_pdf(filepath, output_path, ghostscript_binary): """Compress a single PDF file. Args: filepath (str): Path to the PDF file. output_path (str): Output path. ghostscript_binary (str): Name/alias of the Ghostscript binary. Raises: ValueError FileNotFoundError """ if not filepath.endswith(PDF_EXTENSION): raise ValueError("Filename must end with .pdf!\n%s does not." % filepath) try: file_size = os.stat(filepath).st_size if file_size < FILE_SIZE_LOWER_LIMIT: LOGGER.info(NOT_COMPRESSING.format(filepath, file_size, FILE_SIZE_LOWER_LIMIT)) process = subprocess.Popen(['cp', filepath, output_path]) else: LOGGER.info(COMPRESSING.format(filepath)) process = subprocess.Popen( [ghostscript_binary, "-sDEVICE=pdfwrite", "-dCompatabilityLevel=1.4", "-dPDFSETTINGS=/ebook", "-dNOPAUSE", "-dQUIET", "-dBATCH", "-sOutputFile=%s" % output_path, filepath] ) except FileNotFoundError: msg = GS_NOT_INSTALLED.format(ghostscript_binary) raise FileNotFoundError(msg) process.communicate() LOGGER.info(FILE_DONE.format(output_path))
python
def compress_pdf(filepath, output_path, ghostscript_binary): """Compress a single PDF file. Args: filepath (str): Path to the PDF file. output_path (str): Output path. ghostscript_binary (str): Name/alias of the Ghostscript binary. Raises: ValueError FileNotFoundError """ if not filepath.endswith(PDF_EXTENSION): raise ValueError("Filename must end with .pdf!\n%s does not." % filepath) try: file_size = os.stat(filepath).st_size if file_size < FILE_SIZE_LOWER_LIMIT: LOGGER.info(NOT_COMPRESSING.format(filepath, file_size, FILE_SIZE_LOWER_LIMIT)) process = subprocess.Popen(['cp', filepath, output_path]) else: LOGGER.info(COMPRESSING.format(filepath)) process = subprocess.Popen( [ghostscript_binary, "-sDEVICE=pdfwrite", "-dCompatabilityLevel=1.4", "-dPDFSETTINGS=/ebook", "-dNOPAUSE", "-dQUIET", "-dBATCH", "-sOutputFile=%s" % output_path, filepath] ) except FileNotFoundError: msg = GS_NOT_INSTALLED.format(ghostscript_binary) raise FileNotFoundError(msg) process.communicate() LOGGER.info(FILE_DONE.format(output_path))
[ "def", "compress_pdf", "(", "filepath", ",", "output_path", ",", "ghostscript_binary", ")", ":", "if", "not", "filepath", ".", "endswith", "(", "PDF_EXTENSION", ")", ":", "raise", "ValueError", "(", "\"Filename must end with .pdf!\\n%s does not.\"", "%", "filepath", ")", "try", ":", "file_size", "=", "os", ".", "stat", "(", "filepath", ")", ".", "st_size", "if", "file_size", "<", "FILE_SIZE_LOWER_LIMIT", ":", "LOGGER", ".", "info", "(", "NOT_COMPRESSING", ".", "format", "(", "filepath", ",", "file_size", ",", "FILE_SIZE_LOWER_LIMIT", ")", ")", "process", "=", "subprocess", ".", "Popen", "(", "[", "'cp'", ",", "filepath", ",", "output_path", "]", ")", "else", ":", "LOGGER", ".", "info", "(", "COMPRESSING", ".", "format", "(", "filepath", ")", ")", "process", "=", "subprocess", ".", "Popen", "(", "[", "ghostscript_binary", ",", "\"-sDEVICE=pdfwrite\"", ",", "\"-dCompatabilityLevel=1.4\"", ",", "\"-dPDFSETTINGS=/ebook\"", ",", "\"-dNOPAUSE\"", ",", "\"-dQUIET\"", ",", "\"-dBATCH\"", ",", "\"-sOutputFile=%s\"", "%", "output_path", ",", "filepath", "]", ")", "except", "FileNotFoundError", ":", "msg", "=", "GS_NOT_INSTALLED", ".", "format", "(", "ghostscript_binary", ")", "raise", "FileNotFoundError", "(", "msg", ")", "process", ".", "communicate", "(", ")", "LOGGER", ".", "info", "(", "FILE_DONE", ".", "format", "(", "output_path", ")", ")" ]
Compress a single PDF file. Args: filepath (str): Path to the PDF file. output_path (str): Output path. ghostscript_binary (str): Name/alias of the Ghostscript binary. Raises: ValueError FileNotFoundError
[ "Compress", "a", "single", "PDF", "file", "." ]
fc40857bc42365b7434714333e37d7a3487603a0
https://github.com/slarse/pdfebc-core/blob/fc40857bc42365b7434714333e37d7a3487603a0/pdfebc_core/compress.py#L54-L85
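A hedged usage sketch for compress_pdf: the paths are placeholders, "gs" is the usual Ghostscript binary name on Linux/macOS, and the call assumes pdfebc_core and Ghostscript are installed.

from pdfebc_core.compress import compress_pdf

# Files below the size threshold are copied; larger ones are run through
# gs -sDEVICE=pdfwrite -dPDFSETTINGS=/ebook ... -sOutputFile=<output> <input>
compress_pdf("input/report.pdf", "output/report.pdf", "gs")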
246,666
slarse/pdfebc-core
pdfebc_core/compress.py
compress_multiple_pdfs
def compress_multiple_pdfs(source_directory, output_directory, ghostscript_binary): """Compress all PDF files in the current directory and place the output in the given output directory. This is a generator function that first yields the amount of files to be compressed, and then yields the output path of each file. Args: source_directory (str): Filepath to the source directory. output_directory (str): Filepath to the output directory. ghostscript_binary (str): Name of the Ghostscript binary. Returns: list(str): paths to outputs. """ source_paths = _get_pdf_filenames_at(source_directory) yield len(source_paths) for source_path in source_paths: output = os.path.join(output_directory, os.path.basename(source_path)) compress_pdf(source_path, output, ghostscript_binary) yield output
python
def compress_multiple_pdfs(source_directory, output_directory, ghostscript_binary): """Compress all PDF files in the current directory and place the output in the given output directory. This is a generator function that first yields the amount of files to be compressed, and then yields the output path of each file. Args: source_directory (str): Filepath to the source directory. output_directory (str): Filepath to the output directory. ghostscript_binary (str): Name of the Ghostscript binary. Returns: list(str): paths to outputs. """ source_paths = _get_pdf_filenames_at(source_directory) yield len(source_paths) for source_path in source_paths: output = os.path.join(output_directory, os.path.basename(source_path)) compress_pdf(source_path, output, ghostscript_binary) yield output
[ "def", "compress_multiple_pdfs", "(", "source_directory", ",", "output_directory", ",", "ghostscript_binary", ")", ":", "source_paths", "=", "_get_pdf_filenames_at", "(", "source_directory", ")", "yield", "len", "(", "source_paths", ")", "for", "source_path", "in", "source_paths", ":", "output", "=", "os", ".", "path", ".", "join", "(", "output_directory", ",", "os", ".", "path", ".", "basename", "(", "source_path", ")", ")", "compress_pdf", "(", "source_path", ",", "output", ",", "ghostscript_binary", ")", "yield", "output" ]
Compress all PDF files in the current directory and place the output in the given output directory. This is a generator function that first yields the amount of files to be compressed, and then yields the output path of each file. Args: source_directory (str): Filepath to the source directory. output_directory (str): Filepath to the output directory. ghostscript_binary (str): Name of the Ghostscript binary. Returns: list(str): paths to outputs.
[ "Compress", "all", "PDF", "files", "in", "the", "current", "directory", "and", "place", "the", "output", "in", "the", "given", "output", "directory", ".", "This", "is", "a", "generator", "function", "that", "first", "yields", "the", "amount", "of", "files", "to", "be", "compressed", "and", "then", "yields", "the", "output", "path", "of", "each", "file", "." ]
fc40857bc42365b7434714333e37d7a3487603a0
https://github.com/slarse/pdfebc-core/blob/fc40857bc42365b7434714333e37d7a3487603a0/pdfebc_core/compress.py#L87-L105
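Because compress_multiple_pdfs is a generator, the first yielded value is the file count and the remaining values are output paths; a consumption sketch with made-up directories, again assuming pdfebc_core and Ghostscript are installed.

from pdfebc_core.compress import compress_multiple_pdfs

gen = compress_multiple_pdfs("in_dir", "out_dir", "gs")
total = next(gen)  # first yield: number of PDFs found in in_dir
for i, out_path in enumerate(gen, start=1):
    print("compressed %d/%d -> %s" % (i, total, out_path))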
246,667
oblalex/candv
candv/base.py
Constant._post_init
def _post_init(self, name, container=None): """ Called automatically by container after container's class construction. """ self.name = name self.container = container
python
def _post_init(self, name, container=None): """ Called automatically by container after container's class construction. """ self.name = name self.container = container
[ "def", "_post_init", "(", "self", ",", "name", ",", "container", "=", "None", ")", ":", "self", ".", "name", "=", "name", "self", ".", "container", "=", "container" ]
Called automatically by container after container's class construction.
[ "Called", "automatically", "by", "container", "after", "container", "s", "class", "construction", "." ]
0b522bd24f4045844a04793b456f1135d093f280
https://github.com/oblalex/candv/blob/0b522bd24f4045844a04793b456f1135d093f280/candv/base.py#L35-L40
246,668
b3j0f/conf
b3j0f/conf/driver/base.py
ConfDriver.pathresource
def pathresource(self, rscpath=None, logger=None): """Returns specific resource. :param str rscpath: resource path. :param Logger logger: logger to use. :param bool error: raise internal error if True (False by default). :param bool force: create the resource even if rscpath does not exist. :return: specific configuration resource. """ result = None try: result = self._pathresource(rscpath=rscpath) except Exception as ex: if logger is not None: msg = 'Error while getting resource from {0}.'.format(rscpath) full_msg = '{0} {1}: {2}'.format(msg, ex, format_exc()) logger.error(full_msg) return result
python
def pathresource(self, rscpath=None, logger=None): """Returns specific resource. :param str rscpath: resource path. :param Logger logger: logger to use. :param bool error: raise internal error if True (False by default). :param bool force: create the resource even if rscpath does not exist. :return: specific configuration resource. """ result = None try: result = self._pathresource(rscpath=rscpath) except Exception as ex: if logger is not None: msg = 'Error while getting resource from {0}.'.format(rscpath) full_msg = '{0} {1}: {2}'.format(msg, ex, format_exc()) logger.error(full_msg) return result
[ "def", "pathresource", "(", "self", ",", "rscpath", "=", "None", ",", "logger", "=", "None", ")", ":", "result", "=", "None", "try", ":", "result", "=", "self", ".", "_pathresource", "(", "rscpath", "=", "rscpath", ")", "except", "Exception", "as", "ex", ":", "if", "logger", "is", "not", "None", ":", "msg", "=", "'Error while getting resource from {0}.'", ".", "format", "(", "rscpath", ")", "full_msg", "=", "'{0} {1}: {2}'", ".", "format", "(", "msg", ",", "ex", ",", "format_exc", "(", ")", ")", "logger", ".", "error", "(", "full_msg", ")", "return", "result" ]
Returns specific resource. :param str rscpath: resource path. :param Logger logger: logger to use. :param bool error: raise internal error if True (False by default). :param bool force: create the resource even if rscpath does not exist. :return: specific configuration resource.
[ "Returns", "specific", "resource", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/driver/base.py#L82-L103
246,669
b3j0f/conf
b3j0f/conf/driver/base.py
ConfDriver.getconf
def getconf(self, path, conf=None, logger=None):
    """Parse a configuration path with input conf and returns parameters by
    param name.

    :param str path: conf resource path to parse and from which to get parameters.
    :param Configuration conf: conf to fill with path values and conf param names.
    :param Logger logger: logger to use in order to trace information/error.
    :rtype: Configuration
    """
    result = conf
    pathconf = None
    rscpaths = self.rscpaths(path=path)
    for rscpath in rscpaths:
        pathconf = self._getconf(rscpath=rscpath, logger=logger, conf=conf)
        if pathconf is not None:
            if result is None:
                result = pathconf
            else:
                result.update(pathconf)
    return result
python
def getconf(self, path, conf=None, logger=None):
    """Parse a configuration path with input conf and returns parameters by
    param name.

    :param str path: conf resource path to parse and from which to get parameters.
    :param Configuration conf: conf to fill with path values and conf param names.
    :param Logger logger: logger to use in order to trace information/error.
    :rtype: Configuration
    """
    result = conf
    pathconf = None
    rscpaths = self.rscpaths(path=path)
    for rscpath in rscpaths:
        pathconf = self._getconf(rscpath=rscpath, logger=logger, conf=conf)
        if pathconf is not None:
            if result is None:
                result = pathconf
            else:
                result.update(pathconf)
    return result
[ "def", "getconf", "(", "self", ",", "path", ",", "conf", "=", "None", ",", "logger", "=", "None", ")", ":", "result", "=", "conf", "pathconf", "=", "None", "rscpaths", "=", "self", ".", "rscpaths", "(", "path", "=", "path", ")", "for", "rscpath", "in", "rscpaths", ":", "pathconf", "=", "self", ".", "_getconf", "(", "rscpath", "=", "rscpath", ",", "logger", "=", "logger", ",", "conf", "=", "conf", ")", "if", "pathconf", "is", "not", "None", ":", "if", "result", "is", "None", ":", "result", "=", "pathconf", "else", ":", "result", ".", "update", "(", "pathconf", ")", "return", "result" ]
Parse a configuration path with input conf and returns parameters by
    param name.

    :param str path: conf resource path to parse and from which to get parameters.
    :param Configuration conf: conf to fill with path values and conf param names.
    :param Logger logger: logger to use in order to trace information/error.
    :rtype: Configuration
[ "Parse", "a", "configuration", "path", "with", "input", "conf", "and", "returns", "parameters", "by", "param", "name", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/driver/base.py#L105-L135
246,670
b3j0f/conf
b3j0f/conf/driver/base.py
ConfDriver.setconf
def setconf(self, conf, rscpath, logger=None):
    """Set input conf in input path.

    :param Configuration conf: conf to write to path.
    :param str rscpath: specific resource path to use.
    :param Logger logger: used to log info/errors.
    :param bool error: raise caught errors.
    :raises: ConfDriver.Error in case of error and input error.
    """
    resource = self.pathresource(rscpath=rscpath, logger=logger)
    if resource is None:
        resource = self.resource()
    try:
        self._setconf(conf=conf, resource=resource, rscpath=rscpath)
    except Exception as ex:
        if logger is not None:
            msg = 'Error while setting conf to {0}.'.format(rscpath)
            full_msg = '{0} {1}: {2}'.format(msg, ex, format_exc())
            logger.error(full_msg)
        reraise(self.Error, self.Error(msg))
python
def setconf(self, conf, rscpath, logger=None):
    """Set input conf in input path.

    :param Configuration conf: conf to write to path.
    :param str rscpath: specific resource path to use.
    :param Logger logger: used to log info/errors.
    :param bool error: raise caught errors.
    :raises: ConfDriver.Error in case of error and input error.
    """
    resource = self.pathresource(rscpath=rscpath, logger=logger)
    if resource is None:
        resource = self.resource()
    try:
        self._setconf(conf=conf, resource=resource, rscpath=rscpath)
    except Exception as ex:
        if logger is not None:
            msg = 'Error while setting conf to {0}.'.format(rscpath)
            full_msg = '{0} {1}: {2}'.format(msg, ex, format_exc())
            logger.error(full_msg)
        reraise(self.Error, self.Error(msg))
[ "def", "setconf", "(", "self", ",", "conf", ",", "rscpath", ",", "logger", "=", "None", ")", ":", "resource", "=", "self", ".", "pathresource", "(", "rscpath", "=", "rscpath", ",", "logger", "=", "logger", ")", "if", "resource", "is", "None", ":", "resource", "=", "self", ".", "resource", "(", ")", "try", ":", "self", ".", "_setconf", "(", "conf", "=", "conf", ",", "resource", "=", "resource", ",", "rscpath", "=", "rscpath", ")", "except", "Exception", "as", "ex", ":", "if", "logger", "is", "not", "None", ":", "msg", "=", "'Error while setting conf to {0}.'", ".", "format", "(", "rscpath", ")", "full_msg", "=", "'{0} {1}: {2}'", ".", "format", "(", "msg", ",", "ex", ",", "format_exc", "(", ")", ")", "logger", ".", "error", "(", "full_msg", ")", "reraise", "(", "self", ".", "Error", ",", "self", ".", "Error", "(", "msg", ")", ")" ]
Set input conf in input path.

    :param Configuration conf: conf to write to path.
    :param str rscpath: specific resource path to use.
    :param Logger logger: used to log info/errors.
    :param bool error: raise caught errors.
    :raises: ConfDriver.Error in case of error and input error.
[ "Set", "input", "conf", "in", "input", "path", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/driver/base.py#L137-L160
246,671
b3j0f/conf
b3j0f/conf/driver/base.py
ConfDriver._getconf
def _getconf(self, rscpath, logger=None, conf=None): """Get specific conf from one driver path. :param str rscpath: resource path. :param Logger logger: logger to use. """ result = None resource = self.pathresource(rscpath=rscpath, logger=logger) if resource is not None: for cname in self._cnames(resource=resource): category = Category(name=cname) if result is None: result = Configuration() result += category for param in self._params(resource=resource, cname=cname): if conf is not None: confparam = None if cname in conf and param.name in conf[cname]: confparam = conf[cname][param.name] else: confparam = conf.param(pname=param.name) if confparam is not None: svalue = param.svalue param.update(confparam) if svalue is not None: param.svalue = svalue param.resolve() category += param return result
python
def _getconf(self, rscpath, logger=None, conf=None): """Get specific conf from one driver path. :param str rscpath: resource path. :param Logger logger: logger to use. """ result = None resource = self.pathresource(rscpath=rscpath, logger=logger) if resource is not None: for cname in self._cnames(resource=resource): category = Category(name=cname) if result is None: result = Configuration() result += category for param in self._params(resource=resource, cname=cname): if conf is not None: confparam = None if cname in conf and param.name in conf[cname]: confparam = conf[cname][param.name] else: confparam = conf.param(pname=param.name) if confparam is not None: svalue = param.svalue param.update(confparam) if svalue is not None: param.svalue = svalue param.resolve() category += param return result
[ "def", "_getconf", "(", "self", ",", "rscpath", ",", "logger", "=", "None", ",", "conf", "=", "None", ")", ":", "result", "=", "None", "resource", "=", "self", ".", "pathresource", "(", "rscpath", "=", "rscpath", ",", "logger", "=", "logger", ")", "if", "resource", "is", "not", "None", ":", "for", "cname", "in", "self", ".", "_cnames", "(", "resource", "=", "resource", ")", ":", "category", "=", "Category", "(", "name", "=", "cname", ")", "if", "result", "is", "None", ":", "result", "=", "Configuration", "(", ")", "result", "+=", "category", "for", "param", "in", "self", ".", "_params", "(", "resource", "=", "resource", ",", "cname", "=", "cname", ")", ":", "if", "conf", "is", "not", "None", ":", "confparam", "=", "None", "if", "cname", "in", "conf", "and", "param", ".", "name", "in", "conf", "[", "cname", "]", ":", "confparam", "=", "conf", "[", "cname", "]", "[", "param", ".", "name", "]", "else", ":", "confparam", "=", "conf", ".", "param", "(", "pname", "=", "param", ".", "name", ")", "if", "confparam", "is", "not", "None", ":", "svalue", "=", "param", ".", "svalue", "param", ".", "update", "(", "confparam", ")", "if", "svalue", "is", "not", "None", ":", "param", ".", "svalue", "=", "svalue", "param", ".", "resolve", "(", ")", "category", "+=", "param", "return", "result" ]
Get specific conf from one driver path. :param str rscpath: resource path. :param Logger logger: logger to use.
[ "Get", "specific", "conf", "from", "one", "driver", "path", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/driver/base.py#L162-L206
246,672
minhhoit/yacms
yacms/galleries/models.py
BaseGallery.save
def save(self, delete_zip_import=True, *args, **kwargs): """ If a zip file is uploaded, extract any images from it and add them to the gallery, before removing the zip file. """ super(BaseGallery, self).save(*args, **kwargs) if self.zip_import: zip_file = ZipFile(self.zip_import) for name in zip_file.namelist(): data = zip_file.read(name) try: from PIL import Image image = Image.open(BytesIO(data)) image.load() image = Image.open(BytesIO(data)) image.verify() except ImportError: pass except: continue name = os.path.split(name)[1] # In python3, name is a string. Convert it to bytes. if not isinstance(name, bytes): try: name = name.encode('cp437') except UnicodeEncodeError: # File name includes characters that aren't in cp437, # which isn't supported by most zip tooling. They will # not appear correctly. tempname = name # Decode byte-name. if isinstance(name, bytes): encoding = charsetdetect(name)['encoding'] tempname = name.decode(encoding) # A gallery with a slug of "/" tries to extract files # to / on disk; see os.path.join docs. slug = self.slug if self.slug != "/" else "" path = os.path.join(GALLERIES_UPLOAD_DIR, slug, tempname) try: saved_path = default_storage.save(path, ContentFile(data)) except UnicodeEncodeError: from warnings import warn warn("A file was saved that contains unicode " "characters in its path, but somehow the current " "locale does not support utf-8. You may need to set " "'LC_ALL' to a correct value, eg: 'en_US.UTF-8'.") # The native() call is needed here around str because # os.path.join() in Python 2.x (in posixpath.py) # mixes byte-strings with unicode strings without # explicit conversion, which raises a TypeError as it # would on Python 3. path = os.path.join(GALLERIES_UPLOAD_DIR, slug, native(str(name, errors="ignore"))) saved_path = default_storage.save(path, ContentFile(data)) self.images.create(file=saved_path) if delete_zip_import: zip_file.close() self.zip_import.delete(save=True)
python
def save(self, delete_zip_import=True, *args, **kwargs): """ If a zip file is uploaded, extract any images from it and add them to the gallery, before removing the zip file. """ super(BaseGallery, self).save(*args, **kwargs) if self.zip_import: zip_file = ZipFile(self.zip_import) for name in zip_file.namelist(): data = zip_file.read(name) try: from PIL import Image image = Image.open(BytesIO(data)) image.load() image = Image.open(BytesIO(data)) image.verify() except ImportError: pass except: continue name = os.path.split(name)[1] # In python3, name is a string. Convert it to bytes. if not isinstance(name, bytes): try: name = name.encode('cp437') except UnicodeEncodeError: # File name includes characters that aren't in cp437, # which isn't supported by most zip tooling. They will # not appear correctly. tempname = name # Decode byte-name. if isinstance(name, bytes): encoding = charsetdetect(name)['encoding'] tempname = name.decode(encoding) # A gallery with a slug of "/" tries to extract files # to / on disk; see os.path.join docs. slug = self.slug if self.slug != "/" else "" path = os.path.join(GALLERIES_UPLOAD_DIR, slug, tempname) try: saved_path = default_storage.save(path, ContentFile(data)) except UnicodeEncodeError: from warnings import warn warn("A file was saved that contains unicode " "characters in its path, but somehow the current " "locale does not support utf-8. You may need to set " "'LC_ALL' to a correct value, eg: 'en_US.UTF-8'.") # The native() call is needed here around str because # os.path.join() in Python 2.x (in posixpath.py) # mixes byte-strings with unicode strings without # explicit conversion, which raises a TypeError as it # would on Python 3. path = os.path.join(GALLERIES_UPLOAD_DIR, slug, native(str(name, errors="ignore"))) saved_path = default_storage.save(path, ContentFile(data)) self.images.create(file=saved_path) if delete_zip_import: zip_file.close() self.zip_import.delete(save=True)
[ "def", "save", "(", "self", ",", "delete_zip_import", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "super", "(", "BaseGallery", ",", "self", ")", ".", "save", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "self", ".", "zip_import", ":", "zip_file", "=", "ZipFile", "(", "self", ".", "zip_import", ")", "for", "name", "in", "zip_file", ".", "namelist", "(", ")", ":", "data", "=", "zip_file", ".", "read", "(", "name", ")", "try", ":", "from", "PIL", "import", "Image", "image", "=", "Image", ".", "open", "(", "BytesIO", "(", "data", ")", ")", "image", ".", "load", "(", ")", "image", "=", "Image", ".", "open", "(", "BytesIO", "(", "data", ")", ")", "image", ".", "verify", "(", ")", "except", "ImportError", ":", "pass", "except", ":", "continue", "name", "=", "os", ".", "path", ".", "split", "(", "name", ")", "[", "1", "]", "# In python3, name is a string. Convert it to bytes.", "if", "not", "isinstance", "(", "name", ",", "bytes", ")", ":", "try", ":", "name", "=", "name", ".", "encode", "(", "'cp437'", ")", "except", "UnicodeEncodeError", ":", "# File name includes characters that aren't in cp437,", "# which isn't supported by most zip tooling. They will", "# not appear correctly.", "tempname", "=", "name", "# Decode byte-name.", "if", "isinstance", "(", "name", ",", "bytes", ")", ":", "encoding", "=", "charsetdetect", "(", "name", ")", "[", "'encoding'", "]", "tempname", "=", "name", ".", "decode", "(", "encoding", ")", "# A gallery with a slug of \"/\" tries to extract files", "# to / on disk; see os.path.join docs.", "slug", "=", "self", ".", "slug", "if", "self", ".", "slug", "!=", "\"/\"", "else", "\"\"", "path", "=", "os", ".", "path", ".", "join", "(", "GALLERIES_UPLOAD_DIR", ",", "slug", ",", "tempname", ")", "try", ":", "saved_path", "=", "default_storage", ".", "save", "(", "path", ",", "ContentFile", "(", "data", ")", ")", "except", "UnicodeEncodeError", ":", "from", "warnings", "import", "warn", "warn", "(", "\"A file was saved that contains unicode \"", "\"characters in its path, but somehow the current \"", "\"locale does not support utf-8. You may need to set \"", "\"'LC_ALL' to a correct value, eg: 'en_US.UTF-8'.\"", ")", "# The native() call is needed here around str because", "# os.path.join() in Python 2.x (in posixpath.py)", "# mixes byte-strings with unicode strings without", "# explicit conversion, which raises a TypeError as it", "# would on Python 3.", "path", "=", "os", ".", "path", ".", "join", "(", "GALLERIES_UPLOAD_DIR", ",", "slug", ",", "native", "(", "str", "(", "name", ",", "errors", "=", "\"ignore\"", ")", ")", ")", "saved_path", "=", "default_storage", ".", "save", "(", "path", ",", "ContentFile", "(", "data", ")", ")", "self", ".", "images", ".", "create", "(", "file", "=", "saved_path", ")", "if", "delete_zip_import", ":", "zip_file", ".", "close", "(", ")", "self", ".", "zip_import", ".", "delete", "(", "save", "=", "True", ")" ]
If a zip file is uploaded, extract any images from it and add them to the gallery, before removing the zip file.
[ "If", "a", "zip", "file", "is", "uploaded", "extract", "any", "images", "from", "it", "and", "add", "them", "to", "the", "gallery", "before", "removing", "the", "zip", "file", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/galleries/models.py#L51-L111
246,673
nickmilon/Hellas
Hellas/Sparta.py
seconds_to_DHMS
def seconds_to_DHMS(seconds, as_str=True):
    """converts seconds to Days, Hours, Minutes, Seconds

    :param int seconds: number of seconds
    :param bool as_str: to return a formatted string, defaults to True
    :returns: a formatted string if as_str else a dictionary
    :Example:

    >>> seconds_to_DHMS(60*60*24)
    001-00:00:00
    >>> seconds_to_DHMS(60*60*24, False)
    {'hours': 0, 'seconds': 0, 'minutes': 0, 'days': 1}
    """
    d = DotDot()
    d.days = int(seconds // (3600 * 24))
    d.hours = int((seconds // 3600) % 24)
    d.minutes = int((seconds // 60) % 60)
    d.seconds = int(seconds % 60)
    return FMT_DHMS_DICT.format(**d) if as_str else d
python
def seconds_to_DHMS(seconds, as_str=True):
    """converts seconds to Days, Hours, Minutes, Seconds

    :param int seconds: number of seconds
    :param bool as_str: to return a formatted string, defaults to True
    :returns: a formatted string if as_str else a dictionary
    :Example:

    >>> seconds_to_DHMS(60*60*24)
    001-00:00:00
    >>> seconds_to_DHMS(60*60*24, False)
    {'hours': 0, 'seconds': 0, 'minutes': 0, 'days': 1}
    """
    d = DotDot()
    d.days = int(seconds // (3600 * 24))
    d.hours = int((seconds // 3600) % 24)
    d.minutes = int((seconds // 60) % 60)
    d.seconds = int(seconds % 60)
    return FMT_DHMS_DICT.format(**d) if as_str else d
[ "def", "seconds_to_DHMS", "(", "seconds", ",", "as_str", "=", "True", ")", ":", "d", "=", "DotDot", "(", ")", "d", ".", "days", "=", "int", "(", "seconds", "//", "(", "3600", "*", "24", ")", ")", "d", ".", "hours", "=", "int", "(", "(", "seconds", "//", "3600", ")", "%", "24", ")", "d", ".", "minutes", "=", "int", "(", "(", "seconds", "//", "60", ")", "%", "60", ")", "d", ".", "seconds", "=", "int", "(", "seconds", "%", "60", ")", "return", "FMT_DHMS_DICT", ".", "format", "(", "*", "*", "d", ")", "if", "as_str", "else", "d" ]
converts seconds to Days, Hours, Minutes, Seconds

    :param int seconds: number of seconds
    :param bool as_str: to return a formatted string, defaults to True
    :returns: a formatted string if as_str else a dictionary
    :Example:

    >>> seconds_to_DHMS(60*60*24)
    001-00:00:00
    >>> seconds_to_DHMS(60*60*24, False)
    {'hours': 0, 'seconds': 0, 'minutes': 0, 'days': 1}
[ "converts", "seconds", "to", "Days", "Hours", "Minutes", "Seconds" ]
542e4778692fbec90753942946f20100412ec9ee
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Sparta.py#L23-L40
246,674
nickmilon/Hellas
Hellas/Sparta.py
relations_dict
def relations_dict(rel_lst): """constructs a relation's dictionary from a list that describes amphidromus relations between objects :param list rel_lst: a relationships list of the form [[a,b],[c, a, b]] # can include duplicates :returns: a dictionary :Example: >>> rl = [('a', 'b', 'c'), ('a', 'x', 'y'), ('x', 'y', 'z')] >>> relations_dict(rl) {'a': ['x', 'c', 'b', 'y'], 'c': ['a', 'b'], 'b': ['a', 'c'], 'y': ['a', 'x', 'z'], 'x': ['a', 'z', 'y'], 'z': ['y', 'x']} """ dc = {} for c in rel_lst: for i in c: for k in c: dc.setdefault(i, []) dc[i].append(k) do = {} for k in list(dc.keys()): if dc[k]: vl = list(set(dc[k])) # remove duplicates vl.remove(k) do[k] = vl return do
python
def relations_dict(rel_lst): """constructs a relation's dictionary from a list that describes amphidromus relations between objects :param list rel_lst: a relationships list of the form [[a,b],[c, a, b]] # can include duplicates :returns: a dictionary :Example: >>> rl = [('a', 'b', 'c'), ('a', 'x', 'y'), ('x', 'y', 'z')] >>> relations_dict(rl) {'a': ['x', 'c', 'b', 'y'], 'c': ['a', 'b'], 'b': ['a', 'c'], 'y': ['a', 'x', 'z'], 'x': ['a', 'z', 'y'], 'z': ['y', 'x']} """ dc = {} for c in rel_lst: for i in c: for k in c: dc.setdefault(i, []) dc[i].append(k) do = {} for k in list(dc.keys()): if dc[k]: vl = list(set(dc[k])) # remove duplicates vl.remove(k) do[k] = vl return do
[ "def", "relations_dict", "(", "rel_lst", ")", ":", "dc", "=", "{", "}", "for", "c", "in", "rel_lst", ":", "for", "i", "in", "c", ":", "for", "k", "in", "c", ":", "dc", ".", "setdefault", "(", "i", ",", "[", "]", ")", "dc", "[", "i", "]", ".", "append", "(", "k", ")", "do", "=", "{", "}", "for", "k", "in", "list", "(", "dc", ".", "keys", "(", ")", ")", ":", "if", "dc", "[", "k", "]", ":", "vl", "=", "list", "(", "set", "(", "dc", "[", "k", "]", ")", ")", "# remove duplicates", "vl", ".", "remove", "(", "k", ")", "do", "[", "k", "]", "=", "vl", "return", "do" ]
constructs a relation's dictionary from a list that describes amphidromus relations between objects :param list rel_lst: a relationships list of the form [[a,b],[c, a, b]] # can include duplicates :returns: a dictionary :Example: >>> rl = [('a', 'b', 'c'), ('a', 'x', 'y'), ('x', 'y', 'z')] >>> relations_dict(rl) {'a': ['x', 'c', 'b', 'y'], 'c': ['a', 'b'], 'b': ['a', 'c'], 'y': ['a', 'x', 'z'], 'x': ['a', 'z', 'y'], 'z': ['y', 'x']}
[ "constructs", "a", "relation", "s", "dictionary", "from", "a", "list", "that", "describes", "amphidromus", "relations", "between", "objects" ]
542e4778692fbec90753942946f20100412ec9ee
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Sparta.py#L264-L288
246,675
nickmilon/Hellas
Hellas/Sparta.py
chunks
def chunks(sliceable, n):
    """ returns a list of lists of any slice-able object each of max length n

    :Parameters:
        - sliceable: (string|list|tuple) any sliceable object
        - n: max elements of each chunk

    :Example:

    >>> chunks([1,2,3,4,5,6,7,8,9,'x'], 4)
    [[1, 2, 3, 4], [5, 6, 7, 8], [9, 'x']]
    >>> chunks('123456789X', 3)
    ['123', '456', '789', 'X']
    """
    return [sliceable[i:i+n] for i in range(0, len(sliceable), n)]
python
def chunks(sliceable, n):
    """ returns a list of lists of any slice-able object each of max length n

    :Parameters:
        - sliceable: (string|list|tuple) any sliceable object
        - n: max elements of each chunk

    :Example:

    >>> chunks([1,2,3,4,5,6,7,8,9,'x'], 4)
    [[1, 2, 3, 4], [5, 6, 7, 8], [9, 'x']]
    >>> chunks('123456789X', 3)
    ['123', '456', '789', 'X']
    """
    return [sliceable[i:i+n] for i in range(0, len(sliceable), n)]
[ "def", "chunks", "(", "sliceable", ",", "n", ")", ":", "return", "[", "sliceable", "[", "i", ":", "i", "+", "n", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "sliceable", ")", ",", "n", ")", "]" ]
returns a list of lists of any slice-able object each of max length n

    :Parameters:
        - sliceable: (string|list|tuple) any sliceable object
        - n: max elements of each chunk

    :Example:

    >>> chunks([1,2,3,4,5,6,7,8,9,'x'], 4)
    [[1, 2, 3, 4], [5, 6, 7, 8], [9, 'x']]
    >>> chunks('123456789X', 3)
    ['123', '456', '789', 'X']
[ "returns", "a", "list", "of", "lists", "of", "any", "slice", "-", "able", "object", "each", "of", "max", "lentgh", "n" ]
542e4778692fbec90753942946f20100412ec9ee
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Sparta.py#L291-L305
246,676
nickmilon/Hellas
Hellas/Sparta.py
chunks_str
def chunks_str(str, n, separator="\n", fill_blanks_last=True): """returns lines with max n characters :Example: >>> print (chunks_str('123456X', 3)) 123 456 X """ return separator.join(chunks(str, n))
python
def chunks_str(str, n, separator="\n", fill_blanks_last=True): """returns lines with max n characters :Example: >>> print (chunks_str('123456X', 3)) 123 456 X """ return separator.join(chunks(str, n))
[ "def", "chunks_str", "(", "str", ",", "n", ",", "separator", "=", "\"\\n\"", ",", "fill_blanks_last", "=", "True", ")", ":", "return", "separator", ".", "join", "(", "chunks", "(", "str", ",", "n", ")", ")" ]
returns lines with max n characters :Example: >>> print (chunks_str('123456X', 3)) 123 456 X
[ "returns", "lines", "with", "max", "n", "characters" ]
542e4778692fbec90753942946f20100412ec9ee
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Sparta.py#L308-L317
246,677
nickmilon/Hellas
Hellas/Sparta.py
class_name_str
def class_name_str(obj, skip_parent=False):
    """ returns object's class name as string """
    rt = str(type(obj)).split(" ")[1][1:-2]
    if skip_parent:
        rt = rt.split(".")[-1]
    return rt
python
def class_name_str(obj, skip_parent=False):
    """ returns object's class name as string """
    rt = str(type(obj)).split(" ")[1][1:-2]
    if skip_parent:
        rt = rt.split(".")[-1]
    return rt
[ "def", "class_name_str", "(", "obj", ",", "skip_parent", "=", "False", ")", ":", "rt", "=", "str", "(", "type", "(", "obj", ")", ")", ".", "split", "(", "\" \"", ")", "[", "1", "]", "[", "1", ":", "-", "2", "]", "if", "skip_parent", ":", "rt", "=", "rt", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "return", "rt" ]
returns object's class name as string
[ "return", "s", "object", "s", "class", "name", "as", "string" ]
542e4778692fbec90753942946f20100412ec9ee
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Sparta.py#L333-L340
246,678
nickmilon/Hellas
Hellas/Sparta.py
EnumLabels.value_name
def value_name(cls, value):
    """ Returns the label from a value if the label exists, otherwise returns the value.
    Since the method does a reverse look up, it is slow.
    """
    for k, v in list(cls.__dict__.items()):
        if v == value:
            return k
    return value
python
def value_name(cls, value):
    """ Returns the label from a value if the label exists, otherwise returns the value.
    Since the method does a reverse look up, it is slow.
    """
    for k, v in list(cls.__dict__.items()):
        if v == value:
            return k
    return value
[ "def", "value_name", "(", "cls", ",", "value", ")", ":", "for", "k", ",", "v", "in", "list", "(", "cls", ".", "__dict__", ".", "items", "(", ")", ")", ":", "if", "v", "==", "value", ":", "return", "k", "return", "value" ]
Returns the label from a value if the label exists, otherwise returns the value.
    Since the method does a reverse look up, it is slow.
[ "Returns", "the", "label", "from", "a", "value", "if", "label", "exists", "otherwise", "returns", "the", "value", "since", "method", "does", "a", "reverse", "look", "up", "it", "is", "slow" ]
542e4778692fbec90753942946f20100412ec9ee
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Sparta.py#L253-L261
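A small illustration of the reverse lookup in value_name; the Color class is a made-up EnumLabels subclass, and the calls assume value_name is exposed as a classmethod, as its signature suggests.

from Hellas.Sparta import EnumLabels

class Color(EnumLabels):
    RED = 1
    GREEN = 2

print(Color.value_name(1))   # 'RED'  (label found by scanning the class dict)
print(Color.value_name(99))  # 99     (unknown values are returned unchanged)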
246,679
pbrisk/unicum
unicum/persistentobject.py
PersistentObject._is_visible
def _is_visible(cls, property_name): """ private method to check visible object property to be visible """ if isinstance(property_name, list): return [cls._is_visible(p) for p in property_name] if property_name.startswith('__') and property_name.endswith('__'): return False return property_name.startswith(cls.STARTS_WITH) and property_name.endswith(cls.ENDS_WITH)
python
def _is_visible(cls, property_name): """ private method to check visible object property to be visible """ if isinstance(property_name, list): return [cls._is_visible(p) for p in property_name] if property_name.startswith('__') and property_name.endswith('__'): return False return property_name.startswith(cls.STARTS_WITH) and property_name.endswith(cls.ENDS_WITH)
[ "def", "_is_visible", "(", "cls", ",", "property_name", ")", ":", "if", "isinstance", "(", "property_name", ",", "list", ")", ":", "return", "[", "cls", ".", "_is_visible", "(", "p", ")", "for", "p", "in", "property_name", "]", "if", "property_name", ".", "startswith", "(", "'__'", ")", "and", "property_name", ".", "endswith", "(", "'__'", ")", ":", "return", "False", "return", "property_name", ".", "startswith", "(", "cls", ".", "STARTS_WITH", ")", "and", "property_name", ".", "endswith", "(", "cls", ".", "ENDS_WITH", ")" ]
private method to check visible object property to be visible
[ "private", "method", "to", "check", "visible", "object", "property", "to", "be", "visible" ]
24bfa7355f36847a06646c58e9fd75bd3b689bfe
https://github.com/pbrisk/unicum/blob/24bfa7355f36847a06646c58e9fd75bd3b689bfe/unicum/persistentobject.py#L58-L64
246,680
pbrisk/unicum
unicum/persistentobject.py
PersistentObject._from_class
def _from_class(cls, class_name, module_name=None, *args, **kwargs): """ class method to create object of a given class """ def _get_module(module_name): names = module_name.split(".") module = __import__(names[0]) for i in xrange(1, len(names)): module = getattr(module, names[i]) return module if module_name: # module = globals()[module_name] # module = __import__(module_name) module = _get_module(module_name) class_ = getattr(module, class_name) else: class_ = globals()[class_name] if not issubclass(class_, PersistentObject): t = class_.__name__, PersistentObject.__name__ raise TypeError, 'Requested object type %s must be subtype of %s ' % t # workaround to mimic FactoryType to work well with FactoryObject. name = str(args[0]) if args else cls.__name__ name = kwargs['name'] if 'name' in kwargs else name if hasattr(cls, 'get'): instance = cls.get(name) if instance: return instance instance = class_.__new__(class_, *args, **kwargs) instance.__init__(*args, **kwargs) return instance
python
def _from_class(cls, class_name, module_name=None, *args, **kwargs): """ class method to create object of a given class """ def _get_module(module_name): names = module_name.split(".") module = __import__(names[0]) for i in xrange(1, len(names)): module = getattr(module, names[i]) return module if module_name: # module = globals()[module_name] # module = __import__(module_name) module = _get_module(module_name) class_ = getattr(module, class_name) else: class_ = globals()[class_name] if not issubclass(class_, PersistentObject): t = class_.__name__, PersistentObject.__name__ raise TypeError, 'Requested object type %s must be subtype of %s ' % t # workaround to mimic FactoryType to work well with FactoryObject. name = str(args[0]) if args else cls.__name__ name = kwargs['name'] if 'name' in kwargs else name if hasattr(cls, 'get'): instance = cls.get(name) if instance: return instance instance = class_.__new__(class_, *args, **kwargs) instance.__init__(*args, **kwargs) return instance
[ "def", "_from_class", "(", "cls", ",", "class_name", ",", "module_name", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "_get_module", "(", "module_name", ")", ":", "names", "=", "module_name", ".", "split", "(", "\".\"", ")", "module", "=", "__import__", "(", "names", "[", "0", "]", ")", "for", "i", "in", "xrange", "(", "1", ",", "len", "(", "names", ")", ")", ":", "module", "=", "getattr", "(", "module", ",", "names", "[", "i", "]", ")", "return", "module", "if", "module_name", ":", "# module = globals()[module_name]", "# module = __import__(module_name)", "module", "=", "_get_module", "(", "module_name", ")", "class_", "=", "getattr", "(", "module", ",", "class_name", ")", "else", ":", "class_", "=", "globals", "(", ")", "[", "class_name", "]", "if", "not", "issubclass", "(", "class_", ",", "PersistentObject", ")", ":", "t", "=", "class_", ".", "__name__", ",", "PersistentObject", ".", "__name__", "raise", "TypeError", ",", "'Requested object type %s must be subtype of %s '", "%", "t", "# workaround to mimic FactoryType to work well with FactoryObject.", "name", "=", "str", "(", "args", "[", "0", "]", ")", "if", "args", "else", "cls", ".", "__name__", "name", "=", "kwargs", "[", "'name'", "]", "if", "'name'", "in", "kwargs", "else", "name", "if", "hasattr", "(", "cls", ",", "'get'", ")", ":", "instance", "=", "cls", ".", "get", "(", "name", ")", "if", "instance", ":", "return", "instance", "instance", "=", "class_", ".", "__new__", "(", "class_", ",", "*", "args", ",", "*", "*", "kwargs", ")", "instance", ".", "__init__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "instance" ]
class method to create object of a given class
[ "class", "method", "to", "create", "object", "of", "a", "given", "class" ]
24bfa7355f36847a06646c58e9fd75bd3b689bfe
https://github.com/pbrisk/unicum/blob/24bfa7355f36847a06646c58e9fd75bd3b689bfe/unicum/persistentobject.py#L89-L120
246,681
pbrisk/unicum
unicum/persistentobject.py
PersistentObject.from_serializable
def from_serializable(cls, object_dict): """ core class method to create visible objects from a dictionary """ key_class = cls._from_visible(cls.STARTS_WITH + 'class' + cls.ENDS_WITH) key_module = cls._from_visible(cls.STARTS_WITH + 'module' + cls.ENDS_WITH) obj_class = object_dict.pop(key_class) obj_module = object_dict.pop(key_module) if key_module in object_dict else None obj = cls._from_class(obj_class, obj_module) obj.modify_object(object_dict) return obj
python
def from_serializable(cls, object_dict): """ core class method to create visible objects from a dictionary """ key_class = cls._from_visible(cls.STARTS_WITH + 'class' + cls.ENDS_WITH) key_module = cls._from_visible(cls.STARTS_WITH + 'module' + cls.ENDS_WITH) obj_class = object_dict.pop(key_class) obj_module = object_dict.pop(key_module) if key_module in object_dict else None obj = cls._from_class(obj_class, obj_module) obj.modify_object(object_dict) return obj
[ "def", "from_serializable", "(", "cls", ",", "object_dict", ")", ":", "key_class", "=", "cls", ".", "_from_visible", "(", "cls", ".", "STARTS_WITH", "+", "'class'", "+", "cls", ".", "ENDS_WITH", ")", "key_module", "=", "cls", ".", "_from_visible", "(", "cls", ".", "STARTS_WITH", "+", "'module'", "+", "cls", ".", "ENDS_WITH", ")", "obj_class", "=", "object_dict", ".", "pop", "(", "key_class", ")", "obj_module", "=", "object_dict", ".", "pop", "(", "key_module", ")", "if", "key_module", "in", "object_dict", "else", "None", "obj", "=", "cls", ".", "_from_class", "(", "obj_class", ",", "obj_module", ")", "obj", ".", "modify_object", "(", "object_dict", ")", "return", "obj" ]
core class method to create visible objects from a dictionary
[ "core", "class", "method", "to", "create", "visible", "objects", "from", "a", "dictionary" ]
24bfa7355f36847a06646c58e9fd75bd3b689bfe
https://github.com/pbrisk/unicum/blob/24bfa7355f36847a06646c58e9fd75bd3b689bfe/unicum/persistentobject.py#L123-L134
246,682
pbrisk/unicum
unicum/persistentobject.py
PersistentObject.modify_object
def modify_object(self, property_name, property_value_variant=None): """ api visible method for modifying visible object properties :param property_name: property name :type property_name: string, list or dict :param property_value_variant: property value, must be `None` if property_name is of type `dict` :type property_value_variant: various or None :return: modified object :rtype: unicum.lfojbect.VisibleObject """ if type(property_name) is dict: property_value_variant = property_name.values() property_name = property_name.keys() if isinstance(property_name, str): property_name, property_value_variant = [property_name], [property_value_variant] assert len(property_name) == len(property_value_variant) # convert names into visible property_name = self.__class__._to_visible(property_name) # loop over properties to set for n, v in zip(property_name, property_value_variant): self._modify_property(n.encode('ascii','ignore'), v) # rebuild object in order to maintain consistency self._rebuild_object() return self
python
def modify_object(self, property_name, property_value_variant=None): """ api visible method for modifying visible object properties :param property_name: property name :type property_name: string, list or dict :param property_value_variant: property value, must be `None` if property_name is of type `dict` :type property_value_variant: various or None :return: modified object :rtype: unicum.lfojbect.VisibleObject """ if type(property_name) is dict: property_value_variant = property_name.values() property_name = property_name.keys() if isinstance(property_name, str): property_name, property_value_variant = [property_name], [property_value_variant] assert len(property_name) == len(property_value_variant) # convert names into visible property_name = self.__class__._to_visible(property_name) # loop over properties to set for n, v in zip(property_name, property_value_variant): self._modify_property(n.encode('ascii','ignore'), v) # rebuild object in order to maintain consistency self._rebuild_object() return self
[ "def", "modify_object", "(", "self", ",", "property_name", ",", "property_value_variant", "=", "None", ")", ":", "if", "type", "(", "property_name", ")", "is", "dict", ":", "property_value_variant", "=", "property_name", ".", "values", "(", ")", "property_name", "=", "property_name", ".", "keys", "(", ")", "if", "isinstance", "(", "property_name", ",", "str", ")", ":", "property_name", ",", "property_value_variant", "=", "[", "property_name", "]", ",", "[", "property_value_variant", "]", "assert", "len", "(", "property_name", ")", "==", "len", "(", "property_value_variant", ")", "# convert names into visible", "property_name", "=", "self", ".", "__class__", ".", "_to_visible", "(", "property_name", ")", "# loop over properties to set", "for", "n", ",", "v", "in", "zip", "(", "property_name", ",", "property_value_variant", ")", ":", "self", ".", "_modify_property", "(", "n", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", ",", "v", ")", "# rebuild object in order to maintain consistency", "self", ".", "_rebuild_object", "(", ")", "return", "self" ]
api visible method for modifying visible object properties :param property_name: property name :type property_name: string, list or dict :param property_value_variant: property value, must be `None` if property_name is of type `dict` :type property_value_variant: various or None :return: modified object :rtype: unicum.lfojbect.VisibleObject
[ "api", "visible", "method", "for", "modifying", "visible", "object", "properties" ]
24bfa7355f36847a06646c58e9fd75bd3b689bfe
https://github.com/pbrisk/unicum/blob/24bfa7355f36847a06646c58e9fd75bd3b689bfe/unicum/persistentobject.py#L164-L192
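A minimal usage sketch for the modify_object record above, assuming `obj` is an instance of a PersistentObject subclass; the property names and values are invented for illustration. The three call forms mirror the str, list and dict branches in the code.

obj.modify_object('Currency', 'EUR')                        # single name/value pair
obj.modify_object(['Currency', 'Notional'], ['EUR', 1000])  # parallel lists of equal length
obj.modify_object({'Currency': 'EUR', 'Notional': 1000})    # dict form; the value argument stays None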
246,683
klmitch/bark
bark/handlers.py
arg_types
def arg_types(**kwargs): """ Mark the expected types of certain arguments. Arguments for which no types are provided default to strings. To specify an argument type, give this decorator a keyword argument, where the argument name is the name of the function argument and the value is a callable taking one argument, which will convert a string to a value of that type. Note that the 'bool' type is treated specially. """ def decorator(func): if not hasattr(func, '_bark_types'): func._bark_types = {} func._bark_types.update(kwargs) return func return decorator
python
def arg_types(**kwargs): """ Mark the expected types of certain arguments. Arguments for which no types are provided default to strings. To specify an argument type, give this decorator a keyword argument, where the argument name is the name of the function argument and the value is a callable taking one argument, which will convert a string to a value of that type. Note that the 'bool' type is treated specially. """ def decorator(func): if not hasattr(func, '_bark_types'): func._bark_types = {} func._bark_types.update(kwargs) return func return decorator
[ "def", "arg_types", "(", "*", "*", "kwargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "if", "not", "hasattr", "(", "func", ",", "'_bark_types'", ")", ":", "func", ".", "_bark_types", "=", "{", "}", "func", ".", "_bark_types", ".", "update", "(", "kwargs", ")", "return", "func", "return", "decorator" ]
Mark the expected types of certain arguments. Arguments for which no types are provided default to strings. To specify an argument type, give this decorator a keyword argument, where the argument name is the name of the function argument and the value is a callable taking one argument, which will convert a string to a value of that type. Note that the 'bool' type is treated specially.
[ "Mark", "the", "expected", "types", "of", "certain", "arguments", ".", "Arguments", "for", "which", "no", "types", "are", "provided", "default", "to", "strings", ".", "To", "specify", "an", "argument", "type", "give", "this", "decorator", "a", "keyword", "argument", "where", "the", "argument", "name", "is", "the", "name", "of", "the", "function", "argument", "and", "the", "value", "is", "a", "callable", "taking", "one", "argument", "which", "will", "convert", "a", "string", "to", "a", "value", "of", "that", "type", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L72-L90
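A hypothetical illustration of the arg_types decorator above; the handler name and its arguments are invented. The decorator only records the converters on the function (in _bark_types), which bark can later use to coerce string configuration values; unlisted arguments stay plain strings.

@arg_types(port=int, timeout=float, debug=boolean)
def my_handler(name, logname, host, port, timeout=5.0, debug=False):
    ...

# my_handler._bark_types == {'port': int, 'timeout': float, 'debug': boolean};
# 'host' is not listed, so it would be passed through as a string.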
246,684
klmitch/bark
bark/handlers.py
boolean
def boolean(text): """ An alternative to the "bool" argument type which interprets string values. """ tmp = text.lower() if tmp.isdigit(): return bool(int(tmp)) elif tmp in ('t', 'true', 'on', 'yes'): return True elif tmp in ('f', 'false', 'off', 'no'): return False raise ValueError("invalid Boolean value %r" % text)
python
def boolean(text): """ An alternative to the "bool" argument type which interprets string values. """ tmp = text.lower() if tmp.isdigit(): return bool(int(tmp)) elif tmp in ('t', 'true', 'on', 'yes'): return True elif tmp in ('f', 'false', 'off', 'no'): return False raise ValueError("invalid Boolean value %r" % text)
[ "def", "boolean", "(", "text", ")", ":", "tmp", "=", "text", ".", "lower", "(", ")", "if", "tmp", ".", "isdigit", "(", ")", ":", "return", "bool", "(", "int", "(", "tmp", ")", ")", "elif", "tmp", "in", "(", "'t'", ",", "'true'", ",", "'on'", ",", "'yes'", ")", ":", "return", "True", "elif", "tmp", "in", "(", "'f'", ",", "'false'", ",", "'off'", ",", "'no'", ")", ":", "return", "False", "raise", "ValueError", "(", "\"invalid Boolean value %r\"", "%", "text", ")" ]
An alternative to the "bool" argument type which interprets string values.
[ "An", "alternative", "to", "the", "bool", "argument", "type", "which", "interprets", "string", "values", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L93-L107
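Sample conversions, which follow directly from the branches of boolean() above:

boolean('yes')    # True
boolean('OFF')    # False (comparison is case-insensitive)
boolean('0')      # False (digit strings go through int())
boolean('maybe')  # raises ValueError("invalid Boolean value 'maybe'")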
246,685
klmitch/bark
bark/handlers.py
file_handler
def file_handler(name, logname, filename, mode='a', encoding=None, delay=False): """ A Bark logging handler logging output to a named file. Similar to logging.FileHandler. """ return wrap_log_handler(logging.FileHandler( filename, mode=mode, encoding=encoding, delay=delay))
python
def file_handler(name, logname, filename, mode='a', encoding=None, delay=False): """ A Bark logging handler logging output to a named file. Similar to logging.FileHandler. """ return wrap_log_handler(logging.FileHandler( filename, mode=mode, encoding=encoding, delay=delay))
[ "def", "file_handler", "(", "name", ",", "logname", ",", "filename", ",", "mode", "=", "'a'", ",", "encoding", "=", "None", ",", "delay", "=", "False", ")", ":", "return", "wrap_log_handler", "(", "logging", ".", "FileHandler", "(", "filename", ",", "mode", "=", "mode", ",", "encoding", "=", "encoding", ",", "delay", "=", "delay", ")", ")" ]
A Bark logging handler logging output to a named file. Similar to logging.FileHandler.
[ "A", "Bark", "logging", "handler", "logging", "output", "to", "a", "named", "file", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L237-L246
246,686
klmitch/bark
bark/handlers.py
watched_file_handler
def watched_file_handler(name, logname, filename, mode='a', encoding=None, delay=False): """ A Bark logging handler logging output to a named file. If the file has changed since the last log message was written, it will be closed and reopened. Similar to logging.handlers.WatchedFileHandler. """ return wrap_log_handler(logging.handlers.WatchedFileHandler( filename, mode=mode, encoding=encoding, delay=delay))
python
def watched_file_handler(name, logname, filename, mode='a', encoding=None, delay=False): """ A Bark logging handler logging output to a named file. If the file has changed since the last log message was written, it will be closed and reopened. Similar to logging.handlers.WatchedFileHandler. """ return wrap_log_handler(logging.handlers.WatchedFileHandler( filename, mode=mode, encoding=encoding, delay=delay))
[ "def", "watched_file_handler", "(", "name", ",", "logname", ",", "filename", ",", "mode", "=", "'a'", ",", "encoding", "=", "None", ",", "delay", "=", "False", ")", ":", "return", "wrap_log_handler", "(", "logging", ".", "handlers", ".", "WatchedFileHandler", "(", "filename", ",", "mode", "=", "mode", ",", "encoding", "=", "encoding", ",", "delay", "=", "delay", ")", ")" ]
A Bark logging handler logging output to a named file. If the file has changed since the last log message was written, it will be closed and reopened. Similar to logging.handlers.WatchedFileHandler.
[ "A", "Bark", "logging", "handler", "logging", "output", "to", "a", "named", "file", ".", "If", "the", "file", "has", "changed", "since", "the", "last", "log", "message", "was", "written", "it", "will", "be", "closed", "and", "reopened", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L250-L261
246,687
klmitch/bark
bark/handlers.py
rotating_file_handler
def rotating_file_handler(name, logname, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False): """ A Bark logging handler logging output to a named file. When the file grows close to 'maxBytes', it will be rotated, under control of 'backupCount'. Similar to logging.handlers.RotatingFileHandler. """ return wrap_log_handler(logging.handlers.RotatingFileHandler( filename, mode=mode, maxBytes=maxBytes, backupCount=backupCount, encoding=encoding, delay=delay))
python
def rotating_file_handler(name, logname, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False): """ A Bark logging handler logging output to a named file. When the file grows close to 'maxBytes', it will be rotated, under control of 'backupCount'. Similar to logging.handlers.RotatingFileHandler. """ return wrap_log_handler(logging.handlers.RotatingFileHandler( filename, mode=mode, maxBytes=maxBytes, backupCount=backupCount, encoding=encoding, delay=delay))
[ "def", "rotating_file_handler", "(", "name", ",", "logname", ",", "filename", ",", "mode", "=", "'a'", ",", "maxBytes", "=", "0", ",", "backupCount", "=", "0", ",", "encoding", "=", "None", ",", "delay", "=", "False", ")", ":", "return", "wrap_log_handler", "(", "logging", ".", "handlers", ".", "RotatingFileHandler", "(", "filename", ",", "mode", "=", "mode", ",", "maxBytes", "=", "maxBytes", ",", "backupCount", "=", "backupCount", ",", "encoding", "=", "encoding", ",", "delay", "=", "delay", ")", ")" ]
A Bark logging handler logging output to a named file. When the file grows close to 'maxBytes', it will be rotated, under control of 'backupCount'. Similar to logging.handlers.RotatingFileHandler.
[ "A", "Bark", "logging", "handler", "logging", "output", "to", "a", "named", "file", ".", "When", "the", "file", "grows", "close", "to", "maxBytes", "it", "will", "be", "rotated", "under", "control", "of", "backupCount", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L265-L277
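A hedged example of calling one of these factory functions directly; the log path and sizes are invented. The `name` and `logname` positionals belong to bark's factory interface and are not forwarded to the wrapped stdlib handler.

handler = rotating_file_handler('access', 'combined', '/var/log/myapp/access.log',
                                maxBytes=10 * 1024 * 1024, backupCount=5)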
246,688
klmitch/bark
bark/handlers.py
timed_rotating_file_handler
def timed_rotating_file_handler(name, logname, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False): """ A Bark logging handler logging output to a named file. At intervals specified by the 'when', the file will be rotated, under control of 'backupCount'. Similar to logging.handlers.TimedRotatingFileHandler. """ return wrap_log_handler(logging.handlers.TimedRotatingFileHandler( filename, when=when, interval=interval, backupCount=backupCount, encoding=encoding, delay=delay, utc=utc))
python
def timed_rotating_file_handler(name, logname, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False): """ A Bark logging handler logging output to a named file. At intervals specified by the 'when', the file will be rotated, under control of 'backupCount'. Similar to logging.handlers.TimedRotatingFileHandler. """ return wrap_log_handler(logging.handlers.TimedRotatingFileHandler( filename, when=when, interval=interval, backupCount=backupCount, encoding=encoding, delay=delay, utc=utc))
[ "def", "timed_rotating_file_handler", "(", "name", ",", "logname", ",", "filename", ",", "when", "=", "'h'", ",", "interval", "=", "1", ",", "backupCount", "=", "0", ",", "encoding", "=", "None", ",", "delay", "=", "False", ",", "utc", "=", "False", ")", ":", "return", "wrap_log_handler", "(", "logging", ".", "handlers", ".", "TimedRotatingFileHandler", "(", "filename", ",", "when", "=", "when", ",", "interval", "=", "interval", ",", "backupCount", "=", "backupCount", ",", "encoding", "=", "encoding", ",", "delay", "=", "delay", ",", "utc", "=", "utc", ")", ")" ]
A Bark logging handler logging output to a named file. At intervals specified by the 'when', the file will be rotated, under control of 'backupCount'. Similar to logging.handlers.TimedRotatingFileHandler.
[ "A", "Bark", "logging", "handler", "logging", "output", "to", "a", "named", "file", ".", "At", "intervals", "specified", "by", "the", "when", "the", "file", "will", "be", "rotated", "under", "control", "of", "backupCount", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L281-L294
246,689
klmitch/bark
bark/handlers.py
nt_event_log_handler
def nt_event_log_handler(name, logname, appname, dllname=None, logtype="Application"): """ A Bark logging handler logging output to the NT Event Log. Similar to logging.handlers.NTEventLogHandler. """ return wrap_log_handler(logging.handlers.NTEventLogHandler( appname, dllname=dllname, logtype=logtype))
python
def nt_event_log_handler(name, logname, appname, dllname=None, logtype="Application"): """ A Bark logging handler logging output to the NT Event Log. Similar to logging.handlers.NTEventLogHandler. """ return wrap_log_handler(logging.handlers.NTEventLogHandler( appname, dllname=dllname, logtype=logtype))
[ "def", "nt_event_log_handler", "(", "name", ",", "logname", ",", "appname", ",", "dllname", "=", "None", ",", "logtype", "=", "\"Application\"", ")", ":", "return", "wrap_log_handler", "(", "logging", ".", "handlers", ".", "NTEventLogHandler", "(", "appname", ",", "dllname", "=", "dllname", ",", "logtype", "=", "logtype", ")", ")" ]
A Bark logging handler logging output to the NT Event Log. Similar to logging.handlers.NTEventLogHandler.
[ "A", "Bark", "logging", "handler", "logging", "output", "to", "the", "NT", "Event", "Log", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L342-L351
246,690
klmitch/bark
bark/handlers.py
http_handler
def http_handler(name, logname, host, url, method="GET"): """ A Bark logging handler logging output to an HTTP server, using either GET or POST semantics. Similar to logging.handlers.HTTPHandler. """ return wrap_log_handler(logging.handlers.HTTPHandler( host, url, method=method))
python
def http_handler(name, logname, host, url, method="GET"): """ A Bark logging handler logging output to an HTTP server, using either GET or POST semantics. Similar to logging.handlers.HTTPHandler. """ return wrap_log_handler(logging.handlers.HTTPHandler( host, url, method=method))
[ "def", "http_handler", "(", "name", ",", "logname", ",", "host", ",", "url", ",", "method", "=", "\"GET\"", ")", ":", "return", "wrap_log_handler", "(", "logging", ".", "handlers", ".", "HTTPHandler", "(", "host", ",", "url", ",", "method", "=", "method", ")", ")" ]
A Bark logging handler logging output to an HTTP server, using either GET or POST semantics. Similar to logging.handlers.HTTPHandler.
[ "A", "Bark", "logging", "handler", "logging", "output", "to", "an", "HTTP", "server", "using", "either", "GET", "or", "POST", "semantics", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L371-L380
246,691
klmitch/bark
bark/handlers.py
_lookup_handler
def _lookup_handler(name): """ Look up the implementation of a named handler. Broken out for testing purposes. :param name: The name of the handler to look up. :returns: A factory function for the log handler. """ # Look up and load the handler factory for ep in pkg_resources.iter_entry_points('bark.handler', name): try: # Load and return the handler factory return ep.load() except (ImportError, pkg_resources.UnknownExtra): # Couldn't load it... continue raise ImportError("Unknown log file handler %r" % name)
python
def _lookup_handler(name): """ Look up the implementation of a named handler. Broken out for testing purposes. :param name: The name of the handler to look up. :returns: A factory function for the log handler. """ # Look up and load the handler factory for ep in pkg_resources.iter_entry_points('bark.handler', name): try: # Load and return the handler factory return ep.load() except (ImportError, pkg_resources.UnknownExtra): # Couldn't load it... continue raise ImportError("Unknown log file handler %r" % name)
[ "def", "_lookup_handler", "(", "name", ")", ":", "# Look up and load the handler factory", "for", "ep", "in", "pkg_resources", ".", "iter_entry_points", "(", "'bark.handler'", ",", "name", ")", ":", "try", ":", "# Load and return the handler factory", "return", "ep", ".", "load", "(", ")", "except", "(", "ImportError", ",", "pkg_resources", ".", "UnknownExtra", ")", ":", "# Couldn't load it...", "continue", "raise", "ImportError", "(", "\"Unknown log file handler %r\"", "%", "name", ")" ]
Look up the implementation of a named handler. Broken out for testing purposes. :param name: The name of the handler to look up. :returns: A factory function for the log handler.
[ "Look", "up", "the", "implementation", "of", "a", "named", "handler", ".", "Broken", "out", "for", "testing", "purposes", "." ]
6e0e002d55f01fee27e3e45bb86e30af1bfeef36
https://github.com/klmitch/bark/blob/6e0e002d55f01fee27e3e45bb86e30af1bfeef36/bark/handlers.py#L383-L402
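A hedged sketch of the registration that _lookup_handler depends on: handler factories are exposed as 'bark.handler' entry points. The package metadata below is invented and bark's own declarations may differ; only the entry-point group name is taken from the code above.

from setuptools import setup

setup(
    name='my-bark-handlers',
    version='0.1',
    py_modules=['my_handlers'],
    entry_points={
        'bark.handler': [
            'file = bark.handlers:file_handler',
        ],
    },
)

# _lookup_handler('file') would then return the file_handler factory;
# an unknown or unloadable name raises ImportError.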
246,692
nicholasbishop/trask
trask/phase2.py
make_keys_safe
def make_keys_safe(dct): """Modify the keys in |dct| to be valid attribute names.""" result = {} for key, val in dct.items(): key = key.replace('-', '_') if key in keyword.kwlist: key = key + '_' result[key] = val return result
python
def make_keys_safe(dct): """Modify the keys in |dct| to be valid attribute names.""" result = {} for key, val in dct.items(): key = key.replace('-', '_') if key in keyword.kwlist: key = key + '_' result[key] = val return result
[ "def", "make_keys_safe", "(", "dct", ")", ":", "result", "=", "{", "}", "for", "key", ",", "val", "in", "dct", ".", "items", "(", ")", ":", "key", "=", "key", ".", "replace", "(", "'-'", ",", "'_'", ")", "if", "key", "in", "keyword", ".", "kwlist", ":", "key", "=", "key", "+", "'_'", "result", "[", "key", "]", "=", "val", "return", "result" ]
Modify the keys in |dct| to be valid attribute names.
[ "Modify", "the", "keys", "in", "|dct|", "to", "be", "valid", "attribute", "names", "." ]
a97688425f70b539c7710b498627da9a6e39afd8
https://github.com/nicholasbishop/trask/blob/a97688425f70b539c7710b498627da9a6e39afd8/trask/phase2.py#L36-L44
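A sample transformation that follows directly from make_keys_safe above (hyphens become underscores, Python keywords get a trailing underscore); the keys are illustrative.

make_keys_safe({'docker-image': 'alpine', 'from': 'src', 'name': 'x'})
# -> {'docker_image': 'alpine', 'from_': 'src', 'name': 'x'}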
246,693
robertdfrench/psychic-disco
psychic_disco/cli.py
_apply_args_to_func
def _apply_args_to_func(global_args, func): """ Unpacks the argparse Namespace object and applies its contents as normal arguments to the function func """ global_args = vars(global_args) local_args = dict() for argument in inspect.getargspec(func).args: local_args[argument] = global_args[argument] return func(**local_args)
python
def _apply_args_to_func(global_args, func): """ Unpacks the argparse Namespace object and applies its contents as normal arguments to the function func """ global_args = vars(global_args) local_args = dict() for argument in inspect.getargspec(func).args: local_args[argument] = global_args[argument] return func(**local_args)
[ "def", "_apply_args_to_func", "(", "global_args", ",", "func", ")", ":", "global_args", "=", "vars", "(", "global_args", ")", "local_args", "=", "dict", "(", ")", "for", "argument", "in", "inspect", ".", "getargspec", "(", "func", ")", ".", "args", ":", "local_args", "[", "argument", "]", "=", "global_args", "[", "argument", "]", "return", "func", "(", "*", "*", "local_args", ")" ]
Unpacks the argparse Namespace object and applies its contents as normal arguments to the function func
[ "Unpacks", "the", "argparse", "Namespace", "object", "and", "applies", "its", "contents", "as", "normal", "arguments", "to", "the", "function", "func" ]
3cf167b44c64d64606691fc186be7d9ef8e8e938
https://github.com/robertdfrench/psychic-disco/blob/3cf167b44c64d64606691fc186be7d9ef8e8e938/psychic_disco/cli.py#L10-L17
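An illustrative sketch for _apply_args_to_func (the target function and Namespace fields are invented): only the names in the function's signature are extracted from the parsed arguments. Note the helper relies on inspect.getargspec, which was removed in Python 3.11.

import argparse
from psychic_disco.cli import _apply_args_to_func

def deploy(bucket, region):
    return bucket, region

args = argparse.Namespace(bucket='my-bucket', region='eu-west-1', verbose=True)
_apply_args_to_func(args, deploy)   # -> ('my-bucket', 'eu-west-1'); 'verbose' is ignored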
246,694
hoh/Hereby
hereby.py
Here.open
def open(self, path, mode='r', *args, **kwargs): """Proxy to function `open` with path to the current file.""" return open(os.path.join(os.path.dirname(self.path), path), mode=mode, *args, **kwargs)
python
def open(self, path, mode='r', *args, **kwargs): """Proxy to function `open` with path to the current file.""" return open(os.path.join(os.path.dirname(self.path), path), mode=mode, *args, **kwargs)
[ "def", "open", "(", "self", ",", "path", ",", "mode", "=", "'r'", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "open", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "self", ".", "path", ")", ",", "path", ")", ",", "mode", "=", "mode", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Proxy to function `open` with path to the current file.
[ "Proxy", "to", "function", "open", "with", "path", "to", "the", "current", "file", "." ]
a5f8bcdcb667e1fe1e64f542162e15ec31741505
https://github.com/hoh/Hereby/blob/a5f8bcdcb667e1fe1e64f542162e15ec31741505/hereby.py#L33-L36
246,695
hoh/Hereby
hereby.py
Here.abspath
def abspath(self, path): """Return absolute path for a path relative to the current file.""" return os.path.abspath(os.path.join(os.path.dirname(self.path), path))
python
def abspath(self, path): """Return absolute path for a path relative to the current file.""" return os.path.abspath(os.path.join(os.path.dirname(self.path), path))
[ "def", "abspath", "(", "self", ",", "path", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "self", ".", "path", ")", ",", "path", ")", ")" ]
Return absolute path for a path relative to the current file.
[ "Return", "absolute", "path", "for", "a", "path", "relative", "to", "the", "current", "file", "." ]
a5f8bcdcb667e1fe1e64f542162e15ec31741505
https://github.com/hoh/Hereby/blob/a5f8bcdcb667e1fe1e64f542162e15ec31741505/hereby.py#L38-L40
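A hedged usage sketch covering both Here methods above, assuming Here is constructed with the calling module's path (typically __file__); the relative file names are invented.

from hereby import Here

here = Here(__file__)
settings = here.abspath('config/settings.ini')   # absolute path resolved next to this file
with here.open('data/input.txt') as f:           # open() relative to this file
    payload = f.read()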
246,696
caspervg/pylex
src/pylex/lot.py
LotRoute.lot
def lot(self, id, user=True, dependencies=True, comments=True, votes=True, no_strip=False): """ Retrieve the lot with given identifier :param id: Identifier of the lot to retrieve :param user: Should user (authenticated) information be returned (e.g. last_downloaded) :param dependencies: Should a dependency list be returned :param comments: Should a list of comments be returned :param votes: Should a list of votes be returned :param no_strip: Should XML/HTML tags be stripped in the returned lot description :return: Requested lot :rtype: dict """ args = {} if user: args['user'] = 'true' if dependencies: args['dependencies'] = 'true' if comments: args['comments'] = 'true' if votes: args['votes'] = 'true' if no_strip: args['nostrip'] = 'true' return self._get_json('lot/{0}', id, **args)
python
def lot(self, id, user=True, dependencies=True, comments=True, votes=True, no_strip=False): """ Retrieve the lot with given identifier :param id: Identifier of the lot to retrieve :param user: Should user (authenticated) information be returned (e.g. last_downloaded) :param dependencies: Should a dependency list be returned :param comments: Should a list of comments be returned :param votes: Should a list of votes be returned :param no_strip: Should XML/HTML tags be stripped in the returned lot description :return: Requested lot :rtype: dict """ args = {} if user: args['user'] = 'true' if dependencies: args['dependencies'] = 'true' if comments: args['comments'] = 'true' if votes: args['votes'] = 'true' if no_strip: args['nostrip'] = 'true' return self._get_json('lot/{0}', id, **args)
[ "def", "lot", "(", "self", ",", "id", ",", "user", "=", "True", ",", "dependencies", "=", "True", ",", "comments", "=", "True", ",", "votes", "=", "True", ",", "no_strip", "=", "False", ")", ":", "args", "=", "{", "}", "if", "user", ":", "args", "[", "'user'", "]", "=", "'true'", "if", "dependencies", ":", "args", "[", "'dependencies'", "]", "=", "'true'", "if", "comments", ":", "args", "[", "'comments'", "]", "=", "'true'", "if", "votes", ":", "args", "[", "'votes'", "]", "=", "'true'", "if", "no_strip", ":", "args", "[", "'nostrip'", "]", "=", "'true'", "return", "self", ".", "_get_json", "(", "'lot/{0}'", ",", "id", ",", "*", "*", "args", ")" ]
Retrieve the lot with given identifier :param id: Identifier of the lot to retrieve :param user: Should user (authenticated) information be returned (e.g. last_downloaded) :param dependencies: Should a dependency list be returned :param comments: Should a list of comments be returned :param votes: Should a list of votes be returned :param no_strip: Should XML/HTML tags be stripped in the returned lot description :return: Requested lot :rtype: dict
[ "Retrieve", "the", "lot", "with", "given", "identifier" ]
bae00c705b5331b0f784d42b27e19dc3541e1b5a
https://github.com/caspervg/pylex/blob/bae00c705b5331b0f784d42b27e19dc3541e1b5a/src/pylex/lot.py#L13-L38
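A hedged example of how the lot route above might be called (the lot id and the `route` client instance are invented); every keyword left True is serialised as a 'true' query-string flag.

lot_info = route.lot(2790, comments=False, votes=False)
# -> GET lot/2790 with user=true&dependencies=true appended (parameter order may vary)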
246,697
fred49/linshare-api
linshareapi/user/threadentries.py
WorkgroupContent.head
def head(self, wg_uuid, uuid): """ Get one workgroup node.""" url = "%(base)s/%(wg_uuid)s/nodes/%(uuid)s" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid, 'uuid': uuid } # return self.core.head(url) try: # workaround return self.core.get(url) except LinShareException: return False
python
def head(self, wg_uuid, uuid): """ Get one workgroup node.""" url = "%(base)s/%(wg_uuid)s/nodes/%(uuid)s" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid, 'uuid': uuid } # return self.core.head(url) try: # workaround return self.core.get(url) except LinShareException: return False
[ "def", "head", "(", "self", ",", "wg_uuid", ",", "uuid", ")", ":", "url", "=", "\"%(base)s/%(wg_uuid)s/nodes/%(uuid)s\"", "%", "{", "'base'", ":", "self", ".", "local_base_url", ",", "'wg_uuid'", ":", "wg_uuid", ",", "'uuid'", ":", "uuid", "}", "# return self.core.head(url)", "try", ":", "# workaround", "return", "self", ".", "core", ".", "get", "(", "url", ")", "except", "LinShareException", ":", "return", "False" ]
Get one workgroup node.
[ "Get", "one", "workgroup", "node", "." ]
be646c25aa8ba3718abb6869c620b157d53d6e41
https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/user/threadentries.py#L159-L171
246,698
fred49/linshare-api
linshareapi/user/threadentries.py
WorkgroupContent.list
def list(self, wg_uuid, parent=None, flat=False, node_types=None): """ Get a list of workgroup nodes.""" url = "%(base)s/%(wg_uuid)s/nodes" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid } param = [] if parent: # I use only the last folder uuid, the first ones are not really useful if isinstance(parent, (list,)): if len(parent) >= 1: parent = parent[-1] param.append(("parent", parent)) if flat: param.append(("flat", True)) if node_types: for node_type in node_types: param.append(("type", node_type)) encode = urllib.urlencode(param) if encode: url += "?" url += encode return self.core.list(url)
python
def list(self, wg_uuid, parent=None, flat=False, node_types=None): """ Get a list of workgroup nodes.""" url = "%(base)s/%(wg_uuid)s/nodes" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid } param = [] if parent: # I use only the last folder uuid, the first ones are not really useful if isinstance(parent, (list,)): if len(parent) >= 1: parent = parent[-1] param.append(("parent", parent)) if flat: param.append(("flat", True)) if node_types: for node_type in node_types: param.append(("type", node_type)) encode = urllib.urlencode(param) if encode: url += "?" url += encode return self.core.list(url)
[ "def", "list", "(", "self", ",", "wg_uuid", ",", "parent", "=", "None", ",", "flat", "=", "False", ",", "node_types", "=", "None", ")", ":", "url", "=", "\"%(base)s/%(wg_uuid)s/nodes\"", "%", "{", "'base'", ":", "self", ".", "local_base_url", ",", "'wg_uuid'", ":", "wg_uuid", "}", "param", "=", "[", "]", "if", "parent", ":", "# I use only the last folder uuid, the first ones are not really useful", "if", "isinstance", "(", "parent", ",", "(", "list", ",", ")", ")", ":", "if", "len", "(", "parent", ")", ">=", "1", ":", "parent", "=", "parent", "[", "-", "1", "]", "param", ".", "append", "(", "(", "\"parent\"", ",", "parent", ")", ")", "if", "flat", ":", "param", ".", "append", "(", "(", "\"flat\"", ",", "True", ")", ")", "if", "node_types", ":", "for", "node_type", "in", "node_types", ":", "param", ".", "append", "(", "(", "\"type\"", ",", "node_type", ")", ")", "encode", "=", "urllib", ".", "urlencode", "(", "param", ")", "if", "encode", ":", "url", "+=", "\"?\"", "url", "+=", "encode", "return", "self", ".", "core", ".", "list", "(", "url", ")" ]
Get a list of workgroup nodes.
[ "Get", "a", "list", "of", "workgroup", "nodes", "." ]
be646c25aa8ba3718abb6869c620b157d53d6e41
https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/user/threadentries.py#L175-L197
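A hedged sketch for the node listing above (`api`, the UUIDs and the node types are illustrative); note that when `parent` is a list only its last element is sent.

nodes = api.list(
    wg_uuid='wg-uuid',
    parent=['root-folder-uuid', 'sub-folder-uuid'],   # only 'sub-folder-uuid' is used
    node_types=['FOLDER', 'DOCUMENT'],
)
# -> GET .../wg-uuid/nodes?parent=sub-folder-uuid&type=FOLDER&type=DOCUMENT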
246,699
fred49/linshare-api
linshareapi/user/threadentries.py
WorkgroupFolders.update
def update(self, data): """ Update meta of one document.""" self.debug(data) self._check(data) wg_uuid = data.get('workGroup') self.log.debug("wg_uuid : %s ", wg_uuid) uuid = data.get('uuid') url = "%(base)s/%(wg_uuid)s/nodes/%(uuid)s" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid, 'uuid': uuid } return self.core.update(url, data)
python
def update(self, data): """ Update meta of one document.""" self.debug(data) self._check(data) wg_uuid = data.get('workGroup') self.log.debug("wg_uuid : %s ", wg_uuid) uuid = data.get('uuid') url = "%(base)s/%(wg_uuid)s/nodes/%(uuid)s" % { 'base': self.local_base_url, 'wg_uuid': wg_uuid, 'uuid': uuid } return self.core.update(url, data)
[ "def", "update", "(", "self", ",", "data", ")", ":", "self", ".", "debug", "(", "data", ")", "self", ".", "_check", "(", "data", ")", "wg_uuid", "=", "data", ".", "get", "(", "'workGroup'", ")", "self", ".", "log", ".", "debug", "(", "\"wg_uuid : %s \"", ",", "wg_uuid", ")", "uuid", "=", "data", ".", "get", "(", "'uuid'", ")", "url", "=", "\"%(base)s/%(wg_uuid)s/nodes/%(uuid)s\"", "%", "{", "'base'", ":", "self", ".", "local_base_url", ",", "'wg_uuid'", ":", "wg_uuid", ",", "'uuid'", ":", "uuid", "}", "return", "self", ".", "core", ".", "update", "(", "url", ",", "data", ")" ]
Update meta of one document.
[ "Update", "meta", "of", "one", "document", "." ]
be646c25aa8ba3718abb6869c620b157d53d6e41
https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/user/threadentries.py#L321-L333
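A hedged sketch of the update call above (`api`, the UUIDs and the renamed field are invented): the payload must carry both the workGroup and node uuid, which are read to build the URL, and the whole dict is sent as the update body.

api.update({
    'workGroup': 'wg-uuid',
    'uuid': 'folder-node-uuid',
    'name': 'renamed-folder',     # hypothetical field being modified
})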