sentence1 (stringlengths 52–3.87M) | sentence2 (stringlengths 1–47.2k) | label (stringclasses 1) |
---|---|---|
def filter_module(module, filter_type):
""" filter functions or variables from import module
@params
module: imported module
filter_type: "function" or "variable"
"""
filter_type = ModuleUtils.is_function if filter_type == "function" else ModuleUtils.is_variable
module_functions_dict = dict(filter(filter_type, vars(module).items()))
return module_functions_dict | filter functions or variables from an imported module
@params
module: imported module
filter_type: "function" or "variable" | entailment |
def search_conf_item(start_path, item_type, item_name):
""" search expected function or variable recursive upward
@param
start_path: search start path
item_type: "function" or "variable"
item_name: function name or variable name
e.g.
search_conf_item('C:/Users/RockFeng/Desktop/s/preference.py','function','test_func')
"""
dir_path = os.path.dirname(os.path.abspath(start_path))
target_file = os.path.join(dir_path, "preference.py")
if os.path.isfile(target_file):
imported_module = ModuleUtils.get_imported_module_from_file(target_file)
items_dict = ModuleUtils.filter_module(imported_module, item_type)
if item_name in items_dict:
return items_dict[item_name]
else:
return ModuleUtils.search_conf_item(dir_path, item_type, item_name)
if dir_path == start_path:
# system root path
err_msg = "'{}' not found in recursive upward path!".format(item_name)
if item_type == "function":
raise p_exception.FunctionNotFound(err_msg)
else:
raise p_exception.VariableNotFound(err_msg)
return ModuleUtils.search_conf_item(dir_path, item_type, item_name) | search for the expected function or variable recursively upward
@param
start_path: search start path
item_type: "function" or "variable"
item_name: function name or variable name
e.g.
search_conf_item('C:/Users/RockFeng/Desktop/s/preference.py','function','test_func') | entailment |
def find_data_files(source,target,patterns,isiter=False):
"""Locates the specified data-files and returns the matches;
filesystem tree for setup's data_files in setup.py
Usage:
data_files = find_data_files(r"C:\Python27\Lib\site-packages\numpy\core","numpy/core",["*.dll","*.pyd"])
data_files = find_data_files(r"d:\auto\buffer\test\test","buffer/test/test",["*"],True)
:param source -a full path directory which you want to find data from
:param target -a relative path directory which you want to pack data to
:param patterns -glob patterns, such as "*dll", "*pyd" etc.
:param isiter - True/False, will traverse the path recursively if True when patterns equal ["*"]
"""
if glob.has_magic(source) or glob.has_magic(target):
raise ValueError("Magic not allowed in src, target")
ret = {}
for pattern in patterns:
pattern = os.path.join(source,pattern)
for filename in glob.glob(pattern):
if os.path.isfile(filename):
targetpath = os.path.join(target,os.path.relpath(filename,source))
path = os.path.dirname(targetpath)
ret.setdefault(path,[]).append(filename)
elif isiter and os.path.isdir(filename):
source2 = os.path.join(source,filename)
targetpath2 = "%s/%s" %(target,os.path.basename(filename))
# iter_target = os.path.dirname(targetpath2)
ret.update(SetupUtils.find_data_files(source2,targetpath2,patterns,isiter))
return sorted(ret.items()) | Locates the specified data-files and returns the matches as a
filesystem tree for setup's data_files in setup.py
Usage:
data_files = find_data_files(r"C:\Python27\Lib\site-packages\numpy\core","numpy/core",["*.dll","*.pyd"])
data_files = find_data_files(r"d:\auto\buffer\test\test","buffer/test/test",["*"],True)
:param source -a full path directory which you want to find data from
:param target -a relative path directory which you want to pack data to
:param patterns -glob patterns, such as "*dll", "*pyd" etc.
:param isiter - True/False, will traverse the path recursively if True when patterns equal ["*"] | entailment |
def convert_to_order_dict(map_list):
""" convert mapping in list to ordered dict
@param (list) map_list
[
{"a": 1},
{"b": 2}
]
@return (OrderedDict)
OrderedDict({
"a": 1,
"b": 2
})
"""
ordered_dict = OrderedDict()
for map_dict in map_list:
ordered_dict.update(map_dict)
return ordered_dict | convert mapping in list to ordered dict
@param (list) map_list
[
{"a": 1},
{"b": 2}
]
@return (OrderedDict)
OrderedDict({
"a": 1,
"b": 2
}) | entailment |
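A quick usage sketch of the helper above (restated so it runs standalone); note that later mappings overwrite earlier keys, which the docstring example does not show:

from collections import OrderedDict

def convert_to_order_dict(map_list):
    ordered_dict = OrderedDict()
    for map_dict in map_list:
        ordered_dict.update(map_dict)
    return ordered_dict

print(convert_to_order_dict([{"a": 1}, {"b": 2}, {"a": 3}]))
# OrderedDict([('a', 3), ('b', 2)])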
def get_value_from_cfg(cfg_file):
''' initialize the configuration with the file that you specify
Sample usage:
config = get_value_from_cfg()
return:
return a dict -->config[section][option] such as config["twsm"]["dut_ip"]
'''
if not os.path.isfile(cfg_file):
return
cfg = {}
config = ConfigParser.RawConfigParser()
try:
config.read(cfg_file)
except Exception as e:
# raise Exception("\n\tcommon exception 1.2: Not a well format configuration file. error: '%s'" %(e))
return
for section in config.sections():
cfg[section] = {}
for option in config.options(section):
cfg[section][option]=config.get(section,option)
return cfg | initialize the configuration with the file that you specify
Sample usage:
config = get_value_from_cfg()
return:
return a dict -->config[section][option] such as config["twsm"]["dut_ip"] | entailment |
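A hedged Python 3 sketch of the same idea (the snippet above targets Python 2's ConfigParser module; configparser is the Python 3 spelling, and the section/option names below are illustrative only):

import configparser

def cfg_to_dict(text):
    # parse INI text and flatten it into {section: {option: value}}
    config = configparser.RawConfigParser()
    config.read_string(text)
    return {section: dict(config.items(section)) for section in config.sections()}

sample = "[twsm]\ndut_ip = 192.0.2.10\n"
print(cfg_to_dict(sample))  # {'twsm': {'dut_ip': '192.0.2.10'}}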
def get_exception_error():
''' Get the exception info
Sample usage:
try:
raise Exception("asdfsdfsdf")
except:
print common.get_exception_error()
Return:
return the exception information.
'''
error_message = ""
for i in range(len(inspect.trace())):
error_line = u"""
File: %s - [%s]
Function: %s
Statement: %s
-""" % (inspect.trace()[i][1], inspect.trace()[i][2], inspect.trace()[i][3], inspect.trace()[i][4])
error_message = "%s%s" % (error_message, error_line)
error_message = u"""Error!\n%s\n\t%s\n\t%s\n-------------------------------------------------------------------------------------------\n\n""" % (error_message,sys.exc_info()[0], sys.exc_info()[1])
return error_message | Get the exception info
Sample usage:
try:
raise Exception("asdfsdfsdf")
except:
print common.get_exception_error()
Return:
return the exception information. | entailment |
def echo(transferred, toBeTransferred, suffix=''):
''' usage:
for i in range(101):
ProgressBarUtils.echo(i,100)
'''
bar_len = 60
rate = transferred/float(toBeTransferred)
filled_len = int(round(bar_len * rate))
_percents = "%s%s" %(round(100.0 * rate, 1), "%")
end_str = "\r"
_bar = '=' * filled_len + '-' * (bar_len - filled_len)
print("[%s] %s ...%s%s" %(_bar, _percents, suffix, end_str)) | usage:
for i in range(101):
ProgressBarUtils.echo(i,100) | entailment |
def echo_size(self, transferred=1, status=None):
'''Sample usage:
f=lambda x,y:x+y
ldata = range(10)
toBeTransferred = reduce(f,range(10))
progress = ProgressBarUtils("refresh", toBeTransferred=toBeTransferred, unit="KB", chunk_size=1.0, run_status="ๆญฃๅจไธ่ฝฝ", fin_status="ไธ่ฝฝๅฎๆ")
import time
for i in ldata:
time.sleep(0.2)
progress.echo_size(transferred=i)
'''
self.transferred += transferred
# if status is not None:
self.status = status or self.status
end_str = "\r"
if self.transferred == self.toBeTransferred:
end_str = '\n'
self.status = status or self.fin_status
print(self.__get_info() + end_str) | Sample usage:
f=lambda x,y:x+y
ldata = range(10)
toBeTransferred = reduce(f,range(10))
progress = ProgressBarUtils("refresh", toBeTransferred=toBeTransferred, unit="KB", chunk_size=1.0, run_status="ๆญฃๅจไธ่ฝฝ", fin_status="ไธ่ฝฝๅฎๆ")
import time
for i in ldata:
time.sleep(0.2)
progress.echo_size(transferred=i) | entailment |
def echo_percent(self,transferred=1, status=None):
'''Sample usage:
f=lambda x,y:x+y
ldata = range(10)
toBeTransferred = reduce(f,range(10))
import time
progress = ProgressBarUtils("viewbar", toBeTransferred=toBeTransferred, run_status="ๆญฃๅจไธ่ฝฝ", fin_status="ไธ่ฝฝๅฎๆ")
for i in ldata:
time.sleep(0.1)
progress.echo_percent(i)
'''
self.transferred += transferred
self.status = status or self.status
end_str = "\r"
if self.transferred == self.toBeTransferred:
end_str = '\n'
self.status = status or self.fin_status
print(self.__get_bar() + end_str) | Sample usage:
f=lambda x,y:x+y
ldata = range(10)
toBeTransferred = reduce(f,range(10))
import time
progress = ProgressBarUtils("viewbar", toBeTransferred=toBeTransferred, run_status="ๆญฃๅจไธ่ฝฝ", fin_status="ไธ่ฝฝๅฎๆ")
for i in ldata:
time.sleep(0.1)
progress.echo_percent(i) | entailment |
def start_service(addr, n):
""" Start a service """
s = Service(addr)
s.register('add', lambda x, y: x + y)
started = time.time()
for _ in range(n):
s.process()
duration = time.time() - started
time.sleep(0.1)
print('Service stats:')
util.print_stats(n, duration)
return | Start a service | entailment |
def bench(client, n):
""" Benchmark n requests """
pairs = [(x, x + 1) for x in range(n)]
started = time.time()
for pair in pairs:
res, err = client.call('add', *pair)
# assert err is None
duration = time.time() - started
print('Client stats:')
util.print_stats(n, duration) | Benchmark n requests | entailment |
def start_service(addr, n):
""" Start a service """
s = Subscriber(addr)
s.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, 'test')
started = time.time()
for _ in range(n):
msg = s.socket.recv()
s.socket.close()
duration = time.time() - started
print('Raw SUB service stats:')
util.print_stats(n, duration)
return | Start a service | entailment |
def shell_escape(s):
r"""Given bl"a, returns "bl\\"a".
"""
if isinstance(s, PosixPath):
s = unicode_(s)
elif isinstance(s, bytes):
s = s.decode('utf-8')
if not s or any(c not in safe_shell_chars for c in s):
return '"%s"' % (s.replace('\\', '\\\\')
.replace('"', '\\"')
.replace('`', '\\`')
.replace('$', '\\$'))
else:
return s | r"""Given bl"a, returns "bl\\"a". | entailment |
def projects(lancet, query):
"""List Harvest projects, optionally filtered with a regexp."""
projects = lancet.timer.projects()
if query:
regexp = re.compile(query, flags=re.IGNORECASE)
def match(project):
match = regexp.search(project['name'])
if match is None:
return False
project['match'] = match
return True
projects = (p for p in projects if match(p))
for project in sorted(projects, key=lambda p: p['name'].lower()):
name = project['name']
if 'match' in project:
m = project['match']
s, e = m.start(), m.end()
match = click.style(name[s:e], fg='green')
name = name[:s] + match + name[e:]
click.echo('{:>9d} {} {}'.format(
project['id'], click.style('โฃ', fg='yellow'), name)) | List Harvest projects, optionally filtered with a regexp. | entailment |
def tasks(lancet, project_id):
"""List Harvest tasks for the given project ID."""
for task in lancet.timer.tasks(project_id):
click.echo('{:>9d} {} {}'.format(
task['id'], click.style('โฃ', fg='yellow'), task['name'])) | List Harvest tasks for the given project ID. | entailment |
def get_long_description():
""" Retrieve the long description from DESCRIPTION.rst """
here = os.path.abspath(os.path.dirname(__file__))
with copen(os.path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as description:
return description.read() | Retrieve the long description from DESCRIPTION.rst | entailment |
def flatten(nested_list: list) -> list:
"""Flattens a list, ignore all the lambdas."""
return list(sorted(filter(lambda y: y is not None,
list(map(lambda x: (nested_list.extend(x) # noqa: T484
if isinstance(x, list) else x),
nested_list))))) | Flattens a list, ignore all the lambdas. | entailment |
def get_py_files(dir_name: str) -> list:
"""Get all .py files."""
return flatten([
x for x in
[["{0}/{1}".format(path, f) for f in files if f.endswith(".py")]
for path, _, files in os.walk(dir_name)
if not path.startswith("./build")] if x
]) | Get all .py files. | entailment |
def exit(self) -> None:
"""Raise SystemExit with correct status code and output logs."""
total = sum(len(logs) for logs in self.logs.values())
if self.json:
self.logs['total'] = total
print(json.dumps(self.logs, indent=self.indent))
else:
for name, log in self.logs.items():
if not log or self.parser[name].as_bool("quiet"):
continue
print("[[{0}]]".format(name))
getattr(snekchek.format, name + "_format")(log)
print("\n")
print("-" * 30)
print("Total:", total)
sys.exit(self.status_code) | Raise SystemExit with correct status code and output logs. | entailment |
def run_linter(self, linter) -> None:
"""Run a checker class"""
self.current = linter.name
if (linter.name not in self.parser["all"].as_list("linters")
or linter.base_pyversion > sys.version_info): # noqa: W503
return
if any(x not in self.installed for x in linter.requires_install):
raise ModuleNotInstalled(linter.requires_install)
linter.add_output_hook(self.out_func)
linter.set_config(self.fn, self.parser[linter.name])
linter.run(self.files)
self.status_code = self.status_code or linter.status_code | Run a checker class | entailment |
def read_rcfile():
"""
Try to read a rcfile from a list of paths
"""
files = [
'{}/.millipederc'.format(os.environ.get('HOME')),
'/usr/local/etc/millipederc',
'/etc/millipederc',
]
for filepath in files:
if os.path.isfile(filepath):
with open(filepath) as rcfile:
return parse_rcfile(rcfile)
return {} | Try to read a rcfile from a list of paths | entailment |
def parse_rcfile(rcfile):
"""
Parses rcfile
Invalid lines are ignored with a warning
"""
def parse_bool(value):
"""Parse boolean string"""
value = value.lower()
if value in ['yes', 'true']:
return True
elif value in ['no', 'false']:
return False
else:
raise ValueError('''Can't parse {}'''.format(value))
valid_keys = {
'size': int,
'comment': str,
'template': str,
'reverse': parse_bool,
'opposite': parse_bool,
'position': int,
}
params = {}
for linenum, line in enumerate(rcfile):
line = line.strip()
if not line or line[0] == '#':
continue
pos = line.find(' ')
key = line[:pos]
value = line[pos:].strip()
if key in valid_keys.keys():
try:
params[key] = valid_keys[key](value)
except ValueError:
print('Ignoring line {} from rcfile'.format(linenum + 1),
file=sys.stderr)
return params | Parses rcfile
Invalid lines are ignored with a warning | entailment |
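A hedged usage sketch, feeding parse_rcfile an in-memory file; any iterable of lines works, and the keys shown are the ones accepted by valid_keys above.

import io

# assumes the parse_rcfile function above is in scope
rc_text = io.StringIO(
    "# millipede defaults\n"
    "size 20\n"
    "comment hello\n"
    "reverse yes\n"
)
print(parse_rcfile(rc_text))
# {'size': 20, 'comment': 'hello', 'reverse': True}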
def compute_settings(args, rc_settings):
"""
Merge arguments and rc_settings.
"""
settings = {}
for key, value in args.items():
if key in ['reverse', 'opposite']:
settings[key] = value ^ rc_settings.get(key, False)
else:
settings[key] = value or rc_settings.get(key)
if not settings['size']:
settings['size'] = DEFAULT_SIZE
return settings | Merge arguments and rc_settings. | entailment |
def millipede(size, comment=None, reverse=False, template='default', position=0, opposite=False):
"""
Output the millipede
"""
padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3]
padding_suite_length = len(padding_offsets)
head_padding_extra_offset = 2
if opposite:
padding_offsets.reverse()
position = position or 0
templates = {
'frozen': {'bodyr': '╔═(❄❄❄)═╗', 'body': '╚═(❄❄❄)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'love': {'bodyr': '╔═(♥♥♥)═╗', 'body': '╚═(♥♥♥)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'corporate': {'bodyr': '╔═(©©©)═╗', 'body': '╚═(©©©)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'musician': {'bodyr': '╔═(♫♩♬)═╗', 'body': '╚═(♫♩♬)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'bocal': {'bodyr': '╔═(🐟🐟🐟)═╗', 'body': '╚═(🐟🐟🐟)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'ascii': {'bodyr': '|=(###)=|', 'body': '|=(###)=|',
'headr': '/⊙ ⊙\\', 'head': '\\⊙ ⊙/'},
'default': {'bodyr': '╔═(███)═╗', 'body': '╚═(███)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'inception': {'bodyr': '╔═(🐛🐛🐛)═╗', 'body': '╚═(🐛🐛🐛)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'humancentipede': {'bodyr': '╔═(😷😷😷)═╗', 'body': '╚═(😷😷😷)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
'heart': {'bodyr': '╔═(❤️❤️❤️)═╗', 'body': '╚═(❤️❤️❤️)═╝',
'headr': '╔⊙ ⊙╗', 'head': '╚⊙ ⊙╝'},
}
template = templates.get(template, templates['default'])
head = "{}{}\n".format(
" " * (padding_offsets[position % padding_suite_length] + head_padding_extra_offset),
template['headr'] if reverse else template['head']
)
body_lines = [
"{}{}\n".format(
" " * padding_offsets[(x + position) % padding_suite_length],
template['bodyr'] if reverse else template['body']
)
for x in range(size)
]
if reverse:
body_lines.reverse()
body = "".join(body_lines)
output = ""
if reverse:
output += body + head
if comment:
output += "\n" + comment + "\n"
else:
if comment:
output += comment + "\n\n"
output += head + body
return output | Output the millipede | entailment |
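A usage sketch for the generator above, assuming the millipede() function is in scope; the 'ascii' template avoids the wide Unicode glyphs.

# assumes the millipede() function above is importable
print(millipede(3, comment='hello', template='ascii'))
# prints the comment, a blank line, one head line and three body segments
# indented in a wave pattern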
def api_post(message, url, name, http_data=None, auth=None):
""" Send `message` as `name` to `url`.
You can specify extra variables in `http_data`
"""
try:
import requests
except ImportError:
print('requests is required to do api post.', file=sys.stderr)
sys.exit(1)
data = {name : message}
if http_data:
for var in http_data:
key, value = var.split('=')
data[key] = value
response = requests.post(
url,
data=data,
auth=auth
)
if response.status_code != 200:
raise RuntimeError('Unable to post data') | Send `message` as `name` to `url`.
You can specify extra variables in `http_data` | entailment |
def main():
"""
Entry point
"""
rc_settings = read_rcfile()
parser = ArgumentParser(description='Millipede generator')
parser.add_argument('-s', '--size',
type=int,
nargs="?",
help='the size of the millipede')
parser.add_argument('-c', '--comment',
type=str,
help='the comment')
parser.add_argument('-v', '--version',
action='version',
version=__version__)
parser.add_argument('-r', '--reverse',
action='store_true',
help='reverse the millipede')
parser.add_argument('-t', '--template',
help='customize your millipede')
parser.add_argument('-p', '--position',
type=int,
help='move your millipede')
parser.add_argument('-o', '--opposite',
action='store_true',
help='go the opposite direction')
parser.add_argument(
'--http-host',
metavar="The http server to send the data",
help='Send the millipede via an http post request'
)
parser.add_argument(
'--http-auth',
metavar='user:pass',
help='Used to authenticate to the API ',
default=os.environ.get('HTTP_AUTH')
)
parser.add_argument(
'--http-data',
metavar='key=value',
nargs='*',
help='Add additional HTTP POST data'
)
parser.add_argument(
'--http-name',
metavar='name',
help='The json variable name that will contain the millipede'
)
args = parser.parse_args()
settings = compute_settings(vars(args), rc_settings)
out = millipede(
settings['size'],
comment=settings['comment'],
reverse=settings['reverse'],
template=settings['template'],
position=settings['position'],
opposite=settings['opposite']
)
if args.http_host:
if args.http_auth:
try:
login, passwd = args.http_auth.split(':')
except ValueError:
parser.error(
"Credentials should be a string like "
"`user:pass'"
)
else:
login = None
passwd = None
api_post(
out,
args.http_host,
args.http_name,
http_data=args.http_data,
auth=(login, passwd)
)
print(out, end='') | Entry point | entailment |
def read_long_description(readme_file):
""" Read package long description from README file """
try:
import pypandoc
except (ImportError, OSError) as exception:
print('No pypandoc or pandoc: %s' % (exception,))
if sys.version_info.major == 3:
handle = open(readme_file, encoding='utf-8')
else:
handle = open(readme_file)
long_description = handle.read()
handle.close()
return long_description
else:
return pypandoc.convert(readme_file, 'rst') | Read package long description from README file | entailment |
def content_from_path(path, encoding='utf-8'):
"""Return the content of the specified file as a string.
This function also supports loading resources from packages.
"""
if not os.path.isabs(path) and ':' in path:
package, path = path.split(':', 1)
content = resource_string(package, path)
else:
path = os.path.expanduser(path)
with open(path, 'rb') as fh:
content = fh.read()
return content.decode(encoding) | Return the content of the specified file as a string.
This function also supports loading resources from packages. | entailment |
def execute(self, method, args, ref):
""" Execute the method with args """
response = {'result': None, 'error': None, 'ref': ref}
fun = self.methods.get(method)
if not fun:
response['error'] = 'Method `{}` not found'.format(method)
else:
try:
response['result'] = fun(*args)
except Exception as exception:
logging.error(exception, exc_info=1)
response['error'] = str(exception)
return response | Execute the method with args | entailment |
def register(self, name, fun, description=None):
""" Register function on this service """
self.methods[name] = fun
self.descriptions[name] = description | Register function on this service | entailment |
def parse(cls, payload):
""" Parse client request """
try:
method, args, ref = payload
except Exception as exception:
raise RequestParseError(exception)
else:
return method, args, ref | Parse client request | entailment |
def process(self):
""" Receive data from socket and process request """
response = None
try:
payload = self.receive()
method, args, ref = self.parse(payload)
response = self.execute(method, args, ref)
except AuthenticateError as exception:
logging.error(
'Service error while authenticating request: {}'
.format(exception), exc_info=1)
except AuthenticatorInvalidSignature as exception:
logging.error(
'Service error while authenticating request: {}'
.format(exception), exc_info=1)
except DecodeError as exception:
logging.error(
'Service error while decoding request: {}'
.format(exception), exc_info=1)
except RequestParseError as exception:
logging.error(
'Service error while parsing request: {}'
.format(exception), exc_info=1)
else:
logging.debug('Service received payload: {}'.format(payload))
if response:
self.send(response)
else:
self.send('') | Receive data from socket and process request | entailment |
def build_payload(cls, method, args):
""" Build the payload to be sent to a `Responder` """
ref = str(uuid.uuid4())
return (method, args, ref) | Build the payload to be sent to a `Responder` | entailment |
def call(self, method, *args):
""" Make a call to a `Responder` and return the result """
payload = self.build_payload(method, args)
logging.debug('* Client will send payload: {}'.format(payload))
self.send(payload)
res = self.receive()
assert payload[2] == res['ref']
return res['result'], res['error'] | Make a call to a `Responder` and return the result | entailment |
def load(filepath=None, filecontent=None):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as handle:
filecontent = handle.read().decode('utf-8')
configs = json.loads(filecontent)
conf.update(configs.items())
return conf | Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string | entailment |
def run_main(args: argparse.Namespace, do_exit=True) -> None:
"""Runs the checks and exits.
To extend this tool, use this function and set do_exit to False
to get returned the status code.
"""
if args.init:
generate()
return None # exit after generate instead of starting to lint
handler = CheckHandler(
file=args.config_file, out_json=args.json, files=args.files)
for style in get_stylers():
handler.run_linter(style())
for linter in get_linters():
handler.run_linter(linter())
for security in get_security():
handler.run_linter(security())
for tool in get_tools():
tool = tool()
# Only run pypi if everything else passed
if tool.name == "pypi" and handler.status_code != 0:
continue
handler.run_linter(tool)
if do_exit:
handler.exit()
return handler.status_code | Runs the checks and exits.
To extend this tool, use this function and set do_exit to False
to get returned the status code. | entailment |
def main() -> None:
"""Main entry point for console commands."""
parser = argparse.ArgumentParser()
parser.add_argument(
"--json",
help="output in JSON format",
action="store_true",
default=False)
parser.add_argument(
"--config-file", help="Select config file to use", default=".snekrc")
parser.add_argument(
'files',
metavar='file',
nargs='*',
default=[],
help='Files to run checks against')
parser.add_argument(
"--init", help="generate snekrc", action="store_true", default=False)
args = parser.parse_args()
run_main(args) | Main entry point for console commands. | entailment |
def get_session(user_agent=None, user_agent_config_yaml=None, user_agent_lookup=None, **kwargs):
# type: (Optional[str], Optional[str], Optional[str], Any) -> requests.Session
"""Set up and return Session object that is set up with retrying. Requires either global user agent to be set or
appropriate user agent parameter(s) to be completed.
Args:
user_agent (Optional[str]): User agent string. HDXPythonUtilities/X.X.X- is prefixed.
user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml.
user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied.
**kwargs: See below
auth (Tuple[str, str]): Authorisation information in tuple form (user, pass) OR
basic_auth (str): Authorisation information in basic auth string form (Basic xxxxxxxxxxxxxxxx) OR
basic_auth_file (str): Path to file containing authorisation information in basic auth string form (Basic xxxxxxxxxxxxxxxx)
extra_params_dict (Dict): Extra parameters to put on end of url as a dictionary OR
extra_params_json (str): Path to JSON file containing extra parameters to put on end of url OR
extra_params_yaml (str): Path to YAML file containing extra parameters to put on end of url
extra_params_lookup (str): Lookup key for parameters. If not given assumes parameters are at root of the dict.
status_forcelist (iterable): HTTP statuses for which to force retry. Defaults to [429, 500, 502, 503, 504].
method_whitelist (iterable): HTTP methods for which to force retry. Defaults to frozenset(['GET']).
"""
s = requests.Session()
ua = kwargs.get('full_agent')
if not ua:
ua = UserAgent.get(user_agent, user_agent_config_yaml, user_agent_lookup, **kwargs)
s.headers['User-Agent'] = ua
extra_params = os.getenv('EXTRA_PARAMS')
if extra_params is not None:
extra_params_dict = dict()
if '=' in extra_params:
logger.info('Loading extra parameters from environment variable')
for extra_param in extra_params.split(','):
key, value = extra_param.split('=')
extra_params_dict[key] = value
else:
extra_params_found = False
extra_params_dict = kwargs.get('extra_params_dict')
if extra_params_dict:
extra_params_found = True
logger.info('Loading extra parameters from dictionary')
extra_params_json = kwargs.get('extra_params_json', '')
if extra_params_json:
if extra_params_found:
raise SessionError('More than one set of extra parameters given!')
extra_params_found = True
logger.info('Loading extra parameters from: %s' % extra_params_json)
extra_params_dict = load_json(extra_params_json)
extra_params_yaml = kwargs.get('extra_params_yaml', '')
if extra_params_found:
if extra_params_yaml:
raise SessionError('More than one set of extra parameters given!')
else:
if extra_params_yaml:
logger.info('Loading extra parameters from: %s' % extra_params_yaml)
extra_params_dict = load_yaml(extra_params_yaml)
else:
extra_params_dict = dict()
extra_params_lookup = kwargs.get('extra_params_lookup')
if extra_params_lookup:
extra_params_dict = extra_params_dict.get(extra_params_lookup)
if extra_params_dict is None:
raise SessionError('%s does not exist in extra_params!' % extra_params_lookup)
auth_found = False
basic_auth = os.getenv('BASIC_AUTH')
if basic_auth:
logger.info('Loading authorisation from basic_auth environment variable')
auth_found = True
else:
basic_auth = kwargs.get('basic_auth')
if basic_auth:
logger.info('Loading authorisation from basic_auth argument')
auth_found = True
bauth = extra_params_dict.get('basic_auth')
if bauth:
if not auth_found:
basic_auth = bauth
logger.info('Loading authorisation from basic_auth parameter')
auth_found = True
del extra_params_dict['basic_auth']
s.params = extra_params_dict
auth = kwargs.get('auth')
if auth:
if auth_found:
raise SessionError('More than one authorisation given!')
logger.info('Loading authorisation from auth argument')
auth_found = True
basic_auth_file = kwargs.get('basic_auth_file')
if basic_auth_file:
if auth_found:
raise SessionError('More than one authorisation given!')
logger.info('Loading authorisation from: %s' % basic_auth_file)
basic_auth = load_file_to_str(basic_auth_file)
if basic_auth:
auth = decode(basic_auth)
s.auth = auth
status_forcelist = kwargs.get('status_forcelist', [429, 500, 502, 503, 504])
method_whitelist = kwargs.get('method_whitelist', frozenset(['HEAD', 'TRACE', 'GET', 'PUT', 'OPTIONS', 'DELETE']))
retries = Retry(total=5, backoff_factor=0.4, status_forcelist=status_forcelist, method_whitelist=method_whitelist,
raise_on_redirect=True,
raise_on_status=True)
s.mount('http://', HTTPAdapter(max_retries=retries, pool_connections=100, pool_maxsize=100))
s.mount('https://', HTTPAdapter(max_retries=retries, pool_connections=100, pool_maxsize=100))
return s | Set up and return Session object that is set up with retrying. Requires either global user agent to be set or
appropriate user agent parameter(s) to be completed.
Args:
user_agent (Optional[str]): User agent string. HDXPythonUtilities/X.X.X- is prefixed.
user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml.
user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied.
**kwargs: See below
auth (Tuple[str, str]): Authorisation information in tuple form (user, pass) OR
basic_auth (str): Authorisation information in basic auth string form (Basic xxxxxxxxxxxxxxxx) OR
basic_auth_file (str): Path to file containing authorisation information in basic auth string form (Basic xxxxxxxxxxxxxxxx)
extra_params_dict (Dict): Extra parameters to put on end of url as a dictionary OR
extra_params_json (str): Path to JSON file containing extra parameters to put on end of url OR
extra_params_yaml (str): Path to YAML file containing extra parameters to put on end of url
extra_params_lookup (str): Lookup key for parameters. If not given assumes parameters are at root of the dict.
status_forcelist (iterable): HTTP statuses for which to force retry. Defaults to [429, 500, 502, 503, 504].
method_whitelist (iterable): HTTP methods for which to force retry. Defaults to frozenset(['GET']). | entailment |
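A minimal usage sketch under the assumption that passing user_agent alone satisfies the user-agent requirement; 'MyProject/1.0' and the target URL are placeholders, and all other kwargs are optional.

# assumes the get_session function above is importable
session = get_session(user_agent='MyProject/1.0')
response = session.get('https://example.com')  # retried automatically on 429/5xx statuses
print(response.status_code)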
def connect(self):
# type: () -> None
"""
Connect to server
Returns:
None
"""
if self.connection_type.lower() == 'ssl':
self.server = smtplib.SMTP_SSL(host=self.host, port=self.port, local_hostname=self.local_hostname,
timeout=self.timeout, source_address=self.source_address)
elif self.connection_type.lower() == 'lmtp':
self.server = smtplib.LMTP(host=self.host, port=self.port, local_hostname=self.local_hostname,
source_address=self.source_address)
else:
self.server = smtplib.SMTP(host=self.host, port=self.port, local_hostname=self.local_hostname,
timeout=self.timeout, source_address=self.source_address)
self.server.login(self.username, self.password) | Connect to server
Returns:
None | entailment |
def send(self, recipients, subject, text_body, html_body=None, sender=None, **kwargs):
# type: (List[str], str, str, Optional[str], Optional[str], Any) -> None
"""
Send email
Args:
recipients (List[str]): Email recipients
subject (str): Email subject
text_body (str): Plain text email body
html_body (Optional[str]): HTML email body
sender (Optional[str]): Email sender. Defaults to global sender.
**kwargs: See below
mail_options (list): Mail options (see smtplib documentation)
rcpt_options (list): Recipient options (see smtplib documentation)
Returns:
None
"""
if sender is None:
sender = self.sender
v = validate_email(sender, check_deliverability=False) # validate and get info
sender = v['email'] # replace with normalized form
normalised_recipients = list()
for recipient in recipients:
v = validate_email(recipient, check_deliverability=True) # validate and get info
normalised_recipients.append(v['email']) # replace with normalized form
if html_body is not None:
msg = MIMEMultipart('alternative')
part1 = MIMEText(text_body, 'plain')
part2 = MIMEText(html_body, 'html')
msg.attach(part1)
msg.attach(part2)
else:
msg = MIMEText(text_body)
msg['Subject'] = subject
msg['From'] = sender
msg['To'] = ', '.join(normalised_recipients)
# Perform operations via server
self.connect()
self.server.sendmail(sender, normalised_recipients, msg.as_string(), **kwargs)
self.close() | Send email
Args:
recipients (List[str]): Email recipients
subject (str): Email subject
text_body (str): Plain text email body
html_body (Optional[str]): HTML email body
sender (Optional[str]): Email sender. Defaults to global sender.
**kwargs: See below
mail_options (list): Mail options (see smtplib documentation)
rcpt_options (list): Recipient options (see smtplib documentation)
Returns:
None | entailment |
def get_session(db_url):
# type: (str) -> Session
"""Gets SQLAlchemy session given url. Your tables must inherit
from Base in hdx.utilities.database.
Args:
db_url (str): SQLAlchemy url
Returns:
sqlalchemy.orm.session.Session: SQLAlchemy session
"""
engine = create_engine(db_url, poolclass=NullPool, echo=False)
Session = sessionmaker(bind=engine)
Base.metadata.create_all(engine)
return Session() | Gets SQLAlchemy session given url. Your tables must inherit
from Base in hdx.utilities.database.
Args:
db_url (str): SQLAlchemy url
Returns:
sqlalchemy.orm.session.Session: SQLAlchemy session | entailment |
def get_params_from_sqlalchemy_url(db_url):
# type: (str) -> Dict[str,Any]
"""Gets PostgreSQL database connection parameters from SQLAlchemy url
Args:
db_url (str): SQLAlchemy url
Returns:
Dict[str,Any]: Dictionary of database connection parameters
"""
result = urlsplit(db_url)
return {'database': result.path[1:], 'host': result.hostname, 'port': result.port,
'username': result.username, 'password': result.password, 'driver': result.scheme} | Gets PostgreSQL database connection parameters from SQLAlchemy url
Args:
db_url (str): SQLAlchemy url
Returns:
Dict[str,Any]: Dictionary of database connection parameters | entailment |
def get_sqlalchemy_url(database=None, host=None, port=None, username=None, password=None, driver='postgres'):
# type: (Optional[str], Optional[str], Union[int, str, None], Optional[str], Optional[str], str) -> str
"""Gets SQLAlchemy url from database connection parameters
Args:
database (Optional[str]): Database name
host (Optional[str]): Host where database is located
port (Union[int, str, None]): Database port
username (Optional[str]): Username to log into database
password (Optional[str]): Password to log into database
driver (str): Database driver. Defaults to 'postgres'.
Returns:
db_url (str): SQLAlchemy url
"""
strings = ['%s://' % driver]
if username:
strings.append(username)
if password:
strings.append(':%s@' % password)
else:
strings.append('@')
if host:
strings.append(host)
if port is not None:
strings.append(':%d' % int(port))
if database:
strings.append('/%s' % database)
return ''.join(strings) | Gets SQLAlchemy url from database connection parameters
Args:
database (Optional[str]): Database name
host (Optional[str]): Host where database is located
port (Union[int, str, None]): Database port
username (Optional[str]): Username to log into database
password (Optional[str]): Password to log into database
driver (str): Database driver. Defaults to 'postgres'.
Returns:
db_url (str): SQLAlchemy url | entailment |
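A worked example of the string assembly, assuming the function above is in scope; it is the inverse of get_params_from_sqlalchemy_url, and the credentials shown are placeholders.

# assumes the get_sqlalchemy_url function above is importable
url = get_sqlalchemy_url(database='mydb', host='localhost', port=5432,
                         username='user', password='secret')
print(url)  # postgres://user:secret@localhost:5432/mydb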
def wait_for_postgres(database, host, port, username, password):
# type: (Optional[str], Optional[str], Union[int, str, None], Optional[str], Optional[str]) -> None
"""Waits for PostgreSQL database to be up
Args:
database (Optional[str]): Database name
host (Optional[str]): Host where database is located
port (Union[int, str, None]): Database port
username (Optional[str]): Username to log into database
password (Optional[str]): Password to log into database
Returns:
None
"""
connecting_string = 'Checking for PostgreSQL...'
if port is not None:
port = int(port)
while True:
try:
logger.info(connecting_string)
connection = psycopg2.connect(
database=database,
host=host,
port=port,
user=username,
password=password,
connect_timeout=3
)
connection.close()
logger.info('PostgreSQL is running!')
break
except psycopg2.OperationalError:
time.sleep(1) | Waits for PostgreSQL database to be up
Args:
database (Optional[str]): Database name
host (Optional[str]): Host where database is located
port (Union[int, str, None]): Database port
username (Optional[str]): Username to log into database
password (Optional[str]): Password to log into database
Returns:
None | entailment |
def get_unset_cache(self):
"""return : returns a tuple (num_of_not_None_caches, [list of unset caches endpoint])
"""
caches = []
if self._cached_api_global_response is None:
caches.append('global')
if self._cached_api_ticker_response is None:
caches.append('ticker')
return (len(caches), caches) | return : returns a tuple (num_of_not_None_caches, [list of unset cache endpoints]) | entailment |
def dicts_filter(dicts_object, field_to_filter, value_of_filter):
"""This function gets as arguments an array of dicts through the dicts_objects parameter,
then it'll return the dicts whose key field_to_filter holds the value value_of_filter.
"""
lambda_query = lambda value: value[field_to_filter] == value_of_filter
filtered_coin = filter(lambda_query, dicts_object)
selected_coins = list(filtered_coin)
#if not selected_coin: #Empty list, no coin found
# raise AttributeError('attribute %s not found' % attr)
return selected_coins | This function gets as arguments an array of dicts through the dicts_objects parameter,
then it'll return the dicts whose key field_to_filter holds the value value_of_filter. | entailment |
def get_path_for_url(url, folder=None, filename=None, overwrite=False):
# type: (str, Optional[str], Optional[str], bool) -> str
"""Get filename from url and join to provided folder or temporary folder if no folder supplied, ensuring uniqueness
Args:
url (str): URL to download
folder (Optional[str]): Folder to download it to. Defaults to None (temporary folder).
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
overwrite (bool): Whether to overwrite existing file. Defaults to False.
Returns:
str: Path of downloaded file
"""
if not filename:
urlpath = urlsplit(url).path
filename = basename(urlpath)
filename, extension = splitext(filename)
if not folder:
folder = get_temp_dir()
path = join(folder, '%s%s' % (filename, extension))
if overwrite:
try:
remove(path)
except OSError:
pass
else:
count = 0
while exists(path):
count += 1
path = join(folder, '%s%d%s' % (filename, count, extension))
return path | Get filename from url and join to provided folder or temporary folder if no folder supplied, ensuring uniqueness
Args:
url (str): URL to download
folder (Optional[str]): Folder to download it to. Defaults to None (temporary folder).
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
overwrite (bool): Whether to overwrite existing file. Defaults to False.
Returns:
str: Path of downloaded file | entailment |
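A usage sketch, assuming the function above is importable; the uniqueness loop appends a counter when the target already exists, and the URL and folder are placeholders.

# assumes the get_path_for_url function above is importable
path = get_path_for_url('https://example.com/data/report.csv', folder='/tmp')
print(path)  # /tmp/report.csv, or /tmp/report1.csv if that file already exists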
def get_full_url(self, url):
# type: (str) -> str
"""Get full url including any additional parameters
Args:
url (str): URL for which to get full url
Returns:
str: Full url including any additional parameters
"""
request = Request('GET', url)
preparedrequest = self.session.prepare_request(request)
return preparedrequest.url | Get full url including any additional parameters
Args:
url (str): URL for which to get full url
Returns:
str: Full url including any additional parameters | entailment |
def get_url_for_get(url, parameters=None):
# type: (str, Optional[Dict]) -> str
"""Get full url for GET request including parameters
Args:
url (str): URL to download
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
Returns:
str: Full url
"""
spliturl = urlsplit(url)
getparams = OrderedDict(parse_qsl(spliturl.query))
if parameters is not None:
getparams.update(parameters)
spliturl = spliturl._replace(query=urlencode(getparams))
return urlunsplit(spliturl) | Get full url for GET request including parameters
Args:
url (str): URL to download
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
Returns:
str: Full url | entailment |
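A quick check of the query merging, assuming the function above is in scope; parameters passed in are appended to any already present on the url.

# assumes the get_url_for_get function above is importable
print(get_url_for_get('https://example.com/api?a=1', {'b': 2}))
# https://example.com/api?a=1&b=2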
def get_url_params_for_post(url, parameters=None):
# type: (str, Optional[Dict]) -> Tuple[str, Dict]
"""Get full url for POST request and all parameters including any in the url
Args:
url (str): URL to download
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
Returns:
Tuple[str, Dict]: (Full url, parameters)
"""
spliturl = urlsplit(url)
getparams = OrderedDict(parse_qsl(spliturl.query))
if parameters is not None:
getparams.update(parameters)
spliturl = spliturl._replace(query='')
full_url = urlunsplit(spliturl)
return full_url, getparams | Get full url for POST request and all parameters including any in the url
Args:
url (str): URL to download
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
Returns:
Tuple[str, Dict]: (Full url, parameters) | entailment |
def setup(self, url, stream=True, post=False, parameters=None, timeout=None):
# type: (str, bool, bool, Optional[Dict], Optional[float]) -> requests.Response
"""Setup download from provided url returning the response
Args:
url (str): URL to download
stream (bool): Whether to stream download. Defaults to True.
post (bool): Whether to use POST instead of GET. Defaults to False.
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
timeout (Optional[float]): Timeout for connecting to URL. Defaults to None (no timeout).
Returns:
requests.Response: requests.Response object
"""
self.close_response()
self.response = None
try:
if post:
full_url, parameters = self.get_url_params_for_post(url, parameters)
self.response = self.session.post(full_url, data=parameters, stream=stream, timeout=timeout)
else:
self.response = self.session.get(self.get_url_for_get(url, parameters), stream=stream, timeout=timeout)
self.response.raise_for_status()
except Exception as e:
raisefrom(DownloadError, 'Setup of Streaming Download of %s failed!' % url, e)
return self.response | Setup download from provided url returning the response
Args:
url (str): URL to download
stream (bool): Whether to stream download. Defaults to True.
post (bool): Whether to use POST instead of GET. Defaults to False.
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
timeout (Optional[float]): Timeout for connecting to URL. Defaults to None (no timeout).
Returns:
requests.Response: requests.Response object | entailment |
def hash_stream(self, url):
# type: (str) -> str
"""Stream file from url and hash it using MD5. Must call setup method first.
Args:
url (str): URL to download
Returns:
str: MD5 hash of file
"""
md5hash = hashlib.md5()
try:
for chunk in self.response.iter_content(chunk_size=10240):
if chunk: # filter out keep-alive new chunks
md5hash.update(chunk)
return md5hash.hexdigest()
except Exception as e:
raisefrom(DownloadError, 'Download of %s failed in retrieval of stream!' % url, e) | Stream file from url and hash it using MD5. Must call setup method first.
Args:
url (str): URL to download
Returns:
str: MD5 hash of file | entailment |
def stream_file(self, url, folder=None, filename=None, overwrite=False):
# type: (str, Optional[str], Optional[str], bool) -> str
"""Stream file from url and store in provided folder or temporary folder if no folder supplied.
Must call setup method first.
Args:
url (str): URL to download
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
folder (Optional[str]): Folder to download it to. Defaults to None (temporary folder).
overwrite (bool): Whether to overwrite existing file. Defaults to False.
Returns:
str: Path of downloaded file
"""
path = self.get_path_for_url(url, folder, filename, overwrite)
f = None
try:
f = open(path, 'wb')
for chunk in self.response.iter_content(chunk_size=10240):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
return f.name
except Exception as e:
raisefrom(DownloadError, 'Download of %s failed in retrieval of stream!' % url, e)
finally:
if f:
f.close() | Stream file from url and store in provided folder or temporary folder if no folder supplied.
Must call setup method first.
Args:
url (str): URL to download
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
folder (Optional[str]): Folder to download it to. Defaults to None (temporary folder).
overwrite (bool): Whether to overwrite existing file. Defaults to False.
Returns:
str: Path of downloaded file | entailment |
def download_file(self, url, folder=None, filename=None, overwrite=False,
post=False, parameters=None, timeout=None):
# type: (str, Optional[str], Optional[str], bool, bool, Optional[Dict], Optional[float]) -> str
"""Download file from url and store in provided folder or temporary folder if no folder supplied
Args:
url (str): URL to download
folder (Optional[str]): Folder to download it to. Defaults to None.
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
overwrite (bool): Whether to overwrite existing file. Defaults to False.
post (bool): Whether to use POST instead of GET. Defaults to False.
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
timeout (Optional[float]): Timeout for connecting to URL. Defaults to None (no timeout).
Returns:
str: Path of downloaded file
"""
self.setup(url, stream=True, post=post, parameters=parameters, timeout=timeout)
return self.stream_file(url, folder, filename, overwrite) | Download file from url and store in provided folder or temporary folder if no folder supplied
Args:
url (str): URL to download
folder (Optional[str]): Folder to download it to. Defaults to None.
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
overwrite (bool): Whether to overwrite existing file. Defaults to False.
post (bool): Whether to use POST instead of GET. Defaults to False.
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
timeout (Optional[float]): Timeout for connecting to URL. Defaults to None (no timeout).
Returns:
str: Path of downloaded file | entailment |
def download(self, url, post=False, parameters=None, timeout=None):
# type: (str, bool, Optional[Dict], Optional[float]) -> requests.Response
"""Download url
Args:
url (str): URL to download
post (bool): Whether to use POST instead of GET. Defaults to False.
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
timeout (Optional[float]): Timeout for connecting to URL. Defaults to None (no timeout).
Returns:
requests.Response: Response
"""
return self.setup(url, stream=False, post=post, parameters=parameters, timeout=timeout) | Download url
Args:
url (str): URL to download
post (bool): Whether to use POST instead of GET. Defaults to False.
parameters (Optional[Dict]): Parameters to pass. Defaults to None.
timeout (Optional[float]): Timeout for connecting to URL. Defaults to None (no timeout).
Returns:
requests.Response: Response | entailment |
def get_tabular_stream(self, url, **kwargs):
# type: (str, Any) -> tabulator.Stream
"""Get Tabulator stream.
Args:
url (str): URL to download
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
tabulator.Stream: Tabulator Stream object
"""
self.close_response()
file_type = kwargs.get('file_type')
if file_type is not None:
kwargs['format'] = file_type
del kwargs['file_type']
try:
self.response = tabulator.Stream(url, **kwargs)
self.response.open()
return self.response
except TabulatorException as e:
raisefrom(DownloadError, 'Getting tabular stream for %s failed!' % url, e) | Get Tabulator stream.
Args:
url (str): URL to download
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
tabulator.Stream: Tabulator Stream object | entailment |
def get_tabular_rows(self, url, dict_rows=False, **kwargs):
# type: (str, bool, Any) -> Iterator[Dict]
"""Get iterator for reading rows from tabular data. Each row is returned as a dictionary.
Args:
url (str): URL to download
dict_rows (bool): Return dict (requires headers parameter) or list for each row. Defaults to False (list).
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Iterator[Union[List,Dict]]: Iterator where each row is returned as a list or dictionary.
"""
return self.get_tabular_stream(url, **kwargs).iter(keyed=dict_rows) | Get iterator for reading rows from tabular data. Each row is returned as a list or a dictionary. | entailment |
Args:
url (str): URL to download
dict_rows (bool): Return dict (requires headers parameter) or list for each row. Defaults to False (list).
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Iterator[Union[List,Dict]]: Iterator where each row is returned as a list or dictionary. | entailment |
def download_tabular_key_value(self, url, **kwargs):
# type: (str, Any) -> Dict
"""Download 2 column csv from url and return a dictionary of keys (first column) and values (second column)
Args:
url (str): URL to download
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Dict: Dictionary keys (first column) and values (second column)
"""
output_dict = dict()
for row in self.get_tabular_rows(url, **kwargs):
if len(row) < 2:
continue
output_dict[row[0]] = row[1]
return output_dict | Download 2 column csv from url and return a dictionary of keys (first column) and values (second column)
Args:
url (str): URL to download
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Dict: Dictionary keys (first column) and values (second column) | entailment |
def download_tabular_rows_as_dicts(self, url, headers=1, keycolumn=1, **kwargs):
# type: (str, Union[int, List[int], List[str]], int, Any) -> Dict[Dict]
"""Download multicolumn csv from url and return dictionary where keys are first column and values are
dictionaries with keys from column headers and values from columns beneath
Args:
url (str): URL to download
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers. Defaults to 1.
keycolumn (int): Number of column to be used for key. Defaults to 1.
**kwargs:
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Dict[Dict]: Dictionary where keys are first column and values are dictionaries with keys from column
headers and values from columns beneath
"""
kwargs['headers'] = headers
stream = self.get_tabular_stream(url, **kwargs)
output_dict = dict()
headers = stream.headers
key_header = headers[keycolumn - 1]
for row in stream.iter(keyed=True):
first_val = row[key_header]
output_dict[first_val] = dict()
for header in row:
if header == key_header:
continue
else:
output_dict[first_val][header] = row[header]
return output_dict | Download multicolumn csv from url and return dictionary where keys are first column and values are
dictionaries with keys from column headers and values from columns beneath
Args:
url (str): URL to download
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers. Defaults to 1.
keycolumn (int): Number of column to be used for key. Defaults to 1.
**kwargs:
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Dict[Dict]: Dictionary where keys are first column and values are dictionaries with keys from column
headers and values from columns beneath | entailment |
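An illustration of the shape of the result, under the assumption of a hypothetical three-column csv keyed on its first column; values may be type-cast by tabulator, so treat the literals as indicative only.

# hypothetical input CSV at `url`:
#   iso3,name,population
#   AFG,Afghanistan,38000000
#   ALB,Albania,2800000
#
# download_tabular_rows_as_dicts(url, keycolumn=1) would return roughly:
expected = {
    'AFG': {'name': 'Afghanistan', 'population': 38000000},
    'ALB': {'name': 'Albania', 'population': 2800000},
}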
def setup(ctx, force):
"""Wizard to create the user-level configuration file."""
if os.path.exists(USER_CONFIG) and not force:
click.secho(
'An existing configuration file was found at "{}".\n'
.format(USER_CONFIG),
fg='red', bold=True
)
click.secho(
'Please remove it first in order to run the setup wizard or use\n'
'the --force flag to overwrite it.'
)
ctx.exit(1)
click.echo('Address of the issue tracker (your JIRA instance). \n'
'Normally in the form https://<company>.atlassian.net.')
tracker_url = click.prompt('URL')
tracker_user = click.prompt('Username for {}'.format(tracker_url))
click.echo()
click.echo('Address of the time tracker (your Harvest instance). \n'
'Normally in the form https://<company>.harvestapp.com.')
timer_url = click.prompt('URL')
timer_user = click.prompt('Username for {}'.format(timer_url))
click.echo()
config = configparser.ConfigParser()
config.add_section('tracker')
config.set('tracker', 'url', tracker_url)
config.set('tracker', 'username', tracker_user)
config.add_section('harvest')
config.set('harvest', 'url', timer_url)
config.set('harvest', 'username', timer_user)
with open(USER_CONFIG, 'w') as fh:
config.write(fh)
click.secho('Configuration correctly written to "{}".'
.format(USER_CONFIG), fg='green') | Wizard to create the user-level configuration file. | entailment |
def init(ctx, force):
"""Wizard to create a project-level configuration file."""
if os.path.exists(PROJECT_CONFIG) and not force:
click.secho(
'An existing configuration file was found at "{}".\n'
.format(PROJECT_CONFIG),
fg='red', bold=True
)
click.secho(
'Please remove it first in order to run the setup wizard or use\n'
'the --force flag to overwrite it.'
)
ctx.exit(1)
project_key = click.prompt('Project key on the issue tracker')
base_branch = click.prompt('Integration branch', default='master')
virtualenvs = ('.venv', '.env', 'venv', 'env')
for p in virtualenvs:
if os.path.exists(os.path.join(p, 'bin', 'activate')):
venv = p
break
else:
venv = ''
venv_path = click.prompt('Path to virtual environment', default=venv)
project_id = click.prompt('Project ID on Harvest', type=int)
task_id = click.prompt('Task id on Harvest', type=int)
config = configparser.ConfigParser()
config.add_section('lancet')
config.set('lancet', 'virtualenv', venv_path)
config.add_section('tracker')
config.set('tracker', 'default_project', project_key)
config.add_section('harvest')
config.set('harvest', 'project_id', str(project_id))
config.set('harvest', 'task_id', str(task_id))
config.add_section('repository')
config.set('repository', 'base_branch', base_branch)
with open(PROJECT_CONFIG, 'w') as fh:
config.write(fh)
click.secho('\nConfiguration correctly written to "{}".'
.format(PROJECT_CONFIG), fg='green') | Wizard to create a project-level configuration file. | entailment |
def logout(lancet, service):
"""Forget saved passwords for the web services."""
if service:
services = [service]
else:
services = ['tracker', 'harvest']
for service in services:
url = lancet.config.get(service, 'url')
key = 'lancet+{}'.format(url)
username = lancet.config.get(service, 'username')
with taskstatus('Logging out from {}', url) as ts:
if keyring.get_password(key, username):
keyring.delete_password(key, username)
ts.ok('Logged out from {}', url)
else:
ts.ok('Already logged out from {}', url) | Forget saved passwords for the web services. | entailment |
def _services(lancet):
"""List all currently configured services."""
def get_services(config):
for s in config.sections():
if config.has_option(s, 'url'):
if config.has_option(s, 'username'):
yield s
for s in get_services(lancet.config):
click.echo('{}[Logout from {}]'.format(s, lancet.config.get(s, 'url'))) | List all currently configured services. | entailment |
def send_request(self, endpoint='ticker', coin_name=None, **kwargs):
""": param string 'ticker', it's 'ticker' if we want info about coins,
'global' for global market's info.
: param string 'coin_name', specify the name of the coin, if None,
we'll retrieve info about all available coins.
"""
built_url = self._make_url(endpoint, coin_name)
payload = dict(**kwargs)
self._process_request(endpoint, built_url, payload) | :param string 'endpoint': 'ticker' for info about coins,
'global' for global market info.
:param string 'coin_name': name of the coin; if None,
info about all available coins is retrieved. | entailment |
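A rough usage sketch for the method above. The client class and instance name are hypothetical (only send_request/get_response come from the code in this section), and 'convert' is an assumed extra query parameter passed through **kwargs:

api = CoinMarketClient()  # hypothetical wrapper exposing send_request/get_response
api.send_request(endpoint='ticker', coin_name='bitcoin', convert='EUR')
ticker_data = api.get_response('ticker')   # cached list of ticker dicts
global_data = api.get_response('global')   # None unless a 'global' request was cached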
def get_response(self, data_type=None):
"""return json response from APIs converted into python list
: param string 'data_type', if it's None it'll return the avaliable cache,
if we've both global and ticker data, the function will return 'ticker' data,
in that case, data_type should be assigned with 'ticker' or 'global'
"""
if not data_type:
return self.cache.get_response(r_type='ticker') or self.cache.get_response(r_type='global')
elif data_type == 'ticker':
return self.cache.get_response(r_type='ticker')
return self.cache.get_response(r_type='global') | Return the JSON response from the APIs converted into a Python list.
:param string 'data_type': if None, the available cache is returned;
if both global and ticker data are cached, 'ticker' data is returned.
To choose explicitly, set data_type to 'ticker' or 'global'. | entailment |
def iso_639_alpha3(code):
"""Convert a given language identifier into an ISO 639 Part 2 code, such
as "eng" or "deu". This will accept language codes in the two- or three-
letter format, and some language names. If the given string cannot be
converted, ``None`` will be returned.
"""
code = normalize_code(code)
code = ISO3_MAP.get(code, code)
if code in ISO3_ALL:
return code | Convert a given language identifier into an ISO 639 Part 2 code, such
as "eng" or "deu". This will accept language codes in the two- or three-
letter format, and some language names. If the given string cannot be
converted, ``None`` will be returned. | entailment |
def list_to_alpha3(languages, synonyms=True):
"""Parse all the language codes in a given list into ISO 639 Part 2 codes
and optionally expand them with synonyms (i.e. other names for the same
language)."""
codes = set([])
for language in ensure_list(languages):
code = iso_639_alpha3(language)
if code is None:
continue
codes.add(code)
if synonyms:
codes.update(expand_synonyms(code))
return codes | Parse all the language codes in a given list into ISO 639 Part 2 codes
and optionally expand them with synonyms (i.e. other names for the same
language). | entailment |
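A short sketch of how the two language helpers above compose. The comments show the results expected under the documented behaviour; the exact mappings and synonym expansion depend on the bundled data tables:

iso_639_alpha3('en')    # expected to yield 'eng' (two-letter codes are mapped)
iso_639_alpha3('xx')    # unknown identifiers yield None

codes = list_to_alpha3(['en', 'deu', 'not-a-language'])
# unknown entries are skipped; with synonyms=True the set may grow beyond two codes
codes = list_to_alpha3(['en', 'deu'], synonyms=False)   # no synonym expansion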
def pull_request(ctx, base_branch, open_pr, stop_timer):
"""Create a new pull request for this issue."""
lancet = ctx.obj
review_status = lancet.config.get("tracker", "review_status")
remote_name = lancet.config.get("repository", "remote_name")
if not base_branch:
base_branch = lancet.config.get("repository", "base_branch")
# Get the issue
issue = get_issue(lancet)
transition = get_transition(ctx, lancet, issue, review_status)
# Get the working branch
branch = get_branch(lancet, issue, create=False)
with taskstatus("Checking pre-requisites") as ts:
if not branch:
ts.abort("No working branch found")
if lancet.tracker.whoami() not in issue.assignees:
ts.abort("Issue currently not assigned to you")
# TODO: Check mergeability
# TODO: Check remote status (PR does not already exist)
# Push to remote
with taskstatus('Pushing to "{}"', remote_name) as ts:
remote = lancet.repo.lookup_remote(remote_name)
if not remote:
ts.abort('Remote "{}" not found', remote_name)
from ..git import CredentialsCallbacks
remote.push([branch.name], callbacks=CredentialsCallbacks())
ts.ok('Pushed latest changes to "{}"', remote_name)
# Create pull request
with taskstatus("Creating pull request") as ts:
template_path = lancet.config.get("repository", "pr_template")
message = edit_template(template_path, issue=issue)
if not message:
ts.abort("You didn't provide a title for the pull request")
title, body = message.split("\n", 1)
title = title.strip()
if not title:
ts.abort("You didn't provide a title for the pull request")
try:
pr = lancet.scm_manager.create_pull_request(
branch.branch_name, base_branch, title, body.strip("\n")
)
except PullRequestAlreadyExists as e:
pr = e.pull_request
ts.ok("Pull request does already exist at {}", pr.link)
else:
ts.ok("Pull request created at {}", pr.link)
# Update issue
set_issue_status(lancet, issue, review_status, transition)
# TODO: Post to activity stream on JIRA?
# TODO: Post to Slack?
# Stop harvest timer
if stop_timer:
with taskstatus("Pausing harvest timer") as ts:
lancet.timer.pause()
ts.ok("Harvest timer paused")
# Open the pull request page in the browser if requested
if open_pr:
click.launch(pr.link) | Create a new pull request for this issue. | entailment |
def checkout(lancet, force, issue):
"""
Checkout the branch for the given issue.
It is an error if the branch does not exist yet.
"""
issue = get_issue(lancet, issue)
# Get the working branch
branch = get_branch(lancet, issue, create=force)
with taskstatus("Checking out working branch") as ts:
if not branch:
ts.abort("Working branch not found")
lancet.repo.checkout(branch.name)
ts.ok('Checked out "{}"', branch.name) | Checkout the branch for the given issue.
It is an error if the branch does not exist yet. | entailment |
def get_soup(url, downloader=None, user_agent=None, user_agent_config_yaml=None, user_agent_lookup=None, **kwargs):
# type: (str, Download, Optional[str], Optional[str], Optional[str], Any) -> BeautifulSoup
"""
Get BeautifulSoup object for a url. Requires either global user agent to be set or appropriate user agent
parameter(s) to be completed.
Args:
url (str): url to read
downloader (Download): Download object. Defaults to creating a Download object with given user agent values.
user_agent (Optional[str]): User agent string. HDXPythonUtilities/X.X.X- is prefixed.
user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml.
user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied.
Returns:
BeautifulSoup: The BeautifulSoup object for a url
"""
if not downloader:
downloader = Download(user_agent, user_agent_config_yaml, user_agent_lookup, **kwargs)
response = downloader.download(url)
return BeautifulSoup(response.text, 'html.parser') | Get BeautifulSoup object for a url. Requires either global user agent to be set or appropriate user agent
parameter(s) to be completed.
Args:
url (str): url to read
downloader (Download): Download object. Defaults to creating a Download object with given user agent values.
user_agent (Optional[str]): User agent string. HDXPythonUtilities/X.X.X- is prefixed.
user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml.
user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied.
Returns:
BeautifulSoup: The BeautifulSoup object for a url | entailment |
def extract_table(tabletag):
# type: (Tag) -> List[Dict]
"""
Extract HTML table as list of dictionaries
Args:
tabletag (Tag): BeautifulSoup tag
Returns:
List[Dict]: Table rows as a list of dictionaries keyed by the header row
"""
theadtag = tabletag.find_next('thead')
headertags = theadtag.find_all('th')
if len(headertags) == 0:
headertags = theadtag.find_all('td')
headers = []
for tag in headertags:
headers.append(get_text(tag))
tbodytag = tabletag.find_next('tbody')
trtags = tbodytag.find_all('tr')
table = list()
for trtag in trtags:
row = dict()
tdtags = trtag.find_all('td')
for i, tag in enumerate(tdtags):
row[headers[i]] = get_text(tag)
table.append(row)
return table | Extract HTML table as list of dictionaries
Args:
tabletag (Tag): BeautifulSoup tag
Returns:
List[Dict]: Table rows as a list of dictionaries keyed by the header row | entailment |
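A minimal sketch combining get_soup and extract_table. The URL and user agent are placeholders, and the page is assumed to contain a <table> with the <thead>/<tbody> structure extract_table expects:

soup = get_soup('https://example.org/stats.html', user_agent='MyOrg_MyProject')
tabletag = soup.find('table')
if tabletag is not None:
    rows = extract_table(tabletag)  # e.g. [{'Country': 'X', 'Total': '42'}, ...]
    for row in rows:
        print(row)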
def wrap_callable(cls, uri, methods, callable_obj):
"""Wraps function-based callable_obj into a `Route` instance, else
proxies a `bottle_neck.handlers.BaseHandler` subclass instance.
Args:
uri (str): The uri relative path.
methods (tuple): A tuple of valid method strings.
callable_obj (instance): The callable object.
Returns:
A route instance.
Raises:
RouteError for invalid callable object type.
"""
if isinstance(callable_obj, HandlerMeta):
callable_obj.base_endpoint = uri
callable_obj.is_valid = True
return callable_obj
if isinstance(callable_obj, types.FunctionType):
return cls(uri=uri, methods=methods, callable_obj=callable_obj)
raise RouteError("Invalid handler type.") | Wraps function-based callable_obj into a `Route` instance, else
proxies a `bottle_neck.handlers.BaseHandler` subclass instance.
Args:
uri (str): The uri relative path.
methods (tuple): A tuple of valid method strings.
callable_obj (instance): The callable object.
Returns:
A route instance.
Raises:
RouteError for invalid callable object type. | entailment |
def register_app(self, app):
"""Register the route object to a `bottle.Bottle` app instance.
Args:
app (instance):
Returns:
Route instance (for chaining purposes)
"""
app.route(self.uri, methods=self.methods)(self.callable_obj)
return self | Register the route object to a `bottle.Bottle` app instance.
Args:
app (instance):
Returns:
Route instance (for chaining purposes) | entailment |
def register_handler(self, callable_obj, entrypoint, methods=('GET',)):
"""Register a handler callable to a specific route.
Args:
entrypoint (str): The uri relative path.
methods (tuple): A tuple of valid method strings.
callable_obj (callable): The callable object.
Returns:
The Router instance (for chaining purposes).
Raises:
RouteError, for missing routing params or invalid callable
object type.
"""
router_obj = Route.wrap_callable(
uri=entrypoint,
methods=methods,
callable_obj=callable_obj
)
if router_obj.is_valid:
self._routes.add(router_obj)
return self
raise RouteError( # pragma: no cover
"Missing params: methods: {} - entrypoint: {}".format(
methods, entrypoint
)
) | Register a handler callable to a specific route.
Args:
entrypoint (str): The uri relative path.
methods (tuple): A tuple of valid method strings.
callable_obj (callable): The callable object.
Returns:
The Router instance (for chaining purposes).
Raises:
RouteError, for missing routing params or invalid callable
object type. | entailment |
def mount(self, app=None):
"""Mounts all registered routes to a bottle.py application instance.
Args:
app (instance): A `bottle.Bottle()` application instance.
Returns:
The Router instance (for chaining purposes).
"""
for endpoint in self._routes:
endpoint.register_app(app)
return self | Mounts all registered routes to a bottle.py application instance.
Args:
app (instance): A `bottle.Bottle()` application instance.
Returns:
The Router instance (for chaining purposes). | entailment |
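A minimal sketch of the registration flow, assuming a Router class that exposes register_handler and mount as documented above; the handler here is a plain function:

import bottle

def list_items():
    return {'items': []}

app = bottle.Bottle()
router = Router()   # assumed: the class defining register_handler/mount
router.register_handler(list_items, '/items', methods=('GET',))
router.mount(app)   # every registered Route is attached to the bottle app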
def setup_logger(log_level, log_file=None, logger_name=None):
"""setup logger
@param log_level: debug/info/warning/error/critical
@param log_file: log file path
@param logger_name: the name of the logger; defaults to 'root' if not specified
"""
applogger = AppLog(logger_name)
level = getattr(logging, log_level.upper(), None)
if not level:
color_print("Invalid log level: %s" % log_level, "RED")
sys.exit(1)
# hide traceback when log level is INFO/WARNING/ERROR/CRITICAL
if level >= logging.INFO:
sys.tracebacklimit = 0
if log_file:
applogger._handle2file(log_file)
else:
applogger._handle2screen(color = True)
applogger.logger.setLevel(level) | setup logger
@param log_level: debug/info/warning/error/critical
@param log_file: log file path
@param logger_name: the name of the logger; defaults to 'root' if not specified | entailment |
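A usage sketch for the helper above; the log file name is a placeholder, and the function is assumed to be importable from wherever the surrounding package exposes it:

import logging

setup_logger('info', log_file='run.log', logger_name='demo')
logging.getLogger('demo').info('logger configured')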
def _tolog(self,level):
""" log with different level """
def wrapper(msg):
if self.log_colors:
color = self.log_colors[level.upper()]
getattr(self.logger, level.lower())(coloring("- {}".format(msg), color))
else:
getattr(self.logger, level.lower())(msg)
return wrapper | log with different level | entailment |
def from_status(cls, status_line, msg=None):
"""Returns a class method from bottle.HTTPError.status_line attribute.
Useful for patching `bottle.HTTPError` for web services.
Args:
status_line (str): bottle.HTTPError.status_line text.
msg: The message data for response.
Returns:
Class method based on status_line arg.
Examples:
>>> status_line = '401 Unauthorized'
>>> error_msg = 'Get out!'
>>> resp = WSResponse.from_status(status_line, error_msg)
>>> resp['errors']
['Get out!']
>>> resp['status_text']
'Unauthorized'
"""
method = getattr(cls, status_line.lower()[4:].replace(' ', '_'))
return method(msg) | Returns a class method from bottle.HTTPError.status_line attribute.
Useful for patching `bottle.HTTPError` for web services.
Args:
status_line (str): bottle.HTTPError.status_line text.
msg: The message data for response.
Returns:
Class method based on status_line arg.
Examples:
>>> status_line = '401 Unauthorized'
>>> error_msg = 'Get out!'
>>> resp = WSResponse.from_status(status_line, error_msg)
>>> resp['errors']
['Get out!']
>>> resp['status_text']
'Unauthorized' | entailment |
def created(cls, data=None):
"""Shortcut API for HTTP 201 `Created` response.
Args:
data (object): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '201 Created'
return cls(201, data=data).to_json | Shortcut API for HTTP 201 `Created` response.
Args:
data (object): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def not_modified(cls, errors=None):
"""Shortcut API for HTTP 304 `Not Modified` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '304 Not Modified'
return cls(304, None, errors).to_json | Shortcut API for HTTP 304 `Not Modified` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def bad_request(cls, errors=None):
"""Shortcut API for HTTP 400 `Bad Request` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '400 Bad Request'
return cls(400, errors=errors).to_json | Shortcut API for HTTP 400 `Bad Request` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def unauthorized(cls, errors=None):
"""Shortcut API for HTTP 401 `Unauthorized` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '401 Unauthorized'
return cls(401, errors=errors).to_json | Shortcut API for HTTP 401 `Unauthorized` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def forbidden(cls, errors=None):
"""Shortcut API for HTTP 403 `Forbidden` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '403 Forbidden'
return cls(403, errors=errors).to_json | Shortcut API for HTTP 403 `Forbidden` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def not_found(cls, errors=None):
"""Shortcut API for HTTP 404 `Not found` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '404 Not Found'
return cls(404, None, errors).to_json | Shortcut API for HTTP 404 `Not found` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def method_not_allowed(cls, errors=None):
"""Shortcut API for HTTP 405 `Method not allowed` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '405 Method Not Allowed'
return cls(405, None, errors).to_json | Shortcut API for HTTP 405 `Method not allowed` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def not_implemented(cls, errors=None):
"""Shortcut API for HTTP 501 `Not Implemented` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '501 Not Implemented'
return cls(501, None, errors).to_json | Shortcut API for HTTP 501 `Not Implemented` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def service_unavailable(cls, errors=None):
"""Shortcut API for HTTP 503 `Service Unavailable` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance.
"""
if cls.expose_status: # pragma: no cover
cls.response.content_type = 'application/json'
cls.response._status_line = '503 Service Unavailable'
return cls(503, None, errors).to_json | Shortcut API for HTTP 503 `Service Unavailable` response.
Args:
errors (list): Response key/value data.
Returns:
WSResponse Instance. | entailment |
def to_json(self):
"""Short cut for JSON response service data.
Returns:
Dict that implements JSON interface.
"""
web_resp = collections.OrderedDict()
web_resp['status_code'] = self.status_code
web_resp['status_text'] = dict(HTTP_CODES).get(self.status_code)
web_resp['data'] = self.data if self.data is not None else {}
web_resp['errors'] = self.errors or []
return web_resp | Shortcut for JSON response service data.
Returns:
Dict that implements JSON interface. | entailment |
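Putting the shortcut constructors and to_json together, the envelope handed back to the client looks roughly like this (illustrative values; status_text comes from the HTTP_CODES table, and to_json is assumed to be exposed as a property, as the shortcuts above imply):

body = WSResponse.created({'id': 7})
# body == OrderedDict([
#     ('status_code', 201),
#     ('status_text', 'Created'),
#     ('data', {'id': 7}),
#     ('errors', []),
# ])

error_body = WSResponse.not_found(['Record 7 does not exist.'])
# error_body['status_code'] == 404 and error_body['errors'] == ['Record 7 does not exist.']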
def setup_logging(**kwargs):
# type: (Any) -> None
"""Setup logging configuration
Args:
**kwargs: See below
logging_config_dict (dict): Logging configuration dictionary OR
logging_config_json (str): Path to JSON Logging configuration OR
logging_config_yaml (str): Path to YAML Logging configuration. Defaults to internal logging_configuration.yml.
smtp_config_dict (dict): Email Logging configuration dictionary if using default logging configuration OR
smtp_config_json (str): Path to JSON Email Logging configuration if using default logging configuration OR
smtp_config_yaml (str): Path to YAML Email Logging configuration if using default logging configuration
Returns:
None
"""
smtp_config_found = False
smtp_config_dict = kwargs.get('smtp_config_dict', None)
if smtp_config_dict:
smtp_config_found = True
print('Loading smtp configuration customisations from dictionary')
smtp_config_json = kwargs.get('smtp_config_json', '')
if smtp_config_json:
if smtp_config_found:
raise LoggingError('More than one smtp configuration file given!')
smtp_config_found = True
print('Loading smtp configuration customisations from: %s' % smtp_config_json)
smtp_config_dict = load_json(smtp_config_json)
smtp_config_yaml = kwargs.get('smtp_config_yaml', '')
if smtp_config_yaml:
if smtp_config_found:
raise LoggingError('More than one smtp configuration file given!')
smtp_config_found = True
print('Loading smtp configuration customisations from: %s' % smtp_config_yaml)
smtp_config_dict = load_yaml(smtp_config_yaml)
logging_smtp_config_dict = None
logging_config_found = False
logging_config_dict = kwargs.get('logging_config_dict', None)
if logging_config_dict:
logging_config_found = True
print('Loading logging configuration from dictionary')
logging_config_json = kwargs.get('logging_config_json', '')
if logging_config_json:
if logging_config_found:
raise LoggingError('More than one logging configuration file given!')
logging_config_found = True
print('Loading logging configuration from: %s' % logging_config_json)
logging_config_dict = load_json(logging_config_json)
logging_config_yaml = kwargs.get('logging_config_yaml', '')
if logging_config_found:
if logging_config_yaml:
raise LoggingError('More than one logging configuration file given!')
else:
if not logging_config_yaml:
print('No logging configuration parameter. Using default.')
logging_config_yaml = script_dir_plus_file('logging_configuration.yml', setup_logging)
if smtp_config_found:
logging_smtp_config_yaml = script_dir_plus_file('logging_smtp_configuration.yml', setup_logging)
print('Loading base SMTP logging configuration from: %s' % logging_smtp_config_yaml)
logging_smtp_config_dict = load_yaml(logging_smtp_config_yaml)
print('Loading logging configuration from: %s' % logging_config_yaml)
logging_config_dict = load_yaml(logging_config_yaml)
if smtp_config_found:
if logging_smtp_config_dict:
logging_config_dict = merge_dictionaries([logging_config_dict, logging_smtp_config_dict, smtp_config_dict])
else:
raise LoggingError('SMTP logging configuration file given but not using default logging configuration!')
file_only = os.getenv('LOG_FILE_ONLY')
if file_only is not None and file_only.lower() not in ['false', 'f', 'n', 'no', '0']:
root = logging_config_dict.get('root')
if root is not None:
handlers = root.get('handlers', list())
for i, handler in enumerate(handlers):
if handler.lower() == 'console':
del handlers[i]
break
logging.config.dictConfig(logging_config_dict) | Setup logging configuration
Args:
**kwargs: See below
logging_config_dict (dict): Logging configuration dictionary OR
logging_config_json (str): Path to JSON Logging configuration OR
logging_config_yaml (str): Path to YAML Logging configuration. Defaults to internal logging_configuration.yml.
smtp_config_dict (dict): Email Logging configuration dictionary if using default logging configuration OR
smtp_config_json (str): Path to JSON Email Logging configuration if using default logging configuration OR
smtp_config_yaml (str): Path to YAML Email Logging configuration if using default logging configuration
Returns:
None | entailment |
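A usage sketch for the configuration precedence described above; the file names are placeholders:

# Default logging configuration as shipped:
setup_logging()

# Default configuration plus e-mail alerts, driven by a user-supplied SMTP YAML:
setup_logging(smtp_config_yaml='config/smtp.yml')

# Fully custom configuration (combining this with an SMTP file raises LoggingError):
setup_logging(logging_config_yaml='config/logging.yml')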
def _search_generator(self, item: Any, reverse: bool = False) -> Generator[Any, None, None]:
"""A helper method for `self.search` that returns a generator rather than a list."""
results = 0
for _, x in self.enumerate(item, reverse=reverse):
yield x
results += 1
if results == 0:
raise SearchError(str(item)) | A helper method for `self.search` that returns a generator rather than a list. | entailment |
def _search_generator(self, item: Any) -> Generator[Any, None, None]:
"""A helper method for `self.search` that returns a generator rather than a list."""
results = 0
for x in self.enumerate(item):
yield x
results += 1
if results == 0:
raise SearchError(str(item)) | A helper method for `self.search` that returns a generator rather than a list. | entailment |
def _search_generator(self, item: Any) -> Generator[Tuple[Any, Any], None, None]:
"""A helper method for `self.search` that returns a generator rather than a list."""
results = 0
for key, value in self.enumerate(item):
yield key, value
results += 1
if results == 0:
raise SearchError(str(item)) | A helper method for `self.search` that returns a generator rather than a list. | entailment |
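The three variants above share one pattern: stream matches as they are found and raise only once the generator has been exhausted without a single result. A stripped-down sketch of that pattern; the container-free function and the SearchError class here are stand-ins, since the real methods delegate matching to self.enumerate():

from typing import Any, Generator, Iterable

class SearchError(LookupError):
    """Stand-in for the library's SearchError."""

def search_generator(items: Iterable[Any], target: Any) -> Generator[Any, None, None]:
    results = 0
    for value in items:
        if value == target:
            yield value
            results += 1
    if results == 0:
        raise SearchError(str(target))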
def ask_bool(question: str, default: bool = True) -> bool:
"""Asks a question yes no style"""
default_q = "Y/n" if default else "y/N"
answer = input("{0} [{1}]: ".format(question, default_q))
lower = answer.lower()
if not lower:
return default
return lower == "y" | Asks a question yes no style | entailment |
def ask_int(question: str, default: int = None) -> int:
"""Asks for a number in a question"""
default_q = " [default: {0}]: ".format(
default) if default is not None else ""
answer = input("{0} [{1}]: ".format(question, default_q))
if not answer:
if default is None:
print("No default set, try again.")
return ask_int(question, default)
return default
if any(x not in "1234567890" for x in answer):
print("Please enter only numbers (0-9).")
return ask_int(question, default)
return int(answer) | Asks for a number in a question | entailment |
def ask_path(question: str, default: str = None) -> str:
"""Asks for a path"""
default_q = " [default: {0}]: ".format(
default) if default is not None else ""
answer = input("{0} [{1}]: ".format(question, default_q))
if answer == "":
return default
if os.path.isdir(answer):
return answer
print(
"No such directory: {answer}, please try again".format(answer=answer))
return ask_path(question, default) | Asks for a path | entailment |
def ask_list(question: str, default: list = None) -> list:
"""Asks for a comma seperated list of strings"""
default_q = " [default: {0}]: ".format(
",".join(default)) if default is not None else ""
answer = input("{0} [{1}]: ".format(question, default_q))
if answer == "":
return default
return [ans.strip() for ans in answer.split(",")] | Asks for a comma-separated list of strings | entailment |
def ask_str(question: str, default: str = None):
"""Asks for a simple string"""
default_q = " [default: {0}]: ".format(
default) if default is not None else ""
answer = input("{0} [{1}]: ".format(question, default_q))
if answer == "":
return default
return answer | Asks for a simple string | entailment |
def get_tools(self) -> list:
"""Lets the user enter the tools he want to use"""
tools = "flake8,pylint,vulture,pyroma,isort,yapf,safety,dodgy,pytest,pypi".split(
",")
print("Available tools: {0}".format(",".join(tools)))
answer = ask_list("What tools would you like to use?",
["flake8", "pytest"])
if any(tool not in tools for tool in answer):
print("Invalid answer, retry.")
self.get_tools()
return answer | Lets the user enter the tools they want to use | entailment |
def main(self) -> None:
"""The main function for generating the config file"""
path = ask_path("where should the config be stored?", ".snekrc")
conf = configobj.ConfigObj()
tools = self.get_tools()
for tool in tools:
conf[tool] = getattr(self, tool)() # pylint: disable=assignment-from-no-return
conf.filename = path
conf.write()
print("Written config file!")
if "pylint" in tools:
print(
"Please also run `pylint --generate-rcfile` to complete setup") | The main function for generating the config file | entailment |
def paginator(limit, offset, record_count, base_uri, page_nav_tpl='&limit={}&offset={}'):
"""Compute pagination info for collection filtering.
Args:
limit (int): Collection filter limit.
offset (int): Collection filter offset.
record_count (int): Collection filter total record count.
base_uri (str): Collection filter base uri (without limit, offset)
page_nav_tpl (str): Pagination template.
Returns:
A mapping of pagination info.
"""
total_pages = int(math.ceil(record_count / limit))
next_cond = limit + offset <= record_count
prev_cond = offset >= limit
next_page = base_uri + page_nav_tpl.format(limit, offset + limit) if next_cond else None
prev_page = base_uri + page_nav_tpl.format(limit, offset - limit) if prev_cond else None
return OrderedDict([
('total_count', record_count),
('total_pages', total_pages),
('next_page', next_page),
('prev_page', prev_page)
]) | Compute pagination info for collection filtering.
Args:
limit (int): Collection filter limit.
offset (int): Collection filter offset.
record_count (int): Collection filter total record count.
base_uri (str): Collection filter base uri (without limit, offset)
page_nav_tpl (str): Pagination template.
Returns:
A mapping of pagination info. | entailment |
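A worked example for the helper above: 95 records, pages of 10, currently at offset 20 (base_uri is a placeholder):

info = paginator(limit=10, offset=20, record_count=95, base_uri='/api/items?q=red')
# info['total_count'] == 95
# info['total_pages'] == 10            # ceil(95 / 10)
# info['next_page']  == '/api/items?q=red&limit=10&offset=30'
# info['prev_page']  == '/api/items?q=red&limit=10&offset=10'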