_id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 distinct values) | text (string, 75-19.8k chars) | language (string, 1 distinct value) | meta_information (dict)
---|---|---|---|---|---|
q278700
|
handle_jobs
|
test
|
async def handle_jobs(job_handler, host, port, *, loop):
"""
Connects to the remote master and continuously receives calls, executes
them, then returns a response until interrupted.
"""
try:
try:
reader, writer = await asyncio.open_connection(host, port, loop=loop)
except OSError:
logging.error("worker could not connect to server")
return
while True:
try:
call_encoded = await reader.readuntil(b"\n")
except (asyncio.IncompleteReadError, ConnectionResetError):
break
logging.debug("worker got call")
call_json = call_encoded.decode("utf-8")
call = json.loads(call_json)
response = job_handler(call)
response_json = json.dumps(response) + "\n"
response_encoded = response_json.encode("utf-8")
writer.write(response_encoded)
logging.debug("worker returned response")
except KeyboardInterrupt:
pass
|
python
|
{
"resource": ""
}
|
q278701
|
worker_main
|
test
|
def worker_main(job_handler, host, port):
"""
Starts an asyncio event loop to connect to the master and run jobs.
"""
loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
loop.run_until_complete(handle_jobs(job_handler, host, port, loop=loop))
loop.close()
|
python
|
{
"resource": ""
}
|
q278702
|
run_worker_pool
|
test
|
def run_worker_pool(job_handler, host="localhost", port=48484,
*, max_workers=None):
"""
Runs a pool of workers which connect to a remote HighFive master and begin
executing calls.
"""
if max_workers is None:
max_workers = multiprocessing.cpu_count()
processes = []
for _ in range(max_workers):
p = multiprocessing.Process(target=worker_main,
args=(job_handler, host, port))
p.start()
processes.append(p)
logger.debug("workers started")
for p in processes:
p.join()
logger.debug("all workers completed")
|
python
|
{
"resource": ""
}
|
q278703
|
CompanyDetailCompany.classification
|
test
|
def classification(self, classification):
"""
Sets the classification of this CompanyDetailCompany.
Classification of Company
:param classification: The classification of this CompanyDetailCompany.
:type: str
"""
allowed_values = ["Public Limited Indian Non-Government Company", "Private Limited Indian Non-Government Company", "One Person Company", "Private Limited Foreign Company Incorporated in India", "Public Limited Foreign Company Incorporated in India", "Union Government Company", "State Government Company", "Guarantee & Association Public", "Guarantee & Association Private", "Not For Profit Company", "Unlimited Liabilities Public", "Unlimited Liabilities Private", "Undefined"]
if classification not in allowed_values:
raise ValueError(
"Invalid value for `classification`, must be one of {0}"
.format(allowed_values)
)
self._classification = classification
|
python
|
{
"resource": ""
}
|
q278704
|
LWLink._send_message
|
test
|
def _send_message(self, msg):
"""Add message to queue and start processing the queue."""
LWLink.the_queue.put_nowait(msg)
if LWLink.thread is None or not LWLink.thread.isAlive():
LWLink.thread = Thread(target=self._send_queue)
LWLink.thread.start()
|
python
|
{
"resource": ""
}
|
q278705
|
LWLink.turn_on_light
|
test
|
def turn_on_light(self, device_id, name):
"""Create the message to turn light on."""
msg = "!%sFdP32|Turn On|%s" % (device_id, name)
self._send_message(msg)
|
python
|
{
"resource": ""
}
|
q278706
|
LWLink.turn_on_switch
|
test
|
def turn_on_switch(self, device_id, name):
"""Create the message to turn switch on."""
msg = "!%sF1|Turn On|%s" % (device_id, name)
self._send_message(msg)
|
python
|
{
"resource": ""
}
|
q278707
|
LWLink.turn_on_with_brightness
|
test
|
def turn_on_with_brightness(self, device_id, name, brightness):
"""Scale brightness from 0..255 to 1..32."""
brightness_value = round((brightness * 31) / 255) + 1
# F1 = Light on and F0 = light off. FdP[0..32] is brightness. 32 is
# full. We want that when turning the light on.
msg = "!%sFdP%d|Lights %d|%s" % (
device_id, brightness_value, brightness_value, name)
self._send_message(msg)
|
python
|
{
"resource": ""
}
|
q278708
|
LWLink.turn_off
|
test
|
def turn_off(self, device_id, name):
"""Create the message to turn light or switch off."""
msg = "!%sF0|Turn Off|%s" % (device_id, name)
self._send_message(msg)
|
python
|
{
"resource": ""
}
|
q278709
|
LWLink._send_queue
|
test
|
def _send_queue(self):
"""If the queue is not empty, process the queue."""
while not LWLink.the_queue.empty():
self._send_reliable_message(LWLink.the_queue.get_nowait())
|
python
|
{
"resource": ""
}
|
q278710
|
LWLink._send_reliable_message
|
test
|
def _send_reliable_message(self, msg):
"""Send msg to LightwaveRF hub."""
result = False
max_retries = 15
trans_id = next(LWLink.transaction_id)
msg = "%d,%s" % (trans_id, msg)
try:
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) \
as write_sock, \
socket.socket(socket.AF_INET, socket.SOCK_DGRAM) \
as read_sock:
write_sock.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
read_sock.setsockopt(socket.SOL_SOCKET,
socket.SO_BROADCAST, 1)
read_sock.settimeout(self.SOCKET_TIMEOUT)
read_sock.bind(('0.0.0.0', self.RX_PORT))
while max_retries:
max_retries -= 1
write_sock.sendto(msg.encode(
'UTF-8'), (LWLink.link_ip, self.TX_PORT))
result = False
while True:
response, dummy = read_sock.recvfrom(1024)
response = response.decode('UTF-8')
if "Not yet registered." in response:
_LOGGER.error("Not yet registered")
self.register()
result = True
break
if response.startswith("%d,OK" % trans_id):
result = True
break
if response.startswith("%d,ERR" % trans_id):
_LOGGER.error(response)
break
_LOGGER.info(response)
if result:
break
time.sleep(0.25)
except socket.timeout:
_LOGGER.error("LW broker timeout!")
return result
except Exception as ex:
_LOGGER.error(ex)
raise
if result:
_LOGGER.info("LW broker OK!")
else:
_LOGGER.error("LW broker fail!")
return result
|
python
|
{
"resource": ""
}
|
q278711
|
create_adapter
|
test
|
def create_adapter(cmph, ffi, obj):
""" Generates a wrapped adapter for the given object
Parameters
----------
obj : list, buffer, array, or file
Raises
------
ValueError
If presented with an object that cannot be adapted
Returns
-------
CMPH capable adapter
"""
# if arraylike and fixed unit size
# if file
# if buffer
if is_file_location(obj):
# The FP is captured for GC reasons inside the dtor closure
# pylint: disable=invalid-name
fd = open(obj)
adapter = cmph.cmph_io_nlfile_adapter(fd)
def dtor():
cmph.cmph_io_nlfile_adapter_destroy(adapter)
fd.close()
# pylint: enable=invalid-name
return _AdapterCxt(adapter, dtor)
elif is_file(obj):
adapter = cmph.cmph_io_nlfile_adapter(obj)
dtor = lambda: cmph.cmph_io_nlfile_adapter_destroy(adapter)
return _AdapterCxt(adapter, dtor)
elif isinstance(obj, Sequence):
if len(obj) == 0:
raise ValueError("An empty sequence is already a perfect hash!")
return _create_pyobj_adapter(cmph, ffi, obj)
else:
raise ValueError("data cannot have a cmph wrapper generated")
|
python
|
{
"resource": ""
}
|
q278712
|
YearlyFinancials.nature
|
test
|
def nature(self, nature):
"""
Sets the nature of this YearlyFinancials.
Nature of the balancesheet
:param nature: The nature of this YearlyFinancials.
:type: str
"""
allowed_values = ["STANDALONE"]
if nature not in allowed_values:
raise ValueError(
"Invalid value for `nature`, must be one of {0}"
.format(allowed_values)
)
self._nature = nature
|
python
|
{
"resource": ""
}
|
q278713
|
Section.update_
|
test
|
def update_(self, sct_dict, conf_arg=True):
"""Update values of configuration section with dict.
Args:
sct_dict (dict): dict indexed with option names. Undefined
options are discarded.
conf_arg (bool): if True, only options that can be set in a config
file are updated.
"""
for opt, val in sct_dict.items():
if opt not in self.def_:
continue
if not conf_arg or self.def_[opt].conf_arg:
self[opt] = val
|
python
|
{
"resource": ""
}
|
q278714
|
Section.reset_
|
test
|
def reset_(self):
"""Restore default values of options in this section."""
for opt, meta in self.defaults_():
self[opt] = meta.default
|
python
|
{
"resource": ""
}
|
q278715
|
ConfigurationManager.set_config_files_
|
test
|
def set_config_files_(self, *config_files):
"""Set the list of config files.
Args:
config_files (pathlike): path of config files, given in the order
of reading.
"""
self._config_files = tuple(pathlib.Path(path) for path in config_files)
|
python
|
{
"resource": ""
}
|
q278716
|
ConfigurationManager.opt_vals_
|
test
|
def opt_vals_(self):
"""Iterator over sections, option names, and option values.
This iterator is also implemented at the section level. The two loops
produce the same output::
for sct, opt, val in conf.opt_vals_():
print(sct, opt, val)
for sct in conf.sections_():
for opt, val in conf[sct].opt_vals_():
print(sct, opt, val)
Yields:
tuples with sections, option names, and option values.
"""
for sct, opt in self.options_():
yield sct, opt, self[sct][opt]
|
python
|
{
"resource": ""
}
|
q278717
|
ConfigurationManager.defaults_
|
test
|
def defaults_(self):
"""Iterator over sections, option names, and option metadata.
This iterator is also implemented at the section level. The two loops
produce the same output::
for sct, opt, meta in conf.defaults_():
print(sct, opt, meta.default)
for sct in conf.sections_():
for opt, meta in conf[sct].defaults_():
print(sct, opt, meta.default)
Yields:
tuples with sections, option names, and :class:`Conf` instances
holding option metadata.
"""
for sct, opt in self.options_():
yield sct, opt, self[sct].def_[opt]
|
python
|
{
"resource": ""
}
|
q278718
|
ConfigurationManager.create_config_
|
test
|
def create_config_(self, index=0, update=False):
"""Create config file.
Create config file in :attr:`config_files_[index]`.
Parameters:
index(int): index of config file.
update (bool): if set to True and :attr:`config_files_` already
exists, its content is read and all the options it sets are
kept in the produced config file.
"""
if not self.config_files_[index:]:
return
path = self.config_files_[index]
if not path.parent.exists():
path.parent.mkdir(parents=True)
conf_dict = {}
for section in self.sections_():
conf_opts = [o for o, m in self[section].defaults_() if m.conf_arg]
if not conf_opts:
continue
conf_dict[section] = {}
for opt in conf_opts:
conf_dict[section][opt] = (self[section][opt] if update else
self[section].def_[opt].default)
with path.open('w') as cfile:
toml.dump(conf_dict, cfile)
|
python
|
{
"resource": ""
}
|
q278719
|
ConfigurationManager.update_
|
test
|
def update_(self, conf_dict, conf_arg=True):
"""Update values of configuration options with dict.
Args:
conf_dict (dict): dict of dict indexed with section and option
names.
conf_arg (bool): if True, only options that can be set in a config
file are updated.
"""
for section, secdict in conf_dict.items():
self[section].update_(secdict, conf_arg)
|
python
|
{
"resource": ""
}
|
q278720
|
ConfigurationManager.read_config_
|
test
|
def read_config_(self, cfile):
"""Read a config file and set config values accordingly.
Returns:
dict: content of config file.
"""
if not cfile.exists():
return {}
try:
conf_dict = toml.load(str(cfile))
except toml.TomlDecodeError:
return None
self.update_(conf_dict)
return conf_dict
|
python
|
{
"resource": ""
}
|
q278721
|
ConfigurationManager.read_configs_
|
test
|
def read_configs_(self):
"""Read config files and set config values accordingly.
Returns:
(dict, list, list): respectively content of files, list of
missing/empty files and list of files for which a parsing error
occurred.
"""
if not self.config_files_:
return {}, [], []
content = {section: {} for section in self}
empty_files = []
faulty_files = []
for cfile in self.config_files_:
conf_dict = self.read_config_(cfile)
if conf_dict is None:
faulty_files.append(cfile)
continue
elif not conf_dict:
empty_files.append(cfile)
continue
for section, secdict in conf_dict.items():
content[section].update(secdict)
return content, empty_files, faulty_files
|
python
|
{
"resource": ""
}
|
q278722
|
_names
|
test
|
def _names(section, option):
"""List of cli strings for a given option."""
meta = section.def_[option]
action = meta.cmd_kwargs.get('action')
if action is internal.Switch:
names = ['-{}'.format(option), '+{}'.format(option)]
if meta.shortname is not None:
names.append('-{}'.format(meta.shortname))
names.append('+{}'.format(meta.shortname))
else:
names = ['--{}'.format(option)]
if meta.shortname is not None:
names.append('-{}'.format(meta.shortname))
return names
|
python
|
{
"resource": ""
}
|
q278723
|
CLIManager.sections_list
|
test
|
def sections_list(self, cmd=None):
"""List of config sections used by a command.
Args:
cmd (str): command name, set to ``None`` or ``''`` for the bare
command.
Returns:
list of str: list of configuration sections used by that command.
"""
sections = list(self.common.sections)
if not cmd:
if self.bare is not None:
sections.extend(self.bare.sections)
return sections
return []
sections.extend(self.subcmds[cmd].sections)
if cmd in self._conf:
sections.append(cmd)
return sections
|
python
|
{
"resource": ""
}
|
q278724
|
CLIManager._cmd_opts_solver
|
test
|
def _cmd_opts_solver(self, cmd_name):
"""Scan options related to one command and enrich _opt_cmds."""
sections = self.sections_list(cmd_name)
cmd_dict = self._opt_cmds[cmd_name] if cmd_name else self._opt_bare
for sct in reversed(sections):
for opt, opt_meta in self._conf[sct].def_.items():
if not opt_meta.cmd_arg:
continue
if opt not in cmd_dict:
cmd_dict[opt] = sct
else:
warnings.warn(
'Command <{0}>: {1}.{2} shadowed by {3}.{2}'.format(
cmd_name, sct, opt, cmd_dict[opt]),
error.LoamWarning, stacklevel=4)
|
python
|
{
"resource": ""
}
|
q278725
|
CLIManager._add_options_to_parser
|
test
|
def _add_options_to_parser(self, opts_dict, parser):
"""Add options to a parser."""
store_bool = ('store_true', 'store_false')
for opt, sct in opts_dict.items():
meta = self._conf[sct].def_[opt]
kwargs = copy.deepcopy(meta.cmd_kwargs)
action = kwargs.get('action')
if action is internal.Switch:
kwargs.update(nargs=0)
elif meta.default is not None and action not in store_bool:
kwargs.setdefault('type', type(meta.default))
kwargs.update(help=meta.help)
kwargs.setdefault('default', self._conf[sct][opt])
parser.add_argument(*_names(self._conf[sct], opt), **kwargs)
|
python
|
{
"resource": ""
}
|
q278726
|
CLIManager._build_parser
|
test
|
def _build_parser(self):
"""Build command line argument parser.
Returns:
:class:`argparse.ArgumentParser`: the command line argument parser.
You probably won't need to use it directly. To parse command line
arguments and update the :class:`ConfigurationManager` instance
accordingly, use the :meth:`parse_args` method.
"""
main_parser = argparse.ArgumentParser(description=self.common.help,
prefix_chars='-+')
self._add_options_to_parser(self._opt_bare, main_parser)
main_parser.set_defaults(**self.common.defaults)
if self.bare is not None:
main_parser.set_defaults(**self.bare.defaults)
subparsers = main_parser.add_subparsers(dest='loam_sub_name')
for cmd_name, meta in self.subcmds.items():
kwargs = {'prefix_chars': '+-', 'help': meta.help}
dummy_parser = subparsers.add_parser(cmd_name, **kwargs)
self._add_options_to_parser(self._opt_cmds[cmd_name], dummy_parser)
dummy_parser.set_defaults(**meta.defaults)
return main_parser
|
python
|
{
"resource": ""
}
|
q278727
|
CLIManager.parse_args
|
test
|
def parse_args(self, arglist=None):
"""Parse arguments and update options accordingly.
Args:
arglist (list of str): list of arguments to parse. If set to None,
``sys.argv[1:]`` is used.
Returns:
:class:`Namespace`: the argument namespace returned by the
:class:`argparse.ArgumentParser`.
"""
args = self._parser.parse_args(args=arglist)
sub_cmd = args.loam_sub_name
if sub_cmd is None:
for opt, sct in self._opt_bare.items():
self._conf[sct][opt] = getattr(args, opt, None)
else:
for opt, sct in self._opt_cmds[sub_cmd].items():
self._conf[sct][opt] = getattr(args, opt, None)
return args
|
python
|
{
"resource": ""
}
|
q278728
|
CLIManager._zsh_comp_command
|
test
|
def _zsh_comp_command(self, zcf, cmd, grouping, add_help=True):
"""Write zsh _arguments compdef for a given command.
Args:
zcf (file): zsh compdef file.
cmd (str): command name, set to None or '' for bare command.
grouping (bool): group options (zsh>=5.4).
add_help (bool): add a help option.
"""
if add_help:
if grouping:
print("+ '(help)'", end=BLK, file=zcf)
print("'--help[show help message]'", end=BLK, file=zcf)
print("'-h[show help message]'", end=BLK, file=zcf)
# could deal with duplicate by iterating in reverse and keep set of
# already defined opts.
no_comp = ('store_true', 'store_false')
cmd_dict = self._opt_cmds[cmd] if cmd else self._opt_bare
for opt, sct in cmd_dict.items():
meta = self._conf[sct].def_[opt]
if meta.cmd_kwargs.get('action') == 'append':
grpfmt, optfmt = "+ '{}'", "'*{}[{}]{}'"
if meta.comprule is None:
meta.comprule = ''
else:
grpfmt, optfmt = "+ '({})'", "'{}[{}]{}'"
if meta.cmd_kwargs.get('action') in no_comp \
or meta.cmd_kwargs.get('nargs') == 0:
meta.comprule = None
if meta.comprule is None:
compstr = ''
elif meta.comprule == '':
optfmt = optfmt.split('[')
optfmt = optfmt[0] + '=[' + optfmt[1]
compstr = ': :( )'
else:
optfmt = optfmt.split('[')
optfmt = optfmt[0] + '=[' + optfmt[1]
compstr = ': :{}'.format(meta.comprule)
if grouping:
print(grpfmt.format(opt), end=BLK, file=zcf)
for name in _names(self._conf[sct], opt):
print(optfmt.format(name, meta.help.replace("'", "'\"'\"'"),
compstr), end=BLK, file=zcf)
|
python
|
{
"resource": ""
}
|
q278729
|
CLIManager.zsh_complete
|
test
|
def zsh_complete(self, path, cmd, *cmds, sourceable=False):
"""Write zsh compdef script.
Args:
path (path-like): desired path of the compdef script.
cmd (str): command name that should be completed.
cmds (str): extra command names that should be completed.
sourceable (bool): if True, the generated file will contain an
explicit call to ``compdef``, which means it can be sourced
to activate CLI completion.
"""
grouping = internal.zsh_version() >= (5, 4)
path = pathlib.Path(path)
firstline = ['#compdef', cmd]
firstline.extend(cmds)
subcmds = list(self.subcmds.keys())
with path.open('w') as zcf:
print(*firstline, end='\n\n', file=zcf)
# main function
print('function _{} {{'.format(cmd), file=zcf)
print('local line', file=zcf)
print('_arguments -C', end=BLK, file=zcf)
if subcmds:
# list of subcommands and their description
substrs = ["{}\\:'{}'".format(sub, self.subcmds[sub].help)
for sub in subcmds]
print('"1:Commands:(({}))"'.format(' '.join(substrs)),
end=BLK, file=zcf)
self._zsh_comp_command(zcf, None, grouping)
if subcmds:
print("'*::arg:->args'", file=zcf)
print('case $line[1] in', file=zcf)
for sub in subcmds:
print('{sub}) _{cmd}_{sub} ;;'.format(sub=sub, cmd=cmd),
file=zcf)
print('esac', file=zcf)
print('}', file=zcf)
# all subcommand completion handlers
for sub in subcmds:
print('\nfunction _{}_{} {{'.format(cmd, sub), file=zcf)
print('_arguments', end=BLK, file=zcf)
self._zsh_comp_command(zcf, sub, grouping)
print('}', file=zcf)
if sourceable:
print('\ncompdef _{0} {0}'.format(cmd), *cmds, file=zcf)
|
python
|
{
"resource": ""
}
|
q278730
|
CLIManager._bash_comp_command
|
test
|
def _bash_comp_command(self, cmd, add_help=True):
"""Build a list of all options for a given command.
Args:
cmd (str): command name, set to None or '' for bare command.
add_help (bool): add a help option.
Returns:
list of str: list of CLI options strings.
"""
out = ['-h', '--help'] if add_help else []
cmd_dict = self._opt_cmds[cmd] if cmd else self._opt_bare
for opt, sct in cmd_dict.items():
out.extend(_names(self._conf[sct], opt))
return out
|
python
|
{
"resource": ""
}
|
q278731
|
CLIManager.bash_complete
|
test
|
def bash_complete(self, path, cmd, *cmds):
"""Write bash complete script.
Args:
path (path-like): desired path of the complete script.
cmd (str): command name that should be completed.
cmds (str): extra command names that should be completed.
"""
path = pathlib.Path(path)
subcmds = list(self.subcmds.keys())
with path.open('w') as bcf:
# main function
print('_{}() {{'.format(cmd), file=bcf)
print('COMPREPLY=()', file=bcf)
print(r'local cur=${COMP_WORDS[COMP_CWORD]}', end='\n\n', file=bcf)
optstr = ' '.join(self._bash_comp_command(None))
print(r'local options="{}"'.format(optstr), end='\n\n', file=bcf)
if subcmds:
print('local commands="{}"'.format(' '.join(subcmds)),
file=bcf)
print('declare -A suboptions', file=bcf)
for sub in subcmds:
optstr = ' '.join(self._bash_comp_command(sub))
print('suboptions[{}]="{}"'.format(sub, optstr), file=bcf)
condstr = 'if'
for sub in subcmds:
print(condstr, r'[[ "${COMP_LINE}" == *"', sub, '"* ]] ; then',
file=bcf)
print(r'COMPREPLY=( `compgen -W "${suboptions[', sub,
r']}" -- ${cur}` )', sep='', file=bcf)
condstr = 'elif'
print(condstr, r'[[ ${cur} == -* ]] ; then', file=bcf)
print(r'COMPREPLY=( `compgen -W "${options}" -- ${cur}`)',
file=bcf)
if subcmds:
print(r'else', file=bcf)
print(r'COMPREPLY=( `compgen -W "${commands}" -- ${cur}`)',
file=bcf)
print('fi', file=bcf)
print('}', end='\n\n', file=bcf)
print('complete -F _{0} {0}'.format(cmd), *cmds, file=bcf)
|
python
|
{
"resource": ""
}
|
q278732
|
start_master
|
test
|
async def start_master(host="", port=48484, *, loop=None):
"""
Starts a new HighFive master at the given host and port, and returns it.
"""
loop = loop if loop is not None else asyncio.get_event_loop()
manager = jobs.JobManager(loop=loop)
workers = set()
server = await loop.create_server(
lambda: WorkerProtocol(manager, workers), host, port)
return Master(server, manager, workers, loop=loop)
|
python
|
{
"resource": ""
}
|
q278733
|
WorkerProtocol.connection_made
|
test
|
def connection_made(self, transport):
"""
Called when a remote worker connection has been found. Finishes setting
up the protocol object.
"""
if self._manager.is_closed():
logger.debug("worker tried to connect while manager was closed")
return
logger.debug("new worker connected")
self._transport = transport
self._buffer = bytearray()
self._worker = Worker(self._transport, self._manager)
self._workers.add(self._worker)
|
python
|
{
"resource": ""
}
|
q278734
|
WorkerProtocol.line_received
|
test
|
def line_received(self, line):
"""
Called when a complete line is found from the remote worker. Decodes
a response object from the line, then passes it to the worker object.
"""
response = json.loads(line.decode("utf-8"))
self._worker.response_received(response)
|
python
|
{
"resource": ""
}
|
q278735
|
WorkerProtocol.connection_lost
|
test
|
def connection_lost(self, exc):
"""
Called when the connection to the remote worker is broken. Closes the
worker.
"""
logger.debug("worker connection lost")
self._worker.close()
self._workers.remove(self._worker)
|
python
|
{
"resource": ""
}
|
q278736
|
Worker._job_loaded
|
test
|
def _job_loaded(self, job):
"""
Called when a job has been found for the worker to run. Sends the job's
RPC to the remote worker.
"""
logger.debug("worker {} found a job".format(id(self)))
if self._closed:
self._manager.return_job(job)
return
self._job = job
call_obj = self._job.get_call()
call = (json.dumps(call_obj) + "\n").encode("utf-8")
self._transport.write(call)
|
python
|
{
"resource": ""
}
|
q278737
|
Worker.response_received
|
test
|
def response_received(self, response):
"""
Called when a response to a job RPC has been received. Decodes the
response and finalizes the result, then reports the result to the
job manager.
"""
if self._closed:
return
assert self._job is not None
logger.debug("worker {} got response".format(id(self)))
result = self._job.get_result(response)
self._manager.add_result(self._job, result)
self._load_job()
|
python
|
{
"resource": ""
}
|
q278738
|
Worker.close
|
test
|
def close(self):
"""
Closes the worker. No more jobs will be handled by the worker, and any
running job is immediately returned to the job manager.
"""
if self._closed:
return
self._closed = True
if self._job is not None:
self._manager.return_job(self._job)
self._job = None
|
python
|
{
"resource": ""
}
|
q278739
|
Master.run
|
test
|
def run(self, job_list):
"""
Runs a job set which consists of the jobs in an iterable job list.
"""
if self._closed:
raise RuntimeError("master is closed")
return self._manager.add_job_set(job_list)
|
python
|
{
"resource": ""
}
|
q278740
|
Master.close
|
test
|
def close(self):
"""
Starts closing the HighFive master. The server will be closed and
all queued job sets will be cancelled.
"""
if self._closed:
return
self._closed = True
self._server.close()
self._manager.close()
for worker in self._workers:
worker.close()
|
python
|
{
"resource": ""
}
|
q278741
|
Results._change
|
test
|
def _change(self):
"""
Called when a state change has occurred. Waiters are notified that a
change has occurred.
"""
for waiter in self._waiters:
if not waiter.done():
waiter.set_result(None)
self._waiters = []
|
python
|
{
"resource": ""
}
|
q278742
|
Results.add
|
test
|
def add(self, result):
"""
Adds a new result.
"""
assert not self._complete
self._results.append(result)
self._change()
|
python
|
{
"resource": ""
}
|
q278743
|
Results.wait_changed
|
test
|
async def wait_changed(self):
"""
Waits until the result set changes. Possible changes can be a result
being added or the result set becoming complete. If the result set is
already completed, this method returns immediately.
"""
if not self.is_complete():
waiter = self._loop.create_future()
self._waiters.append(waiter)
await waiter
|
python
|
{
"resource": ""
}
|
q278744
|
JobSet._load_job
|
test
|
def _load_job(self):
"""
If there is still a job in the job iterator, loads it and increments
the active job count.
"""
try:
next_job = next(self._jobs)
except StopIteration:
self._on_deck = None
else:
if not isinstance(next_job, Job):
next_job = DefaultJob(next_job)
self._on_deck = next_job
self._active_jobs += 1
|
python
|
{
"resource": ""
}
|
q278745
|
JobSet._done
|
test
|
def _done(self):
"""
Marks the job set as completed, and notifies all waiting tasks.
"""
self._results.complete()
waiters = self._waiters
for waiter in waiters:
waiter.set_result(None)
self._manager.job_set_done(self)
|
python
|
{
"resource": ""
}
|
q278746
|
JobSet.add_result
|
test
|
def add_result(self, result):
"""
Adds the result of a completed job to the result list, then decrements
the active job count. If the job set is already complete, the result is
simply discarded instead.
"""
if self._active_jobs == 0:
return
self._results.add(result)
self._active_jobs -= 1
if self._active_jobs == 0:
self._done()
|
python
|
{
"resource": ""
}
|
q278747
|
JobSet.cancel
|
test
|
def cancel(self):
"""
Cancels the job set. The job set is immediately finished, and all
queued jobs are discarded.
"""
if self._active_jobs == 0:
return
self._jobs = iter(())
self._on_deck = None
self._return_queue.clear()
self._active_jobs = 0
self._done()
|
python
|
{
"resource": ""
}
|
q278748
|
JobSet.wait_done
|
test
|
async def wait_done(self):
"""
Waits until the job set is finished. Returns immediately if the job set
is already finished.
"""
if self._active_jobs > 0:
future = self._loop.create_future()
self._waiters.append(future)
await future
|
python
|
{
"resource": ""
}
|
q278749
|
JobManager._distribute_jobs
|
test
|
def _distribute_jobs(self):
"""
Distributes jobs from the active job set to any waiting get_job
callbacks.
"""
while (self._active_js.job_available()
and len(self._ready_callbacks) > 0):
job = self._active_js.get_job()
self._job_sources[job] = self._active_js
callback = self._ready_callbacks.popleft()
callback(job)
|
python
|
{
"resource": ""
}
|
q278750
|
JobManager.add_job_set
|
test
|
def add_job_set(self, job_list):
"""
Adds a job set to the manager's queue. If there is no job set running,
it is activated immediately. A new job set handle is returned.
"""
assert not self._closed
results = Results(loop=self._loop)
js = JobSet(job_list, results, self, loop=self._loop)
if not js.is_done():
if self._active_js is None:
self._active_js = js
logger.debug("activated job set")
self._distribute_jobs()
else:
self._js_queue.append(js)
else:
logger.debug("new job set has no jobs")
return JobSetHandle(js, results)
|
python
|
{
"resource": ""
}
|
q278751
|
JobManager.get_job
|
test
|
def get_job(self, callback):
"""
Calls the given callback function when a job becomes available.
"""
assert not self._closed
if self._active_js is None or not self._active_js.job_available():
self._ready_callbacks.append(callback)
else:
job = self._active_js.get_job()
self._job_sources[job] = self._active_js
callback(job)
|
python
|
{
"resource": ""
}
|
q278752
|
JobManager.return_job
|
test
|
def return_job(self, job):
"""
Returns a job to its source job set to be run again later.
"""
if self._closed:
return
js = self._job_sources[job]
if len(self._ready_callbacks) > 0:
callback = self._ready_callbacks.popleft()
callback(job)
else:
del self._job_sources[job]
js.return_job(job)
|
python
|
{
"resource": ""
}
|
q278753
|
JobManager.add_result
|
test
|
def add_result(self, job, result):
"""
Adds the result of a job to the results list of the job's source job
set.
"""
if self._closed:
return
js = self._job_sources[job]
del self._job_sources[job]
js.add_result(result)
|
python
|
{
"resource": ""
}
|
q278754
|
JobManager.job_set_done
|
test
|
def job_set_done(self, js):
"""
Called when a job set has been completed or cancelled. If the job set
was active, the next incomplete job set is loaded from the job set
queue and is activated.
"""
if self._closed:
return
if self._active_js != js:
return
try:
while self._active_js.is_done():
logger.debug("job set done")
self._active_js = self._js_queue.popleft()
logger.debug("activated job set")
except IndexError:
self._active_js = None
else:
self._distribute_jobs()
|
python
|
{
"resource": ""
}
|
q278755
|
JobManager.close
|
test
|
def close(self):
"""
Closes the job manager. No more jobs will be assigned, no more job sets
will be added, and any queued or active job sets will be cancelled.
"""
if self._closed:
return
self._closed = True
if self._active_js is not None:
self._active_js.cancel()
for js in self._js_queue:
js.cancel()
|
python
|
{
"resource": ""
}
|
q278756
|
_uniquify
|
test
|
def _uniquify(_list):
"""Remove duplicates in a list."""
seen = set()
result = []
for x in _list:
if x not in seen:
result.append(x)
seen.add(x)
return result
|
python
|
{
"resource": ""
}
|
q278757
|
_match_regex
|
test
|
def _match_regex(regex, obj):
"""
Returns true if the regex matches the object, or matches a string
contained in the object if it is some sort of container.
:param regex: A regex.
:type regex: ``regex``
:param obj: An arbitrary object.
:type obj: ``object``
:rtype: ``bool``
"""
if isinstance(obj, six.string_types):
return len(regex.findall(obj)) > 0
elif isinstance(obj, dict):
return _match_regex(regex, obj.values())
elif hasattr(obj, '__iter__'):
# Object is a list or some other iterable.
return any(_match_regex(regex, s)
for s in obj if isinstance(s, six.string_types))
else:
return False
|
python
|
{
"resource": ""
}
|
q278758
|
get_entries
|
test
|
def get_entries(latest, filters, exclude, limit=None):
"""
Lists all available instances.
:param latest: If true, ignores the cache and grabs the latest list.
:type latest: ``bool``
:param filters: Filters to apply to results. A result will only be shown
if it includes all text in all filters.
:type filters: [``str``]
:param exclude: The opposite of filters. Results will be rejected if they
include any of these strings.
:type exclude: [``str``]
:param limit: Maximum number of entries to show (default no maximum).
:type limit: ``int`` or ``NoneType``
:return: A list of host entries.
:rtype: ``list`` of :py:class:`HostEntry`
"""
entry_list = _list_all_latest() if latest is True or not _is_valid_cache()\
else _list_all_cached()
filtered = filter_entries(entry_list, filters, exclude)
if limit is not None:
return filtered[:limit]
else:
return filtered
|
python
|
{
"resource": ""
}
|
q278759
|
get_region
|
test
|
def get_region():
"""Use the environment to get the current region"""
global _REGION
if _REGION is None:
region_name = os.getenv("AWS_DEFAULT_REGION") or "us-east-1"
region_dict = {r.name: r for r in boto.regioninfo.get_regions("ec2")}
if region_name not in region_dict:
raise ValueError("No such EC2 region: {}. Check AWS_DEFAULT_REGION "
"environment variable".format(region_name))
_REGION = region_dict[region_name]
return _REGION
|
python
|
{
"resource": ""
}
|
q278760
|
filter_entries
|
test
|
def filter_entries(entries, filters, exclude):
"""
Filters a list of host entries according to the given filters.
:param entries: A list of host entries.
:type entries: [:py:class:`HostEntry`]
:param filters: Regexes that must match a `HostEntry`.
:type filters: [``str``]
:param exclude: Regexes that must NOT match a `HostEntry`.
:type exclude: [``str``]
:return: The filtered list of host entries.
:rtype: [:py:class:`HostEntry`]
"""
filtered = [entry
for entry in entries
if all(entry.matches(f) for f in filters)
and not any(entry.matches(e) for e in exclude)]
return filtered
|
python
|
{
"resource": ""
}
|
q278761
|
get_host
|
test
|
def get_host(name):
"""
Prints the public dns name of `name`, if it exists.
:param name: The instance name.
:type name: ``str``
"""
f = {'instance-state-name': 'running', 'tag:Name': name}
ec2 = boto.connect_ec2(region=get_region())
rs = ec2.get_all_instances(filters=f)
if len(rs) == 0:
raise Exception('Host "%s" not found' % name)
print(rs[0].instances[0].public_dns_name)
|
python
|
{
"resource": ""
}
|
q278762
|
HostEntry.from_dict
|
test
|
def from_dict(cls, entry_dict):
"""Deserialize a HostEntry from a dictionary.
This is more or less the same as calling
HostEntry(**entry_dict), but is clearer if something is
missing.
:param entry_dict: A dictionary in the format outputted by to_dict().
:type entry_dict: ``dict``
:return: A HostEntry object.
:rtype: ``cls``
"""
return cls(
name=entry_dict["name"],
instance_type=entry_dict["instance_type"],
hostname=entry_dict["hostname"],
private_ip=entry_dict["private_ip"],
public_ip=entry_dict["public_ip"],
stack_name=entry_dict["stack_name"],
stack_id=entry_dict["stack_id"],
logical_id=entry_dict["logical_id"],
security_groups=entry_dict["security_groups"],
tags=entry_dict["tags"],
ami_id=entry_dict["ami_id"],
launch_time=entry_dict["launch_time"],
instance_id=entry_dict["instance_id"]
)
|
python
|
{
"resource": ""
}
|
q278763
|
HostEntry._get_attrib
|
test
|
def _get_attrib(self, attr, convert_to_str=False):
"""
Given an attribute name, looks it up on the entry. Names that
start with ``tags.`` are looked up in the ``tags`` dictionary.
:param attr: Name of attribute to look up.
:type attr: ``str``
:param convert_to_str: Convert result to a string.
:type convert_to_str: ``bool``
:rtype: ``object``
"""
if attr.startswith('tags.'):
tag = attr[len('tags.'):]
if tag in self.tags and self.tags[tag] != '':
return self.tags[tag]
elif convert_to_str is True:
return '<not set>'
else:
return self.tags.get(tag)
elif not hasattr(self, attr):
raise AttributeError('Invalid attribute: {0}. Perhaps you meant '
'{1}?'.format(red(attr),
green('tags.' + attr)))
else:
result = getattr(self, attr)
if convert_to_str is True and not result:
return '<none>'
elif convert_to_str is True and isinstance(result, list):
return ', '.join(result)
elif convert_to_str is True:
return str(result)
else:
return result
|
python
|
{
"resource": ""
}
|
q278764
|
HostEntry.sort_by
|
test
|
def sort_by(cls, entries, attribute):
"""
Sorts a list of entries by the given attribute.
"""
def key(entry):
return entry._get_attrib(attribute, convert_to_str=True)
return sorted(entries, key=key)
|
python
|
{
"resource": ""
}
|
q278765
|
HostEntry.repr_as_line
|
test
|
def repr_as_line(self, additional_columns=None, only_show=None, sep=','):
"""
Returns a representation of the host as a single line, with columns
joined by ``sep``.
:param additional_columns: Columns to show in addition to defaults.
:type additional_columns: ``list`` of ``str``
:param only_show: A specific list of columns to show.
:type only_show: ``NoneType`` or ``list`` of ``str``
:param sep: The column separator to use.
:type sep: ``str``
:rtype: ``str``
"""
additional_columns = additional_columns or []
if only_show is not None:
columns = _uniquify(only_show)
else:
columns = _uniquify(self.DEFAULT_COLUMNS + additional_columns)
to_display = [self._get_attrib(c, convert_to_str=True) for c in columns]
return sep.join(to_display)
|
python
|
{
"resource": ""
}
|
q278766
|
HostEntry.from_boto_instance
|
test
|
def from_boto_instance(cls, instance):
"""
Loads a ``HostEntry`` from a boto instance.
:param instance: A boto instance object.
:type instance: :py:class:`boto.ec2.instance.Instance`
:rtype: :py:class:`HostEntry`
"""
return cls(
name=instance.tags.get('Name'),
private_ip=instance.private_ip_address,
public_ip=instance.ip_address,
instance_type=instance.instance_type,
instance_id=instance.id,
hostname=instance.dns_name,
stack_id=instance.tags.get('aws:cloudformation:stack-id'),
stack_name=instance.tags.get('aws:cloudformation:stack-name'),
logical_id=instance.tags.get('aws:cloudformation:logical-id'),
security_groups=[g.name for g in instance.groups],
launch_time=instance.launch_time,
ami_id=instance.image_id,
tags={k.lower(): v for k, v in six.iteritems(instance.tags)}
)
|
python
|
{
"resource": ""
}
|
q278767
|
HostEntry.matches
|
test
|
def matches(self, _filter):
"""
Returns whether the instance matches the given filter text.
:param _filter: A regex filter. If it starts with `<identifier>:`, then
the part before the colon will be used as an attribute
and the part after will be applied to that attribute.
:type _filter: ``basestring``
:return: True if the entry matches the filter.
:rtype: ``bool``
"""
within_attrib = re.match(r'^([a-z_.]+):(.*)', _filter)
having_attrib = re.match(r'^([a-z_.]+)\?$', _filter)
if within_attrib is not None:
# Then we're matching against a specific attribute.
val = self._get_attrib(within_attrib.group(1))
sub_regex = within_attrib.group(2)
if len(sub_regex) > 0:
sub_regex = re.compile(sub_regex, re.IGNORECASE)
return _match_regex(sub_regex, val)
else:
# Then we are matching on the value being empty.
return val == '' or val is None or val == []
elif having_attrib is not None:
# Then we're searching for anything that has a specific attribute.
val = self._get_attrib(having_attrib.group(1))
return val != '' and val is not None and val != []
else:
regex = re.compile(_filter, re.IGNORECASE)
return _match_regex(regex, vars(self))
|
python
|
{
"resource": ""
}
|
q278768
|
HostEntry.display
|
test
|
def display(self):
"""
Returns the best name to display for this host. Uses the instance
name if available; else just the public IP.
:rtype: ``str``
"""
if isinstance(self.name, six.string_types) and len(self.name) > 0:
return '{0} ({1})'.format(self.name, self.public_ip)
else:
return self.public_ip
|
python
|
{
"resource": ""
}
|
q278769
|
HostEntry.render_entries
|
test
|
def render_entries(cls, entries, additional_columns=None,
only_show=None, numbers=False):
"""
Pretty-prints a list of entries. If the window is wide enough to
support printing as a table, runs the `print_table.render_table`
function on the table. Otherwise, constructs a line-by-line
representation.
:param entries: A list of entries.
:type entries: [:py:class:`HostEntry`]
:param additional_columns: Columns to show in addition to defaults.
:type additional_columns: ``list`` of ``str``
:param only_show: A specific list of columns to show.
:type only_show: ``NoneType`` or ``list`` of ``str``
:param numbers: Whether to include a number column.
:type numbers: ``bool``
:return: A pretty-printed string.
:rtype: ``str``
"""
additional_columns = additional_columns or []
if only_show is not None:
columns = _uniquify(only_show)
else:
columns = _uniquify(cls.DEFAULT_COLUMNS + additional_columns)
top_row = [cls.prettyname(col) for col in columns]
table = [top_row] if numbers is False else [[''] + top_row]
for i, entry in enumerate(entries):
row = [entry._get_attrib(c, convert_to_str=True) for c in columns]
table.append(row if numbers is False else [i] + row)
cur_width = get_current_terminal_width()
colors = [get_color_hash(c, MIN_COLOR_BRIGHT, MAX_COLOR_BRIGHT)
for c in columns]
if cur_width >= get_table_width(table):
return render_table(table,
column_colors=colors if numbers is False
else [green] + colors)
else:
result = []
first_index = 1 if numbers is True else 0
for row in table[1:]:
rep = [green('%s:' % row[0] if numbers is True else '-----')]
for i, val in enumerate(row[first_index:]):
color = colors[i-1 if numbers is True else i]
name = columns[i]
rep.append(' %s: %s' % (name, color(val)))
result.append('\n'.join(rep))
return '\n'.join(result)
|
python
|
{
"resource": ""
}
|
q278770
|
add_timestamp
|
test
|
def add_timestamp(logger_class, log_method, event_dict):
''' Attach the event time, as unix epoch '''
event_dict['timestamp'] = calendar.timegm(time.gmtime())
return event_dict
|
python
|
{
"resource": ""
}
|
q278771
|
setup
|
test
|
def setup(level='debug', output=None):
''' Hivy formated logger '''
output = output or settings.LOG['file']
level = level.upper()
handlers = [
logbook.NullHandler()
]
if output == 'stdout':
handlers.append(
logbook.StreamHandler(sys.stdout,
format_string=settings.LOG['format'],
level=level))
else:
handlers.append(
logbook.FileHandler(output,
format_string=settings.LOG['format'],
level=level))
sentry_dns = settings.LOG['sentry_dns']
if sentry_dns:
handlers.append(SentryHandler(sentry_dns, level='ERROR'))
return logbook.NestedSetup(handlers)
|
python
|
{
"resource": ""
}
|
q278772
|
logger
|
test
|
def logger(name=__name__, output=None, uuid=False, timestamp=False):
''' Configure and return a new logger for hivy modules '''
processors = []
if output == 'json':
processors.append(structlog.processors.JSONRenderer())
if uuid:
processors.append(add_unique_id)
if timestamp:
processors.append(add_timestamp)
return structlog.wrap_logger(
logbook.Logger(name),
processors=processors
)
|
python
|
{
"resource": ""
}
|
q278773
|
setup
|
test
|
def setup(title, output='json', timezone=None):
''' Implement celery workers using json and redis '''
timezone = timezone or dna.time_utils._detect_timezone()
broker_url = 'redis://{}:{}/{}'.format(
os.environ.get('BROKER_HOST', 'localhost'),
os.environ.get('BROKER_PORT', 6379),
0
)
app = Celery(title, broker=broker_url)
app.conf.update(
CELERY_TASK_SERIALIZER=output,
CELERY_ACCEPT_CONTENT=[output], # Ignore other content
CELERY_RESULT_SERIALIZER=output,
CELERY_RESULT_BACKEND=broker_url,
CELERY_TIMEZONE=timezone,
CELERYD_FORCE_EXECV=True,
CELERY_ENABLE_UTC=True,
CELERY_IGNORE_RESULT=False
)
return app
|
python
|
{
"resource": ""
}
|
q278774
|
RestfulWorker.get
|
test
|
def get(self, worker_id):
''' Return status report '''
code = 200
if worker_id == 'all':
report = {'workers': [{
'id': job,
'report': self._inspect_worker(job)}
for job in self.jobs]
}
elif worker_id in self.jobs:
report = {
'id': worker_id,
'report': self._inspect_worker(worker_id)
}
else:
report = {'error': 'job {} unknown'.format(worker_id)}
code = 404
return flask.jsonify(report), code
|
python
|
{
"resource": ""
}
|
q278775
|
RestfulWorker.delete
|
test
|
def delete(self, worker_id):
''' Stop and remove a worker '''
code = 200
if worker_id in self.jobs:
# NOTE pop it if done ?
self.jobs[worker_id]['worker'].revoke(terminate=True)
report = {
'id': worker_id,
'revoked': True
# FIXME Unable to serialize self.jobs[worker_id]
# 'session': self.jobs.pop(worker_id)
}
self.jobs.pop(worker_id)
else:
report = {'error': 'job {} unknown'.format(worker_id)}
code = 404
return flask.jsonify(report), code
|
python
|
{
"resource": ""
}
|
q278776
|
switch_opt
|
test
|
def switch_opt(default, shortname, help_msg):
"""Define a switchable ConfOpt.
This creates a boolean option. If you use it in your CLI, it can be
switched on and off by prepending + or - to its name: +opt / -opt.
Args:
default (bool): the default value of the switch option.
shortname (str): short name of the option, no shortname will be used if
it is set to None.
help_msg (str): short description of the option.
Returns:
:class:`~loam.manager.ConfOpt`: a configuration option with the given
properties.
"""
return ConfOpt(bool(default), True, shortname,
dict(action=internal.Switch), True, help_msg, None)
|
python
|
{
"resource": ""
}
|
q278777
|
config_conf_section
|
test
|
def config_conf_section():
"""Define a configuration section handling config file.
Returns:
dict of ConfOpt: it defines the 'create', 'create_local', 'update',
'edit' and 'editor' configuration options.
"""
config_dict = OrderedDict((
('create',
ConfOpt(None, True, None, {'action': 'store_true'},
False, 'create most global config file')),
('create_local',
ConfOpt(None, True, None, {'action': 'store_true'},
False, 'create most local config file')),
('update',
ConfOpt(None, True, None, {'action': 'store_true'},
False, 'add missing entries to config file')),
('edit',
ConfOpt(None, True, None, {'action': 'store_true'},
False, 'open config file in a text editor')),
('editor',
ConfOpt('vim', False, None, {}, True, 'text editor')),
))
return config_dict
|
python
|
{
"resource": ""
}
|
q278778
|
set_conf_str
|
test
|
def set_conf_str(conf, optstrs):
"""Set options from a list of section.option=value string.
Args:
conf (:class:`~loam.manager.ConfigurationManager`): the conf to update.
optstrs (list of str): the list of 'section.option=value' formatted
string.
"""
falsy = ['0', 'no', 'n', 'off', 'false', 'f']
bool_actions = ['store_true', 'store_false', internal.Switch]
for optstr in optstrs:
opt, val = optstr.split('=', 1)
sec, opt = opt.split('.', 1)
if sec not in conf:
raise error.SectionError(sec)
if opt not in conf[sec]:
raise error.OptionError(opt)
meta = conf[sec].def_[opt]
if meta.default is None:
if 'type' in meta.cmd_kwargs:
cast = meta.cmd_kwargs['type']
else:
act = meta.cmd_kwargs.get('action')
cast = bool if act in bool_actions else str
else:
cast = type(meta.default)
if cast is bool and val.lower() in falsy:
val = ''
conf[sec][opt] = cast(val)
|
python
|
{
"resource": ""
}
|
q278779
|
config_cmd_handler
|
test
|
def config_cmd_handler(conf, config='config'):
"""Implement the behavior of a subcmd using config_conf_section
Args:
conf (:class:`~loam.manager.ConfigurationManager`): it should contain a
section created with :func:`config_conf_section` function.
config (str): name of the configuration section created with
:func:`config_conf_section` function.
"""
if conf[config].create or conf[config].update:
conf.create_config_(update=conf[config].update)
if conf[config].create_local:
conf.create_config_(index=-1, update=conf[config].update)
if conf[config].edit:
if not conf.config_files_[0].is_file():
conf.create_config_(update=conf[config].update)
subprocess.call(shlex.split('{} {}'.format(conf[config].editor,
conf.config_files_[0])))
|
python
|
{
"resource": ""
}
|
q278780
|
create_complete_files
|
test
|
def create_complete_files(climan, path, cmd, *cmds, zsh_sourceable=False):
"""Create completion files for bash and zsh.
Args:
climan (:class:`~loam.cli.CLIManager`): CLI manager.
path (path-like): directory in which the config files should be
created. It is created if it doesn't exist.
cmd (str): command name that should be completed.
cmds (str): extra command names that should be completed.
zsh_sourceable (bool): if True, the generated file will contain an
explicit call to ``compdef``, which means it can be sourced
to activate CLI completion.
"""
path = pathlib.Path(path)
zsh_dir = path / 'zsh'
if not zsh_dir.exists():
zsh_dir.mkdir(parents=True)
zsh_file = zsh_dir / '_{}.sh'.format(cmd)
bash_dir = path / 'bash'
if not bash_dir.exists():
bash_dir.mkdir(parents=True)
bash_file = bash_dir / '{}.sh'.format(cmd)
climan.zsh_complete(zsh_file, cmd, *cmds, sourceable=zsh_sourceable)
climan.bash_complete(bash_file, cmd, *cmds)
|
python
|
{
"resource": ""
}
|
q278781
|
render_columns
|
test
|
def render_columns(columns, write_borders=True, column_colors=None):
"""
Renders a list of columns.
:param columns: A list of columns, where each column is a list of strings.
:type columns: [[``str``]]
:param write_borders: Whether to write the top and bottom borders.
:type write_borders: ``bool``
:param column_colors: A list of coloring functions, one for each column.
Optional.
:type column_colors: [``str`` -> ``str``] or ``NoneType``
:return: The rendered columns.
:rtype: ``str``
"""
if column_colors is not None and len(column_colors) != len(columns):
raise ValueError('Wrong number of column colors')
widths = [max(len(cell) for cell in column) for column in columns]
max_column_length = max(len(column) for column in columns)
result = '\n'.join(render_row(i, columns, widths, column_colors)
for i in range(max_column_length))
if write_borders:
border = '+%s+' % '|'.join('-' * (w + 2) for w in widths)
return '%s\n%s\n%s' % (border, result, border)
else:
return result
|
python
|
{
"resource": ""
}
|
q278782
|
render_row
|
test
|
def render_row(num, columns, widths, column_colors=None):
"""
Render the `num`th row of each column in `columns`.
:param num: Which row to render.
:type num: ``int``
:param columns: The list of columns.
:type columns: [[``str``]]
:param widths: The widths of each column.
:type widths: [``int``]
:param column_colors: An optional list of coloring functions.
:type column_colors: [``str`` -> ``str``] or ``NoneType``
:return: The rendered row.
:rtype: ``str``
"""
row_str = '|'
cell_strs = []
for i, column in enumerate(columns):
try:
cell = column[num]
# We choose the number of spaces before we color the string, so
# that the coloring codes don't affect the length.
spaces = ' ' * (widths[i] - len(cell))
if column_colors is not None and column_colors[i] is not None:
cell = column_colors[i](cell)
cell_strs.append(' %s%s ' % (cell, spaces))
except IndexError:
# If the index is out of range, just print an empty cell.
cell_strs.append(' ' * (widths[i] + 2))
return '|%s|' % '|'.join(cell_strs)
|
python
|
{
"resource": ""
}
|
q278783
|
render_table
|
test
|
def render_table(table, write_borders=True, column_colors=None):
"""
Renders a table. A table is a list of rows, each of which is a list
of arbitrary objects. The `.str` method will be called on each element
of the row. Jagged tables are ok; in this case, each row will be expanded
to the maximum row length.
:param table: A list of rows, as described above.
:type table: [[``object``]]
:param write_borders: Whether there should be a border on the top and
bottom. Defaults to ``True``.
:type write_borders: ``bool``
:param column_colors: An optional list of coloring *functions* to be
applied to each cell in each column. If provided,
the list's length must be equal to the maximum
number of columns. ``None`` can be mixed in to this
list so that a selection of columns can be colored.
:type column_colors: [``str`` -> ``str``] or ``NoneType``
:return: The rendered table.
:rtype: ``str``
"""
prepare_rows(table)
columns = transpose_table(table)
return render_columns(columns, write_borders, column_colors)
|
python
|
{
"resource": ""
}
|
q278784
|
prepare_rows
|
test
|
def prepare_rows(table):
"""
Prepare the rows so they're all strings, and all the same length.
:param table: A 2D grid of anything.
:type table: [[``object``]]
:return: A table of strings, where every row is the same length.
:rtype: [[``str``]]
"""
num_columns = max(len(row) for row in table)
for row in table:
while len(row) < num_columns:
row.append('')
for i in range(num_columns):
row[i] = str(row[i]) if row[i] is not None else ''
return table
|
python
|
{
"resource": ""
}
|
q278785
|
color
|
test
|
def color(number):
"""
Returns a function that colors a string with a number from 0 to 255.
"""
if supports_256():
template = "\033[38;5;{number}m{text}\033[0m"
else:
template = "\033[{number}m{text}\033[0m"
def _color(text):
if not all([sys.stdout.isatty(), sys.stderr.isatty()]):
return text
else:
return template.format(number=number, text=text)
return _color
|
python
|
{
"resource": ""
}
|
q278786
|
get_color_hash
|
test
|
def get_color_hash(string, _min=MIN_COLOR_BRIGHT, _max=MAX_COLOR_BRIGHT):
"""
Hashes a string and returns a coloring function whose color number lies between ``_min`` and ``_max``.
"""
hash_num = int(hashlib.sha1(string.encode('utf-8')).hexdigest()[:6], 16)
_range = _max - _min
num_in_range = hash_num % _range
return color(_min + num_in_range)
|
python
|
{
"resource": ""
}
|
q278787
|
random_color
|
test
|
def random_color(_min=MIN_COLOR, _max=MAX_COLOR):
"""Returns a random color between min and max."""
return color(random.randint(_min, _max))
|
python
|
{
"resource": ""
}
|
q278788
|
get_input
|
test
|
def get_input(prompt, default=None, exit_msg='bye!'):
"""
Reads stdin, exits with a message if interrupted, EOF, or a quit message.
:return: The entered input. Converts to an integer if possible.
:rtype: ``str`` or ``int``
"""
try:
response = six.moves.input(prompt)
except (KeyboardInterrupt, EOFError):
print()
print(exit_msg)
exit()
try:
return int(response)
except ValueError:
if response.strip() == "" and default is not None:
return default
else:
return response
|
python
|
{
"resource": ""
}
|
q278789
|
check_credentials
|
test
|
def check_credentials(username, password):
''' Verify basic HTTP authentication '''
user = models.User.objects(
username=username,
password=password
).first()
return user or None
|
python
|
{
"resource": ""
}
|
q278790
|
check_token
|
test
|
def check_token(token):
''' Verify HTTP header token authentication '''
user = models.User.objects(api_key=token).first()
return user or None
|
python
|
{
"resource": ""
}
|
q278791
|
requires_token_auth
|
test
|
def requires_token_auth(resource):
'''
Flask decorator protecting resources using a token scheme
'''
@functools.wraps(resource)
def decorated(*args, **kwargs):
''' Check provided token '''
token = flask.request.headers.get('Authorization')
user = check_token(token)
if not token or user is None:
log.warn('authentication failed', token=token)
return auth_failed()
flask.g.user = user
log.info('authentication succeeded', token=token, user=flask.g.user)
return resource(*args, **kwargs)
return decorated
|
python
|
{
"resource": ""
}
|
q278792
|
is_running
|
test
|
def is_running(process):
''' `pgrep` returns an error code if no process was found '''
try:
pgrep = sh.Command('/usr/bin/pgrep')
pgrep(process)
flag = True
except sh.ErrorReturnCode_1:
flag = False
return flag
|
python
|
{
"resource": ""
}
|
q278793
|
dynamic_import
|
test
|
def dynamic_import(mod_path, obj_name=None):
''' Take a string and return the corresponding module '''
try:
module = __import__(mod_path, fromlist=['whatever'])
except ImportError, error:
raise errors.DynamicImportFailed(
module='.'.join([mod_path, obj_name]), reason=error)
# Make sure we're up-to-date
reload(module)
if obj_name is None:
obj = module
elif hasattr(module, obj_name):
obj = getattr(module, obj_name)
else:
raise errors.DynamicImportFailed(
module='.'.join([mod_path, obj_name]),
reason='module {} has no attribute {}'.
format(module.__name__, obj_name))
return obj
|
python
|
{
"resource": ""
}
|
q278794
|
self_ip
|
test
|
def self_ip(public=False):
''' Utility for logbook information injection '''
try:
if public:
data = str(urlopen('http://checkip.dyndns.com/').read())
ip_addr = re.compile(
r'Address: (\d+\.\d+\.\d+\.\d+)').search(data).group(1)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.connect(('google.com', 0))
ip_addr = sock.getsockname()[0]
except Exception, error:
print('Online test failed : {}'.format(error))
raise
return ip_addr
|
python
|
{
"resource": ""
}
|
q278795
|
ApiClient.request
|
test
|
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None):
"""
Makes the HTTP request using RESTClient.
"""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers)
else:
raise ValueError(
"http method must be `GET`, `HEAD`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
|
python
|
{
"resource": ""
}
|
q278796
|
ApiClient.prepare_post_parameters
|
test
|
def prepare_post_parameters(self, post_params=None, files=None):
"""
Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = {}
if post_params:
params.update(post_params)
if files:
for k, v in iteritems(files):
if not v:
continue
with open(v, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = mimetypes.\
guess_type(filename)[0] or 'application/octet-stream'
params[k] = tuple([filename, filedata, mimetype])
return params
|
python
|
{
"resource": ""
}
|
q278797
|
App.serve
|
test
|
def serve(self, app_docopt=DEFAULT_DOC, description=''):
''' Configure from cli and run the server '''
exit_status = 0
if isinstance(app_docopt, str):
args = docopt(app_docopt, version=description)
elif isinstance(app_docopt, dict):
args = app_docopt
else:
raise ValueError('unknown configuration object ({})'
.format(type(app_docopt)))
log_level = args.get('--log', 'debug')
is_debug = args.get('--debug', False)
# TODO More serious default
log_output = 'stdout' if is_debug else 'apy.log'
safe_bind = args.get('--bind', '127.0.0.1')
safe_port = int(args.get('--port', 5000))
log_setup = dna.logging.setup(level=log_level, output=log_output)
with log_setup.applicationbound():
try:
log.info('server ready',
version=description,
log=log_level,
debug=is_debug,
bind='{}:{}'.format(safe_bind, safe_port))
self.app.run(host=safe_bind,
port=safe_port,
debug=is_debug)
except Exception as error:
if is_debug:
raise
log.error('{}: {}'.format(type(error).__name__, str(error)))
exit_status = 1
finally:
log.info('session ended with status {}'.format(exit_status))
return exit_status
|
python
|
{
"resource": ""
}
|
q278798
|
WYSIWYGWidget.render
|
test
|
def render(self, name, value, attrs=None):
"""Include a hidden input to stored the serialized upload value."""
context = attrs or {}
context.update({'name': name, 'value': value, })
return render_to_string(self.template_name, context)
|
python
|
{
"resource": ""
}
|
q278799
|
stream_command
|
test
|
def stream_command(command, formatter=None, write_stdin=None, ignore_empty=False):
"""
Starts `command` in a subprocess and prints every line the command prints.
:param command: The bash command to run. Must use fully-qualified paths.
:type command: ``str``
:param formatter: An optional formatting function to apply to each line.
:type formatter: ``function`` or ``NoneType``
:param write_stdin: An optional string to write to the process' stdin.
:type write_stdin: ``str`` or ``NoneType``
:param ignore_empty: If true, empty or whitespace-only lines will be skipped.
:type ignore_empty: ``bool``
"""
command_list = shlex.split(command)
try:
proc = subprocess.Popen(command_list, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
except Exception as e:
raise IOError('Encountered error: {0} when running command {1}'
.format(e.message, ' '.join(command_list)))
if write_stdin is not None:
proc.stdin.write(write_stdin)
proc.stdin.flush()
while proc.poll() is None:
try:
line = proc.stdout.readline()
except KeyboardInterrupt:
sys.exit('Keyboard interrupt while running {}'.format(command))
if len(line.strip()) == 0 and ignore_empty is True:
continue
elif 'killed by signal 1' in decode(line).lower():
continue
elif 'to the list of known hosts' in decode(line).lower():
continue
if formatter is not None:
line = formatter(line)
sys.stdout.write(line)
result = proc.poll()
return result
|
python
|
{
"resource": ""
}
|