code | func_name | language | repo | path | url | license
---|---|---|---|---|---|---
def _add_task(self, *args, **kwargs):
"""
Call ``self._scheduler.add_task``, but store the values too so we can
implement :py:func:`luigi.execution_summary.summary`.
"""
task_id = kwargs['task_id']
status = kwargs['status']
runnable = kwargs['runnable']
task = self._scheduled_tasks.get(task_id)
if task:
self._add_task_history.append((task, status, runnable))
kwargs['owners'] = task._owner_list()
if task_id in self._batch_running_tasks:
for batch_task in self._batch_running_tasks.pop(task_id):
self._add_task_history.append((batch_task, status, True))
if task and kwargs.get('params'):
kwargs['param_visibilities'] = task._get_param_visibilities()
self._scheduler.add_task(*args, **kwargs)
        logger.info('Informed scheduler that task %s has status %s', task_id, status)

_add_task | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def __enter__(self):
"""
Start the KeepAliveThread.
"""
self._keep_alive_thread = KeepAliveThread(self._scheduler, self._id,
self._config.ping_interval,
self._handle_rpc_message)
self._keep_alive_thread.daemon = True
self._keep_alive_thread.start()
        return self

__enter__ | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def __exit__(self, type, value, traceback):
"""
Stop the KeepAliveThread and kill still running tasks.
"""
self._keep_alive_thread.stop()
self._keep_alive_thread.join()
for task in self._running_tasks.values():
if task.is_alive():
task.terminate()
self._task_result_queue.close()
        return False  # Don't suppress exception

__exit__ | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

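Below is a small usage sketch (not from the repo) showing how the ``__enter__``/``__exit__`` pair above is meant to be driven; ``MyTask`` is a hypothetical placeholder task.

```python
import luigi
from luigi.scheduler import Scheduler
from luigi.worker import Worker


class MyTask(luigi.Task):
    # hypothetical placeholder task
    def run(self):
        print('running')


sch = Scheduler()
with Worker(scheduler=sch) as w:   # __enter__ starts the KeepAliveThread
    w.add(MyTask())
    w.run()
# __exit__ stops the keep-alive thread and terminates leftover task processes
```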
def add(self, task, multiprocess=False, processes=0):
"""
Add a Task for the worker to check and possibly schedule and run.
Returns True if task and its dependencies were successfully scheduled or completed before.
"""
if self._first_task is None and hasattr(task, 'task_id'):
self._first_task = task.task_id
self.add_succeeded = True
if multiprocess:
queue = multiprocessing.Manager().Queue()
pool = multiprocessing.Pool(processes=processes if processes > 0 else None)
else:
queue = DequeQueue()
pool = SingleProcessPool()
self._validate_task(task)
pool.apply_async(check_complete, [task, queue, self._task_completion_cache])
# we track queue size ourselves because len(queue) won't work for multiprocessing
queue_size = 1
try:
seen = {task.task_id}
while queue_size:
current = queue.get()
queue_size -= 1
item, is_complete = current
for next in self._add(item, is_complete):
if next.task_id not in seen:
self._validate_task(next)
seen.add(next.task_id)
pool.apply_async(check_complete, [next, queue, self._task_completion_cache])
queue_size += 1
except (KeyboardInterrupt, TaskException):
raise
except Exception as ex:
self.add_succeeded = False
formatted_traceback = traceback.format_exc()
self._log_unexpected_error(task)
task.trigger_event(Event.BROKEN_TASK, task, ex)
self._email_unexpected_error(task, formatted_traceback)
raise
finally:
pool.close()
pool.join()
        return self.add_succeeded

add | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def _purge_children(self):
"""
Find dead children and put a response on the result queue.
:return:
"""
for task_id, p in self._running_tasks.items():
if not p.is_alive() and p.exitcode:
error_msg = 'Task {} died unexpectedly with exit code {}'.format(task_id, p.exitcode)
p.task.trigger_event(Event.PROCESS_FAILURE, p.task, error_msg)
elif p.timeout_time is not None and time.time() > float(p.timeout_time) and p.is_alive():
p.terminate()
error_msg = 'Task {} timed out after {} seconds and was terminated.'.format(task_id, p.worker_timeout)
p.task.trigger_event(Event.TIMEOUT, p.task, error_msg)
else:
continue
logger.info(error_msg)
            self._task_result_queue.put((task_id, FAILED, error_msg, [], []))

_purge_children | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def _handle_next_task(self):
"""
We have to catch three ways a task can be "done":
1. normal execution: the task runs/fails and puts a result back on the queue,
2. new dependencies: the task yielded new deps that were not complete and
will be rescheduled and dependencies added,
3. child process dies: we need to catch this separately.
"""
self._idle_since = None
while True:
self._purge_children() # Deal with subprocess failures
try:
task_id, status, expl, missing, new_requirements = (
self._task_result_queue.get(
timeout=self._config.wait_interval))
except Queue.Empty:
return
task = self._scheduled_tasks[task_id]
if not task or task_id not in self._running_tasks:
continue
# Not a running task. Probably already removed.
# Maybe it yielded something?
# external task if run not implemented, retry-able if config option is enabled.
external_task_retryable = _is_external(task) and self._config.retry_external_tasks
if status == FAILED and not external_task_retryable:
self._email_task_failure(task, expl)
new_deps = []
if new_requirements:
new_req = [load_task(module, name, params)
for module, name, params in new_requirements]
for t in new_req:
self.add(t)
new_deps = [t.task_id for t in new_req]
self._add_task(worker=self._id,
task_id=task_id,
status=status,
expl=json.dumps(expl),
resources=task.process_resources(),
runnable=None,
params=task.to_str_params(),
family=task.task_family,
module=task.task_module,
new_deps=new_deps,
assistant=self._assistant,
retry_policy_dict=_get_retry_policy_dict(task))
self._running_tasks.pop(task_id)
# re-add task to reschedule missing dependencies
if missing:
reschedule = True
# keep out of infinite loops by not rescheduling too many times
for task_id in missing:
self.unfulfilled_counts[task_id] += 1
if (self.unfulfilled_counts[task_id] >
self._config.max_reschedules):
reschedule = False
if reschedule:
self.add(task)
self.run_succeeded &= (status == DONE) or (len(new_deps) > 0)
            return

_handle_next_task | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def _keep_alive(self, get_work_response):
"""
        Returns true if a worker should stay alive, given the response from get_work.
If worker-keep-alive is not set, this will always return false.
For an assistant, it will always return the value of worker-keep-alive.
Otherwise, it will return true for nonzero n_pending_tasks.
If worker-count-uniques is true, it will also
require that one of the tasks is unique to this worker.
"""
if not self._config.keep_alive:
return False
elif self._assistant:
return True
elif self._config.count_last_scheduled:
return get_work_response.n_pending_last_scheduled > 0
elif self._config.count_uniques:
return get_work_response.n_unique_pending > 0
elif get_work_response.n_pending_tasks == 0:
return False
elif not self._config.max_keep_alive_idle_duration:
return True
elif not self._idle_since:
return True
else:
time_to_shutdown = self._idle_since + self._config.max_keep_alive_idle_duration - datetime.datetime.now()
logger.debug("[%s] %s until shutdown", self._id, time_to_shutdown)
            return time_to_shutdown > datetime.timedelta(0)

_keep_alive | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def handle_interrupt(self, signum, _):
"""
Stops the assistant from asking for more work on SIGUSR1
"""
if signum == signal.SIGUSR1:
            self._start_phasing_out()

handle_interrupt | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def _start_phasing_out(self):
"""
        Go into a mode where we don't ask for more work and quit once existing
tasks are done.
"""
self._config.keep_alive = False
        self._stop_requesting_work = True

_start_phasing_out | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def run(self):
"""
Returns True if all scheduled tasks were executed successfully.
"""
logger.info('Running Worker with %d processes', self.worker_processes)
sleeper = self._sleeper()
self.run_succeeded = True
self._add_worker()
while True:
while len(self._running_tasks) >= self.worker_processes > 0:
logger.debug('%d running tasks, waiting for next task to finish', len(self._running_tasks))
self._handle_next_task()
get_work_response = self._get_work()
if get_work_response.worker_state == WORKER_STATE_DISABLED:
self._start_phasing_out()
if get_work_response.task_id is None:
if not self._stop_requesting_work:
self._log_remote_tasks(get_work_response)
if len(self._running_tasks) == 0:
self._idle_since = self._idle_since or datetime.datetime.now()
if self._keep_alive(get_work_response):
next(sleeper)
continue
else:
break
else:
self._handle_next_task()
continue
# task_id is not None:
logger.debug("Pending tasks: %s", get_work_response.n_pending_tasks)
self._run_task(get_work_response.task_id)
while len(self._running_tasks):
logger.debug('Shut down Worker, %d more tasks to go', len(self._running_tasks))
self._handle_next_task()
        return self.run_succeeded

run | python | spotify/luigi | luigi/worker.py | https://github.com/spotify/luigi/blob/master/luigi/worker.py | Apache-2.0

def getpcmd(pid):
"""
Returns command of process.
:param pid:
"""
if os.name == "nt":
# Use wmic command instead of ps on Windows.
cmd = 'wmic path win32_process where ProcessID=%s get Commandline 2> nul' % (pid, )
with os.popen(cmd, 'r') as p:
lines = [line for line in p.readlines() if line.strip("\r\n ") != ""]
if lines:
_, val = lines
return val
elif sys.platform == "darwin":
# Use pgrep instead of /proc on macOS.
pidfile = ".%d.pid" % (pid, )
with open(pidfile, 'w') as f:
f.write(str(pid))
try:
p = Popen(['pgrep', '-lf', '-F', pidfile], stdout=PIPE)
stdout, _ = p.communicate()
line = stdout.decode('utf8').strip()
if line:
_, scmd = line.split(' ', 1)
return scmd
finally:
os.unlink(pidfile)
else:
# Use the /proc filesystem
# At least on android there have been some issues with not all
# process infos being readable. In these cases using the `ps` command
# worked. See the pull request at
# https://github.com/spotify/luigi/pull/1876
try:
with open('/proc/{0}/cmdline'.format(pid), 'r') as fh:
return fh.read().replace('\0', ' ').rstrip()
except IOError:
# the system may not allow reading the command line
# of a process owned by another user
pass
# Fallback instead of None, for e.g. Cygwin where -o is an "unknown option" for the ps command:
    return '[PROCESS_WITH_PID={}]'.format(pid)

getpcmd | python | spotify/luigi | luigi/lock.py | https://github.com/spotify/luigi/blob/master/luigi/lock.py | Apache-2.0

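A brief, hypothetical usage sketch for ``getpcmd``:

```python
import os

from luigi.lock import getpcmd

# Reads /proc/<pid>/cmdline on Linux, shells out to pgrep on macOS,
# and queries wmic on Windows.
print(getpcmd(os.getpid()))
```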
def acquire_for(pid_dir, num_available=1, kill_signal=None):
"""
Makes sure the process is only run once at the same time with the same name.
    Notice that since we check the process name, different parameters to the same
command can spawn multiple processes at the same time, i.e. running
"/usr/bin/my_process" does not prevent anyone from launching
"/usr/bin/my_process --foo bar".
"""
my_pid, my_cmd, pid_file = get_info(pid_dir)
# Create a pid file if it does not exist
try:
os.mkdir(pid_dir)
os.chmod(pid_dir, 0o700)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
pass
# Let variable "pids" be all pids who exist in the .pid-file who are still
# about running the same command.
pids = {pid for pid in _read_pids_file(pid_file) if getpcmd(pid) == my_cmd}
if kill_signal is not None:
for pid in pids:
os.kill(pid, kill_signal)
print('Sent kill signal to Pids: {}'.format(pids))
# We allow for the killer to progress, yet we don't want these to stack
# up! So we only allow it once.
num_available += 1
if len(pids) >= num_available:
# We are already running under a different pid
print('Pid(s) {} already running'.format(pids))
if kill_signal is not None:
print('Note: There have (probably) been 1 other "--take-lock"'
' process which continued to run! Probably no need to run'
' this one as well.')
return False
_write_pids_file(pid_file, pids | {my_pid})
    return True

acquire_for | python | spotify/luigi | luigi/lock.py | https://github.com/spotify/luigi/blob/master/luigi/lock.py | Apache-2.0

def copy(self, path, dest, raise_if_exists=False):
"""
Copies the contents of a single file path to dest
"""
if raise_if_exists and dest in self.get_all_data():
raise RuntimeError('Destination exists: %s' % path)
contents = self.get_all_data()[path]
        self.get_all_data()[dest] = contents

copy | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

def remove(self, path, recursive=True, skip_trash=True):
"""
Removes the given mockfile. skip_trash doesn't have any meaning.
"""
if recursive:
to_delete = []
for s in self.get_all_data().keys():
if s.startswith(path):
to_delete.append(s)
for s in to_delete:
self.get_all_data().pop(s)
else:
            self.get_all_data().pop(path)

remove | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

def move(self, path, dest, raise_if_exists=False):
"""
Moves a single file from path to dest
"""
if raise_if_exists and dest in self.get_all_data():
raise RuntimeError('Destination exists: %s' % path)
contents = self.get_all_data().pop(path)
        self.get_all_data()[dest] = contents

move | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

def listdir(self, path):
"""
listdir does a prefix match of self.get_all_data(), but doesn't yet support globs.
"""
return [s for s in self.get_all_data().keys()
                if s.startswith(path)]

listdir | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

def mkdir(self, path, parents=True, raise_if_exists=False):
"""
mkdir is a noop.
"""
        pass

mkdir | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

def move(self, path, raise_if_exists=False):
"""
Call MockFileSystem's move command
"""
        self.fs.move(self.path, path, raise_if_exists)

move | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

def rename(self, *args, **kwargs):
"""
Call move to rename self
"""
        self.move(*args, **kwargs)

rename | python | spotify/luigi | luigi/mock.py | https://github.com/spotify/luigi/blob/master/luigi/mock.py | Apache-2.0

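A sketch of how the mock filesystem above is typically exercised in tests; all state lives in a single shared in-memory store.

```python
from luigi.mock import MockTarget

target = MockTarget('/data/report.txt')
with target.open('w') as f:
    f.write('hello')

assert target.exists()
# listdir is a prefix match against the shared in-memory data
assert MockTarget.fs.listdir('/data') == ['/data/report.txt']

MockTarget.fs.move('/data/report.txt', '/data/archived.txt')
assert not target.exists()
```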
def get_instance(cls):
""" Singleton getter """
        return cls._instance

get_instance | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def global_instance(cls, cmdline_args, allow_override=False):
"""
Meant to be used as a context manager.
"""
orig_value = cls._instance
assert (orig_value is None) or allow_override
new_value = None
try:
new_value = CmdlineParser(cmdline_args)
cls._instance = new_value
yield new_value
finally:
assert cls._instance is new_value
            cls._instance = orig_value

global_instance | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

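A minimal sketch of using ``CmdlineParser.global_instance`` as a context manager; ``Greet`` is a hypothetical task defined only for the example.

```python
import luigi
from luigi.cmdline_parser import CmdlineParser


class Greet(luigi.Task):
    # hypothetical task used only for this example
    name = luigi.Parameter(default='world')


with CmdlineParser.global_instance(['Greet', '--name', 'luigi']) as cp:
    task = cp.get_task_obj()
    assert task.name == 'luigi'
```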
def __init__(self, cmdline_args):
"""
Initialize cmd line args
"""
known_args, _ = self._build_parser().parse_known_args(args=cmdline_args)
self._attempt_load_module(known_args)
# We have to parse again now. As the positionally first unrecognized
# argument (the task) could be different.
known_args, _ = self._build_parser().parse_known_args(args=cmdline_args)
root_task = known_args.root_task
parser = self._build_parser(root_task=root_task,
help_all=known_args.core_help_all)
self._possibly_exit_with_help(parser, known_args)
if not root_task:
raise SystemExit('No task specified')
else:
# Check that what we believe to be the task is correctly spelled
Register.get_task_cls(root_task)
known_args = parser.parse_args(args=cmdline_args)
        self.known_args = known_args  # Also publicly expose parsed arguments

__init__ | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def get_task_obj(self):
"""
Get the task object
"""
        return self._get_task_cls()(**self._get_task_kwargs())

get_task_obj | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def _get_task_cls(self):
"""
Get the task class
"""
        return Register.get_task_cls(self.known_args.root_task)

_get_task_cls | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def _get_task_kwargs(self):
"""
Get the local task arguments as a dictionary. The return value is in
the form ``dict(my_param='my_value', ...)``
"""
res = {}
for (param_name, param_obj) in self._get_task_cls().get_params():
attr = getattr(self.known_args, param_name)
if attr:
res.update(((param_name, param_obj.parse(attr)),))
        return res

_get_task_kwargs | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def _attempt_load_module(known_args):
"""
Load the --module parameter
"""
module = known_args.core_module
if module:
            __import__(module)

_attempt_load_module | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def _possibly_exit_with_help(parser, known_args):
"""
Check if the user passed --help[-all], if so, print a message and exit.
"""
if known_args.core_help or known_args.core_help_all:
parser.print_help()
            sys.exit()

_possibly_exit_with_help | python | spotify/luigi | luigi/cmdline_parser.py | https://github.com/spotify/luigi/blob/master/luigi/cmdline_parser.py | Apache-2.0

def move(self, old_path, new_path, raise_if_exists=False):
"""
Move file atomically. If source and destination are located
on different filesystems, atomicity is approximated
but cannot be guaranteed.
"""
if raise_if_exists and os.path.exists(new_path):
raise FileAlreadyExists('Destination exists: %s' % new_path)
d = os.path.dirname(new_path)
if d and not os.path.exists(d):
self.mkdir(d)
try:
os.replace(old_path, new_path)
except OSError as err:
if err.errno == errno.EXDEV:
new_path_tmp = '%s-%09d' % (new_path, random.randint(0, 999999999))
shutil.copy(old_path, new_path_tmp)
os.replace(new_path_tmp, new_path)
os.remove(old_path)
else:
                raise err

move | python | spotify/luigi | luigi/local_target.py | https://github.com/spotify/luigi/blob/master/luigi/local_target.py | Apache-2.0

def rename_dont_move(self, path, dest):
"""
Rename ``path`` to ``dest``, but don't move it into the ``dest``
folder (if it is a folder). This method is just a wrapper around the
``move`` method of LocalTarget.
"""
        self.move(path, dest, raise_if_exists=True)

rename_dont_move | python | spotify/luigi | luigi/local_target.py | https://github.com/spotify/luigi/blob/master/luigi/local_target.py | Apache-2.0

def makedirs(self):
"""
Create all parent folders if they do not exist.
"""
normpath = os.path.normpath(self.path)
parentfolder = os.path.dirname(normpath)
if parentfolder:
try:
os.makedirs(parentfolder)
except OSError:
                pass

makedirs | python | spotify/luigi | luigi/local_target.py | https://github.com/spotify/luigi/blob/master/luigi/local_target.py | Apache-2.0

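A short sketch tying ``LocalTarget`` together with the atomic ``move`` above; the output path is hypothetical.

```python
from luigi.local_target import LocalTarget

target = LocalTarget('/tmp/example-output/data.csv')   # hypothetical path
target.makedirs()                      # create /tmp/example-output if needed
with target.open('w') as f:            # written to a temp file first
    f.write('a,b\n1,2\n')              # the atomic move publishes it on close
assert target.exists()
```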
def from_utc(utcTime, fmt=None):
"""convert UTC time string to time.struct_time: change datetime.datetime to time, return time.struct_time type"""
if fmt is None:
try_formats = ["%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"]
else:
try_formats = [fmt]
for fmt in try_formats:
try:
time_struct = datetime.datetime.strptime(utcTime, fmt)
except ValueError:
pass
else:
date = int(time.mktime(time_struct.timetuple()))
return date
else:
raise ValueError("No UTC format matches {}".format(utcTime)) | convert UTC time string to time.struct_time: change datetime.datetime to time, return time.struct_time type | from_utc | python | spotify/luigi | luigi/server.py | https://github.com/spotify/luigi/blob/master/luigi/server.py | Apache-2.0 |
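A quick usage sketch for ``from_utc`` (dates are arbitrary):

```python
from luigi.server import from_utc

# Both formats that from_utc tries; the return value is an int timestamp.
assert isinstance(from_utc('2024-01-01 12:30:00'), int)
assert isinstance(from_utc('2024-01-01 12:30:00.250000'), int)
```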
def head(self):
"""HEAD endpoint for health checking the scheduler"""
self.set_status(204)
        self.finish()

head | python | spotify/luigi | luigi/server.py | https://github.com/spotify/luigi/blob/master/luigi/server.py | Apache-2.0

def run(api_port=8082, address=None, unix_socket=None, scheduler=None):
"""
Runs one instance of the API server.
"""
if scheduler is None:
scheduler = Scheduler()
# load scheduler state
scheduler.load()
_init_api(
scheduler=scheduler,
api_port=api_port,
address=address,
unix_socket=unix_socket,
)
# prune work DAG every 60 seconds
pruner = tornado.ioloop.PeriodicCallback(scheduler.prune, 60000)
pruner.start()
def shutdown_handler(signum, frame):
exit_handler()
sys.exit(0)
@atexit.register
def exit_handler():
logger.info("Scheduler instance shutting down")
scheduler.dump()
stop()
signal.signal(signal.SIGINT, shutdown_handler)
signal.signal(signal.SIGTERM, shutdown_handler)
if os.name == 'nt':
signal.signal(signal.SIGBREAK, shutdown_handler)
else:
signal.signal(signal.SIGQUIT, shutdown_handler)
logger.info("Scheduler starting up")
    tornado.ioloop.IOLoop.instance().start()

run | python | spotify/luigi | luigi/server.py | https://github.com/spotify/luigi/blob/master/luigi/server.py | Apache-2.0

def _partition_tasks(worker):
"""
Takes a worker and sorts out tasks based on their status.
Still_pending_not_ext is only used to get upstream_failure, upstream_missing_dependency and run_by_other_worker
"""
task_history = worker._add_task_history
pending_tasks = {task for (task, status, ext) in task_history if status == 'PENDING'}
set_tasks = {}
set_tasks["completed"] = {task for (task, status, ext) in task_history if status == 'DONE' and task in pending_tasks}
set_tasks["already_done"] = {task for (task, status, ext) in task_history
if status == 'DONE' and task not in pending_tasks and task not in set_tasks["completed"]}
set_tasks["ever_failed"] = {task for (task, status, ext) in task_history if status == 'FAILED'}
set_tasks["failed"] = set_tasks["ever_failed"] - set_tasks["completed"]
set_tasks["scheduling_error"] = {task for (task, status, ext) in task_history if status == 'UNKNOWN'}
set_tasks["still_pending_ext"] = {task for (task, status, ext) in task_history
if status == 'PENDING' and task not in set_tasks["ever_failed"] and task not in set_tasks["completed"] and not ext}
set_tasks["still_pending_not_ext"] = {task for (task, status, ext) in task_history
if status == 'PENDING' and task not in set_tasks["ever_failed"] and task not in set_tasks["completed"] and ext}
set_tasks["run_by_other_worker"] = set()
set_tasks["upstream_failure"] = set()
set_tasks["upstream_missing_dependency"] = set()
set_tasks["upstream_run_by_other_worker"] = set()
set_tasks["upstream_scheduling_error"] = set()
set_tasks["not_run"] = set()
    return set_tasks

_partition_tasks | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _root_task(worker):
"""
Return the first task scheduled by the worker, corresponding to the root task
"""
    return worker._add_task_history[0][0]

_root_task | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _populate_unknown_statuses(set_tasks):
"""
Add the "upstream_*" and "not_run" statuses my mutating set_tasks.
"""
visited = set()
for task in set_tasks["still_pending_not_ext"]:
        _depth_first_search(set_tasks, task, visited)

_populate_unknown_statuses | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _depth_first_search(set_tasks, current_task, visited):
"""
This dfs checks why tasks are still pending.
"""
visited.add(current_task)
if current_task in set_tasks["still_pending_not_ext"]:
upstream_failure = False
upstream_missing_dependency = False
upstream_run_by_other_worker = False
upstream_scheduling_error = False
for task in current_task._requires():
if task not in visited:
_depth_first_search(set_tasks, task, visited)
if task in set_tasks["ever_failed"] or task in set_tasks["upstream_failure"]:
set_tasks["upstream_failure"].add(current_task)
upstream_failure = True
if task in set_tasks["still_pending_ext"] or task in set_tasks["upstream_missing_dependency"]:
set_tasks["upstream_missing_dependency"].add(current_task)
upstream_missing_dependency = True
if task in set_tasks["run_by_other_worker"] or task in set_tasks["upstream_run_by_other_worker"]:
set_tasks["upstream_run_by_other_worker"].add(current_task)
upstream_run_by_other_worker = True
if task in set_tasks["scheduling_error"]:
set_tasks["upstream_scheduling_error"].add(current_task)
upstream_scheduling_error = True
if not upstream_failure and not upstream_missing_dependency and \
not upstream_run_by_other_worker and not upstream_scheduling_error and \
current_task not in set_tasks["run_by_other_worker"]:
set_tasks["not_run"].add(current_task) | This dfs checks why tasks are still pending. | _depth_first_search | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0 |
def _get_str(task_dict, extra_indent):
"""
This returns a string for each status
"""
summary_length = execution_summary().summary_length
lines = []
task_names = sorted(task_dict.keys())
for task_family in task_names:
tasks = task_dict[task_family]
tasks = sorted(tasks, key=lambda x: str(x))
prefix_size = 8 if extra_indent else 4
prefix = ' ' * prefix_size
line = None
if summary_length > 0 and len(lines) >= summary_length:
line = prefix + "..."
lines.append(line)
break
if len(tasks[0].get_params()) == 0:
line = prefix + '- {0} {1}()'.format(len(tasks), str(task_family))
elif _get_len_of_params(tasks[0]) > 60 or len(str(tasks[0])) > 200 or \
(len(tasks) == 2 and len(tasks[0].get_params()) > 1 and (_get_len_of_params(tasks[0]) > 40 or len(str(tasks[0])) > 100)):
"""
This is to make sure that there is no really long task in the output
"""
line = prefix + '- {0} {1}(...)'.format(len(tasks), task_family)
elif len((tasks[0].get_params())) == 1:
attributes = {getattr(task, tasks[0].get_params()[0][0]) for task in tasks}
param_class = tasks[0].get_params()[0][1]
first, last = _ranging_attributes(attributes, param_class)
if first is not None and last is not None and len(attributes) > 3:
param_str = '{0}...{1}'.format(param_class.serialize(first), param_class.serialize(last))
else:
param_str = '{0}'.format(_get_str_one_parameter(tasks))
line = prefix + '- {0} {1}({2}={3})'.format(len(tasks), task_family, tasks[0].get_params()[0][0], param_str)
else:
ranging = False
params = _get_set_of_params(tasks)
unique_param_keys = list(_get_unique_param_keys(params))
if len(unique_param_keys) == 1:
unique_param, = unique_param_keys
attributes = params[unique_param]
param_class = unique_param[1]
first, last = _ranging_attributes(attributes, param_class)
if first is not None and last is not None and len(attributes) > 2:
ranging = True
line = prefix + '- {0} {1}({2}'.format(len(tasks), task_family, _get_str_ranging_multiple_parameters(first, last, tasks, unique_param))
if not ranging:
if len(tasks) == 1:
line = prefix + '- {0} {1}'.format(len(tasks), tasks[0])
if len(tasks) == 2:
line = prefix + '- {0} {1} and {2}'.format(len(tasks), tasks[0], tasks[1])
if len(tasks) > 2:
line = prefix + '- {0} {1} ...'.format(len(tasks), tasks[0])
lines.append(line)
    return '\n'.join(lines)

_get_str | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _ranging_attributes(attributes, param_class):
"""
Checks if there is a continuous range
"""
next_attributes = {param_class.next_in_enumeration(attribute) for attribute in attributes}
in_first = attributes.difference(next_attributes)
in_second = next_attributes.difference(attributes)
if len(in_first) == 1 and len(in_second) == 1:
for x in attributes:
if {param_class.next_in_enumeration(x)} == in_second:
return next(iter(in_first)), x
    return None, None

_ranging_attributes | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

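An illustrative sketch of what ``_ranging_attributes`` detects, calling the private helper directly with ``IntParameter`` (whose ``next_in_enumeration`` is value + 1):

```python
from luigi.execution_summary import _ranging_attributes
from luigi.parameter import IntParameter

# A gap-free set of values collapses to its endpoints ...
assert _ranging_attributes({3, 4, 5, 6}, IntParameter()) == (3, 6)
# ... while a set with gaps does not.
assert _ranging_attributes({3, 5, 9}, IntParameter()) == (None, None)
```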
def _get_comments(group_tasks):
"""
Get the human readable comments and quantities for the task types.
"""
comments = {}
for status, human in _COMMENTS:
num_tasks = _get_number_of_tasks_for(status, group_tasks)
if num_tasks:
space = " " if status in _PENDING_SUB_STATUSES else ""
comments[status] = '{space}* {num_tasks} {human}:\n'.format(
space=space,
num_tasks=num_tasks,
human=human)
    return comments

_get_comments | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _get_run_by_other_worker(worker):
"""
    This returns a set of the tasks that are being run by other workers
"""
task_sets = _get_external_workers(worker).values()
    return functools.reduce(lambda a, b: a | b, task_sets, set())

_get_run_by_other_worker | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _get_external_workers(worker):
"""
This returns a dict with a set of tasks for all of the other workers
"""
worker_that_blocked_task = collections.defaultdict(set)
get_work_response_history = worker._get_work_response_history
for get_work_response in get_work_response_history:
if get_work_response['task_id'] is None:
for running_task in get_work_response['running_tasks']:
other_worker_id = running_task['worker']
other_task_id = running_task['task_id']
other_task = worker._scheduled_tasks.get(other_task_id)
if other_worker_id == worker._id or not other_task:
continue
worker_that_blocked_task[other_worker_id].add(other_task)
    return worker_that_blocked_task

_get_external_workers | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _group_tasks_by_name_and_status(task_dict):
"""
    Takes one status group of tasks and returns a dictionary mapping each
    task family name to the list of tasks in that family
"""
group_status = {}
for task in task_dict:
if task.task_family not in group_status:
group_status[task.task_family] = []
group_status[task.task_family].append(task)
    return group_status

_group_tasks_by_name_and_status | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def _create_one_line_summary(status_code):
"""
Given a status_code of type LuigiStatusCode which has a tuple value, returns a one line summary
"""
return "This progress looks {0} because {1}".format(*status_code.value) | Given a status_code of type LuigiStatusCode which has a tuple value, returns a one line summary | _create_one_line_summary | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0 |
def _tasks_status(set_tasks):
"""
Given a grouped set of tasks, returns a LuigiStatusCode
"""
if set_tasks["ever_failed"]:
if not set_tasks["failed"]:
return LuigiStatusCode.SUCCESS_WITH_RETRY
else:
if set_tasks["scheduling_error"]:
return LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED
return LuigiStatusCode.FAILED
elif set_tasks["scheduling_error"]:
return LuigiStatusCode.SCHEDULING_FAILED
elif set_tasks["not_run"]:
return LuigiStatusCode.NOT_RUN
elif set_tasks["still_pending_ext"]:
return LuigiStatusCode.MISSING_EXT
else:
        return LuigiStatusCode.SUCCESS

_tasks_status | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

def summary(worker):
"""
    Given a worker, return a human-readable summary of what the worker has
done.
"""
    return _summary_wrap(_summary_format(_summary_dict(worker), worker))

summary | python | spotify/luigi | luigi/execution_summary.py | https://github.com/spotify/luigi/blob/master/luigi/execution_summary.py | Apache-2.0

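A sketch of how this summary surfaces to users: ``luigi.build(..., detailed_summary=True)`` returns a result object whose ``summary_text`` comes from ``summary()``. ``Nothing`` is a hypothetical no-op task.

```python
import luigi


class Nothing(luigi.Task):
    # hypothetical no-op task that is always complete
    def complete(self):
        return True


result = luigi.build([Nothing()], local_scheduler=True, detailed_summary=True)
print(result.summary_text)   # the text produced by summary()
print(result.status)         # a LuigiStatusCode member
```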
def __init__(self, path="."):
"""
Initializes the SafeExtractor with the specified directory path.
Args:
path (str): The directory to extract files into. Defaults to the current directory.
"""
        self.path = path

__init__ | python | spotify/luigi | luigi/safe_extractor.py | https://github.com/spotify/luigi/blob/master/luigi/safe_extractor.py | Apache-2.0

def _is_within_directory(directory, target):
"""
Checks if a target path is within a given directory.
Args:
directory (str): The directory to check against.
target (str): The target path to check.
Returns:
bool: True if the target path is within the directory, False otherwise.
"""
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
        return prefix == abs_directory

_is_within_directory | python | spotify/luigi | luigi/safe_extractor.py | https://github.com/spotify/luigi/blob/master/luigi/safe_extractor.py | Apache-2.0

def safe_extract(self, tar_path, members=None, *, numeric_owner=False):
"""
Safely extracts the contents of a tar file to the specified directory.
Args:
tar_path (str): The path to the tar file to extract.
members (list, optional): A list of members to extract. Defaults to None.
numeric_owner (bool, optional): If True, only the numeric owner will be used. Defaults to False.
Raises:
RuntimeError: If a path traversal attempt is detected.
"""
with tarfile.open(tar_path, 'r') as tar:
for member in tar.getmembers():
member_path = os.path.join(self.path, member.name)
if not self._is_within_directory(self.path, member_path):
raise RuntimeError("Attempted Path Traversal in Tar File")
            tar.extractall(self.path, members, numeric_owner=numeric_owner)

safe_extract | python | spotify/luigi | luigi/safe_extractor.py | https://github.com/spotify/luigi/blob/master/luigi/safe_extractor.py | Apache-2.0

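A minimal, hypothetical usage sketch for ``SafeExtractor`` (the archive path is a placeholder):

```python
from luigi.safe_extractor import SafeExtractor

extractor = SafeExtractor('/tmp/unpacked')
extractor.safe_extract('bundle.tar.gz')   # placeholder archive path
```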
def get_function_hook(
self, fullname: str
) -> Callable[[FunctionContext], Type] | None:
"""Adjust the return type of the `Parameters` function."""
if self.check_parameter(fullname):
return self._task_parameter_field_callback
        return None

get_function_hook | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def _task_parameter_field_callback(self, ctx: FunctionContext) -> Type:
"""Extract the type of the `default` argument from the Field function, and use it as the return type.
In particular:
* Retrieve the type of the argument which is specified, and use it as return type for the function.
* If no default argument is specified, return AnyType with unannotated type instead of parameter types like `luigi.Parameter()`
This makes mypy avoid conflict between the type annotation and the parameter type.
e.g.
```python
foo: int = luigi.IntParameter()
```
"""
try:
default_idx = ctx.callee_arg_names.index("default")
# if no `default` argument is found, return AnyType with unannotated type.
except ValueError:
return AnyType(TypeOfAny.unannotated)
default_args = ctx.args[default_idx]
if default_args:
default_type = ctx.arg_types[0][0]
default_arg = default_args[0]
# Fallback to default Any type if the field is required
if not isinstance(default_arg, EllipsisExpr):
return default_type
# NOTE: This is a workaround to avoid the error between type annotation and parameter type.
# As the following code snippet, the type of `foo` is `int` but the assigned value is `luigi.IntParameter()`.
# foo: int = luigi.IntParameter()
# TODO: infer mypy type from the parameter type.
        return AnyType(TypeOfAny.unannotated)

_task_parameter_field_callback | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None:
"""Expands type vars in the context of a subtype when an attribute is inherited
from a generic super type."""
if self.type is not None:
with state.strict_optional_set(self._api.options.strict_optional):
                self.type = map_type_from_supertype(self.type, sub_type, self.info)

expand_typevar_from_subtype | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def transform(self) -> bool:
"""Apply all the necessary transformations to the underlying gokart.Task"""
info = self._cls.info
attributes = self.collect_attributes()
if attributes is None:
# Some definitions are not ready. We need another pass.
return False
for attr in attributes:
if attr.type is None:
return False
# If there are no attributes, it may be that the semantic analyzer has not
# processed them yet. In order to work around this, we can simply skip generating
# __init__ if there are no attributes, because if the user truly did not define any,
# then the object default __init__ with an empty signature will be present anyway.
if (
"__init__" not in info.names or info.names["__init__"].plugin_generated
) and attributes:
args = [attr.to_argument(info, of="__init__") for attr in attributes]
add_method_to_class(
self._api, self._cls, "__init__", args=args, return_type=NoneType()
)
info.metadata[METADATA_TAG] = {
"attributes": [attr.serialize() for attr in attributes],
}
        return True

transform | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def collect_attributes(self) -> Optional[List[TaskAttribute]]:
"""Collect all attributes declared in the task and its parents.
All assignments of the form
a: SomeType
b: SomeOtherType = ...
are collected.
Return None if some base class hasn't been processed
yet and thus we'll need to ask for another pass.
"""
cls = self._cls
# First, collect attributes belonging to any class in the MRO, ignoring duplicates.
#
# We iterate through the MRO in reverse because attrs defined in the parent must appear
# earlier in the attributes list than attrs defined in the child.
#
# However, we also want attributes defined in the subtype to override ones defined
# in the parent. We can implement this via a dict without disrupting the attr order
# because dicts preserve insertion order in Python 3.7+.
found_attrs: Dict[str, TaskAttribute] = {}
for info in reversed(cls.info.mro[1:-1]):
if METADATA_TAG not in info.metadata:
continue
# Each class depends on the set of attributes in its task ancestors.
self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))
for data in info.metadata[METADATA_TAG]["attributes"]:
name: str = data["name"]
attr = TaskAttribute.deserialize(info, data, self._api)
# TODO: We shouldn't be performing type operations during the main
# semantic analysis pass, since some TypeInfo attributes might
# still be in flux. This should be performed in a later phase.
attr.expand_typevar_from_subtype(cls.info)
found_attrs[name] = attr
sym_node = cls.info.names.get(name)
if sym_node and sym_node.node and not isinstance(sym_node.node, Var):
self._api.fail(
"Task attribute may only be overridden by another attribute",
sym_node.node,
)
# Second, collect attributes belonging to the current class.
current_attr_names: set[str] = set()
for stmt in self._get_assignment_statements_from_block(cls.defs):
if not self.is_parameter_call(stmt.rvalue):
continue
# a: int, b: str = 1, 'foo' is not supported syntax so we
# don't have to worry about it.
lhs = stmt.lvalues[0]
if not isinstance(lhs, NameExpr):
continue
sym = cls.info.names.get(lhs.name)
if sym is None:
# There was probably a semantic analysis error.
continue
node = sym.node
assert not isinstance(node, PlaceholderNode)
assert isinstance(node, Var)
has_parameter_call, parameter_args = self._collect_parameter_args(
stmt.rvalue
)
has_default = False
# Ensure that something like x: int = field() is rejected
# after an attribute with a default.
if has_parameter_call:
has_default = "default" in parameter_args
# All other assignments are already type checked.
elif not isinstance(stmt.rvalue, TempNode):
has_default = True
if not has_default:
# Make all non-default task attributes implicit because they are de-facto
# set on self in the generated __init__(), not in the class body. On the other
# hand, we don't know how custom task transforms initialize attributes,
# so we don't treat them as implicit. This is required to support descriptors
# (https://github.com/python/mypy/issues/14868).
sym.implicit = True
current_attr_names.add(lhs.name)
with state.strict_optional_set(self._api.options.strict_optional):
init_type = self._infer_task_attr_init_type(sym, stmt)
found_attrs[lhs.name] = TaskAttribute(
name=lhs.name,
has_default=has_default,
line=stmt.line,
column=stmt.column,
type=init_type,
info=cls.info,
api=self._api,
)
        return list(found_attrs.values())

collect_attributes | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def _collect_parameter_args(
self, expr: Expression
) -> tuple[bool, Dict[str, Expression]]:
"""Returns a tuple where the first value represents whether or not
the expression is a call to luigi.Parameter()
and the second value is a dictionary of the keyword arguments that luigi.Parameter() was called with.
"""
if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr):
args = {}
for name, arg in zip(expr.arg_names, expr.args):
if name is None:
# NOTE: this is a workaround to get default value from a parameter
self._api.fail(
"Positional arguments are not allowed for parameters when using the mypy plugin. "
"Update your code to use named arguments, like luigi.Parameter(default='foo') instead of luigi.Parameter('foo')",
expr,
)
continue
args[name] = arg
return True, args
        return False, {}

_collect_parameter_args | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def _infer_task_attr_init_type(
self, sym: SymbolTableNode, context: Context
) -> Type | None:
"""Infer __init__ argument type for an attribute.
In particular, possibly use the signature of __set__.
"""
default = sym.type
if sym.implicit:
return default
t = get_proper_type(sym.type)
# Perform a simple-minded inference from the signature of __set__, if present.
# We can't use mypy.checkmember here, since this plugin runs before type checking.
        # We only support some basic scenarios here, which is hopefully sufficient for
# the vast majority of use cases.
if not isinstance(t, Instance):
return default
setter = t.type.get("__set__")
if not setter:
return default
if isinstance(setter.node, FuncDef):
super_info = t.type.get_containing_type_info("__set__")
assert super_info
if setter.type:
setter_type = get_proper_type(
map_type_from_supertype(setter.type, t.type, super_info)
)
else:
return AnyType(TypeOfAny.unannotated)
if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [
ARG_POS,
ARG_POS,
ARG_POS,
]:
return expand_type_by_instance(setter_type.arg_types[2], t)
else:
self._api.fail(
f'Unsupported signature for "__set__" in "{t.type.name}"', context
)
else:
self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context)
        return default

_infer_task_attr_init_type | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

def is_parameter_call(self, expr: Expression) -> bool:
"""Checks if the expression is a call to luigi.Parameter()"""
if not isinstance(expr, CallExpr):
return False
callee = expr.callee
fullname = None
if isinstance(callee, MemberExpr):
type_info = callee.node
if type_info is None and isinstance(callee.expr, NameExpr):
fullname = f"{callee.expr.name}.{callee.name}"
elif isinstance(callee, NameExpr):
type_info = callee.node
else:
return False
if isinstance(type_info, TypeInfo):
fullname = type_info.fullname
        return fullname is not None and self._task_plugin.check_parameter(fullname)

is_parameter_call | python | spotify/luigi | luigi/mypy.py | https://github.com/spotify/luigi/blob/master/luigi/mypy.py | Apache-2.0

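A sketch of what the plugin enables once registered in the mypy configuration (``plugins = luigi.mypy``); ``ExampleTask`` is hypothetical.

```python
import luigi


class ExampleTask(luigi.Task):
    date = luigi.Parameter(default='2024-01-01')
    retries: int = luigi.IntParameter(default=3)


# With the plugin active, mypy checks this call against the generated
# __init__ signature instead of flagging the int annotation against
# the IntParameter() assignment.
ExampleTask(date='2024-02-01', retries=5)
```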
def namespace(namespace=None, scope=''):
"""
Call to set namespace of tasks declared after the call.
It is often desired to call this function with the keyword argument
``scope=__name__``.
The ``scope`` keyword makes it so that this call is only effective for task
classes with a matching [*]_ ``__module__``. The default value for
``scope`` is the empty string, which means all classes. Multiple calls with
the same scope simply replace each other.
The namespace of a :py:class:`Task` can also be changed by specifying the property
``task_namespace``.
.. code-block:: python
class Task2(luigi.Task):
task_namespace = 'namespace2'
This explicit setting takes priority over whatever is set in the
``namespace()`` method, and it's also inherited through normal python
    inheritance.
There's no equivalent way to set the ``task_family``.
*New since Luigi 2.6.0:* ``scope`` keyword argument.
.. [*] When there are multiple levels of matching module scopes like
``a.b`` vs ``a.b.c``, the more specific one (``a.b.c``) wins.
.. seealso:: The new and better scaling :py:func:`auto_namespace`
"""
    Register._default_namespace_dict[scope] = namespace or ''

namespace | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0

def auto_namespace(scope=''):
"""
Same as :py:func:`namespace`, but instead of a constant namespace, it will
be set to the ``__module__`` of the task class. This is desirable for these
reasons:
* Two tasks with the same name will not have conflicting task families
* It's more pythonic, as modules are Python's recommended way to
do namespacing.
* It's traceable. When you see the full name of a task, you can immediately
identify where it is defined.
We recommend calling this function from your package's outermost
``__init__.py`` file. The file contents could look like this:
.. code-block:: python
import luigi
luigi.auto_namespace(scope=__name__)
To reset an ``auto_namespace()`` call, you can use
``namespace(scope='my_scope')``. But this will not be
needed (and is also discouraged) if you use the ``scope`` kwarg.
*New since Luigi 2.6.0.*
"""
    namespace(namespace=_SAME_AS_PYTHON_MODULE, scope=scope)

auto_namespace | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0

def task_id_str(task_family, params):
"""
Returns a canonical string used to identify a particular task
:param task_family: The task family (class name) of the task
:param params: a dict mapping parameter names to their serialized values
:return: A unique, shortened identifier corresponding to the family and params
"""
# task_id is a concatenation of task family, the first values of the first 3 parameters
    # sorted by parameter name and an md5 hash of the family/parameters as canonicalised json.
param_str = json.dumps(params, separators=(',', ':'), sort_keys=True)
param_hash = hashlib.new('md5', param_str.encode('utf-8'), usedforsecurity=False).hexdigest()
param_summary = '_'.join(p[:TASK_ID_TRUNCATE_PARAMS]
for p in (params[p] for p in sorted(params)[:TASK_ID_INCLUDE_PARAMS]))
param_summary = TASK_ID_INVALID_CHAR_REGEX.sub('_', param_summary)
    return '{}_{}_{}'.format(task_family, param_summary, param_hash[:TASK_ID_TRUNCATE_HASH])

task_id_str | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0

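A quick illustration of the resulting id format (the hash suffix is not shown literally):

```python
from luigi.task import task_id_str

tid = task_id_str('MyTask', {'date': '2024-01-01', 'kind': 'full'})
# e.g. 'MyTask_2024_01_01_full_' followed by a 10-character hash prefix
assert tid.startswith('MyTask_2024_01_01_full_')
```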
def batchable(self):
"""
True if this instance can be run as part of a batch. By default, True
if it has any batched parameters
"""
        return bool(self.batch_param_names())

batchable | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0

def retry_count(self):
"""
Override this positive integer to have different ``retry_count`` at task level
Check :ref:`scheduler-config`
"""
        return None

retry_count | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0

def disable_hard_timeout(self):
"""
Override this positive integer to have different ``disable_hard_timeout`` at task level.
Check :ref:`scheduler-config`
"""
        return None

disable_hard_timeout | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0

def disable_window(self):
"""
Override this positive integer to have different ``disable_window`` at task level.
Check :ref:`scheduler-config`
"""
return None | Override this positive integer to have different ``disable_window`` at task level.
Check :ref:`scheduler-config` | disable_window | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def owner_email(self):
'''
Override this to send out additional error emails to task owner, in addition to the one
defined in the global configuration. This should return a string or a list of strings. e.g.
'[email protected]' or ['[email protected]', '[email protected]']
'''
return None | Override this to send out additional error emails to task owner, in addition to the one
defined in the global configuration. This should return a string or a list of strings. e.g.
'[email protected]' or ['[email protected]', '[email protected]'] | owner_email | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
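A sketch of overriding this property on a concrete task; the class name and addresses are placeholders, not values from the source.
.. code-block:: python

    import luigi

    class NightlyReport(luigi.Task):  # hypothetical task
        @property
        def owner_email(self):
            # A single address string would also be accepted.
            return ['[email protected]', '[email protected]']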
def _owner_list(self):
"""
Turns the owner_email property into a list. This should not be overridden.
"""
owner_email = self.owner_email
if owner_email is None:
return []
elif isinstance(owner_email, str):
return owner_email.split(',')
else:
return owner_email | Turns the owner_email property into a list. This should not be overridden. | _owner_list | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def use_cmdline_section(self):
''' Property used by core config such as `--workers` etc.
These will be exposed without the class as prefix.'''
return True | Property used by core config such as `--workers` etc.
These will be exposed without the class as prefix. | use_cmdline_section | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def event_handler(cls, event):
"""
Decorator for adding event handlers.
"""
def wrapped(callback):
cls._event_callbacks.setdefault(cls, {}).setdefault(event, set()).add(callback)
return callback
return wrapped | Decorator for adding event handlers. | event_handler | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
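A usage sketch for the decorator above, registering a handler for successful runs; the handler body is illustrative only.
.. code-block:: python

    import luigi

    @luigi.Task.event_handler(luigi.Event.SUCCESS)
    def on_any_success(task):
        # Receives the task instance after run() finishes without raising.
        print('completed:', task.task_id)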
def remove_event_handler(cls, event, callback):
"""
Function to remove the event handler registered previously by the cls.event_handler decorator.
"""
cls._event_callbacks[cls][event].remove(callback) | Function to remove the event handler registered previously by the cls.event_handler decorator. | remove_event_handler | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def trigger_event(self, event, *args, **kwargs):
"""
Trigger that calls all of the specified events associated with this class.
"""
for event_class, event_callbacks in self._event_callbacks.items():
if not isinstance(self, event_class):
continue
for callback in event_callbacks.get(event, []):
try:
# callbacks are protected
callback(*args, **kwargs)
except KeyboardInterrupt:
return
except BaseException:
logger.exception("Error in event callback for %r", event) | Trigger that calls all of the specified events associated with this class. | trigger_event | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def accepts_messages(self):
"""
For configuring which scheduler messages can be received. When falsy, this task does not
accept any message. When True, all messages are accepted.
"""
return False | For configuring which scheduler messages can be received. When falsy, this task does not
accept any message. When True, all messages are accepted. | accepts_messages | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
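A hedged sketch of a task that opts in to scheduler messages; it assumes the worker forwards the message queue as ``self.scheduler_messages`` (as described in the Luigi documentation), and the class name is invented.
.. code-block:: python

    import luigi

    class PausableJob(luigi.Task):  # hypothetical task
        accepts_messages = True  # shadow the property to opt in

        def run(self):
            # Messages sent via the scheduler end up on this queue (assumed
            # to be forwarded by the worker when accepts_messages is truthy).
            if not self.scheduler_messages.empty():
                msg = self.scheduler_messages.get()
                msg.respond('received: %s' % msg.content)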
def task_module(self):
''' Returns what Python module to import to get access to this class. '''
# TODO(erikbern): we should think about a language-agnostic mechanism
return self.__class__.__module__ | Returns what Python module to import to get access to this class. | task_module | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def get_task_namespace(cls):
"""
The task family for the given class.
Note: You normally don't want to override this.
"""
if cls.task_namespace != cls.__not_user_specified:
return cls.task_namespace
elif cls._namespace_at_class_time == _SAME_AS_PYTHON_MODULE:
return cls.__module__
return cls._namespace_at_class_time | The task family for the given class.
Note: You normally don't want to override this. | get_task_namespace | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def task_family(self):
"""
DEPRECATED since after 2.4.0. See :py:meth:`get_task_family` instead.
Hopefully there will be less meta magic in Luigi.
Convenience method since a property on the metaclass isn't directly
accessible through the class instances.
"""
return self.__class__.task_family | DEPRECATED since after 2.4.0. See :py:meth:`get_task_family` instead.
Hopefully there will be less meta magic in Luigi.
Convenience method since a property on the metaclass isn't directly
accessible through the class instances. | task_family | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def get_task_family(cls):
"""
The task family for the given class.
If ``task_namespace`` is not set, then it's simply the name of the
class. Otherwise, ``<task_namespace>.`` is prefixed to the class name.
Note: You normally don't want to override this.
"""
if not cls.get_task_namespace():
return cls.__name__
else:
return "{}.{}".format(cls.get_task_namespace(), cls.__name__) | The task family for the given class.
If ``task_namespace`` is not set, then it's simply the name of the
class. Otherwise, ``<task_namespace>.`` is prefixed to the class name.
Note: You normally don't want to override this. | get_task_family | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
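For illustration, how a namespace changes the reported family; the class and namespace names are made up.
.. code-block:: python

    import luigi

    class Cleanup(luigi.Task):  # hypothetical task
        task_namespace = 'maintenance'

    assert Cleanup.get_task_namespace() == 'maintenance'
    assert Cleanup.get_task_family() == 'maintenance.Cleanup'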
def get_params(cls):
"""
Returns all of the Parameters for this Task.
"""
# We want to do this here and not at class instantiation, or else there is no room to extend classes dynamically
params = []
for param_name in dir(cls):
param_obj = getattr(cls, param_name)
if not isinstance(param_obj, Parameter):
continue
params.append((param_name, param_obj))
# The order the parameters are created matters. See Parameter class
params.sort(key=lambda t: t[1]._counter)
return params | Returns all of the Parameters for this Task. | get_params | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def list_to_tuple(x):
""" Make tuples out of lists and sets to allow hashing """
if isinstance(x, list) or isinstance(x, set):
return tuple(x)
else:
return x | Make tuples out of lists and sets to allow hashing | get_param_values.list_to_tuple | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def get_param_values(cls, params, args, kwargs):
"""
Get the values of the parameters from the args and kwargs.
:param params: list of (param_name, Parameter).
:param args: positional arguments
:param kwargs: keyword arguments.
:returns: list of `(name, value)` tuples, one for each parameter.
"""
result = {}
params_dict = dict(params)
task_family = cls.get_task_family()
# In case any exceptions are thrown, create a helpful description of how the Task was invoked
# TODO: should we detect non-reprable arguments? These will lead to mysterious errors
exc_desc = '%s[args=%s, kwargs=%s]' % (task_family, args, kwargs)
# Fill in the positional arguments
positional_params = [(n, p) for n, p in params if p.positional]
for i, arg in enumerate(args):
if i >= len(positional_params):
raise parameter.UnknownParameterException('%s: takes at most %d parameters (%d given)' % (exc_desc, len(positional_params), len(args)))
param_name, param_obj = positional_params[i]
result[param_name] = param_obj.normalize(arg)
# Then the keyword arguments
for param_name, arg in kwargs.items():
if param_name in result:
raise parameter.DuplicateParameterException('%s: parameter %s was already set as a positional parameter' % (exc_desc, param_name))
if param_name not in params_dict:
raise parameter.UnknownParameterException('%s: unknown parameter %s' % (exc_desc, param_name))
result[param_name] = params_dict[param_name].normalize(arg)
# Then use the defaults for anything not filled in
for param_name, param_obj in params:
if param_name not in result:
try:
has_task_value = param_obj.has_task_value(task_family, param_name)
except Exception as exc:
raise ValueError("%s: Error when parsing the default value of '%s'" % (exc_desc, param_name)) from exc
if not has_task_value:
raise parameter.MissingParameterException("%s: requires the '%s' parameter to be set" % (exc_desc, param_name))
result[param_name] = param_obj.task_value(task_family, param_name)
def list_to_tuple(x):
""" Make tuples out of lists and sets to allow hashing """
if isinstance(x, list) or isinstance(x, set):
return tuple(x)
else:
return x
# Check for unconsumed parameters
conf = configuration.get_config()
if not hasattr(cls, "_unconsumed_params"):
cls._unconsumed_params = set()
if task_family in conf.sections():
ignore_unconsumed = getattr(cls, 'ignore_unconsumed', set())
for key, value in conf[task_family].items():
key = key.replace('-', '_')
composite_key = f"{task_family}_{key}"
if key not in result and key not in ignore_unconsumed and composite_key not in cls._unconsumed_params:
warnings.warn(
"The configuration contains the parameter "
f"'{key}' with value '{value}' that is not consumed by the task "
f"'{task_family}'.",
UnconsumedParameterWarning,
)
cls._unconsumed_params.add(composite_key)
# Sort it by the correct order and make a list
return [(param_name, list_to_tuple(result[param_name])) for param_name, param_obj in params] | Get the values of the parameters from the args and kwargs.
:param params: list of (param_name, Parameter).
:param args: positional arguments
:param kwargs: keyword arguments.
:returns: list of `(name, value)` tuples, one for each parameter. | get_param_values | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def initialized(self):
"""
Returns ``True`` if the Task is initialized and ``False`` otherwise.
"""
return hasattr(self, 'task_id') | Returns ``True`` if the Task is initialized and ``False`` otherwise. | initialized | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def from_str_params(cls, params_str):
"""
Creates an instance from a str->str hash.
:param params_str: dict of param name -> value as string.
"""
kwargs = {}
for param_name, param in cls.get_params():
if param_name in params_str:
param_str = params_str[param_name]
if isinstance(param_str, list):
kwargs[param_name] = param._parse_list(param_str)
else:
kwargs[param_name] = param.parse(param_str)
return cls(**kwargs) | Creates an instance from a str->str hash.
:param params_str: dict of param name -> value as string. | from_str_params | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def to_str_params(self, only_significant=False, only_public=False):
"""
Convert all parameters to a str->str hash.
"""
params_str = {}
params = dict(self.get_params())
for param_name, param_value in self.param_kwargs.items():
if (((not only_significant) or params[param_name].significant)
and ((not only_public) or params[param_name].visibility == ParameterVisibility.PUBLIC)
and params[param_name].visibility != ParameterVisibility.PRIVATE):
params_str[param_name] = params[param_name].serialize(param_value)
return params_str | Convert all parameters to a str->str hash. | to_str_params | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
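A sketch of the round trip between ``to_str_params`` and ``from_str_params``; the task and its parameters are invented.
.. code-block:: python

    import datetime
    import luigi

    class Fetch(luigi.Task):  # hypothetical task
        date = luigi.DateParameter()
        retries = luigi.IntParameter(default=3)

    task = Fetch(date=datetime.date(2020, 1, 1))
    as_strings = task.to_str_params()  # roughly {'date': '2020-01-01', 'retries': '3'}
    assert Fetch.from_str_params(as_strings) == task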
def clone(self, cls=None, **kwargs):
"""
Creates a new instance from an existing instance where some of the args have changed.
There are at least two scenarios where this is useful (see test/clone_test.py):
* remove a lot of boilerplate when you have recursive dependencies and lots of args
* there's task inheritance and some logic is on the base class
:param cls:
:param kwargs:
:return:
"""
if cls is None:
cls = self.__class__
new_k = {}
for param_name, param_class in cls.get_params():
if param_name in kwargs:
new_k[param_name] = kwargs[param_name]
elif hasattr(self, param_name):
new_k[param_name] = getattr(self, param_name)
return cls(**new_k) | Creates a new instance from an existing instance where some of the args have changed.
There are at least two scenarios where this is useful (see test/clone_test.py):
* remove a lot of boilerplate when you have recursive dependencies and lots of args
* there's task inheritance and some logic is on the base class
:param cls:
:param kwargs:
:return: | clone | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
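A sketch of the boilerplate-saving use case mentioned above; both task classes are invented.
.. code-block:: python

    import luigi

    class Extract(luigi.Task):  # hypothetical upstream task
        date = luigi.DateParameter()

    class Transform(luigi.Task):  # hypothetical downstream task
        date = luigi.DateParameter()

        def requires(self):
            # Copies the shared `date` parameter; only parameter names that
            # exist on Extract are carried over.
            return self.clone(Extract)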
def __repr__(self):
"""
Build a task representation like `MyTask(param1=1.5, param2='5')`
"""
params = self.get_params()
param_values = self.get_param_values(params, [], self.param_kwargs)
# Build up task id
repr_parts = []
param_objs = dict(params)
for param_name, param_value in param_values:
if param_objs[param_name].significant:
repr_parts.append('%s=%s' % (param_name, param_objs[param_name].serialize(param_value)))
task_str = '{}({})'.format(self.get_task_family(), ', '.join(repr_parts))
return task_str | Build a task representation like `MyTask(param1=1.5, param2='5')` | __repr__ | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def complete(self):
"""
If the task has any outputs, return ``True`` if all outputs exist.
Otherwise, return ``False``.
However, you may freely override this method with custom logic.
"""
outputs = flatten(self.output())
if len(outputs) == 0:
warnings.warn(
"Task %r without outputs has no custom complete() method" % self,
stacklevel=2
)
return False
return all(map(lambda output: output.exists(), outputs)) | If the task has any outputs, return ``True`` if all outputs exist.
Otherwise, return ``False``.
However, you may freely override this method with custom logic. | complete | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
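A sketch of a custom ``complete()`` override; the marker-file path is purely illustrative.
.. code-block:: python

    import luigi

    class LoadPartition(luigi.Task):  # hypothetical task
        def complete(self):
            # Any logic is allowed here; a marker file stands in for a real
            # existence check.
            return luigi.LocalTarget('/tmp/load_partition.DONE').exists()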
def bulk_complete(cls, parameter_tuples):
"""
Returns those of parameter_tuples for which this Task is complete.
Override (with an efficient implementation) for efficient scheduling
with range tools. Keep the logic consistent with that of complete().
"""
raise BulkCompleteNotImplementedError() | Returns those of parameter_tuples for which this Task is complete.
Override (with an efficient implementation) for efficient scheduling
with range tools. Keep the logic consistent with that of complete(). | bulk_complete | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def output(self):
"""
The output that this Task produces.
The output of the Task determines if the Task needs to be run--the task
is considered finished iff the outputs all exist. Subclasses should
override this method to return a single :py:class:`Target` or a list of
:py:class:`Target` instances.
Implementation note
If running multiple workers, the output must be a resource that is accessible
by all workers, such as a DFS or database. Otherwise, workers might compute
the same output since they don't see the work done by other workers.
See :ref:`Task.output`
"""
return [] # default impl | The output that this Task produces.
The output of the Task determines if the Task needs to be run--the task
is considered finished iff the outputs all exist. Subclasses should
override this method to return a single :py:class:`Target` or a list of
:py:class:`Target` instances.
Implementation note
If running multiple workers, the output must be a resource that is accessible
by all workers, such as a DFS or database. Otherwise, workers might compute
the same output since they don't see the work done by other workers.
See :ref:`Task.output` | output | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def requires(self):
"""
The Tasks that this Task depends on.
A Task will only run if all of the Tasks that it requires are completed.
If your Task does not require any other Tasks, then you don't need to
override this method. Otherwise, a subclass can override this method
to return a single Task, a list of Task instances, or a dict whose
values are Task instances.
See :ref:`Task.requires`
"""
return [] # default impl | The Tasks that this Task depends on.
A Task will only run if all of the Tasks that it requires are completed.
If your Task does not require any other Tasks, then you don't need to
override this method. Otherwise, a subclass can override this method
to return a single Task, a list of Task instances, or a dict whose
values are Task instances.
See :ref:`Task.requires` | requires | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
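A minimal sketch tying ``requires()``, ``output()`` and ``run()`` together; the class names and file paths are invented.
.. code-block:: python

    import luigi

    class RawData(luigi.ExternalTask):  # hypothetical data produced elsewhere
        def output(self):
            return luigi.LocalTarget('data/raw.csv')

    class CleanData(luigi.Task):  # hypothetical task
        def requires(self):
            return RawData()

        def output(self):
            return luigi.LocalTarget('data/clean.csv')

        def run(self):
            # self.input() is the output() of RawData.
            with self.input().open('r') as fin, self.output().open('w') as fout:
                fout.write(fin.read().lower())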
def _requires(self):
"""
Override in "template" tasks which themselves are supposed to be
subclassed and thus have their requires() overridden (name preserved to
provide consistent end-user experience), yet need to introduce
(non-input) dependencies.
Must return an iterable which among others contains the _requires() of
the superclass.
"""
return flatten(self.requires()) # base impl | Override in "template" tasks which themselves are supposed to be
subclassed and thus have their requires() overridden (name preserved to
provide consistent end-user experience), yet need to introduce
(non-input) dependencies.
Must return an iterable which among others contains the _requires() of
the superclass. | _requires | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def process_resources(self):
"""
Override in "template" tasks which provide common resource functionality
but allow subclasses to specify additional resources while preserving
the name for consistent end-user experience.
"""
return self.resources # default impl | Override in "template" tasks which provide common resource functionality
but allow subclasses to specify additional resources while preserving
the name for consistent end-user experience. | process_resources | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def input(self):
"""
Returns the outputs of the Tasks returned by :py:meth:`requires`
See :ref:`Task.input`
:return: a list of :py:class:`Target` objects which are specified as
outputs of all required Tasks.
"""
return getpaths(self.requires()) | Returns the outputs of the Tasks returned by :py:meth:`requires`
See :ref:`Task.input`
:return: a list of :py:class:`Target` objects which are specified as
outputs of all required Tasks. | input | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def deps(self):
"""
Internal method used by the scheduler.
Returns the flattened list of requires.
"""
# used by scheduler
return flatten(self._requires()) | Internal method used by the scheduler.
Returns the flattened list of requires. | deps | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def run(self):
"""
The task run method, to be overridden in a subclass.
See :ref:`Task.run`
"""
pass # default impl | The task run method, to be overridden in a subclass.
See :ref:`Task.run` | run | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def on_failure(self, exception):
"""
Override for custom error handling.
This method gets called if an exception is raised in :py:meth:`run`.
The returned value of this method is json encoded and sent to the scheduler
as the `expl` argument. Its string representation will be used as the
body of the error email sent out if any.
Default behavior is to return a string representation of the stack trace.
"""
traceback_string = traceback.format_exc()
return "Runtime error:\n%s" % traceback_string | Override for custom error handling.
This method gets called if an exception is raised in :py:meth:`run`.
The returned value of this method is json encoded and sent to the scheduler
as the `expl` argument. Its string representation will be used as the
body of the error email sent out if any.
Default behavior is to return a string representation of the stack trace. | on_failure | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
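A sketch of a custom ``on_failure`` that prepends context to the default traceback text; the class name is invented.
.. code-block:: python

    import luigi

    class FlakyTask(luigi.Task):  # hypothetical task
        def on_failure(self, exception):
            default_expl = super().on_failure(exception)
            # The returned string becomes the `expl` shown by the scheduler
            # and used in error emails.
            return 'FlakyTask failed with %s\n%s' % (type(exception).__name__, default_expl)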
def on_success(self):
"""
Override for doing custom completion handling for a larger class of tasks
This method gets called when :py:meth:`run` completes without raising any exceptions.
The returned value is json encoded and sent to the scheduler as the `expl` argument.
Default behavior is to send a None value"""
pass | Override for doing custom completion handling for a larger class of tasks
This method gets called when :py:meth:`run` completes without raising any exceptions.
The returned value is json encoded and sent to the scheduler as the `expl` argument.
Default behavior is to send a None value | on_success | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0
def no_unpicklable_properties(self):
"""
Remove unpicklable properties before dumping the task and restore them afterwards.
This method can be called in a subtask's dump method, to ensure unpicklable
properties won't break the dump.
This method is a context-manager which can be called as below:
.. code-block:: python
class DummyTask(luigi.Task):
def _dump(self):
with self.no_unpicklable_properties():
pickle.dumps(self)
"""
unpicklable_properties = tuple(luigi.worker.TaskProcess.forward_reporter_attributes.values())
reserved_properties = {}
for property_name in unpicklable_properties:
if hasattr(self, property_name):
reserved_properties[property_name] = getattr(self, property_name)
setattr(self, property_name, 'placeholder_during_pickling')
yield
for property_name, value in reserved_properties.items():
setattr(self, property_name, value) | Remove unpicklable properties before dumping the task and restore them afterwards.
This method can be called in a subtask's dump method, to ensure unpicklable
properties won't break the dump.
This method is a context-manager which can be called as below:
.. code-block:: python
class DummyTask(luigi.Task):
def _dump(self):
with self.no_unpicklable_properties():
pickle.dumps(self) | no_unpicklable_properties | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def externalize(taskclass_or_taskobject):
"""
Returns an externalized version of a Task. You may both pass an
instantiated task object or a task class. Some examples:
.. code-block:: python
class RequiringTask(luigi.Task):
def requires(self):
task_object = self.clone(MyTask)
return externalize(task_object)
...
Here's mostly equivalent code, but ``externalize`` is applied to a task
class instead.
.. code-block:: python
@luigi.util.requires(externalize(MyTask))
class RequiringTask(luigi.Task):
pass
...
Of course, it may also be used directly on classes and objects (for example
for reexporting or other usage).
.. code-block:: python
MyTask = externalize(MyTask)
my_task_2 = externalize(MyTask2(param='foo'))
If you however want a task class to be external from the beginning, you're
better off inheriting :py:class:`ExternalTask` rather than :py:class:`Task`.
This function tries to be side-effect free by creating a copy of the class
or the object passed in and then modifying that object. In particular this
code shouldn't do anything.
.. code-block:: python
externalize(MyTask) # BAD: This does nothing (as after luigi 2.4.0)
"""
copied_value = copy.copy(taskclass_or_taskobject)
if copied_value is taskclass_or_taskobject:
# Assume it's a class
clazz = taskclass_or_taskobject
@_task_wraps(clazz)
class _CopyOfClass(clazz):
# How to copy a class: http://stackoverflow.com/a/9541120/621449
_visible_in_registry = False
_CopyOfClass.run = None
return _CopyOfClass
else:
# We assume it's an object
copied_value.run = None
return copied_value | Returns an externalized version of a Task. You may both pass an
instantiated task object or a task class. Some examples:
.. code-block:: python
class RequiringTask(luigi.Task):
def requires(self):
task_object = self.clone(MyTask)
return externalize(task_object)
...
Here's mostly equivalent code, but ``externalize`` is applied to a task
class instead.
.. code-block:: python
@luigi.util.requires(externalize(MyTask))
class RequiringTask(luigi.Task):
pass
...
Of course, it may also be used directly on classes and objects (for example
for reexporting or other usage).
.. code-block:: python
MyTask = externalize(MyTask)
my_task_2 = externalize(MyTask2(param='foo'))
If you however want a task class to be external from the beginning, you're
better off inheriting :py:class:`ExternalTask` rather than :py:class:`Task`.
This function tries to be side-effect free by creating a copy of the class
or the object passed in and then modifying that object. In particular this
code shouldn't do anything.
.. code-block:: python
externalize(MyTask) # BAD: This does nothing (as after luigi 2.4.0) | externalize | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def getpaths(struct):
"""
Maps all Tasks in a structured data object to their .output().
"""
if isinstance(struct, Task):
return struct.output()
elif isinstance(struct, dict):
return struct.__class__((k, getpaths(v)) for k, v in struct.items())
elif isinstance(struct, (list, tuple)):
return struct.__class__(getpaths(r) for r in struct)
else:
# Remaining case: assume struct is iterable...
try:
return [getpaths(r) for r in struct]
except TypeError:
raise Exception('Cannot map %s to Task/dict/list' % str(struct)) | Maps all Tasks in a structured data object to their .output(). | getpaths | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
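A sketch of how the structure is preserved while Tasks are swapped for their outputs; the task class and path are invented.
.. code-block:: python

    import luigi
    from luigi.task import getpaths

    class RawData(luigi.ExternalTask):  # hypothetical task
        def output(self):
            return luigi.LocalTarget('data/raw.csv')

    dep_struct = {'raw': RawData(), 'extra': [RawData()]}
    targets = getpaths(dep_struct)
    # targets mirrors dep_struct, but holds LocalTarget objects instead of Tasks.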
def flatten(struct):
"""
Creates a flat list of all items in structured output (dicts, lists, items):
.. code-block:: python
>>> sorted(flatten({'a': 'foo', 'b': 'bar'}))
['bar', 'foo']
>>> sorted(flatten(['foo', ['bar', 'troll']]))
['bar', 'foo', 'troll']
>>> flatten('foo')
['foo']
>>> flatten(42)
[42]
"""
if struct is None:
return []
flat = []
if isinstance(struct, dict):
for _, result in struct.items():
flat += flatten(result)
return flat
if isinstance(struct, str):
return [struct]
try:
# if iterable
iterator = iter(struct)
except TypeError:
return [struct]
for result in iterator:
flat += flatten(result)
return flat | Creates a flat list of all items in structured output (dicts, lists, items):
.. code-block:: python
>>> sorted(flatten({'a': 'foo', 'b': 'bar'}))
['bar', 'foo']
>>> sorted(flatten(['foo', ['bar', 'troll']]))
['bar', 'foo', 'troll']
>>> flatten('foo')
['foo']
>>> flatten(42)
[42] | flatten | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
def flatten_output(task):
"""
Lists all output targets by recursively walking output-less (wrapper) tasks.
"""
output_tasks = OrderedDict() # OrderedDict used as ordered set
tasks_to_process = deque([task])
while tasks_to_process:
current_task = tasks_to_process.popleft()
if flatten(current_task.output()):
if current_task not in output_tasks:
output_tasks[current_task] = None
else:
tasks_to_process.extend(flatten(current_task.requires()))
return flatten(task.output() for task in output_tasks) | Lists all output targets by recursively walking output-less (wrapper) tasks. | flatten_output | python | spotify/luigi | luigi/task.py | https://github.com/spotify/luigi/blob/master/luigi/task.py | Apache-2.0 |
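A sketch showing how wrapper tasks are walked down to real outputs; the classes and path are invented.
.. code-block:: python

    import luigi
    from luigi.task import flatten_output

    class Leaf(luigi.Task):  # hypothetical task with an output
        def output(self):
            return luigi.LocalTarget('leaf.txt')

    class All(luigi.WrapperTask):  # wrapper: no output of its own
        def requires(self):
            return [Leaf()]

    targets = flatten_output(All())  # [LocalTarget('leaf.txt')]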
def recursively_freeze(value):
"""
Recursively walks ``Mapping``s and ``list``s and converts them to ``FrozenOrderedDict`` and ``tuples``, respectively.
"""
if isinstance(value, Mapping):
return FrozenOrderedDict(((k, recursively_freeze(v)) for k, v in value.items()))
elif isinstance(value, list) or isinstance(value, tuple):
return tuple(recursively_freeze(v) for v in value)
return value | Recursively walks ``Mapping``s and ``list``s and converts them to ``FrozenOrderedDict`` and ``tuples``, respectively. | recursively_freeze | python | spotify/luigi | luigi/freezing.py | https://github.com/spotify/luigi/blob/master/luigi/freezing.py | Apache-2.0 |
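A quick sketch of the freezing behaviour; the values are arbitrary.
.. code-block:: python

    from luigi.freezing import recursively_freeze

    frozen = recursively_freeze({'a': [1, 2], 'b': {'c': 3}})
    # Nested mappings become FrozenOrderedDict and lists become tuples,
    # so the whole structure is hashable (given hashable leaf values).
    hash(frozen)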