Columns: desc (string, 3 to 26.7k chars), decl (string, 11 to 7.89k chars), bodies (string, 8 to 553k chars)
'Run git_pillar with the specified configuration'
def get_pillar(self, ext_pillar_conf):
cachedir = tempfile.mkdtemp(dir=TMP)
self.addCleanup(shutil.rmtree, cachedir, ignore_errors=True)
ext_pillar_opts = yaml.safe_load(
    ext_pillar_conf.format(
        cachedir=cachedir,
        extmods=os.path.join(cachedir, 'extmods'),
        **self.ext_opts))
with patch.dict(git_pillar.__opts__, ext_pillar_opts):
    return git_pillar.ext_pillar('minion', ext_pillar_opts['ext_pillar'][0]['git'], {})
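A minimal sketch of the kind of ext_pillar_conf template this helper expects; the {url} placeholder is an assumption (supplied via self.ext_opts) and the repo layout is illustrative only:

# Hypothetical configuration template for get_pillar(); {url} is assumed to come
# from self.ext_opts and is not taken from the source above.
PILLAR_CONF = '''
cachedir: {cachedir}
extension_modules: {extmods}
ext_pillar:
  - git:
    - master {url}
'''
# pillar_data = self.get_pillar(PILLAR_CONF)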
'Create the SSH server and user, and create the git repo'
def setUp(self):
super(GitPillarSSHTestBase, self).setUp() self.sshd_proc = self.find_proc(name='sshd', search=self.sshd_config) self.sshd_bin = salt.utils.path.which('sshd') if (self.sshd_proc is None): self.spawn_server() known_hosts_ret = self.run_function('ssh.set_known_host', user=self.master_opts['user'], hostname='127.0.0.1', port=self.sshd_port, enc='ssh-rsa', fingerprint='fd:6f:7f:5d:06:6b:f2:06:0d:26:93:9e:5a:b5:19:46', hash_known_hosts=False, fingerprint_hash_type='md5') if ('error' in known_hosts_ret): raise Exception("Failed to add key to {0} user's known_hosts file: {1}".format(self.master_opts['user'], known_hosts_ret['error'])) root_dir = os.path.expanduser('~{0}'.format(self.username)) if root_dir.startswith('~'): self.fail("Unable to resolve homedir for user '{0}'".format(self.username)) self.make_repo(root_dir, user=self.username)
'Wrap the parent class\' get_pillar() func in logic that temporarily changes GIT_SSH to point to our custom script, ensuring that the passphraseless key is used to auth without needing to modify the root user\'s ssh config file.'
def get_pillar(self, ext_pillar_conf):
orig_git_ssh = os.environ.pop('GIT_SSH', NOTSET)
os.environ['GIT_SSH'] = self.git_ssh
try:
    return super(GitPillarSSHTestBase, self).get_pillar(ext_pillar_conf)
finally:
    os.environ.pop('GIT_SSH', None)
    if orig_git_ssh is not NOTSET:
        os.environ['GIT_SSH'] = orig_git_ssh
'Create and start the webserver, and create the git repo'
def setUp(self):
super(GitPillarHTTPTestBase, self).setUp()
self.nginx_proc = self.find_proc(name='nginx', search=self.nginx_conf)
self.uwsgi_proc = self.find_proc(name='uwsgi', search=self.uwsgi_conf)
if self.nginx_proc is None and self.uwsgi_proc is None:
    self.spawn_server()
self.make_repo(self.repo_dir)
'Validate the default available options'
def _validate_options(self):
if ((self.xml_output_dir is not None) and self.options.xml_out and (HAS_XMLRUNNER is False)): self.error("'--xml' is not available. The xmlrunner library is not installed.") if self.options.xml_out: self.xml_output_dir = self.options.xml_out if ((self.xml_output_dir is not None) and self.options.xml_out): if (not os.path.isdir(self.xml_output_dir)): os.makedirs(self.xml_output_dir) print(' * Generated unit test XML reports will be stored at {0!r}'.format(self.xml_output_dir)) self.validate_options() if self.support_destructive_tests_selection: os.environ['DESTRUCTIVE_TESTS'] = str(self.options.run_destructive) if self.support_expensive_tests_selection: os.environ['EXPENSIVE_TESTS'] = str(self.options.run_expensive)
'Set up Python\'s logging system to work with/for the test suite'
def _setup_logging(self):
formatter = logging.Formatter('%(asctime)s,%(msecs)03.0f [%(name)-5s:%(lineno)-4d][%(levelname)-8s] %(message)s', datefmt='%H:%M:%S') if (not hasattr(logging, 'TRACE')): logging.TRACE = 5 logging.addLevelName(logging.TRACE, 'TRACE') if (not hasattr(logging, 'GARBAGE')): logging.GARBAGE = 1 logging.addLevelName(logging.GARBAGE, 'GARBAGE') logging.root.setLevel(logging.NOTSET) if self.options.tests_logfile: filehandler = logging.FileHandler(mode='w', filename=self.options.tests_logfile) filehandler.setLevel(logging.DEBUG) filehandler.setFormatter(formatter) logging.root.addHandler(filehandler) print(' * Logging tests on {0}'.format(self.options.tests_logfile)) if (self.options.verbosity >= 2): consolehandler = logging.StreamHandler(sys.stderr) consolehandler.setFormatter(formatter) if (self.options.verbosity >= 6): logging_level = logging.GARBAGE elif (self.options.verbosity == 5): logging_level = logging.TRACE elif (self.options.verbosity == 4): logging_level = logging.DEBUG print('DEBUG') elif (self.options.verbosity == 3): print('INFO') logging_level = logging.INFO else: logging_level = logging.ERROR consolehandler.setLevel(logging_level) logging.root.addHandler(consolehandler) log.info('Runtests logging has been setup')
'Run any initial clean up operations. If sub-classed, don\'t forget to call SaltTestingParser.pre_execution_cleanup(self) from the overridden method.'
def pre_execution_cleanup(self):
if self.options.clean is True:
    for path in (self.xml_output_dir,):
        if path is None:
            continue
        if os.path.isdir(path):
            shutil.rmtree(path)
'Execute a unit test suite'
def run_suite(self, path, display_name, suffix='test_*.py', load_from_name=False, additional_test_dirs=None):
loaded_custom = False loader = TestLoader() try: if load_from_name: tests = loader.loadTestsFromName(display_name) elif ((additional_test_dirs is None) or self.testsuite_directory.startswith(path)): tests = loader.discover(path, suffix, self.testsuite_directory) else: tests = loader.discover(path, suffix) loaded_custom = True except (AttributeError, ImportError): print("Could not locate test '{0}'. Exiting.".format(display_name)) sys.exit(1) if (additional_test_dirs and (not loaded_custom)): for test_dir in additional_test_dirs: additional_tests = loader.discover(test_dir, suffix, test_dir) tests.addTests(additional_tests) header = '{0} Tests'.format(display_name) print_header('Starting {0}'.format(header), width=self.options.output_columns) if self.options.xml_out: runner = XMLTestRunner(stream=sys.stdout, output=self.xml_output_dir, verbosity=self.options.verbosity).run(tests) else: runner = TextTestRunner(stream=sys.stdout, verbosity=self.options.verbosity).run(tests) errors = [] skipped = [] failures = [] for (testcase, reason) in runner.errors: errors.append(TestResult(testcase.id(), reason)) for (testcase, reason) in runner.skipped: skipped.append(TestResult(testcase.id(), reason)) for (testcase, reason) in runner.failures: failures.append(TestResult(testcase.id(), reason)) self.testsuite_results.append(TestsuiteResult(header, errors, skipped, failures, (runner.testsRun - len(((errors + skipped) + failures))))) success = runner.wasSuccessful() del loader del runner return success
'Print a nicely formatted report about the test suite results'
def print_overall_testsuite_report(self):
print() print_header(u' Overall Tests Report ', sep=u'=', centered=True, inline=True, width=self.options.output_columns) failures = errors = skipped = passed = 0 no_problems_found = True for results in self.testsuite_results: failures += len(results.failures) errors += len(results.errors) skipped += len(results.skipped) passed += results.passed if ((not results.failures) and (not results.errors) and (not results.skipped)): continue no_problems_found = False print_header(u'*** {0} '.format(results.header), sep=u'*', inline=True, width=self.options.output_columns) if results.skipped: print_header(u' -------- Skipped Tests ', sep='-', inline=True, width=self.options.output_columns) maxlen = len(max([testcase.id for testcase in results.skipped], key=len)) fmt = u' -> {0: <{maxlen}} -> {1}' for testcase in results.skipped: print(fmt.format(testcase.id, testcase.reason, maxlen=maxlen)) print_header(u' ', sep='-', inline=True, width=self.options.output_columns) if results.errors: print_header(u' -------- Tests with Errors ', sep='-', inline=True, width=self.options.output_columns) for testcase in results.errors: print_header(u' -> {0} '.format(testcase.id), sep=u'.', inline=True, width=self.options.output_columns) for line in testcase.reason.rstrip().splitlines(): print(' {0}'.format(line.rstrip())) print_header(u' ', sep=u'.', inline=True, width=self.options.output_columns) print_header(u' ', sep='-', inline=True, width=self.options.output_columns) if results.failures: print_header(u' -------- Failed Tests ', sep='-', inline=True, width=self.options.output_columns) for testcase in results.failures: print_header(u' -> {0} '.format(testcase.id), sep=u'.', inline=True, width=self.options.output_columns) for line in testcase.reason.rstrip().splitlines(): print(' {0}'.format(line.rstrip())) print_header(u' ', sep=u'.', inline=True, width=self.options.output_columns) print_header(u' ', sep='-', inline=True, width=self.options.output_columns) if no_problems_found: print_header(u'*** No Problems Found While Running Tests ', sep=u'*', inline=True, width=self.options.output_columns) print_header(u'', sep=u'=', inline=True, width=self.options.output_columns) total = sum([passed, skipped, errors, failures]) print('{0} (total={1}, skipped={2}, passed={3}, failures={4}, errors={5}) '.format((((errors or failures) and 'FAILED') or 'OK'), total, skipped, passed, failures, errors)) print_header(' Overall Tests Report ', sep='=', centered=True, inline=True, width=self.options.output_columns)
'Run the finalization procedures. Show report, clean-up file-system, etc'
def finalize(self, exit_code=0):
children = helpers.collect_child_processes(os.getpid()) if (self.options.no_report is False): self.print_overall_testsuite_report() self.post_execution_cleanup() if children: log.info('Terminating test suite child processes: %s', children) helpers.terminate_process(children=children, kill_children=True) children = helpers.collect_child_processes(os.getpid()) if children: log.info('Second run at terminating test suite child processes: %s', children) helpers.terminate_process(children=children, kill_children=True) log.info('Test suite execution finalized with exit code: {0}'.format(exit_code)) self.exit(exit_code)
'Run the tests suite in a Docker container'
def run_suite_in_docker(self):
def stop_running_docked_container(cid, signum=None, frame=None): time.sleep(0.5) print_header('', inline=True, width=self.options.output_columns) scode_call = subprocess.Popen([self.options.docker_binary, 'inspect', '--format={{.State.Running}}', cid], env=os.environ.copy(), close_fds=True, stdout=subprocess.PIPE) scode_call.wait() parsed_scode = scode_call.stdout.read().strip() if six.PY3: parsed_scode = parsed_scode.decode(__salt_system_encoding__) if (parsed_scode != 'false'): sys.stdout.write(' * Making sure the container is stopped. CID: ') sys.stdout.flush() stop_call = subprocess.Popen([self.options.docker_binary, 'stop', '--time=15', cid], env=os.environ.copy(), close_fds=True, stdout=subprocess.PIPE) stop_call.wait() output = stop_call.stdout.read().strip() if six.PY3: output = output.decode(__salt_system_encoding__) print(output) sys.stdout.flush() time.sleep(0.5) sys.stdout.write(' * Container exit code: ') sys.stdout.flush() rcode_call = subprocess.Popen([self.options.docker_binary, 'inspect', '--format={{.State.ExitCode}}', cid], env=os.environ.copy(), close_fds=True, stdout=subprocess.PIPE) rcode_call.wait() parsed_rcode = rcode_call.stdout.read().strip() if six.PY3: parsed_rcode = parsed_rcode.decode(__salt_system_encoding__) try: returncode = int(parsed_rcode) except ValueError: returncode = (-1) print(parsed_rcode) sys.stdout.flush() if ((self.options.docked_skip_delete is False) and ((self.options.docked_skip_delete_on_errors is False) or (self.options.docked_skip_delete_on_error and (returncode == 0)))): sys.stdout.write(' * Cleaning Up Temporary Docker Container. CID: ') sys.stdout.flush() cleanup_call = subprocess.Popen([self.options.docker_binary, 'rm', cid], env=os.environ.copy(), close_fds=True, stdout=subprocess.PIPE) cleanup_call.wait() output = cleanup_call.stdout.read().strip() if six.PY3: output = output.decode(__salt_system_encoding__) print(output) if ('DOCKER_CIDFILE' not in os.environ): os.unlink(cidfile) print_header('', inline=True, width=self.options.output_columns) sys.exit(returncode) if ('/' not in self.options.docked): container = 'salttest/{0}'.format(self.options.docked) else: container = self.options.docked calling_args = [self.options.docked_interpreter, '/salt-source/tests/runtests.py'] for option in self._get_all_options(): if (option.dest is None): continue if (option.dest and ((option.dest in ('verbosity',)) or option.dest.startswith('docked'))): continue default = self.defaults.get(option.dest) value = getattr(self.options, option.dest, default) if (default == value): continue if option.action.startswith('store_'): calling_args.append(option.get_opt_string()) elif (option.action == 'append'): for val in (((value is not None) and value) or default): calling_args.extend([option.get_opt_string(), str(val)]) elif (option.action == 'count'): calling_args.extend(([option.get_opt_string()] * value)) else: calling_args.extend([option.get_opt_string(), str((((value is not None) and value) or default))]) if (not self.options.run_destructive): calling_args.append('--run-destructive') if (self.options.verbosity > 1): calling_args.append('-{0}'.format(('v' * (self.options.verbosity - 1)))) sys.stdout.write(' * Docker command: {0}\n'.format(' '.join(calling_args))) sys.stdout.write(' * Running the tests suite under the {0!r} docker container. CID: '.format(container)) sys.stdout.flush() cidfile = os.environ.get('DOCKER_CIDFILE', tempfile.mktemp(prefix='docked-testsuite-', suffix='.cid')) call = subprocess.Popen([self.options.docker_binary, 'run', '--tty', '--interactive', '-v', '{0}:/salt-source'.format(self.source_code_basedir), '-w', '/salt-source', '-e', 'SHELL=/bin/sh', '-e', 'COLUMNS={0}'.format(WIDTH), '-e', 'LINES={0}'.format(HEIGHT), '--cidfile={0}'.format(cidfile), container, ' '.join(calling_args)], env=os.environ.copy(), close_fds=True) cid = None cid_printed = terminating = exiting = False signal_handler_installed = signalled = False time.sleep(0.25) while True: try: time.sleep(0.15) if (cid_printed is False): with open(cidfile) as cidfile_fd: cid = cidfile_fd.read() if cid: print(cid) sys.stdout.flush() cid_printed = True for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGHUP, signal.SIGQUIT): signal.signal(sig, partial(stop_running_docked_container, cid)) signal_handler_installed = True if exiting: break elif (terminating and (not exiting)): exiting = True call.kill() break elif (signalled and (not terminating)): terminating = True call.terminate() else: call.poll() if (call.returncode is not None): break except KeyboardInterrupt: print('Caught CTRL-C, exiting...') signalled = True call.send_signal(signal.SIGINT) call.wait() time.sleep(0.25) if signal_handler_installed: stop_running_docked_container(cid, signum=(signal.SIGINT if signalled else WEIRD_SIGNAL_NUM)) else: sys.exit(call.returncode)
'Run one or more ``unittest.case.TestCase``'
def run_testcase(self, testcase):
header = '' loader = TestLoader() if isinstance(testcase, list): for case in testcase: tests = loader.loadTestsFromTestCase(case) else: tests = loader.loadTestsFromTestCase(testcase) if (not isinstance(testcase, list)): header = '{0} Tests'.format(testcase.__name__) print_header('Starting {0}'.format(header), width=self.options.output_columns) runner = TextTestRunner(verbosity=self.options.verbosity).run(tests) self.testsuite_results.append((header, runner)) return runner.wasSuccessful()
'Start code coverage. You can pass any coverage options as keyword arguments. For the available options please see: http://nedbatchelder.com/code/coverage/api.html'
def start_coverage(self, **coverage_options):
if (self.options.coverage is False): return if (coverage_options.pop('track_processes', None) is not None): raise RuntimeWarning("Please stop passing 'track_processes' to 'start_coverage()'. It's now the default and '--no-processes-coverage' was added to the parser to disable it.") print(' * Starting Coverage') if (self.options.no_processes_coverage is False): coverage_options['data_suffix'] = True os.environ['COVERAGE_PROCESS_START'] = '1' os.environ['COVERAGE_OPTIONS'] = json.dumps(coverage_options) self.code_coverage = coverage.coverage(**coverage_options) self.code_coverage.start()
'Stop code coverage.'
def stop_coverage(self, save_coverage=True):
if (self.options.coverage is False): return os.environ.pop('COVERAGE_OPTIONS', None) os.environ.pop('COVERAGE_PROCESS_START', None) print(' * Stopping coverage') self.code_coverage.stop() if save_coverage: print(' * Saving coverage info') self.code_coverage.save() if (self.options.no_processes_coverage is False): sys.stdout.write(' * Combining multiple coverage info files ... ') sys.stdout.flush() self.code_coverage.combine() print('Done.') if (self.options.coverage_xml is not None): sys.stdout.write(' * Generating Coverage XML Report At {0!r} ... '.format(self.options.coverage_xml)) sys.stdout.flush() self.code_coverage.xml_report(outfile=self.options.coverage_xml) print('Done.') if (self.options.coverage_html is not None): sys.stdout.write(' * Generating Coverage HTML Report Under {0!r} ... '.format(self.options.coverage_html)) sys.stdout.flush() self.code_coverage.html_report(directory=self.options.coverage_html) print('Done.')
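The start/stop pair above is a thin wrapper around the coverage.py API. A minimal standalone sketch of the same call sequence (the output file names are illustrative, not taken from the runner):

import coverage

cov = coverage.coverage(data_suffix=True)   # same constructor start_coverage() uses
cov.start()
# ... run the test suite here ...
cov.stop()
cov.save()
cov.combine()                               # merge per-process .coverage.* data files
cov.xml_report(outfile='coverage.xml')      # illustrative output paths
cov.html_report(directory='coverage-html')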
'root Root directory of webserver. If not passed, it will default to the location of the base environment of the integration suite\'s file roots (tests/integration/files/file/base/) port Port on which to listen. If not passed, a random one will be chosen at the time the start() function is invoked. wait : 5 Number of seconds to wait for the socket to be open before raising an exception handler Can be used to use a subclass of tornado.web.StaticFileHandler, such as when enforcing authentication with the http_basic_auth decorator.'
def __init__(self, root=None, port=None, wait=5, handler=None):
if ((port is not None) and (not isinstance(port, six.integer_types))): raise ValueError('port must be an integer') if (root is None): root = os.path.join(FILES, 'file', 'base') try: self.root = os.path.realpath(root) except AttributeError: raise ValueError('root must be a string') self.port = port self.wait = wait self.handler = (handler if (handler is not None) else tornado.web.StaticFileHandler) self.web_root = None
'Threading target which stands up the tornado application'
def target(self):
self.ioloop = tornado.ioloop.IOLoop()
self.ioloop.make_current()
self.application = tornado.web.Application([('/(.*)', self.handler, {'path': self.root})])
self.application.listen(self.port)
self.ioloop.start()
'Convenience function which, given a file path, will return a URL that points to that path. If the path is relative, it will just be appended to self.web_root.'
def url(self, path):
if self.web_root is None:
    raise RuntimeError('Webserver instance has not been started')
err_msg = 'invalid path, must be either a relative path or a path within {0}'.format(self.root)
try:
    relpath = path if not os.path.isabs(path) else os.path.relpath(path, self.root)
    if relpath.startswith('..' + os.sep):
        raise ValueError(err_msg)
    return '/'.join((self.web_root, relpath))
except AttributeError:
    raise ValueError(err_msg)
'Starts the webserver'
def start(self):
if self.port is None:
    self.port = get_unused_localhost_port()
self.web_root = 'http://127.0.0.1:{0}'.format(self.port)
self.server_thread = threading.Thread(target=self.target)
self.server_thread.daemon = True
self.server_thread.start()
for idx in range(self.wait + 1):
    if self.listening:
        break
    if idx != self.wait:
        time.sleep(1)
else:
    raise Exception(
        'Failed to start tornado webserver on 127.0.0.1:{0} within {1} seconds'.format(
            self.port, self.wait))
'Stops the webserver'
def stop(self):
self.ioloop.add_callback(self.ioloop.stop)
self.server_thread.join()
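A hedged usage sketch of this webserver test helper; the enclosing class name (Webserver) and the served directory are assumptions made for illustration:

# Assumes the methods above belong to a class named Webserver (not stated in this excerpt).
httpd = Webserver(root='/tmp/served-files', wait=5)
httpd.start()                                # picks an unused localhost port if none was given
try:
    file_url = httpd.url('subdir/data.txt')  # -> http://127.0.0.1:<port>/subdir/data.txt
    # ... fetch file_url from the test ...
finally:
    httpd.stop()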
'Return the path to a testing runtime script'
def get_script_path(self, script_name):
if (not os.path.isdir(TMP_SCRIPT_DIR)): os.makedirs(TMP_SCRIPT_DIR) script_path = os.path.join(TMP_SCRIPT_DIR, 'cli_{0}.py'.format(script_name.replace('-', '_'))) if (not os.path.isfile(script_path)): log.info('Generating {0}'.format(script_path)) import salt.utils.files with salt.utils.files.fopen(script_path, 'w') as sfh: script_template = SCRIPT_TEMPLATES.get(script_name, None) if (script_template is None): script_template = SCRIPT_TEMPLATES.get('common', None) if (script_template is None): raise RuntimeError('{0} does not know how to handle the {1} script'.format(self.__class__.__name__, script_name)) sfh.write(((((('#!{0}\n\n'.format(sys.executable) + 'import sys\n') + 'CODE_DIR="{0}"\n'.format(CODE_DIR)) + 'if CODE_DIR not in sys.path:\n') + ' sys.path.insert(0, CODE_DIR)\n\n') + '\n'.join(script_template).format(script_name.replace('salt-', '')))) fst = os.stat(script_path) os.chmod(script_path, (fst.st_mode | stat.S_IEXEC)) log.info('Returning script path %r', script_path) return script_path
'CherryPy does not have a facility for serverless unit testing. However this recipe demonstrates a way of doing it by calling its internal API to simulate an incoming request. This will exercise the whole stack from there. Remember a couple of things: * CherryPy is multithreaded. The response you will get from this method is a thread-data object attached to the current thread. Unless you use many threads from within a unit test, you can mostly forget about the thread data aspect of the response. * Responses are dispatched to a mounted application\'s page handler, if found. This is the reason why you must indicate which app you are targeting with this request by specifying its mount point. You can simulate various request settings by setting the `headers` parameter to a dictionary of headers, the request\'s `scheme` or `protocol`. .. seealso: http://docs.cherrypy.org/stable/refman/_cprequest.html#cherrypy._cprequest.Response'
def request(self, path='/', method='GET', app_path='', scheme='http', proto='HTTP/1.1', body=None, qs=None, headers=None, **kwargs):
h = {'Host': '127.0.0.1'} fd = None if (body is not None): h['content-length'] = '{0}'.format(len(body)) fd = StringIO(body) if (headers is not None): h.update(headers) app = cherrypy.tree.apps.get(app_path) if (not app): raise AssertionError("No application mounted at '{0}'".format(app_path)) app.release_serving() (request, response) = app.get_serving(local, remote, scheme, proto) try: h = [(k, v) for (k, v) in six.iteritems(h)] response = request.run(method, path, qs, proto, h, fd) finally: if fd: fd.close() fd = None if response.output_status.startswith(six.b('500')): response_body = response.collapse_body() if six.PY3: response_body = response_body.decode(__salt_system_encoding__) print(response_body) raise AssertionError('Unexpected error') response.collapse_body() return (request, response)
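A short, hedged example of driving this helper from a test; the mount point, path, body, and headers below are illustrative values, not taken from the source:

# Illustrative only: exercise the app mounted at the root ('') without opening a real socket.
request, response = self.request(
    '/keys', method='POST',
    app_path='',
    body='tgt=*',
    headers={'Content-Type': 'application/x-www-form-urlencoded'})
self.assertTrue(response.output_status.startswith(six.b('200')))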
'Return a set of all test suites except unit and cloud provider tests unless requested'
def _get_suites(self, include_unit=False, include_cloud_provider=False, include_proxy=False):
suites = set(TEST_SUITES.keys())
if not include_unit:
    suites -= set(['unit'])
if not include_cloud_provider:
    suites -= set(['cloud_provider'])
if not include_proxy:
    suites -= set(['proxy'])
return suites
'Query whether test suites have been enabled'
def _check_enabled_suites(self, include_unit=False, include_cloud_provider=False, include_proxy=False):
suites = self._get_suites(include_unit=include_unit,
                          include_cloud_provider=include_cloud_provider,
                          include_proxy=include_proxy)
return any([getattr(self.options, suite) for suite in suites])
'Enable test suites for current test run'
def _enable_suites(self, include_unit=False, include_cloud_provider=False, include_proxy=False):
suites = self._get_suites(include_unit=include_unit,
                          include_cloud_provider=include_cloud_provider,
                          include_proxy=include_proxy)
for suite in suites:
    setattr(self.options, suite, True)
'Run an integration test suite'
def run_integration_suite(self, path='', display_name=''):
full_path = os.path.join(TEST_DIR, path)
return self.run_suite(full_path, display_name, suffix='test_*.py')
'Set soft and hard limits on open file handles at required thresholds for integration tests or unit tests'
def set_filehandle_limits(self, limits='integration'):
if salt.utils.platform.is_windows(): import win32file prev_hard = win32file._getmaxstdio() prev_soft = 512 else: (prev_soft, prev_hard) = resource.getrlimit(resource.RLIMIT_NOFILE) min_soft = MAX_OPEN_FILES[limits]['soft_limit'] min_hard = MAX_OPEN_FILES[limits]['hard_limit'] set_limits = False if (prev_soft < min_soft): soft = min_soft set_limits = True else: soft = prev_soft if (prev_hard < min_hard): hard = min_hard set_limits = True else: hard = prev_hard if set_limits: print(' * Max open files settings is too low (soft: {0}, hard: {1}) for running the tests'.format(prev_soft, prev_hard)) print(' * Trying to raise the limits to soft: {0}, hard: {1}'.format(soft, hard)) try: if salt.utils.platform.is_windows(): hard = (2048 if (hard > 2048) else hard) win32file._setmaxstdio(hard) else: resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) except Exception as err: print('ERROR: Failed to raise the max open files settings -> {0}'.format(err)) print('Please issue the following command on your console:') print(' ulimit -n {0}'.format(soft)) self.exit() finally: print(('~' * getattr(self.options, 'output_columns', PNUM)))
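For reference, a minimal standalone sketch of inspecting and raising the same POSIX limit this method adjusts; the threshold value is an assumption, not the one used by the test suite:

import resource

# Read the current open-file limits, then raise the soft limit toward the hard cap.
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
wanted_soft = 3072   # illustrative threshold only
if soft < wanted_soft:
    resource.setrlimit(resource.RLIMIT_NOFILE, (min(wanted_soft, hard), hard))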
'Execute the integration tests suite'
def run_integration_tests(self):
named_tests = [] named_unit_test = [] if self.options.name: for test in self.options.name: if test.startswith(('tests.unit.', 'unit.')): named_unit_test.append(test) continue named_tests.append(test) if ((self.options.unit or named_unit_test) and (not named_tests) and (not self._check_enabled_suites(include_cloud_provider=True))): return [True] if (not salt.utils.platform.is_windows()): self.set_filehandle_limits('integration') try: print_header(' * Setting up Salt daemons to execute tests', top=False, width=getattr(self.options, 'output_columns', PNUM)) except TypeError: print_header(' * Setting up Salt daemons to execute tests', top=False) status = [] if ((not self._check_enabled_suites(include_cloud_provider=True, include_proxy=True)) and (not self.options.name)): return status with TestDaemon(self): if self.options.name: for name in self.options.name: if os.path.isfile(name): if (not name.endswith('.py')): continue if name.startswith(os.path.join('tests', 'unit')): continue results = self.run_suite(os.path.dirname(name), name, suffix=os.path.basename(name), load_from_name=False) status.append(results) continue if name.startswith(('tests.unit.', 'unit.')): continue results = self.run_suite('', name, suffix='test_*.py', load_from_name=True) status.append(results) for suite in TEST_SUITES: if ((suite != 'unit') and getattr(self.options, suite)): status.append(self.run_integration_suite(**TEST_SUITES[suite])) return status
'Execute the unit tests'
def run_unit_tests(self):
named_unit_test = [] if self.options.name: for test in self.options.name: if (not test.startswith(('tests.unit.', 'unit.'))): continue named_unit_test.append(test) if ((not self.options.unit) and (not named_unit_test)): return [True] status = [] if self.options.unit: self.set_filehandle_limits('unit') results = self.run_suite(os.path.join(TEST_DIR, 'unit'), 'Unit', suffix='test_*.py') status.append(results) return status for name in named_unit_test: results = self.run_suite(os.path.join(TEST_DIR, 'unit'), name, suffix='test_*.py', load_from_name=True) status.append(results) return status
'Simulate writing data'
def write(self, data):
self.content.append(data)
'Simulate closing the IO object.'
def close(self):
self.closed = True
'Fill in the blanks for the eauth system'
def __eauth(self):
if self.opts['eauth']:
    resolver = salt.auth.Resolver(self.opts)
    res = resolver.cli(self.opts['eauth'])
    self.opts.update(res)
'Execute the wheel call'
def run(self):
return self.wheel.master_call(**self.opts)
'Ensure exception does not display a context by default Wraps TestCase.assertRaisesRegex'
@contextlib.contextmanager def assertCleanError(self, exc_type, details, *args):
if args:
    details = details % args
cm = self.assertRaisesRegex(exc_type, details)
with cm as exc:
    yield exc
'Ensure a clean AddressValueError'
def assertAddressError(self, details, *args):
return self.assertCleanError(ipaddress.AddressValueError, details, *args)
'Ensure a clean NetmaskValueError'
def assertNetmaskError(self, details, *args):
return self.assertCleanError(ipaddress.NetmaskValueError, details, *args)
'Check constructor arguments produce equivalent instances'
def assertInstancesEqual(self, lhs, rhs):
self.assertEqual(self.factory(lhs), self.factory(rhs))
'Ensure a clean ValueError with the expected message'
def assertFactoryError(self, factory, kind):
addr = 'camelot'
msg = '%r does not appear to be an IPv4 or IPv6 %s'
with self.assertCleanError(ValueError, msg, addr, kind):
    factory(addr)
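A hedged illustration of how these helpers read in a test; the malformed address and the expected error text are example values, not taken from this excerpt:

# Illustrative use of the assert helpers above.
with self.assertAddressError("Expected 4 octets in '1.2.3'"):
    ipaddress.IPv4Address('1.2.3')
self.assertFactoryError(ipaddress.ip_network, 'network')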
'Run the sequence in a loop'
def run(self):
last_check = 0
self.start_time = datetime.datetime.now()
goal = self.reqs_sec * self.run_time
while True:
    self.fire_it()
    last_check += 1
    if last_check > self.granularity:
        self.calibrate()
        last_check = 0
    if self.total_complete > goal:
        print('Test complete')
        break
'Send the pub!'
def fire_it(self):
self.client.pub('silver', 'test.ping')
self.total_complete += 1
'Re-calibrate the speed'
def calibrate(self):
elapsed_time = datetime.datetime.now() - self.start_time
runtime_reqs_sec = self.total_complete / elapsed_time.total_seconds()
print('Recalibrating. Current reqs/sec: {0}'.format(runtime_reqs_sec))
return
'Read a file on the file system (relative to salt\'s base project dir) :returns: A file-like object. :raises IOError: If the file cannot be found or read.'
def parse_file(self, fpath):
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__), os.pardir))
with open(os.path.join(sdir, fpath), 'rb') as f:
    return f.readlines()
'Parse a string line-by-line delineating comments and code :returns: A tuple of boolean/list-of-string pairs. True designates a comment; False designates code.'
def parse_lit(self, lines):
comment_char = '#'
comment = re.compile('^\\s*{0}[ \\n]'.format(comment_char))
section_test = lambda val: bool(comment.match(val))
sections = []
for is_doc, group in itertools.groupby(lines, section_test):
    if is_doc:
        text = [comment.sub('', i).rstrip('\r\n') for i in group]
    else:
        text = [i.rstrip('\r\n') for i in group]
    sections.append((is_doc, text))
return sections
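To make the grouping above concrete, a toy input and the shape of its output (the input lines are invented for illustration):

# Hypothetical input: a commented SLS-style snippet.
lines = [
    '# Install Apache\n',
    'apache:\n',
    '  pkg.installed: []\n',
]
# parse_lit(lines) would return:
#   [(True, ['Install Apache']),
#    (False, ['apache:', '  pkg.installed: []'])]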
'Given a typical Salt SLS path (e.g.: apache.vhosts.standard), find the file on the file system and parse it'
def parse_file(self, sls_path):
config = self.state.document.settings.env.config formulas_dirs = config.formulas_dirs fpath = sls_path.replace('.', '/') name_options = ('{0}.sls'.format(fpath), os.path.join(fpath, 'init.sls')) paths = [os.path.join(fdir, fname) for fname in name_options for fdir in formulas_dirs] for i in paths: try: with open(i, 'rb') as f: return f.readlines() except IOError: pass raise IOError("Could not find sls file '{0}'".format(sls_path))
'Stores the specified attributes which represent a URL which links to an RFC which defines an HTTP method.'
def __init__(self, base_url, anchor, section):
self.base_url = base_url
self.anchor = anchor
self.section = section
'Returns the URL which this object represents, which points to the location of the RFC which defines some HTTP method.'
def __repr__(self):
return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section)
'Format the function name'
def format_name(self):
if not hasattr(self.module, '__func_alias__'):
    return super(FunctionDocumenter, self).format_name()
if not self.objpath:
    return super(FunctionDocumenter, self).format_name()
if len(self.objpath) > 1:
    return super(FunctionDocumenter, self).format_name()
return self.module.__func_alias__.get(self.objpath[0], self.objpath[0])
'Mapping allows autodoc to bypass the Mock object and instead return a specific value expected for a specific attribute.'
def __init__(self, mapping=None, *args, **kwargs):
self.__mapping = (mapping or {})
'Provide access eg. to \'pack\''
def __getattr__(self, attr):
return getattr(self.client.functions, attr)
'Return a function that you can call with regular func params, but will do all the _proc_function magic'
def __getitem__(self, key):
if (key not in self.client.functions): raise KeyError def wrapper(*args, **kwargs): low = {u'fun': key, u'args': args, u'kwargs': kwargs} pub_data = {} kwargs_keys = list(kwargs) for kwargs_key in kwargs_keys: if kwargs_key.startswith(u'__pub_'): pub_data[kwargs_key] = kwargs.pop(kwargs_key) async_pub = self.client._gen_async_pub(pub_data.get(u'__pub_jid')) user = salt.utils.get_specific_user() return self.client._proc_function(key, low, user, async_pub[u'tag'], async_pub[u'jid'], False) return wrapper
'Return a dict that will mimic the "functions" dict used all over salt. It creates a wrapper around the function allowing **kwargs, and if pub_data is passed in as kwargs, will re-use the JID passed in'
def functions_dict(self):
return ClientFuncsDict(self)
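A hedged sketch of how the wrapper dict above might be used; the function name and arguments are illustrative, and the surrounding client construction is assumed:

# Assumes `client` is an already-constructed client exposing functions_dict().
funcs = client.functions_dict()
# Each lookup returns a wrapper that runs the function via _proc_function.
result = funcs['test.arg']('positional', keyword='value')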
'Execute a function through the master network interface.'
def master_call(self, **kwargs):
load = kwargs
load[u'cmd'] = self.client
channel = salt.transport.Channel.factory(self.opts, crypt=u'clear', usage=u'master_call')
ret = channel.send(load)
if isinstance(ret, collections.Mapping):
    if u'error' in ret:
        salt.utils.error.raise_error(**ret[u'error'])
return ret
'Execute a runner function synchronously; eauth is respected. This function requires that :conf_master:`external_auth` is configured and the user is authorized to execute runner functions: (``@runner``). .. code-block:: python runner.eauth_sync({ \'fun\': \'jobs.list_jobs\', \'username\': \'saltdev\', \'password\': \'saltdev\', \'eauth\': \'pam\', })'
def cmd_sync(self, low, timeout=None, full_return=False):
event = salt.utils.event.get_master_event(self.opts, self.opts[u'sock_dir'], listen=True)
job = self.master_call(**low)
ret_tag = salt.utils.event.tagify(u'ret', base=job[u'tag'])
if timeout is None:
    timeout = self.opts.get(u'rest_timeout', 300)
ret = event.get_event(tag=ret_tag, full=True, wait=timeout, auto_reconnect=True)
if ret is None:
    raise salt.exceptions.SaltClientTimeout(
        u"RunnerClient job '{0}' timed out".format(job[u'jid']), jid=job[u'jid'])
return ret if full_return else ret[u'data'][u'return']
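A hedged usage sketch of cmd_sync, reusing the credentials shown in the docstring above; the opts construction and timeout value are assumptions:

# Assumes `opts` was loaded with salt.config.master_config('/etc/salt/master').
runner = salt.runner.RunnerClient(opts)
ret = runner.cmd_sync({
    u'fun': u'jobs.list_jobs',
    u'username': u'saltdev',
    u'password': u'saltdev',
    u'eauth': u'pam',
}, timeout=60)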
'Execute a function .. code-block:: python >>> opts = salt.config.master_config(\'/etc/salt/master\') >>> runner = salt.runner.RunnerClient(opts) >>> runner.cmd(\'jobs.list_jobs\', []) \'20131219215650131543\': { \'Arguments\': [300], \'Function\': \'test.sleep\', \'StartTime\': \'2013, Dec 19 21:56:50.131543\', \'Target\': \'*\', \'Target-type\': \'glob\', \'User\': \'saltdev\' \'20131219215921857715\': { \'Arguments\': [300], \'Function\': \'test.sleep\', \'StartTime\': \'2013, Dec 19 21:59:21.857715\', \'Target\': \'*\', \'Target-type\': \'glob\', \'User\': \'saltdev\''
def cmd(self, fun, arg=None, pub_data=None, kwarg=None, print_event=True, full_return=False):
if (arg is None): arg = tuple() if ((not isinstance(arg, list)) and (not isinstance(arg, tuple))): raise salt.exceptions.SaltInvocationError(u'arg must be formatted as a list/tuple') if (pub_data is None): pub_data = {} if (not isinstance(pub_data, dict)): raise salt.exceptions.SaltInvocationError(u'pub_data must be formatted as a dictionary') if (kwarg is None): kwarg = {} if (not isinstance(kwarg, dict)): raise salt.exceptions.SaltInvocationError(u'kwarg must be formatted as a dictionary') arglist = salt.utils.args.parse_input(arg, no_parse=self.opts.get(u'no_parse', [])) if kwarg: kwarg[u'__kwarg__'] = True arglist.append(kwarg) (args, kwargs) = salt.minion.load_args_and_kwargs(self.functions[fun], arglist, pub_data) low = {u'fun': fun, u'arg': args, u'kwarg': kwargs} return self.low(fun, low, print_event=print_event, full_return=full_return)
'Check for deprecated usage and allow until Salt Oxygen.'
def low(self, fun, low, print_event=True, full_return=False):
msg = [] if (u'args' in low): msg.append(u'call with arg instead') low[u'arg'] = low.pop(u'args') if (u'kwargs' in low): msg.append(u'call with kwarg instead') low[u'kwarg'] = low.pop(u'kwargs') if msg: salt.utils.warn_until(u'Oxygen', u' '.join(msg)) return self._low(fun, low, print_event=print_event, full_return=full_return)
'Helper that allows us to turn off storing jobs for different classes that may incorporate this mixin.'
@property def store_job(self):
try: class_name = self.__class__.__name__.lower() except AttributeError: log.warning(u'Unable to determine class name', exc_info_on_loglevel=logging.DEBUG) return True try: return self.opts[u'{0}_returns'.format(class_name)] except KeyError: return True
'Execute a function from low data Low data includes: required: - fun: the name of the function to run optional: - arg: a list of args to pass to fun - kwarg: kwargs for fun - __user__: user who is running the command - __jid__: jid to run under - __tag__: tag to run under'
def _low(self, fun, low, print_event=True, full_return=False):
self.mminion jid = low.get(u'__jid__', salt.utils.jid.gen_jid()) tag = low.get(u'__tag__', salt.utils.event.tagify(jid, prefix=self.tag_prefix)) data = {u'fun': u'{0}.{1}'.format(self.client, fun), u'jid': jid, u'user': low.get(u'__user__', u'UNKNOWN')} event = salt.utils.event.get_event(u'master', self.opts[u'sock_dir'], self.opts[u'transport'], opts=self.opts, listen=False) if print_event: print_func = (self.print_async_event if hasattr(self, u'print_async_event') else None) else: print_func = None namespaced_event = salt.utils.event.NamespacedEvent(event, tag, print_func=print_func) func_globals = {u'__jid__': jid, u'__user__': data[u'user'], u'__tag__': tag, u'__jid_event__': weakref.proxy(namespaced_event)} try: self_functions = pycopy.copy(self.functions) salt.utils.lazy.verify_fun(self_functions, fun) completed_funcs = [] for mod_name in six.iterkeys(self_functions): if (u'.' not in mod_name): continue (mod, _) = mod_name.split(u'.', 1) if (mod in completed_funcs): continue completed_funcs.append(mod) for (global_key, value) in six.iteritems(func_globals): self.functions[mod_name].__globals__[global_key] = value f_call = None if (u'arg' not in low): f_call = salt.utils.format_call(self.functions[fun], low, expected_extra_kws=CLIENT_INTERNAL_KEYWORDS) args = f_call.get(u'args', ()) else: args = low[u'arg'] if (u'kwarg' not in low): log.critical(u"kwargs must be passed inside the low data within the 'kwarg' key. See usage of salt.utils.args.parse_input() and salt.minion.load_args_and_kwargs() elsewhere in the codebase.") kwargs = {} else: kwargs = low[u'kwarg'] data[u'fun_args'] = (list(args) + ([kwargs] if kwargs else [])) func_globals[u'__jid_event__'].fire_event(data, u'new') with tornado.stack_context.StackContext(self.functions.context_dict.clone): data[u'return'] = self.functions[fun](*args, **kwargs) data[u'success'] = True if (isinstance(data[u'return'], dict) and (u'data' in data[u'return'])): data[u'success'] = salt.utils.check_state_result(data[u'return'][u'data']) except (Exception, SystemExit) as ex: if isinstance(ex, salt.exceptions.NotImplemented): data[u'return'] = str(ex) else: data[u'return'] = u'Exception occurred in {0} {1}: {2}'.format(self.client, fun, traceback.format_exc()) data[u'success'] = False namespaced_event.fire_event(data, u'ret') if self.store_job: try: salt.utils.job.store_job(self.opts, {u'id': self.opts[u'id'], u'tgt': self.opts[u'id'], u'jid': data[u'jid'], u'return': data}, event=None, mminion=self.mminion) except salt.exceptions.SaltCacheError: log.error(u'Could not store job cache info. Job details for this run may be unavailable.') log.info(u'Runner completed: %s', data[u'jid']) del event del namespaced_event return (data if full_return else data[u'return'])
'Return a dictionary of functions and the inline documentation for each'
def get_docs(self, arg=None):
if arg: if (u'*' in arg): target_mod = arg _use_fnmatch = True else: target_mod = ((arg + u'.') if (not arg.endswith(u'.')) else arg) if _use_fnmatch: docs = [(fun, self.functions[fun].__doc__) for fun in fnmatch.filter(self.functions, target_mod)] else: docs = [(fun, self.functions[fun].__doc__) for fun in sorted(self.functions) if ((fun == arg) or fun.startswith(target_mod))] else: docs = [(fun, self.functions[fun].__doc__) for fun in sorted(self.functions)] docs = dict(docs) return salt.utils.doc.strip_rst(docs)
'Run this method in a multiprocess target to execute the function in a multiprocess and fire the return data on the event bus'
def _proc_function(self, fun, low, user, tag, jid, daemonize=True):
if daemonize and not salt.utils.platform.is_windows():
    salt.log.setup.shutdown_multiprocessing_logging()
    salt.utils.daemonize()
    salt.log.setup.setup_multiprocessing_logging()
low[u'__jid__'] = jid
low[u'__user__'] = user
low[u'__tag__'] = tag
return self.low(fun, low, full_return=False)
'Execute a function asynchronously; eauth is respected This function requires that :conf_master:`external_auth` is configured and the user is authorized .. code-block:: python >>> wheel.cmd_async({ \'fun\': \'key.finger\', \'match\': \'jerry\', \'eauth\': \'auto\', \'username\': \'saltdev\', \'password\': \'saltdev\', {\'jid\': \'20131219224744416681\', \'tag\': \'salt/wheel/20131219224744416681\'}'
def cmd_async(self, low):
return self.master_call(**low)
'Execute the function in a multiprocess and return the event tag to use to watch for the return'
def async(self, fun, low, user=u'UNKNOWN', pub=None):
async_pub = (pub if (pub is not None) else self._gen_async_pub()) proc = salt.utils.process.SignalHandlingMultiprocessingProcess(target=self._proc_function, args=(fun, low, user, async_pub[u'tag'], async_pub[u'jid'])) with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): proc.start() proc.join() return async_pub
'Print all of the events with the prefix \'tag\''
def print_async_event(self, suffix, event):
if (not isinstance(event, dict)): return if self.opts.get(u'quiet', False): return if (suffix in (u'new',)): return try: outputter = self.opts.get(u'output', (event.get(u'outputter', None) or event.get(u'return').get(u'outputter'))) except AttributeError: outputter = None if (suffix == u'ret'): if (isinstance(event.get(u'return'), dict) and (set(event[u'return']) == set((u'data', u'outputter')))): event_data = event[u'return'][u'data'] outputter = event[u'return'][u'outputter'] else: event_data = event[u'return'] else: event_data = {u'suffix': suffix, u'event': event} salt.output.display_output(event_data, outputter, self.opts)
':param IOLoop io_loop: io_loop used for events. Pass in an io_loop if you want asynchronous operation for obtaining events. Eg use of set_event_handler() API. Otherwise, operation will be synchronous.'
def __init__(self, c_path=os.path.join(syspaths.CONFIG_DIR, u'master'), mopts=None, skip_perm_errors=False, io_loop=None, keep_loop=False, auto_reconnect=False):
if mopts: self.opts = mopts else: if os.path.isdir(c_path): log.warning(u"%s expects a file path not a directory path(%s) to its 'c_path' keyword argument", self.__class__.__name__, c_path) self.opts = salt.config.client_config(c_path) self.serial = salt.payload.Serial(self.opts) self.salt_user = salt.utils.get_specific_user() self.skip_perm_errors = skip_perm_errors self.key = self.__read_master_key() self.auto_reconnect = auto_reconnect self.event = salt.utils.event.get_event(u'master', self.opts[u'sock_dir'], self.opts[u'transport'], opts=self.opts, listen=False, io_loop=io_loop, keep_loop=keep_loop) self.utils = salt.loader.utils(self.opts) self.functions = salt.loader.minion_mods(self.opts, utils=self.utils) self.returners = salt.loader.returners(self.opts, self.functions)
'Read in the rotating master authentication key'
def __read_master_key(self):
key_user = self.salt_user if (key_user == u'root'): if (self.opts.get(u'user', u'root') != u'root'): key_user = self.opts.get(u'user', u'root') if key_user.startswith(u'sudo_'): key_user = self.opts.get(u'user', u'root') if salt.utils.platform.is_windows(): key_user = key_user.replace(u'\\', u'_') keyfile = os.path.join(self.opts[u'cachedir'], u'.{0}_key'.format(key_user)) salt.utils.verify.check_path_traversal(self.opts[u'cachedir'], key_user, self.skip_perm_errors) try: with salt.utils.files.fopen(keyfile, u'r') as key: return key.read() except (OSError, IOError): return u''
'convert a seco.range range into a list target'
def _convert_range_to_list(self, tgt):
range_ = seco.range.Range(self.opts[u'range_server'])
try:
    return range_.expand(tgt)
except seco.range.RangeException as err:
    print(u'Range server exception: {0}'.format(err))
    return []
'Return the timeout to use'
def _get_timeout(self, timeout):
if timeout is None:
    return self.opts[u'timeout']
if isinstance(timeout, int):
    return timeout
if isinstance(timeout, six.string_types):
    try:
        return int(timeout)
    except ValueError:
        return self.opts[u'timeout']
return self.opts[u'timeout']
'Return the information about a given job'
def gather_job_info(self, jid, tgt, tgt_type, **kwargs):
log.debug(u'Checking whether jid %s is still running', jid)
timeout = int(kwargs.get(u'gather_job_timeout', self.opts[u'gather_job_timeout']))
pub_data = self.run_job(tgt, u'saltutil.find_job', arg=[jid], tgt_type=tgt_type, timeout=timeout, **kwargs)
if u'jid' in pub_data:
    self.event.subscribe(pub_data[u'jid'])
return pub_data
'Common checks on the pub_data data structure returned from running pub'
def _check_pub_data(self, pub_data):
if (pub_data == u''): raise EauthAuthenticationError(u'Failed to authenticate! This is most likely because this user is not permitted to execute commands, but there is a small possibility that a disk error occurred (check disk/inode usage).') if (u'error' in pub_data): print(pub_data[u'error']) log.debug(u'_check_pub_data() error: %s', pub_data[u'error']) return {} elif (u'jid' not in pub_data): return {} if (pub_data[u'jid'] == u'0'): print(u'Failed to connect to the Master, is the Salt Master running?') return {} if (not self.opts.get(u'order_masters')): if (not pub_data[u'minions']): print(u'No minions matched the target. No command was sent, no jid was assigned.') return {} else: self.event.subscribe(u'syndic/.*/{0}'.format(pub_data[u'jid']), u'regex') self.event.subscribe(u'salt/job/{0}'.format(pub_data[u'jid'])) return pub_data
'Asynchronously send a command to connected minions Prep the job directory and publish a command to any targeted minions. :return: A dictionary of (validated) ``pub_data`` or an empty dictionary on failure. The ``pub_data`` contains the job ID and a list of all minions that are expected to return data. .. code-block:: python >>> local.run_job(\'*\', \'test.sleep\', [300]) {\'jid\': \'20131219215650131543\', \'minions\': [\'jerry\']}'
def run_job(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', timeout=None, jid=u'', kwarg=None, listen=False, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) try: pub_data = self.pub(tgt, fun, arg, tgt_type, ret, jid=jid, timeout=self._get_timeout(timeout), listen=listen, **kwargs) except SaltClientError: raise SaltClientError(u'The salt master could not be contacted. Is master running?') except Exception as general_exception: raise SaltClientError(general_exception) return self._check_pub_data(pub_data)
'Asynchronously send a command to connected minions Prep the job directory and publish a command to any targeted minions. :return: A dictionary of (validated) ``pub_data`` or an empty dictionary on failure. The ``pub_data`` contains the job ID and a list of all minions that are expected to return data. .. code-block:: python >>> local.run_job_async(\'*\', \'test.sleep\', [300]) {\'jid\': \'20131219215650131543\', \'minions\': [\'jerry\']}'
@tornado.gen.coroutine def run_job_async(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', timeout=None, jid=u'', kwarg=None, listen=True, io_loop=None, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) try: pub_data = (yield self.pub_async(tgt, fun, arg, tgt_type, ret, jid=jid, timeout=self._get_timeout(timeout), io_loop=io_loop, listen=listen, **kwargs)) except SaltClientError: raise SaltClientError(u'The salt master could not be contacted. Is master running?') except Exception as general_exception: raise SaltClientError(general_exception) raise tornado.gen.Return(self._check_pub_data(pub_data))
'Asynchronously send a command to connected minions The function signature is the same as :py:meth:`cmd` with the following exceptions. :returns: A job ID or 0 on failure. .. code-block:: python >>> local.cmd_async(\'*\', \'test.sleep\', [300]) \'20131219215921857715\''
def cmd_async(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', jid=u'', kwarg=None, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) pub_data = self.run_job(tgt, fun, arg, tgt_type, ret, jid=jid, **kwargs) try: return pub_data[u'jid'] except KeyError: return 0
'Execute a command on a random subset of the targeted systems The function signature is the same as :py:meth:`cmd` with the following exceptions. :param sub: The number of systems to execute on .. code-block:: python >>> SLC.cmd_subset(\'*\', \'test.ping\', sub=1) {\'jerry\': True}'
def cmd_subset(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', kwarg=None, sub=3, cli=False, progress=False, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') minion_ret = self.cmd(tgt, u'sys.list_functions', tgt_type=tgt_type, **kwargs) minions = list(minion_ret) random.shuffle(minions) f_tgt = [] for minion in minions: if (fun in minion_ret[minion]): f_tgt.append(minion) if (len(f_tgt) >= sub): break func = self.cmd if cli: func = self.cmd_cli return func(f_tgt, fun, arg, tgt_type=u'list', ret=ret, kwarg=kwarg, progress=progress, **kwargs)
'Iteratively execute a command on subsets of minions at a time The function signature is the same as :py:meth:`cmd` with the following exceptions. :param batch: The batch identifier of systems to execute on :returns: A generator of minion returns .. code-block:: python >>> returns = local.cmd_batch(\'*\', \'state.highstate\', batch=\'10%\') >>> for ret in returns: ... print(ret) {\'jerry\': {...}} {\'dave\': {...}} {\'stewart\': {...}}'
def cmd_batch(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', kwarg=None, batch=u'10%', **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') import salt.cli.batch arg = salt.utils.args.condition_input(arg, kwarg) opts = {u'tgt': tgt, u'fun': fun, u'arg': arg, u'tgt_type': tgt_type, u'ret': ret, u'batch': batch, u'failhard': kwargs.get(u'failhard', False), u'raw': kwargs.get(u'raw', False)} if (u'timeout' in kwargs): opts[u'timeout'] = kwargs[u'timeout'] if (u'gather_job_timeout' in kwargs): opts[u'gather_job_timeout'] = kwargs[u'gather_job_timeout'] if (u'batch_wait' in kwargs): opts[u'batch_wait'] = int(kwargs[u'batch_wait']) eauth = {} if (u'eauth' in kwargs): eauth[u'eauth'] = kwargs.pop(u'eauth') if (u'username' in kwargs): eauth[u'username'] = kwargs.pop(u'username') if (u'password' in kwargs): eauth[u'password'] = kwargs.pop(u'password') if (u'token' in kwargs): eauth[u'token'] = kwargs.pop(u'token') for (key, val) in six.iteritems(self.opts): if (key not in opts): opts[key] = val batch = salt.cli.batch.Batch(opts, eauth=eauth, quiet=True) for ret in batch.run(): (yield ret)
'Synchronously execute a command on targeted minions The cmd method will execute and wait for the timeout period for all minions to reply, then it will return all minion data at once. .. code-block:: python >>> import salt.client >>> local = salt.client.LocalClient() >>> local.cmd(\'*\', \'cmd.run\', [\'whoami\']) {\'jerry\': \'root\'} With extra keyword arguments for the command function to be run: .. code-block:: python local.cmd(\'*\', \'test.arg\', [\'arg1\', \'arg2\'], kwarg={\'foo\': \'bar\'}) Compound commands can be used for multiple executions in a single publish. Function names and function arguments are provided in separate lists but the index values must correlate and an empty list must be used if no arguments are required. .. code-block:: python >>> local.cmd(\'*\', [ \'grains.items\', \'sys.doc\', \'cmd.run\', [\'uptime\'], :param tgt: Which minions to target for the execution. Default is shell glob. Modified by the ``tgt_type`` option. :type tgt: string or list :param fun: The module and function to call on the specified minions of the form ``module.function``. For example ``test.ping`` or ``grains.items``. Compound commands Multiple functions may be called in a single publish by passing a list of commands. This can dramatically lower overhead and speed up the application communicating with Salt. This requires that the ``arg`` param is a list of lists. The ``fun`` list and the ``arg`` list must correlate by index meaning a function that does not take arguments must still have a corresponding empty list at the expected index. :type fun: string or list of strings :param arg: A list of arguments to pass to the remote function. If the function takes no arguments ``arg`` may be omitted except when executing a compound command. :type arg: list or list-of-lists :param timeout: Seconds to wait after the last minion returns but before all minions return. :param tgt_type: The type of ``tgt``. Allowed values: * ``glob`` - Bash glob completion - Default * ``pcre`` - Perl style regular expression * ``list`` - Python list of hosts * ``grain`` - Match based on a grain comparison * ``grain_pcre`` - Grain comparison with a regex * ``pillar`` - Pillar data comparison * ``pillar_pcre`` - Pillar data comparison with a regex * ``nodegroup`` - Match on nodegroup * ``range`` - Use a Range server for matching * ``compound`` - Pass a compound match string * ``ipcidr`` - Match based on Subnet (CIDR notation) or IPv4 address. .. versionchanged:: 2017.7.0 Renamed from ``expr_form`` to ``tgt_type`` :param ret: The returner to use. The value passed can be single returner, or a comma delimited list of returners to call in order on the minions :param kwarg: A dictionary with keyword arguments for the function. :param full_return: Output the job return only (default) or the full return including exit code and other job metadata. :param kwargs: Optional keyword arguments. Authentication credentials may be passed when using :conf_master:`external_auth`. For example: ``local.cmd(\'*\', \'test.ping\', username=\'saltdev\', password=\'saltdev\', eauth=\'pam\')``. Or: ``local.cmd(\'*\', \'test.ping\', token=\'5871821ea51754fdcea8153c1c745433\')`` :returns: A dictionary with the result of the execution, keyed by minion ID. A compound command will return a sub-dictionary keyed by function name.'
def cmd(self, tgt, fun, arg=(), timeout=None, tgt_type=u'glob', ret=u'', jid=u'', full_return=False, kwarg=None, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) was_listening = self.event.cpub try: pub_data = self.run_job(tgt, fun, arg, tgt_type, ret, timeout, jid, listen=True, **kwargs) if (not pub_data): return pub_data ret = {} for fn_ret in self.get_cli_event_returns(pub_data[u'jid'], pub_data[u'minions'], self._get_timeout(timeout), tgt, tgt_type, **kwargs): if fn_ret: for (mid, data) in six.iteritems(fn_ret): ret[mid] = (data if full_return else data.get(u'ret', {})) for failed in list((set(pub_data[u'minions']) - set(ret))): ret[failed] = False return ret finally: if (not was_listening): self.event.close_pub()
'Used by the :command:`salt` CLI. This method returns minion returns as they come back and attempts to block until all minions return. The function signature is the same as :py:meth:`cmd` with the following exceptions. :param verbose: Print extra information about the running command :returns: A generator'
def cmd_cli(self, tgt, fun, arg=(), timeout=None, tgt_type=u'glob', ret=u'', verbose=False, kwarg=None, progress=False, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) was_listening = self.event.cpub try: self.pub_data = self.run_job(tgt, fun, arg, tgt_type, ret, timeout, listen=True, **kwargs) if (not self.pub_data): (yield self.pub_data) else: try: for fn_ret in self.get_cli_event_returns(self.pub_data[u'jid'], self.pub_data[u'minions'], self._get_timeout(timeout), tgt, tgt_type, verbose, progress, **kwargs): if (not fn_ret): continue (yield fn_ret) except KeyboardInterrupt: raise SystemExit(u"\nThis job's jid is: {0}\nExiting gracefully on Ctrl-c\nThe minions may not have all finished running and any remaining minions will return upon completion. To look up the return data for this job later, run the following command:\n\nsalt-run jobs.lookup_jid {0}".format(self.pub_data[u'jid'])) finally: if (not was_listening): self.event.close_pub()
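A minimal sketch of consuming cmd_cli the way the salt CLI does; the target and function are placeholders.

import salt.client

local = salt.client.LocalClient()
# Each yielded chunk maps a minion ID to its return data as it arrives.
for chunk in local.cmd_cli('*', 'test.ping', timeout=30, verbose=True):
    for minion_id, data in chunk.items():
        print(minion_id, data.get('ret'))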
'Yields the individual minion returns as they come in The function signature is the same as :py:meth:`cmd` with the following exceptions. :return: A generator yielding the individual minion returns .. code-block:: python >>> ret = local.cmd_iter(\'*\', \'test.ping\') >>> for i in ret: ... print(i) {\'jerry\': {\'ret\': True}} {\'dave\': {\'ret\': True}} {\'stewart\': {\'ret\': True}}'
def cmd_iter(self, tgt, fun, arg=(), timeout=None, tgt_type=u'glob', ret=u'', kwarg=None, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) was_listening = self.event.cpub try: pub_data = self.run_job(tgt, fun, arg, tgt_type, ret, timeout, listen=True, **kwargs) if (not pub_data): (yield pub_data) else: if kwargs.get(u'yield_pub_data'): (yield pub_data) for fn_ret in self.get_iter_returns(pub_data[u'jid'], pub_data[u'minions'], timeout=self._get_timeout(timeout), tgt=tgt, tgt_type=tgt_type, **kwargs): if (not fn_ret): continue (yield fn_ret) self._clean_up_subscriptions(pub_data[u'jid']) finally: if (not was_listening): self.event.close_pub()
'Yields the individual minion returns as they come in, or None when no returns are available. The function signature is the same as :py:meth:`cmd` with the following exceptions. :returns: A generator yielding the individual minion returns, or None when no returns are available. This allows for actions to be injected in between minion returns. .. code-block:: python >>> ret = local.cmd_iter_no_block(\'*\', \'test.ping\') >>> for i in ret: ... print(i) None {\'jerry\': {\'ret\': True}} {\'dave\': {\'ret\': True}} None {\'stewart\': {\'ret\': True}}'
def cmd_iter_no_block(self, tgt, fun, arg=(), timeout=None, tgt_type=u'glob', ret=u'', kwarg=None, show_jid=False, verbose=False, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) was_listening = self.event.cpub try: pub_data = self.run_job(tgt, fun, arg, tgt_type, ret, timeout, listen=True, **kwargs) if (not pub_data): (yield pub_data) else: for fn_ret in self.get_iter_returns(pub_data[u'jid'], pub_data[u'minions'], timeout=timeout, tgt=tgt, tgt_type=tgt_type, block=False, **kwargs): if (fn_ret and any([show_jid, verbose])): for minion in fn_ret: fn_ret[minion][u'jid'] = pub_data[u'jid'] (yield fn_ret) self._clean_up_subscriptions(pub_data[u'jid']) finally: if (not was_listening): self.event.close_pub()
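A sketch of the non-blocking pattern this generator enables; the sleep stands in for whatever other work the caller wants to interleave between polls.

import time
import salt.client

local = salt.client.LocalClient()
for chunk in local.cmd_iter_no_block('*', 'test.ping', timeout=30):
    if chunk is None:
        # Nothing has returned yet; do other work instead of blocking.
        time.sleep(0.25)
        continue
    print(chunk)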
'Execute a salt command and return the full return data from the targeted minions'
def cmd_full_return(self, tgt, fun, arg=(), timeout=None, tgt_type=u'glob', ret=u'', verbose=False, kwarg=None, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') arg = salt.utils.args.condition_input(arg, kwarg) was_listening = self.event.cpub try: pub_data = self.run_job(tgt, fun, arg, tgt_type, ret, timeout, listen=True, **kwargs) if (not pub_data): return pub_data return self.get_cli_static_event_returns(pub_data[u'jid'], pub_data[u'minions'], timeout, tgt, tgt_type, verbose) finally: if (not was_listening): self.event.close_pub()
'Starts a watcher looking at the return data for a specified JID :returns: all of the information for the JID'
def get_cli_returns(self, jid, minions, timeout=None, tgt=u'*', tgt_type=u'glob', verbose=False, show_jid=False, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') if verbose: msg = u'Executing job with jid {0}'.format(jid) print(msg) print(((u'-' * len(msg)) + u'\n')) elif show_jid: print(u'jid: {0}'.format(jid)) if (timeout is None): timeout = self.opts[u'timeout'] fret = {} minions = set(minions) found = set() event_iter = self.get_event_iter_returns(jid, minions, timeout=timeout) ret = self.get_cache_returns(jid) if (ret != {}): found.update(set(ret)) (yield ret) if (len(found.intersection(minions)) >= len(minions)): raise StopIteration() for event in event_iter: if (event != {}): found.update(set(event)) (yield event) if (len(found.intersection(minions)) >= len(minions)): self._clean_up_subscriptions(jid) raise StopIteration()
'Raw function to just return events of jid excluding timeout logic Yield either the raw event data or None Pass match_type=\'regex\' to treat ``tag`` as a regular expression, which allows gathering non-return data from the event bus, such as minion lists returned from syndics.'
def get_returns_no_block(self, tag, match_type=None):
while True: raw = self.event.get_event(wait=0.01, tag=tag, match_type=match_type, full=True, no_block=True, auto_reconnect=self.auto_reconnect) (yield raw)
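A sketch of consuming this raw event stream directly; run_job with listen=True is used so the event bus is subscribed before returns start arriving, and the target and break condition are illustrative only.

import salt.client

local = salt.client.LocalClient()
pub_data = local.run_job('*', 'test.ping', listen=True)
# Yields full event dicts for the jid tag, or None when nothing is queued.
for raw in local.get_returns_no_block('salt/job/{0}'.format(pub_data['jid'])):
    if raw is None:
        continue
    data = raw.get('data', {})
    if 'return' in data:
        print(data['id'], data['return'])
        break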
'Watch the event system and return job data as it comes in :returns: all of the information for the JID'
def get_iter_returns(self, jid, minions, timeout=None, tgt=u'*', tgt_type=u'glob', expect_minions=False, block=True, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') if (not isinstance(minions, set)): if isinstance(minions, six.string_types): minions = set([minions]) elif isinstance(minions, (list, tuple)): minions = set(list(minions)) if (timeout is None): timeout = self.opts[u'timeout'] gather_job_timeout = int(kwargs.get(u'gather_job_timeout', self.opts[u'gather_job_timeout'])) start = int(time.time()) minion_timeouts = {} found = set() try: if (self.returners[u'{0}.get_load'.format(self.opts[u'master_job_cache'])](jid) == {}): log.warning(u'jid does not exist') (yield {}) raise StopIteration() except Exception as exc: log.warning(u'Returner unavailable: %s', exc) last_time = False if self.opts[u'order_masters']: ret_iter = self.get_returns_no_block(u'(salt/job|syndic/.*)/{0}'.format(jid), u'regex') else: ret_iter = self.get_returns_no_block(u'salt/job/{0}'.format(jid)) jinfo_iter = [] open_jids = set() timeout_at = (time.time() + timeout) gather_syndic_wait = (time.time() + self.opts[u'syndic_wait']) minions_running = True log.debug(u'get_iter_returns for jid %s sent to %s will timeout at %s', jid, minions, datetime.fromtimestamp(timeout_at).time()) while True: for raw in ret_iter: if (raw is None): break if (u'minions' in raw.get(u'data', {})): minions.update(raw[u'data'][u'minions']) continue if (u'return' not in raw[u'data']): continue if kwargs.get(u'raw', False): found.add(raw[u'data'][u'id']) (yield raw) else: found.add(raw[u'data'][u'id']) ret = {raw[u'data'][u'id']: {u'ret': raw[u'data'][u'return']}} if (u'out' in raw[u'data']): ret[raw[u'data'][u'id']][u'out'] = raw[u'data'][u'out'] if (u'retcode' in raw[u'data']): ret[raw[u'data'][u'id']][u'retcode'] = raw[u'data'][u'retcode'] if (u'jid' in raw[u'data']): ret[raw[u'data'][u'id']][u'jid'] = raw[u'data'][u'jid'] if kwargs.get(u'_cmd_meta', False): ret[raw[u'data'][u'id']].update(raw[u'data']) log.debug(u'jid %s return from %s', jid, raw[u'data'][u'id']) (yield ret) if ((len(found.intersection(minions)) >= len(minions)) and (not self.opts[u'order_masters'])): log.debug(u'jid %s found all minions %s', jid, found) break elif ((len(found.intersection(minions)) >= len(minions)) and self.opts[u'order_masters']): if ((len(found) >= len(minions)) and (len(minions) > 0) and (time.time() > gather_syndic_wait)): break for id_ in (minions - found): if (id_ not in minion_timeouts): minion_timeouts[id_] = (time.time() + timeout) if ((time.time() > timeout_at) and minions_running): jinfo = self.gather_job_info(jid, list((minions - found)), u'list', **kwargs) minions_running = False if (u'jid' not in jinfo): jinfo_iter = [] else: jinfo_iter = self.get_returns_no_block(u'salt/job/{0}'.format(jinfo[u'jid'])) timeout_at = (time.time() + gather_job_timeout) if self.opts[u'order_masters']: timeout_at += self.opts.get(u'syndic_wait', 1) for raw in jinfo_iter: if (raw is None): break try: if (raw[u'data'][u'retcode'] > 0): log.error(u'saltutil returning errors on minion %s', raw[u'data'][u'id']) minions.remove(raw[u'data'][u'id']) break except KeyError as exc: missing_key = exc.__str__().strip(u'\'"') if (missing_key == u'retcode'): log.debug(u'retcode missing from client return') else: log.debug(u"Passing on saltutil error. Key '%s' missing from client return. This may be an error in the client.", missing_key) open_jids.add(jinfo[u'jid']) if (u'minions' in raw.get(u'data', {})): minions.update(raw[u'data'][u'minions']) continue if (u'syndic' in raw.get(u'data', {})): minions.update(raw[u'syndic']) continue if (u'return' not in raw.get(u'data', {})): continue if (raw[u'data'][u'return'] == {}): continue if ((u'return' in raw[u'data'][u'return']) and (raw[u'data'][u'return'][u'return'] == {})): continue if (raw[u'data'][u'id'] not in minions): minions.add(raw[u'data'][u'id']) minion_timeouts[raw[u'data'][u'id']] = (time.time() + timeout) minions_running = True now = time.time() done = ((now > timeout_at) and (not minions_running)) if done: for id_ in (minions - found): if (now < minion_timeouts[id_]): done = False break if done: break if block: time.sleep(0.01) else: (yield) if open_jids: for jid in open_jids: self.event.unsubscribe(jid) if expect_minions: for minion in list((minions - found)): (yield {minion: {u'failed': True}})
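A sketch of pairing run_job with get_iter_returns, which is essentially what cmd_iter does internally; the target, function, and timeout are placeholders.

import salt.client

local = salt.client.LocalClient()
pub_data = local.run_job('*', 'test.ping', listen=True)
# expect_minions=True also yields {'failed': True} for minions that never answer.
for ret in local.get_iter_returns(pub_data['jid'], set(pub_data['minions']),
                                  timeout=30, expect_minions=True):
    if ret:
        print(ret)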
'Get the returns for the command line interface via the event system'
def get_returns(self, jid, minions, timeout=None):
minions = set(minions) if (timeout is None): timeout = self.opts[u'timeout'] start = int(time.time()) timeout_at = (start + timeout) log.debug(u'get_returns for jid %s sent to %s will timeout at %s', jid, minions, datetime.fromtimestamp(timeout_at).time()) found = set() ret = {} try: if (self.returners[u'{0}.get_load'.format(self.opts[u'master_job_cache'])](jid) == {}): log.warning(u'jid does not exist') return ret except Exception as exc: raise SaltClientError(u'Master job cache returner [{0}] failed to verify jid. Exception details: {1}'.format(self.opts[u'master_job_cache'], exc)) while True: time_left = (timeout_at - int(time.time())) wait = max(1, time_left) raw = self.event.get_event(wait, jid, auto_reconnect=self.auto_reconnect) if ((raw is not None) and (u'return' in raw)): found.add(raw[u'id']) ret[raw[u'id']] = raw[u'return'] if (len(found.intersection(minions)) >= len(minions)): log.debug(u'jid %s found all minions', jid) break continue if (len(found.intersection(minions)) >= len(minions)): log.debug(u'jid %s found all minions', jid) break if (int(time.time()) > timeout_at): log.info(u'jid %s minions %s did not return in time', jid, (minions - found)) break time.sleep(0.01) return ret
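A sketch of the blocking variant: publish first, then collect everything in one dictionary (placeholders as above).

import salt.client

local = salt.client.LocalClient()
pub_data = local.run_job('*', 'test.ping', listen=True)
# {minion_id: return_data} for every minion that answered within the timeout.
returns = local.get_returns(pub_data['jid'], pub_data['minions'], timeout=30)
print(returns)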
'This method starts off a watcher looking at the return data for a specified jid and returns all of the information for the jid'
def get_full_returns(self, jid, minions, timeout=None):
ret = {} event_iter = self.get_event_iter_returns(jid, minions, timeout=timeout) try: data = self.returners[u'{0}.get_jid'.format(self.opts[u'master_job_cache'])](jid) except Exception as exc: raise SaltClientError(u'Returner {0} could not fetch jid data. Exception details: {1}'.format(self.opts[u'master_job_cache'], exc)) for minion in data: m_data = {} if (u'return' in data[minion]): m_data[u'ret'] = data[minion].get(u'return') else: m_data[u'ret'] = data[minion].get(u'return') if (u'out' in data[minion]): m_data[u'out'] = data[minion][u'out'] if (minion in ret): ret[minion].update(m_data) else: ret[minion] = m_data if (len(set(ret).intersection(minions)) >= len(minions)): return ret for event_ret in event_iter: if (event_ret == {}): time.sleep(0.02) continue for (minion, m_data) in six.iteritems(event_ret): if (minion in ret): ret[minion].update(m_data) else: ret[minion] = m_data if (len(set(ret).intersection(minions)) >= len(minions)): return ret return ret
'Execute a single pass to gather the contents of the job cache'
def get_cache_returns(self, jid):
ret = {} try: data = self.returners[u'{0}.get_jid'.format(self.opts[u'master_job_cache'])](jid) except Exception as exc: raise SaltClientError(u'Could not examine master job cache. Error occurred in {0} returner. Exception details: {1}'.format(self.opts[u'master_job_cache'], exc)) for minion in data: m_data = {} if (u'return' in data[minion]): m_data[u'ret'] = data[minion].get(u'return') else: m_data[u'ret'] = data[minion].get(u'return') if (u'out' in data[minion]): m_data[u'out'] = data[minion][u'out'] if (minion in ret): ret[minion].update(m_data) else: ret[minion] = m_data return ret
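A sketch of reading the master job cache after the fact for a job published earlier with run_job; whether anything is present depends on the configured master_job_cache returner, and the jid handling here is illustrative.

import salt.client

local = salt.client.LocalClient()
pub_data = local.run_job('*', 'test.ping')
# Later, possibly from another process that only knows the jid:
cached = local.get_cache_returns(pub_data['jid'])
for minion_id, data in cached.items():
    print(minion_id, data.get('ret'))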
'Get the returns for the command line interface via the event system'
def get_cli_static_event_returns(self, jid, minions, timeout=None, tgt=u'*', tgt_type=u'glob', verbose=False, show_timeout=False, show_jid=False):
log.trace(u'entered - function get_cli_static_event_returns()') minions = set(minions) if verbose: msg = u'Executing job with jid {0}'.format(jid) print(msg) print(((u'-' * len(msg)) + u'\n')) elif show_jid: print(u'jid: {0}'.format(jid)) if (timeout is None): timeout = self.opts[u'timeout'] start = int(time.time()) timeout_at = (start + timeout) found = set() ret = {} try: if (self.returners[u'{0}.get_load'.format(self.opts[u'master_job_cache'])](jid) == {}): log.warning(u'jid does not exist') return ret except Exception as exc: raise SaltClientError(u'Load could not be retrieved from returner {0}. Exception details: {1}'.format(self.opts[u'master_job_cache'], exc)) while True: time_left = (timeout_at - int(time.time())) wait = max(1, time_left) jid_tag = u'salt/job/{0}'.format(jid) raw = self.event.get_event(wait, jid_tag, auto_reconnect=self.auto_reconnect) if ((raw is not None) and (u'return' in raw)): if (u'minions' in raw.get(u'data', {})): minions.update(raw[u'data'][u'minions']) continue found.add(raw[u'id']) ret[raw[u'id']] = {u'ret': raw[u'return']} ret[raw[u'id']][u'success'] = raw.get(u'success', False) if (u'out' in raw): ret[raw[u'id']][u'out'] = raw[u'out'] if (len(found.intersection(minions)) >= len(minions)): break continue if (len(found.intersection(minions)) >= len(minions)): break if (int(time.time()) > timeout_at): if (verbose or show_timeout): if (self.opts.get(u'minion_data_cache', False) or (tgt_type in (u'glob', u'pcre', u'list'))): if (len(found) < len(minions)): fail = sorted(list(minions.difference(found))) for minion in fail: ret[minion] = {u'out': u'no_return', u'ret': u'Minion did not return'} break time.sleep(0.01) self._clean_up_subscriptions(jid) return ret
'Get the returns for the command line interface via the event system'
def get_cli_event_returns(self, jid, minions, timeout=None, tgt=u'*', tgt_type=u'glob', verbose=False, progress=False, show_timeout=False, show_jid=False, **kwargs):
log.trace(u'func get_cli_event_returns()') if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') if verbose: msg = u'Executing job with jid {0}'.format(jid) print(msg) print(((u'-' * len(msg)) + u'\n')) elif show_jid: print(u'jid: {0}'.format(jid)) connected_minions = None return_count = 0 for ret in self.get_iter_returns(jid, minions, timeout=timeout, tgt=tgt, tgt_type=tgt_type, expect_minions=(verbose or show_timeout), **kwargs): log.debug(u'return event: %s', ret) return_count = (return_count + 1) if progress: for (id_, min_ret) in six.iteritems(ret): if (not (min_ret.get(u'failed') is True)): (yield {u'minion_count': len(minions), u'return_count': return_count}) for (id_, min_ret) in six.iteritems(ret): if (min_ret.get(u'failed') is True): if (connected_minions is None): connected_minions = salt.utils.minions.CkMinions(self.opts).connected_ids() if (self.opts[u'minion_data_cache'] and salt.cache.factory(self.opts).contains(u'minions/{0}'.format(id_), u'data') and connected_minions and (id_ not in connected_minions)): (yield {id_: {u'out': u'no_return', u'ret': u'Minion did not return. [Not connected]'}}) elif (not os.path.exists(os.path.join(self.opts[u'syndic_dir'], id_))): (yield {id_: {u'out': u'no_return', u'ret': u'Minion did not return. [No response]'}}) else: (yield {id_: min_ret}) self._clean_up_subscriptions(jid)
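A sketch of driving this CLI-style stream directly; with verbose=True, minions that never answer show up as 'no_return' entries. Target and function are placeholders.

import salt.client

local = salt.client.LocalClient()
pub_data = local.run_job('*', 'test.ping', listen=True)
for chunk in local.get_cli_event_returns(pub_data['jid'], pub_data['minions'],
                                         timeout=30, verbose=True):
    print(chunk)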
'Gather the return data from the event system, break hard when timeout is reached.'
def get_event_iter_returns(self, jid, minions, timeout=None):
log.trace(u'entered - function get_event_iter_returns()') if (timeout is None): timeout = self.opts[u'timeout'] timeout_at = (time.time() + timeout) found = set() if (self.returners[u'{0}.get_load'.format(self.opts[u'master_job_cache'])](jid) == {}): log.warning(u'jid does not exist') (yield {}) raise StopIteration() while True: raw = self.event.get_event(timeout, auto_reconnect=self.auto_reconnect) if ((raw is None) or (time.time() > timeout_at)): break if (u'minions' in raw.get(u'data', {})): continue try: found.add(raw[u'id']) ret = {raw[u'id']: {u'ret': raw[u'return']}} except KeyError: continue if (u'out' in raw): ret[raw[u'id']][u'out'] = raw[u'out'] (yield ret) time.sleep(0.02)
'Set up the payload_kwargs to be sent down to the master'
def _prep_pub(self, tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs):
if (tgt_type == u'nodegroup'): if (tgt not in self.opts[u'nodegroups']): conf_file = self.opts.get(u'conf_file', u'the master config file') raise SaltInvocationError(u'Node group {0} unavailable in {1}'.format(tgt, conf_file)) tgt = salt.utils.minions.nodegroup_comp(tgt, self.opts[u'nodegroups']) tgt_type = u'compound' if ((tgt_type == u'range') and HAS_RANGE): tgt = self._convert_range_to_list(tgt) tgt_type = u'list' if self.opts.get(u'ext_job_cache'): if ret: ret += u',{0}'.format(self.opts[u'ext_job_cache']) else: ret = self.opts[u'ext_job_cache'] payload_kwargs = {u'cmd': u'publish', u'tgt': tgt, u'fun': fun, u'arg': arg, u'key': self.key, u'tgt_type': tgt_type, u'ret': ret, u'jid': jid} if kwargs: payload_kwargs[u'kwargs'] = kwargs if (self.opts[u'syndic_master'] and (u'user' in kwargs)): payload_kwargs[u'user'] = kwargs[u'user'] elif self.salt_user: payload_kwargs[u'user'] = self.salt_user if self.opts[u'order_masters']: payload_kwargs[u'to'] = timeout return payload_kwargs
'Take the required arguments and publish the given command. Arguments: tgt: The tgt is a regex or a glob used to match up the ids on the minions. Salt works by always publishing every command to all of the minions and then the minions determine if the command is for them based on the tgt value. fun: The function name to be called on the remote host(s), this must be a string in the format "<modulename>.<function name>" arg: The arg option needs to be a tuple of arguments to pass to the calling function; if left blank the function is run with no arguments. Returns: jid: A string, as returned by the publisher, which is the job id, this will inform the client where to get the job results minions: A set, the targets that the tgt passed should match.'
def pub(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', jid=u'', timeout=5, listen=False, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') if ((self.opts.get(u'ipc_mode', u'') != u'tcp') and (not os.path.exists(os.path.join(self.opts[u'sock_dir'], u'publish_pull.ipc')))): log.error(u'Unable to connect to the salt master publisher at %s', self.opts[u'sock_dir']) raise SaltClientError payload_kwargs = self._prep_pub(tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs) master_uri = (((u'tcp://' + salt.utils.ip_bracket(self.opts[u'interface'])) + u':') + str(self.opts[u'ret_port'])) channel = salt.transport.Channel.factory(self.opts, crypt=u'clear', master_uri=master_uri) try: if (listen and (not self.event.connect_pub(timeout=timeout))): raise SaltReqTimeoutError() payload = channel.send(payload_kwargs, timeout=timeout) except SaltReqTimeoutError: raise SaltReqTimeoutError(u'Salt request timed out. The master is not responding. You may need to run your command with `--async` in order to bypass the congested event bus. With `--async`, the CLI tool will print the job id (jid) and exit immediately without listening for responses. You can then use `salt-run jobs.lookup_jid` to look up the results of the job in the job cache later.') if (not payload): key = self.__read_master_key() if (key == self.key): return payload self.key = key payload_kwargs[u'key'] = self.key payload = channel.send(payload_kwargs) error = payload.pop(u'error', None) if (error is not None): raise PublishError(error) if (not payload): return payload del channel return {u'jid': payload[u'load'][u'jid'], u'minions': payload[u'load'][u'minions']}
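A sketch of the low-level publish; it only reports the jid and which minions matched, so it is normally followed by one of the get_*_returns helpers above. Targets are placeholders.

import salt.client

local = salt.client.LocalClient()
# listen=True connects the event bus before publishing so returns are not missed.
pub_data = local.pub('web*', 'cmd.run', arg=('uptime',), tgt_type='glob', listen=True)
print(pub_data['jid'], pub_data['minions'])
returns = local.get_returns(pub_data['jid'], pub_data['minions'], timeout=30)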
'Take the required arguments and publish the given command. Arguments: tgt: The tgt is a regex or a glob used to match up the ids on the minions. Salt works by always publishing every command to all of the minions and then the minions determine if the command is for them based on the tgt value. fun: The function name to be called on the remote host(s), this must be a string in the format "<modulename>.<function name>" arg: The arg option needs to be a tuple of arguments to pass to the calling function; if left blank the function is run with no arguments. Returns: jid: A string, as returned by the publisher, which is the job id, this will inform the client where to get the job results minions: A set, the targets that the tgt passed should match.'
@tornado.gen.coroutine def pub_async(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', jid=u'', timeout=5, io_loop=None, listen=True, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') if ((self.opts.get(u'ipc_mode', u'') != u'tcp') and (not os.path.exists(os.path.join(self.opts[u'sock_dir'], u'publish_pull.ipc')))): log.error(u'Unable to connect to the salt master publisher at %s', self.opts[u'sock_dir']) raise SaltClientError payload_kwargs = self._prep_pub(tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs) master_uri = (((u'tcp://' + salt.utils.ip_bracket(self.opts[u'interface'])) + u':') + str(self.opts[u'ret_port'])) channel = salt.transport.client.AsyncReqChannel.factory(self.opts, io_loop=io_loop, crypt=u'clear', master_uri=master_uri) try: if (listen and (not self.event.connect_pub(timeout=timeout))): raise SaltReqTimeoutError() payload = (yield channel.send(payload_kwargs, timeout=timeout)) except SaltReqTimeoutError: raise SaltReqTimeoutError(u'Salt request timed out. The master is not responding. You may need to run your command with `--async` in order to bypass the congested event bus. With `--async`, the CLI tool will print the job id (jid) and exit immediately without listening for responses. You can then use `salt-run jobs.lookup_jid` to look up the results of the job in the job cache later.') if (not payload): key = self.__read_master_key() if (key == self.key): raise tornado.gen.Return(payload) self.key = key payload_kwargs[u'key'] = self.key payload = (yield channel.send(payload_kwargs)) error = payload.pop(u'error', None) if (error is not None): raise PublishError(error) if (not payload): raise tornado.gen.Return(payload) del channel raise tornado.gen.Return({u'jid': payload[u'load'][u'jid'], u'minions': payload[u'load'][u'minions']})
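A sketch of calling the coroutine form from a tornado IOLoop; listen=False skips the event-bus subscription for brevity, and whether the client should be constructed against the same loop is treated as an assumption here rather than documented behaviour.

import tornado.gen
import tornado.ioloop
import salt.client

io_loop = tornado.ioloop.IOLoop.current()
local = salt.client.LocalClient()

@tornado.gen.coroutine
def publish():
    # Same contract as pub(), but awaitable on the IOLoop.
    pub_data = yield local.pub_async('*', 'test.ping', listen=False, io_loop=io_loop)
    raise tornado.gen.Return(pub_data)

print(io_loop.run_sync(publish))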
'Since the function key is missing from this dict, wrap the lookup as a call that runs the named function on the minion, provided the key is available in the self.functions set'
def __missing__(self, key):
if (key not in self.functions): raise KeyError return self.run_key(key)
'Find out what functions are available on the minion'
def __load_functions(self):
return set(self.local.cmd(self.minion, u'sys.list_functions').get(self.minion, []))
'Return a function that executes the arguments passed via the local client'
def run_key(self, key):
def func(*args, **kwargs): '\n Run a remote call\n ' args = list(args) for (_key, _val) in six.iteritems(kwargs): args.append(u'{0}={1}'.format(_key, _val)) return self.local.cmd(self.minion, key, args) return func
'Call an execution module with the given arguments and keyword arguments .. versionchanged:: 2015.8.0 Added the ``cmd`` method for consistency with the other Salt clients. The existing ``function`` and ``sminion.functions`` interfaces still exist but have been removed from the docs. .. code-block:: python caller.cmd(\'test.arg\', \'Foo\', \'Bar\', baz=\'Baz\') caller.cmd(\'event.send\', \'myco/myevent/something\', data={\'foo\': \'Foo\'}, with_env=[\'GIT_COMMIT\'], with_grains=True)'
def cmd(self, fun, *args, **kwargs):
return self.sminion.functions[fun](*args, **kwargs)
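A sketch of the Caller interface, which runs execution modules in-process on the local minion and therefore needs read access to the minion configuration and keys; the module calls are placeholders.

import salt.client

caller = salt.client.Caller()
print(caller.cmd('test.ping'))
print(caller.cmd('grains.item', 'os'))
print(caller.cmd('test.arg', 'Foo', 'Bar', baz='Baz'))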
'Call a single salt function'
def function(self, fun, *args, **kwargs):
func = self.sminion.functions[fun] (args, kwargs) = salt.minion.load_args_and_kwargs(func, salt.utils.args.parse_input(args), kwargs) return func(*args, **kwargs)
'Load and start all available api modules'
def run(self):
if (not len(self.netapi)): log.error(u'Did not find any netapi configurations, nothing to start') for fun in self.netapi: if fun.endswith(u'.start'): log.info(u'Starting %s netapi module', fun) self.process_manager.add_process(self.netapi[fun]) if (signal.getsignal(signal.SIGINT) is signal.SIG_DFL): signal.signal(signal.SIGINT, self._handle_signals) if (signal.getsignal(signal.SIGTERM) is signal.SIG_DFL): signal.signal(signal.SIGTERM, self._handle_signals) self.process_manager.run()
'Publish the command!'
def pub(self, tgt, fun, arg=(), tgt_type=u'glob', ret=u'', jid=u'', timeout=5, **kwargs):
if (u'expr_form' in kwargs): salt.utils.warn_until(u'Fluorine', u"The target type should be passed using the 'tgt_type' argument instead of 'expr_form'. Support for using 'expr_form' will be removed in Salt Fluorine.") tgt_type = kwargs.pop(u'expr_form') payload_kwargs = self._prep_pub(tgt, fun, arg=arg, tgt_type=tgt_type, ret=ret, jid=jid, timeout=timeout, **kwargs) kind = self.opts[u'__role'] if (kind not in kinds.APPL_KINDS): emsg = u"Invalid application kind = '{0}' for Raet LocalClient.".format(kind) log.error((emsg + u'\n')) raise ValueError(emsg) if (kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.master], kinds.APPL_KIND_NAMES[kinds.applKinds.syndic]]): lanename = u'master' else: emsg = u"Unsupported application kind '{0}' for Raet LocalClient.".format(kind) log.error((emsg + u'\n')) raise ValueError(emsg) sockdirpath = self.opts[u'sock_dir'] name = (u'client' + nacling.uuid(size=18)) stack = LaneStack(name=name, lanename=lanename, sockdirpath=sockdirpath) stack.Pk = raeting.PackKind.pack.value manor_yard = RemoteYard(stack=stack, lanename=lanename, name=u'manor', dirpath=sockdirpath) stack.addRemote(manor_yard) route = {u'dst': (None, manor_yard.name, u'local_cmd'), u'src': (None, stack.local.name, None)} msg = {u'route': route, u'load': payload_kwargs} stack.transmit(msg) stack.serviceAll() while True: time.sleep(0.01) stack.serviceAll() while stack.rxMsgs: (msg, sender) = stack.rxMsgs.popleft() ret = msg.get(u'return', {}) if (u'ret' in ret): stack.server.close() return ret[u'ret'] stack.server.close() return ret
'Execute the salt command given by cmd dict. cmd is a dictionary of the following form: \'mode\': \'modestring\', \'fun\' : \'modulefunctionstring\', \'kwarg\': functionkeywordargdictionary, \'tgt\' : \'targetpatternstring\', \'tgt_type\' : \'targetpatterntype\', \'ret\' : \'returner namestring\', \'timeout\': \'functiontimeout\', \'arg\' : \'functionpositionalarg sequence\', \'token\': \'salttokenstring\', \'username\': \'usernamestring\', \'password\': \'passwordstring\', \'eauth\': \'eauthtypestring\', Implied by the fun is which client is used to run the command, that is, either the master local minion client, the master runner client, or the master wheel client. The cmd dict items are as follows: mode: either \'sync\' or \'async\'. Defaults to \'async\' if missing fun: required. If the function is to be run on the master using either a wheel or runner client then the fun: includes either \'wheel.\' or \'runner.\' as a prefix and has three parts separated by \'.\'. Otherwise the fun: specifies a module to be run on a minion via the local minion client. Example: fun of \'wheel.config.values\' run with master wheel client fun of \'runner.manage.status\' run with master runner client fun of \'test.ping\' run with local minion client fun of \'wheel.foobar\' run with the local minion client, not the wheel client kwarg: A dictionary of keyword function parameters to be passed to the eventual salt function specified by fun: tgt: Pattern string specifying the targeted minions when the implied client is local tgt_type: Optional target pattern type string when client is local minion. Defaults to \'glob\' if missing ret: Optional name string of returner when local minion client. arg: Optional positional argument string when local minion client token: the salt token. Either token: is required or the set of username:, password:, and eauth: username: the salt username. Required if token is missing. password: the user\'s password. Required if token is missing. eauth: the authentication type such as \'pam\' or \'ldap\'. Required if token is missing'
def run(self, cmd):
cmd = dict(cmd) client = u'minion' mode = cmd.get(u'mode', u'async') funparts = cmd.get(u'fun', u'').split(u'.') if ((len(funparts) > 2) and (funparts[0] in [u'wheel', u'runner'])): client = funparts[0] cmd[u'fun'] = u'.'.join(funparts[1:]) if (not ((u'token' in cmd) or ((u'eauth' in cmd) and (u'password' in cmd) and (u'username' in cmd)))): raise EauthAuthenticationError(u'No authentication credentials given') executor = getattr(self, u'{0}_{1}'.format(client, mode)) result = executor(**cmd) return result
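A sketch of the low-dict format described above; `client` stands for an already-constructed instance of this class, and the credentials are placeholders for whatever external_auth backend is configured.

low = {
    'mode': 'sync',            # wait for the result instead of just a jid
    'fun': 'test.ping',        # plain module.function -> local minion client
    'tgt': '*',
    'tgt_type': 'glob',
    'username': 'saltdev',
    'password': 'saltdev',
    'eauth': 'pam',
}
result = client.run(low)

runner_low = {
    'mode': 'async',
    'fun': 'runner.manage.status',  # 'runner.' prefix -> master runner client
    'username': 'saltdev',
    'password': 'saltdev',
    'eauth': 'pam',
}
job = client.run(runner_low)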
'Wrap LocalClient for running :ref:`execution modules <all-salt.modules>` and immediately return the job ID. The results of the job can then be retrieved at a later time. .. seealso:: :ref:`python-api`'
def minion_async(self, **kwargs):
return self.localClient.run_job(**kwargs)
'Wrap LocalClient for running :ref:`execution modules <all-salt.modules>` .. seealso:: :ref:`python-api`'
def minion_sync(self, **kwargs):
return self.localClient.cmd(**kwargs)
'Wrap RunnerClient for executing :ref:`runner modules <all-salt.runners>` Expects that one of the kwargs is key \'fun\' whose value is the namestring of the function to call'
def runner_async(self, **kwargs):
return self.runnerClient.master_call(**kwargs)
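A sketch of the three thin wrappers above; `client` is again assumed to be an instance of this class with localClient and runnerClient already attached, and the exact low-chunk keys master_call expects (credentials in particular) are an assumption here.

# Fire-and-forget on the minions: returns {'jid': ..., 'minions': [...]}.
job = client.minion_async(tgt='*', fun='test.ping')

# Blocking form: returns the per-minion results directly.
ret = client.minion_sync(tgt='*', fun='test.ping', timeout=30)

# Runner call routed through the master.
runner_job = client.runner_async(fun='manage.status', username='saltdev',
                                 password='saltdev', eauth='pam')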