def test_minion_daemon_hash_type_verified(self):
    '''
    Verify that the Minion daemon checks the hash_type config option.
    '''
    def exec_test(child_pipe):
        def _create_minion():
            '''
            Create a minion instance.
            '''
            obj = daemons.Minion()
            obj.config = {'user': 'dummy', 'hash_type': alg}
            for attr in ['start_log_info', 'prepare', 'shutdown']:
                setattr(obj, attr, MagicMock())
            setattr(obj, 'minion', MagicMock(restart=False))
            return obj

        ret = True
        _logger = LoggerMock()
        with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
            with patch('salt.cli.daemons.log', _logger):
                for alg in ['md5', 'sha1']:
                    _create_minion().start()
                    ret = (ret and _logger.messages
                           and _logger.has_message('Do not use {alg}'.format(alg=alg),
                                                   log_type='warning'))
                _logger.reset()
                for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
                    _create_minion().start()
                    ret = (ret and _logger.messages
                           and not _logger.has_message('Do not use '))
        child_pipe.send(ret)
        child_pipe.close()

    self._multiproc_exec_test(exec_test)

def test_proxy_minion_daemon_hash_type_verified(self):
    '''
    Verify that the ProxyMinion daemon checks the hash_type config option.
    '''
    def exec_test(child_pipe):
        def _create_proxy_minion():
            '''
            Create a proxy minion instance.
            '''
            obj = daemons.ProxyMinion()
            obj.config = {'user': 'dummy', 'hash_type': alg}
            for attr in ['minion', 'start_log_info', 'prepare', 'shutdown', 'tune_in']:
                setattr(obj, attr, MagicMock())
            obj.minion.restart = False
            return obj

        ret = True
        _logger = LoggerMock()
        with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
            with patch('salt.cli.daemons.log', _logger):
                for alg in ['md5', 'sha1']:
                    _create_proxy_minion().start()
                    ret = (ret and _logger.messages
                           and _logger.has_message('Do not use {alg}'.format(alg=alg),
                                                   log_type='warning'))
                _logger.reset()
                for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
                    _create_proxy_minion().start()
                    ret = (ret and _logger.messages
                           and not _logger.has_message('Do not use '))
        child_pipe.send(ret)
        child_pipe.close()

    self._multiproc_exec_test(exec_test)

def test_syndic_daemon_hash_type_verified(self):
    '''
    Verify that the Syndic daemon checks the hash_type config option.
    '''
    def exec_test(child_pipe):
        def _create_syndic():
            '''
            Create a syndic instance.
            '''
            obj = daemons.Syndic()
            obj.config = {'user': 'dummy', 'hash_type': alg}
            for attr in ['syndic', 'start_log_info', 'prepare', 'shutdown']:
                setattr(obj, attr, MagicMock())
            return obj

        ret = True
        _logger = LoggerMock()
        with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
            with patch('salt.cli.daemons.log', _logger):
                for alg in ['md5', 'sha1']:
                    _create_syndic().start()
                    ret = (ret and _logger.messages
                           and _logger.has_message('Do not use {alg}'.format(alg=alg),
                                                   log_type='warning'))
                _logger.reset()
                for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
                    _create_syndic().start()
                    ret = (ret and _logger.messages
                           and not _logger.has_message('Do not use '))
        child_pipe.send(ret)
        child_pipe.close()

    self._multiproc_exec_test(exec_test)

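# The three daemon tests above all exercise the same warning path. A minimal
# sketch of the kind of check they expect (hypothetical helper name; the real
# logic lives in salt.cli.daemons):
#
#     import logging
#
#     log = logging.getLogger(__name__)
#     INSECURE_HASHES = ('md5', 'sha1')
#
#     def warn_on_weak_hash(config):
#         # Warn for weak digests, stay silent for the sha2 family.
#         alg = config.get('hash_type', 'sha256')
#         if alg in INSECURE_HASHES:
#             log.warning('Do not use %s, it is insecure', alg)
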
def test_single_opts(self):
    '''
    Sanity check for ssh.Single options.
    '''
    argv = ['ssh.set_auth_key', 'root', 'hobn+amNAXSBTiOXEqlBjGB...rsa root@master']
    opts = {
        'argv': argv,
        '__role': 'master',
        'cachedir': self.tmp_cachedir,
        'extension_modules': os.path.join(self.tmp_cachedir, 'extmods'),
    }
    target = {
        'passwd': 'abc123',
        'ssh_options': None,
        'sudo': False,
        'identities_only': False,
        'host': 'login1',
        'user': 'root',
        'timeout': 65,
        'remote_port_forwards': None,
        'sudo_user': '',
        'port': '22',
        'priv': '/etc/salt/pki/master/ssh/salt-ssh.rsa',
    }
    single = ssh.Single(
        opts,
        opts['argv'],
        'localhost',
        mods={},
        fsclient=None,
        thin=thin.thin_path(opts['cachedir']),
        mine=False,
        **target)

    self.assertEqual(single.shell._ssh_opts(), '')
    self.assertEqual(
        single.shell._cmd_str('date +%s'),
        'ssh login1 '
        '-o KbdInteractiveAuthentication=no '
        '-o PasswordAuthentication=yes '
        '-o ConnectTimeout=65 '
        '-o Port=22 '
        '-o IdentityFile=/etc/salt/pki/master/ssh/salt-ssh.rsa '
        '-o User=root '
        'date +%s')

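# For reference, the mapping between the target dict and the generated flags,
# restated from the assertion above (not from the ssh.Single source):
# timeout=65 -> -o ConnectTimeout=65, port='22' -> -o Port=22,
# priv=... -> -o IdentityFile=..., user='root' -> -o User=root, and a set
# passwd apparently selects -o PasswordAuthentication=yes.
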
def _create_tmp_cache_file(self, tmp_dir, serializer):
    '''
    Helper that creates a temporary cache file using localfs.store. Used to
    keep the localfs cache unit tests DRY.
    '''
    self.addCleanup(shutil.rmtree, tmp_dir)
    with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
        with patch.dict(localfs.__context__, {'serial': serializer}):
            localfs.store(bank='bank', key='key', data='payload data', cachedir=tmp_dir)

def test_store_no_base_cache_dir(self):
    '''
    Tests that a SaltCacheError is raised when the base directory doesn't
    exist and cannot be created.
    '''
    with patch('os.path.isdir', MagicMock(return_value=None)):
        with patch('os.makedirs', MagicMock(side_effect=OSError)):
            self.assertRaises(SaltCacheError, localfs.store, bank='', key='', data='', cachedir='')

def test_store_close_mkstemp_file_handle(self):
    '''
    Tests that the file descriptor that is opened by os.open during the
    mkstemp call in localfs.store is closed before calling
    salt.utils.files.fopen on the filename. This test mocks the call to
    mkstemp, but forces an OSError to be raised when the close() function is
    called on a file descriptor that doesn't exist.
    '''
    with patch('os.path.isdir', MagicMock(return_value=True)):
        with patch('tempfile.mkstemp', MagicMock(return_value=(12345, 'foo'))):
            self.assertRaises(OSError, localfs.store, bank='', key='', data='', cachedir='')

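# A sketch of the fd-handling pattern this test pins down (assumed shape of
# localfs.store, not the actual implementation): mkstemp() returns an
# OS-level descriptor that must be closed before the path is reopened with a
# higher-level file object.
#
#     import os
#     import tempfile
#
#     def store_sketch(data):
#         fd, tmp_path = tempfile.mkstemp()
#         os.close(fd)  # the test above asserts this close happens first
#         with open(tmp_path, 'wb') as fh_:
#             fh_.write(data)
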
def test_store_error_writing_cache(self):
    '''
    Tests that a SaltCacheError is raised when there is a problem writing to
    the cache file.
    '''
    with patch('os.path.isdir', MagicMock(return_value=True)):
        with patch('tempfile.mkstemp', MagicMock(return_value=('one', 'two'))):
            with patch('os.close', MagicMock(return_value=None)):
                with patch('salt.utils.files.fopen', MagicMock(side_effect=IOError)):
                    self.assertRaises(SaltCacheError, localfs.store, bank='', key='', data='', cachedir='')

def test_store_success(self):
    '''
    Tests that the store function writes the data to the serializer for
    storage.
    '''
    tmp_dir = tempfile.mkdtemp(dir=TMP)
    self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))
    with salt.utils.files.fopen(tmp_dir + '/bank/key.p', 'rb') as fh_:
        for line in fh_:
            self.assertIn(six.b('payload data'), line)

def test_fetch_return_when_cache_file_does_not_exist(self):
    '''
    Tests that the fetch function returns an empty dict when the cache key
    file doesn't exist.
    '''
    with patch('os.path.isfile', MagicMock(return_value=False)):
        self.assertEqual(localfs.fetch(bank='', key='', cachedir=''), {})

def test_fetch_error_reading_cache(self):
    '''
    Tests that a SaltCacheError is raised when there is a problem reading
    the cache file.
    '''
    with patch('os.path.isfile', MagicMock(return_value=True)):
        with patch('salt.utils.files.fopen', MagicMock(side_effect=IOError)):
            self.assertRaises(SaltCacheError, localfs.fetch, bank='', key='', cachedir='')

def test_fetch_success(self):
    '''
    Tests that the fetch function is able to read the cache file and return
    its data.
    '''
    tmp_dir = tempfile.mkdtemp(dir=TMP)
    serializer = salt.payload.Serial(self)
    self._create_tmp_cache_file(tmp_dir, serializer)
    with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
        with patch.dict(localfs.__context__, {'serial': serializer}):
            self.assertIn('payload data', localfs.fetch(bank='bank', key='key', cachedir=tmp_dir))

def test_updated_return_when_cache_file_does_not_exist(self):
    '''
    Tests that the updated function returns None when the cache key file
    doesn't exist.
    '''
    with patch('os.path.isfile', MagicMock(return_value=False)):
        self.assertIsNone(localfs.updated(bank='', key='', cachedir=''))

def test_updated_error_when_reading_mtime(self):
    '''
    Tests that a SaltCacheError is raised when there is a problem reading
    the mtime of the cache file.
    '''
    with patch('os.path.isfile', MagicMock(return_value=True)):
        with patch('os.path.getmtime', MagicMock(side_effect=IOError)):
            self.assertRaises(SaltCacheError, localfs.updated, bank='', key='', cachedir='')

def test_updated_success(self):
    '''
    Test that the updated function returns the modification time of the
    cache file.
    '''
    tmp_dir = tempfile.mkdtemp(dir=TMP)
    self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))
    with patch('os.path.join', MagicMock(return_value=tmp_dir + '/bank/key.p')):
        self.assertIsInstance(localfs.updated(bank='bank', key='key', cachedir=tmp_dir), int)

def test_flush_key_is_none_and_no_target_dir(self):
    '''
    Tests that the flush function returns False when no key is passed in and
    the target directory doesn't exist.
    '''
    with patch('os.path.isdir', MagicMock(return_value=False)):
        self.assertFalse(localfs.flush(bank='', key=None, cachedir=''))

def test_flush_key_provided_and_no_key_file_false(self):
    '''
    Tests that the flush function returns False when a key file is provided
    but the target key file doesn't exist in the cache bank.
    '''
    with patch('os.path.isfile', MagicMock(return_value=False)):
        self.assertFalse(localfs.flush(bank='', key='key', cachedir=''))

def test_flush_success(self):
    '''
    Tests that the flush function returns True when a key file is provided
    and the target key exists in the cache bank.
    '''
    with patch('os.path.isfile', MagicMock(return_value=True)):
        tmp_dir = tempfile.mkdtemp(dir=TMP)
        self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            self.assertTrue(localfs.flush(bank='bank', key='key', cachedir=tmp_dir))

def test_flush_error_raised(self):
    '''
    Tests that a SaltCacheError is raised when there is a problem removing
    the key file from the cache bank.
    '''
    with patch('os.path.isfile', MagicMock(return_value=True)):
        with patch('os.remove', MagicMock(side_effect=OSError)):
            self.assertRaises(SaltCacheError, localfs.flush, bank='', key='key', cachedir='/var/cache/salt')

def test_ls_no_base_dir(self):
    '''
    Tests that the ls function returns an empty list if the bank directory
    doesn't exist.
    '''
    with patch('os.path.isdir', MagicMock(return_value=False)):
        self.assertEqual(localfs.ls(bank='', cachedir=''), [])

def test_ls_error_raised_no_bank_directory_access(self):
    '''
    Tests that a SaltCacheError is raised when there is a problem accessing
    the cache bank directory.
    '''
    with patch('os.path.isdir', MagicMock(return_value=True)):
        with patch('os.listdir', MagicMock(side_effect=OSError)):
            self.assertRaises(SaltCacheError, localfs.ls, bank='', cachedir='')

def test_ls_success(self):
    '''
    Tests the return of the ls function containing bank entries.
    '''
    tmp_dir = tempfile.mkdtemp(dir=TMP)
    self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))
    with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
        self.assertEqual(localfs.ls(bank='bank', cachedir=tmp_dir), ['key'])

def test_contains(self):
    '''
    Test the return of the contains function when key=None and when a key is
    provided.
    '''
    tmp_dir = tempfile.mkdtemp(dir=TMP)
    self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))
    with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
        self.assertTrue(localfs.contains(bank='bank', key=None, cachedir=tmp_dir))
    with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
        self.assertTrue(localfs.contains(bank='bank', key='key', cachedir=tmp_dir))

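# Taken together, the tests above pin down this round-trip for the localfs
# cache API (restated from the calls and assertions above):
#
#     localfs.store(bank='bank', key='key', data='payload data', cachedir=tmp_dir)
#     localfs.contains(bank='bank', key='key', cachedir=tmp_dir)  # -> True
#     localfs.fetch(bank='bank', key='key', cachedir=tmp_dir)     # contains 'payload data'
#     localfs.ls(bank='bank', cachedir=tmp_dir)                   # -> ['key']
#     localfs.flush(bank='bank', key='key', cachedir=tmp_dir)     # -> True
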
def test_json_as_text_out(self):
    '''
    Some services send JSON as text/plain for compatibility purposes.
    '''
    data = {'valid': 'stuff'}
    request, response = self.request('/', method='POST',
                                     body=json.dumps(data),
                                     headers=(('Content-type', 'text/plain'),))
    self.assertEqual(response.status, '200 OK')
    self.assertDictEqual(request.unserialized_data, data)

@property
def token(self):
    '''
    Mint and return a valid token for auth_creds.
    '''
    return self.auth.mk_token(self.auth_creds_dict)

def test_accept_content_type(self):
    '''
    Test the base handler's Accept-header picking.
    '''
    # Default to JSON when no Accept header is sent
    response = self.fetch('/')
    self.assertEqual(response.headers['Content-Type'], self.content_type_map['json'])
    self.assertEqual(type(json.loads(response.body)), dict)

    response = self.fetch('/', headers={'Accept': self.content_type_map['json']})
    self.assertEqual(response.headers['Content-Type'], self.content_type_map['json'])
    self.assertEqual(type(json.loads(response.body)), dict)

    response = self.fetch('/', headers={'Accept': self.content_type_map['yaml']})
    self.assertEqual(response.headers['Content-Type'], self.content_type_map['yaml'])
    self.assertEqual(type(yaml.load(response.body)), dict)

    # Unsupported content type yields 406 Not Acceptable
    response = self.fetch('/', headers={'Accept': self.content_type_map['xml']})
    self.assertEqual(response.code, 406)

    # Realistic multi-valued Accept headers
    accept_header = self.content_type_map['real-accept-header-json']
    response = self.fetch('/', headers={'Accept': accept_header})
    self.assertEqual(response.headers['Content-Type'], self.content_type_map['json'])
    self.assertEqual(type(json.loads(response.body)), dict)

    accept_header = self.content_type_map['real-accept-header-yaml']
    response = self.fetch('/', headers={'Accept': accept_header})
    self.assertEqual(response.headers['Content-Type'], self.content_type_map['yaml'])
    self.assertEqual(type(yaml.load(response.body)), dict)

def test_token(self):
    '''
    Test that the token is returned correctly.
    '''
    # No token sent
    token = json.loads(self.fetch('/').body)['token']
    self.assertIs(token, None)

    # Token in the auth header
    response = self.fetch('/', headers={saltnado.AUTH_TOKEN_HEADER: 'foo'})
    token = json.loads(response.body)['token']
    self.assertEqual(token, 'foo')

    # Token in the cookie
    response = self.fetch('/', headers={'Cookie': '{0}=foo'.format(saltnado.AUTH_COOKIE_NAME)})
    token = json.loads(response.body)['token']
    self.assertEqual(token, 'foo')

    # Token in both; the header wins
    response = self.fetch('/', headers={saltnado.AUTH_TOKEN_HEADER: 'foo',
                                        'Cookie': '{0}=bar'.format(saltnado.AUTH_COOKIE_NAME)})
    token = json.loads(response.body)['token']
    self.assertEqual(token, 'foo')

def test_deserialize(self):
    '''
    Send various encoded forms of lowstates (and bad ones) to make sure we
    handle deserialization correctly.
    '''
    valid_lowstate = [
        {'client': 'local', 'tgt': '*', 'fun': 'test.fib', 'arg': ['10']},
        {'client': 'runner', 'fun': 'jobs.lookup_jid', 'jid': '20130603122505459265'},
    ]

    # Send as JSON
    response = self.fetch('/', method='POST', body=json.dumps(valid_lowstate),
                          headers={'Content-Type': self.content_type_map['json']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Send YAML labeled as JSON (should break)
    response = self.fetch('/', method='POST', body=yaml.dump(valid_lowstate),
                          headers={'Content-Type': self.content_type_map['json']})
    self.assertEqual(response.code, 400)

    # Send as YAML
    response = self.fetch('/', method='POST', body=yaml.dump(valid_lowstate),
                          headers={'Content-Type': self.content_type_map['yaml']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Send JSON labeled as YAML (works, since JSON is a subset of YAML)
    response = self.fetch('/', method='POST', body=json.dumps(valid_lowstate),
                          headers={'Content-Type': self.content_type_map['yaml']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Send JSON as text/plain
    response = self.fetch('/', method='POST', body=json.dumps(valid_lowstate),
                          headers={'Content-Type': self.content_type_map['text']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Send form-encoded
    form_lowstate = (
        ('client', 'local'),
        ('tgt', '*'),
        ('fun', 'test.fib'),
        ('arg', '10'),
        ('arg', 'foo'),
    )
    response = self.fetch('/', method='POST', body=urlencode(form_lowstate),
                          headers={'Content-Type': self.content_type_map['form']})
    returned_lowstate = json.loads(response.body)['lowstate']
    self.assertEqual(len(returned_lowstate), 1)
    returned_lowstate = returned_lowstate[0]
    self.assertEqual(returned_lowstate['client'], 'local')
    self.assertEqual(returned_lowstate['tgt'], '*')
    self.assertEqual(returned_lowstate['fun'], 'test.fib')
    self.assertEqual(returned_lowstate['arg'], ['10', 'foo'])

    # Send JSON with a utf-8 charset
    response = self.fetch('/', method='POST', body=json.dumps(valid_lowstate),
                          headers={'Content-Type': self.content_type_map['json-utf8']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

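# The form-encoded case is the least obvious one above: repeated 'arg' fields
# collapse into a single lowstate chunk whose 'arg' is a list (restating the
# assertions, not new behavior):
#
#     client=local&tgt=*&fun=test.fib&arg=10&arg=foo
#     -> [{'client': 'local', 'tgt': '*', 'fun': 'test.fib', 'arg': ['10', 'foo']}]
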
def test_get_lowstate(self):
    '''
    Test the transformations _get_lowstate applies to low data.
    '''
    valid_lowstate = [{u'client': u'local', u'tgt': u'*', u'fun': u'test.fib', u'arg': [u'10']}]

    # Case 1: 'arg' is already a list
    request_lowstate = {'client': 'local', 'tgt': '*', 'fun': 'test.fib', 'arg': ['10']}
    response = self.fetch('/', method='POST', body=json.dumps(request_lowstate),
                          headers={'Content-Type': self.content_type_map['json']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Case 2: a scalar 'arg' is promoted to a list
    request_lowstate = {'client': 'local', 'tgt': '*', 'fun': 'test.fib', 'arg': '10'}
    response = self.fetch('/', method='POST', body=json.dumps(request_lowstate),
                          headers={'Content-Type': self.content_type_map['json']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    request_lowstate = {'client': 'local', 'tgt': '*', 'fun': 'test.fib', 'arg': '10'}
    response = self.fetch('/', method='POST', body=json.dumps(request_lowstate),
                          headers={'Content-Type': self.content_type_map['json']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Case 3: the same payload as YAML
    response = self.fetch('/', method='POST', body=yaml.dump(request_lowstate),
                          headers={'Content-Type': self.content_type_map['yaml']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Case 4: the same payload as text/plain JSON
    response = self.fetch('/', method='POST', body=json.dumps(request_lowstate),
                          headers={'Content-Type': self.content_type_map['text']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

    # Case 5: form-encoded
    request_form_lowstate = (
        ('client', 'local'),
        ('tgt', '*'),
        ('fun', 'test.fib'),
        ('arg', '10'),
    )
    response = self.fetch('/', method='POST', body=urlencode(request_form_lowstate),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])

def test_cors_origin_wildcard(self):
    '''
    Check that endpoints return Access-Control-Allow-Origin.
    '''
    self._app.mod_opts['cors_origin'] = '*'
    headers = self.fetch('/').headers
    self.assertEqual(headers['Access-Control-Allow-Origin'], '*')

def test_cors_origin_single(self):
    '''
    Check that endpoints return Access-Control-Allow-Origin when a single
    origin is set.
    '''
    self._app.mod_opts['cors_origin'] = 'http://example.foo'

    # example.foo is an authorized origin
    headers = self.fetch('/', headers={'Origin': 'http://example.foo'}).headers
    self.assertEqual(headers['Access-Control-Allow-Origin'], 'http://example.foo')

    # example2.foo is not
    headers = self.fetch('/', headers={'Origin': 'http://example2.foo'}).headers
    self.assertEqual(headers.get('Access-Control-Allow-Origin'), None)

def test_cors_origin_multiple(self):
    '''
    Check that endpoints return Access-Control-Allow-Origin when multiple
    origins are set.
    '''
    self._app.mod_opts['cors_origin'] = ['http://example.foo', 'http://foo.example']

    headers = self.fetch('/', headers={'Origin': 'http://example.foo'}).headers
    self.assertEqual(headers['Access-Control-Allow-Origin'], 'http://example.foo')

    headers = self.fetch('/', headers={'Origin': 'http://example2.foo'}).headers
    self.assertEqual(headers.get('Access-Control-Allow-Origin'), None)

def test_cors_preflight_request(self):
    '''
    Check that a preflight request contains the right headers.
    '''
    self._app.mod_opts['cors_origin'] = '*'
    request_headers = 'X-Auth-Token, accept, content-type'
    preflight_headers = {'Access-Control-Request-Headers': request_headers,
                         'Access-Control-Request-Method': 'GET'}
    response = self.fetch('/', method='OPTIONS', headers=preflight_headers)
    headers = response.headers

    self.assertEqual(response.code, 204)
    self.assertEqual(headers['Access-Control-Allow-Headers'], request_headers)
    self.assertEqual(headers['Access-Control-Expose-Headers'], 'X-Auth-Token')
    self.assertEqual(headers['Access-Control-Allow-Methods'], 'OPTIONS, GET, POST')

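# For reference, the exchange those assertions encode looks like this
# (headers restated from the test):
#
#     OPTIONS / HTTP/1.1
#     Access-Control-Request-Headers: X-Auth-Token, accept, content-type
#     Access-Control-Request-Method: GET
#
#     HTTP/1.1 204 No Content
#     Access-Control-Allow-Headers: X-Auth-Token, accept, content-type
#     Access-Control-Expose-Headers: X-Auth-Token
#     Access-Control-Allow-Methods: OPTIONS, GET, POST
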
def test_cors_origin_url_with_arguments(self):
    '''
    Check that preflight requests work with URLs that include path
    components, such as the jobs or minions endpoints.
    '''
    self._app.mod_opts['cors_origin'] = '*'
    request_headers = 'X-Auth-Token, accept, content-type'
    preflight_headers = {'Access-Control-Request-Headers': request_headers,
                         'Access-Control-Request-Method': 'GET'}
    response = self.fetch('/1234567890', method='OPTIONS', headers=preflight_headers)
    headers = response.headers
    self.assertEqual(response.code, 204)
    self.assertEqual(headers['Access-Control-Allow-Origin'], '*')

def test_get(self):
    '''
    We don't allow GET requests, so assert that we get a 401.
    '''
    response = self.fetch('/login')
    self.assertEqual(response.code, 401)

def test_login(self):
    '''
    Test valid logins.
    '''
    # Form-encoded credentials
    response = self.fetch('/login', method='POST', body=urlencode(self.auth_creds),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(response.code, 200)
    response_obj = json.loads(response.body)['return'][0]
    self.assertEqual(response_obj['perms'],
                     self.opts['external_auth']['auto'][self.auth_creds_dict['username']])
    self.assertIn('token', response_obj)
    self.assertEqual(response_obj['user'], self.auth_creds_dict['username'])
    self.assertEqual(response_obj['eauth'], self.auth_creds_dict['eauth'])

    # JSON credentials
    response = self.fetch('/login', method='POST', body=json.dumps(self.auth_creds_dict),
                          headers={'Content-Type': self.content_type_map['json']})
    self.assertEqual(response.code, 200)
    response_obj = json.loads(response.body)['return'][0]
    self.assertEqual(response_obj['perms'],
                     self.opts['external_auth']['auto'][self.auth_creds_dict['username']])
    self.assertIn('token', response_obj)
    self.assertEqual(response_obj['user'], self.auth_creds_dict['username'])
    self.assertEqual(response_obj['eauth'], self.auth_creds_dict['eauth'])

    # YAML credentials
    response = self.fetch('/login', method='POST', body=yaml.dump(self.auth_creds_dict),
                          headers={'Content-Type': self.content_type_map['yaml']})
    self.assertEqual(response.code, 200)
    response_obj = json.loads(response.body)['return'][0]
    self.assertEqual(response_obj['perms'],
                     self.opts['external_auth']['auto'][self.auth_creds_dict['username']])
    self.assertIn('token', response_obj)
    self.assertEqual(response_obj['user'], self.auth_creds_dict['username'])
    self.assertEqual(response_obj['eauth'], self.auth_creds_dict['eauth'])

def test_login_missing_password(self):
    '''
    Test logins with a missing password.
    '''
    bad_creds = []
    for key, val in six.iteritems(self.auth_creds_dict):
        if key == 'password':
            continue
        bad_creds.append((key, val))

    response = self.fetch('/login', method='POST', body=urlencode(bad_creds),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(response.code, 400)

def test_login_bad_creds(self):
    '''
    Test logins with bad credentials.
    '''
    bad_creds = []
    for key, val in six.iteritems(self.auth_creds_dict):
        if key == 'username':
            val = val + 'foo'
        bad_creds.append((key, val))

    response = self.fetch('/login', method='POST', body=urlencode(bad_creds),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(response.code, 401)

def test_login_invalid_data_structure(self):
    '''
    Test logins whose JSON payload is a list, a number, or a string rather
    than the expected mapping.
    '''
    response = self.fetch('/login', method='POST', body=json.dumps(self.auth_creds),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(response.code, 400)

    response = self.fetch('/login', method='POST', body=json.dumps(42),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(response.code, 400)

    response = self.fetch('/login', method='POST', body=json.dumps('mystring42'),
                          headers={'Content-Type': self.content_type_map['form']})
    self.assertEqual(response.code, 400)

@gen_test
def test_websocket_handler_bad_token(self):
    '''
    A bad token should return a 401 during a websocket connect.
    '''
    token = 'A' * len(getattr(hashlib, self.opts.get('hash_type', 'md5'))().hexdigest())
    url = 'ws://127.0.0.1:{0}/all_events/{1}'.format(self.get_http_port(), token)
    request = HTTPRequest(url, headers={'Origin': 'http://example.com',
                                        'Host': 'example.com'})
    try:
        ws = yield websocket_connect(request)
    except HTTPError as error:
        self.assertEqual(error.code, 401)

def test_any_future(self):
    '''
    Test that the Any future does what we think it does.
    '''
    futures = []
    for x in range(0, 3):
        future = tornado.concurrent.Future()
        future.add_done_callback(self.stop)
        futures.append(future)

    any_ = saltnado.Any(futures)
    self.assertIs(any_.done(), False)

    # Resolve one future; only it and the Any wrapper should be done
    futures[0].set_result('foo')
    self.wait()
    self.assertIs(any_.done(), True)
    self.assertIs(futures[0].done(), True)
    self.assertIs(futures[1].done(), False)
    self.assertIs(futures[2].done(), False)

    # The Any wrapper resolves with the future that finished
    self.assertEqual(any_.result(), futures[0])

    # Wrap the remaining futures and repeat
    futures = futures[1:]
    any_ = saltnado.Any(futures)
    futures[0].set_result('foo')
    self.wait()
    self.assertIs(any_.done(), True)
    self.assertIs(futures[0].done(), True)
    self.assertIs(futures[1].done(), False)

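# A minimal sketch of the semantics the test above pins down for
# saltnado.Any (assumed implementation, for illustration only): a future
# that resolves with the first of its child futures to complete.
#
#     import tornado.concurrent
#
#     def any_sketch(futures):
#         result = tornado.concurrent.Future()
#
#         def on_done(future):
#             # First completion wins; later completions are ignored.
#             if not result.done():
#                 result.set_result(future)
#
#         for future in futures:
#             future.add_done_callback(on_done)
#         return result
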
def test_simple(self):
    '''
    Test getting a few events.
    '''
    with eventpublisher_process():
        me = salt.utils.event.MasterEvent(SOCK_DIR)
        event_listener = saltnado.EventListener({}, {'sock_dir': SOCK_DIR,
                                                     'transport': 'zeromq'})
        self._finished = False
        event_future = event_listener.get_event(self, 'evt1', self.stop)
        me.fire_event({'data': 'foo2'}, 'evt2')  # an event we don't want
        me.fire_event({'data': 'foo1'}, 'evt1')  # the event we do want
        self.wait()
        self.assertTrue(event_future.done())
        self.assertEqual(event_future.result()['tag'], 'evt1')
        self.assertEqual(event_future.result()['data']['data'], 'foo1')

def test_set_event_handler(self):
    '''
    Test subscribing to events using set_event_handler.
    '''
    with eventpublisher_process():
        me = salt.utils.event.MasterEvent(SOCK_DIR)
        event_listener = saltnado.EventListener({}, {'sock_dir': SOCK_DIR,
                                                     'transport': 'zeromq'})
        self._finished = False
        event_future = event_listener.get_event(self, tag='evt', callback=self.stop, timeout=1)
        me.fire_event({'data': 'foo'}, 'evt')
        self.wait()
        # The event arrived, so the timeout callback should have been
        # cleaned up
        self.assertEqual(len(event_listener.timeout_map), 0)

def test_timeout(self):
    '''
    Make sure timeouts work correctly.
    '''
    with eventpublisher_process():
        event_listener = saltnado.EventListener({}, {'sock_dir': SOCK_DIR,
                                                     'transport': 'zeromq'})
        self._finished = False
        event_future = event_listener.get_event(self, tag='evt1', callback=self.stop, timeout=1)
        self.wait()
        self.assertTrue(event_future.done())
        with self.assertRaises(saltnado.TimeoutException):
            event_future.result()

def test_basic(self):
    '''
    Test a variety of messages, make sure we get the expected responses.
    '''
    msgs = [
        {'foo': 'bar'},
        {'bar': 'baz'},
        {'baz': 'qux', 'list': [1, 2, 3]},
    ]
    for msg in msgs:
        ret = self.channel.send(msg, timeout=2, tries=1)
        self.assertEqual(ret['load'], msg)

def test_normalization(self):
    '''
    Since we use msgpack, we need to test that tuple types are converted to
    lists.
    '''
    types = {'list': list}
    msgs = [{'list': tuple([1, 2, 3])}]
    for msg in msgs:
        ret = self.channel.send(msg, timeout=2, tries=1)
        for key, value in six.iteritems(ret['load']):
            self.assertEqual(types[key], type(value))

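# The normalization being tested comes from msgpack itself: the wire format
# has no tuple type, so tuples come back as lists (assuming the
# msgpack-python bindings with their default use_list=True):
#
#     import msgpack
#     msgpack.unpackb(msgpack.packb((1, 2, 3)))  # -> [1, 2, 3]
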
def test_badload(self):
    '''
    Test a variety of bad requests, make sure that we get some sort of
    error.
    '''
    msgs = ['', [], tuple()]
    for msg in msgs:
        ret = self.channel.send(msg, timeout=2, tries=1)
        self.assertEqual(ret, 'payload and load must be a dict')

@classmethod
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    return payload, {'fun': 'send_clear'}

@classmethod
@tornado.gen.coroutine
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    raise tornado.gen.Return((payload, {'fun': 'send_clear'}))

@classmethod
@tornado.gen.coroutine
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    raise tornado.gen.Return((payload, {'fun': 'send'}))

def test_badload(self):
    '''
    Test a variety of bad requests, make sure that we get some sort of
    error.
    '''
    msgs = ['', [], tuple()]
    for msg in msgs:
        with self.assertRaises(salt.exceptions.AuthenticationError):
            ret = self.channel.send(msg, timeout=5)

@classmethod
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    return payload, {'fun': 'send_clear'}

@classmethod
@tornado.gen.coroutine
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    raise tornado.gen.Return((payload, {'fun': 'send_clear'}))

@classmethod
@tornado.gen.coroutine
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    raise tornado.gen.Return((payload, {'fun': 'send_clear'}))

@classmethod
@tornado.gen.coroutine
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    raise tornado.gen.Return((payload, {'fun': 'send'}))

@flaky
def test_badload(self):
    '''
    Test a variety of bad requests, make sure that we get some sort of
    error.
    '''
    msgs = ['', [], tuple()]
    for msg in msgs:
        with self.assertRaises(salt.exceptions.AuthenticationError):
            ret = self.channel.send(msg)

@classmethod
def _handle_payload(cls, payload):
    '''
    TODO: something besides echo
    '''
    return payload, {'fun': 'send_clear'}

def build_map(self, template=None):
    '''
    Build from a specific template, or just use a default if no template is
    passed to this function.
    '''
    if template is None:
        template = textwrap.dedent('''\
            {{ ubuntu }}
            {{ centos }}
            {{ debian }}
            ''')
    full_template = map_prefix + template + map_suffix
    ret = jinja2.Template(full_template).render(**map_data)
    log.debug('built map: \n%s', ret)
    return ret

def test_random_password(self):
    '''
    Test for https://github.com/saltstack/salt/issues/21796
    '''
    ret = self.render(random_password_template)

def test_import_random_password(self):
    '''
    Import test for https://github.com/saltstack/salt/issues/21796
    '''
    self.write_template_file('password.sls', random_password_template)
    ret = self.render(random_password_import_template)

def test_requisite_implicit_list(self):
    '''
    Ensure that the implicit list characteristic works as expected.
    '''
    ret = self.render(requisite_implicit_list_template,
                      {'grains': {'os_family': 'Debian', 'os': 'Debian'}})
    self.assertEqual(ret, OrderedDict([
        ('pkg', OrderedDict([('pkg.installed', [])])),
        ('service', OrderedDict([
            ('service.running', [
                {'require': [{'cmd': 'cmd'}, {'pkg': 'pkg'}]},
                {'watch': [{'file': 'file'}]},
            ]),
        ])),
    ]))

def test_map(self):
    '''
    Test declarative ordering.
    '''
    # With declarative ordering, the ubuntu-specific service name should
    # override the debian one since ubuntu comes last.
    template = self.build_map(textwrap.dedent('''\
        {{ debian }}
        {{ centos }}
        {{ ubuntu }}
        '''))

    ret = self.samba_with_grains(template, self.debian_grains)
    self.assert_equal(ret, *self.debian_attrs)

    ret = self.samba_with_grains(template, self.ubuntu_grains)
    self.assert_equal(ret, *self.ubuntu_attrs)

    ret = self.samba_with_grains(template, self.centos_grains)
    self.assert_equal(ret, *self.centos_attrs)

    # Switching the order, debian wins and the ubuntu attributes no longer
    # match.
    template = self.build_map(textwrap.dedent('''\
        {{ ubuntu }}
        {{ debian }}
        '''))

    ret = self.samba_with_grains(template, self.debian_grains)
    self.assert_equal(ret, *self.debian_attrs)

    ret = self.samba_with_grains(template, self.ubuntu_grains)
    self.assert_not_equal(ret, *self.ubuntu_attrs)

def test_map_with_priority(self):
    '''
    With declarative ordering, the debian service name would override the
    ubuntu one, since debian comes second. This tests overriding that
    behavior using the priority attribute.
    '''
    template = self.build_map(textwrap.dedent('''\
        {{ priority(('os_family', 'os')) }}
        {{ ubuntu }}
        {{ centos }}
        {{ debian }}
        '''))

    ret = self.samba_with_grains(template, self.debian_grains)
    self.assert_equal(ret, *self.debian_attrs)

    ret = self.samba_with_grains(template, self.ubuntu_grains)
    self.assert_equal(ret, *self.ubuntu_attrs)

    ret = self.samba_with_grains(template, self.centos_grains)
    self.assert_equal(ret, *self.centos_attrs)

def test_format_log_non_ascii_character(self):
    '''
    Tests running a non-ascii character through the state.format_log
    function. See issue #33605.
    '''
    # format_log returns nothing, so there is no return value to assert
    # against; we only check that it does not raise on non-ascii input.
    ret = {'changes': {u'Fran\xe7ais': {'old': 'something old',
                                        'new': 'something new'}},
           'result': True}
    salt.state.format_log(ret)

def test_render_error_on_invalid_requisite(self):
    '''
    Test that the state compiler correctly delivers a rendering exception
    when a requisite cannot be resolved.
    '''
    with patch('salt.state.State._gather_pillar') as state_patch:
        high_data = {
            'git': OrderedDict([
                ('pkg', [
                    OrderedDict([
                        ('require', [
                            OrderedDict([
                                ('file', OrderedDict([('test1', 'test')])),
                            ]),
                        ]),
                    ]),
                    'installed',
                    {'order': 10000},
                ]),
                ('__sls__', u'issue_35226'),
                ('__env__', 'base'),
            ]),
        }
        minion_opts = self.get_temp_config('minion')
        minion_opts['pillar'] = {'git': OrderedDict([('test1', 'test')])}
        state_obj = salt.state.State(minion_opts)
        with self.assertRaises(salt.exceptions.SaltRenderError):
            state_obj.call_high(high_data)

def setUp(self):
    '''
    Create multiple top files for use in each test. Envs within self.tops
    should be defined in the same order as this ordering will affect
    ordering in merge_tops. The envs in each top file are defined in the
    same order as self.env_order. This is no accident; it was done this way
    in order to produce the proper deterministic results to match the tests.
    Changing anything created in this func will affect the tests, as they
    would affect ordering in states in real life. So, don't change any of
    this unless you know what you're doing. If a test is failing, it is
    likely due to incorrect logic in merge_tops.
    '''
    self.env_order = ['base', 'foo', 'bar', 'baz']
    self.addCleanup(delattr, self, 'env_order')

    self.tops = {
        'base': OrderedDict([
            ('base', OrderedDict([('*', ['base_base'])])),
            ('foo', OrderedDict([('*', ['base_foo'])])),
            ('bar', OrderedDict([('*', ['base_bar'])])),
            ('baz', OrderedDict([('*', ['base_baz'])])),
        ]),
        'foo': OrderedDict([
            ('base', OrderedDict([('*', ['foo_base'])])),
            ('foo', OrderedDict([('*', ['foo_foo'])])),
            ('bar', OrderedDict([('*', ['foo_bar'])])),
            ('baz', OrderedDict([('*', ['foo_baz'])])),
        ]),
        'bar': OrderedDict([
            ('base', OrderedDict([('*', ['bar_base'])])),
            ('foo', OrderedDict([('*', ['bar_foo'])])),
            ('bar', OrderedDict([('*', ['bar_bar'])])),
            ('baz', OrderedDict([('*', ['bar_baz'])])),
        ]),
        # The 'baz' env has no top file
        'baz': OrderedDict(),
    }
    self.addCleanup(delattr, self, 'tops')

    # Variant where the 'base' top file only defines states for 'base'
    self.tops_limited_base = copy.deepcopy(self.tops)
    self.tops_limited_base['base'] = OrderedDict([
        ('base', OrderedDict([('*', ['base_base'])])),
    ])
    self.addCleanup(delattr, self, 'tops_limited_base')

def get_tops(self, tops=None, env_order=None, state_top_saltenv=None):
    '''
    A test helper that emulates salt.state.HighState.get_tops(), simply
    constructing an appropriate data structure for top files from multiple
    environments.
    '''
    if tops is None:
        tops = self.tops
    if state_top_saltenv:
        append_order = [state_top_saltenv]
    elif env_order:
        append_order = env_order
    else:
        append_order = self.env_order

    ret = DefaultOrderedDict(list)
    for env in append_order:
        item = tops[env]
        if env_order:
            for remove in [x for x in self.env_order if x not in env_order]:
                # Simulate the env being excluded via env_order
                item.pop(remove)
        ret[env].append(tops[env])
    return ret

def test_merge_tops_merge(self):
    '''
    Test the default merge strategy for top files, in an instance where the
    base top file contains sections for all envs and the other envs' top
    files are therefore ignored.
    '''
    merged_tops = self.highstate().merge_tops(self.get_tops())

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in self.env_order:
        expected_merge[env]['*'] = ['base_{0}'.format(env)]

    self.assertEqual(merged_tops, expected_merge)

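# For concreteness, given the data built in setUp, the expected merge above
# works out to (derived from the loop, not quoted from the source):
#
#     {'base': {'*': ['base_base']},
#      'foo':  {'*': ['base_foo']},
#      'bar':  {'*': ['base_bar']},
#      'baz':  {'*': ['base_baz']}}
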
def test_merge_tops_merge_limited_base(self):
    '''
    Test the default merge strategy for top files when the base environment
    only defines states for itself.
    '''
    tops = self.get_tops(tops=self.tops_limited_base)
    merged_tops = self.highstate().merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in self.env_order[:-1]:
        expected_merge[env]['*'] = ['_'.join((env, env))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_merge_state_top_saltenv_base(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'base' saltenv, with the default merging strategy. This should
    result in all states from the 'base' top file being in the merged
    result.
    '''
    env = 'base'
    tops = self.get_tops(state_top_saltenv=env)
    merged_tops = self.highstate().merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env2 in self.env_order:
        expected_merge[env2]['*'] = ['_'.join((env, env2))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_merge_state_top_saltenv_foo(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'foo' saltenv, with the default merging strategy. This should result
    in just the 'foo' environment's states from the 'foo' top file being in
    the merged result.
    '''
    env = 'foo'
    tops = self.get_tops(state_top_saltenv=env)
    merged_tops = self.highstate().merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    expected_merge[env]['*'] = ['_'.join((env, env))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_merge_all(self):
    '''
    Test the merge_all strategy.
    '''
    tops = self.get_tops()
    merged_tops = self.highstate(top_file_merging_strategy='merge_all').merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in self.env_order:
        states = []
        for top_env in self.env_order:
            if top_env in tops[top_env][0]:
                states.extend(tops[top_env][0][env]['*'])
        expected_merge[env]['*'] = states

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_merge_all_with_env_order(self):
    '''
    Test an altered env_order with the 'merge_all' strategy.
    '''
    env_order = ['bar', 'foo', 'base']
    tops = self.get_tops(env_order=env_order)
    merged_tops = self.highstate(
        top_file_merging_strategy='merge_all',
        env_order=env_order).merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in [x for x in self.env_order if x in env_order]:
        states = []
        for top_env in env_order:
            states.extend(tops[top_env][0][env]['*'])
        expected_merge[env]['*'] = states

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_merge_all_state_top_saltenv_base(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'base' saltenv, with the 'merge_all' merging strategy. This should
    result in all states from the 'base' top file being in the merged
    result.
    '''
    env = 'base'
    tops = self.get_tops(state_top_saltenv=env)
    merged_tops = self.highstate(top_file_merging_strategy='merge_all').merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env2 in self.env_order:
        expected_merge[env2]['*'] = ['_'.join((env, env2))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_merge_all_state_top_saltenv_foo(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'foo' saltenv, with the 'merge_all' merging strategy. This should
    result in all the states from the 'foo' top file being in the merged
    result.
    '''
    env = 'foo'
    tops = self.get_tops(state_top_saltenv=env)
    merged_tops = self.highstate(top_file_merging_strategy='merge_all').merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env2 in self.env_order:
        expected_merge[env2]['*'] = ['_'.join((env, env2))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_same_with_default_top(self):
    '''
    Test to see if the top file that corresponds to the requested env is the
    one that is used by the state system. Also test the 'default_top' option
    for env 'baz', which has no top file and should pull its states from the
    'foo' top file.
    '''
    merged_tops = self.highstate(
        top_file_merging_strategy='same',
        default_top='foo').merge_tops(self.get_tops())

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in self.env_order[:-1]:
        expected_merge[env]['*'] = ['_'.join((env, env))]
    # 'baz' has no top file, so it pulls states from the 'foo' top file
    expected_merge['baz']['*'] = ['foo_baz']

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_same_without_default_top(self):
    '''
    Test to see if the top file that corresponds to the requested env is the
    one that is used by the state system. default_top will not be set
    (falling back to 'base'), so the 'baz' environment should pull its
    states from the 'base' top file.
    '''
    merged_tops = self.highstate(top_file_merging_strategy='same').merge_tops(self.get_tops())

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in self.env_order[:-1]:
        expected_merge[env]['*'] = ['_'.join((env, env))]
    expected_merge['baz']['*'] = ['base_baz']

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_same_limited_base_without_default_top(self):
    '''
    Test to see if the top file that corresponds to the requested env is the
    one that is used by the state system. default_top will not be set
    (falling back to 'base'), and since we are using a limited base top
    file, the 'baz' environment should not appear in the merged tops.
    '''
    tops = self.get_tops(tops=self.tops_limited_base)
    merged_tops = self.highstate(top_file_merging_strategy='same').merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    for env in self.env_order[:-1]:
        expected_merge[env]['*'] = ['_'.join((env, env))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_same_state_top_saltenv_base(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'base' saltenv, with the 'same' merging strategy. This should result
    in just the 'base' environment's states from the 'base' top file being
    in the merged result.
    '''
    env = 'base'
    tops = self.get_tops(state_top_saltenv=env)
    merged_tops = self.highstate(top_file_merging_strategy='same').merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    expected_merge[env]['*'] = ['_'.join((env, env))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_same_state_top_saltenv_foo(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'foo' saltenv, with the 'same' merging strategy. This should result
    in just the 'foo' environment's states from the 'foo' top file being in
    the merged result.
    '''
    env = 'foo'
    tops = self.get_tops(state_top_saltenv=env)
    merged_tops = self.highstate(top_file_merging_strategy='same').merge_tops(tops)

    expected_merge = DefaultOrderedDict(OrderedDict)
    expected_merge[env]['*'] = ['_'.join((env, env))]

    self.assertEqual(merged_tops, expected_merge)

def test_merge_tops_same_state_top_saltenv_baz(self):
    '''
    Test the 'state_top_saltenv' parameter to load states exclusively from
    the 'baz' saltenv, with the 'same' merging strategy. This should result
    in an empty dictionary since this environment has no top file.
    '''
    tops = self.get_tops(state_top_saltenv='baz')
    merged_tops = self.highstate(top_file_merging_strategy='same').merge_tops(tops)
    expected_merge = DefaultOrderedDict(OrderedDict)
    self.assertEqual(merged_tops, expected_merge)

def test_depends(self):
    '''
    Test that the depends decorator works properly.
    '''
    # 'loaded' should be exposed; 'not_loaded' should not be
    self.assertTrue(inspect.isfunction(self.loader[self.module_name + '.loaded']))
    self.assertTrue((self.module_name + '.not_loaded') not in self.loader)

def test_basic(self):
    '''
    Ensure that it only loads stuff when needed.
    '''
    # Nothing should be loaded to start with
    self.assertEqual(self.loader._dict, {})
    self.assertTrue(inspect.isfunction(self.loader['test.ping']))

    # Only the 'test' module should have been loaded
    for key, val in six.iteritems(self.loader._dict):
        self.assertEqual(key.split('.', 1)[0], 'test')

    # Missing functions are never loaded
    self.assertFalse('test.missing_func' in self.loader._dict)

def test_len_load(self):
    '''
    Since LazyLoader is a MutableMapping, if someone asks for len() we have
    to load all.
    '''
    self.assertEqual(self.loader._dict, {})
    len(self.loader)  # force a load of all modules
    self.assertNotEqual(self.loader._dict, {})

def test_iter_load(self):
    '''
    Since LazyLoader is a MutableMapping, if someone asks to iterate we have
    to load all.
    '''
    self.assertEqual(self.loader._dict, {})
    for key, func in six.iteritems(self.loader):
        break
    self.assertNotEqual(self.loader._dict, {})

def test_context(self):
    '''
    Make sure context is shared across modules.
    '''
    self.assertEqual(self.loader._dict, {})
    func = self.loader['test.ping']
    with patch.dict(func.__globals__['__context__'], {'foo': 'bar'}):
        self.assertEqual(self.loader['test.echo'].__globals__['__context__']['foo'], 'bar')
        self.assertEqual(self.loader['grains.get'].__globals__['__context__']['foo'], 'bar')

def test_alias(self):
    '''
    Make sure that you can access aliased modules.
    '''
    self.assertNotIn(self.module_key, self.loader)
    self.update_module()
    self.assertNotIn('{0}.test_alias'.format(self.module_name), self.loader)
    self.assertTrue(inspect.isfunction(self.loader['{0}.working_alias'.format(self.module_name)]))

def test__load__(self):
    '''
    If a module specifies __load__ we should only load/expose those modules.
    '''
    self.update_module()
    self.assertNotIn(self.module_key + '2', self.loader)

def test__load__and_depends(self):
    '''
    If a module specifies __load__ we should only load/expose those modules.
    '''
    self.update_module()
    self.assertNotIn(self.module_key + '3', self.loader)
    self.assertNotIn(self.module_key + '4', self.loader)

def test_virtual_alias(self):
    '''
    Test the __virtual_alias__ feature.
    '''
    self.update_module()

    mod_names = [self.module_name] + list(virtual_aliases)
    for mod_name in mod_names:
        func_name = '.'.join((mod_name, 'test'))
        log.debug('Running %s (dict attribute)', func_name)
        self.assertTrue(self.loader[func_name]())
        log.debug('Running %s (loader attribute)', func_name)
        self.assertTrue(getattr(self.loader, mod_name).test())

def test_reload(self):
    '''
    Make sure that we can reload all libraries of arbitrary depth.
    '''
    self._verify_libs()

    for lib in self.libs:
        for x in range(5):
            self.update_lib(lib)
            self.loader.clear()
            self._verify_libs()

def _verify_globals(self, mod_dict):
    '''
    Verify that the globals listed in the doc string (from the test) are in
    these modules.
    '''
    global_vars = []
    for val in six.itervalues(mod_dict):
        # Only look at modules loaded by the salt loader
        if val.__module__.startswith('salt.loaded') and hasattr(val, '__globals__'):
            global_vars.append(val.__globals__)

    self.assertNotEqual(global_vars, [], msg='No modules were loaded.')

    # Pull the list of expected globals from the calling test's docstring
    func_name = inspect.stack()[1][3]
    names = next(six.itervalues(yaml.load(getattr(self, func_name).__doc__)))

    for item in global_vars:
        for name in names:
            self.assertIn(name, list(item.keys()))

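# Note the mechanism above: _verify_globals finds its caller's name via
# inspect.stack()[1][3] and parses that test's docstring as YAML, so the
# docstrings of the tests below double as data. Roughly:
#
#     doc = '''
#     Test that auth mods have:
#         - __pillar__
#         - __grains__
#     '''
#     yaml.load(doc)  # -> {'Test that auth mods have': ['__pillar__', '__grains__']}
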
def test_auth(self):
    '''
    Test that auth mods have:
        - __pillar__
        - __grains__
        - __salt__
        - __context__
    '''
    self._verify_globals(salt.loader.auth(self.master_opts))

def test_runners(self):
    '''
    Test that runners have:
        - __pillar__
        - __salt__
        - __opts__
        - __grains__
        - __context__
    '''
    self._verify_globals(salt.loader.runner(self.master_opts))

def test_returners(self):
    '''
    Test that returners have:
        - __salt__
        - __opts__
        - __pillar__
        - __grains__
        - __context__
    '''
    self._verify_globals(salt.loader.returners(self.master_opts, {}))

def test_pillars(self):
    '''
    Test that pillars have:
        - __salt__
        - __opts__
        - __pillar__
        - __grains__
        - __context__
    '''
    self._verify_globals(salt.loader.pillars(self.master_opts, {}))

def test_tops(self):
    '''
    Test that tops have: []
    '''
    self._verify_globals(salt.loader.tops(self.master_opts))

def test_outputters(self):
    '''
    Test that outputters have:
        - __opts__
        - __pillar__
        - __grains__
        - __context__
    '''
    self._verify_globals(salt.loader.outputters(self.master_opts))

def test_serializers(self):
    '''
    Test that serializers have: []
    '''
    self._verify_globals(salt.loader.serializers(self.master_opts))

def test_states(self):
    '''
    Test that states have:
        - __pillar__
        - __salt__
        - __opts__
        - __grains__
        - __context__
    '''
    self._verify_globals(salt.loader.states(self.master_opts, {}, {}, {}))