desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def test_diff_with_diffent_values(self):
    '''
    Test that different maps are indeed reported different.

    NOTE(review): "diffent" in the method name looks like a typo for
    "different", but renaming would change the test's public identifier.
    '''
    old_map = {'file1': 12345}
    new_map = {'file1': 1234}
    assert fileserver.diff_mtime_map(old_map, new_map) is True
def test_file_list_emptydirs(self):
    '''
    Ensure that the fileclient class won't allow a direct call to
    file_list_emptydirs().
    '''
    # Callable form of assertRaises: the base client must refuse this call.
    self.assertRaises(
        NotImplementedError, self.file_client.file_list_emptydirs)
def test_get_file(self):
    '''
    Ensure that the fileclient class won't allow a direct call to
    get_file().
    '''
    # Callable form of assertRaises: the base client must refuse this call.
    self.assertRaises(
        NotImplementedError, self.file_client.get_file, None)
def setUp(self):
    '''
    No need to add a dummy foo.txt to muddy up the github repo, just make
    our own fileserver root on-the-fly.
    '''
    def _new_dir(path):
        '''
        Add a new dir at ``path`` using os.makedirs. If the directory
        already exists, remove it recursively and then try to create it
        again.
        '''
        try:
            os.makedirs(path)
        except OSError as exc:
            if (exc.errno == errno.EEXIST):
                # Stale directory left over from a previous run; wipe it
                # and recreate so each test starts from a clean tree.
                shutil.rmtree(path)
                os.makedirs(path)
            else:
                raise

    for saltenv in SALTENVS:
        saltenv_root = os.path.join(FS_ROOT, saltenv)
        # Write the top-level test file for the saltenv
        _new_dir(saltenv_root)
        path = os.path.join(saltenv_root, 'foo.txt')
        with salt.utils.files.fopen(path, 'w') as fp_:
            fp_.write(
                "This is a test file in the '{0}' saltenv.\n".format(saltenv))
        # Write a file to the subdir of each saltenv so directory caching
        # can be exercised as well.
        subdir_abspath = os.path.join(saltenv_root, SUBDIR)
        os.makedirs(subdir_abspath)
        for subdir_file in SUBDIR_FILES:
            path = os.path.join(subdir_abspath, subdir_file)
            with salt.utils.files.fopen(path, 'w') as fp_:
                # NOTE(review): the quote after '{1} looks unbalanced in
                # the original text; preserved byte-for-byte since the
                # cache tests only check substring membership.
                fp_.write(
                    "This is file '{0}' in subdir '{1} from saltenv '{2}'".format(
                        subdir_file, SUBDIR, saltenv))
    # Create the cache root used by the fileclient under test
    _new_dir(CACHE_ROOT)
def tearDown(self):
    '''
    Remove the directories created for these tests.
    '''
    # Same two trees that setUp created, removed in the same order.
    for tree in (FS_ROOT, CACHE_ROOT):
        shutil.rmtree(tree)
def test_cache_dir(self):
    '''
    Ensure entire directory is cached to correct location.
    '''
    # Shallow-copy the minion opts and overlay the mocked values.
    patched_opts = dict(self.minion_opts)
    patched_opts.update(MOCKED_OPTS)
    with patch.dict(fileclient.__opts__, patched_opts):
        client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
        for saltenv in SALTENVS:
            self.assertTrue(
                client.cache_dir(
                    'salt://{0}'.format(SUBDIR), saltenv, cachedir=None))
            for subdir_file in SUBDIR_FILES:
                cache_loc = os.path.join(
                    fileclient.__opts__['cachedir'],
                    'files', saltenv, SUBDIR, subdir_file)
                with salt.utils.files.fopen(cache_loc) as fp_:
                    content = fp_.read()
                log.debug('cache_loc = %s', cache_loc)
                log.debug('content = %s', content)
                # Each cached file names itself, its subdir and saltenv.
                for fragment in (subdir_file, SUBDIR, saltenv):
                    self.assertTrue(fragment in content)
def test_cache_dir_with_alternate_cachedir_and_absolute_path(self):
    '''
    Ensure entire directory is cached to correct location when an
    alternate cachedir is specified and that cachedir is an absolute
    path.
    '''
    patched_opts = dict(self.minion_opts)
    patched_opts.update(MOCKED_OPTS)
    alt_cachedir = os.path.join(TMP, 'abs_cachedir')
    with patch.dict(fileclient.__opts__, patched_opts):
        client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
        for saltenv in SALTENVS:
            self.assertTrue(
                client.cache_dir(
                    'salt://{0}'.format(SUBDIR), saltenv,
                    cachedir=alt_cachedir))
            for subdir_file in SUBDIR_FILES:
                # Absolute alt cachedir is used verbatim as the root.
                cache_loc = os.path.join(
                    alt_cachedir, 'files', saltenv, SUBDIR, subdir_file)
                with salt.utils.files.fopen(cache_loc) as fp_:
                    content = fp_.read()
                log.debug('cache_loc = %s', cache_loc)
                log.debug('content = %s', content)
                for fragment in (subdir_file, SUBDIR, saltenv):
                    self.assertTrue(fragment in content)
def test_cache_dir_with_alternate_cachedir_and_relative_path(self):
    '''
    Ensure entire directory is cached to correct location when an
    alternate cachedir is specified and that cachedir is a relative
    path.
    '''
    patched_opts = dict(self.minion_opts)
    patched_opts.update(MOCKED_OPTS)
    alt_cachedir = 'foo'
    with patch.dict(fileclient.__opts__, patched_opts):
        client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
        for saltenv in SALTENVS:
            self.assertTrue(
                client.cache_dir(
                    'salt://{0}'.format(SUBDIR), saltenv,
                    cachedir=alt_cachedir))
            for subdir_file in SUBDIR_FILES:
                # A relative alt cachedir is joined under the configured
                # cachedir rather than used as-is.
                cache_loc = os.path.join(
                    fileclient.__opts__['cachedir'], alt_cachedir,
                    'files', saltenv, SUBDIR, subdir_file)
                with salt.utils.files.fopen(cache_loc) as fp_:
                    content = fp_.read()
                log.debug('cache_loc = %s', cache_loc)
                log.debug('content = %s', content)
                for fragment in (subdir_file, SUBDIR, saltenv):
                    self.assertTrue(fragment in content)
def test_cache_file(self):
    '''
    Ensure file is cached to correct location.
    '''
    patched_opts = dict(self.minion_opts)
    patched_opts.update(MOCKED_OPTS)
    with patch.dict(fileclient.__opts__, patched_opts):
        client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
        for saltenv in SALTENVS:
            self.assertTrue(
                client.cache_file('salt://foo.txt', saltenv, cachedir=None))
            cache_loc = os.path.join(
                fileclient.__opts__['cachedir'], 'files', saltenv, 'foo.txt')
            with salt.utils.files.fopen(cache_loc) as fp_:
                content = fp_.read()
            log.debug('cache_loc = %s', cache_loc)
            log.debug('content = %s', content)
            # setUp wrote the saltenv name into the file body.
            self.assertTrue(saltenv in content)
def test_cache_file_with_alternate_cachedir_and_absolute_path(self):
    '''
    Ensure file is cached to correct location when an alternate cachedir
    is specified and that cachedir is an absolute path.
    '''
    patched_opts = dict(self.minion_opts)
    patched_opts.update(MOCKED_OPTS)
    alt_cachedir = os.path.join(TMP, 'abs_cachedir')
    with patch.dict(fileclient.__opts__, patched_opts):
        client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
        for saltenv in SALTENVS:
            self.assertTrue(
                client.cache_file(
                    'salt://foo.txt', saltenv, cachedir=alt_cachedir))
            # Absolute alt cachedir is used verbatim as the root.
            cache_loc = os.path.join(alt_cachedir, 'files', saltenv, 'foo.txt')
            with salt.utils.files.fopen(cache_loc) as fp_:
                content = fp_.read()
            log.debug('cache_loc = %s', cache_loc)
            log.debug('content = %s', content)
            self.assertTrue(saltenv in content)
def test_cache_file_with_alternate_cachedir_and_relative_path(self):
    '''
    Ensure file is cached to correct location when an alternate cachedir
    is specified and that cachedir is a relative path.
    '''
    patched_opts = dict(self.minion_opts)
    patched_opts.update(MOCKED_OPTS)
    alt_cachedir = 'foo'
    with patch.dict(fileclient.__opts__, patched_opts):
        client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
        for saltenv in SALTENVS:
            self.assertTrue(
                client.cache_file(
                    'salt://foo.txt', saltenv, cachedir=alt_cachedir))
            # A relative alt cachedir is joined under the configured
            # cachedir rather than used as-is.
            cache_loc = os.path.join(
                fileclient.__opts__['cachedir'], alt_cachedir,
                'files', saltenv, 'foo.txt')
            with salt.utils.files.fopen(cache_loc) as fp_:
                content = fp_.read()
            log.debug('cache_loc = %s', cache_loc)
            log.debug('content = %s', content)
            self.assertTrue(saltenv in content)
@classmethod
def setUpClass(cls):
    '''
    Create special file_roots for symlink test on Windows.

    On non-Windows platforms no fixture is built and the class attribute
    is left as ``None``.
    '''
    if salt.utils.platform.is_windows():
        root_dir = tempfile.mkdtemp(dir=TMP)
        source_sym = os.path.join(root_dir, 'source_sym')
        with salt.utils.files.fopen(source_sym, 'w') as fp_:
            fp_.write('hello world!\n')
        # CreateSymbolicLink is given relative names, so chdir into the
        # fixture dir first and always restore the previous cwd.
        cwd = os.getcwd()
        try:
            os.chdir(root_dir)
            win32file.CreateSymbolicLink('dest_sym', 'source_sym', 0)
        finally:
            os.chdir(cwd)
        cls.test_symlink_list_file_roots = {'base': [root_dir]}
    else:
        cls.test_symlink_list_file_roots = None
@classmethod
def tearDownClass(cls):
    '''
    Remove special file_roots for symlink test.
    '''
    # Guard clause: the fixture only exists on Windows (see setUpClass).
    if not salt.utils.platform.is_windows():
        return
    try:
        salt.utils.files.rm_rf(cls.test_symlink_list_file_roots['base'][0])
    except OSError:
        # Best-effort cleanup; a vanished tree is fine.
        pass
def test_limit_traversal(self):
    '''
    1) Set up a deep directory structure
    2) Enable the configuration option 'fileserver_limit_traversal'
    3) Ensure that we can find SLS files in a directory so long as there
       is an SLS file in a directory above.
    4) Ensure that we cannot find an SLS file in a directory that does
       not have an SLS file in a directory above.
    '''
    opts = self.get_temp_config('master')
    opts['fileserver_limit_traversal'] = True
    states = salt.fileclient.Client(opts).list_states('base')
    self.assertIn('test_deep.test', states)
    self.assertIn('test_deep.a.test', states)
    # b/2 has no SLS file in its parent chain, so traversal stops early.
    self.assertNotIn('test_deep.b.2.test', states)
def test_drift_detector(self):
    '''
    Test drift detector for a correct cookie file.
    '''
    drift = zyppnotify.DriftDetector()
    drift._get_mtime = MagicMock(return_value=123)
    drift._get_checksum = MagicMock(return_value='deadbeef')
    cookie_io = BogusIO()
    # Patch the builtin open so PLUGINEND writes into our fake file.
    with patch(BUILTINS_OPEN, cookie_io):
        drift.PLUGINEND(None, None)
    self.assertEqual(str(cookie_io), 'deadbeef 123\n')
    self.assertEqual(cookie_io.mode, 'w')
    self.assertEqual(cookie_io.path, '/var/cache/salt/minion/rpmdb.cookie')
def _create_hg_repo(self):
    '''
    Create an hg repo in the tempdir and commit the pillar fixture files.

    Returns the repo path.
    '''
    hg_repo = os.path.join(self.tmpdir, 'repo_pillar')
    os.makedirs(hg_repo)
    subprocess.check_call(['hg', 'init', hg_repo])
    # Dump each fixture mapping to its own YAML file in the repo.
    for filename, contents in FILE_DATA.items():
        with salt.utils.files.fopen(
                os.path.join(hg_repo, filename), 'w') as data_file:
            yaml.dump(contents, data_file)
    subprocess.check_call([
        'hg', 'ci', '-A', '-R', hg_repo,
        '-m', 'first commit', '-u', COMMIT_USER_NAME,
    ])
    return hg_repo
def test_base(self):
    '''
    Check hg repo is imported correctly.
    '''
    repo_url = 'file://{0}'.format(self.hg_repo_path)
    pillar_data = hg_pillar.ext_pillar('*', None, repo_url)
    self.assertEqual(PILLAR_CONTENT, pillar_data)
def _create_repo(self):
    '''
    Create a source Git repo in a temp directory and commit the pillar
    fixture files.

    Returns the repo path.
    '''
    repo = os.path.join(self.tmpdir, 'repo_pillar')
    os.makedirs(repo)
    subprocess.check_call(['git', 'init', repo])
    # Dump each fixture mapping to its own YAML file in the repo.
    for filename, contents in FILE_DATA.items():
        with salt.utils.files.fopen(
                os.path.join(repo, filename), 'w') as data_file:
            yaml.dump(contents, data_file)
    subprocess.check_call(['git', 'add', '.'], cwd=repo)
    # config calls use .call (not check_call) so pre-set values don't fail
    subprocess.call(['git', 'config', 'user.email', COMMIT_USER_EMAIL], cwd=repo)
    subprocess.call(['git', 'config', 'user.name', COMMIT_USER_NAME], cwd=repo)
    subprocess.check_call(['git', 'commit', '-m', 'first commit'], cwd=repo)
    return repo
def test_base(self):
    '''
    Check direct call ``ext_pillar()`` interface.
    '''
    with patch.dict(git_pillar.__opts__, {'environment': None}):
        pillar_data = git_pillar.ext_pillar('myminion', self.conf_line, {})
        self.assertEqual(PILLAR_CONTENT, pillar_data)
def test_from_upper(self):
    '''
    Check whole calling stack from parent Pillar instance.

    This test is closer to what happens in real life, and demonstrates
    how ``compile_pillar()`` is called twice. This kind of test
    should/would become non-necessary once all these pillars are called
    exactly in the same way (git is an exception for now) and don't
    recurse.
    '''
    ext_conf = {'ext_pillar': [dict(git=self.conf_line)]}
    with patch.dict(git_pillar.__opts__, ext_conf):
        pil = Pillar(
            git_pillar.__opts__, git_pillar.__grains__, 'myminion', None)
        self.assertEqual(PILLAR_CONTENT, pil.compile_pillar(pillar_dirs={}))
def test_no_loop(self):
    '''
    Check that the reinstantiation of a pillar object does not loop
    endlessly.

    This test goes in great details of patching that the dedicated
    utilities might do in a simpler way. Namely, we replace the main
    ``ext_pillar`` entry function by one that keeps count of its calls.
    Otherwise, the fact that the :class:`MaximumRecursion` error is
    caught can get in the way of the testing.

    On the current code base, this test fails if the two first lines of
    :func:`git_pillar.ext_pillar`::

        if pillar_dirs is None:
            return

    are replaced by::

        if pillar_dirs is None:
            pillar_dirs = {}

    .. note:: the explicit anti-recursion protection does not prevent
       looping between two different Git pillars.

    This test will help subsequent refactors, and also as a base for
    other external pillars of the same kind.
    '''
    repo2 = os.path.join(self.tmpdir, 'repo_pillar2')
    conf_line2 = 'master file://{0}'.format(repo2)
    subprocess.check_call(['git', 'clone', self.repo_path, repo2])
    with patch.dict(git_pillar.__opts__,
                    {'ext_pillar': [dict(git=self.conf_line),
                                    dict(git=conf_line2)]}):
        # Pre-fetch the second repo so ext_pillar finds it up to date.
        git_pillar._update(*conf_line2.split(None, 1))
        pil = Pillar(git_pillar.__opts__, git_pillar.__grains__,
                     'myminion', 'base')
        orig_ext_pillar = pil.ext_pillars['git']
        # Counter lives on the wrapped function object itself.
        orig_ext_pillar.count = 0

        def ext_pillar_count_calls(minion_id, repo_string, pillar_dirs):
            orig_ext_pillar.count += 1
            if (orig_ext_pillar.count > 6):
                # Raising here keeps the test from hanging forever if
                # the anti-recursion guard is broken.
                raise RuntimeError('Infinite loop detected')
            return orig_ext_pillar(minion_id, repo_string, pillar_dirs)
        from salt.loader import LazyLoader
        orig_getitem = LazyLoader.__getitem__

        def __getitem__(self, key):
            # Intercept only the git ext_pillar lookup; everything else
            # goes through the real loader.
            if (key == 'git.ext_pillar'):
                return ext_pillar_count_calls
            return orig_getitem(self, key)

        with patch.object(LazyLoader, '__getitem__', __getitem__):
            self.assertEqual(PILLAR_CONTENT,
                             pil.compile_pillar(pillar_dirs={}))
            self.assertTrue((orig_ext_pillar.count < 7))
def test_parse_grains_target(self):
    '''
    Ensure proper parsing for grains targets.
    '''
    ret = salt.utils.minions.parse_target('G@a:b')
    expected = {'engine': 'G', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_grains_pcre_target(self):
    '''
    Ensure proper parsing for grains PCRE matching.
    '''
    ret = salt.utils.minions.parse_target('P@a:b')
    expected = {'engine': 'P', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_pillar_pcre_target(self):
    '''
    Ensure proper parsing for pillar PCRE matching.
    '''
    ret = salt.utils.minions.parse_target('J@a:b')
    expected = {'engine': 'J', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_list_target(self):
    '''
    Ensure proper parsing for list matching.
    '''
    ret = salt.utils.minions.parse_target('L@a:b')
    expected = {'engine': 'L', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_nodegroup_target(self):
    '''
    Ensure proper parsing for nodegroup matching.
    '''
    ret = salt.utils.minions.parse_target('N@a:b')
    expected = {'engine': 'N', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_subnet_target(self):
    '''
    Ensure proper parsing for subnet matching.
    '''
    ret = salt.utils.minions.parse_target('S@a:b')
    expected = {'engine': 'S', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_minion_pcre_target(self):
    '''
    Ensure proper parsing for minion PCRE matching.
    '''
    ret = salt.utils.minions.parse_target('E@a:b')
    expected = {'engine': 'E', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_range_target(self):
    '''
    Ensure proper parsing for range matching.
    '''
    ret = salt.utils.minions.parse_target('R@a:b')
    expected = {'engine': 'R', 'pattern': 'a:b', 'delimiter': None}
    self.assertDictEqual(ret, expected)
def test_parse_multiword_target(self):
    '''
    Ensure proper parsing for multi-word targets.

    Refs https://github.com/saltstack/salt/issues/37231
    '''
    ret = salt.utils.minions.parse_target('G@a:b c')
    # The space must survive into the parsed pattern.
    self.assertEqual(ret['pattern'], 'a:b c')
def test_simple_nodegroup(self):
    '''
    Smoke test a very simple nodegroup. No recursion.
    '''
    nodegroups = {
        'group1': '[email protected],bar.domain.com,baz.domain.com or bl*.domain.com',
    }
    ret = salt.utils.minions.nodegroup_comp('group1', nodegroups)
    self.assertListEqual(
        ret,
        ['[email protected],bar.domain.com,baz.domain.com',
         'or',
         'bl*.domain.com'])
def test_simple_recurse(self):
    '''
    Test a case where one nodegroup contains a second nodegroup.
    '''
    nodegroups = {
        'group1': '[email protected],bar.domain.com,baz.domain.com or bl*.domain.com',
        'group2': 'G@os:Debian and N@group1',
    }
    ret = salt.utils.minions.nodegroup_comp('group2', nodegroups)
    # The referenced group is expanded inline, wrapped in parentheses.
    self.assertListEqual(
        ret,
        ['(', 'G@os:Debian', 'and', '(',
         '[email protected],bar.domain.com,baz.domain.com', 'or',
         'bl*.domain.com', ')', ')'])
def test_circular_nodegroup_reference(self):
    '''
    Test to see what happens if A refers to B and B in turn refers back
    to A.
    '''
    nodegroups = {'group1': 'N@group2', 'group2': 'N@group1'}
    # A circular reference must yield an empty expansion, not recursion.
    self.assertEqual(
        salt.utils.minions.nodegroup_comp('group1', nodegroups), [])
def test_searchpath(self):
    '''
    The searchpath is based on the cachedir option and the saltenv
    parameter.
    '''
    tmp = tempfile.gettempdir()
    opts = copy.deepcopy(self.opts)
    opts.update({'cachedir': tmp})
    loader = SaltCacheLoader(opts, saltenv='test')
    expected = [os.path.join(tmp, 'files', 'test')]
    self.assertEqual(loader.searchpath, expected)
def test_mockclient(self):
    '''
    A MockFileClient is used that records all file requests normally
    sent to the master.
    '''
    loader = SaltCacheLoader(self.opts, 'test')
    fc = MockFileClient(loader)
    res = loader.get_source(None, 'hello_simple')
    # get_source returns (contents, filepath, uptodate-callable)
    assert len(res) == 3
    self.assertEqual(str(res[0]), 'world' + os.linesep)
    expected_path = os.path.join(
        TEMPLATES_DIR, 'files', 'test', 'hello_simple')
    self.assertEqual(res[1], expected_path)
    assert res[2](), 'Template up to date?'
    assert fc.requests
    self.assertEqual(fc.requests[0]['path'], 'salt://hello_simple')
def get_test_saltenv(self):
    '''
    Set up a simple jinja test environment.

    Returns a (mock file client, jinja Environment) pair sharing one
    cache loader.
    '''
    loader = SaltCacheLoader(self.opts, 'test')
    file_client = MockFileClient(loader)
    return (file_client, Environment(loader=loader))
def test_import(self):
    '''
    You can import and use macros from other files.
    '''
    fc, jinja = self.get_test_saltenv()
    rendered = jinja.get_template('hello_import').render()
    self.assertEqual(rendered, 'Hey world !a b !')
    # Both the template and the macro file it imports were fetched.
    requested = [req['path'] for req in fc.requests]
    self.assertEqual(requested, ['salt://hello_import', 'salt://macro'])
def test_include(self):
    '''
    You can also include a template that imports and uses macros.
    '''
    fc, jinja = self.get_test_saltenv()
    rendered = jinja.get_template('hello_include').render()
    self.assertEqual(rendered, 'Hey world !a b !')
    # The include chain fetches all three files, in order.
    requested = [req['path'] for req in fc.requests]
    self.assertEqual(
        requested,
        ['salt://hello_include', 'salt://hello_import', 'salt://macro'])
def test_include_context(self):
    '''
    Context variables are passed to the included template by default.
    '''
    _, jinja = self.get_test_saltenv()
    rendered = jinja.get_template('hello_include').render(a='Hi', b='Salt')
    self.assertEqual(rendered, 'Hey world !Hi Salt !')
def test_fallback(self):
    '''
    A Template with a filesystem loader is returned as fallback if the
    file is not contained in the searchpath.
    '''
    template_path = os.path.join(
        TEMPLATES_DIR, 'files', 'test', 'hello_simple')
    with salt.utils.files.fopen(template_path) as fp_:
        template_text = fp_.read()
    out = render_jinja_tmpl(
        template_text,
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, 'world\n')
def test_fallback_noloader(self):
    '''
    A Template with a filesystem loader is returned as fallback if the
    file is not contained in the searchpath.
    '''
    template_path = os.path.join(
        TEMPLATES_DIR, 'files', 'test', 'hello_import')
    with salt.utils.files.fopen(template_path) as fp_:
        template_text = fp_.read()
    out = render_jinja_tmpl(
        template_text,
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, 'Hey world !a b !\n')
def test_saltenv(self):
    '''
    If the template is within the searchpath it can import, include and
    extend other templates. The initial template is expected to be
    already cached -- get_template does not request it from the master
    again.
    '''
    fc = MockFileClient()
    with patch.object(SaltCacheLoader, 'file_client',
                      MagicMock(return_value=fc)):
        filename = os.path.join(TEMPLATES_DIR, 'files', 'test',
                                'hello_import')
        with salt.utils.files.fopen(filename) as fp_:
            out = render_jinja_tmpl(
                fp_.read(),
                # Minimal remote-client opts; file/pillar roots reused
                # from the local opts fixture.
                dict(opts={'cachedir': TEMPLATES_DIR,
                           'file_client': 'remote',
                           'file_roots': self.local_opts['file_roots'],
                           'pillar_roots': self.local_opts['pillar_roots']},
                     a='Hi', b='Salt', saltenv='test', salt=self.local_salt))
        self.assertEqual(out, 'Hey world !Hi Salt !\n')
        # Only the imported macro was requested from the (mock) master.
        self.assertEqual(fc.requests[0]['path'], 'salt://macro')
def test_macro_additional_log_for_generalexc(self):
    '''
    If we failed in a macro because of e.g. a TypeError, get more output
    from trace.
    '''
    # Regex over the rendered error: must include the macro source with
    # the failing line marked. Kept byte-for-byte; do not reflow.
    expected = 'Jinja error:.*division.*\n.*/macrogeneral\\(2\\):\n---\n\\{% macro mymacro\\(\\) -%\\}\n\\{\\{ 1/0 \\}\\} <======================\n\\{%- endmacro %\\}\n---.*'
    filename = os.path.join(TEMPLATES_DIR, 'files', 'test',
                            'hello_import_generalerror')
    fc = MockFileClient()
    with patch.object(SaltCacheLoader, 'file_client',
                      MagicMock(return_value=fc)):
        with salt.utils.files.fopen(filename) as fp_:
            self.assertRaisesRegex(
                SaltRenderError, expected, render_jinja_tmpl, fp_.read(),
                dict(opts=self.local_opts, saltenv='test',
                     salt=self.local_salt))
def test_macro_additional_log_for_undefined(self):
    '''
    If we failed in a macro because of undefined variables, get more
    output from trace.
    '''
    # Regex over the rendered error: must include the macro source with
    # the failing line marked. Kept byte-for-byte; do not reflow.
    expected = "Jinja variable 'b' is undefined\n.*/macroundefined\\(2\\):\n---\n\\{% macro mymacro\\(\\) -%\\}\n\\{\\{b.greetee\\}\\} <-- error is here <======================\n\\{%- endmacro %\\}\n---"
    filename = os.path.join(TEMPLATES_DIR, 'files', 'test',
                            'hello_import_undefined')
    fc = MockFileClient()
    with patch.object(SaltCacheLoader, 'file_client',
                      MagicMock(return_value=fc)):
        with salt.utils.files.fopen(filename) as fp_:
            self.assertRaisesRegex(
                SaltRenderError, expected, render_jinja_tmpl, fp_.read(),
                dict(opts=self.local_opts, saltenv='test',
                     salt=self.local_salt))
def test_macro_additional_log_syntaxerror(self):
    '''
    If we failed in a macro, get more output from trace.
    '''
    # Regex over the rendered error: must include the macro source with
    # the failing line marked. Kept byte-for-byte; do not reflow.
    expected = "Jinja syntax error: expected token .*end.*got '-'.*\n.*/macroerror\\(2\\):\n---\n# macro\n\\{% macro mymacro\\(greeting, greetee='world'\\) -\\} <-- error is here <======================\n\\{\\{ greeting ~ ' ' ~ greetee \\}\\} !\n\\{%- endmacro %\\}\n---.*"
    filename = os.path.join(TEMPLATES_DIR, 'files', 'test',
                            'hello_import_error')
    fc = MockFileClient()
    with patch.object(SaltCacheLoader, 'file_client',
                      MagicMock(return_value=fc)):
        with salt.utils.files.fopen(filename) as fp_:
            self.assertRaisesRegex(
                SaltRenderError, expected, render_jinja_tmpl, fp_.read(),
                dict(opts=self.local_opts, saltenv='test',
                     salt=self.local_salt))
def test_is_ip(self):
    '''
    Test the `is_ip` Jinja filter.
    '''
    cases = (
        ("{{ '192.168.0.1' | is_ip }}", u'True'),
        ("{{ 'FE80::' | is_ip }}", u'True'),
        ("{{ 'random' | is_ip }}", u'False'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_is_ipv4(self):
    '''
    Test the `is_ipv4` Jinja filter.
    '''
    cases = (
        ("{{ '192.168.0.1' | is_ipv4 }}", u'True'),
        ("{{ 'FE80::' | is_ipv4 }}", u'False'),
        ("{{ 'random' | is_ipv4 }}", u'False'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_is_ipv6(self):
    '''
    Test the `is_ipv6` Jinja filter.
    '''
    cases = (
        ("{{ '192.168.0.1' | is_ipv6 }}", u'False'),
        ("{{ 'FE80::' | is_ipv6 }}", u'True'),
        ("{{ 'random' | is_ipv6 }}", u'False'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_ipaddr(self):
    '''
    Test the `ipaddr` Jinja filter.
    '''
    cases = (
        # scalar passthrough
        ("{{ '192.168.0.1' | ipaddr }}", u'192.168.0.1'),
        # lists are filtered down to valid addresses
        ("{{ ['192.168.0.1', '172.17.17.1', 'foo', 'bar', '::'] | ipaddr | join(', ') }}",
         u'192.168.0.1, 172.17.17.1, ::'),
        # the 'multicast' option keeps only multicast addresses
        ("{{ ['224.0.0.1', 'FF01::1', '::'] | ipaddr(options='multicast') | join(', ') }}",
         u'224.0.0.1, ff01::1'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_ipv4(self):
    '''
    Test the `ipv4` Jinja filter.
    '''
    cases = (
        ("{{ '192.168.0.1' | ipv4 }}", u'192.168.0.1'),
        ("{{ ['192.168.0.1', '172.17.17.1'] | ipv4 | join(', ')}}",
         u'192.168.0.1, 172.17.17.1'),
        ("{{ 'fe80::' | ipv4 }}", u'None'),
        ("{{ 'random' | ipv4 }}", u'None'),
        ("{{ '192.168.0.1' | ipv4(options='lo') }}", u'None'),
        ("{{ '127.0.0.1' | ipv4(options='lo') }}", u'127.0.0.1'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_ipv6(self):
    '''
    Test the `ipv6` Jinja filter.
    '''
    cases = (
        ("{{ '192.168.0.1' | ipv6 }}", u'None'),
        ("{{ 'random' | ipv6 }}", u'None'),
        # addresses are normalized to their compact form
        ("{{ 'FE80:0:0::0' | ipv6 }}", u'fe80::'),
        ("{{ 'fe80::' | ipv6(options='ll') }}", u'fe80::'),
        ("{{ 'fe80::' | ipv6(options='lo') }}", u'None'),
        ("{{ ['fe80::', '192.168.0.1'] | ipv6 | join(', ') }}", u'fe80::'),
        ("{{ ['fe80::', '::'] | ipv6 | join(', ') }}", u'fe80::, ::'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_network_hosts(self):
    '''
    Test the `network_hosts` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ '192.168.0.1/30' | network_hosts | join(', ') }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'192.168.0.1, 192.168.0.2')
def test_network_size(self):
    '''
    Test the `network_size` Jinja filter.
    '''
    cases = (
        ("{{ '192.168.0.1' | network_size }}", u'1'),
        ("{{ '192.168.0.1/8' | network_size }}", u'16777216'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_http_query(self):
    '''
    Test the `http_query` Jinja filter across all supported backends.
    '''
    for backend in ('requests', 'tornado', 'urllib2'):
        tmpl = ("{{ 'http://www.google.com' | http_query(backend='"
                + backend + "') }}")
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        msg = 'Failed with backend: {}'.format(backend)
        self.assertIsInstance(rendered, six.text_type, msg)
        # The filter renders a dict repr; parse it back to inspect it.
        dict_reply = ast.literal_eval(rendered)
        self.assertIsInstance(dict_reply, dict, msg)
        self.assertIsInstance(dict_reply['body'], six.string_types, msg)
def test_to_bool(self):
    '''
    Test the `to_bool` Jinja filter.
    '''
    cases = (
        ('{{ 1 | to_bool }}', u'True'),
        ("{{ 'True' | to_bool }}", u'True'),
        ('{{ 0 | to_bool }}', u'False'),
        ("{{ 'Yes' | to_bool }}", u'True'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_quote(self):
    '''
    Test the `quote` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'random' | quote }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'random')
def test_regex_search(self):
    '''
    Test the `regex_search` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'abcdefabcdef' | regex_search('BC(.*)', ignorecase=True) }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    # search returns the groups tuple, rendered as its repr
    self.assertEqual(out, u"('defabcdef',)")
def test_regex_match(self):
    '''
    Test the `regex_match` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'abcdefabcdef' | regex_match('BC(.*)', ignorecase=True)}}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    # match is anchored at the start, so 'BC' does not match here
    self.assertEqual(out, u'None')
def test_regex_replace(self):
    '''
    Test the `regex_replace` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'lets replace spaces' | regex_replace('\\s+', '__') }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'lets__replace__spaces')
def test_uuid(self):
    '''
    Test the `uuid` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'random' | uuid }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    # The filter is deterministic for a given input string.
    self.assertEqual(out, u'3652b285-26ad-588e-a5dc-c2ee65edc804')
def test_min(self):
    '''
    Test the `min` Jinja filter.
    '''
    out = render_jinja_tmpl(
        '{{ [1, 2, 3] | min }}',
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'1')
def test_max(self):
    '''
    Test the `max` Jinja filter.
    '''
    out = render_jinja_tmpl(
        '{{ [1, 2, 3] | max }}',
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'3')
def test_avg(self):
    '''
    Test the `avg` Jinja filter.
    '''
    out = render_jinja_tmpl(
        '{{ [1, 2, 3] | avg }}',
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'2.0')
def test_union(self):
    '''
    Test the `union` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ [1, 2, 3] | union([2, 3, 4]) | join(', ') }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'1, 2, 3, 4')
def test_intersect(self):
    '''
    Test the `intersect` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ [1, 2, 3] | intersect([2, 3, 4]) | join(', ') }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'2, 3')
def test_difference(self):
    '''
    Test the `difference` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ [1, 2, 3] | difference([2, 3, 4]) | join(', ') }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'1')
def test_symmetric_difference(self):
    '''
    Test the `symmetric_difference` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ [1, 2, 3] | symmetric_difference([2, 3, 4]) | join(', ') }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'1, 4')
def test_md5(self):
    '''
    Test the `md5` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'random' | md5 }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(out, u'7ddf32e17a6ac5ce04a8ecbf782ca509')
def test_sha256(self):
    '''
    Test the `sha256` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'random' | sha256 }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(
        out,
        u'a441b15fe9a3cf56661190a0b93b9dec7d04127288cc87250967cf3b52894d11')
def test_sha512(self):
    '''
    Test the `sha512` Jinja filter.
    '''
    out = render_jinja_tmpl(
        "{{ 'random' | sha512 }}",
        dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
    self.assertEqual(
        out,
        six.text_type('811a90e1c8e86c7b4c0eef5b2c0bf0ec1b19c4b1b5a242e6455be93787cb473cb7bc9b0fdeb960d00d5c6881c2094dd63c5c900ce9057255e2a4e271fc25fef1'))
def test_hmac(self):
    '''
    Test the `hmac` Jinja filter.
    '''
    cases = (
        # wrong signature -> False
        ("{{ 'random' | hmac('secret', 'blah') }}", u'False'),
        # valid signature -> True
        ("{{ 'get salted' | hmac('shared secret', 'eBWf9bstXg+NiP5AOwppB5HMvZiYMPzEM9W5YMm/AmQ=') }}",
         u'True'),
    )
    for tmpl, expected in cases:
        rendered = render_jinja_tmpl(
            tmpl,
            dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
        self.assertEqual(rendered, expected)
def test_base64_encode(self):
    '''
    Test the `base64_encode` Jinja filter.
    '''
    context = dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)
    result = render_jinja_tmpl("{{ 'random' | base64_encode }}", context)
    self.assertEqual(result, u'cmFuZG9t')
def test_base64_decode(self):
    '''
    Test the `base64_decode` Jinja filter.
    '''
    context = dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)
    result = render_jinja_tmpl("{{ 'cmFuZG9t' | base64_decode }}", context)
    self.assertEqual(result, u'random')
def test_normlookup(self):
    '''
    Sanity-check the normal dictionary-lookup syntax for our stub function.
    '''
    template = "Hello, {{ salt['mocktest.ping']() }}."
    self.assertEqual(self.render(template), 'Hello, True.')
def test_dotlookup(self):
    '''
    Check calling a stub function using awesome dot-notation.
    '''
    template = 'Hello, {{ salt.mocktest.ping() }}.'
    self.assertEqual(self.render(template), 'Hello, True.')
def test_shadowed_dict_method(self):
    '''
    Check calling a stub function with a name that shadows a ``dict``
    method name (``get``).
    '''
    template = "Hello, {{ salt.mockgrains.get('id') }}."
    self.assertEqual(self.render(template), 'Hello, jerry.')
'Test that core.os_data() reports os_family Debian for a "Debian GNU/Linux" distro string'
# NOTE(review): the original docstring ("Test to return a list of all enabled
# services") was a copy-paste error; this test checks the Debian os_family grain.
@skipIf((not salt.utils.platform.is_linux()), 'System is not Linux') def test_gnu_slash_linux_in_os_name(self):
# Every filesystem probe, the lsb_release import, and all helper grain
# collectors are mocked out so that os_data() only sees the patched
# linux_distribution() tuple ('Debian GNU/Linux', '8.3', ''); the test then
# asserts the derived 'os_family' grain is 'Debian'.  The __import__ patch
# targets '__builtin__' on PY2 and 'builtins' on PY3 and forces an
# ImportError for lsb_release so the /etc parsing path is exercised.
_path_exists_map = {'/proc/1/cmdline': False} _path_isfile_map = {} _cmd_run_map = {'dpkg --print-architecture': 'amd64'} path_exists_mock = MagicMock(side_effect=(lambda x: _path_exists_map[x])) path_isfile_mock = MagicMock(side_effect=(lambda x: _path_isfile_map.get(x, False))) cmd_run_mock = MagicMock(side_effect=(lambda x: _cmd_run_map[x])) empty_mock = MagicMock(return_value={}) orig_import = __import__ if six.PY2: built_in = '__builtin__' else: built_in = 'builtins' def _import_mock(name, *args): if (name == 'lsb_release'): raise ImportError('No module named lsb_release') return orig_import(name, *args) with patch.object(salt.utils.platform, 'is_proxy', MagicMock(return_value=False)): with patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)): with patch.object(os.path, 'exists', path_exists_mock): with patch('{0}.__import__'.format(built_in), side_effect=_import_mock): with patch.object(os.path, 'isfile', path_isfile_mock): distro_mock = MagicMock(return_value=('Debian GNU/Linux', '8.3', '')) with patch.object(core, 'linux_distribution', distro_mock): with patch.object(core, '_linux_cpudata', empty_mock): with patch.object(core, '_linux_gpu_data', empty_mock): with patch.object(core, '_memdata', empty_mock): with patch.object(core, '_hw_data', empty_mock): with patch.object(core, '_virtual', empty_mock): with patch.object(core, '_ps', empty_mock): with patch.dict(core.__salt__, {'cmd.run': cmd_run_mock}): os_grains = core.os_data() self.assertEqual(os_grains.get('os_family'), 'Debian')
'Test if \'os\' grain is parsed from CPE_NAME of /etc/os-release'
@skipIf((not salt.utils.platform.is_linux()), 'System is not Linux') def test_suse_os_from_cpe_data(self):
# /etc/os-release parsing is mocked to return a SLES 12 SP1 entry whose
# CPE_NAME is 'cpe:/o:suse:sles:12:sp1'; with linux_distribution() patched
# to a SLES tuple and all other grain collectors stubbed out, os_data()
# must derive os_family 'Suse' and os 'SUSE' from the CPE data.
_path_exists_map = {'/proc/1/cmdline': False} _path_isfile_map = {'/etc/os-release': True} _os_release_map = {'NAME': 'SLES', 'VERSION': '12-SP1', 'VERSION_ID': '12.1', 'PRETTY_NAME': 'SUSE Linux Enterprise Server 12 SP1', 'ID': 'sles', 'ANSI_COLOR': '0;32', 'CPE_NAME': 'cpe:/o:suse:sles:12:sp1'} path_exists_mock = MagicMock(side_effect=(lambda x: _path_exists_map[x])) path_isfile_mock = MagicMock(side_effect=(lambda x: _path_isfile_map.get(x, False))) empty_mock = MagicMock(return_value={}) osarch_mock = MagicMock(return_value='amd64') os_release_mock = MagicMock(return_value=_os_release_map) orig_import = __import__ if six.PY2: built_in = '__builtin__' else: built_in = 'builtins' def _import_mock(name, *args): if (name == 'lsb_release'): raise ImportError('No module named lsb_release') return orig_import(name, *args) with patch.object(salt.utils.platform, 'is_proxy', MagicMock(return_value=False)): with patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)): with patch.object(os.path, 'exists', path_exists_mock): with patch('{0}.__import__'.format(built_in), side_effect=_import_mock): with patch.object(os.path, 'isfile', path_isfile_mock): with patch.object(core, '_parse_os_release', os_release_mock): distro_mock = MagicMock(return_value=('SUSE Linux Enterprise Server ', '12', 'x86_64')) with patch.object(core, 'linux_distribution', distro_mock): with patch.object(core, '_linux_gpu_data', empty_mock): with patch.object(core, '_hw_data', empty_mock): with patch.object(core, '_linux_cpudata', empty_mock): with patch.object(core, '_virtual', empty_mock): with patch.dict(core.__salt__, {'cmd.run': osarch_mock}): os_grains = core.os_data() self.assertEqual(os_grains.get('os_family'), 'Suse') self.assertEqual(os_grains.get('os'), 'SUSE')
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_suse_os_grains_sles11sp3(self):
    '''
    Test if OS grains are parsed correctly in SLES 11 SP3.
    '''
    # SLES 11 predates /etc/os-release; grains come from /etc/SuSE-release.
    expectations = {
        'suse_release_file': 'SUSE Linux Enterprise Server 11 (x86_64)\n'
                             'VERSION = 11\n'
                             'PATCHLEVEL = 3\n',
        'oscodename': 'SUSE Linux Enterprise Server 11 SP3',
        'osfullname': 'SLES',
        'osrelease': '11.3',
        'osrelease_info': [11, 3],
        'osmajorrelease': 11,
        'files': ['/etc/SuSE-release'],
    }
    self._run_suse_os_grains_tests(expectations)
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_suse_os_grains_sles11sp4(self):
    '''
    Test if OS grains are parsed correctly in SLES 11 SP4.
    '''
    expectations = {
        'os_release_file': {
            'NAME': 'SLES',
            'VERSION': '11.4',
            'VERSION_ID': '11.4',
            'PRETTY_NAME': 'SUSE Linux Enterprise Server 11 SP4',
            'ID': 'sles',
            'ANSI_COLOR': '0;32',
            'CPE_NAME': 'cpe:/o:suse:sles:11:4',
        },
        'oscodename': 'SUSE Linux Enterprise Server 11 SP4',
        'osfullname': 'SLES',
        'osrelease': '11.4',
        'osrelease_info': [11, 4],
        'osmajorrelease': 11,
        'files': ['/etc/os-release'],
    }
    self._run_suse_os_grains_tests(expectations)
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_suse_os_grains_sles12(self):
    '''
    Test if OS grains are parsed correctly in SLES 12.
    '''
    expectations = {
        'os_release_file': {
            'NAME': 'SLES',
            'VERSION': '12',
            'VERSION_ID': '12',
            'PRETTY_NAME': 'SUSE Linux Enterprise Server 12',
            'ID': 'sles',
            'ANSI_COLOR': '0;32',
            'CPE_NAME': 'cpe:/o:suse:sles:12',
        },
        'oscodename': 'SUSE Linux Enterprise Server 12',
        'osfullname': 'SLES',
        'osrelease': '12',
        'osrelease_info': [12],
        'osmajorrelease': 12,
        'files': ['/etc/os-release'],
    }
    self._run_suse_os_grains_tests(expectations)
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_suse_os_grains_sles12sp1(self):
    '''
    Test if OS grains are parsed correctly in SLES 12 SP1.
    '''
    expectations = {
        'os_release_file': {
            'NAME': 'SLES',
            'VERSION': '12-SP1',
            'VERSION_ID': '12.1',
            'PRETTY_NAME': 'SUSE Linux Enterprise Server 12 SP1',
            'ID': 'sles',
            'ANSI_COLOR': '0;32',
            'CPE_NAME': 'cpe:/o:suse:sles:12:sp1',
        },
        'oscodename': 'SUSE Linux Enterprise Server 12 SP1',
        'osfullname': 'SLES',
        'osrelease': '12.1',
        'osrelease_info': [12, 1],
        'osmajorrelease': 12,
        'files': ['/etc/os-release'],
    }
    self._run_suse_os_grains_tests(expectations)
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_suse_os_grains_opensuse_leap_42_1(self):
    '''
    Test if OS grains are parsed correctly in openSUSE Leap 42.1.
    '''
    expectations = {
        'os_release_file': {
            'NAME': 'openSUSE Leap',
            'VERSION': '42.1',
            'VERSION_ID': '42.1',
            'PRETTY_NAME': 'openSUSE Leap 42.1 (x86_64)',
            'ID': 'opensuse',
            'ANSI_COLOR': '0;32',
            'CPE_NAME': 'cpe:/o:opensuse:opensuse:42.1',
        },
        'oscodename': 'openSUSE Leap 42.1 (x86_64)',
        'osfullname': 'Leap',
        'osrelease': '42.1',
        'osrelease_info': [42, 1],
        'osmajorrelease': 42,
        'files': ['/etc/os-release'],
    }
    self._run_suse_os_grains_tests(expectations)
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_suse_os_grains_tumbleweed(self):
    '''
    Test if OS grains are parsed correctly in openSUSE Tumbleweed.
    '''
    # Tumbleweed is rolling-release; the snapshot date acts as the version.
    expectations = {
        'os_release_file': {
            'NAME': 'openSUSE',
            'VERSION': 'Tumbleweed',
            'VERSION_ID': '20160504',
            'PRETTY_NAME': 'openSUSE Tumbleweed (20160504) (x86_64)',
            'ID': 'opensuse',
            'ANSI_COLOR': '0;32',
            'CPE_NAME': 'cpe:/o:opensuse:opensuse:20160504',
        },
        'oscodename': 'openSUSE Tumbleweed (20160504) (x86_64)',
        'osfullname': 'Tumbleweed',
        'osrelease': '20160504',
        'osrelease_info': [20160504],
        'osmajorrelease': 20160504,
        'files': ['/etc/os-release'],
    }
    self._run_suse_os_grains_tests(expectations)
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
def test_ubuntu_os_grains(self):
    '''
    Test if OS grains are parsed correctly in Ubuntu Xenial Xerus.
    '''
    expectations = {
        'os_release_file': {
            'NAME': 'Ubuntu',
            'VERSION': '16.04.1 LTS (Xenial Xerus)',
            'VERSION_ID': '16.04',
            'PRETTY_NAME': '',
            'ID': 'ubuntu',
        },
        'oscodename': 'xenial',
        'osfullname': 'Ubuntu',
        'osrelease': '16.04',
        'osrelease_info': [16, 4],
        'osmajorrelease': 16,
        'osfinger': 'Ubuntu-16.04',
    }
    self._run_ubuntu_os_grains_tests(expectations)
def test_genrepo(self):
    '''
    Test the winrepo.genrepo runner.
    '''
    # Drop a winrepo SLS file into the test repo directory, then verify
    # the generated repo data matches the expected structure.
    sls_path = os.path.join(self.winrepo_sls_dir, 'wireshark.sls')
    with salt.utils.files.fopen(sls_path, 'w') as sls_fh:
        sls_fh.write(_WINREPO_SLS)
    self.assertEqual(winrepo.genrepo(), _WINREPO_GENREPO_DATA)
def test_pattern_list_expander(self):
    '''
    Ensure _expand_pattern_lists works as intended:
    - Expand list-valued patterns
    - Do not change non-list-valued tokens
    '''
    # Map each input pattern to the exact list of expansions we expect.
    cases = {
        'no-tokens-to-replace': ['no-tokens-to-replace'],
        'single-dict:{minion}': ['single-dict:{minion}'],
        'single-list:{grains[roles]}': ['single-list:web', 'single-list:database'],
        'multiple-lists:{grains[roles]}+{grains[aux]}': [
            'multiple-lists:web+foo',
            'multiple-lists:web+bar',
            'multiple-lists:database+foo',
            'multiple-lists:database+bar',
        ],
        'single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}': [
            'single-list-with-dicts:{grains[id]}+web+{grains[id]}',
            'single-list-with-dicts:{grains[id]}+database+{grains[id]}',
        ],
        'deeply-nested-list:{grains[deep][foo][bar][baz]}': [
            'deeply-nested-list:hello',
            'deeply-nested-list:world',
        ],
    }
    mappings = {'minion': self.grains['id'], 'grains': self.grains}
    for case, correct_output in six.iteritems(cases):
        output = vault._expand_pattern_lists(case, **mappings)
        diff = set(output).symmetric_difference(set(correct_output))
        if diff:
            log.debug('Test {0} failed'.format(case))
            # BUG FIX: log the expected value under 'Expected' and the
            # actual value under 'Got' (the arguments were swapped before).
            log.debug('Expected:\n DCTB {0}\nGot\n DCTB {1}'.format(correct_output, output))
            log.debug('Difference:\n DCTB {0}'.format(diff))
        self.assertEqual(output, correct_output)
@skipIf(NO_MOCK, NO_MOCK_REASON)
def test_get_policies(self):
    '''
    Ensure _get_policies works as intended, including expansion of lists.
    '''
    cases = {
        'no-tokens-to-replace': ['no-tokens-to-replace'],
        'single-dict:{minion}': ['single-dict:test-minion'],
        'single-list:{grains[roles]}': ['single-list:web', 'single-list:database'],
        'multiple-lists:{grains[roles]}+{grains[aux]}': [
            'multiple-lists:web+foo',
            'multiple-lists:web+bar',
            'multiple-lists:database+foo',
            'multiple-lists:database+bar',
        ],
        'single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}': [
            'single-list-with-dicts:test-minion+web+test-minion',
            'single-list-with-dicts:test-minion+database+test-minion',
        ],
        'deeply-nested-list:{grains[deep][foo][bar][baz]}': [
            'deeply-nested-list:hello',
            'deeply-nested-list:world',
        ],
        # Unknown tokens must not raise; they just produce no policies.
        'should-not-cause-an-exception,but-result-empty:{foo}': [],
        # Expanded policies are lowercased.
        'Case-Should-Be-Lowered:{grains[mixedcase]}': ['case-should-be-lowered:up-low-up'],
    }
    with patch('salt.utils.minions.get_minion_data',
               MagicMock(return_value=(None, self.grains, None))):
        for case, correct_output in six.iteritems(cases):
            test_config = {'policies': [case]}
            output = vault._get_policies('test-minion', test_config)
            diff = set(output).symmetric_difference(set(correct_output))
            if diff:
                log.debug('Test {0} failed'.format(case))
                # BUG FIX: log the expected value under 'Expected' and the
                # actual value under 'Got' (arguments were swapped before).
                log.debug('Expected:\n DCTB {0}\nGot\n DCTB {1}'.format(correct_output, output))
                log.debug('Difference:\n DCTB {0}'.format(diff))
            self.assertEqual(output, correct_output)
'Test the jobs.list_jobs runner with search_target args'
def test_list_jobs_with_search_target(self):
# Two cached jobs: one targeted at node-1-1.com (glob) and one at a list
# target containing both node-1-1.com and node-1-2.com.  MasterMinion is
# replaced with a stub whose local_cache.get_jids returner serves the
# in-memory cache, so list_jobs() filtering can be tested without disk I/O.
# Expectations: no filter and glob filters matching both nodes return
# everything; a single-host filter returns only the jobs that targeted
# that host; an unknown host returns an empty dict.
mock_jobs_cache = {'20160524035503086853': {'Arguments': [], 'Function': 'test.ping', 'StartTime': '2016, May 24 03:55:03.086853', 'Target': 'node-1-1.com', 'Target-type': 'glob', 'User': 'root'}, '20160524035524895387': {'Arguments': [], 'Function': 'test.ping', 'StartTime': '2016, May 24 03:55:24.895387', 'Target': ['node-1-2.com', 'node-1-1.com'], 'Target-type': 'list', 'User': 'sudo_ubuntu'}} def return_mock_jobs(): return mock_jobs_cache class MockMasterMinion(object, ): returners = {'local_cache.get_jids': return_mock_jobs} def __init__(self, *args, **kwargs): pass returns = {'all': mock_jobs_cache, 'node-1-1.com': mock_jobs_cache, 'node-1-2.com': {'20160524035524895387': mock_jobs_cache['20160524035524895387']}, 'non-existant': {}} with patch.object(salt.minion, 'MasterMinion', MockMasterMinion): self.assertEqual(jobs.list_jobs(), returns['all']) self.assertEqual(jobs.list_jobs(search_target=['node-1-1*', 'node-1-2*']), returns['all']) self.assertEqual(jobs.list_jobs(search_target='node-1-1.com'), returns['node-1-1.com']) self.assertEqual(jobs.list_jobs(search_target='node-1-2.com'), returns['node-1-2.com']) self.assertEqual(jobs.list_jobs(search_target='non-existant'), returns['non-existant'])
def test_grains(self):
    '''
    Test the cache.grains runner.
    '''
    # With an explicit minion target and no cached data, expect an empty dict.
    self.assertEqual(cache.grains(minion=['Larry']), {})

    # With MasterPillarUtil replaced by a stub, cache.grains() should return
    # whatever get_minion_grains() yields.
    grain_payload = 'grain stuff'

    class FakeMaster(object):
        def __init__(self, *args, **kwargs):
            pass

        def get_minion_grains(self):
            return grain_payload

    with patch.object(salt.utils.master, 'MasterPillarUtil', FakeMaster):
        self.assertEqual(cache.grains(), grain_payload)
def test_no_queue_present(self):
    '''
    Ensure the SQS engine logs a warning when the queue is not present.
    '''
    with patch('salt.engines.sqs_events.log') as mock_logging:
        with patch('time.sleep', return_value=None):
            fire_mock = MagicMock(return_value=True)
            # Pass a missing (None) queue; the engine must warn and never
            # try to fetch messages.
            sqs_events._process_queue(None, 'mysqs', fire_mock)
            self.assertTrue(mock_logging.warning.called)
            self.assertFalse(self.mock_sqs.queue.Queue().get_messages.called)
def test_minion_message_fires(self):
    '''
    Test that the SQS engine correctly gets and fires messages on a minion.
    '''
    messages = [self.sample_msg(), self.sample_msg()]
    self.mock_sqs.queue.Queue().get_messages.return_value = messages
    queue = self.mock_sqs.queue.Queue()
    event_mock = MagicMock(return_value=True)
    fire_mock = MagicMock(return_value=True)
    with patch.dict(sqs_events.__salt__, {'event.send': event_mock}):
        sqs_events._process_queue(queue, 'mysqs', fire_mock)
        # The queue must have been polled and every message deleted.
        self.assertTrue(self.mock_sqs.queue.Queue().get_messages.called)
        self.assertTrue(all(msg.delete.called for msg in messages))
def test_master_message_fires(self):
    '''
    Test that the SQS engine correctly gets and fires messages on the master.
    '''
    messages = [self.sample_msg(), self.sample_msg()]
    self.mock_sqs.queue.Queue().get_messages.return_value = messages
    queue = self.mock_sqs.queue.Queue()
    fire_mock = MagicMock(return_value=True)
    sqs_events._process_queue(queue, 'mysqs', fire_mock)
    # NOTE(review): assertTrue's second argument is the failure-message slot;
    # passing len(messages) here looks like it was intended as an
    # assertEqual on the call count -- behavior preserved as-is.
    self.assertTrue(self.mock_sqs.queue.Queue().get_messages.called, len(messages))
    self.assertTrue(fire_mock.called, len(messages))
@skipIf(not yamlex.available, SKIP_MESSAGE % 'sls')
def test_sls_repr(self):
    '''
    Ensure that obj __repr__ and __str__ methods are yaml friendly.
    '''
    def roundtrip(obj):
        # Serialize then deserialize so we get yamlex's own types back.
        return yamlex.deserialize(yamlex.serialize(obj))

    sls_obj = roundtrip(OrderedDict([('foo', 'bar'), ('baz', 'qux')]))
    assert str(sls_obj) == '{foo: bar, baz: qux}'
    assert repr(sls_obj) == '{foo: bar, baz: qux}'
    # Scalar values keep their quoting in both representations.
    assert str(sls_obj['foo']) == '"bar"'
    assert repr(sls_obj['foo']) == '"bar"'
def __init__(self):
    '''
    Initialize the mock logger with an empty message store.

    :return:
    '''
    # Delegate to reset() so construction and clearing share one code path.
    self.reset()
def reset(self):
    '''
    Clear all collected log messages.

    :return:
    '''
    self.messages = []
def info(self, data):
    '''
    Collect *data* from the logger as an info-level message.

    :param data: message text
    :return:
    '''
    self.messages.append({'type': 'info', 'message': data})
def warning(self, data):
    '''
    Collect *data* from the logger as a warning-level message.

    :param data: message text
    :return:
    '''
    self.messages.append({'type': 'warning', 'message': data})
def has_message(self, msg, log_type=None):
    '''
    Check whether any collected log entry contains *msg*.

    :param msg: substring to search for in collected messages
    :param log_type: if given, only entries of this type are considered
    :return: True if a matching entry exists, otherwise False
    '''
    for entry in self.messages:
        type_matches = (entry['type'] == log_type) or not log_type
        if type_matches and msg in entry['message']:
            return True
    return False
'Verify if Master is verifying hash_type config option. :return:'
def test_master_daemon_hash_type_verified(self):
# Runs exec_test in a child process (via self._multiproc_exec_test) so the
# daemon start-up code cannot pollute the test process.  A Master daemon
# stub is built per algorithm with its lifecycle methods mocked out; the
# LoggerMock collects what Master.start() logs.  Weak algorithms
# (md5, sha1) must produce a 'Do not use <alg>' warning, strong ones
# (sha224..sha512) must not; the combined boolean result is sent back
# through the pipe.
def exec_test(child_pipe): def _create_master(): '\n Create master instance\n :return:\n ' obj = daemons.Master() obj.config = {'user': 'dummy', 'hash_type': alg} for attr in ['start_log_info', 'prepare', 'shutdown', 'master']: setattr(obj, attr, MagicMock()) return obj _logger = LoggerMock() ret = True with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)): with patch('salt.cli.daemons.log', _logger): for alg in ['md5', 'sha1']: _create_master().start() ret = (ret and _logger.messages and _logger.has_message('Do not use {alg}'.format(alg=alg), log_type='warning')) _logger.reset() for alg in ['sha224', 'sha256', 'sha384', 'sha512']: _create_master().start() ret = (ret and _logger.messages and (not _logger.has_message('Do not use '))) child_pipe.send(ret) child_pipe.close() self._multiproc_exec_test(exec_test)