Dataset columns: desc (string, 3–26.7k chars), decl (string, 11–7.89k chars), bodies (string, 8–553k chars)
'Ensure the proxy can ping'
def test_can_it_ping(self):
    ret = self.run_function('test.ping', minion_tgt='proxytest')
    self.assertEqual(ret, True)
'Package test 1, really just tests that the virtual function capability is working OK.'
def test_list_pkgs(self):
    ret = self.run_function('pkg.list_pkgs', minion_tgt='proxytest')
    self.assertIn('coreutils', ret)
    self.assertIn('apache', ret)
    self.assertIn('redbull', ret)
'Package test 2, really just tests that the virtual function capability is working OK.'
def test_install_pkgs(self):
    ret = self.run_function('pkg.install', ['thispkg'], minion_tgt='proxytest')
    self.assertEqual(ret['thispkg'], '1.0')
    ret = self.run_function('pkg.list_pkgs', minion_tgt='proxytest')
    self.assertEqual(ret['apache'], '2.4')
    self.assertEqual(ret['redbull'], '999.99')
    self.assertEqual(ret['thispkg'], '1.0')
'This test is currently hard-coded to /tmp to work around a seeming inability to load custom modules inside the pydsl renderers. This is a FIXME.'
def test_rendering_includes(self):
    self.run_function('state.sls', ['pydsl.aaa'])
    expected = textwrap.dedent(' X1\n X2\n X3\n Y1 extended\n Y2 extended\n Y3\n hello red 1\n hello green 2\n hello blue 3\n ')
    if salt.utils.platform.is_windows():
        expected = 'X1 \r\nX2 \r\nX3 \r\nY1 extended \r\nY2 extended \r\nY3 \r\nhello red 1 \r\nhello green 2 \r\nhello blue 3 \r\n'
    with salt.utils.files.fopen('/tmp/output', 'r') as f:
        ret = f.read()
    os.remove('/tmp/output')
    self.assertEqual(sorted(ret), sorted(expected))
'Test using a single ext_pillar repo'
def test_single_source(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n ')
    self.assertEqual(ret, {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}})
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists disabled.'
def test_multiple_sources_master_dev_no_merge_lists(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, {'branch': 'dev', 'mylist': ['dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev'], 'nested_dict': {'master': True, 'dev': True}}})
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists disabled.'
def test_multiple_sources_dev_master_no_merge_lists(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master'], 'nested_dict': {'master': True, 'dev': True}}})
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists enabled.'
def test_multiple_sources_master_dev_merge_lists(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, {'branch': 'dev', 'mylist': ['master', 'dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master', 'dev'], 'nested_dict': {'master': True, 'dev': True}}})
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists enabled.'
def test_multiple_sources_dev_master_merge_lists(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, {'branch': 'master', 'mylist': ['dev', 'master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev', 'master'], 'nested_dict': {'master': True, 'dev': True}}})
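The four ordering tests above all hinge on the same merge behavior: later repos override earlier ones, and pillar_merge_lists decides whether lists are concatenated or replaced. The sketch below is an illustration only in plain Python, not Salt's actual git_pillar merge code; the merge() helper and sample data are hypothetical.

import copy

def merge(dest, upd, merge_lists=False):
    # Recursively fold ``upd`` into ``dest``: nested dicts are merged,
    # lists are concatenated when merge_lists=True, otherwise replaced.
    for key, val in upd.items():
        if isinstance(val, dict) and isinstance(dest.get(key), dict):
            merge(dest[key], val, merge_lists)
        elif merge_lists and isinstance(val, list) and isinstance(dest.get(key), list):
            dest[key] = dest[key] + val
        else:
            dest[key] = val
    return dest

master = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True}}
dev = {'branch': 'dev', 'mylist': ['dev'], 'mydict': {'dev': True}}

# master repo first, dev second, lists not merged: dev wins every conflict
print(merge(copy.deepcopy(master), copy.deepcopy(dev)))
# {'branch': 'dev', 'mylist': ['dev'], 'mydict': {'master': True, 'dev': True}}

# same ordering with list merging enabled: lists concatenate in repo order
print(merge(copy.deepcopy(master), copy.deepcopy(dev), merge_lists=True))
# {'branch': 'dev', 'mylist': ['master', 'dev'], 'mydict': {'master': True, 'dev': True}}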
'Test using pillarenv to restrict results to those from a single branch'
def test_multiple_sources_with_pillarenv(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}})
'Test with git_pillar_includes enabled. The top_only branch references an SLS file from the master branch, so we should see the key from that SLS file (included_pillar) in the compiled pillar data.'
def test_includes_enabled(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, 'included_pillar': True})
'Test with git_pillar_includes disabled. The top_only branch references an SLS file from the master branch, but since includes are disabled it will not find the SLS file and the "included_pillar" key should not be present in the compiled pillar data. We should instead see an error message in the compiled data.'
def test_includes_disabled(self):
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: gitpython\n git_pillar_includes: False\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, '_errors': ["Specified SLS 'bar' in environment 'base' is not available on the salt master"]})
'Create and start the webserver'
@classmethod
def setUpClass(cls):
    super(TestGitPythonAuthenticatedHTTP, cls).setUpClass()
    cls.url = 'http://{username}:{password}@127.0.0.1:{port}/repo.git'.format(username=cls.username, password=cls.password, port=cls.nginx_port)
    cls.ext_opts['url'] = cls.url
    cls.ext_opts['username'] = cls.username
    cls.ext_opts['password'] = cls.password
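For readability, the repo URL assembled in setUpClass above has the following shape; the credential and port values here are placeholders, not the suite's real fixtures.

# Placeholder values for illustration only; the test class supplies its
# own username, password, and nginx_port at runtime.
url = 'http://{username}:{password}@127.0.0.1:{port}/repo.git'.format(
    username='gituser', password='s3cret', port=8080)
print(url)  # http://gituser:s3cret@127.0.0.1:8080/repo.git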
'Test using a single ext_pillar repo'
@requires_system_grains
def test_single_source(self, grains):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists disabled.'
@requires_system_grains
def test_multiple_sources_master_dev_no_merge_lists(self, grains):
    expected = {'branch': 'dev', 'mylist': ['dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - dev {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - dev {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists disabled.'
@requires_system_grains
def test_multiple_sources_dev_master_no_merge_lists(self, grains):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists enabled.'
@requires_system_grains
def test_multiple_sources_master_dev_merge_lists(self, grains):
    expected = {'branch': 'dev', 'mylist': ['master', 'dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master', 'dev'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - dev {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - dev {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists enabled.'
@requires_system_grains
def test_multiple_sources_dev_master_merge_lists(self, grains):
    expected = {'branch': 'master', 'mylist': ['dev', 'master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev', 'master'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n ')
    self.assertEqual(ret, expected)
'Test using pillarenv to restrict results to those from a single branch'
@requires_system_grains
def test_multiple_sources_with_pillarenv(self, grains):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - dev {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - dev {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - passphrase: {passphrase}\n ')
    self.assertEqual(ret, expected)
'Test with git_pillar_includes enabled. The top_only branch references an SLS file from the master branch, so we should see the "included_pillar" key from that SLS file in the compiled pillar data.'
@requires_system_grains
def test_includes_enabled(self, grains):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, 'included_pillar': True}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - top_only {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - env: base\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - top_only {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - env: base\n ')
    self.assertEqual(ret, expected)
'Test with git_pillar_includes disabled. The top_only branch references an SLS file from the master branch, but since includes are disabled it will not find the SLS file and the "included_pillar" key should not be present in the compiled pillar data. We should instead see an error message in the compiled data.'
@requires_system_grains
def test_includes_disabled(self, grains):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, '_errors': ["Specified SLS 'bar' in environment 'base' is not available on the salt master"]}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n git_pillar_pubkey: {pubkey_nopass}\n git_pillar_privkey: {privkey_nopass}\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - top_only {url}:\n - pubkey: {pubkey_nopass}\n - privkey: {privkey_nopass}\n - env: base\n ')
    self.assertEqual(ret, expected)
    if grains['os_family'] == 'Debian':
        # the remaining cases use passphrase-protected keys and are skipped on Debian-family hosts
        return
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n git_pillar_pubkey: {pubkey_withpass}\n git_pillar_privkey: {privkey_withpass}\n git_pillar_passphrase: {passphrase}\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - top_only {url}:\n - pubkey: {pubkey_withpass}\n - privkey: {privkey_withpass}\n - passphrase: {passphrase}\n - env: base\n ')
    self.assertEqual(ret, expected)
'Test using a single ext_pillar repo'
def test_single_source(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists disabled.'
def test_multiple_sources_master_dev_no_merge_lists(self):
    expected = {'branch': 'dev', 'mylist': ['dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists disabled.'
def test_multiple_sources_dev_master_no_merge_lists(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists enabled.'
def test_multiple_sources_master_dev_merge_lists(self):
    expected = {'branch': 'dev', 'mylist': ['master', 'dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master', 'dev'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists enabled.'
def test_multiple_sources_dev_master_merge_lists(self):
    expected = {'branch': 'master', 'mylist': ['dev', 'master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev', 'master'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
'Test using pillarenv to restrict results to those from a single branch'
def test_multiple_sources_with_pillarenv(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
'Test with git_pillar_includes enabled. The top_only branch references an SLS file from the master branch, so we should see the "included_pillar" key from that SLS file in the compiled pillar data.'
def test_includes_enabled(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, 'included_pillar': True}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
'Test with git_pillar_includes disabled. The top_only branch references an SLS file from the master branch, but since includes are disabled it will not find the SLS file and the "included_pillar" key should not be present in the compiled pillar data. We should instead see an error message in the compiled data.'
def test_includes_disabled(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, '_errors': ["Specified SLS 'bar' in environment 'base' is not available on the salt master"]}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
'Test using a single ext_pillar repo'
def test_single_source(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists disabled.'
def test_multiple_sources_master_dev_no_merge_lists(self):
    expected = {'branch': 'dev', 'mylist': ['dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - dev {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists disabled.'
def test_multiple_sources_dev_master_no_merge_lists(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: False\n ext_pillar:\n - git:\n - dev {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the master branch followed by dev, and with pillar_merge_lists enabled.'
def test_multiple_sources_master_dev_merge_lists(self):
    expected = {'branch': 'dev', 'mylist': ['master', 'dev'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['master', 'dev'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - dev {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n ')
    self.assertEqual(ret, expected)
'Test using two ext_pillar dirs. Since all git_pillar repos are merged into a single dictionary, ordering matters. This tests with the dev branch followed by master, and with pillar_merge_lists enabled.'
def test_multiple_sources_dev_master_merge_lists(self):
    expected = {'branch': 'master', 'mylist': ['dev', 'master'], 'mydict': {'master': True, 'dev': True, 'nested_list': ['dev', 'master'], 'nested_dict': {'master': True, 'dev': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}\n - master {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillar_merge_lists: True\n ext_pillar:\n - git:\n - dev {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n ')
    self.assertEqual(ret, expected)
'Test using pillarenv to restrict results to those from a single branch'
def test_multiple_sources_with_pillarenv(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}\n - dev {url}\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n pillarenv: base\n ext_pillar:\n - git:\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - dev {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n ')
    self.assertEqual(ret, expected)
'Test with git_pillar_includes enabled. The top_only branch references an SLS file from the master branch, so we should see the "included_pillar" key from that SLS file in the compiled pillar data.'
def test_includes_enabled(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, 'included_pillar': True}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - top_only {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - env: base\n ')
    self.assertEqual(ret, expected)
'Test with git_pillar_includes disabled. The top_only branch references an SLS file from the master branch, but since includes are disabled it will not find the SLS file and the "included_pillar" key should not be present in the compiled pillar data. We should instead see an error message in the compiled data.'
def test_includes_disabled(self):
    expected = {'branch': 'master', 'mylist': ['master'], 'mydict': {'master': True, 'nested_list': ['master'], 'nested_dict': {'master': True}}, '_errors': ["Specified SLS 'bar' in environment 'base' is not available on the salt master"]}
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n git_pillar_user: {user}\n git_pillar_password: {password}\n git_pillar_insecure_auth: True\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}\n - top_only {url}:\n - env: base\n ')
    self.assertEqual(ret, expected)
    ret = self.get_pillar(' file_ignore_regex: []\n file_ignore_glob: []\n git_pillar_provider: pygit2\n git_pillar_includes: False\n cachedir: {cachedir}\n extension_modules: {extmods}\n ext_pillar:\n - git:\n - master {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - top_only {url}:\n - user: {user}\n - password: {password}\n - insecure_auth: True\n - env: base\n ')
    self.assertEqual(ret, expected)
'Fire an event on the master and ensure that it pings the minion'
@flaky()
def test_ping_reaction(self):
    e = salt.utils.event.get_event('minion', sock_dir=self.minion_opts['sock_dir'], opts=self.minion_opts)
    e.fire_event({'a': 'b'}, '/test_event')
    self.assertMinionEventReceived({'a': 'b'})
'venafi.gen_key'
@with_random_name
def test_gen_key_password(self, name):
    ret = self.run_run_plus(fun='venafi.gen_key', minion_id='{0}.test.saltstack.com'.format(name), dns_name='{0}.test.saltstack.com'.format(name), zone='Internet', password='SecretSauce')
    self.assertEqual(ret['out'][0], '-----BEGIN RSA PRIVATE KEY-----')
    self.assertEqual(ret['out'][1], 'Proc-Type: 4,ENCRYPTED')
    self.assertEqual(ret['out'][-1], '-----END RSA PRIVATE KEY-----')
'venafi.gen_key'
@with_random_name
def test_gen_key_without_password(self, name):
    ret = self.run_run_plus(fun='venafi.gen_key', minion_id='{0}.test.saltstack.com'.format(name), dns_name='{0}.test.saltstack.com'.format(name), zone='Internet')
    self.assertEqual(ret['out'][0], '-----BEGIN RSA PRIVATE KEY-----')
    self.assertNotEqual(ret['out'][1], 'Proc-Type: 4,ENCRYPTED')
    self.assertEqual(ret['out'][-1], '-----END RSA PRIVATE KEY-----')
'venafi.gen_csr'
@with_random_name
def test_gen_csr(self, name):
    ret = self.run_run_plus(fun='venafi.gen_csr', minion_id='{0}.test.saltstack.com'.format(name), dns_name='{0}.test.saltstack.com'.format(name), country='US', state='Utah', loc='Salt Lake City', org='Salt Stack Inc.', org_unit='Testing', zone='Internet', password='SecretSauce')
    self.assertEqual(ret['out'][0], '-----BEGIN CERTIFICATE REQUEST-----')
    self.assertEqual(ret['out'][-1], '-----END CERTIFICATE REQUEST-----')
'venafi.request'
@with_random_name
def test_request(self, name):
    ret = self.run_run_plus(fun='venafi.request', minion_id='{0}.example.com'.format(name), dns_name='{0}.example.com'.format(name), country='US', state='Utah', loc='Salt Lake City', org='Salt Stack Inc.', org_unit='Testing', zone='Internet', password='SecretSauce')
    self.assertTrue('request_id' in ret['return'])
'Test that we will wait longer than the job timeout for a minion to return.'
def test_long_running_job(self):
    sleep_length = 30
    ret = self.run_salt('minion test.sleep {0}'.format(sleep_length), timeout=45)
    self.assertTrue(isinstance(ret, list), 'Return is not a list. Minion may have returned error: {0}'.format(ret))
    self.assertTrue('True' in ret[1], 'Minion did not return True after {0} seconds.'.format(sleep_length))
'Set up minion blackout mode'
def begin_blackout(self, blackout_data='minion_blackout: True'):
    with salt.utils.files.fopen(BLACKOUT_PILLAR, 'w') as wfh:
        wfh.write(blackout_data)
    self.run_function('saltutil.refresh_pillar')
    sleep(5)
'Take down minion blackout mode'
def end_blackout(self):
    with salt.utils.files.fopen(BLACKOUT_PILLAR, 'w') as blackout_pillar:
        blackout_pillar.write(textwrap.dedent(' minion_blackout: False\n '))
    self.run_function('saltutil.refresh_pillar')
    sleep(5)
'Test that basic minion blackout functionality works'
def test_blackout(self):
    try:
        self.begin_blackout()
        blackout_ret = self.run_function('test.ping')
        self.assertIn('Minion in blackout mode.', blackout_ret)
    finally:
        self.end_blackout()
    ret = self.run_function('test.ping')
    self.assertEqual(ret, True)
'Test that minion blackout whitelist works'
def test_blackout_whitelist(self):
    try:
        self.begin_blackout(textwrap.dedent(' minion_blackout: True\n minion_blackout_whitelist:\n - test.ping\n - test.fib\n '))
        ping_ret = self.run_function('test.ping')
        self.assertEqual(ping_ret, True)
        fib_ret = self.run_function('test.fib', [7])
        self.assertTrue(isinstance(fib_ret, list))
        self.assertEqual(fib_ret[0], 13)
    finally:
        self.end_blackout()
'Test that the minion refuses to run non-whitelisted functions while in blackout mode with a whitelist configured'
def test_blackout_nonwhitelist(self):
    try:
        self.begin_blackout(textwrap.dedent(' minion_blackout: True\n minion_blackout_whitelist:\n - test.ping\n - test.fib\n '))
        state_ret = self.run_function('state.apply')
        self.assertIn('Minion in blackout mode.', state_ret)
        cloud_ret = self.run_function('cloud.query', ['list_nodes_full'])
        self.assertIn('Minion in blackout mode.', cloud_ret)
    finally:
        self.end_blackout()
'Test recursive decryption of secrets:vault as well as the fallback to default decryption renderer.'
@requires_system_grains
def test_decrypt_pillar_default_renderer(self, grains=None):
    decrypt_pillar_opts = yaml.safe_load(textwrap.dedent(" decrypt_pillar:\n - 'secrets:vault'\n "))
    opts = self._build_opts(decrypt_pillar_opts)
    pillar_obj = pillar.Pillar(opts, grains, 'test', 'base')
    ret = pillar_obj.compile_pillar()
    self.assertEqual(ret, GPG_PILLAR_DECRYPTED)
'Test recursive decryption of secrets:vault using a pipe instead of a colon as the nesting delimiter.'
@requires_system_grains
def test_decrypt_pillar_alternate_delimiter(self, grains=None):
    decrypt_pillar_opts = yaml.safe_load(textwrap.dedent(" decrypt_pillar_delimiter: '|'\n decrypt_pillar:\n - 'secrets|vault'\n "))
    opts = self._build_opts(decrypt_pillar_opts)
    pillar_obj = pillar.Pillar(opts, grains, 'test', 'base')
    ret = pillar_obj.compile_pillar()
    self.assertEqual(ret, GPG_PILLAR_DECRYPTED)
'Test recursive decryption, only with a more deeply-nested target. This should leave the other keys in secrets:vault encrypted.'
@requires_system_grains
def test_decrypt_pillar_deeper_nesting(self, grains=None):
    decrypt_pillar_opts = yaml.safe_load(textwrap.dedent(" decrypt_pillar:\n - 'secrets:vault:qux'\n "))
    opts = self._build_opts(decrypt_pillar_opts)
    pillar_obj = pillar.Pillar(opts, grains, 'test', 'base')
    ret = pillar_obj.compile_pillar()
    expected = copy.deepcopy(GPG_PILLAR_ENCRYPTED)
    expected['secrets']['vault']['qux'][-1] = GPG_PILLAR_DECRYPTED['secrets']['vault']['qux'][-1]
    self.assertEqual(ret, expected)
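A minimal sketch (not Salt's actual traversal code) of how a delimited target such as 'secrets:vault:qux' addresses a nested pillar key in the tests above, and how decrypt_pillar_delimiter lets a pipe stand in for the colon; the traverse() helper and sample pillar are hypothetical.

def traverse(data, target, delimiter=':'):
    # Walk a nested dict one path component at a time.
    for part in target.split(delimiter):
        data = data[part]
    return data

pillar = {'secrets': {'vault': {'qux': ['ciphertext-0', 'ciphertext-1']}}}
print(traverse(pillar, 'secrets:vault:qux'))        # ['ciphertext-0', 'ciphertext-1']
print(traverse(pillar, 'secrets|vault|qux', '|'))   # same path, pipe delimiter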
'Test recursive decryption of secrets:vault, with the renderer explicitly defined, overriding the default. Setting the default to a nonexistent renderer so we can be sure that the override happened.'
@requires_system_grains
def test_decrypt_pillar_explicit_renderer(self, grains=None):
    decrypt_pillar_opts = yaml.safe_load(textwrap.dedent(" decrypt_pillar_default: asdf\n decrypt_pillar_renderers:\n - asdf\n - gpg\n decrypt_pillar:\n - 'secrets:vault': gpg\n "))
    opts = self._build_opts(decrypt_pillar_opts)
    pillar_obj = pillar.Pillar(opts, grains, 'test', 'base')
    ret = pillar_obj.compile_pillar()
    self.assertEqual(ret, GPG_PILLAR_DECRYPTED)
'Test decryption using a missing renderer. It should fail, leaving the encrypted keys intact, and add an error to the pillar dictionary.'
@requires_system_grains
def test_decrypt_pillar_missing_renderer(self, grains=None):
    decrypt_pillar_opts = yaml.safe_load(textwrap.dedent(" decrypt_pillar_default: asdf\n decrypt_pillar_renderers:\n - asdf\n decrypt_pillar:\n - 'secrets:vault'\n "))
    opts = self._build_opts(decrypt_pillar_opts)
    pillar_obj = pillar.Pillar(opts, grains, 'test', 'base')
    ret = pillar_obj.compile_pillar()
    expected = copy.deepcopy(GPG_PILLAR_ENCRYPTED)
    expected['_errors'] = ["Failed to decrypt pillar key 'secrets:vault': Decryption renderer 'asdf' is not available"]
    self.assertEqual(ret, expected)
'Test decryption using a renderer which is not permitted. It should fail, leaving the encrypted keys intact, and add an error to the pillar dictionary.'
@requires_system_grains
def test_decrypt_pillar_invalid_renderer(self, grains=None):
    decrypt_pillar_opts = yaml.safe_load(textwrap.dedent(" decrypt_pillar_default: foo\n decrypt_pillar_renderers:\n - foo\n - bar\n decrypt_pillar:\n - 'secrets:vault': gpg\n "))
    opts = self._build_opts(decrypt_pillar_opts)
    pillar_obj = pillar.Pillar(opts, grains, 'test', 'base')
    ret = pillar_obj.compile_pillar()
    expected = copy.deepcopy(GPG_PILLAR_ENCRYPTED)
    expected['_errors'] = ["Failed to decrypt pillar key 'secrets:vault': 'gpg' is not a valid decryption renderer. Valid choices are: foo, bar"]
    self.assertEqual(ret, expected)
'test grains[\'cpu_model\']'
@skipIf(not salt.utils.platform.is_windows(), 'Only run on Windows')
def test_win_cpu_model(self):
    opts = self.minion_opts
    cpu_model_text = salt.modules.reg.read_value('HKEY_LOCAL_MACHINE', 'HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0', 'ProcessorNameString').get('vdata')
    self.assertEqual(self.run_function('grains.items')['cpu_model'], cpu_model_text)
'manage.up'
def test_up(self):
    ret = self.run_run_plus('manage.up')
    self.assertIn('minion', ret['return'])
    self.assertIn('sub_minion', ret['return'])
    self.assertTrue(any('- minion' in out for out in ret['out']))
    self.assertTrue(any('- sub_minion' in out for out in ret['out']))
'manage.down'
def test_down(self):
    ret = self.run_run_plus('manage.down')
    self.assertNotIn('minion', ret['return'])
    self.assertNotIn('sub_minion', ret['return'])
    self.assertNotIn('minion', ret['out'])
    self.assertNotIn('sub_minion', ret['out'])
'fileserver.dir_list'
def test_dir_list(self):
    ret = self.run_run_plus(fun='fileserver.dir_list')
    self.assertIsInstance(ret['return'], list)
    self.assertTrue('_modules' in ret['return'])
    ret = self.run_run_plus(fun='fileserver.dir_list', backend='roots')
    self.assertIsInstance(ret['return'], list)
    self.assertTrue('_modules' in ret['return'])
    ret = self.run_run_plus(fun='fileserver.dir_list', backend=['roots'])
    self.assertIsInstance(ret['return'], list)
    self.assertTrue('_modules' in ret['return'])
'fileserver.empty_dir_list'
def test_empty_dir_list(self):
    ret = self.run_run_plus(fun='fileserver.empty_dir_list')
    self.assertIsInstance(ret['return'], list)
    self.assertEqual(ret['return'], [])
    ret = self.run_run_plus(fun='fileserver.empty_dir_list', backend='roots')
    self.assertIsInstance(ret['return'], list)
    self.assertEqual(ret['return'], [])
    ret = self.run_run_plus(fun='fileserver.empty_dir_list', backend=['roots'])
    self.assertIsInstance(ret['return'], list)
    self.assertEqual(ret['return'], [])
'fileserver.envs'
def test_envs(self):
    ret = self.run_run_plus(fun='fileserver.envs')
    self.assertIsInstance(ret['return'], list)
    ret = self.run_run_plus(fun='fileserver.envs', backend='roots')
    self.assertIsInstance(ret['return'], list)
    ret = self.run_run_plus(fun='fileserver.envs', backend=['roots'])
    self.assertIsInstance(ret['return'], list)
'fileserver.clear_file_list_cache. If this test fails, then something may have changed in the test suite and we may have more than just "roots" configured in the fileserver backends. This assert will need to be updated accordingly.'
def test_clear_file_list_cache(self):
    saltenvs = sorted(self.run_run_plus(fun='fileserver.envs')['return'])

    @contextlib.contextmanager
    def gen_cache():
        '\n Create file_list cache so we have something to clear\n '
        for saltenv in saltenvs:
            self.run_run_plus(fun='fileserver.file_list', saltenv=saltenv)
        yield

    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache')
        ret['return']['roots'].sort()
        self.assertEqual(ret['return'], {'roots': saltenvs})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', backend='roots')
        ret['return']['roots'].sort()
        self.assertEqual(ret['return'], {'roots': saltenvs})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', backend=['roots'])
        ret['return']['roots'].sort()
        self.assertEqual(ret['return'], {'roots': saltenvs})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', backend='notarealbackend')
        self.assertEqual(ret['return'], {})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', saltenv='base')
        self.assertEqual(ret['return'], {'roots': ['base']})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', saltenv=['base'])
        self.assertEqual(ret['return'], {'roots': ['base']})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', saltenv='notarealsaltenv')
        self.assertEqual(ret['return'], {})
    with gen_cache():
        ret = self.run_run_plus(fun='fileserver.clear_file_list_cache', backend='roots', saltenv='base')
        self.assertEqual(ret['return'], {'roots': ['base']})
'fileserver.file_list'
def test_file_list(self):
    ret = self.run_run_plus(fun='fileserver.file_list')
    self.assertIsInstance(ret['return'], list)
    self.assertTrue('grail/scene33' in ret['return'])
    ret = self.run_run_plus(fun='fileserver.file_list', backend='roots')
    self.assertIsInstance(ret['return'], list)
    self.assertTrue('grail/scene33' in ret['return'])
    ret = self.run_run_plus(fun='fileserver.file_list', backend=['roots'])
    self.assertIsInstance(ret['return'], list)
    self.assertTrue('grail/scene33' in ret['return'])
'fileserver.symlink_list'
@skipIf(salt.utils.platform.is_windows(), 'Git for Windows does not preserve symbolic links when cloning')
def test_symlink_list(self):
    ret = self.run_run_plus(fun='fileserver.symlink_list')
    self.assertIsInstance(ret['return'], dict)
    self.assertTrue('dest_sym' in ret['return'])
    ret = self.run_run_plus(fun='fileserver.symlink_list', backend='roots')
    self.assertIsInstance(ret['return'], dict)
    self.assertTrue('dest_sym' in ret['return'])
    ret = self.run_run_plus(fun='fileserver.symlink_list', backend=['roots'])
    self.assertIsInstance(ret['return'], dict)
    self.assertTrue('dest_sym' in ret['return'])
'fileserver.update'
def test_update(self):
    ret = self.run_run_plus(fun='fileserver.update')
    self.assertTrue(ret['return'])
    ret = self.run_run_plus(fun='fileserver.update', backend='roots')
    self.assertTrue(ret['return'])
    ret = self.run_run_plus(fun='fileserver.update', backend=['roots'])
    self.assertTrue(ret['return'])
'jobs.active'
def test_active(self):
    ret = self.run_run_plus('jobs.active')
    self.assertEqual(ret['return'], {})
    self.assertEqual(ret['out'], [])
'jobs.lookup_jid'
def test_lookup_jid(self):
    ret = self.run_run_plus('jobs.lookup_jid', '23974239742394')
    self.assertEqual(ret['return'], {})
    self.assertEqual(ret['out'], [])
'jobs.list_jobs'
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_list_jobs(self):
    ret = self.run_run_plus('jobs.list_jobs')
    self.assertIsInstance(ret['return'], dict)
'test return values of salt.cmd'
def test_salt_cmd(self):
    ret = self.run_run_plus('salt.cmd', 'test.ping')
    out_ret = ret.get('out')[0]
    return_ret = ret.get('return')
    self.assertEqual(out_ret, 'True')
    self.assertTrue(return_ret)
'Create the temp file and master.d directory'
def setUp(self):
    self.job_dir = os.path.join(self.master_opts['cachedir'], 'jobs')
    self.hash_type = self.master_opts['hash_type']
    self.master_d_dir = os.path.join(self.get_config_dir(), 'master.d')
    try:
        os.makedirs(self.master_d_dir)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
    self.conf = tempfile.NamedTemporaryFile(mode='w', suffix='.conf', dir=self.master_d_dir, delete=True)
'Close the tempfile.NamedTemporaryFile object, cleaning it up'
def tearDown(self):
    self.conf.close()
    self.run_run_plus('test.arg', __reload_config=True)
'Remove kwargs and timestamp (things that are variable) so we have a stable value to assert'
@staticmethod
def clean_return(data):
    data['fun_args'][1] = salt.utils.args.clean_kwargs(**data['fun_args'][1])
    data['return']['kwargs'] = salt.utils.args.clean_kwargs(**data['return']['kwargs'])
    data.pop('_stamp')
'Dump the config dict to the conf file'
def write_conf(self, data):
    self.conf.write(yaml.dump(data, default_flow_style=False))
    self.conf.flush()
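For reference, a standalone sketch of the kind of drop-in file write_conf produces, assuming PyYAML (which the tests already use via yaml.dump):

import yaml

# default_flow_style=False forces block style, one key per line,
# which is the format a master.d/*.conf drop-in expects.
print(yaml.dump({'runner_returns': True}, default_flow_style=False), end='')
# runner_returns: true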
'Test with runner_returns disabled'
def test_runner_returns_disabled(self):
    self.write_conf({'runner_returns': False})
    ret = self.run_run_plus('test.arg', 'foo', bar='hello world!', __reload_config=True)
    jid = ret.get('jid')
    if jid is None:
        raise Exception('jid missing from run_run_plus output')
    serialized_return = os.path.join(salt.utils.jid.jid_dir(jid, self.job_dir, self.hash_type), 'master', 'return.p')
    self.assertFalse(os.path.isfile(serialized_return))
'Test with runner_returns enabled'
def test_runner_returns_enabled(self):
    self.write_conf({'runner_returns': True})
    ret = self.run_run_plus('test.arg', 'foo', bar='hello world!', __reload_config=True)
    jid = ret.get('jid')
    if jid is None:
        raise Exception('jid missing from run_run_plus output')
    serialized_return = os.path.join(salt.utils.jid.jid_dir(jid, self.job_dir, self.hash_type), 'master', 'return.p')
    serial = salt.payload.Serial(self.master_opts)
    with salt.utils.files.fopen(serialized_return, 'rb') as fp_:
        deserialized = serial.loads(fp_.read())
    self.clean_return(deserialized['return'])
    self.assertEqual(deserialized, {'return': {'fun': 'runner.test.arg', 'fun_args': ['foo', {'bar': 'hello world!'}], 'jid': jid, 'return': {'args': ['foo'], 'kwargs': {'bar': 'hello world!'}}, 'success': True, 'user': RUNTIME_VARS.RUNNING_TESTS_USER}})
'Store, list, fetch, then flush data'
def test_cache(self):
    ret = self.run_run_plus('cache.store', bank='cachetest/runner', key='test_cache', data='The time has come the walrus said')
    ret = self.run_run_plus('cache.list', bank='cachetest/runner')
    self.assertIn('test_cache', ret['return'])
    ret = self.run_run_plus('cache.fetch', bank='cachetest/runner', key='test_cache')
    self.assertIn('The time has come the walrus said', ret['return'])
    ret = self.run_run_plus('cache.flush', bank='cachetest/runner', key='test_cache')
    ret = self.run_run_plus('cache.list', bank='cachetest/runner')
    self.assertNotIn('test_cache', ret['return'])
'helper method to add salt-run return data to a queue'
def add_to_queue(self, q, cmd):
    ret = self.run_run(cmd)
    q.put(ret)
    q.task_done()
'Ensure the orchestrate runner outputs useful state data. In Issue #31330, the output only contains [\'outputter:\', \' highstate\'], and not the full stateful return. This test ensures we don\'t regress in that manner again. Also test against some sample "good" output that would be included in a correct orchestrate run.'
def test_orchestrate_output(self):
    ret_output = self.run_run('state.orchestrate orch.simple')
    bad_out = ['outputter:', ' highstate']
    good_out = [' Function: salt.state', ' Result: True', 'Succeeded: 1 (changed=1)', 'Failed: 0', 'Total states run: 1']
    self.assertIsNot(bad_out, ret_output)
    for item in good_out:
        self.assertIn(item, ret_output)
'test salt-run state.orchestrate and failhard with nested orchestration'
def test_orchestrate_nested(self):
    if os.path.exists('/tmp/ewu-2016-12-13'):
        os.remove('/tmp/ewu-2016-12-13')
    _, code = self.run_run('state.orchestrate nested-orch.outer', with_retcode=True)
    self.assertFalse(os.path.exists('/tmp/ewu-2016-12-13'))
    self.assertNotEqual(code, 0)
'test to ensure state.event runner returns correct data'
def test_state_event(self):
    q = queue.Queue(maxsize=0)
    cmd = 'state.event salt/job/*/new count=1'
    expect = '"minions": ["minion"]'
    server_thread = threading.Thread(target=self.add_to_queue, args=(q, cmd))
    server_thread.setDaemon(True)
    server_thread.start()
    while q.empty():
        self.run_salt('minion test.ping --static')
    out = q.get()
    self.assertIn(expect, str(out))
    server_thread.join()
'Dump the config dict to the conf file'
def write_conf(self, data):
    self.conf.write(yaml.dump(data, default_flow_style=False))
    self.conf.flush()
'Test to confirm that the ret event for the orchestration contains the jid for the jobs spawned.'
def test_jid_in_ret_event(self):
self.write_conf({
    'fileserver_backend': ['roots'],
    'file_roots': {'base': [self.base_env]},
})
state_sls = os.path.join(self.base_env, 'test_state.sls')
with salt.utils.files.fopen(state_sls, 'w') as fp_:
    fp_.write(textwrap.dedent('''
        date:
          cmd.run
        '''))
orch_sls = os.path.join(self.base_env, 'test_orch.sls')
with salt.utils.files.fopen(orch_sls, 'w') as fp_:
    fp_.write(textwrap.dedent('''
        date_cmd:
          salt.state:
            - tgt: minion
            - sls: test_state

        ping_minion:
          salt.function:
            - name: test.ping
            - tgt: minion

        fileserver.file_list:
          salt.runner

        config.values:
          salt.wheel
        '''))
listener = salt.utils.event.get_event(
    'master',
    sock_dir=self.master_opts['sock_dir'],
    transport=self.master_opts['transport'],
    opts=self.master_opts)
jid = self.run_run_plus('state.orchestrate', 'test_orch', __reload_config=True).get('jid')
if jid is None:
    raise Exception('jid missing from run_run_plus output')
signal.signal(signal.SIGALRM, self.alarm_handler)
signal.alarm(self.timeout)
try:
    while True:
        event = listener.get_event(full=True)
        if event is None:
            continue
        if event['tag'] == 'salt/run/{0}/ret'.format(jid):
            ret = event['data']['return']['data']['master']
            for job in ret:
                self.assertTrue('__jid__' in ret[job])
            break
finally:
    del listener
    signal.alarm(0)
'Test a simple ping'
def test_ping(self):
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
'Test login while specifying the chsh service with a bad password. This test ensures this PR is working correctly: https://github.com/saltstack/salt/pull/31826'
def test_bad_pwd_pam_chsh_service(self):
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds['service'] = 'chsh'
copyauth_creds['password'] = 'wrong_password'
body = urlencode(copyauth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '401 Unauthorized')
'Test login while specifying the login service with a bad password. This test ensures this PR is working correctly: https://github.com/saltstack/salt/pull/31826'
def test_bad_pwd_pam_login_service(self):
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds['service'] = 'login'
copyauth_creds['password'] = 'wrong_password'
body = urlencode(copyauth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '401 Unauthorized')
'Test login while specifying the chsh service with a good password. This test ensures this PR is working correctly: https://github.com/saltstack/salt/pull/31826'
def test_good_pwd_pam_chsh_service(self):
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds['service'] = 'chsh'
body = urlencode(copyauth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '200 OK')
'Test login while specifying the login service with a good password. This test ensures this PR is working correctly: https://github.com/saltstack/salt/pull/31826'
def test_good_pwd_pam_login_service(self):
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds['service'] = 'login'
body = urlencode(copyauth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '200 OK')
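These four cases only vary the service and password keys of AUTH_CREDS, which is defined elsewhere in the module. A sketch of what such a credentials dict plausibly looks like for PAM external auth (the field values here are assumptions, not the real test data):
# Hypothetical PAM eauth credentials; the tests above copy this dict and override keys.
AUTH_CREDS = {
    'username': 'saltdev',   # assumed test user
    'password': 'saltdev',   # assumed password
    'eauth': 'pam',
}
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds['service'] = 'login'  # or 'chsh', as exercised above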
'Clean up after tests. Delete user'
@destructiveTest
@skip_if_not_root
def tearDown(self):
super(TestAuthPAM, self).tearDown()
user_list = self.run_function('user.list_users')
if USERA in user_list:
    self.run_function('user.delete', [USERA], remove=True)
cherrypy.engine.exit()
'GET requests to the root URL should not require auth'
def test_get_root_noauth(self):
request, response = self.request('/')
self.assertEqual(response.status, '200 OK')
'POST requests to the root URL require auth'
def test_post_root_auth(self):
request, response = self.request('/', method='POST', data={})
self.assertEqual(response.status, '401 Unauthorized')
'GET requests to the login URL should not require auth'
def test_login_noauth(self):
request, response = self.request('/login')
self.assertEqual(response.status, '200 OK')
'Requests to the webhook URL require auth by default'
def test_webhook_auth(self):
request, response = self.request('/hook', method='POST', data={})
self.assertEqual(response.status, '401 Unauthorized')
'Test logging in'
def test_good_login(self):
body = urlencode(self.auth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '200 OK')
return response
'Test logging in with bad credentials'
def test_bad_login(self):
body = urlencode({'totally': 'invalid_creds'})
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '401 Unauthorized')
'Test the run URL with good auth credentials'
def test_run_good_login(self):
cmd = dict(self.low, **dict(self.auth_creds))
body = urlencode(cmd)
request, response = self.request('/run', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '200 OK')
'Test the run URL with bad auth credentials'
def test_run_bad_login(self):
cmd = dict(self.low, **{'totally': 'invalid_creds'})
body = urlencode(cmd)
request, response = self.request('/run', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '401 Unauthorized')
'Auth can be disabled for requests to the webhook URL'
def test_webhook_noauth(self):
body = urlencode({'foo': 'Foo!'})
request, response = self.request('/hook', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '200 OK')
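This behavior depends on how the rest_cherrypy app is configured for the test; a sketch of the relevant master config section, assuming the webhook_disable_auth option is what the harness enables here (values are illustrative):
# Hypothetical rest_cherrypy config permitting unauthenticated webhook POSTs.
rest_cherrypy_config = {
    'rest_cherrypy': {
        'port': 8000,
        'webhook_disable_auth': True,
    },
}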
'Return the token'
def _token(self):
body = urlencode(self.auth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
return response.headers['X-Auth-Token']
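The token returned here is consumed by the other tests in this class as an X-Auth-Token header, as in the following usage sketch drawn from test_accepts_arg_kwarg_keys below:
# Authenticate once, then reuse the token on a subsequent request.
token = self._token()
request, response = self.request('/', method='POST', body=json.dumps(dict(self.low)),
                                 headers={'content-type': 'application/json',
                                          'X-Auth-Token': token,
                                          'Accept': 'application/json'})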
'Ensure that (singular) arg and kwarg keys (for passing parameters) are supported by runners.'
def test_accepts_arg_kwarg_keys(self):
cmd = dict(self.low)
body = json.dumps(cmd)
request, response = self.request('/', method='POST', body=body,
                                 headers={'content-type': 'application/json',
                                          'X-Auth-Token': self._token(),
                                          'Accept': 'application/json'})
resp = json.loads(salt.utils.stringutils.to_str(response.body[0]))
self.assertEqual(resp['return'][0]['args'], [1234])
self.assertEqual(resp['return'][0]['kwargs'], {'ext_source': 'redis'})
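self.low is built in the class setup and is not shown here. A shape consistent with the assertions above would be a runner low chunk carrying arg and kwarg keys; everything beyond the asserted values is an assumption:
# Hypothetical low chunk consistent with the asserted return values above.
low = {
    'client': 'runner',
    'fun': 'test.arg',          # assumption: a runner that echoes its args/kwargs back
    'arg': [1234],
    'kwarg': {'ext_source': 'redis'},
}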
'Return the token'
def _token(self):
body = urlencode(self.auth_creds)
request, response = self.request('/login', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
return response.headers['X-Auth-Token']
'Helper function to add a job to the job cache'
def _add_job(self):
cmd = dict(self.low, **dict(self.auth_creds))
body = urlencode(cmd)
request, response = self.request('/run', method='POST', body=body,
                                 headers={'content-type': 'application/x-www-form-urlencoded'})
self.assertEqual(response.status, '200 OK')