desc (string, 3–26.7k chars) | decl (string, 11–7.89k chars) | bodies (string, 8–553k chars)
---|---|---
'Test IPv6 address validation'
| def test_ipv6_addr(self):
| true_addrs = ['::1', '::1/32', '::1/32', '::1/128', '2a03:4000:c:10aa:1017:f00d:aaaa:a']
false_addrs = ['1.1.1.1', '::1/0', '::1/32d', '::1/129']
for addr in true_addrs:
self.assertTrue(net.ipv6_addr(addr))
for addr in false_addrs:
self.assertFalse(net.ipv6_addr(addr))
|
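A minimal sketch of a validator consistent with the true/false cases above, assuming the stdlib ipaddress module; the helper actually under test (net.ipv6_addr) may be regex-based and differ in edge cases.

import ipaddress

def ipv6_addr_sketch(addr):
    # accept a bare IPv6 address or one followed by a /1-/128 prefix length
    host, _, prefix = addr.partition('/')
    try:
        ipaddress.IPv6Address(host)   # rejects IPv4 and malformed input
    except ValueError:
        return False
    if not prefix:
        return True
    return prefix.isdigit() and 1 <= int(prefix) <= 128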
'Clear the cache before every test'
| def setUp(self):
| context_dir = os.path.join(__opts__['cachedir'], 'context')
if os.path.isdir(context_dir):
shutil.rmtree(context_dir)
|
'Tests to ensure the cache is written correctly'
| def test_set_cache(self):
| @salt.utils.cache.context_cache
def _test_set_cache():
'\n This will inherit globals from the test module itself.\n Normally these are injected by the salt loader [salt.loader]\n '
pass
_test_set_cache()
target_cache_file = os.path.join(__opts__['cachedir'], 'context', '{0}.p'.format(__name__))
self.assertTrue(os.path.isfile(target_cache_file), 'Context cache did not write cache file')
with salt.utils.files.fopen(target_cache_file, 'rb') as fp_:
target_cache_data = salt.payload.Serial(__opts__).load(fp_)
self.assertDictEqual(__context__, target_cache_data)
cc = salt.utils.cache.ContextCache(__opts__, __name__)
retrieved_cache = cc.get_cache_context()
self.assertDictEqual(retrieved_cache, __context__)
|
'Tests to ensure that the context cache can rehydrate a wrapped function'
| def test_refill_cache(self):
| @salt.utils.cache.context_cache
def _test_set_cache():
pass
_test_set_cache()
@salt.utils.cache.context_cache
def _test_refill_cache(comparison_context):
self.assertEqual(__context__, comparison_context)
global __context__
__context__ = {}
_test_refill_cache({'a': 'b'})
|
'Make sure you can instantiate, add, update, remove, expire'
| def test_everything(self):
| try:
tmpdir = tempfile.mkdtemp()
path = os.path.join(tmpdir, 'CacheDisk_test')
cd = cache.CacheDisk(0.1, path)
self.assertIsInstance(cd, cache.CacheDisk)
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
self.assertIn('foo', cd)
self.assertEqual(cd['foo'], 'bar')
del cd['foo']
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
cd2 = cache.CacheDisk(0.1, path)
self.assertIn('foo', cd2)
self.assertEqual(cd2['foo'], 'bar')
time.sleep(0.2)
self.assertNotIn('foo', cd)
self.assertNotIn('foo', cd2)
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
|
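A minimal sketch of the dict-like, disk-backed, per-entry-TTL behaviour this test exercises. It assumes plain pickle for persistence, whereas salt's CacheDisk serializes with msgpack and handles more corner cases.

import os
import pickle
import time

class TTLDiskCacheSketch(object):
    def __init__(self, ttl, path):
        self._ttl = ttl
        self._path = path
        self._data = {}
        self._timestamps = {}
        self._read()

    def _read(self):
        # a second instance pointed at the same path sees persisted entries
        if os.path.isfile(self._path):
            with open(self._path, 'rb') as fh:
                self._timestamps, self._data = pickle.load(fh)

    def _store(self):
        with open(self._path, 'wb') as fh:
            pickle.dump((self._timestamps, self._data), fh)

    def _expired(self, key):
        return time.time() - self._timestamps.get(key, 0) > self._ttl

    def __contains__(self, key):
        return key in self._data and not self._expired(key)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value
        self._timestamps[key] = time.time()
        self._store()

    def __delitem__(self, key):
        del self._data[key]
        self._timestamps.pop(key, None)
        self._store()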
'Test sanitized input for trimming'
| def test_sanitized_trim(self):
| value = u' sample '
response = clean.trim(value)
assert (response == 'sample')
assert (type(response) == text)
|
'Test sanitized input for filename'
| def test_sanitized_filename(self):
| value = '/absolute/path/to/the/file.txt'
response = clean.filename(value)
assert (response == 'file.txt')
value = '../relative/path/to/the/file.txt'
response = clean.filename(value)
assert (response == 'file.txt')
|
'Test sanitized input for hostname (id)'
| def test_sanitized_hostname(self):
| value = ' ../ ../some/dubious/hostname '
response = clean.hostname(value)
assert (response == 'somedubioushostname')
|
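A minimal sketch of the three sanitizers these tests describe. The character whitelist for hostname is an assumption inferred only from the single expected value above; the real salt sanitizer helpers may allow more (for example dots in FQDN-style ids).

import os
import re

def trim_sketch(value):
    # strip surrounding whitespace
    return value.strip()

def filename_sketch(value):
    # keep only the last path component
    return os.path.basename(value)

def hostname_sketch(value):
    # drop everything except letters, digits and hyphens
    return re.sub(r'[^A-Za-z0-9-]', '', value)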
'Ensure that both lowercase and non-lowercase values are supported'
| def test_provider_case_insensitive(self):
| provider = 'GitPython'
for (role_name, role_class) in (('gitfs', salt.utils.gitfs.GitFS), ('git_pillar', salt.utils.gitfs.GitPillar), ('winrepo', salt.utils.gitfs.WinRepo)):
key = '{0}_provider'.format(role_name)
with patch.object(role_class, 'verify_gitpython', MagicMock(return_value=True)):
with patch.object(role_class, 'verify_pygit2', MagicMock(return_value=False)):
args = [OPTS]
if (role_name == 'winrepo'):
args.append('/tmp/winrepo-dir')
with patch.dict(OPTS, {key: provider}):
role_class(*args)
role_class(*args)
|
'Ensure that an invalid provider is not accepted, raising a
FileserverConfigError.'
| def test_valid_provider(self):
| def _get_mock(verify, provider):
'\n Return a MagicMock with the desired return value\n '
return MagicMock(return_value=verify.endswith(provider))
for (role_name, role_class) in (('gitfs', salt.utils.gitfs.GitFS), ('git_pillar', salt.utils.gitfs.GitPillar), ('winrepo', salt.utils.gitfs.WinRepo)):
key = '{0}_provider'.format(role_name)
for provider in salt.utils.gitfs.VALID_PROVIDERS:
verify = 'verify_gitpython'
mock1 = _get_mock(verify, provider)
with patch.object(role_class, verify, mock1):
verify = 'verify_pygit2'
mock2 = _get_mock(verify, provider)
with patch.object(role_class, verify, mock2):
args = [OPTS]
if (role_name == 'winrepo'):
args.append('/tmp/winrepo-dir')
with patch.dict(OPTS, {key: provider}):
role_class(*args)
with patch.dict(OPTS, {key: 'foo'}):
self.assertRaises(FileserverConfigError, role_class, *args)
|
'Test traditional overwrite, wherein a key in the second dict overwrites a key in the first'
| def test_merge_overwrite_traditional(self):
| mdict = copy.deepcopy(self.dict1)
mdict['A'] = 'b'
ret = dictupdate.merge_overwrite(copy.deepcopy(self.dict1), {'A': 'b'})
self.assertEqual(mdict, ret)
|
'Test case wherein the overwrite strategy is used but a key in the second dict is
not present in the first'
| def test_merge_overwrite_missing_source_key(self):
| mdict = copy.deepcopy(self.dict1)
mdict['D'] = 'new'
ret = dictupdate.merge_overwrite(copy.deepcopy(self.dict1), {'D': 'new'})
self.assertEqual(mdict, ret)
|
'Test traditional aggregation, where a val from dict2 overwrites one
present in dict1'
| def test_merge_aggregate_traditional(self):
| mdict = copy.deepcopy(self.dict1)
mdict['A'] = 'b'
ret = dictupdate.merge_overwrite(copy.deepcopy(self.dict1), {'A': 'b'})
self.assertEqual(mdict, ret)
|
'Test traditional list merge, where a key present in dict2 will be converted
to a list'
| def test_merge_list_traditional(self):
| mdict = copy.deepcopy(self.dict1)
mdict['A'] = ['B', 'b']
ret = dictupdate.merge_list(copy.deepcopy(self.dict1), {'A': 'b'})
self.assertEqual(mdict, ret)
|
'This codifies the intended behaviour that items merged into a dict value that is already
a list will be *appended* to that list, and not magically merged in'
| def test_merge_list_append(self):
| mdict = copy.deepcopy(self.dict1)
mdict['A'] = ['B', 'b', 'c']
mdict1 = copy.deepcopy(self.dict1)
mdict1['A'] = ['B']
ret = dictupdate.merge_list(mdict1, {'A': ['b', 'c']})
self.assertEqual({'A': [['B'], ['b', 'c']], 'C': {'D': 'E', 'F': {'I': 'J', 'G': 'H'}}}, ret)
|
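A minimal sketch of the two merge strategies these tests pin down. It covers only the flat cases asserted here; salt.utils.dictupdate recurses into nested dicts and supports further strategies.

def merge_overwrite_sketch(dest, upd):
    # keys present in upd simply replace the corresponding keys in dest
    dest.update(upd)
    return dest

def merge_list_sketch(dest, upd):
    # when a key exists in both dicts, pair the old and new values in a list
    for key, val in upd.items():
        if key in dest:
            dest[key] = [dest[key], val]
        else:
            dest[key] = val
    return dest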
'Takes a YAML string, puts it into a mock file, passes that to the YAML
SaltYamlSafeLoader and then returns the rendered/parsed YAML data'
| @staticmethod
def _render_yaml(data):
| with patch('salt.utils.files.fopen', mock_open(read_data=data)) as mocked_file:
with salt.utils.files.fopen(mocked_file) as mocked_stream:
return SaltYamlSafeLoader(mocked_stream).get_data()
|
'Test parsing a basic YAML document'
| def test_yaml_basics(self):
| self.assertEqual(self._render_yaml('\np1:\n - alpha\n - beta'), {'p1': ['alpha', 'beta']})
|
'Test YAML anchors'
| def test_yaml_merge(self):
| self.assertEqual(self._render_yaml('\np1: &p1\n v1: alpha\np2:\n <<: *p1\n v2: beta'), {'p1': {'v1': 'alpha'}, 'p2': {'v1': 'alpha', 'v2': 'beta'}})
self.assertEqual(self._render_yaml('\np1: &p1\n v1: alpha\np2:\n <<: *p1\n v1: new_alpha'), {'p1': {'v1': 'alpha'}, 'p2': {'v1': 'new_alpha'}})
self.assertEqual(self._render_yaml('\np1: &p1\n v1: &v1\n - t1\n - t2\np2:\n v2: *v1'), {'p2': {'v2': ['t1', 't2']}, 'p1': {'v1': ['t1', 't2']}})
|
'Test that duplicates still throw an error'
| def test_yaml_duplicates(self):
| with self.assertRaises(ConstructorError):
self._render_yaml('\np1: alpha\np1: beta')
with self.assertRaises(ConstructorError):
self._render_yaml('\np1: &p1\n v1: alpha\np2:\n <<: *p1\n v2: beta\n v2: betabeta')
|
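Rejecting duplicate mapping keys is not PyYAML's default behaviour; a minimal sketch of a SafeLoader subclass that raises ConstructorError on duplicates, modelling only the plain case from the first assertion above. Salt's SaltYamlSafeLoader additionally handles '<<' merge keys, ordered dicts and unicode.

import yaml
from yaml.constructor import ConstructorError

class DuplicateKeySafeLoader(yaml.SafeLoader):
    # note: '<<' merge keys are not handled here
    def construct_mapping(self, node, deep=False):
        mapping = {}
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if key in mapping:
                raise ConstructorError(
                    'while constructing a mapping', node.start_mark,
                    'found conflicting key: {0}'.format(key), key_node.start_mark)
            mapping[key] = self.construct_object(value_node, deep=deep)
        return mapping

# yaml.load('p1: alpha\np1: beta', Loader=DuplicateKeySafeLoader)  # raises ConstructorError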
'Test a simple string nodegroup'
| def test_nodegroup_comp(self):
| for nodegroup in NODEGROUPS:
expected = EXPECTED[nodegroup]
ret = minions.nodegroup_comp(nodegroup, NODEGROUPS)
self.assertEqual(ret, expected)
|
'Ensure we just return False if we pass in invalid or undefined paths.
Refs #8259'
| def test_valid_id_exception_handler(self):
| opts = {'pki_dir': '/tmp/whatever'}
self.assertFalse(valid_id(opts, None))
|
'Test that verify_log works as expected'
| @skipIf(NO_MOCK, NO_MOCK_REASON)
def test_verify_log(self):
| message = 'Insecure logging configuration detected! Sensitive data may be logged.'
mock_cheese = MagicMock()
with patch.object(log, 'warning', mock_cheese):
verify_log({'log_level': 'cheeseshop'})
mock_cheese.assert_called_once_with(message)
mock_trace = MagicMock()
with patch.object(log, 'warning', mock_trace):
verify_log({'log_level': 'trace'})
mock_trace.assert_called_once_with(message)
mock_none = MagicMock()
with patch.object(log, 'warning', mock_none):
verify_log({})
mock_none.assert_called_once_with(message)
mock_info = MagicMock()
with patch.object(log, 'warning', mock_info):
verify_log({'log_level': 'info'})
self.assertTrue((mock_info.call_count == 0))
|
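A minimal sketch of the check this test describes, assuming anything other than a recognised level at or above 'info' counts as insecure; the real salt.utils.verify.verify_log compares against salt's own log-level table, and the warning text here is taken verbatim from the test.

import logging

log = logging.getLogger(__name__)

def verify_log_sketch(opts):
    # warn unless the configured level is known and no more verbose than 'info'
    safe_levels = ('info', 'warning', 'error', 'critical', 'quiet')
    if opts.get('log_level') not in safe_levels:
        log.warning('Insecure logging configuration detected! '
                    'Sensitive data may be logged.')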
'Tests ensuring the job exists and deleting it'
| def test_delete_job_exists(self):
| self.schedule.opts.update({'schedule': {'foo': 'bar'}, 'pillar': {}})
self.assertIn('foo', self.schedule.opts['schedule'])
self.schedule.delete_job('foo')
self.assertNotIn('foo', self.schedule.opts['schedule'])
|
'Tests that deleting a job defined in pillar is ignored'
| def test_delete_job_in_pillar(self):
| self.schedule.opts.update({'pillar': {'schedule': {'foo': 'bar'}}, 'schedule': {}})
self.assertIn('foo', self.schedule.opts['pillar']['schedule'])
self.schedule.delete_job('foo')
self.assertIn('foo', self.schedule.opts['pillar']['schedule'])
|
'Tests removing job from intervals'
| def test_delete_job_intervals(self):
| self.schedule.opts.update({'pillar': {}, 'schedule': {}})
self.schedule.intervals = {'foo': 'bar'}
self.schedule.delete_job('foo')
self.assertNotIn('foo', self.schedule.intervals)
|
'Tests ensuring jobs exist and deleting them by prefix'
| def test_delete_job_prefix(self):
| self.schedule.opts.update({'schedule': {'foobar': 'bar', 'foobaz': 'baz', 'fooboo': 'boo'}, 'pillar': {}})
ret = copy.deepcopy(self.schedule.opts)
del ret['schedule']['foobar']
del ret['schedule']['foobaz']
self.schedule.delete_job_prefix('fooba')
self.assertEqual(self.schedule.opts, ret)
|
'Tests that deleting jobs by prefix from pillar is ignored'
| def test_delete_job_prefix_in_pillar(self):
| self.schedule.opts.update({'pillar': {'schedule': {'foobar': 'bar', 'foobaz': 'baz', 'fooboo': 'boo'}}, 'schedule': {}})
ret = copy.deepcopy(self.schedule.opts)
self.schedule.delete_job_prefix('fooba')
self.assertEqual(self.schedule.opts, ret)
|
'Tests that a ValueError is raised when the job data is not a dictionary'
| def test_add_job_data_not_dict(self):
| data = 'foo'
self.assertRaises(ValueError, Schedule.add_job, self.schedule, data)
|
'Tests that a ValueError is raised when more than one job is added at a time'
| def test_add_job_multiple_jobs(self):
| data = {'key1': 'value1', 'key2': 'value2'}
self.assertRaises(ValueError, Schedule.add_job, self.schedule, data)
|
'Tests adding a job to the schedule'
| def test_add_job(self):
| data = {'foo': {'bar': 'baz'}}
ret = copy.deepcopy(self.schedule.opts)
ret.update({'schedule': {'foo': {'bar': 'baz', 'enabled': True}, 'hello': {'world': 'peace', 'enabled': True}}, 'pillar': {}})
self.schedule.opts.update({'schedule': {'hello': {'world': 'peace', 'enabled': True}}, 'pillar': {}})
Schedule.add_job(self.schedule, data)
self.assertEqual(self.schedule.opts, ret)
|
'Tests enabling a job'
| def test_enable_job(self):
| self.schedule.opts.update({'schedule': {'name': {'enabled': 'foo'}}})
Schedule.enable_job(self.schedule, 'name')
self.assertTrue(self.schedule.opts['schedule']['name']['enabled'])
|
'Tests that enabling a job defined in pillar is ignored'
| def test_enable_job_pillar(self):
| self.schedule.opts.update({'pillar': {'schedule': {'name': {'enabled': False}}}})
Schedule.enable_job(self.schedule, 'name', persist=False)
self.assertFalse(self.schedule.opts['pillar']['schedule']['name']['enabled'])
|
'Tests disabling a job'
| def test_disable_job(self):
| self.schedule.opts.update({'schedule': {'name': {'enabled': 'foo'}}, 'pillar': {}})
Schedule.disable_job(self.schedule, 'name')
self.assertFalse(self.schedule.opts['schedule']['name']['enabled'])
|
'Tests that disabling a job defined in pillar is ignored'
| def test_disable_job_pillar(self):
| self.schedule.opts.update({'pillar': {'schedule': {'name': {'enabled': True}}}, 'schedule': {}})
Schedule.disable_job(self.schedule, 'name', persist=False)
self.assertTrue(self.schedule.opts['pillar']['schedule']['name']['enabled'])
|
'Tests modifying a job in the scheduler'
| def test_modify_job(self):
| schedule = {'foo': 'bar'}
self.schedule.opts.update({'schedule': {'name': 'baz'}, 'pillar': {}})
ret = copy.deepcopy(self.schedule.opts)
ret.update({'schedule': {'name': {'foo': 'bar'}}})
Schedule.modify_job(self.schedule, 'name', schedule)
self.assertEqual(self.schedule.opts, ret)
|
'Tests modifying a job in the scheduler when the job does not exist'
| def test_modify_job_not_exists(self):
| schedule = {'foo': 'bar'}
self.schedule.opts.update({'schedule': {}, 'pillar': {}})
ret = copy.deepcopy(self.schedule.opts)
ret.update({'schedule': {'name': {'foo': 'bar'}}})
Schedule.modify_job(self.schedule, 'name', schedule)
self.assertEqual(self.schedule.opts, ret)
|
'Tests that modifying a job defined in pillar is ignored'
| def test_modify_job_pillar(self):
| schedule = {'foo': 'bar'}
self.schedule.opts.update({'schedule': {}, 'pillar': {'schedule': {'name': 'baz'}}})
ret = copy.deepcopy(self.schedule.opts)
Schedule.modify_job(self.schedule, 'name', schedule, persist=False)
self.assertEqual(self.schedule.opts, ret)
|
'Tests enabling the scheduler'
| def test_enable_schedule(self):
| self.schedule.opts.update({'schedule': {'enabled': 'foo'}, 'pillar': {}})
Schedule.enable_schedule(self.schedule)
self.assertTrue(self.schedule.opts['schedule']['enabled'])
|
'Tests disabling the scheduler'
| def test_disable_schedule(self):
| self.schedule.opts.update({'schedule': {'enabled': 'foo'}, 'pillar': {}})
Schedule.disable_schedule(self.schedule)
self.assertFalse(self.schedule.opts['schedule']['enabled'])
|
'Tests reloading the schedule from saved schedule where both the
saved schedule and self.schedule.opts contain a schedule key'
| def test_reload_update_schedule_key(self):
| saved = {'schedule': {'foo': 'bar'}}
ret = copy.deepcopy(self.schedule.opts)
ret.update({'schedule': {'foo': 'bar', 'hello': 'world'}})
self.schedule.opts.update({'schedule': {'hello': 'world'}})
Schedule.reload(self.schedule, saved)
self.assertEqual(self.schedule.opts, ret)
|
'Tests reloading the schedule from saved schedule that does not
contain a schedule key but self.schedule.opts does'
| def test_reload_update_schedule_no_key(self):
| saved = {'foo': 'bar'}
ret = copy.deepcopy(self.schedule.opts)
ret.update({'schedule': {'foo': 'bar', 'hello': 'world'}})
self.schedule.opts.update({'schedule': {'hello': 'world'}})
Schedule.reload(self.schedule, saved)
self.assertEqual(self.schedule.opts, ret)
|
'Tests reloading the schedule from saved schedule that does not
contain a schedule key and neither does self.schedule.opts'
| def test_reload_no_schedule_in_opts(self):
| saved = {'foo': 'bar'}
ret = copy.deepcopy(self.schedule.opts)
ret['schedule'] = {'foo': 'bar'}
self.schedule.opts.pop('schedule', None)
Schedule.reload(self.schedule, saved)
self.assertEqual(self.schedule.opts, ret)
|
'Tests reloading the schedule from saved schedule that contains
a schedule key, but self.schedule.opts does not'
| def test_reload_schedule_in_saved_but_not_opts(self):
| saved = {'schedule': {'foo': 'bar'}}
ret = copy.deepcopy(self.schedule.opts)
ret['schedule'] = {'foo': 'bar'}
self.schedule.opts.pop('schedule', None)
Schedule.reload(self.schedule, saved)
self.assertEqual(self.schedule.opts, ret)
|
'Tests eval if the schedule is not a dictionary'
| def test_eval_schedule_is_not_dict(self):
| self.schedule.opts.update({'schedule': '', 'pillar': {'schedule': {}}})
self.assertRaises(ValueError, Schedule.eval, self.schedule)
|
'Tests eval if the schedule from pillar is not a dictionary'
| def test_eval_schedule_is_not_dict_in_pillar(self):
| self.schedule.opts.update({'schedule': {}, 'pillar': {'schedule': ''}})
self.assertRaises(ValueError, Schedule.eval, self.schedule)
|
'This tests joining paths that contain a mix of components with unicode
strings and non-unicode strings with the unicode characters as binary.
This is no longer something we need to concern ourselves with in
Python 3, but the test should nonetheless pass on Python 3. Really what
we\'re testing here is that we don\'t get a UnicodeDecodeError when
running on Python 2.'
| @skipIf(salt.utils.platform.is_windows(), '*nix-only test')
def test_mixed_unicode_and_binary(self):
| a = u'/foo/bar'
b = '\xd0\x94'
expected = u'/foo/bar/\u0414'
actual = salt.utils.path.join(a, b)
self.assertEqual(actual, expected)
|
'test execute_return_success function
command not supported'
| def test_execute_return_success_not_supported(self):
| mock_cmd = MagicMock(return_value={'retcode': 0, 'stdout': 'not supported', 'stderr': 'error'})
with patch.object(mac_utils, '_run_all', mock_cmd):
self.assertRaises(CommandExecutionError, mac_utils.execute_return_success, 'dir c:\\')
|
'test execute_return_success function
command failed'
| def test_execute_return_success_command_failed(self):
| mock_cmd = MagicMock(return_value={'retcode': 1, 'stdout': 'spongebob', 'stderr': 'error'})
with patch.object(mac_utils, '_run_all', mock_cmd):
self.assertRaises(CommandExecutionError, mac_utils.execute_return_success, 'dir c:\\')
|
'test execute_return_success function
command succeeded'
| def test_execute_return_success_command_succeeded(self):
| mock_cmd = MagicMock(return_value={'retcode': 0, 'stdout': 'spongebob'})
with patch.object(mac_utils, '_run_all', mock_cmd):
ret = mac_utils.execute_return_success('dir c:\\')
self.assertEqual(ret, True)
|
'test execute_return_result function
command failed'
| def test_execute_return_result_command_failed(self):
| mock_cmd = MagicMock(return_value={'retcode': 1, 'stdout': 'spongebob', 'stderr': 'squarepants'})
with patch.object(mac_utils, '_run_all', mock_cmd):
self.assertRaises(CommandExecutionError, mac_utils.execute_return_result, 'dir c:\\')
|
'test execute_return_result function
command succeeded'
| def test_execute_return_result_command_succeeded(self):
| mock_cmd = MagicMock(return_value={'retcode': 0, 'stdout': 'spongebob'})
with patch.object(mac_utils, '_run_all', mock_cmd):
ret = mac_utils.execute_return_result('dir c:\\')
self.assertEqual(ret, 'spongebob')
|
'test parse_return function
space after colon'
| def test_parse_return_space(self):
| self.assertEqual(mac_utils.parse_return('spongebob: squarepants'), 'squarepants')
|
'test parse_return function
new line after colon'
| def test_parse_return_new_line(self):
| self.assertEqual(mac_utils.parse_return('spongebob:\nsquarepants'), 'squarepants')
|
'test parse_return function
no delimiter'
| def test_parse_return_no_delimiter(self):
| self.assertEqual(mac_utils.parse_return('squarepants'), 'squarepants')
|
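A minimal sketch consistent with the three parse_return cases above: return whatever follows the first colon, tolerating either a space or a newline after it, and pass the string through unchanged when there is no delimiter.

def parse_return_sketch(data):
    if ': ' in data:
        return data.split(': ', 1)[1]
    if ':\n' in data:
        return data.split(':\n', 1)[1]
    return data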
'test validate_enabled function
test on'
| def test_validate_enabled_on(self):
| self.assertEqual(mac_utils.validate_enabled('On'), 'on')
|
'test validate_enabled function
test off'
| def test_validate_enabled_off(self):
| self.assertEqual(mac_utils.validate_enabled('Off'), 'off')
|
'test validate_enabled function
test bad string'
| def test_validate_enabled_bad_string(self):
| self.assertRaises(SaltInvocationError, mac_utils.validate_enabled, 'bad string')
|
'test validate_enabled function
test non zero'
| def test_validate_enabled_non_zero(self):
| for x in range(1, 179, 3):
self.assertEqual(mac_utils.validate_enabled(x), 'on')
|
'test validate_enabled function
test 0'
| def test_validate_enabled_0(self):
| self.assertEqual(mac_utils.validate_enabled(0), 'off')
|
'test validate_enabled function
test True'
| def test_validate_enabled_true(self):
| self.assertEqual(mac_utils.validate_enabled(True), 'on')
|
'test validate_enabled function
test False'
| def test_validate_enabled_false(self):
| self.assertEqual(mac_utils.validate_enabled(False), 'off')
|
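A minimal sketch covering the validate_enabled cases above: 'on'/'off' strings (any case) are normalised, other strings raise SaltInvocationError, and non-string values fall back to their truthiness. The error message is an assumption.

from salt.exceptions import SaltInvocationError

def validate_enabled_sketch(enabled):
    if isinstance(enabled, str):
        if enabled.lower() not in ('on', 'off'):
            raise SaltInvocationError('Invalid value: must be on/off, a boolean, or an integer')
        return enabled.lower()
    return 'on' if enabled else 'off'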
'Test that salt.utils.systemd.booted() returns True when minion is
systemd-booted.'
| def test_booted(self):
| with patch('os.stat', side_effect=_booted_effect):
self.assertTrue(_systemd.booted())
context = {}
self.assertTrue(_systemd.booted(context))
self.assertEqual(context, {'salt.utils.systemd.booted': True})
|
'Test that salt.utils.systemd.booted() returns False when minion is not
systemd-booted.'
| def test_not_booted(self):
| with patch('os.stat', side_effect=_not_booted_effect):
self.assertFalse(_systemd.booted())
context = {}
self.assertFalse(_systemd.booted(context))
self.assertEqual(context, {'salt.utils.systemd.booted': False})
|
'Test that the context data is returned when present. To ensure we\'re
getting data from the context dict, we use a non-boolean value to
differentiate it from the True/False return this function normally
produces.'
| def test_booted_return_from_context(self):
| context = {'salt.utils.systemd.booted': 'foo'}
self.assertEqual(_systemd.booted(context), 'foo')
|
'Test with invalid context data. The context value must be a dict, so
this should raise a SaltInvocationError.'
| def test_booted_invalid_context(self):
| with self.assertRaises(SaltInvocationError):
_systemd.booted(99999)
|
'Test that salt.utils.systemd.version() returns the systemd version as an
integer and caches it in the passed context dict.'
| def test_version(self):
| with patch('subprocess.Popen') as popen_mock:
_version = 231
output = 'systemd {0}\n-SYSVINIT'.format(_version)
popen_mock.return_value = Mock(communicate=(lambda *args, **kwargs: (output, None)), pid=(lambda : 12345), retcode=0)
self.assertEqual(_systemd.version(), _version)
context = {}
self.assertTrue(_systemd.version(context))
self.assertEqual(context, {'salt.utils.systemd.version': _version})
|
'Test that the context data is returned when present. To ensure we\'re
getting data from the context dict, we use a non-integer value to
differentiate it from the integer return this function normally
produces.'
| def test_version_return_from_context(self):
| context = {'salt.utils.systemd.version': 'foo'}
self.assertEqual(_systemd.version(context), 'foo')
|
'Test with invalid context data. The context value must be a dict, so
this should raise a SaltInvocationError.'
| def test_version_invalid_context(self):
| with self.assertRaises(SaltInvocationError):
_systemd.version(99999)
|
'Test the case where the "systemctl --version" output cannot be parsed;
version() should return None and leave the context dict untouched.'
| def test_version_parse_problem(self):
| with patch('subprocess.Popen') as popen_mock:
popen_mock.return_value = Mock(communicate=(lambda *args, **kwargs: ('invalid', None)), pid=(lambda : 12345), retcode=0)
self.assertIsNone(_systemd.version())
context = {}
self.assertIsNone(_systemd.version(context))
self.assertEqual(context, {})
|
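A minimal sketch of the behaviour the version() tests describe: parse the integer out of the first line of "systemctl --version", cache it under 'salt.utils.systemd.version' when a context dict is supplied, raise SaltInvocationError for a non-dict context, and return None (without touching the context) when the output cannot be parsed. The exact error message is an assumption.

import re
import subprocess

from salt.exceptions import SaltInvocationError

def systemd_version_sketch(context=None):
    contextkey = 'salt.utils.systemd.version'
    if context is None:
        context = {}
    elif not isinstance(context, dict):
        # mirrors test_version_invalid_context above
        raise SaltInvocationError('context must be a dictionary if passed')
    if contextkey in context:
        return context[contextkey]
    proc = subprocess.Popen(['systemctl', '--version'],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = proc.communicate()[0]
    if isinstance(stdout, bytes):
        stdout = stdout.decode()
    first_line = stdout.splitlines()[0] if stdout else ''
    match = re.match(r'systemd\s+(\d+)', first_line)
    if not match:
        return None              # parse failure: do not poison the context
    version = int(match.group(1))
    context[contextkey] = version
    return version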
'Scopes are available in systemd>=205. Make sure that this function
returns the expected boolean. We do three separate unit tests for
versions 204 through 206 because mock doesn\'t like us altering the
return_value in a loop.'
| def test_has_scope_systemd204(self):
| with patch('subprocess.Popen') as popen_mock:
_expected = False
_version = 204
_output = 'systemd {0}\n-SYSVINIT'.format(_version)
popen_mock.return_value = Mock(communicate=(lambda *args, **kwargs: (_output, None)), pid=(lambda : 12345), retcode=0)
with patch('os.stat', side_effect=_booted_effect):
self.assertEqual(_systemd.has_scope(), _expected)
context = {}
self.assertEqual(_systemd.has_scope(context), _expected)
self.assertEqual(context, {'salt.utils.systemd.booted': True, 'salt.utils.systemd.version': _version})
|
'Scopes are available in systemd>=205. Make sure that this function
returns the expected boolean. We do three separate unit tests for
versions 204 through 206 because mock doesn\'t like us altering the
return_value in a loop.'
| def test_has_scope_systemd205(self):
| with patch('subprocess.Popen') as popen_mock:
_expected = True
_version = 205
_output = 'systemd {0}\n-SYSVINIT'.format(_version)
popen_mock.return_value = Mock(communicate=(lambda *args, **kwargs: (_output, None)), pid=(lambda : 12345), retcode=0)
with patch('os.stat', side_effect=_booted_effect):
self.assertEqual(_systemd.has_scope(), _expected)
context = {}
self.assertEqual(_systemd.has_scope(context), _expected)
self.assertEqual(context, {'salt.utils.systemd.booted': True, 'salt.utils.systemd.version': _version})
|
'Scopes are available in systemd>=205. Make sure that this function
returns the expected boolean. We do three separate unit tests for
versions 204 through 206 because mock doesn\'t like us altering the
return_value in a loop.'
| def test_has_scope_systemd206(self):
| with patch('subprocess.Popen') as popen_mock:
_expected = True
_version = 206
_output = 'systemd {0}\n-SYSVINIT'.format(_version)
popen_mock.return_value = Mock(communicate=(lambda *args, **kwargs: (_output, None)), pid=(lambda : 12345), retcode=0)
with patch('os.stat', side_effect=_booted_effect):
self.assertEqual(_systemd.has_scope(), _expected)
context = {}
self.assertEqual(_systemd.has_scope(context), _expected)
self.assertEqual(context, {'salt.utils.systemd.booted': True, 'salt.utils.systemd.version': _version})
|
'Test the case where the system is not systemd-booted. We should not be
performing a version check in these cases as there is no need.'
| def test_has_scope_no_systemd(self):
| with patch('os.stat', side_effect=_not_booted_effect):
self.assertFalse(_systemd.has_scope())
context = {}
self.assertFalse(_systemd.has_scope(context))
self.assertEqual(context, {'salt.utils.systemd.booted': False})
|
'Test the case where the system is systemd-booted, but we failed to
parse the "systemctl --version" output.'
| def test_has_scope_version_parse_problem(self):
| with patch('subprocess.Popen') as popen_mock:
popen_mock.return_value = Mock(communicate=(lambda *args, **kwargs: ('invalid', None)), pid=(lambda : 12345), retcode=0)
with patch('os.stat', side_effect=_booted_effect):
self.assertFalse(_systemd.has_scope())
context = {}
self.assertFalse(_systemd.has_scope(context))
self.assertEqual(context, {'salt.utils.systemd.booted': True})
|
'Test with invalid context data. The context value must be a dict, so
this should raise a SaltInvocationError.'
| def test_has_scope_invalid_context(self):
| with self.assertRaises(SaltInvocationError):
_systemd.has_scope(99999)
|
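A minimal sketch of the relationship the has_scope tests above encode, reusing the real booted() and version() helpers from salt.utils.systemd: scopes are only available when the host is systemd-booted and the parsed version is at least 205, and a version parse failure (None) means no scopes.

import salt.utils.systemd

def has_scope_sketch(context=None):
    if not salt.utils.systemd.booted(context):
        return False
    version = salt.utils.systemd.version(context)
    return version is not None and version >= 205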
'Test that the helper classes do what we expect within a regular async env'
| @tornado.testing.gen_test
def test_helpers(self):
| ha = HelperA()
ret = (yield ha.sleep())
self.assertTrue(ret)
hb = HelperB()
ret = (yield hb.sleep())
self.assertFalse(ret)
|
'Test that we can wrap an async caller.'
| def test_basic_wrap(self):
| sync = async.SyncWrapper(HelperA)
ret = sync.sleep()
self.assertTrue(ret)
|
'Test when the async wrapper object itself creates a wrap of another thing
This works fine since the second wrap is based on the first\'s IOLoop so we
don\'t have to worry about complex start/stop mechanics'
| def test_double(self):
| sync = async.SyncWrapper(HelperB)
ret = sync.sleep()
self.assertFalse(ret)
|
'Test async wrappers initiated from the same IOLoop, to ensure that
we don\'t wire up both to the same IOLoop (since it causes MANY problems).'
| def test_double_sameloop(self):
| a = async.SyncWrapper(HelperA)
sync = async.SyncWrapper(HelperB, (a,))
ret = sync.sleep()
self.assertFalse(ret)
|
'NOTE: When this test fails it\'s usually because the IP address has
changed. In these cases, we just need to update the IP address in the
assertion.'
| def test_host_to_ips(self):
| def _side_effect(host, *args):
try:
return {'github.com': [(2, 1, 6, '', ('192.30.255.112', 0)), (2, 1, 6, '', ('192.30.255.113', 0))], 'ipv6host.foo': [(10, 1, 6, '', ('2001:a71::1', 0, 0, 0))]}[host]
except KeyError:
raise socket.gaierror((-2), 'Name or service not known')
getaddrinfo_mock = MagicMock(side_effect=_side_effect)
with patch.object(socket, 'getaddrinfo', getaddrinfo_mock):
ret = network.host_to_ips('github.com')
self.assertEqual(ret, ['192.30.255.112', '192.30.255.113'])
ret = network.host_to_ips('ipv6host.foo')
self.assertEqual(ret, ['2001:a71::1'])
ret = network.host_to_ips('someothersite.com')
self.assertEqual(ret, None)
|
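A minimal sketch of the resolution behaviour asserted above, assuming socket.getaddrinfo is queried for TCP sockets and the answers are de-duplicated; an unresolvable host (socket.gaierror) or an empty answer yields None.

import socket

def host_to_ips_sketch(host):
    ips = []
    try:
        for _family, _type, _proto, _canonname, sockaddr in socket.getaddrinfo(
                host, 0, socket.AF_UNSPEC, socket.SOCK_STREAM):
            ip_addr = sockaddr[0]   # first element is the address for IPv4 and IPv6 alike
            if ip_addr not in ips:
                ips.append(ip_addr)
    except socket.gaierror:
        pass
    return ips or None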
'Test if minion IDs are distinct in the pool.
:return:'
| def test_generate_minion_id_distinct(self):
| with patch('platform.node', MagicMock(return_value='nodename')):
with patch('socket.gethostname', MagicMock(return_value='hostname')):
with patch('socket.getfqdn', MagicMock(return_value='hostname.domainname.blank')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'attrname', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8'])):
self.assertEqual(network._generate_minion_id(), ['hostname.domainname.blank', 'nodename', 'hostname', '1.2.3.4', '5.6.7.8'])
|
'Test if minion IDs can be named 127.foo
:return:'
| def test_generate_minion_id_127_name(self):
| with patch('platform.node', MagicMock(return_value='127')):
with patch('socket.gethostname', MagicMock(return_value='127')):
with patch('socket.getfqdn', MagicMock(return_value='127.domainname.blank')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'attrname', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8'])):
self.assertEqual(network._generate_minion_id(), ['127.domainname.blank', '127', '1.2.3.4', '5.6.7.8'])
|
'Test if minion IDs can be named starting from "127"
:return:'
| def test_generate_minion_id_127_name_startswith(self):
| with patch('platform.node', MagicMock(return_value='127890')):
with patch('socket.gethostname', MagicMock(return_value='127890')):
with patch('socket.getfqdn', MagicMock(return_value='127890.domainname.blank')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'attrname', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8'])):
self.assertEqual(network._generate_minion_id(), ['127890.domainname.blank', '127890', '1.2.3.4', '5.6.7.8'])
|
'Test if IP addresses in the minion IDs are distinct in the pool
:return:'
| def test_generate_minion_id_duplicate(self):
| with patch('platform.node', MagicMock(return_value='hostname')):
with patch('socket.gethostname', MagicMock(return_value='hostname')):
with patch('socket.getfqdn', MagicMock(return_value='hostname')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'hostname', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '1.2.3.4', '1.2.3.4'])):
self.assertEqual(network._generate_minion_id(), ['hostname', '1.2.3.4'])
|
'Test that platform.node is used for the first candidate.
platform.node is the most common hostname resolver, so it is consulted before anything else.
:return:'
| def test_generate_minion_id_platform_used(self):
| with patch('platform.node', MagicMock(return_value='very.long.and.complex.domain.name')):
with patch('socket.gethostname', MagicMock(return_value='hostname')):
with patch('socket.getfqdn', MagicMock(return_value='')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'hostname', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '1.2.3.4', '1.2.3.4'])):
self.assertEqual(network.generate_minion_id(), 'very.long.and.complex.domain.name')
|
'Test if localhost is filtered from the first occurrence.
:return:'
| def test_generate_minion_id_platform_localhost_filtered(self):
| with patch('platform.node', MagicMock(return_value='localhost')):
with patch('socket.gethostname', MagicMock(return_value='pick.me')):
with patch('socket.getfqdn', MagicMock(return_value='hostname.domainname.blank')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'hostname', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '1.2.3.4', '1.2.3.4'])):
self.assertEqual(network.generate_minion_id(), 'hostname.domainname.blank')
|
'Test that localhost-like names are filtered out of every source.
:return:'
| def test_generate_minion_id_platform_localhost_filtered_all(self):
| with patch('platform.node', MagicMock(return_value='localhost')):
with patch('socket.gethostname', MagicMock(return_value='ip6-loopback')):
with patch('socket.getfqdn', MagicMock(return_value='ip6-localhost')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'localhost', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['127.0.0.1', '::1', 'fe00::0', 'fe02::1', '1.2.3.4'])):
self.assertEqual(network.generate_minion_id(), '1.2.3.4')
|
'Test if there is no other choice but localhost.
:return:'
| def test_generate_minion_id_platform_localhost_only(self):
| with patch('platform.node', MagicMock(return_value='localhost')):
with patch('socket.gethostname', MagicMock(return_value='ip6-loopback')):
with patch('socket.getfqdn', MagicMock(return_value='ip6-localhost')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'localhost', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['127.0.0.1', '::1', 'fe00::0', 'fe02::1'])):
self.assertEqual(network.generate_minion_id(), 'localhost')
|
'Test if fqdn is picked up.
:return:'
| def test_generate_minion_id_platform_fqdn(self):
| with patch('platform.node', MagicMock(return_value='localhost')):
with patch('socket.gethostname', MagicMock(return_value='ip6-loopback')):
with patch('socket.getfqdn', MagicMock(return_value='pick.me')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'localhost', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['127.0.0.1', '::1', 'fe00::0', 'fe02::1'])):
self.assertEqual(network.generate_minion_id(), 'pick.me')
|
'Test if addrinfo is picked up.
:return:'
| def test_generate_minion_id_platform_localhost_addrinfo(self):
| with patch('platform.node', MagicMock(return_value='localhost')):
with patch('socket.gethostname', MagicMock(return_value='ip6-loopback')):
with patch('socket.getfqdn', MagicMock(return_value='ip6-localhost')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'pick.me', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['127.0.0.1', '::1', 'fe00::0', 'fe02::1'])):
self.assertEqual(network.generate_minion_id(), 'pick.me')
|
'Test that the IP address is used as the minion ID when no DNS name is available.
:return:'
| def test_generate_minion_id_platform_ip_addr_only(self):
| with patch('platform.node', MagicMock(return_value='localhost')):
with patch('socket.gethostname', MagicMock(return_value='ip6-loopback')):
with patch('socket.getfqdn', MagicMock(return_value='ip6-localhost')):
with patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'localhost', ('127.0.1.1', 0))])):
with patch('salt.utils.files.fopen', MagicMock(return_value=False)):
with patch('os.path.exists', MagicMock(return_value=False)):
with patch('salt.utils.network.ip_addrs', MagicMock(return_value=['127.0.0.1', '::1', 'fe00::0', 'fe02::1', '1.2.3.4'])):
self.assertEqual(network.generate_minion_id(), '1.2.3.4')
|
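A minimal sketch of the candidate ordering and filtering the minion-id tests above describe: gather the FQDN, platform node and hostname, drop localhost-like names, de-duplicate, append routable IP addresses while skipping loopback/link-local ones, and fall back to 'localhost' only when nothing else survives. The real _generate_minion_id also consults getaddrinfo canonical names and files such as /etc/hostname, which this sketch omits.

import platform
import socket

LOCAL_NAMES = ('localhost', 'ip6-localhost', 'ip6-loopback', '')
LOCAL_ADDRS = ('127.0.0.1', '::1', 'fe00::0', 'fe02::1')

def minion_id_candidates_sketch(ip_addrs):
    candidates = []
    for name in (socket.getfqdn(), platform.node(), socket.gethostname()):
        if name not in LOCAL_NAMES and name not in candidates:
            candidates.append(name)
    for addr in ip_addrs:
        if addr not in LOCAL_ADDRS and addr not in candidates:
            candidates.append(addr)
    return candidates or ['localhost']

def generate_minion_id_sketch(ip_addrs):
    # the minion id is simply the first (most specific) surviving candidate
    return minion_id_candidates_sketch(ip_addrs)[0]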
'Make sure that the process is alive 2s later'
| def test_basic(self):
| def spin():
salt.utils.appendproctitle('test_basic')
while True:
time.sleep(1)
process_manager = salt.utils.process.ProcessManager()
process_manager.add_process(spin)
initial_pid = next(six.iterkeys(process_manager._process_map))
time.sleep(2)
process_manager.check_children()
try:
assert (initial_pid == next(six.iterkeys(process_manager._process_map)))
finally:
process_manager.stop_restarting()
process_manager.kill_children()
time.sleep(0.5)
if process_manager._process_map.keys():
process_manager.send_signal_to_processes(signal.SIGKILL)
process_manager.stop_restarting()
process_manager.kill_children()
|
'Make sure that a dead process is restarted with a new PID 2s later'
| def test_restarting(self):
| def die():
salt.utils.appendproctitle('test_restarting')
process_manager = salt.utils.process.ProcessManager()
process_manager.add_process(die)
initial_pid = next(six.iterkeys(process_manager._process_map))
time.sleep(2)
process_manager.check_children()
try:
assert (initial_pid != next(six.iterkeys(process_manager._process_map)))
finally:
process_manager.stop_restarting()
process_manager.kill_children()
time.sleep(0.5)
if process_manager._process_map.keys():
process_manager.send_signal_to_processes(signal.SIGKILL)
process_manager.stop_restarting()
process_manager.kill_children()
|
'Make sure the threadpool executes a queued job'
| def test_basic(self):
| def incr_counter(counter):
counter.value += 1
counter = multiprocessing.Value('i', 0)
pool = salt.utils.process.ThreadPool()
sent = pool.fire_async(incr_counter, args=(counter,))
self.assertTrue(sent)
time.sleep(1)
self.assertEqual(counter.value, 1)
self.assertEqual(pool._job_queue.qsize(), 0)
|
'Make sure that a full threadpool acts as we expect'
| def test_full_queue(self):
| def incr_counter(counter):
counter.value += 1
counter = multiprocessing.Value('i', 0)
pool = salt.utils.process.ThreadPool(0, 1)
sent = pool.fire_async(incr_counter, args=(counter,))
self.assertTrue(sent)
sent = pool.fire_async(incr_counter, args=(counter,))
self.assertFalse(sent)
time.sleep(1)
self.assertEqual(counter.value, 0)
self.assertEqual(pool._job_queue.qsize(), 1)
|
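A minimal sketch of the bounded fire-and-forget pool behaviour the two tests above rely on: worker threads drain a job queue, and fire_async reports False instead of blocking when the queue is already full. The constructor signature mirrors the ThreadPool(num_threads, queue_size) calls in the tests.

import threading
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2

class ThreadPoolSketch(object):
    def __init__(self, num_threads=1, queue_size=0):
        # queue_size of 0 means "unbounded", matching queue.Queue semantics
        self._job_queue = queue.Queue(queue_size)
        for _ in range(num_threads):
            worker = threading.Thread(target=self._thread_target)
            worker.daemon = True
            worker.start()

    def fire_async(self, func, args=None, kwargs=None):
        try:
            self._job_queue.put_nowait((func, args or (), kwargs or {}))
            return True
        except queue.Full:
            return False

    def _thread_target(self):
        while True:
            func, args, kwargs = self._job_queue.get()
            try:
                func(*args, **kwargs)
            finally:
                self._job_queue.task_done()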
'Tests sanitizing a url when the hide_fields kwarg is None.'
| def test_sanitize_url_hide_fields_none(self):
| mock_url = 'https://api.testing.com/?&foo=bar&test=testing'
ret = http.sanitize_url(mock_url, hide_fields=None)
self.assertEqual(ret, mock_url)
|
'Tests sanitizing a url when no elements should be sanitized.'
| def test_sanitize_url_no_elements(self):
| mock_url = 'https://api.testing.com/?&foo=bar&test=testing'
ret = http.sanitize_url(mock_url, [''])
self.assertEqual(ret, mock_url)
|
'Tests sanitizing a url with only a single element to be sanitized.'
| def test_sanitize_url_single_element(self):
| mock_url = 'https://api.testing.com/?&keep_it_secret=abcdefghijklmn&api_action=module.function'
mock_ret = 'https://api.testing.com/?&keep_it_secret=XXXXXXXXXX&api_action=module.function'
ret = http.sanitize_url(mock_url, ['keep_it_secret'])
self.assertEqual(ret, mock_ret)
|
'Tests sanitizing a url with multiple elements to be sanitized.'
| def test_sanitize_url_multiple_elements(self):
| mock_url = 'https://api.testing.com/?rootPass=badpassword%21&skipChecks=True&api_key=abcdefghijklmn&NodeID=12345&api_action=module.function'
mock_ret = 'https://api.testing.com/?rootPass=XXXXXXXXXX&skipChecks=True&api_key=XXXXXXXXXX&NodeID=12345&api_action=module.function'
ret = http.sanitize_url(mock_url, ['api_key', 'rootPass'])
self.assertEqual(ret, mock_ret)
|
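A minimal sketch consistent with the sanitize_url expectations above: walk the '&'-separated components, and when a component's key (ignoring anything before a '?') is listed in hide_fields, replace its value with a fixed 'XXXXXXXXXX' mask. The mask string is taken from the expected values in these tests.

def sanitize_url_sketch(url, hide_fields=None):
    if not hide_fields:
        return url
    sanitized = []
    for component in url.split('&'):
        key, sep, _value = component.partition('=')
        if sep and key.rsplit('?', 1)[-1] in hide_fields:
            component = key + '=XXXXXXXXXX'
        sanitized.append(component)
    return '&'.join(sanitized)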
'Tests when zero elements need to be sanitized.'
| def test_sanitize_components_no_elements(self):
| mock_component_list = ['foo=bar', 'bar=baz', 'hello=world']
mock_ret = 'foo=bar&bar=baz&hello=world&'
ret = http._sanitize_url_components(mock_component_list, 'api_key')
self.assertEqual(ret, mock_ret)
|