body_hash
stringlengths 64
64
| body
stringlengths 23
109k
| docstring
stringlengths 1
57k
| path
stringlengths 4
198
| name
stringlengths 1
115
| repository_name
stringlengths 7
111
| repository_stars
float64 0
191k
| lang
stringclasses 1
value | body_without_docstring
stringlengths 14
108k
| unified
stringlengths 45
133k
|
---|---|---|---|---|---|---|---|---|---|
148da82e30401d00dfbec24c1a53b548c1f1c91c99cfd9e099ee1c0eb87d6100 | def test_loading_settings_onto_instrument(self):
'\n Tests storing and reading of parameters.\n Tests for different types of parameters including\n - array\n - float\n - int\n - dict\n - bool\n '
arr = np.linspace(12, 42, 11)
self.mock_parabola.array_like(arr)
self.mock_parabola.x(42.23)
self.mock_parabola.y(2)
self.mock_parabola.status(True)
self.mock_parabola.dict_like({'a': {'b': [2, 3, 5]}})
self.mock_parabola.nested_lists_like([[1], [8, 9]])
self.mock_parabola.complex_like((1.0 + 4j))
self.MC.set_sweep_function(self.mock_parabola.x)
self.MC.set_sweep_points([0, 1])
self.MC.set_detector_function(self.mock_parabola.skewed_parabola)
self.MC.run('test_MC_snapshot_storing')
self.mock_parabola.array_like((arr + 5))
self.mock_parabola.x(13)
np.testing.assert_array_equal(self.mock_parabola.array_like(), (arr + 5))
self.assertEqual(self.mock_parabola.x(), 13)
gen.load_settings_onto_instrument_v2(self.mock_parabola, label='test_MC_snapshot_storing')
np.testing.assert_array_equal(self.mock_parabola.array_like(), arr)
self.assertEqual(self.mock_parabola.x(), 42.23)
gen.load_settings_onto_instrument_v2(self.mock_parabola_2, load_from_instr=self.mock_parabola.name, label='test_MC_snapshot_storing')
self.assertEqual(self.mock_parabola_2.y(), 2)
self.assertEqual(self.mock_parabola_2.status(), True)
self.assertEqual(self.mock_parabola_2.dict_like(), {'a': {'b': [2, 3, 5]}})
self.assertEqual(self.mock_parabola_2.nested_lists_like(), [[1], [8, 9]])
self.assertEqual(self.mock_parabola_2.complex_like(), (1.0 + 4j)) | Tests storing and reading of parameters.
Tests for different types of parameters including
- array
- float
- int
- dict
- bool | pycqed/tests/test_hdf5_datasaving_loading.py | test_loading_settings_onto_instrument | nuttamas/PycQED_py3 | 60 | python | def test_loading_settings_onto_instrument(self):
'\n Tests storing and reading of parameters.\n Tests for different types of parameters including\n - array\n - float\n - int\n - dict\n - bool\n '
arr = np.linspace(12, 42, 11)
self.mock_parabola.array_like(arr)
self.mock_parabola.x(42.23)
self.mock_parabola.y(2)
self.mock_parabola.status(True)
self.mock_parabola.dict_like({'a': {'b': [2, 3, 5]}})
self.mock_parabola.nested_lists_like([[1], [8, 9]])
self.mock_parabola.complex_like((1.0 + 4j))
self.MC.set_sweep_function(self.mock_parabola.x)
self.MC.set_sweep_points([0, 1])
self.MC.set_detector_function(self.mock_parabola.skewed_parabola)
self.MC.run('test_MC_snapshot_storing')
self.mock_parabola.array_like((arr + 5))
self.mock_parabola.x(13)
np.testing.assert_array_equal(self.mock_parabola.array_like(), (arr + 5))
self.assertEqual(self.mock_parabola.x(), 13)
gen.load_settings_onto_instrument_v2(self.mock_parabola, label='test_MC_snapshot_storing')
np.testing.assert_array_equal(self.mock_parabola.array_like(), arr)
self.assertEqual(self.mock_parabola.x(), 42.23)
gen.load_settings_onto_instrument_v2(self.mock_parabola_2, load_from_instr=self.mock_parabola.name, label='test_MC_snapshot_storing')
self.assertEqual(self.mock_parabola_2.y(), 2)
self.assertEqual(self.mock_parabola_2.status(), True)
self.assertEqual(self.mock_parabola_2.dict_like(), {'a': {'b': [2, 3, 5]}})
self.assertEqual(self.mock_parabola_2.nested_lists_like(), [[1], [8, 9]])
self.assertEqual(self.mock_parabola_2.complex_like(), (1.0 + 4j)) | def test_loading_settings_onto_instrument(self):
'\n Tests storing and reading of parameters.\n Tests for different types of parameters including\n - array\n - float\n - int\n - dict\n - bool\n '
arr = np.linspace(12, 42, 11)
self.mock_parabola.array_like(arr)
self.mock_parabola.x(42.23)
self.mock_parabola.y(2)
self.mock_parabola.status(True)
self.mock_parabola.dict_like({'a': {'b': [2, 3, 5]}})
self.mock_parabola.nested_lists_like([[1], [8, 9]])
self.mock_parabola.complex_like((1.0 + 4j))
self.MC.set_sweep_function(self.mock_parabola.x)
self.MC.set_sweep_points([0, 1])
self.MC.set_detector_function(self.mock_parabola.skewed_parabola)
self.MC.run('test_MC_snapshot_storing')
self.mock_parabola.array_like((arr + 5))
self.mock_parabola.x(13)
np.testing.assert_array_equal(self.mock_parabola.array_like(), (arr + 5))
self.assertEqual(self.mock_parabola.x(), 13)
gen.load_settings_onto_instrument_v2(self.mock_parabola, label='test_MC_snapshot_storing')
np.testing.assert_array_equal(self.mock_parabola.array_like(), arr)
self.assertEqual(self.mock_parabola.x(), 42.23)
gen.load_settings_onto_instrument_v2(self.mock_parabola_2, load_from_instr=self.mock_parabola.name, label='test_MC_snapshot_storing')
self.assertEqual(self.mock_parabola_2.y(), 2)
self.assertEqual(self.mock_parabola_2.status(), True)
self.assertEqual(self.mock_parabola_2.dict_like(), {'a': {'b': [2, 3, 5]}})
self.assertEqual(self.mock_parabola_2.nested_lists_like(), [[1], [8, 9]])
self.assertEqual(self.mock_parabola_2.complex_like(), (1.0 + 4j))<|docstring|>Tests storing and reading of parameters.
Tests for different types of parameters including
- array
- float
- int
- dict
- bool<|endoftext|> |
73a81367dd9e5a88045ef947c3b9231a99783f5eed7f1ba879db778df6dc8e88 | def setUp(self):
'Sets up the needed objects used throughout the test.'
self._resolver_context = context.Context()
test_file = self._GetTestFilePath(['syslog.rc4'])
self._SkipIfPathNotExists(test_file)
path_spec = os_path_spec.OSPathSpec(location=test_file)
self._encrypted_stream_path_spec = encrypted_stream_path_spec.EncryptedStreamPathSpec(encryption_method=definitions.ENCRYPTION_METHOD_RC4, parent=path_spec)
resolver.Resolver.key_chain.SetCredential(self._encrypted_stream_path_spec, 'key', self._RC4_KEY) | Sets up the needed objects used throughout the test. | tests/vfs/encrypted_stream_file_system.py | setUp | Acidburn0zzz/dfvfs | 1 | python | def setUp(self):
self._resolver_context = context.Context()
test_file = self._GetTestFilePath(['syslog.rc4'])
self._SkipIfPathNotExists(test_file)
path_spec = os_path_spec.OSPathSpec(location=test_file)
self._encrypted_stream_path_spec = encrypted_stream_path_spec.EncryptedStreamPathSpec(encryption_method=definitions.ENCRYPTION_METHOD_RC4, parent=path_spec)
resolver.Resolver.key_chain.SetCredential(self._encrypted_stream_path_spec, 'key', self._RC4_KEY) | def setUp(self):
self._resolver_context = context.Context()
test_file = self._GetTestFilePath(['syslog.rc4'])
self._SkipIfPathNotExists(test_file)
path_spec = os_path_spec.OSPathSpec(location=test_file)
self._encrypted_stream_path_spec = encrypted_stream_path_spec.EncryptedStreamPathSpec(encryption_method=definitions.ENCRYPTION_METHOD_RC4, parent=path_spec)
resolver.Resolver.key_chain.SetCredential(self._encrypted_stream_path_spec, 'key', self._RC4_KEY)<|docstring|>Sets up the needed objects used throughout the test.<|endoftext|> |
e25a5060d1400f78356cbc8b45a763dc0c04266e543839aa6c585b36ee900a81 | def testOpenAndClose(self):
'Test the open and close functionality.'
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_system.Close() | Test the open and close functionality. | tests/vfs/encrypted_stream_file_system.py | testOpenAndClose | Acidburn0zzz/dfvfs | 1 | python | def testOpenAndClose(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_system.Close() | def testOpenAndClose(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_system.Close()<|docstring|>Test the open and close functionality.<|endoftext|> |
4560a2544a99e8e1e1387a1beb755e29240474922d47d711ff76ce5ce47dc146 | def testFileEntryExistsByPathSpec(self):
'Test the file entry exists by path specification functionality.'
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
self.assertTrue(file_system.FileEntryExistsByPathSpec(self._encrypted_stream_path_spec))
file_system.Close() | Test the file entry exists by path specification functionality. | tests/vfs/encrypted_stream_file_system.py | testFileEntryExistsByPathSpec | Acidburn0zzz/dfvfs | 1 | python | def testFileEntryExistsByPathSpec(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
self.assertTrue(file_system.FileEntryExistsByPathSpec(self._encrypted_stream_path_spec))
file_system.Close() | def testFileEntryExistsByPathSpec(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
self.assertTrue(file_system.FileEntryExistsByPathSpec(self._encrypted_stream_path_spec))
file_system.Close()<|docstring|>Test the file entry exists by path specification functionality.<|endoftext|> |
441a0a84c3bff9cda99d952aeb8624b47d1c6a02fc2f6c60bbbab95e4f591590 | def testGetFileEntryByPathSpec(self):
'Tests the GetFileEntryByPathSpec function.'
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_entry = file_system.GetFileEntryByPathSpec(self._encrypted_stream_path_spec)
self.assertIsNotNone(file_entry)
self.assertEqual(file_entry.name, '')
file_system.Close() | Tests the GetFileEntryByPathSpec function. | tests/vfs/encrypted_stream_file_system.py | testGetFileEntryByPathSpec | Acidburn0zzz/dfvfs | 1 | python | def testGetFileEntryByPathSpec(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_entry = file_system.GetFileEntryByPathSpec(self._encrypted_stream_path_spec)
self.assertIsNotNone(file_entry)
self.assertEqual(file_entry.name, )
file_system.Close() | def testGetFileEntryByPathSpec(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_entry = file_system.GetFileEntryByPathSpec(self._encrypted_stream_path_spec)
self.assertIsNotNone(file_entry)
self.assertEqual(file_entry.name, )
file_system.Close()<|docstring|>Tests the GetFileEntryByPathSpec function.<|endoftext|> |
50d59fb99f8a5d018e497b0a1e32859639562c89bd2cbd522a837c14ba5d3e4c | def testGetRootFileEntry(self):
'Test the get root file entry functionality.'
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_entry = file_system.GetRootFileEntry()
self.assertIsNotNone(file_entry)
self.assertEqual(file_entry.name, '')
file_system.Close() | Test the get root file entry functionality. | tests/vfs/encrypted_stream_file_system.py | testGetRootFileEntry | Acidburn0zzz/dfvfs | 1 | python | def testGetRootFileEntry(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_entry = file_system.GetRootFileEntry()
self.assertIsNotNone(file_entry)
self.assertEqual(file_entry.name, )
file_system.Close() | def testGetRootFileEntry(self):
file_system = encrypted_stream_file_system.EncryptedStreamFileSystem(self._resolver_context)
self.assertIsNotNone(file_system)
file_system.Open(self._encrypted_stream_path_spec)
file_entry = file_system.GetRootFileEntry()
self.assertIsNotNone(file_entry)
self.assertEqual(file_entry.name, )
file_system.Close()<|docstring|>Test the get root file entry functionality.<|endoftext|> |
7aedc32b522a89b1a95847871ea5e6310615d394da3f047438350b50358d2c5f | def g_r(wr, Y, weights, q, lambda_h, lambda_J, r):
'optimized version of g_r_slow'
(B, N) = Y.shape
h_r = wr[:q]
J_r = np.reshape(wr[q:], (q, q, (N - 1)))
indices = np.concatenate([np.arange(r), np.arange((r + 1), N)])
fval = 0
for i in range(B):
seq = Y[(i, indices)]
logPot = (h_r + np.sum(J_r[(:, seq, np.arange((N - 1)))], axis=1))
z = np.sum(np.exp(logPot))
fval += (weights[i] * (np.log(z) - logPot[Y[(i, r)]]))
fval += ((lambda_h * np.sum((h_r ** 2))) + (lambda_J * np.sum((J_r ** 2))))
return fval | optimized version of g_r_slow | scripts/plmDCA.py | g_r | bhedelius/Python-plmDCA | 0 | python | def g_r(wr, Y, weights, q, lambda_h, lambda_J, r):
(B, N) = Y.shape
h_r = wr[:q]
J_r = np.reshape(wr[q:], (q, q, (N - 1)))
indices = np.concatenate([np.arange(r), np.arange((r + 1), N)])
fval = 0
for i in range(B):
seq = Y[(i, indices)]
logPot = (h_r + np.sum(J_r[(:, seq, np.arange((N - 1)))], axis=1))
z = np.sum(np.exp(logPot))
fval += (weights[i] * (np.log(z) - logPot[Y[(i, r)]]))
fval += ((lambda_h * np.sum((h_r ** 2))) + (lambda_J * np.sum((J_r ** 2))))
return fval | def g_r(wr, Y, weights, q, lambda_h, lambda_J, r):
(B, N) = Y.shape
h_r = wr[:q]
J_r = np.reshape(wr[q:], (q, q, (N - 1)))
indices = np.concatenate([np.arange(r), np.arange((r + 1), N)])
fval = 0
for i in range(B):
seq = Y[(i, indices)]
logPot = (h_r + np.sum(J_r[(:, seq, np.arange((N - 1)))], axis=1))
z = np.sum(np.exp(logPot))
fval += (weights[i] * (np.log(z) - logPot[Y[(i, r)]]))
fval += ((lambda_h * np.sum((h_r ** 2))) + (lambda_J * np.sum((J_r ** 2))))
return fval<|docstring|>optimized version of g_r_slow<|endoftext|> |
05d94dc7e590bc78608ec3e2ecc47ddf29d03814a5ebfed09f23a55493986fa1 | def g_r_grad(wr, Y, weights, q, lambda_h, lambda_J, r):
'optimized version of the gradient of g_r_slow'
(B, N) = Y.shape
h_r = wr[:q]
J_r = np.reshape(wr[q:], (q, q, (N - 1)))
indices = np.concatenate([np.arange(r), np.arange((r + 1), N)])
grad1 = np.zeros(q)
grad2 = np.zeros((q, q, (N - 1)))
for i in range(B):
seq = Y[(i, indices)]
logPot = (h_r + np.sum(J_r[(:, seq, np.arange((N - 1)))], axis=1))
z = np.sum(np.exp(logPot))
nodeBel = np.exp((logPot - np.log(z)))
grad1[Y[(i, r)]] -= weights[i]
grad1 += np.sum((weights[i] * nodeBel))
for n in range((N - 1)):
grad2[(Y[(i, r)], Y[(i, indices[n])], n)] -= weights[i]
grad2[(:, Y[(i, indices[n])], n)] += (weights[i] * nodeBel)
grad1 += ((lambda_h * 2) * np.sum(h_r))
grad2 += ((lambda_J * 2) * J_r)
grad = np.concatenate([grad1.flatten(), grad2.flatten()])
return grad | optimized version of the gradient of g_r_slow | scripts/plmDCA.py | g_r_grad | bhedelius/Python-plmDCA | 0 | python | def g_r_grad(wr, Y, weights, q, lambda_h, lambda_J, r):
(B, N) = Y.shape
h_r = wr[:q]
J_r = np.reshape(wr[q:], (q, q, (N - 1)))
indices = np.concatenate([np.arange(r), np.arange((r + 1), N)])
grad1 = np.zeros(q)
grad2 = np.zeros((q, q, (N - 1)))
for i in range(B):
seq = Y[(i, indices)]
logPot = (h_r + np.sum(J_r[(:, seq, np.arange((N - 1)))], axis=1))
z = np.sum(np.exp(logPot))
nodeBel = np.exp((logPot - np.log(z)))
grad1[Y[(i, r)]] -= weights[i]
grad1 += np.sum((weights[i] * nodeBel))
for n in range((N - 1)):
grad2[(Y[(i, r)], Y[(i, indices[n])], n)] -= weights[i]
grad2[(:, Y[(i, indices[n])], n)] += (weights[i] * nodeBel)
grad1 += ((lambda_h * 2) * np.sum(h_r))
grad2 += ((lambda_J * 2) * J_r)
grad = np.concatenate([grad1.flatten(), grad2.flatten()])
return grad | def g_r_grad(wr, Y, weights, q, lambda_h, lambda_J, r):
(B, N) = Y.shape
h_r = wr[:q]
J_r = np.reshape(wr[q:], (q, q, (N - 1)))
indices = np.concatenate([np.arange(r), np.arange((r + 1), N)])
grad1 = np.zeros(q)
grad2 = np.zeros((q, q, (N - 1)))
for i in range(B):
seq = Y[(i, indices)]
logPot = (h_r + np.sum(J_r[(:, seq, np.arange((N - 1)))], axis=1))
z = np.sum(np.exp(logPot))
nodeBel = np.exp((logPot - np.log(z)))
grad1[Y[(i, r)]] -= weights[i]
grad1 += np.sum((weights[i] * nodeBel))
for n in range((N - 1)):
grad2[(Y[(i, r)], Y[(i, indices[n])], n)] -= weights[i]
grad2[(:, Y[(i, indices[n])], n)] += (weights[i] * nodeBel)
grad1 += ((lambda_h * 2) * np.sum(h_r))
grad2 += ((lambda_J * 2) * J_r)
grad = np.concatenate([grad1.flatten(), grad2.flatten()])
return grad<|docstring|>optimized version of the gradient of g_r_slow<|endoftext|> |
64f035ae1b12aef8d37e644b781ecd49958cdeddc817b2f6b78eea933d84a930 | def delete_student_section_attendance_event_by_id(self, id, **kwargs):
"Deletes an existing resource using the resource identifier. # noqa: E501\n\n The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_student_section_attendance_event_by_id(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_student_section_attendance_event_by_id_with_http_info(id, **kwargs)
else:
data = self.delete_student_section_attendance_event_by_id_with_http_info(id, **kwargs)
return data | Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_student_section_attendance_event_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | delete_student_section_attendance_event_by_id | xmarcosx/edfi-notebook | 2 | python | def delete_student_section_attendance_event_by_id(self, id, **kwargs):
"Deletes an existing resource using the resource identifier. # noqa: E501\n\n The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_student_section_attendance_event_by_id(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_student_section_attendance_event_by_id_with_http_info(id, **kwargs)
else:
data = self.delete_student_section_attendance_event_by_id_with_http_info(id, **kwargs)
return data | def delete_student_section_attendance_event_by_id(self, id, **kwargs):
"Deletes an existing resource using the resource identifier. # noqa: E501\n\n The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_student_section_attendance_event_by_id(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_student_section_attendance_event_by_id_with_http_info(id, **kwargs)
else:
data = self.delete_student_section_attendance_event_by_id_with_http_info(id, **kwargs)
return data<|docstring|>Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_student_section_attendance_event_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
67b46b6bc1ef47d14f72a6a0d19b4179171c182196ec7d4041e6aacd514b9b4b | def delete_student_section_attendance_event_by_id_with_http_info(self, id, **kwargs):
"Deletes an existing resource using the resource identifier. # noqa: E501\n\n The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_student_section_attendance_event_by_id_with_http_info(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['id', 'if_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method delete_student_section_attendance_event_by_id" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `delete_student_section_attendance_event_by_id`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_match' in params):
header_params['If-Match'] = params['if_match']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_student_section_attendance_event_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | delete_student_section_attendance_event_by_id_with_http_info | xmarcosx/edfi-notebook | 2 | python | def delete_student_section_attendance_event_by_id_with_http_info(self, id, **kwargs):
"Deletes an existing resource using the resource identifier. # noqa: E501\n\n The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_student_section_attendance_event_by_id_with_http_info(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['id', 'if_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method delete_student_section_attendance_event_by_id" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `delete_student_section_attendance_event_by_id`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_match' in params):
header_params['If-Match'] = params['if_match']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | def delete_student_section_attendance_event_by_id_with_http_info(self, id, **kwargs):
"Deletes an existing resource using the resource identifier. # noqa: E501\n\n The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_student_section_attendance_event_by_id_with_http_info(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['id', 'if_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method delete_student_section_attendance_event_by_id" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `delete_student_section_attendance_event_by_id`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_match' in params):
header_params['If-Match'] = params['if_match']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_student_section_attendance_event_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
3a792616ea3bcd0c3cd5db6b87556573d930b972e8ab6fd0225e4f2e1804b0f1 | def deletes_student_section_attendance_events(self, **kwargs):
'Retrieves deleted resources based on change version. # noqa: E501\n\n The DELETES operation is used to retrieve deleted resources. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.deletes_student_section_attendance_events(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deletes_student_section_attendance_events_with_http_info(**kwargs)
else:
data = self.deletes_student_section_attendance_events_with_http_info(**kwargs)
return data | Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_student_section_attendance_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | deletes_student_section_attendance_events | xmarcosx/edfi-notebook | 2 | python | def deletes_student_section_attendance_events(self, **kwargs):
'Retrieves deleted resources based on change version. # noqa: E501\n\n The DELETES operation is used to retrieve deleted resources. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.deletes_student_section_attendance_events(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deletes_student_section_attendance_events_with_http_info(**kwargs)
else:
data = self.deletes_student_section_attendance_events_with_http_info(**kwargs)
return data | def deletes_student_section_attendance_events(self, **kwargs):
'Retrieves deleted resources based on change version. # noqa: E501\n\n The DELETES operation is used to retrieve deleted resources. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.deletes_student_section_attendance_events(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deletes_student_section_attendance_events_with_http_info(**kwargs)
else:
data = self.deletes_student_section_attendance_events_with_http_info(**kwargs)
return data<|docstring|>Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_student_section_attendance_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
70be9e917856aee922c901e17c77a8d63f03256ec394bb44fb74e39d1744966b | def deletes_student_section_attendance_events_with_http_info(self, **kwargs):
'Retrieves deleted resources based on change version. # noqa: E501\n\n The DELETES operation is used to retrieve deleted resources. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.deletes_student_section_attendance_events_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method deletes_student_section_attendance_events" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] > 500))):
raise ValueError('Invalid value for parameter `limit` when calling `deletes_student_section_attendance_events`, must be a value less than or equal to `500`')
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] < 0))):
raise ValueError('Invalid value for parameter `limit` when calling `deletes_student_section_attendance_events`, must be a value greater than or equal to `0`')
collection_formats = {}
path_params = {}
query_params = []
if ('offset' in params):
query_params.append(('offset', params['offset']))
if ('limit' in params):
query_params.append(('limit', params['limit']))
if ('min_change_version' in params):
query_params.append(('minChangeVersion', params['min_change_version']))
if ('max_change_version' in params):
query_params.append(('maxChangeVersion', params['max_change_version']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/deletes', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[EdFiStudentSectionAttendanceEvent]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_student_section_attendance_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | deletes_student_section_attendance_events_with_http_info | xmarcosx/edfi-notebook | 2 | python | def deletes_student_section_attendance_events_with_http_info(self, **kwargs):
'Retrieves deleted resources based on change version. # noqa: E501\n\n The DELETES operation is used to retrieve deleted resources. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.deletes_student_section_attendance_events_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method deletes_student_section_attendance_events" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] > 500))):
raise ValueError('Invalid value for parameter `limit` when calling `deletes_student_section_attendance_events`, must be a value less than or equal to `500`')
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] < 0))):
raise ValueError('Invalid value for parameter `limit` when calling `deletes_student_section_attendance_events`, must be a value greater than or equal to `0`')
collection_formats = {}
path_params = {}
query_params = []
if ('offset' in params):
query_params.append(('offset', params['offset']))
if ('limit' in params):
query_params.append(('limit', params['limit']))
if ('min_change_version' in params):
query_params.append(('minChangeVersion', params['min_change_version']))
if ('max_change_version' in params):
query_params.append(('maxChangeVersion', params['max_change_version']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/deletes', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[EdFiStudentSectionAttendanceEvent]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | def deletes_student_section_attendance_events_with_http_info(self, **kwargs):
'Retrieves deleted resources based on change version. # noqa: E501\n\n The DELETES operation is used to retrieve deleted resources. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.deletes_student_section_attendance_events_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method deletes_student_section_attendance_events" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] > 500))):
raise ValueError('Invalid value for parameter `limit` when calling `deletes_student_section_attendance_events`, must be a value less than or equal to `500`')
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] < 0))):
raise ValueError('Invalid value for parameter `limit` when calling `deletes_student_section_attendance_events`, must be a value greater than or equal to `0`')
collection_formats = {}
path_params = {}
query_params = []
if ('offset' in params):
query_params.append(('offset', params['offset']))
if ('limit' in params):
query_params.append(('limit', params['limit']))
if ('min_change_version' in params):
query_params.append(('minChangeVersion', params['min_change_version']))
if ('max_change_version' in params):
query_params.append(('maxChangeVersion', params['max_change_version']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/deletes', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[EdFiStudentSectionAttendanceEvent]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_student_section_attendance_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
4d84e1aea01d7ff2227e7b2f14fc9a84d5821c7309b613bf08be9596e483d9e1 | def get_student_section_attendance_events(self, **kwargs):
'Retrieves specific resources using the resource\'s property values (using the "Get" pattern). # noqa: E501\n\n This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :param bool total_count: Indicates if the total number of items available should be returned in the \'Total-Count\' header of the response. If set to false, \'Total-Count\' header will not be provided.\n :param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.\n :param date event_date: Date for this attendance event.\n :param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.\n :param int school_id: The identifier assigned to a school.\n :param int school_year: The identifier for the school year.\n :param str section_identifier: The local identifier assigned to a section.\n :param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).\n :param str student_unique_id: A unique alphanumeric code assigned to a student.\n :param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.\n :param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.\n :param str attendance_event_reason: The reported reason for a student\'s absence.\n :param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.\n :param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.\n :param str id: \n :param int section_attendance_duration: The duration in minutes of the section attendance event.\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_student_section_attendance_events_with_http_info(**kwargs)
else:
data = self.get_student_section_attendance_events_with_http_info(**kwargs)
return data | Retrieves specific resources using the resource's property values (using the "Get" pattern). # noqa: E501
This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.
:param date event_date: Date for this attendance event.
:param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.
:param int school_id: The identifier assigned to a school.
:param int school_year: The identifier for the school year.
:param str section_identifier: The local identifier assigned to a section.
:param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).
:param str student_unique_id: A unique alphanumeric code assigned to a student.
:param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.
:param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.
:param str attendance_event_reason: The reported reason for a student's absence.
:param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.
:param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.
:param str id:
:param int section_attendance_duration: The duration in minutes of the section attendance event.
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | get_student_section_attendance_events | xmarcosx/edfi-notebook | 2 | python | def get_student_section_attendance_events(self, **kwargs):
'Retrieves specific resources using the resource\'s property values (using the "Get" pattern). # noqa: E501\n\n This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :param bool total_count: Indicates if the total number of items available should be returned in the \'Total-Count\' header of the response. If set to false, \'Total-Count\' header will not be provided.\n :param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.\n :param date event_date: Date for this attendance event.\n :param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.\n :param int school_id: The identifier assigned to a school.\n :param int school_year: The identifier for the school year.\n :param str section_identifier: The local identifier assigned to a section.\n :param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).\n :param str student_unique_id: A unique alphanumeric code assigned to a student.\n :param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.\n :param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.\n :param str attendance_event_reason: The reported reason for a student\'s absence.\n :param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.\n :param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.\n :param str id: \n :param int section_attendance_duration: The duration in minutes of the section attendance event.\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_student_section_attendance_events_with_http_info(**kwargs)
else:
data = self.get_student_section_attendance_events_with_http_info(**kwargs)
return data | def get_student_section_attendance_events(self, **kwargs):
'Retrieves specific resources using the resource\'s property values (using the "Get" pattern). # noqa: E501\n\n This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :param bool total_count: Indicates if the total number of items available should be returned in the \'Total-Count\' header of the response. If set to false, \'Total-Count\' header will not be provided.\n :param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.\n :param date event_date: Date for this attendance event.\n :param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.\n :param int school_id: The identifier assigned to a school.\n :param int school_year: The identifier for the school year.\n :param str section_identifier: The local identifier assigned to a section.\n :param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).\n :param str student_unique_id: A unique alphanumeric code assigned to a student.\n :param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.\n :param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.\n :param str attendance_event_reason: The reported reason for a student\'s absence.\n :param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.\n :param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.\n :param str id: \n :param int section_attendance_duration: The duration in minutes of the section attendance event.\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_student_section_attendance_events_with_http_info(**kwargs)
else:
data = self.get_student_section_attendance_events_with_http_info(**kwargs)
return data<|docstring|>Retrieves specific resources using the resource's property values (using the "Get" pattern). # noqa: E501
This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.
:param date event_date: Date for this attendance event.
:param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.
:param int school_id: The identifier assigned to a school.
:param int school_year: The identifier for the school year.
:param str section_identifier: The local identifier assigned to a section.
:param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).
:param str student_unique_id: A unique alphanumeric code assigned to a student.
:param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.
:param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.
:param str attendance_event_reason: The reported reason for a student's absence.
:param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.
:param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.
:param str id:
:param int section_attendance_duration: The duration in minutes of the section attendance event.
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
bfe138139dcc1a72f653fafd8fb4d2d1d0829c96a3180bdb434eaaac08ad6c97 | def get_student_section_attendance_events_with_http_info(self, **kwargs):
'Retrieves specific resources using the resource\'s property values (using the "Get" pattern). # noqa: E501\n\n This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :param bool total_count: Indicates if the total number of items available should be returned in the \'Total-Count\' header of the response. If set to false, \'Total-Count\' header will not be provided.\n :param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.\n :param date event_date: Date for this attendance event.\n :param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.\n :param int school_id: The identifier assigned to a school.\n :param int school_year: The identifier for the school year.\n :param str section_identifier: The local identifier assigned to a section.\n :param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).\n :param str student_unique_id: A unique alphanumeric code assigned to a student.\n :param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.\n :param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.\n :param str attendance_event_reason: The reported reason for a student\'s absence.\n :param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.\n :param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.\n :param str id: \n :param int section_attendance_duration: The duration in minutes of the section attendance event.\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'total_count', 'attendance_event_category_descriptor', 'event_date', 'local_course_code', 'school_id', 'school_year', 'section_identifier', 'session_name', 'student_unique_id', 'educational_environment_descriptor', 'arrival_time', 'attendance_event_reason', 'departure_time', 'event_duration', 'id', 'section_attendance_duration']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_student_section_attendance_events" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] > 500))):
raise ValueError('Invalid value for parameter `limit` when calling `get_student_section_attendance_events`, must be a value less than or equal to `500`')
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] < 0))):
raise ValueError('Invalid value for parameter `limit` when calling `get_student_section_attendance_events`, must be a value greater than or equal to `0`')
if (self.api_client.client_side_validation and (('attendance_event_category_descriptor' in params) and (len(params['attendance_event_category_descriptor']) > 306))):
raise ValueError('Invalid value for parameter `attendance_event_category_descriptor` when calling `get_student_section_attendance_events`, length must be less than or equal to `306`')
if (self.api_client.client_side_validation and (('local_course_code' in params) and (len(params['local_course_code']) > 60))):
raise ValueError('Invalid value for parameter `local_course_code` when calling `get_student_section_attendance_events`, length must be less than or equal to `60`')
if (self.api_client.client_side_validation and (('section_identifier' in params) and (len(params['section_identifier']) > 255))):
raise ValueError('Invalid value for parameter `section_identifier` when calling `get_student_section_attendance_events`, length must be less than or equal to `255`')
if (self.api_client.client_side_validation and (('session_name' in params) and (len(params['session_name']) > 60))):
raise ValueError('Invalid value for parameter `session_name` when calling `get_student_section_attendance_events`, length must be less than or equal to `60`')
if (self.api_client.client_side_validation and (('student_unique_id' in params) and (len(params['student_unique_id']) > 32))):
raise ValueError('Invalid value for parameter `student_unique_id` when calling `get_student_section_attendance_events`, length must be less than or equal to `32`')
if (self.api_client.client_side_validation and (('educational_environment_descriptor' in params) and (len(params['educational_environment_descriptor']) > 306))):
raise ValueError('Invalid value for parameter `educational_environment_descriptor` when calling `get_student_section_attendance_events`, length must be less than or equal to `306`')
if (self.api_client.client_side_validation and (('attendance_event_reason' in params) and (len(params['attendance_event_reason']) > 255))):
raise ValueError('Invalid value for parameter `attendance_event_reason` when calling `get_student_section_attendance_events`, length must be less than or equal to `255`')
collection_formats = {}
path_params = {}
query_params = []
if ('offset' in params):
query_params.append(('offset', params['offset']))
if ('limit' in params):
query_params.append(('limit', params['limit']))
if ('min_change_version' in params):
query_params.append(('minChangeVersion', params['min_change_version']))
if ('max_change_version' in params):
query_params.append(('maxChangeVersion', params['max_change_version']))
if ('total_count' in params):
query_params.append(('totalCount', params['total_count']))
if ('attendance_event_category_descriptor' in params):
query_params.append(('attendanceEventCategoryDescriptor', params['attendance_event_category_descriptor']))
if ('event_date' in params):
query_params.append(('eventDate', params['event_date']))
if ('local_course_code' in params):
query_params.append(('localCourseCode', params['local_course_code']))
if ('school_id' in params):
query_params.append(('schoolId', params['school_id']))
if ('school_year' in params):
query_params.append(('schoolYear', params['school_year']))
if ('section_identifier' in params):
query_params.append(('sectionIdentifier', params['section_identifier']))
if ('session_name' in params):
query_params.append(('sessionName', params['session_name']))
if ('student_unique_id' in params):
query_params.append(('studentUniqueId', params['student_unique_id']))
if ('educational_environment_descriptor' in params):
query_params.append(('educationalEnvironmentDescriptor', params['educational_environment_descriptor']))
if ('arrival_time' in params):
query_params.append(('arrivalTime', params['arrival_time']))
if ('attendance_event_reason' in params):
query_params.append(('attendanceEventReason', params['attendance_event_reason']))
if ('departure_time' in params):
query_params.append(('departureTime', params['departure_time']))
if ('event_duration' in params):
query_params.append(('eventDuration', params['event_duration']))
if ('id' in params):
query_params.append(('id', params['id']))
if ('section_attendance_duration' in params):
query_params.append(('sectionAttendanceDuration', params['section_attendance_duration']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[EdFiStudentSectionAttendanceEvent]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Retrieves specific resources using the resource's property values (using the "Get" pattern). # noqa: E501
This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.
:param date event_date: Date for this attendance event.
:param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.
:param int school_id: The identifier assigned to a school.
:param int school_year: The identifier for the school year.
:param str section_identifier: The local identifier assigned to a section.
:param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).
:param str student_unique_id: A unique alphanumeric code assigned to a student.
:param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.
:param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.
:param str attendance_event_reason: The reported reason for a student's absence.
:param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.
:param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.
:param str id:
:param int section_attendance_duration: The duration in minutes of the section attendance event.
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | get_student_section_attendance_events_with_http_info | xmarcosx/edfi-notebook | 2 | python | def get_student_section_attendance_events_with_http_info(self, **kwargs):
'Retrieves specific resources using the resource\'s property values (using the "Get" pattern). # noqa: E501\n\n This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :param bool total_count: Indicates if the total number of items available should be returned in the \'Total-Count\' header of the response. If set to false, \'Total-Count\' header will not be provided.\n :param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.\n :param date event_date: Date for this attendance event.\n :param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.\n :param int school_id: The identifier assigned to a school.\n :param int school_year: The identifier for the school year.\n :param str section_identifier: The local identifier assigned to a section.\n :param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).\n :param str student_unique_id: A unique alphanumeric code assigned to a student.\n :param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.\n :param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.\n :param str attendance_event_reason: The reported reason for a student\'s absence.\n :param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.\n :param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.\n :param str id: \n :param int section_attendance_duration: The duration in minutes of the section attendance event.\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'total_count', 'attendance_event_category_descriptor', 'event_date', 'local_course_code', 'school_id', 'school_year', 'section_identifier', 'session_name', 'student_unique_id', 'educational_environment_descriptor', 'arrival_time', 'attendance_event_reason', 'departure_time', 'event_duration', 'id', 'section_attendance_duration']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_student_section_attendance_events" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] > 500))):
raise ValueError('Invalid value for parameter `limit` when calling `get_student_section_attendance_events`, must be a value less than or equal to `500`')
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] < 0))):
raise ValueError('Invalid value for parameter `limit` when calling `get_student_section_attendance_events`, must be a value greater than or equal to `0`')
if (self.api_client.client_side_validation and (('attendance_event_category_descriptor' in params) and (len(params['attendance_event_category_descriptor']) > 306))):
raise ValueError('Invalid value for parameter `attendance_event_category_descriptor` when calling `get_student_section_attendance_events`, length must be less than or equal to `306`')
if (self.api_client.client_side_validation and (('local_course_code' in params) and (len(params['local_course_code']) > 60))):
raise ValueError('Invalid value for parameter `local_course_code` when calling `get_student_section_attendance_events`, length must be less than or equal to `60`')
if (self.api_client.client_side_validation and (('section_identifier' in params) and (len(params['section_identifier']) > 255))):
raise ValueError('Invalid value for parameter `section_identifier` when calling `get_student_section_attendance_events`, length must be less than or equal to `255`')
if (self.api_client.client_side_validation and (('session_name' in params) and (len(params['session_name']) > 60))):
raise ValueError('Invalid value for parameter `session_name` when calling `get_student_section_attendance_events`, length must be less than or equal to `60`')
if (self.api_client.client_side_validation and (('student_unique_id' in params) and (len(params['student_unique_id']) > 32))):
raise ValueError('Invalid value for parameter `student_unique_id` when calling `get_student_section_attendance_events`, length must be less than or equal to `32`')
if (self.api_client.client_side_validation and (('educational_environment_descriptor' in params) and (len(params['educational_environment_descriptor']) > 306))):
raise ValueError('Invalid value for parameter `educational_environment_descriptor` when calling `get_student_section_attendance_events`, length must be less than or equal to `306`')
if (self.api_client.client_side_validation and (('attendance_event_reason' in params) and (len(params['attendance_event_reason']) > 255))):
raise ValueError('Invalid value for parameter `attendance_event_reason` when calling `get_student_section_attendance_events`, length must be less than or equal to `255`')
collection_formats = {}
path_params = {}
query_params = []
if ('offset' in params):
query_params.append(('offset', params['offset']))
if ('limit' in params):
query_params.append(('limit', params['limit']))
if ('min_change_version' in params):
query_params.append(('minChangeVersion', params['min_change_version']))
if ('max_change_version' in params):
query_params.append(('maxChangeVersion', params['max_change_version']))
if ('total_count' in params):
query_params.append(('totalCount', params['total_count']))
if ('attendance_event_category_descriptor' in params):
query_params.append(('attendanceEventCategoryDescriptor', params['attendance_event_category_descriptor']))
if ('event_date' in params):
query_params.append(('eventDate', params['event_date']))
if ('local_course_code' in params):
query_params.append(('localCourseCode', params['local_course_code']))
if ('school_id' in params):
query_params.append(('schoolId', params['school_id']))
if ('school_year' in params):
query_params.append(('schoolYear', params['school_year']))
if ('section_identifier' in params):
query_params.append(('sectionIdentifier', params['section_identifier']))
if ('session_name' in params):
query_params.append(('sessionName', params['session_name']))
if ('student_unique_id' in params):
query_params.append(('studentUniqueId', params['student_unique_id']))
if ('educational_environment_descriptor' in params):
query_params.append(('educationalEnvironmentDescriptor', params['educational_environment_descriptor']))
if ('arrival_time' in params):
query_params.append(('arrivalTime', params['arrival_time']))
if ('attendance_event_reason' in params):
query_params.append(('attendanceEventReason', params['attendance_event_reason']))
if ('departure_time' in params):
query_params.append(('departureTime', params['departure_time']))
if ('event_duration' in params):
query_params.append(('eventDuration', params['event_duration']))
if ('id' in params):
query_params.append(('id', params['id']))
if ('section_attendance_duration' in params):
query_params.append(('sectionAttendanceDuration', params['section_attendance_duration']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[EdFiStudentSectionAttendanceEvent]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | def get_student_section_attendance_events_with_http_info(self, **kwargs):
'Retrieves specific resources using the resource\'s property values (using the "Get" pattern). # noqa: E501\n\n This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset: Indicates how many items should be skipped before returning results.\n :param int limit: Indicates the maximum number of items that should be returned in the results.\n :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion\n :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion\n :param bool total_count: Indicates if the total number of items available should be returned in the \'Total-Count\' header of the response. If set to false, \'Total-Count\' header will not be provided.\n :param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.\n :param date event_date: Date for this attendance event.\n :param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.\n :param int school_id: The identifier assigned to a school.\n :param int school_year: The identifier for the school year.\n :param str section_identifier: The local identifier assigned to a section.\n :param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).\n :param str student_unique_id: A unique alphanumeric code assigned to a student.\n :param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.\n :param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.\n :param str attendance_event_reason: The reported reason for a student\'s absence.\n :param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.\n :param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.\n :param str id: \n :param int section_attendance_duration: The duration in minutes of the section attendance event.\n :return: list[EdFiStudentSectionAttendanceEvent]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'total_count', 'attendance_event_category_descriptor', 'event_date', 'local_course_code', 'school_id', 'school_year', 'section_identifier', 'session_name', 'student_unique_id', 'educational_environment_descriptor', 'arrival_time', 'attendance_event_reason', 'departure_time', 'event_duration', 'id', 'section_attendance_duration']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_student_section_attendance_events" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] > 500))):
raise ValueError('Invalid value for parameter `limit` when calling `get_student_section_attendance_events`, must be a value less than or equal to `500`')
if (self.api_client.client_side_validation and (('limit' in params) and (params['limit'] < 0))):
raise ValueError('Invalid value for parameter `limit` when calling `get_student_section_attendance_events`, must be a value greater than or equal to `0`')
if (self.api_client.client_side_validation and (('attendance_event_category_descriptor' in params) and (len(params['attendance_event_category_descriptor']) > 306))):
raise ValueError('Invalid value for parameter `attendance_event_category_descriptor` when calling `get_student_section_attendance_events`, length must be less than or equal to `306`')
if (self.api_client.client_side_validation and (('local_course_code' in params) and (len(params['local_course_code']) > 60))):
raise ValueError('Invalid value for parameter `local_course_code` when calling `get_student_section_attendance_events`, length must be less than or equal to `60`')
if (self.api_client.client_side_validation and (('section_identifier' in params) and (len(params['section_identifier']) > 255))):
raise ValueError('Invalid value for parameter `section_identifier` when calling `get_student_section_attendance_events`, length must be less than or equal to `255`')
if (self.api_client.client_side_validation and (('session_name' in params) and (len(params['session_name']) > 60))):
raise ValueError('Invalid value for parameter `session_name` when calling `get_student_section_attendance_events`, length must be less than or equal to `60`')
if (self.api_client.client_side_validation and (('student_unique_id' in params) and (len(params['student_unique_id']) > 32))):
raise ValueError('Invalid value for parameter `student_unique_id` when calling `get_student_section_attendance_events`, length must be less than or equal to `32`')
if (self.api_client.client_side_validation and (('educational_environment_descriptor' in params) and (len(params['educational_environment_descriptor']) > 306))):
raise ValueError('Invalid value for parameter `educational_environment_descriptor` when calling `get_student_section_attendance_events`, length must be less than or equal to `306`')
if (self.api_client.client_side_validation and (('attendance_event_reason' in params) and (len(params['attendance_event_reason']) > 255))):
raise ValueError('Invalid value for parameter `attendance_event_reason` when calling `get_student_section_attendance_events`, length must be less than or equal to `255`')
collection_formats = {}
path_params = {}
query_params = []
if ('offset' in params):
query_params.append(('offset', params['offset']))
if ('limit' in params):
query_params.append(('limit', params['limit']))
if ('min_change_version' in params):
query_params.append(('minChangeVersion', params['min_change_version']))
if ('max_change_version' in params):
query_params.append(('maxChangeVersion', params['max_change_version']))
if ('total_count' in params):
query_params.append(('totalCount', params['total_count']))
if ('attendance_event_category_descriptor' in params):
query_params.append(('attendanceEventCategoryDescriptor', params['attendance_event_category_descriptor']))
if ('event_date' in params):
query_params.append(('eventDate', params['event_date']))
if ('local_course_code' in params):
query_params.append(('localCourseCode', params['local_course_code']))
if ('school_id' in params):
query_params.append(('schoolId', params['school_id']))
if ('school_year' in params):
query_params.append(('schoolYear', params['school_year']))
if ('section_identifier' in params):
query_params.append(('sectionIdentifier', params['section_identifier']))
if ('session_name' in params):
query_params.append(('sessionName', params['session_name']))
if ('student_unique_id' in params):
query_params.append(('studentUniqueId', params['student_unique_id']))
if ('educational_environment_descriptor' in params):
query_params.append(('educationalEnvironmentDescriptor', params['educational_environment_descriptor']))
if ('arrival_time' in params):
query_params.append(('arrivalTime', params['arrival_time']))
if ('attendance_event_reason' in params):
query_params.append(('attendanceEventReason', params['attendance_event_reason']))
if ('departure_time' in params):
query_params.append(('departureTime', params['departure_time']))
if ('event_duration' in params):
query_params.append(('eventDuration', params['event_duration']))
if ('id' in params):
query_params.append(('id', params['id']))
if ('section_attendance_duration' in params):
query_params.append(('sectionAttendanceDuration', params['section_attendance_duration']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[EdFiStudentSectionAttendanceEvent]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Retrieves specific resources using the resource's property values (using the "Get" pattern). # noqa: E501
This GET operation provides access to resources using the "Get" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str attendance_event_category_descriptor: A code describing the attendance event, for example: Present Unexcused absence Excused absence Tardy.
:param date event_date: Date for this attendance event.
:param str local_course_code: The local code assigned by the School that identifies the course offering provided for the instruction of students.
:param int school_id: The identifier assigned to a school.
:param int school_year: The identifier for the school year.
:param str section_identifier: The local identifier assigned to a section.
:param str session_name: The identifier for the calendar for the academic session (e.g., 2010/11, 2011 Summer).
:param str student_unique_id: A unique alphanumeric code assigned to a student.
:param str educational_environment_descriptor: The setting in which a child receives education and related services. This attribute is only used if it differs from the EducationalEnvironment of the Section. This is only used in the AttendanceEvent if different from the associated Section.
:param str arrival_time: The time of day the student arrived for the attendance event in ISO 8601 format.
:param str attendance_event_reason: The reported reason for a student's absence.
:param str departure_time: The time of day the student departed for the attendance event in ISO 8601 format.
:param float event_duration: The amount of time for the event as recognized by the school: 1 day = 1, 1/2 day = 0.5, 1/3 day = 0.33.
:param str id:
:param int section_attendance_duration: The duration in minutes of the section attendance event.
:return: list[EdFiStudentSectionAttendanceEvent]
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
5c4151f115e348fb73746048ce6ccf50a2b6d03a03a87be8528fffa33aa4071a | def get_student_section_attendance_events_by_id(self, id, **kwargs):
'Retrieves a specific resource using the resource\'s identifier (using the "Get By Id" pattern). # noqa: E501\n\n This GET operation retrieves a resource by the specified resource identifier. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_by_id(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.\n :return: EdFiStudentSectionAttendanceEvent\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_student_section_attendance_events_by_id_with_http_info(id, **kwargs)
else:
data = self.get_student_section_attendance_events_by_id_with_http_info(id, **kwargs)
return data | Retrieves a specific resource using the resource's identifier (using the "Get By Id" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: EdFiStudentSectionAttendanceEvent
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | get_student_section_attendance_events_by_id | xmarcosx/edfi-notebook | 2 | python | def get_student_section_attendance_events_by_id(self, id, **kwargs):
'Retrieves a specific resource using the resource\'s identifier (using the "Get By Id" pattern). # noqa: E501\n\n This GET operation retrieves a resource by the specified resource identifier. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_by_id(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.\n :return: EdFiStudentSectionAttendanceEvent\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_student_section_attendance_events_by_id_with_http_info(id, **kwargs)
else:
data = self.get_student_section_attendance_events_by_id_with_http_info(id, **kwargs)
return data | def get_student_section_attendance_events_by_id(self, id, **kwargs):
'Retrieves a specific resource using the resource\'s identifier (using the "Get By Id" pattern). # noqa: E501\n\n This GET operation retrieves a resource by the specified resource identifier. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_by_id(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.\n :return: EdFiStudentSectionAttendanceEvent\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_student_section_attendance_events_by_id_with_http_info(id, **kwargs)
else:
data = self.get_student_section_attendance_events_by_id_with_http_info(id, **kwargs)
return data<|docstring|>Retrieves a specific resource using the resource's identifier (using the "Get By Id" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: EdFiStudentSectionAttendanceEvent
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
8a19490a8f65abf826def673d71aa1736b4bd4f3b8d02326d8a6ddef34646bac | def get_student_section_attendance_events_by_id_with_http_info(self, id, **kwargs):
'Retrieves a specific resource using the resource\'s identifier (using the "Get By Id" pattern). # noqa: E501\n\n This GET operation retrieves a resource by the specified resource identifier. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_by_id_with_http_info(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.\n :return: EdFiStudentSectionAttendanceEvent\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['id', 'if_none_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_student_section_attendance_events_by_id" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `get_student_section_attendance_events_by_id`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_none_match' in params):
header_params['If-None-Match'] = params['if_none_match']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='EdFiStudentSectionAttendanceEvent', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Retrieves a specific resource using the resource's identifier (using the "Get By Id" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: EdFiStudentSectionAttendanceEvent
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | get_student_section_attendance_events_by_id_with_http_info | xmarcosx/edfi-notebook | 2 | python | def get_student_section_attendance_events_by_id_with_http_info(self, id, **kwargs):
'Retrieves a specific resource using the resource\'s identifier (using the "Get By Id" pattern). # noqa: E501\n\n This GET operation retrieves a resource by the specified resource identifier. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_by_id_with_http_info(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.\n :return: EdFiStudentSectionAttendanceEvent\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['id', 'if_none_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_student_section_attendance_events_by_id" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `get_student_section_attendance_events_by_id`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_none_match' in params):
header_params['If-None-Match'] = params['if_none_match']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='EdFiStudentSectionAttendanceEvent', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | def get_student_section_attendance_events_by_id_with_http_info(self, id, **kwargs):
'Retrieves a specific resource using the resource\'s identifier (using the "Get By Id" pattern). # noqa: E501\n\n This GET operation retrieves a resource by the specified resource identifier. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_student_section_attendance_events_by_id_with_http_info(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.\n :return: EdFiStudentSectionAttendanceEvent\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['id', 'if_none_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_student_section_attendance_events_by_id" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `get_student_section_attendance_events_by_id`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_none_match' in params):
header_params['If-None-Match'] = params['if_none_match']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='EdFiStudentSectionAttendanceEvent', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Retrieves a specific resource using the resource's identifier (using the "Get By Id" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_student_section_attendance_events_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: EdFiStudentSectionAttendanceEvent
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
7277f731cfa923af9286d164165e22831821c3be704b135a47e62b6a0217ae8f | def post_student_section_attendance_event(self, student_section_attendance_event, **kwargs):
'Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501\n\n The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.post_student_section_attendance_event(student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_student_section_attendance_event_with_http_info(student_section_attendance_event, **kwargs)
else:
data = self.post_student_section_attendance_event_with_http_info(student_section_attendance_event, **kwargs)
return data | Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_student_section_attendance_event(student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | post_student_section_attendance_event | xmarcosx/edfi-notebook | 2 | python | def post_student_section_attendance_event(self, student_section_attendance_event, **kwargs):
'Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501\n\n The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.post_student_section_attendance_event(student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_student_section_attendance_event_with_http_info(student_section_attendance_event, **kwargs)
else:
data = self.post_student_section_attendance_event_with_http_info(student_section_attendance_event, **kwargs)
return data | def post_student_section_attendance_event(self, student_section_attendance_event, **kwargs):
'Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501\n\n The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.post_student_section_attendance_event(student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_student_section_attendance_event_with_http_info(student_section_attendance_event, **kwargs)
else:
data = self.post_student_section_attendance_event_with_http_info(student_section_attendance_event, **kwargs)
return data<|docstring|>Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_student_section_attendance_event(student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
747ae93e498d5fe94abaf6671b28931bc397785e7aeadc9100d2f927620ce267 | def post_student_section_attendance_event_with_http_info(self, student_section_attendance_event, **kwargs):
'Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501\n\n The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.post_student_section_attendance_event_with_http_info(student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['student_section_attendance_event']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method post_student_section_attendance_event" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('student_section_attendance_event' not in params) or (params['student_section_attendance_event'] is None))):
raise ValueError('Missing the required parameter `student_section_attendance_event` when calling `post_student_section_attendance_event`')
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if ('student_section_attendance_event' in params):
body_params = params['student_section_attendance_event']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_student_section_attendance_event_with_http_info(student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | post_student_section_attendance_event_with_http_info | xmarcosx/edfi-notebook | 2 | python | def post_student_section_attendance_event_with_http_info(self, student_section_attendance_event, **kwargs):
'Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501\n\n The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.post_student_section_attendance_event_with_http_info(student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['student_section_attendance_event']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method post_student_section_attendance_event" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('student_section_attendance_event' not in params) or (params['student_section_attendance_event'] is None))):
raise ValueError('Missing the required parameter `student_section_attendance_event` when calling `post_student_section_attendance_event`')
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if ('student_section_attendance_event' in params):
body_params = params['student_section_attendance_event']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | def post_student_section_attendance_event_with_http_info(self, student_section_attendance_event, **kwargs):
'Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501\n\n The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.post_student_section_attendance_event_with_http_info(student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['student_section_attendance_event']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method post_student_section_attendance_event" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('student_section_attendance_event' not in params) or (params['student_section_attendance_event'] is None))):
raise ValueError('Missing the required parameter `student_section_attendance_event` when calling `post_student_section_attendance_event`')
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if ('student_section_attendance_event' in params):
body_params = params['student_section_attendance_event']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an "upsert" operation (insert + update). Clients should NOT include the resource "id" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by "id"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_student_section_attendance_event_with_http_info(student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
366a11888ffa6220ae6e2fb6233a94cfbf3f10ab9092a2f60e808641f8cc66ed | def put_student_section_attendance_event(self, id, student_section_attendance_event, **kwargs):
'Updates or creates a resource based on the resource identifier. # noqa: E501\n\n The PUT operation is used to update or create a resource by identifier. If the resource doesn\'t exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.put_student_section_attendance_event(id, student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, **kwargs)
else:
data = self.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, **kwargs)
return data | Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_student_section_attendance_event(id, student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | put_student_section_attendance_event | xmarcosx/edfi-notebook | 2 | python | def put_student_section_attendance_event(self, id, student_section_attendance_event, **kwargs):
'Updates or creates a resource based on the resource identifier. # noqa: E501\n\n The PUT operation is used to update or create a resource by identifier. If the resource doesn\'t exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.put_student_section_attendance_event(id, student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, **kwargs)
else:
data = self.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, **kwargs)
return data | def put_student_section_attendance_event(self, id, student_section_attendance_event, **kwargs):
'Updates or creates a resource based on the resource identifier. # noqa: E501\n\n The PUT operation is used to update or create a resource by identifier. If the resource doesn\'t exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.put_student_section_attendance_event(id, student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, **kwargs)
else:
data = self.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, **kwargs)
return data<|docstring|>Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_student_section_attendance_event(id, student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
c4766ae0e0dd9e1c099fd956b6349ed4d3ddf07cd5c16de8f64699f2efbc6b39 | def put_student_section_attendance_event_with_http_info(self, id, student_section_attendance_event, **kwargs):
'Updates or creates a resource based on the resource identifier. # noqa: E501\n\n The PUT operation is used to update or create a resource by identifier. If the resource doesn\'t exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['id', 'student_section_attendance_event', 'if_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method put_student_section_attendance_event" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `put_student_section_attendance_event`')
if (self.api_client.client_side_validation and (('student_section_attendance_event' not in params) or (params['student_section_attendance_event'] is None))):
raise ValueError('Missing the required parameter `student_section_attendance_event` when calling `put_student_section_attendance_event`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_match' in params):
header_params['If-Match'] = params['if_match']
form_params = []
local_var_files = {}
body_params = None
if ('student_section_attendance_event' in params):
body_params = params['student_section_attendance_event']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread. | src/v5.1/resources/swagger_client/api/student_section_attendance_events_api.py | put_student_section_attendance_event_with_http_info | xmarcosx/edfi-notebook | 2 | python | def put_student_section_attendance_event_with_http_info(self, id, student_section_attendance_event, **kwargs):
'Updates or creates a resource based on the resource identifier. # noqa: E501\n\n The PUT operation is used to update or create a resource by identifier. If the resource doesn\'t exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['id', 'student_section_attendance_event', 'if_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method put_student_section_attendance_event" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `put_student_section_attendance_event`')
if (self.api_client.client_side_validation and (('student_section_attendance_event' not in params) or (params['student_section_attendance_event'] is None))):
raise ValueError('Missing the required parameter `student_section_attendance_event` when calling `put_student_section_attendance_event`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_match' in params):
header_params['If-Match'] = params['if_match']
form_params = []
local_var_files = {}
body_params = None
if ('student_section_attendance_event' in params):
body_params = params['student_section_attendance_event']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | def put_student_section_attendance_event_with_http_info(self, id, student_section_attendance_event, **kwargs):
'Updates or creates a resource based on the resource identifier. # noqa: E501\n\n The PUT operation is used to update or create a resource by identifier. If the resource doesn\'t exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: A resource identifier that uniquely identifies the resource. (required)\n :param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)\n :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = ['id', 'student_section_attendance_event', 'if_match']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method put_student_section_attendance_event" % key))
params[key] = val
del params['kwargs']
if (self.api_client.client_side_validation and (('id' not in params) or (params['id'] is None))):
raise ValueError('Missing the required parameter `id` when calling `put_student_section_attendance_event`')
if (self.api_client.client_side_validation and (('student_section_attendance_event' not in params) or (params['student_section_attendance_event'] is None))):
raise ValueError('Missing the required parameter `student_section_attendance_event` when calling `put_student_section_attendance_event`')
collection_formats = {}
path_params = {}
if ('id' in params):
path_params['id'] = params['id']
query_params = []
header_params = {}
if ('if_match' in params):
header_params['If-Match'] = params['if_match']
form_params = []
local_var_files = {}
body_params = None
if ('student_section_attendance_event' in params):
body_params = params['student_section_attendance_event']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['oauth2_client_credentials']
return self.api_client.call_api('/ed-fi/studentSectionAttendanceEvents/{id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_student_section_attendance_event_with_http_info(id, student_section_attendance_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param EdFiStudentSectionAttendanceEvent student_section_attendance_event: The JSON representation of the "studentSectionAttendanceEvent" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.<|endoftext|> |
9d098a2e73e21161f2b9bb63fa884e6c4f8f98cff9f0eab27932085b16b46c5f | @property
def blueprint(self) -> Blueprint:
'Return blueprint that this entity represents'
return TorsionStiffnessItemBlueprint() | Return blueprint that this entity represents | src/sima/riflex/torsionstiffnessitem.py | blueprint | SINTEF/simapy | 0 | python | @property
def blueprint(self) -> Blueprint:
return TorsionStiffnessItemBlueprint() | @property
def blueprint(self) -> Blueprint:
return TorsionStiffnessItemBlueprint()<|docstring|>Return blueprint that this entity represents<|endoftext|> |
5e5149e585a844ec44a1f1b5771ba391b0e59935253d06f2742e72d9a772344a | @name.setter
def name(self, value: str):
'Set name'
self.__name = str(value) | Set name | src/sima/riflex/torsionstiffnessitem.py | name | SINTEF/simapy | 0 | python | @name.setter
def name(self, value: str):
self.__name = str(value) | @name.setter
def name(self, value: str):
self.__name = str(value)<|docstring|>Set name<|endoftext|> |
a9610f8f0ea0f2d82a99114070999eaa47d1c3f8adaedd20569b377fa63a8527 | @description.setter
def description(self, value: str):
'Set description'
self.__description = str(value) | Set description | src/sima/riflex/torsionstiffnessitem.py | description | SINTEF/simapy | 0 | python | @description.setter
def description(self, value: str):
self.__description = str(value) | @description.setter
def description(self, value: str):
self.__description = str(value)<|docstring|>Set description<|endoftext|> |
04883e50e332e89ed063cc3ea9e9aa0051221a57d8a6d5d8ed3d15e1a386e480 | @_id.setter
def _id(self, value: str):
'Set _id'
self.___id = str(value) | Set _id | src/sima/riflex/torsionstiffnessitem.py | _id | SINTEF/simapy | 0 | python | @_id.setter
def _id(self, value: str):
self.___id = str(value) | @_id.setter
def _id(self, value: str):
self.___id = str(value)<|docstring|>Set _id<|endoftext|> |
4b20dcfbfd371cd3587389a3c3386e7420fb909dae4b3692a5880e8da15d9f65 | @scriptableValues.setter
def scriptableValues(self, value: List[ScriptableValue]):
'Set scriptableValues'
if (not isinstance(value, Sequence)):
raise Exception('Expected sequense, but was ', type(value))
self.__scriptableValues = value | Set scriptableValues | src/sima/riflex/torsionstiffnessitem.py | scriptableValues | SINTEF/simapy | 0 | python | @scriptableValues.setter
def scriptableValues(self, value: List[ScriptableValue]):
if (not isinstance(value, Sequence)):
raise Exception('Expected sequense, but was ', type(value))
self.__scriptableValues = value | @scriptableValues.setter
def scriptableValues(self, value: List[ScriptableValue]):
if (not isinstance(value, Sequence)):
raise Exception('Expected sequense, but was ', type(value))
self.__scriptableValues = value<|docstring|>Set scriptableValues<|endoftext|> |
afb5848e434213b0aff551e9fd107a516a634728ab4b4a50a5864d801364c389 | @property
def torsionMoment(self) -> float:
'Torsion moment'
return self.__torsionMoment | Torsion moment | src/sima/riflex/torsionstiffnessitem.py | torsionMoment | SINTEF/simapy | 0 | python | @property
def torsionMoment(self) -> float:
return self.__torsionMoment | @property
def torsionMoment(self) -> float:
return self.__torsionMoment<|docstring|>Torsion moment<|endoftext|> |
12981d7451bfd0af2d001b6b40a9aa74db9f2275ecc535bfa57593e1b2634868 | @torsionMoment.setter
def torsionMoment(self, value: float):
'Set torsionMoment'
self.__torsionMoment = float(value) | Set torsionMoment | src/sima/riflex/torsionstiffnessitem.py | torsionMoment | SINTEF/simapy | 0 | python | @torsionMoment.setter
def torsionMoment(self, value: float):
self.__torsionMoment = float(value) | @torsionMoment.setter
def torsionMoment(self, value: float):
self.__torsionMoment = float(value)<|docstring|>Set torsionMoment<|endoftext|> |
f8775a0d97e2de42657d64ca90aa657af7dc9be5645d09d12a3716db5f109990 | @property
def torsionAngle(self) -> float:
'Torsion angle/length'
return self.__torsionAngle | Torsion angle/length | src/sima/riflex/torsionstiffnessitem.py | torsionAngle | SINTEF/simapy | 0 | python | @property
def torsionAngle(self) -> float:
return self.__torsionAngle | @property
def torsionAngle(self) -> float:
return self.__torsionAngle<|docstring|>Torsion angle/length<|endoftext|> |
3805b419c77a349dda2331982754d382f61b137d90cd7417e126c062ece422c6 | @torsionAngle.setter
def torsionAngle(self, value: float):
'Set torsionAngle'
self.__torsionAngle = float(value) | Set torsionAngle | src/sima/riflex/torsionstiffnessitem.py | torsionAngle | SINTEF/simapy | 0 | python | @torsionAngle.setter
def torsionAngle(self, value: float):
self.__torsionAngle = float(value) | @torsionAngle.setter
def torsionAngle(self, value: float):
self.__torsionAngle = float(value)<|docstring|>Set torsionAngle<|endoftext|> |
1e75a2ff79158be66310e32a9c08b9d44b4f12cc9f702a7eb14cc8f92567c242 | def make_encoder(opt, embeddings, intent_size, output_size, use_history=False, hidden_depth=1, identity=None, hidden_size=None):
'\n Various encoder dispatcher function.\n Args:\n opt: the option in current environment.\n embeddings (Embeddings): vocab embeddings for this encoder.\n '
diaact_size = (intent_size + 1)
extra_size = (3 + 2)
if (hidden_size is None):
hidden_size = opt.hidden_size
if (not opt.use_utterance):
embeddings = None
encoder = CurrentEncoder(((diaact_size * opt.state_length) + extra_size), embeddings, output_size, hidden_depth=hidden_depth)
return encoder | Various encoder dispatcher function.
Args:
opt: the option in current environment.
embeddings (Embeddings): vocab embeddings for this encoder. | craigslistbargain/neural/dsac_model_builder.py | make_encoder | ijcai2022-5500/alpha_nego | 0 | python | def make_encoder(opt, embeddings, intent_size, output_size, use_history=False, hidden_depth=1, identity=None, hidden_size=None):
'\n Various encoder dispatcher function.\n Args:\n opt: the option in current environment.\n embeddings (Embeddings): vocab embeddings for this encoder.\n '
diaact_size = (intent_size + 1)
extra_size = (3 + 2)
if (hidden_size is None):
hidden_size = opt.hidden_size
if (not opt.use_utterance):
embeddings = None
encoder = CurrentEncoder(((diaact_size * opt.state_length) + extra_size), embeddings, output_size, hidden_depth=hidden_depth)
return encoder | def make_encoder(opt, embeddings, intent_size, output_size, use_history=False, hidden_depth=1, identity=None, hidden_size=None):
'\n Various encoder dispatcher function.\n Args:\n opt: the option in current environment.\n embeddings (Embeddings): vocab embeddings for this encoder.\n '
diaact_size = (intent_size + 1)
extra_size = (3 + 2)
if (hidden_size is None):
hidden_size = opt.hidden_size
if (not opt.use_utterance):
embeddings = None
encoder = CurrentEncoder(((diaact_size * opt.state_length) + extra_size), embeddings, output_size, hidden_depth=hidden_depth)
return encoder<|docstring|>Various encoder dispatcher function.
Args:
opt: the option in current environment.
embeddings (Embeddings): vocab embeddings for this encoder.<|endoftext|> |
73dd53eff536be79838401e9f35ea647e3ae0c17df16df01ab3e6b8de791b27c | def make_decoder(input_size, intent_size, price_size, hidden_size, is_actor=True, hidden_depth=2, num_quantiles=32):
'\n Various decoder dispatcher function.\n Args:\n opt: the option in current environment.\n embeddings (Embeddings): vocab embeddings for this decoder.\n '
if is_actor:
return MixedPolicy(input_size, intent_size, price_size, hidden_size=hidden_size, hidden_depth=hidden_depth)
else:
return QuantileMlp(input_size=((input_size + intent_size) + price_size), output_size=1, num_quantiles=num_quantiles, hidden_sizes=[hidden_size, hidden_size]) | Various decoder dispatcher function.
Args:
opt: the option in current environment.
embeddings (Embeddings): vocab embeddings for this decoder. | craigslistbargain/neural/dsac_model_builder.py | make_decoder | ijcai2022-5500/alpha_nego | 0 | python | def make_decoder(input_size, intent_size, price_size, hidden_size, is_actor=True, hidden_depth=2, num_quantiles=32):
'\n Various decoder dispatcher function.\n Args:\n opt: the option in current environment.\n embeddings (Embeddings): vocab embeddings for this decoder.\n '
if is_actor:
return MixedPolicy(input_size, intent_size, price_size, hidden_size=hidden_size, hidden_depth=hidden_depth)
else:
return QuantileMlp(input_size=((input_size + intent_size) + price_size), output_size=1, num_quantiles=num_quantiles, hidden_sizes=[hidden_size, hidden_size]) | def make_decoder(input_size, intent_size, price_size, hidden_size, is_actor=True, hidden_depth=2, num_quantiles=32):
'\n Various decoder dispatcher function.\n Args:\n opt: the option in current environment.\n embeddings (Embeddings): vocab embeddings for this decoder.\n '
if is_actor:
return MixedPolicy(input_size, intent_size, price_size, hidden_size=hidden_size, hidden_depth=hidden_depth)
else:
return QuantileMlp(input_size=((input_size + intent_size) + price_size), output_size=1, num_quantiles=num_quantiles, hidden_sizes=[hidden_size, hidden_size])<|docstring|>Various decoder dispatcher function.
Args:
opt: the option in current environment.
embeddings (Embeddings): vocab embeddings for this decoder.<|endoftext|> |
bdbd288d5dcb14292822c8bebea04de77104d2abaeded96db7275f3f641763c9 | def record_time_interval(section, start_time, line_break=False):
'Record a time interval since the last timestamp'
end_time = time.time()
delta = (end_time - start_time)
if (delta < 1):
delta *= 1000
units = 'ms'
else:
units = 's'
if line_break:
logger.info('PROCESS_TIME:{:>36} {} {}\n'.format(section, round(delta, 1), units))
else:
logger.info('PROCESS_TIME:{:>36} {} {}'.format(section, round(delta, 1), units))
return end_time | Record a time interval since the last timestamp | src/utils.py | record_time_interval | acwooding/dimension_reduction | 4 | python | def record_time_interval(section, start_time, line_break=False):
end_time = time.time()
delta = (end_time - start_time)
if (delta < 1):
delta *= 1000
units = 'ms'
else:
units = 's'
if line_break:
logger.info('PROCESS_TIME:{:>36} {} {}\n'.format(section, round(delta, 1), units))
else:
logger.info('PROCESS_TIME:{:>36} {} {}'.format(section, round(delta, 1), units))
return end_time | def record_time_interval(section, start_time, line_break=False):
end_time = time.time()
delta = (end_time - start_time)
if (delta < 1):
delta *= 1000
units = 'ms'
else:
units = 's'
if line_break:
logger.info('PROCESS_TIME:{:>36} {} {}\n'.format(section, round(delta, 1), units))
else:
logger.info('PROCESS_TIME:{:>36} {} {}'.format(section, round(delta, 1), units))
return end_time<|docstring|>Record a time interval since the last timestamp<|endoftext|> |
f4f18c03a043195e580eb758e75250222186957bcefeae8f4715a6197f7ed4e8 | def usage():
' This script requires several input arguments to work correctly. Check they exist, otherwise print usage and exit.\n '
if (not (len(sys.argv) == 4)):
print('===============================================================================================================================')
print(' Remap the optics group assigment of particles based on a micrograph name match criteria:')
print(' $ remap_optics_groups.py particles.star mic_name_match_critera optics_group_value ')
print(' Example command: ')
print(' $ remap_optics_groups.py particles.star Name*012.mrc 12 ')
print('===============================================================================================================================')
sys.exit()
else:
return | This script requires several input arguments to work correctly. Check they exist, otherwise print usage and exit. | star_file_tools/remap_optics_groups.py | usage | akeszei/em_toolkit | 0 | python | def usage():
' \n '
if (not (len(sys.argv) == 4)):
print('===============================================================================================================================')
print(' Remap the optics group assigment of particles based on a micrograph name match criteria:')
print(' $ remap_optics_groups.py particles.star mic_name_match_critera optics_group_value ')
print(' Example command: ')
print(' $ remap_optics_groups.py particles.star Name*012.mrc 12 ')
print('===============================================================================================================================')
sys.exit()
else:
return | def usage():
' \n '
if (not (len(sys.argv) == 4)):
print('===============================================================================================================================')
print(' Remap the optics group assigment of particles based on a micrograph name match criteria:')
print(' $ remap_optics_groups.py particles.star mic_name_match_critera optics_group_value ')
print(' Example command: ')
print(' $ remap_optics_groups.py particles.star Name*012.mrc 12 ')
print('===============================================================================================================================')
sys.exit()
else:
return<|docstring|>This script requires several input arguments to work correctly. Check they exist, otherwise print usage and exit.<|endoftext|> |
245b6b93341fc0143f22d9a5a2ba0c06d6fbf78ab15d9b4704f97f530d629dc1 | def header_length(file):
" For an input .STAR file, define the length of the header and\n return the last line in the header. Header length is determined by\n finding the highest line number starting with '_' character\n "
with open(file, 'r') as f:
line_num = 0
header_lines = []
for line in f:
line_num += 1
first_character = ''
line = line.strip()
line_to_list = line.split()
if (len(line) == 0):
continue
first_character = list(line_to_list[0])[0]
if (first_character == '_'):
header_lines.append(line_num)
if DEBUG:
print(('Line # %d = ' % line_num), end='')
print(line, ' --> ', line_to_list, sep=' ')
return max(header_lines) | For an input .STAR file, define the length of the header and
return the last line in the header. Header length is determined by
finding the highest line number starting with '_' character | star_file_tools/remap_optics_groups.py | header_length | akeszei/em_toolkit | 0 | python | def header_length(file):
" For an input .STAR file, define the length of the header and\n return the last line in the header. Header length is determined by\n finding the highest line number starting with '_' character\n "
with open(file, 'r') as f:
line_num = 0
header_lines = []
for line in f:
line_num += 1
first_character =
line = line.strip()
line_to_list = line.split()
if (len(line) == 0):
continue
first_character = list(line_to_list[0])[0]
if (first_character == '_'):
header_lines.append(line_num)
if DEBUG:
print(('Line # %d = ' % line_num), end=)
print(line, ' --> ', line_to_list, sep=' ')
return max(header_lines) | def header_length(file):
" For an input .STAR file, define the length of the header and\n return the last line in the header. Header length is determined by\n finding the highest line number starting with '_' character\n "
with open(file, 'r') as f:
line_num = 0
header_lines = []
for line in f:
line_num += 1
first_character =
line = line.strip()
line_to_list = line.split()
if (len(line) == 0):
continue
first_character = list(line_to_list[0])[0]
if (first_character == '_'):
header_lines.append(line_num)
if DEBUG:
print(('Line # %d = ' % line_num), end=)
print(line, ' --> ', line_to_list, sep=' ')
return max(header_lines)<|docstring|>For an input .STAR file, define the length of the header and
return the last line in the header. Header length is determined by
finding the highest line number starting with '_' character<|endoftext|> |
b6ef74fc8f64c79eb36762fc419e411134ccce1af269d7a63ce4419c3fbeff61 | def find_star_column(file, column_type, header_length):
" For an input .STAR file, search through the header and find the column numbers assigned to a given column_type (e.g. 'rlnMicrographName', ...)\n "
column_num = None
with open(file, 'r') as f:
line_num = 0
for line in f:
line_num += 1
if (column_type in line):
column_num = int(line.split()[1].replace('#', ''))
if (line_num >= header_length):
if (column_num is None):
print(('Input .STAR file: %s, is missing a column for: %s' % (file, column_type)))
sys.exit()
else:
if DEBUG:
print(('Column value for %s is %s' % (column_type, column_num)))
return column_num | For an input .STAR file, search through the header and find the column numbers assigned to a given column_type (e.g. 'rlnMicrographName', ...) | star_file_tools/remap_optics_groups.py | find_star_column | akeszei/em_toolkit | 0 | python | def find_star_column(file, column_type, header_length):
" \n "
column_num = None
with open(file, 'r') as f:
line_num = 0
for line in f:
line_num += 1
if (column_type in line):
column_num = int(line.split()[1].replace('#', ))
if (line_num >= header_length):
if (column_num is None):
print(('Input .STAR file: %s, is missing a column for: %s' % (file, column_type)))
sys.exit()
else:
if DEBUG:
print(('Column value for %s is %s' % (column_type, column_num)))
return column_num | def find_star_column(file, column_type, header_length):
" \n "
column_num = None
with open(file, 'r') as f:
line_num = 0
for line in f:
line_num += 1
if (column_type in line):
column_num = int(line.split()[1].replace('#', ))
if (line_num >= header_length):
if (column_num is None):
print(('Input .STAR file: %s, is missing a column for: %s' % (file, column_type)))
sys.exit()
else:
if DEBUG:
print(('Column value for %s is %s' % (column_type, column_num)))
return column_num<|docstring|>For an input .STAR file, search through the header and find the column numbers assigned to a given column_type (e.g. 'rlnMicrographName', ...)<|endoftext|> |
01ac0cc00b754f8ff8f51a3e247c905da848eaee088a4a0724c77fdbc2ceda8b | def find_star_info(line, column):
" For a given .STAR file line entry, extract the data at the given column index.\n If the column does not exist (e.g. for a header line read in), return 'False'\n "
line_to_list = line.split()
try:
column_value = line_to_list[(column - 1)]
return column_value
except:
return False | For a given .STAR file line entry, extract the data at the given column index.
If the column does not exist (e.g. for a header line read in), return 'False' | star_file_tools/remap_optics_groups.py | find_star_info | akeszei/em_toolkit | 0 | python | def find_star_info(line, column):
" For a given .STAR file line entry, extract the data at the given column index.\n If the column does not exist (e.g. for a header line read in), return 'False'\n "
line_to_list = line.split()
try:
column_value = line_to_list[(column - 1)]
return column_value
except:
return False | def find_star_info(line, column):
" For a given .STAR file line entry, extract the data at the given column index.\n If the column does not exist (e.g. for a header line read in), return 'False'\n "
line_to_list = line.split()
try:
column_value = line_to_list[(column - 1)]
return column_value
except:
return False<|docstring|>For a given .STAR file line entry, extract the data at the given column index.
If the column does not exist (e.g. for a header line read in), return 'False'<|endoftext|> |
9bcb26c8d20837ff1a3eb9eff68c186bc0f2b9ebaa20730bd82d244056d9ea05 | def extract_mic_name(input_string):
" Parse the entry for 'rlnMicrographName' to extract only the micrograph name without any path names etc...\n "
mic_name = os.path.basename(input_string)
return mic_name | Parse the entry for 'rlnMicrographName' to extract only the micrograph name without any path names etc... | star_file_tools/remap_optics_groups.py | extract_mic_name | akeszei/em_toolkit | 0 | python | def extract_mic_name(input_string):
" \n "
mic_name = os.path.basename(input_string)
return mic_name | def extract_mic_name(input_string):
" \n "
mic_name = os.path.basename(input_string)
return mic_name<|docstring|>Parse the entry for 'rlnMicrographName' to extract only the micrograph name without any path names etc...<|endoftext|> |
c0eeceac260f130fa7d772c2341b5be8f549a0be7f0b91c903a58ac7045b1c64 | def get_container_pipeline() -> RenderingPipeline:
'Returns setup for container'
return RenderingPipeline(pipes=[apply_attributes, render_container_children], name='container pipeline') | Returns setup for container | pyviews/containers.py | get_container_pipeline | eumis/pyviews | 6 | python | def get_container_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_container_children], name='container pipeline') | def get_container_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_container_children], name='container pipeline')<|docstring|>Returns setup for container<|endoftext|> |
894942dd6546ab4880aca424c6039fbcfbd5421073079852d9a48e33cbc14ffb | def render_container_children(node, context: RenderingContext):
'Renders container children'
render_children(node, context, get_child_context) | Renders container children | pyviews/containers.py | render_container_children | eumis/pyviews | 6 | python | def render_container_children(node, context: RenderingContext):
render_children(node, context, get_child_context) | def render_container_children(node, context: RenderingContext):
render_children(node, context, get_child_context)<|docstring|>Renders container children<|endoftext|> |
f90a249e810fff568f2b978d8002b2be8bd73a7bedbc7d855942f51d19583a6e | def get_view_pipeline() -> RenderingPipeline:
'Returns setup for container'
return RenderingPipeline(pipes=[apply_attributes, render_view_content, rerender_on_view_change], name='view pipeline') | Returns setup for container | pyviews/containers.py | get_view_pipeline | eumis/pyviews | 6 | python | def get_view_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_view_content, rerender_on_view_change], name='view pipeline') | def get_view_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_view_content, rerender_on_view_change], name='view pipeline')<|docstring|>Returns setup for container<|endoftext|> |
b530d6bf9a1d9a27966eeec630b79f66650269b1d7b9b7e79a3c5f26d7d40d9f | def render_view_content(node: View, context: RenderingContext):
'Finds view by name attribute and renders it as view node child'
if node.name:
child_context = get_child_context(node.xml_node, node, context)
content = render_view(node.name, child_context)
node.add_child(content) | Finds view by name attribute and renders it as view node child | pyviews/containers.py | render_view_content | eumis/pyviews | 6 | python | def render_view_content(node: View, context: RenderingContext):
if node.name:
child_context = get_child_context(node.xml_node, node, context)
content = render_view(node.name, child_context)
node.add_child(content) | def render_view_content(node: View, context: RenderingContext):
if node.name:
child_context = get_child_context(node.xml_node, node, context)
content = render_view(node.name, child_context)
node.add_child(content)<|docstring|>Finds view by name attribute and renders it as view node child<|endoftext|> |
35f1adfb4c2773444c7285468578c592a29adc687115d89aba33fa015d489a84 | def rerender_on_view_change(node: View, context: RenderingContext):
'Subscribes to name change and renders new view'
node.observe('name', (lambda _, __: _rerender_view(node, context))) | Subscribes to name change and renders new view | pyviews/containers.py | rerender_on_view_change | eumis/pyviews | 6 | python | def rerender_on_view_change(node: View, context: RenderingContext):
node.observe('name', (lambda _, __: _rerender_view(node, context))) | def rerender_on_view_change(node: View, context: RenderingContext):
node.observe('name', (lambda _, __: _rerender_view(node, context)))<|docstring|>Subscribes to name change and renders new view<|endoftext|> |
77a8833b3c3c695f4283b5c21ba161b54b109c7cbf94242a9e9f0bf7edf92b11 | def get_for_pipeline() -> RenderingPipeline:
'Returns setup for For node'
return RenderingPipeline(pipes=[apply_attributes, render_for_items, rerender_on_items_change], name='for pipeline') | Returns setup for For node | pyviews/containers.py | get_for_pipeline | eumis/pyviews | 6 | python | def get_for_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_for_items, rerender_on_items_change], name='for pipeline') | def get_for_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_for_items, rerender_on_items_change], name='for pipeline')<|docstring|>Returns setup for For node<|endoftext|> |
bf805ab2225d4a7fdda3ac2542056e6e384c83031cb1ab1ad1f27c40421058fb | def render_for_items(node: For, context: RenderingContext):
'Renders For children'
_render_for_children(node, node.items, context) | Renders For children | pyviews/containers.py | render_for_items | eumis/pyviews | 6 | python | def render_for_items(node: For, context: RenderingContext):
_render_for_children(node, node.items, context) | def render_for_items(node: For, context: RenderingContext):
_render_for_children(node, node.items, context)<|docstring|>Renders For children<|endoftext|> |
b967fc6ce0be9b760ccc725eccef60c3d207d4c422c87d05cc1827d336572801 | def rerender_on_items_change(node: For, context: RenderingContext):
'Subscribes to items change and updates children'
node.observe('items', (lambda _, __: _on_items_changed(node, context))) | Subscribes to items change and updates children | pyviews/containers.py | rerender_on_items_change | eumis/pyviews | 6 | python | def rerender_on_items_change(node: For, context: RenderingContext):
node.observe('items', (lambda _, __: _on_items_changed(node, context))) | def rerender_on_items_change(node: For, context: RenderingContext):
node.observe('items', (lambda _, __: _on_items_changed(node, context)))<|docstring|>Subscribes to items change and updates children<|endoftext|> |
2fa80f49db4099b0242a47e6729b1daf30734529719b1fefcf95f53d7d362236 | def get_if_pipeline() -> RenderingPipeline:
'Returns setup for For node'
return RenderingPipeline(pipes=[apply_attributes, render_if, rerender_on_condition_change], name='if pipeline') | Returns setup for For node | pyviews/containers.py | get_if_pipeline | eumis/pyviews | 6 | python | def get_if_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_if, rerender_on_condition_change], name='if pipeline') | def get_if_pipeline() -> RenderingPipeline:
return RenderingPipeline(pipes=[apply_attributes, render_if, rerender_on_condition_change], name='if pipeline')<|docstring|>Returns setup for For node<|endoftext|> |
a876b4bf95db8bcefa8ebdd85a38bddc8b07b6098c9ab99f64e96a26bf139d0f | def render_if(node: If, context: RenderingContext):
'Renders children nodes if condition is true'
if node.condition:
render_children(node, context, get_child_context) | Renders children nodes if condition is true | pyviews/containers.py | render_if | eumis/pyviews | 6 | python | def render_if(node: If, context: RenderingContext):
if node.condition:
render_children(node, context, get_child_context) | def render_if(node: If, context: RenderingContext):
if node.condition:
render_children(node, context, get_child_context)<|docstring|>Renders children nodes if condition is true<|endoftext|> |
55795c178c30f9521717c9fb3b789cacb4334b2f2d4e183c26af8604ffa289c5 | def rerender_on_condition_change(node: If, context: RenderingContext):
'Rerenders if on condition change'
node.observe('condition', (lambda _, __: _on_condition_change(node, context))) | Rerenders if on condition change | pyviews/containers.py | rerender_on_condition_change | eumis/pyviews | 6 | python | def rerender_on_condition_change(node: If, context: RenderingContext):
node.observe('condition', (lambda _, __: _on_condition_change(node, context))) | def rerender_on_condition_change(node: If, context: RenderingContext):
node.observe('condition', (lambda _, __: _on_condition_change(node, context)))<|docstring|>Rerenders if on condition change<|endoftext|> |
85f6ff763074a8f0ce50f851c23bb1f9c84ad7b4070238746205e123b0be2447 | @property
def name(self) -> str:
'Returns view name'
return self._name | Returns view name | pyviews/containers.py | name | eumis/pyviews | 6 | python | @property
def name(self) -> str:
return self._name | @property
def name(self) -> str:
return self._name<|docstring|>Returns view name<|endoftext|> |
8051b6504e35aa421c935d52b7d0631a20afb1654355695ae6e3b2a06ef1842d | @property
def items(self):
'Returns items'
return self._items | Returns items | pyviews/containers.py | items | eumis/pyviews | 6 | python | @property
def items(self):
return self._items | @property
def items(self):
return self._items<|docstring|>Returns items<|endoftext|> |
d70e649ab0d64457b0a7a37de15a47e651392d3ae6f6c3ba3bd17b043343cea2 | @property
def condition(self):
'Returns condition'
return self._condition | Returns condition | pyviews/containers.py | condition | eumis/pyviews | 6 | python | @property
def condition(self):
return self._condition | @property
def condition(self):
return self._condition<|docstring|>Returns condition<|endoftext|> |
2edea4ede59b64e625a1469fee2e83ba204ecb7963dd9a152c7d25748f10d936 | def shuffle(random_state, *args):
'\n random_state: int\n args: List[Tensor]\n\n returns: List[Tensor]\n '
torch.manual_seed(random_state)
size = args[0].size(0)
perm = torch.randperm(size)
res = [x[perm] for x in args]
return res | random_state: int
args: List[Tensor]
returns: List[Tensor] | scripts/helper_utils/pre_process.py | shuffle | hemanthkandula/CLAN | 4 | python | def shuffle(random_state, *args):
'\n random_state: int\n args: List[Tensor]\n\n returns: List[Tensor]\n '
torch.manual_seed(random_state)
size = args[0].size(0)
perm = torch.randperm(size)
res = [x[perm] for x in args]
return res | def shuffle(random_state, *args):
'\n random_state: int\n args: List[Tensor]\n\n returns: List[Tensor]\n '
torch.manual_seed(random_state)
size = args[0].size(0)
perm = torch.randperm(size)
res = [x[perm] for x in args]
return res<|docstring|>random_state: int
args: List[Tensor]
returns: List[Tensor]<|endoftext|> |
5de064c0bcacc13ef5866adece3e3743383460a2c26d4cbe419c350b224ea7bd | def load_senti_corpus(path, vocab, encoding='utf-8', maxlen=512, random_state=None, labels=['__pos__', '__neg__']):
'\n path: str\n vocab: Vocab\n encoding: str\n maxlen: int\n random_state: int\n labels: List[str]\n\n returns: LongTensor of shape (size, maxlen), LongTensor of shape (size,)\n '
(corpus, y) = ([], [])
l2i = {l: i for (i, l) in enumerate(labels)}
with open(path, 'r', encoding=encoding) as fin:
for line in fin:
(label, text) = line.rstrip().split(' ', 1)
y.append(l2i[label])
corpus.append(([(vocab.w2idx[w] if (w in vocab) else vocab.w2idx[UNK_TOK]) for w in text.split(' ')] + [vocab.w2idx[EOS_TOK]]))
size = len(corpus)
X = torch.full((size, maxlen), vocab.w2idx[PAD_TOK], dtype=torch.int64)
l = torch.empty(size, dtype=torch.int64)
y = torch.tensor(y)
for (i, xs) in enumerate(corpus):
sl = min(len(xs), maxlen)
l[i] = sl
X[(i, :sl)] = torch.tensor(xs[:sl])
if (random_state is not None):
(X, y, l) = shuffle(random_state, X, y, l)
return (X, y, l) | path: str
vocab: Vocab
encoding: str
maxlen: int
random_state: int
labels: List[str]
returns: LongTensor of shape (size, maxlen), LongTensor of shape (size,) | scripts/helper_utils/pre_process.py | load_senti_corpus | hemanthkandula/CLAN | 4 | python | def load_senti_corpus(path, vocab, encoding='utf-8', maxlen=512, random_state=None, labels=['__pos__', '__neg__']):
'\n path: str\n vocab: Vocab\n encoding: str\n maxlen: int\n random_state: int\n labels: List[str]\n\n returns: LongTensor of shape (size, maxlen), LongTensor of shape (size,)\n '
(corpus, y) = ([], [])
l2i = {l: i for (i, l) in enumerate(labels)}
with open(path, 'r', encoding=encoding) as fin:
for line in fin:
(label, text) = line.rstrip().split(' ', 1)
y.append(l2i[label])
corpus.append(([(vocab.w2idx[w] if (w in vocab) else vocab.w2idx[UNK_TOK]) for w in text.split(' ')] + [vocab.w2idx[EOS_TOK]]))
size = len(corpus)
X = torch.full((size, maxlen), vocab.w2idx[PAD_TOK], dtype=torch.int64)
l = torch.empty(size, dtype=torch.int64)
y = torch.tensor(y)
for (i, xs) in enumerate(corpus):
sl = min(len(xs), maxlen)
l[i] = sl
X[(i, :sl)] = torch.tensor(xs[:sl])
if (random_state is not None):
(X, y, l) = shuffle(random_state, X, y, l)
return (X, y, l) | def load_senti_corpus(path, vocab, encoding='utf-8', maxlen=512, random_state=None, labels=['__pos__', '__neg__']):
'\n path: str\n vocab: Vocab\n encoding: str\n maxlen: int\n random_state: int\n labels: List[str]\n\n returns: LongTensor of shape (size, maxlen), LongTensor of shape (size,)\n '
(corpus, y) = ([], [])
l2i = {l: i for (i, l) in enumerate(labels)}
with open(path, 'r', encoding=encoding) as fin:
for line in fin:
(label, text) = line.rstrip().split(' ', 1)
y.append(l2i[label])
corpus.append(([(vocab.w2idx[w] if (w in vocab) else vocab.w2idx[UNK_TOK]) for w in text.split(' ')] + [vocab.w2idx[EOS_TOK]]))
size = len(corpus)
X = torch.full((size, maxlen), vocab.w2idx[PAD_TOK], dtype=torch.int64)
l = torch.empty(size, dtype=torch.int64)
y = torch.tensor(y)
for (i, xs) in enumerate(corpus):
sl = min(len(xs), maxlen)
l[i] = sl
X[(i, :sl)] = torch.tensor(xs[:sl])
if (random_state is not None):
(X, y, l) = shuffle(random_state, X, y, l)
return (X, y, l)<|docstring|>path: str
vocab: Vocab
encoding: str
maxlen: int
random_state: int
labels: List[str]
returns: LongTensor of shape (size, maxlen), LongTensor of shape (size,)<|endoftext|> |
2aaaab8c88f34114af85d8f5073a211212c1cff2a9d38538db97f8651304bb83 | def load_lm_corpus(path, vocab, encoding='utf-8', random_state=None):
'\n path: str\n vocab: Vocab\n encoding: str\n random_state: int (optional)\n\n returns: torch.LongTensor of shape (corpus_size,)\n '
if (random_state is None):
with open(path, 'r', encoding=encoding) as f:
ntokens = 0
for line in f:
words = (line.rstrip().split(' ') + [EOS_TOK])
ntokens += len(words)
with open(path, 'r', encoding=encoding) as f:
ids = torch.LongTensor(ntokens)
p = 0
for line in f:
words = (line.rstrip().split(' ') + [EOS_TOK])
for w in words:
if (w in vocab.w2idx):
ids[p] = vocab.w2idx[w]
else:
ids[p] = vocab.w2idx[UNK_TOK]
p += 1
else:
with open(path, 'r', encoding=encoding) as f:
corpus = [((line.rstrip() + ' ') + EOS_TOK) for line in f]
ntokens = sum((len(line.split(' ')) for line in corpus))
ids = torch.LongTensor(ntokens)
p = 0
np.random.seed(random_state)
for i in np.random.permutation(len(corpus)):
for w in corpus[i].split(' '):
if (w in vocab.w2idx):
ids[p] = vocab.w2idx[w]
else:
ids[p] = vocab.w2idx[UNK_TOK]
p += 1
return ids | path: str
vocab: Vocab
encoding: str
random_state: int (optional)
returns: torch.LongTensor of shape (corpus_size,) | scripts/helper_utils/pre_process.py | load_lm_corpus | hemanthkandula/CLAN | 4 | python | def load_lm_corpus(path, vocab, encoding='utf-8', random_state=None):
'\n path: str\n vocab: Vocab\n encoding: str\n random_state: int (optional)\n\n returns: torch.LongTensor of shape (corpus_size,)\n '
if (random_state is None):
with open(path, 'r', encoding=encoding) as f:
ntokens = 0
for line in f:
words = (line.rstrip().split(' ') + [EOS_TOK])
ntokens += len(words)
with open(path, 'r', encoding=encoding) as f:
ids = torch.LongTensor(ntokens)
p = 0
for line in f:
words = (line.rstrip().split(' ') + [EOS_TOK])
for w in words:
if (w in vocab.w2idx):
ids[p] = vocab.w2idx[w]
else:
ids[p] = vocab.w2idx[UNK_TOK]
p += 1
else:
with open(path, 'r', encoding=encoding) as f:
corpus = [((line.rstrip() + ' ') + EOS_TOK) for line in f]
ntokens = sum((len(line.split(' ')) for line in corpus))
ids = torch.LongTensor(ntokens)
p = 0
np.random.seed(random_state)
for i in np.random.permutation(len(corpus)):
for w in corpus[i].split(' '):
if (w in vocab.w2idx):
ids[p] = vocab.w2idx[w]
else:
ids[p] = vocab.w2idx[UNK_TOK]
p += 1
return ids | def load_lm_corpus(path, vocab, encoding='utf-8', random_state=None):
'\n path: str\n vocab: Vocab\n encoding: str\n random_state: int (optional)\n\n returns: torch.LongTensor of shape (corpus_size,)\n '
if (random_state is None):
with open(path, 'r', encoding=encoding) as f:
ntokens = 0
for line in f:
words = (line.rstrip().split(' ') + [EOS_TOK])
ntokens += len(words)
with open(path, 'r', encoding=encoding) as f:
ids = torch.LongTensor(ntokens)
p = 0
for line in f:
words = (line.rstrip().split(' ') + [EOS_TOK])
for w in words:
if (w in vocab.w2idx):
ids[p] = vocab.w2idx[w]
else:
ids[p] = vocab.w2idx[UNK_TOK]
p += 1
else:
with open(path, 'r', encoding=encoding) as f:
corpus = [((line.rstrip() + ' ') + EOS_TOK) for line in f]
ntokens = sum((len(line.split(' ')) for line in corpus))
ids = torch.LongTensor(ntokens)
p = 0
np.random.seed(random_state)
for i in np.random.permutation(len(corpus)):
for w in corpus[i].split(' '):
if (w in vocab.w2idx):
ids[p] = vocab.w2idx[w]
else:
ids[p] = vocab.w2idx[UNK_TOK]
p += 1
return ids<|docstring|>path: str
vocab: Vocab
encoding: str
random_state: int (optional)
returns: torch.LongTensor of shape (corpus_size,)<|endoftext|> |
373601f2be42101d37484bac1ea8926144789ffad913378d2daa249c4e25fdcc | def load_vectors_with_vocab(path, vocab, maxload=(- 1)):
'\n path: str\n vocab: Vocab\n maxload: int\n '
count = 0
with open(path, 'r', encoding='utf-8', newline='\n', errors='ignore') as fin:
(n, d) = map(int, fin.readline().split(' '))
x = np.zeros([len(vocab), d])
words = []
for (i, line) in enumerate(fin):
if ((maxload > 0) and (i >= maxload)):
break
tokens = line.rstrip().split(' ')
if (tokens[0] in vocab):
x[vocab.w2idx[tokens[0]]] = np.array(tokens[1:], dtype=float)
count += 1
return (x, count) | path: str
vocab: Vocab
maxload: int | scripts/helper_utils/pre_process.py | load_vectors_with_vocab | hemanthkandula/CLAN | 4 | python | def load_vectors_with_vocab(path, vocab, maxload=(- 1)):
'\n path: str\n vocab: Vocab\n maxload: int\n '
count = 0
with open(path, 'r', encoding='utf-8', newline='\n', errors='ignore') as fin:
(n, d) = map(int, fin.readline().split(' '))
x = np.zeros([len(vocab), d])
words = []
for (i, line) in enumerate(fin):
if ((maxload > 0) and (i >= maxload)):
break
tokens = line.rstrip().split(' ')
if (tokens[0] in vocab):
x[vocab.w2idx[tokens[0]]] = np.array(tokens[1:], dtype=float)
count += 1
return (x, count) | def load_vectors_with_vocab(path, vocab, maxload=(- 1)):
'\n path: str\n vocab: Vocab\n maxload: int\n '
count = 0
with open(path, 'r', encoding='utf-8', newline='\n', errors='ignore') as fin:
(n, d) = map(int, fin.readline().split(' '))
x = np.zeros([len(vocab), d])
words = []
for (i, line) in enumerate(fin):
if ((maxload > 0) and (i >= maxload)):
break
tokens = line.rstrip().split(' ')
if (tokens[0] in vocab):
x[vocab.w2idx[tokens[0]]] = np.array(tokens[1:], dtype=float)
count += 1
return (x, count)<|docstring|>path: str
vocab: Vocab
maxload: int<|endoftext|> |
7d5f6e43f9b1693d91134458ff38b13d11ebe12ced168b44780672603f08a701 | def load_lexicon(path, src_vocab, trg_vocab, encoding='utf-8', verbose=False):
'\n path: str\n src_vocab: Vocab\n trg_vocab: Vocab\n encoding: str\n verbose: bool\n\n returns: collections.defautldict\n '
lexicon = collections.defaultdict(set)
vocab = set()
with open(path, 'r', encoding='utf-8') as fin:
for line in fin:
(src, trg) = line.rstrip().split()
if ((src in src_vocab) and (trg in trg_vocab)):
lexicon[src_vocab.w2idx[src]].add(trg_vocab.w2idx[trg])
vocab.add(src)
if verbose:
print('[{}] OOV rate = {:.4f}'.format(path, (1 - (len(lexicon) / len(vocab)))))
return (lexicon, len(vocab)) | path: str
src_vocab: Vocab
trg_vocab: Vocab
encoding: str
verbose: bool
returns: collections.defautldict | scripts/helper_utils/pre_process.py | load_lexicon | hemanthkandula/CLAN | 4 | python | def load_lexicon(path, src_vocab, trg_vocab, encoding='utf-8', verbose=False):
'\n path: str\n src_vocab: Vocab\n trg_vocab: Vocab\n encoding: str\n verbose: bool\n\n returns: collections.defautldict\n '
lexicon = collections.defaultdict(set)
vocab = set()
with open(path, 'r', encoding='utf-8') as fin:
for line in fin:
(src, trg) = line.rstrip().split()
if ((src in src_vocab) and (trg in trg_vocab)):
lexicon[src_vocab.w2idx[src]].add(trg_vocab.w2idx[trg])
vocab.add(src)
if verbose:
print('[{}] OOV rate = {:.4f}'.format(path, (1 - (len(lexicon) / len(vocab)))))
return (lexicon, len(vocab)) | def load_lexicon(path, src_vocab, trg_vocab, encoding='utf-8', verbose=False):
'\n path: str\n src_vocab: Vocab\n trg_vocab: Vocab\n encoding: str\n verbose: bool\n\n returns: collections.defautldict\n '
lexicon = collections.defaultdict(set)
vocab = set()
with open(path, 'r', encoding='utf-8') as fin:
for line in fin:
(src, trg) = line.rstrip().split()
if ((src in src_vocab) and (trg in trg_vocab)):
lexicon[src_vocab.w2idx[src]].add(trg_vocab.w2idx[trg])
vocab.add(src)
if verbose:
print('[{}] OOV rate = {:.4f}'.format(path, (1 - (len(lexicon) / len(vocab)))))
return (lexicon, len(vocab))<|docstring|>path: str
src_vocab: Vocab
trg_vocab: Vocab
encoding: str
verbose: bool
returns: collections.defautldict<|endoftext|> |
a1be048c9075456a154a9ff3dc31520d23b4a8ca1f62e06efed2902f9d174e49 | def _match_all_files(file_path: Text) -> Text:
'Return expression to match all files at given path.'
return (file_path + '*') | Return expression to match all files at given path. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | _match_all_files | TheWayOfTheRob/model-analysis | 1 | python | def _match_all_files(file_path: Text) -> Text:
return (file_path + '*') | def _match_all_files(file_path: Text) -> Text:
return (file_path + '*')<|docstring|>Return expression to match all files at given path.<|endoftext|> |
e102e10040195f69d51313b06bc6e2af9068e84803d54cc6b4498a76b7bad071 | def _parquet_column_iterator(paths: Iterable[str], column_name: str) -> Iterator[pa.Buffer]:
'Yields values from a bytes column in a set of parquet file partitions.'
dataset = pa.parquet.ParquetDataset(paths)
table = dataset.read(columns=[column_name])
for record_batch in table.to_batches():
value_array = record_batch.column(0)
for value in value_array:
(yield value.as_buffer()) | Yields values from a bytes column in a set of parquet file partitions. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | _parquet_column_iterator | TheWayOfTheRob/model-analysis | 1 | python | def _parquet_column_iterator(paths: Iterable[str], column_name: str) -> Iterator[pa.Buffer]:
dataset = pa.parquet.ParquetDataset(paths)
table = dataset.read(columns=[column_name])
for record_batch in table.to_batches():
value_array = record_batch.column(0)
for value in value_array:
(yield value.as_buffer()) | def _parquet_column_iterator(paths: Iterable[str], column_name: str) -> Iterator[pa.Buffer]:
dataset = pa.parquet.ParquetDataset(paths)
table = dataset.read(columns=[column_name])
for record_batch in table.to_batches():
value_array = record_batch.column(0)
for value in value_array:
(yield value.as_buffer())<|docstring|>Yields values from a bytes column in a set of parquet file partitions.<|endoftext|> |
71e61bae1ee6a93102e2eaab6785247f36a645f92ed78a57e431f5cad560802a | def _raw_value_iterator(paths: Iterable[Text], output_file_format: Text) -> Iterator[Union[(pa.Buffer, bytes)]]:
'Returns an iterator of raw per-record values from supported file formats.\n\n When reading parquet format files, values from the column with name\n _SERIALIZED_VALUE_PARQUET_COLUMN_NAME will be read.\n\n Args:\n paths: The paths from which to read records\n output_file_format: The format of the files from which to read records.\n\n Returns:\n An iterator which yields serialized values.\n\n Raises:\n ValueError when the output_file_format is unknown.\n '
if (output_file_format == _PARQUET_FORMAT):
return _parquet_column_iterator(paths, _SERIALIZED_VALUE_PARQUET_COLUMN_NAME)
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
return itertools.chain(*(tf.compat.v1.python_io.tf_record_iterator(path) for path in paths))
raise ValueError('Formats "{}" are currently supported but got output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format)) | Returns an iterator of raw per-record values from supported file formats.
When reading parquet format files, values from the column with name
_SERIALIZED_VALUE_PARQUET_COLUMN_NAME will be read.
Args:
paths: The paths from which to read records
output_file_format: The format of the files from which to read records.
Returns:
An iterator which yields serialized values.
Raises:
ValueError when the output_file_format is unknown. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | _raw_value_iterator | TheWayOfTheRob/model-analysis | 1 | python | def _raw_value_iterator(paths: Iterable[Text], output_file_format: Text) -> Iterator[Union[(pa.Buffer, bytes)]]:
'Returns an iterator of raw per-record values from supported file formats.\n\n When reading parquet format files, values from the column with name\n _SERIALIZED_VALUE_PARQUET_COLUMN_NAME will be read.\n\n Args:\n paths: The paths from which to read records\n output_file_format: The format of the files from which to read records.\n\n Returns:\n An iterator which yields serialized values.\n\n Raises:\n ValueError when the output_file_format is unknown.\n '
if (output_file_format == _PARQUET_FORMAT):
return _parquet_column_iterator(paths, _SERIALIZED_VALUE_PARQUET_COLUMN_NAME)
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
return itertools.chain(*(tf.compat.v1.python_io.tf_record_iterator(path) for path in paths))
raise ValueError('Formats "{}" are currently supported but got output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format)) | def _raw_value_iterator(paths: Iterable[Text], output_file_format: Text) -> Iterator[Union[(pa.Buffer, bytes)]]:
'Returns an iterator of raw per-record values from supported file formats.\n\n When reading parquet format files, values from the column with name\n _SERIALIZED_VALUE_PARQUET_COLUMN_NAME will be read.\n\n Args:\n paths: The paths from which to read records\n output_file_format: The format of the files from which to read records.\n\n Returns:\n An iterator which yields serialized values.\n\n Raises:\n ValueError when the output_file_format is unknown.\n '
if (output_file_format == _PARQUET_FORMAT):
return _parquet_column_iterator(paths, _SERIALIZED_VALUE_PARQUET_COLUMN_NAME)
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
return itertools.chain(*(tf.compat.v1.python_io.tf_record_iterator(path) for path in paths))
raise ValueError('Formats "{}" are currently supported but got output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format))<|docstring|>Returns an iterator of raw per-record values from supported file formats.
When reading parquet format files, values from the column with name
_SERIALIZED_VALUE_PARQUET_COLUMN_NAME will be read.
Args:
paths: The paths from which to read records
output_file_format: The format of the files from which to read records.
Returns:
An iterator which yields serialized values.
Raises:
ValueError when the output_file_format is unknown.<|endoftext|> |
bbc94cfa813fdbd2225d7c3ecd04bfd946f5c168f12c962a7faffb7aebbc54de | def load_and_deserialize_metrics(output_path: Text, output_file_format: Text='', slice_specs: Optional[Iterable[slicer.SingleSliceSpec]]=None) -> Iterator[metrics_for_slice_pb2.MetricsForSlice]:
"Read and deserialize the MetricsForSlice records.\n\n Args:\n output_path: Path or pattern to search for metrics files under. If a\n directory is passed, files matching 'metrics*' will be searched for.\n output_file_format: Optional file extension to filter files by.\n slice_specs: A set of SingleSliceSpecs to use for filtering returned\n metrics. The metrics for a given slice key will be returned if that slice\n key matches any of the slice_specs.\n\n Yields:\n MetricsForSlice protos found in matching files.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.METRICS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
metrics = metrics_for_slice_pb2.MetricsForSlice.FromString(value)
if (slice_specs and (not slicer.slice_key_matches_slice_specs(slicer.deserialize_slice_key(metrics.slice_key), slice_specs))):
continue
(yield metrics) | Read and deserialize the MetricsForSlice records.
Args:
output_path: Path or pattern to search for metrics files under. If a
directory is passed, files matching 'metrics*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned
metrics. The metrics for a given slice key will be returned if that slice
key matches any of the slice_specs.
Yields:
MetricsForSlice protos found in matching files. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | load_and_deserialize_metrics | TheWayOfTheRob/model-analysis | 1 | python | def load_and_deserialize_metrics(output_path: Text, output_file_format: Text=, slice_specs: Optional[Iterable[slicer.SingleSliceSpec]]=None) -> Iterator[metrics_for_slice_pb2.MetricsForSlice]:
"Read and deserialize the MetricsForSlice records.\n\n Args:\n output_path: Path or pattern to search for metrics files under. If a\n directory is passed, files matching 'metrics*' will be searched for.\n output_file_format: Optional file extension to filter files by.\n slice_specs: A set of SingleSliceSpecs to use for filtering returned\n metrics. The metrics for a given slice key will be returned if that slice\n key matches any of the slice_specs.\n\n Yields:\n MetricsForSlice protos found in matching files.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.METRICS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
metrics = metrics_for_slice_pb2.MetricsForSlice.FromString(value)
if (slice_specs and (not slicer.slice_key_matches_slice_specs(slicer.deserialize_slice_key(metrics.slice_key), slice_specs))):
continue
(yield metrics) | def load_and_deserialize_metrics(output_path: Text, output_file_format: Text=, slice_specs: Optional[Iterable[slicer.SingleSliceSpec]]=None) -> Iterator[metrics_for_slice_pb2.MetricsForSlice]:
"Read and deserialize the MetricsForSlice records.\n\n Args:\n output_path: Path or pattern to search for metrics files under. If a\n directory is passed, files matching 'metrics*' will be searched for.\n output_file_format: Optional file extension to filter files by.\n slice_specs: A set of SingleSliceSpecs to use for filtering returned\n metrics. The metrics for a given slice key will be returned if that slice\n key matches any of the slice_specs.\n\n Yields:\n MetricsForSlice protos found in matching files.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.METRICS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
metrics = metrics_for_slice_pb2.MetricsForSlice.FromString(value)
if (slice_specs and (not slicer.slice_key_matches_slice_specs(slicer.deserialize_slice_key(metrics.slice_key), slice_specs))):
continue
(yield metrics)<|docstring|>Read and deserialize the MetricsForSlice records.
Args:
output_path: Path or pattern to search for metrics files under. If a
directory is passed, files matching 'metrics*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned
metrics. The metrics for a given slice key will be returned if that slice
key matches any of the slice_specs.
Yields:
MetricsForSlice protos found in matching files.<|endoftext|> |
505e8faaa31f3426de1b5d3c06ab2e879ba84e9c273ecf0957755b5306ae2a86 | def load_and_deserialize_plots(output_path: Text, output_file_format: Text='', slice_specs: Optional[Iterable[slicer.SingleSliceSpec]]=None) -> Iterator[metrics_for_slice_pb2.PlotsForSlice]:
"Read and deserialize the PlotsForSlice records.\n\n Args:\n output_path: Path or pattern to search for plots files under. If a directory\n is passed, files matching 'plots*' will be searched for.\n output_file_format: Optional file extension to filter files by.\n slice_specs: A set of SingleSliceSpecs to use for filtering returned plots.\n The plots for a given slice key will be returned if that slice key matches\n any of the slice_specs.\n\n Yields:\n PlotsForSlice protos found in matching files.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.PLOTS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
plots = metrics_for_slice_pb2.PlotsForSlice.FromString(value)
if (slice_specs and (not slicer.slice_key_matches_slice_specs(slicer.deserialize_slice_key(plots.slice_key), slice_specs))):
continue
(yield plots) | Read and deserialize the PlotsForSlice records.
Args:
output_path: Path or pattern to search for plots files under. If a directory
is passed, files matching 'plots*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned plots.
The plots for a given slice key will be returned if that slice key matches
any of the slice_specs.
Yields:
PlotsForSlice protos found in matching files. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | load_and_deserialize_plots | TheWayOfTheRob/model-analysis | 1 | python | def load_and_deserialize_plots(output_path: Text, output_file_format: Text=, slice_specs: Optional[Iterable[slicer.SingleSliceSpec]]=None) -> Iterator[metrics_for_slice_pb2.PlotsForSlice]:
"Read and deserialize the PlotsForSlice records.\n\n Args:\n output_path: Path or pattern to search for plots files under. If a directory\n is passed, files matching 'plots*' will be searched for.\n output_file_format: Optional file extension to filter files by.\n slice_specs: A set of SingleSliceSpecs to use for filtering returned plots.\n The plots for a given slice key will be returned if that slice key matches\n any of the slice_specs.\n\n Yields:\n PlotsForSlice protos found in matching files.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.PLOTS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
plots = metrics_for_slice_pb2.PlotsForSlice.FromString(value)
if (slice_specs and (not slicer.slice_key_matches_slice_specs(slicer.deserialize_slice_key(plots.slice_key), slice_specs))):
continue
(yield plots) | def load_and_deserialize_plots(output_path: Text, output_file_format: Text=, slice_specs: Optional[Iterable[slicer.SingleSliceSpec]]=None) -> Iterator[metrics_for_slice_pb2.PlotsForSlice]:
"Read and deserialize the PlotsForSlice records.\n\n Args:\n output_path: Path or pattern to search for plots files under. If a directory\n is passed, files matching 'plots*' will be searched for.\n output_file_format: Optional file extension to filter files by.\n slice_specs: A set of SingleSliceSpecs to use for filtering returned plots.\n The plots for a given slice key will be returned if that slice key matches\n any of the slice_specs.\n\n Yields:\n PlotsForSlice protos found in matching files.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.PLOTS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
plots = metrics_for_slice_pb2.PlotsForSlice.FromString(value)
if (slice_specs and (not slicer.slice_key_matches_slice_specs(slicer.deserialize_slice_key(plots.slice_key), slice_specs))):
continue
(yield plots)<|docstring|>Read and deserialize the PlotsForSlice records.
Args:
output_path: Path or pattern to search for plots files under. If a directory
is passed, files matching 'plots*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned plots.
The plots for a given slice key will be returned if that slice key matches
any of the slice_specs.
Yields:
PlotsForSlice protos found in matching files.<|endoftext|> |
3829ea553803d22471d8a932d13b05c68681c27b97a960dff2ac8a07d375f40d | def load_and_deserialize_validation_result(output_path: Text, output_file_format: Text='') -> validation_result_pb2.ValidationResult:
"Read and deserialize the ValidationResult record.\n\n Args:\n output_path: Path or pattern to search for validation file under. If a\n directory is passed, a file matching 'validations*' will be searched for.\n output_file_format: Optional file extension to filter file by.\n\n Returns:\n ValidationResult proto.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.VALIDATIONS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
validation_records = []
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
validation_records.append(validation_result_pb2.ValidationResult.FromString(value))
assert (len(validation_records) == 1)
return validation_records[0] | Read and deserialize the ValidationResult record.
Args:
output_path: Path or pattern to search for validation file under. If a
directory is passed, a file matching 'validations*' will be searched for.
output_file_format: Optional file extension to filter file by.
Returns:
ValidationResult proto. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | load_and_deserialize_validation_result | TheWayOfTheRob/model-analysis | 1 | python | def load_and_deserialize_validation_result(output_path: Text, output_file_format: Text=) -> validation_result_pb2.ValidationResult:
"Read and deserialize the ValidationResult record.\n\n Args:\n output_path: Path or pattern to search for validation file under. If a\n directory is passed, a file matching 'validations*' will be searched for.\n output_file_format: Optional file extension to filter file by.\n\n Returns:\n ValidationResult proto.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.VALIDATIONS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
validation_records = []
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
validation_records.append(validation_result_pb2.ValidationResult.FromString(value))
assert (len(validation_records) == 1)
return validation_records[0] | def load_and_deserialize_validation_result(output_path: Text, output_file_format: Text=) -> validation_result_pb2.ValidationResult:
"Read and deserialize the ValidationResult record.\n\n Args:\n output_path: Path or pattern to search for validation file under. If a\n directory is passed, a file matching 'validations*' will be searched for.\n output_file_format: Optional file extension to filter file by.\n\n Returns:\n ValidationResult proto.\n "
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.VALIDATIONS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = ((pattern + '.') + output_file_format)
validation_records = []
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
validation_records.append(validation_result_pb2.ValidationResult.FromString(value))
assert (len(validation_records) == 1)
return validation_records[0]<|docstring|>Read and deserialize the ValidationResult record.
Args:
output_path: Path or pattern to search for validation file under. If a
directory is passed, a file matching 'validations*' will be searched for.
output_file_format: Optional file extension to filter file by.
Returns:
ValidationResult proto.<|endoftext|> |
f9924b3675dafb5d3d0f94d77d4d429db33a7c2ad5fbb1d6fc35f7c957ae5f02 | def _convert_to_array_value(array: np.ndarray) -> metrics_for_slice_pb2.ArrayValue:
'Converts NumPy array to ArrayValue.'
result = metrics_for_slice_pb2.ArrayValue()
result.shape[:] = array.shape
if (array.dtype == 'int32'):
result.data_type = metrics_for_slice_pb2.ArrayValue.INT32
result.int32_values[:] = array.flatten()
elif (array.dtype == 'int64'):
result.data_type = metrics_for_slice_pb2.ArrayValue.INT64
result.int64_values[:] = array.flatten()
elif (array.dtype == 'float32'):
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT32
result.float32_values[:] = array.flatten()
elif (array.dtype == 'float64'):
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT64
result.float64_values[:] = array.flatten()
else:
result.data_type = metrics_for_slice_pb2.ArrayValue.BYTES
result.bytes_values[:] = [tf.compat.as_bytes(x) for x in array.astype(six.text_type).flatten()]
return result | Converts NumPy array to ArrayValue. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | _convert_to_array_value | TheWayOfTheRob/model-analysis | 1 | python | def _convert_to_array_value(array: np.ndarray) -> metrics_for_slice_pb2.ArrayValue:
result = metrics_for_slice_pb2.ArrayValue()
result.shape[:] = array.shape
if (array.dtype == 'int32'):
result.data_type = metrics_for_slice_pb2.ArrayValue.INT32
result.int32_values[:] = array.flatten()
elif (array.dtype == 'int64'):
result.data_type = metrics_for_slice_pb2.ArrayValue.INT64
result.int64_values[:] = array.flatten()
elif (array.dtype == 'float32'):
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT32
result.float32_values[:] = array.flatten()
elif (array.dtype == 'float64'):
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT64
result.float64_values[:] = array.flatten()
else:
result.data_type = metrics_for_slice_pb2.ArrayValue.BYTES
result.bytes_values[:] = [tf.compat.as_bytes(x) for x in array.astype(six.text_type).flatten()]
return result | def _convert_to_array_value(array: np.ndarray) -> metrics_for_slice_pb2.ArrayValue:
result = metrics_for_slice_pb2.ArrayValue()
result.shape[:] = array.shape
if (array.dtype == 'int32'):
result.data_type = metrics_for_slice_pb2.ArrayValue.INT32
result.int32_values[:] = array.flatten()
elif (array.dtype == 'int64'):
result.data_type = metrics_for_slice_pb2.ArrayValue.INT64
result.int64_values[:] = array.flatten()
elif (array.dtype == 'float32'):
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT32
result.float32_values[:] = array.flatten()
elif (array.dtype == 'float64'):
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT64
result.float64_values[:] = array.flatten()
else:
result.data_type = metrics_for_slice_pb2.ArrayValue.BYTES
result.bytes_values[:] = [tf.compat.as_bytes(x) for x in array.astype(six.text_type).flatten()]
return result<|docstring|>Converts NumPy array to ArrayValue.<|endoftext|> |
fba667013d93743d1f430a0774a426c6bcd149e1b08669ffcd02761a4ba2ff01 | def convert_slice_metrics_to_proto(metrics: Tuple[(slicer.SliceKeyType, Dict[(Any, Any)])], add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> metrics_for_slice_pb2.MetricsForSlice:
'Converts the given slice metrics into serialized proto MetricsForSlice.\n\n Args:\n metrics: The slice metrics.\n add_metrics_callbacks: A list of metric callbacks. This should be the same\n list as the one passed to tfma.Evaluate().\n\n Returns:\n The MetricsForSlice proto.\n\n Raises:\n TypeError: If the type of the feature value in slice key cannot be\n recognized.\n '
result = metrics_for_slice_pb2.MetricsForSlice()
(slice_key, slice_metrics) = metrics
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_metrics = slice_metrics.copy()
if (metric_keys.ERROR_METRIC in slice_metrics):
logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_metrics[metric_keys.ERROR_METRIC])
result.metrics[metric_keys.ERROR_METRIC].debug_message = slice_metrics[metric_keys.ERROR_METRIC]
return result
if (add_metrics_callbacks and (not any((isinstance(k, metric_types.MetricKey) for k in slice_metrics.keys())))):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_stats_and_pop'):
add_metrics_callback.populate_stats_and_pop(slice_key, slice_metrics, result.metrics)
for key in sorted(slice_metrics.keys()):
value = slice_metrics[key]
metric_value = metrics_for_slice_pb2.MetricValue()
if isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
metric_value.confusion_matrix_at_thresholds.CopyFrom(value)
elif isinstance(value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
metric_value.multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
elif isinstance(value, types.ValueWithTDistribution):
(_, lower_bound, upper_bound) = math_util.calculate_confidence_interval(value)
metric_value.bounded_value.value.value = value.unsampled_value
metric_value.bounded_value.lower_bound.value = lower_bound
metric_value.bounded_value.upper_bound.value = upper_bound
metric_value.bounded_value.methodology = metrics_for_slice_pb2.BoundedValue.POISSON_BOOTSTRAP
metric_value.confidence_interval.lower_bound.value = lower_bound
metric_value.confidence_interval.upper_bound.value = upper_bound
t_dist_value = metrics_for_slice_pb2.TDistributionValue()
t_dist_value.sample_mean.value = value.sample_mean
t_dist_value.sample_standard_deviation.value = value.sample_standard_deviation
t_dist_value.sample_degrees_of_freedom.value = value.sample_degrees_of_freedom
t_dist_value.unsampled_value.value = value.unsampled_value
metric_value.confidence_interval.t_distribution_value.CopyFrom(t_dist_value)
elif isinstance(value, six.binary_type):
metric_value.bytes_value = value
elif isinstance(value, six.text_type):
metric_value.bytes_value = value.encode('utf8')
elif isinstance(value, np.ndarray):
metric_value.array_value.CopyFrom(_convert_to_array_value(value))
else:
try:
metric_value.double_value.value = float(value)
except (TypeError, ValueError) as e:
metric_value.unknown_type.value = str(value)
metric_value.unknown_type.error = e.message
if isinstance(key, metric_types.MetricKey):
key_and_value = result.metric_keys_and_values.add()
key_and_value.key.CopyFrom(key.to_proto())
key_and_value.value.CopyFrom(metric_value)
else:
result.metrics[key].CopyFrom(metric_value)
return result | Converts the given slice metrics into serialized proto MetricsForSlice.
Args:
metrics: The slice metrics.
add_metrics_callbacks: A list of metric callbacks. This should be the same
list as the one passed to tfma.Evaluate().
Returns:
The MetricsForSlice proto.
Raises:
TypeError: If the type of the feature value in slice key cannot be
recognized. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | convert_slice_metrics_to_proto | TheWayOfTheRob/model-analysis | 1 | python | def convert_slice_metrics_to_proto(metrics: Tuple[(slicer.SliceKeyType, Dict[(Any, Any)])], add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> metrics_for_slice_pb2.MetricsForSlice:
'Converts the given slice metrics into serialized proto MetricsForSlice.\n\n Args:\n metrics: The slice metrics.\n add_metrics_callbacks: A list of metric callbacks. This should be the same\n list as the one passed to tfma.Evaluate().\n\n Returns:\n The MetricsForSlice proto.\n\n Raises:\n TypeError: If the type of the feature value in slice key cannot be\n recognized.\n '
result = metrics_for_slice_pb2.MetricsForSlice()
(slice_key, slice_metrics) = metrics
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_metrics = slice_metrics.copy()
if (metric_keys.ERROR_METRIC in slice_metrics):
logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_metrics[metric_keys.ERROR_METRIC])
result.metrics[metric_keys.ERROR_METRIC].debug_message = slice_metrics[metric_keys.ERROR_METRIC]
return result
if (add_metrics_callbacks and (not any((isinstance(k, metric_types.MetricKey) for k in slice_metrics.keys())))):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_stats_and_pop'):
add_metrics_callback.populate_stats_and_pop(slice_key, slice_metrics, result.metrics)
for key in sorted(slice_metrics.keys()):
value = slice_metrics[key]
metric_value = metrics_for_slice_pb2.MetricValue()
if isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
metric_value.confusion_matrix_at_thresholds.CopyFrom(value)
elif isinstance(value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
metric_value.multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
elif isinstance(value, types.ValueWithTDistribution):
(_, lower_bound, upper_bound) = math_util.calculate_confidence_interval(value)
metric_value.bounded_value.value.value = value.unsampled_value
metric_value.bounded_value.lower_bound.value = lower_bound
metric_value.bounded_value.upper_bound.value = upper_bound
metric_value.bounded_value.methodology = metrics_for_slice_pb2.BoundedValue.POISSON_BOOTSTRAP
metric_value.confidence_interval.lower_bound.value = lower_bound
metric_value.confidence_interval.upper_bound.value = upper_bound
t_dist_value = metrics_for_slice_pb2.TDistributionValue()
t_dist_value.sample_mean.value = value.sample_mean
t_dist_value.sample_standard_deviation.value = value.sample_standard_deviation
t_dist_value.sample_degrees_of_freedom.value = value.sample_degrees_of_freedom
t_dist_value.unsampled_value.value = value.unsampled_value
metric_value.confidence_interval.t_distribution_value.CopyFrom(t_dist_value)
elif isinstance(value, six.binary_type):
metric_value.bytes_value = value
elif isinstance(value, six.text_type):
metric_value.bytes_value = value.encode('utf8')
elif isinstance(value, np.ndarray):
metric_value.array_value.CopyFrom(_convert_to_array_value(value))
else:
try:
metric_value.double_value.value = float(value)
except (TypeError, ValueError) as e:
metric_value.unknown_type.value = str(value)
metric_value.unknown_type.error = e.message
if isinstance(key, metric_types.MetricKey):
key_and_value = result.metric_keys_and_values.add()
key_and_value.key.CopyFrom(key.to_proto())
key_and_value.value.CopyFrom(metric_value)
else:
result.metrics[key].CopyFrom(metric_value)
return result | def convert_slice_metrics_to_proto(metrics: Tuple[(slicer.SliceKeyType, Dict[(Any, Any)])], add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> metrics_for_slice_pb2.MetricsForSlice:
'Converts the given slice metrics into serialized proto MetricsForSlice.\n\n Args:\n metrics: The slice metrics.\n add_metrics_callbacks: A list of metric callbacks. This should be the same\n list as the one passed to tfma.Evaluate().\n\n Returns:\n The MetricsForSlice proto.\n\n Raises:\n TypeError: If the type of the feature value in slice key cannot be\n recognized.\n '
result = metrics_for_slice_pb2.MetricsForSlice()
(slice_key, slice_metrics) = metrics
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_metrics = slice_metrics.copy()
if (metric_keys.ERROR_METRIC in slice_metrics):
logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_metrics[metric_keys.ERROR_METRIC])
result.metrics[metric_keys.ERROR_METRIC].debug_message = slice_metrics[metric_keys.ERROR_METRIC]
return result
if (add_metrics_callbacks and (not any((isinstance(k, metric_types.MetricKey) for k in slice_metrics.keys())))):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_stats_and_pop'):
add_metrics_callback.populate_stats_and_pop(slice_key, slice_metrics, result.metrics)
for key in sorted(slice_metrics.keys()):
value = slice_metrics[key]
metric_value = metrics_for_slice_pb2.MetricValue()
if isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
metric_value.confusion_matrix_at_thresholds.CopyFrom(value)
elif isinstance(value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
metric_value.multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
elif isinstance(value, types.ValueWithTDistribution):
(_, lower_bound, upper_bound) = math_util.calculate_confidence_interval(value)
metric_value.bounded_value.value.value = value.unsampled_value
metric_value.bounded_value.lower_bound.value = lower_bound
metric_value.bounded_value.upper_bound.value = upper_bound
metric_value.bounded_value.methodology = metrics_for_slice_pb2.BoundedValue.POISSON_BOOTSTRAP
metric_value.confidence_interval.lower_bound.value = lower_bound
metric_value.confidence_interval.upper_bound.value = upper_bound
t_dist_value = metrics_for_slice_pb2.TDistributionValue()
t_dist_value.sample_mean.value = value.sample_mean
t_dist_value.sample_standard_deviation.value = value.sample_standard_deviation
t_dist_value.sample_degrees_of_freedom.value = value.sample_degrees_of_freedom
t_dist_value.unsampled_value.value = value.unsampled_value
metric_value.confidence_interval.t_distribution_value.CopyFrom(t_dist_value)
elif isinstance(value, six.binary_type):
metric_value.bytes_value = value
elif isinstance(value, six.text_type):
metric_value.bytes_value = value.encode('utf8')
elif isinstance(value, np.ndarray):
metric_value.array_value.CopyFrom(_convert_to_array_value(value))
else:
try:
metric_value.double_value.value = float(value)
except (TypeError, ValueError) as e:
metric_value.unknown_type.value = str(value)
metric_value.unknown_type.error = e.message
if isinstance(key, metric_types.MetricKey):
key_and_value = result.metric_keys_and_values.add()
key_and_value.key.CopyFrom(key.to_proto())
key_and_value.value.CopyFrom(metric_value)
else:
result.metrics[key].CopyFrom(metric_value)
return result<|docstring|>Converts the given slice metrics into serialized proto MetricsForSlice.
Args:
metrics: The slice metrics.
add_metrics_callbacks: A list of metric callbacks. This should be the same
list as the one passed to tfma.Evaluate().
Returns:
The MetricsForSlice proto.
Raises:
TypeError: If the type of the feature value in slice key cannot be
recognized.<|endoftext|> |
365bfc4e951cf7afd6ee40bf096cf25259989dcf6fb4c45148e8bc982a277bf5 | def convert_slice_plots_to_proto(plots: Tuple[(slicer.SliceKeyType, Dict[(Any, Any)])], add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> metrics_for_slice_pb2.PlotsForSlice:
'Converts the given slice plots into PlotsForSlice proto.\n\n Args:\n plots: The slice plots.\n add_metrics_callbacks: A list of metric callbacks. This should be the same\n list as the one passed to tfma.Evaluate().\n\n Returns:\n The PlotsForSlice proto.\n '
result = metrics_for_slice_pb2.PlotsForSlice()
(slice_key, slice_plots) = plots
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_plots = slice_plots.copy()
if (metric_keys.ERROR_METRIC in slice_plots):
logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_plots[metric_keys.ERROR_METRIC])
error_metric = slice_plots.pop(metric_keys.ERROR_METRIC)
result.plots[metric_keys.ERROR_METRIC].debug_message = error_metric
return result
if (add_metrics_callbacks and (not any((isinstance(k, metric_types.MetricKey) for k in slice_plots.keys())))):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_plots_and_pop'):
add_metrics_callback.populate_plots_and_pop(slice_plots, result.plots)
plots_by_key = {}
for key in sorted(slice_plots.keys()):
value = slice_plots[key]
if isinstance(key, metric_types.MetricKey):
parent_key = key._replace(name=None)
else:
continue
if (parent_key not in plots_by_key):
key_and_value = result.plot_keys_and_values.add()
key_and_value.key.CopyFrom(parent_key.to_proto())
plots_by_key[parent_key] = key_and_value.value
if isinstance(value, metrics_for_slice_pb2.CalibrationHistogramBuckets):
plots_by_key[parent_key].calibration_histogram_buckets.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
plots_by_key[parent_key].confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
plots_by_key[parent_key].multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.MultiLabelConfusionMatrixAtThresholds):
plots_by_key[parent_key].multi_label_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
if slice_plots:
if (add_metrics_callbacks is None):
add_metrics_callbacks = []
raise NotImplementedError(('some plots were not converted or popped. keys: %s. add_metrics_callbacks were: %s' % (slice_plots.keys(), [x.name for x in add_metrics_callbacks])))
return result | Converts the given slice plots into PlotsForSlice proto.
Args:
plots: The slice plots.
add_metrics_callbacks: A list of metric callbacks. This should be the same
list as the one passed to tfma.Evaluate().
Returns:
The PlotsForSlice proto. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | convert_slice_plots_to_proto | TheWayOfTheRob/model-analysis | 1 | python | def convert_slice_plots_to_proto(plots: Tuple[(slicer.SliceKeyType, Dict[(Any, Any)])], add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> metrics_for_slice_pb2.PlotsForSlice:
'Converts the given slice plots into PlotsForSlice proto.\n\n Args:\n plots: The slice plots.\n add_metrics_callbacks: A list of metric callbacks. This should be the same\n list as the one passed to tfma.Evaluate().\n\n Returns:\n The PlotsForSlice proto.\n '
result = metrics_for_slice_pb2.PlotsForSlice()
(slice_key, slice_plots) = plots
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_plots = slice_plots.copy()
if (metric_keys.ERROR_METRIC in slice_plots):
logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_plots[metric_keys.ERROR_METRIC])
error_metric = slice_plots.pop(metric_keys.ERROR_METRIC)
result.plots[metric_keys.ERROR_METRIC].debug_message = error_metric
return result
if (add_metrics_callbacks and (not any((isinstance(k, metric_types.MetricKey) for k in slice_plots.keys())))):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_plots_and_pop'):
add_metrics_callback.populate_plots_and_pop(slice_plots, result.plots)
plots_by_key = {}
for key in sorted(slice_plots.keys()):
value = slice_plots[key]
if isinstance(key, metric_types.MetricKey):
parent_key = key._replace(name=None)
else:
continue
if (parent_key not in plots_by_key):
key_and_value = result.plot_keys_and_values.add()
key_and_value.key.CopyFrom(parent_key.to_proto())
plots_by_key[parent_key] = key_and_value.value
if isinstance(value, metrics_for_slice_pb2.CalibrationHistogramBuckets):
plots_by_key[parent_key].calibration_histogram_buckets.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
plots_by_key[parent_key].confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
plots_by_key[parent_key].multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.MultiLabelConfusionMatrixAtThresholds):
plots_by_key[parent_key].multi_label_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
if slice_plots:
if (add_metrics_callbacks is None):
add_metrics_callbacks = []
raise NotImplementedError(('some plots were not converted or popped. keys: %s. add_metrics_callbacks were: %s' % (slice_plots.keys(), [x.name for x in add_metrics_callbacks])))
return result | def convert_slice_plots_to_proto(plots: Tuple[(slicer.SliceKeyType, Dict[(Any, Any)])], add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> metrics_for_slice_pb2.PlotsForSlice:
'Converts the given slice plots into PlotsForSlice proto.\n\n Args:\n plots: The slice plots.\n add_metrics_callbacks: A list of metric callbacks. This should be the same\n list as the one passed to tfma.Evaluate().\n\n Returns:\n The PlotsForSlice proto.\n '
result = metrics_for_slice_pb2.PlotsForSlice()
(slice_key, slice_plots) = plots
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_plots = slice_plots.copy()
if (metric_keys.ERROR_METRIC in slice_plots):
logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_plots[metric_keys.ERROR_METRIC])
error_metric = slice_plots.pop(metric_keys.ERROR_METRIC)
result.plots[metric_keys.ERROR_METRIC].debug_message = error_metric
return result
if (add_metrics_callbacks and (not any((isinstance(k, metric_types.MetricKey) for k in slice_plots.keys())))):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_plots_and_pop'):
add_metrics_callback.populate_plots_and_pop(slice_plots, result.plots)
plots_by_key = {}
for key in sorted(slice_plots.keys()):
value = slice_plots[key]
if isinstance(key, metric_types.MetricKey):
parent_key = key._replace(name=None)
else:
continue
if (parent_key not in plots_by_key):
key_and_value = result.plot_keys_and_values.add()
key_and_value.key.CopyFrom(parent_key.to_proto())
plots_by_key[parent_key] = key_and_value.value
if isinstance(value, metrics_for_slice_pb2.CalibrationHistogramBuckets):
plots_by_key[parent_key].calibration_histogram_buckets.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
plots_by_key[parent_key].confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
plots_by_key[parent_key].multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.MultiLabelConfusionMatrixAtThresholds):
plots_by_key[parent_key].multi_label_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
if slice_plots:
if (add_metrics_callbacks is None):
add_metrics_callbacks = []
raise NotImplementedError(('some plots were not converted or popped. keys: %s. add_metrics_callbacks were: %s' % (slice_plots.keys(), [x.name for x in add_metrics_callbacks])))
return result<|docstring|>Converts the given slice plots into PlotsForSlice proto.
Args:
plots: The slice plots.
add_metrics_callbacks: A list of metric callbacks. This should be the same
list as the one passed to tfma.Evaluate().
Returns:
The PlotsForSlice proto.<|endoftext|> |
5fac467cb386a708e3cc6370e04cbcc0b6750fe3fb7f5ab4b6f61c0fed82cae9 | def MetricsPlotsAndValidationsWriter(output_paths: Dict[(Text, Text)], eval_config: config.EvalConfig, add_metrics_callbacks: Optional[List[types.AddMetricsCallbackType]]=None, metrics_key: Text=constants.METRICS_KEY, plots_key: Text=constants.PLOTS_KEY, validations_key: Text=constants.VALIDATIONS_KEY, output_file_format: Text='') -> writer.Writer:
"Returns metrics and plots writer.\n\n Note, sharding will be enabled by default if a output_file_format is provided.\n The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format>\n where SSSSS is the shard number and NNNNN is the number of shards.\n\n Args:\n output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots',\n 'validation').\n eval_config: Eval config.\n add_metrics_callbacks: Optional list of metric callbacks (if used).\n metrics_key: Name to use for metrics key in Evaluation output.\n plots_key: Name to use for plots key in Evaluation output.\n validations_key: Name to use for validations key in Evaluation output.\n output_file_format: File format to use when saving files. Currently\n 'tfrecord' and 'parquet' are supported. If using parquet, the output\n metrics and plots files will contain two columns: 'slice_key' and\n 'serialized_value'. The 'slice_key' column will be a structured column\n matching the metrics_for_slice_pb2.SliceKey proto. the 'serialized_value'\n column will contain a serialized MetricsForSlice or PlotsForSlice\n proto. The validation result file will contain a single column\n 'serialized_value' which will contain a single serialized ValidationResult\n proto.\n "
return writer.Writer(stage_name='WriteMetricsAndPlots', ptransform=_WriteMetricsPlotsAndValidations(output_paths=output_paths, eval_config=eval_config, add_metrics_callbacks=(add_metrics_callbacks or []), metrics_key=metrics_key, plots_key=plots_key, validations_key=validations_key, output_file_format=output_file_format)) | Returns metrics and plots writer.
Note, sharding will be enabled by default if a output_file_format is provided.
The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format>
where SSSSS is the shard number and NNNNN is the number of shards.
Args:
output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots',
'validation').
eval_config: Eval config.
add_metrics_callbacks: Optional list of metric callbacks (if used).
metrics_key: Name to use for metrics key in Evaluation output.
plots_key: Name to use for plots key in Evaluation output.
validations_key: Name to use for validations key in Evaluation output.
output_file_format: File format to use when saving files. Currently
'tfrecord' and 'parquet' are supported. If using parquet, the output
metrics and plots files will contain two columns: 'slice_key' and
'serialized_value'. The 'slice_key' column will be a structured column
matching the metrics_for_slice_pb2.SliceKey proto. the 'serialized_value'
column will contain a serialized MetricsForSlice or PlotsForSlice
proto. The validation result file will contain a single column
'serialized_value' which will contain a single serialized ValidationResult
proto. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | MetricsPlotsAndValidationsWriter | TheWayOfTheRob/model-analysis | 1 | python | def MetricsPlotsAndValidationsWriter(output_paths: Dict[(Text, Text)], eval_config: config.EvalConfig, add_metrics_callbacks: Optional[List[types.AddMetricsCallbackType]]=None, metrics_key: Text=constants.METRICS_KEY, plots_key: Text=constants.PLOTS_KEY, validations_key: Text=constants.VALIDATIONS_KEY, output_file_format: Text=) -> writer.Writer:
"Returns metrics and plots writer.\n\n Note, sharding will be enabled by default if a output_file_format is provided.\n The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format>\n where SSSSS is the shard number and NNNNN is the number of shards.\n\n Args:\n output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots',\n 'validation').\n eval_config: Eval config.\n add_metrics_callbacks: Optional list of metric callbacks (if used).\n metrics_key: Name to use for metrics key in Evaluation output.\n plots_key: Name to use for plots key in Evaluation output.\n validations_key: Name to use for validations key in Evaluation output.\n output_file_format: File format to use when saving files. Currently\n 'tfrecord' and 'parquet' are supported. If using parquet, the output\n metrics and plots files will contain two columns: 'slice_key' and\n 'serialized_value'. The 'slice_key' column will be a structured column\n matching the metrics_for_slice_pb2.SliceKey proto. the 'serialized_value'\n column will contain a serialized MetricsForSlice or PlotsForSlice\n proto. The validation result file will contain a single column\n 'serialized_value' which will contain a single serialized ValidationResult\n proto.\n "
return writer.Writer(stage_name='WriteMetricsAndPlots', ptransform=_WriteMetricsPlotsAndValidations(output_paths=output_paths, eval_config=eval_config, add_metrics_callbacks=(add_metrics_callbacks or []), metrics_key=metrics_key, plots_key=plots_key, validations_key=validations_key, output_file_format=output_file_format)) | def MetricsPlotsAndValidationsWriter(output_paths: Dict[(Text, Text)], eval_config: config.EvalConfig, add_metrics_callbacks: Optional[List[types.AddMetricsCallbackType]]=None, metrics_key: Text=constants.METRICS_KEY, plots_key: Text=constants.PLOTS_KEY, validations_key: Text=constants.VALIDATIONS_KEY, output_file_format: Text=) -> writer.Writer:
"Returns metrics and plots writer.\n\n Note, sharding will be enabled by default if a output_file_format is provided.\n The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format>\n where SSSSS is the shard number and NNNNN is the number of shards.\n\n Args:\n output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots',\n 'validation').\n eval_config: Eval config.\n add_metrics_callbacks: Optional list of metric callbacks (if used).\n metrics_key: Name to use for metrics key in Evaluation output.\n plots_key: Name to use for plots key in Evaluation output.\n validations_key: Name to use for validations key in Evaluation output.\n output_file_format: File format to use when saving files. Currently\n 'tfrecord' and 'parquet' are supported. If using parquet, the output\n metrics and plots files will contain two columns: 'slice_key' and\n 'serialized_value'. The 'slice_key' column will be a structured column\n matching the metrics_for_slice_pb2.SliceKey proto. the 'serialized_value'\n column will contain a serialized MetricsForSlice or PlotsForSlice\n proto. The validation result file will contain a single column\n 'serialized_value' which will contain a single serialized ValidationResult\n proto.\n "
return writer.Writer(stage_name='WriteMetricsAndPlots', ptransform=_WriteMetricsPlotsAndValidations(output_paths=output_paths, eval_config=eval_config, add_metrics_callbacks=(add_metrics_callbacks or []), metrics_key=metrics_key, plots_key=plots_key, validations_key=validations_key, output_file_format=output_file_format))<|docstring|>Returns metrics and plots writer.
Note, sharding will be enabled by default if a output_file_format is provided.
The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format>
where SSSSS is the shard number and NNNNN is the number of shards.
Args:
output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots',
'validation').
eval_config: Eval config.
add_metrics_callbacks: Optional list of metric callbacks (if used).
metrics_key: Name to use for metrics key in Evaluation output.
plots_key: Name to use for plots key in Evaluation output.
validations_key: Name to use for validations key in Evaluation output.
output_file_format: File format to use when saving files. Currently
'tfrecord' and 'parquet' are supported. If using parquet, the output
metrics and plots files will contain two columns: 'slice_key' and
'serialized_value'. The 'slice_key' column will be a structured column
matching the metrics_for_slice_pb2.SliceKey proto. the 'serialized_value'
column will contain a serialized MetricsForSlice or PlotsForSlice
proto. The validation result file will contain a single column
'serialized_value' which will contain a single serialized ValidationResult
proto.<|endoftext|> |
7e665d5ac55dc2311b35a01806271ff7eb7c3eb7d29d42ab1244eb31010a72aa | @beam.ptransform_fn
@beam.typehints.with_output_types(beam.pvalue.PDone)
def _WriteMetricsPlotsAndValidations(evaluation: evaluator.Evaluation, output_paths: Dict[(Text, Text)], eval_config: config.EvalConfig, add_metrics_callbacks: List[types.AddMetricsCallbackType], metrics_key: Text, plots_key: Text, validations_key: Text, output_file_format: Text) -> beam.pvalue.PDone:
'PTransform to write metrics and plots.'
if (output_file_format and (output_file_format not in _SUPPORTED_FORMATS)):
raise ValueError('only "{}" formats are currently supported but got output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format))
def convert_slice_key_to_parquet_dict(slice_key: metrics_for_slice_pb2.SliceKey) -> _SliceKeyDictPythonType:
single_slice_key_dicts = []
for single_slice_key in slice_key.single_slice_keys:
kind = single_slice_key.WhichOneof('kind')
if (not kind):
continue
single_slice_key_dicts.append({kind: getattr(single_slice_key, kind)})
return {_SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME: single_slice_key_dicts}
def convert_to_parquet_columns(value: Union[(metrics_for_slice_pb2.MetricsForSlice, metrics_for_slice_pb2.PlotsForSlice)]) -> Dict[(Text, Union[(_SliceKeyDictPythonType, bytes)])]:
return {_SLICE_KEY_PARQUET_COLUMN_NAME: convert_slice_key_to_parquet_dict(value.slice_key), _SERIALIZED_VALUE_PARQUET_COLUMN_NAME: value.SerializeToString()}
if ((metrics_key in evaluation) and (constants.METRICS_KEY in output_paths)):
metrics = (evaluation[metrics_key] | ('ConvertSliceMetricsToProto' >> beam.Map(convert_slice_metrics_to_proto, add_metrics_callbacks=add_metrics_callbacks)))
file_path_prefix = output_paths[constants.METRICS_KEY]
if (output_file_format == _PARQUET_FORMAT):
_ = ((metrics | ('ConvertToParquetColumns' >> beam.Map(convert_to_parquet_columns))) | ('WriteMetricsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, schema=_SLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (metrics | ('WriteMetrics' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=(None if output_file_format else ''), file_name_suffix=(('.' + output_file_format) if output_file_format else ''), coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.MetricsForSlice))))
if ((plots_key in evaluation) and (constants.PLOTS_KEY in output_paths)):
plots = (evaluation[plots_key] | ('ConvertSlicePlotsToProto' >> beam.Map(convert_slice_plots_to_proto, add_metrics_callbacks=add_metrics_callbacks)))
file_path_prefix = output_paths[constants.PLOTS_KEY]
if (output_file_format == _PARQUET_FORMAT):
_ = ((plots | ('ConvertPlotsToParquetColumns' >> beam.Map(convert_to_parquet_columns))) | ('WritePlotsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, schema=_SLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (plots | ('WritePlotsToTFRecord' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=(None if output_file_format else ''), file_name_suffix=(('.' + output_file_format) if output_file_format else ''), coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.PlotsForSlice))))
if ((validations_key in evaluation) and (constants.VALIDATIONS_KEY in output_paths)):
validations = (evaluation[validations_key] | ('MergeValidationResults' >> beam.CombineGlobally(_CombineValidations(eval_config))))
file_path_prefix = output_paths[constants.VALIDATIONS_KEY]
shard_name_template = ''
if (output_file_format == _PARQUET_FORMAT):
_ = ((validations | ('ConvertValidationsToParquetColumns' >> beam.Map((lambda v: {_SERIALIZED_VALUE_PARQUET_COLUMN_NAME: v.SerializeToString()})))) | ('WriteValidationsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, shard_name_template=shard_name_template, schema=_UNSLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (validations | ('WriteValidationsToTFRecord' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=shard_name_template, file_name_suffix=(('.' + output_file_format) if output_file_format else ''), coder=beam.coders.ProtoCoder(validation_result_pb2.ValidationResult))))
return beam.pvalue.PDone(list(evaluation.values())[0].pipeline) | PTransform to write metrics and plots. | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py | _WriteMetricsPlotsAndValidations | TheWayOfTheRob/model-analysis | 1 | python | @beam.ptransform_fn
@beam.typehints.with_output_types(beam.pvalue.PDone)
def _WriteMetricsPlotsAndValidations(evaluation: evaluator.Evaluation, output_paths: Dict[(Text, Text)], eval_config: config.EvalConfig, add_metrics_callbacks: List[types.AddMetricsCallbackType], metrics_key: Text, plots_key: Text, validations_key: Text, output_file_format: Text) -> beam.pvalue.PDone:
if (output_file_format and (output_file_format not in _SUPPORTED_FORMATS)):
raise ValueError('only "{}" formats are currently supported but got output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format))
def convert_slice_key_to_parquet_dict(slice_key: metrics_for_slice_pb2.SliceKey) -> _SliceKeyDictPythonType:
single_slice_key_dicts = []
for single_slice_key in slice_key.single_slice_keys:
kind = single_slice_key.WhichOneof('kind')
if (not kind):
continue
single_slice_key_dicts.append({kind: getattr(single_slice_key, kind)})
return {_SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME: single_slice_key_dicts}
def convert_to_parquet_columns(value: Union[(metrics_for_slice_pb2.MetricsForSlice, metrics_for_slice_pb2.PlotsForSlice)]) -> Dict[(Text, Union[(_SliceKeyDictPythonType, bytes)])]:
return {_SLICE_KEY_PARQUET_COLUMN_NAME: convert_slice_key_to_parquet_dict(value.slice_key), _SERIALIZED_VALUE_PARQUET_COLUMN_NAME: value.SerializeToString()}
if ((metrics_key in evaluation) and (constants.METRICS_KEY in output_paths)):
metrics = (evaluation[metrics_key] | ('ConvertSliceMetricsToProto' >> beam.Map(convert_slice_metrics_to_proto, add_metrics_callbacks=add_metrics_callbacks)))
file_path_prefix = output_paths[constants.METRICS_KEY]
if (output_file_format == _PARQUET_FORMAT):
_ = ((metrics | ('ConvertToParquetColumns' >> beam.Map(convert_to_parquet_columns))) | ('WriteMetricsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, schema=_SLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (metrics | ('WriteMetrics' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=(None if output_file_format else ), file_name_suffix=(('.' + output_file_format) if output_file_format else ), coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.MetricsForSlice))))
if ((plots_key in evaluation) and (constants.PLOTS_KEY in output_paths)):
plots = (evaluation[plots_key] | ('ConvertSlicePlotsToProto' >> beam.Map(convert_slice_plots_to_proto, add_metrics_callbacks=add_metrics_callbacks)))
file_path_prefix = output_paths[constants.PLOTS_KEY]
if (output_file_format == _PARQUET_FORMAT):
_ = ((plots | ('ConvertPlotsToParquetColumns' >> beam.Map(convert_to_parquet_columns))) | ('WritePlotsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, schema=_SLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (plots | ('WritePlotsToTFRecord' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=(None if output_file_format else ), file_name_suffix=(('.' + output_file_format) if output_file_format else ), coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.PlotsForSlice))))
if ((validations_key in evaluation) and (constants.VALIDATIONS_KEY in output_paths)):
validations = (evaluation[validations_key] | ('MergeValidationResults' >> beam.CombineGlobally(_CombineValidations(eval_config))))
file_path_prefix = output_paths[constants.VALIDATIONS_KEY]
shard_name_template =
if (output_file_format == _PARQUET_FORMAT):
_ = ((validations | ('ConvertValidationsToParquetColumns' >> beam.Map((lambda v: {_SERIALIZED_VALUE_PARQUET_COLUMN_NAME: v.SerializeToString()})))) | ('WriteValidationsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, shard_name_template=shard_name_template, schema=_UNSLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (validations | ('WriteValidationsToTFRecord' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=shard_name_template, file_name_suffix=(('.' + output_file_format) if output_file_format else ), coder=beam.coders.ProtoCoder(validation_result_pb2.ValidationResult))))
return beam.pvalue.PDone(list(evaluation.values())[0].pipeline) | @beam.ptransform_fn
@beam.typehints.with_output_types(beam.pvalue.PDone)
def _WriteMetricsPlotsAndValidations(evaluation: evaluator.Evaluation, output_paths: Dict[(Text, Text)], eval_config: config.EvalConfig, add_metrics_callbacks: List[types.AddMetricsCallbackType], metrics_key: Text, plots_key: Text, validations_key: Text, output_file_format: Text) -> beam.pvalue.PDone:
if (output_file_format and (output_file_format not in _SUPPORTED_FORMATS)):
raise ValueError('only "{}" formats are currently supported but got output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format))
def convert_slice_key_to_parquet_dict(slice_key: metrics_for_slice_pb2.SliceKey) -> _SliceKeyDictPythonType:
single_slice_key_dicts = []
for single_slice_key in slice_key.single_slice_keys:
kind = single_slice_key.WhichOneof('kind')
if (not kind):
continue
single_slice_key_dicts.append({kind: getattr(single_slice_key, kind)})
return {_SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME: single_slice_key_dicts}
def convert_to_parquet_columns(value: Union[(metrics_for_slice_pb2.MetricsForSlice, metrics_for_slice_pb2.PlotsForSlice)]) -> Dict[(Text, Union[(_SliceKeyDictPythonType, bytes)])]:
return {_SLICE_KEY_PARQUET_COLUMN_NAME: convert_slice_key_to_parquet_dict(value.slice_key), _SERIALIZED_VALUE_PARQUET_COLUMN_NAME: value.SerializeToString()}
if ((metrics_key in evaluation) and (constants.METRICS_KEY in output_paths)):
metrics = (evaluation[metrics_key] | ('ConvertSliceMetricsToProto' >> beam.Map(convert_slice_metrics_to_proto, add_metrics_callbacks=add_metrics_callbacks)))
file_path_prefix = output_paths[constants.METRICS_KEY]
if (output_file_format == _PARQUET_FORMAT):
_ = ((metrics | ('ConvertToParquetColumns' >> beam.Map(convert_to_parquet_columns))) | ('WriteMetricsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, schema=_SLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (metrics | ('WriteMetrics' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=(None if output_file_format else ), file_name_suffix=(('.' + output_file_format) if output_file_format else ), coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.MetricsForSlice))))
if ((plots_key in evaluation) and (constants.PLOTS_KEY in output_paths)):
plots = (evaluation[plots_key] | ('ConvertSlicePlotsToProto' >> beam.Map(convert_slice_plots_to_proto, add_metrics_callbacks=add_metrics_callbacks)))
file_path_prefix = output_paths[constants.PLOTS_KEY]
if (output_file_format == _PARQUET_FORMAT):
_ = ((plots | ('ConvertPlotsToParquetColumns' >> beam.Map(convert_to_parquet_columns))) | ('WritePlotsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, schema=_SLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (plots | ('WritePlotsToTFRecord' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=(None if output_file_format else ), file_name_suffix=(('.' + output_file_format) if output_file_format else ), coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.PlotsForSlice))))
if ((validations_key in evaluation) and (constants.VALIDATIONS_KEY in output_paths)):
validations = (evaluation[validations_key] | ('MergeValidationResults' >> beam.CombineGlobally(_CombineValidations(eval_config))))
file_path_prefix = output_paths[constants.VALIDATIONS_KEY]
shard_name_template =
if (output_file_format == _PARQUET_FORMAT):
_ = ((validations | ('ConvertValidationsToParquetColumns' >> beam.Map((lambda v: {_SERIALIZED_VALUE_PARQUET_COLUMN_NAME: v.SerializeToString()})))) | ('WriteValidationsToParquet' >> beam.io.WriteToParquet(file_path_prefix=file_path_prefix, shard_name_template=shard_name_template, schema=_UNSLICED_PARQUET_SCHEMA, file_name_suffix=('.' + output_file_format))))
elif ((not output_file_format) or (output_file_format == _TFRECORD_FORMAT)):
_ = (validations | ('WriteValidationsToTFRecord' >> beam.io.WriteToTFRecord(file_path_prefix=file_path_prefix, shard_name_template=shard_name_template, file_name_suffix=(('.' + output_file_format) if output_file_format else ), coder=beam.coders.ProtoCoder(validation_result_pb2.ValidationResult))))
return beam.pvalue.PDone(list(evaluation.values())[0].pipeline)<|docstring|>PTransform to write metrics and plots.<|endoftext|> |
1ab91fb50e7af7b7ed969255d12d7fbc871ac394ad7fb191becb07e7bd83df27 | def parse_argument():
'\n Parse input arguments.\n\n Returns:\n - parsed arguments\n '
parser = argparse.ArgumentParser(description='Populate FoodON.')
parser.add_argument('--config_file', default=DEFAULT_CONFIG_FILE, help='Path to the .ini configuration file.')
return parser.parse_args() | Parse input arguments.
Returns:
- parsed arguments | populate_foodon.py | parse_argument | g-simmons/LOVE | 7 | python | def parse_argument():
'\n Parse input arguments.\n\n Returns:\n - parsed arguments\n '
parser = argparse.ArgumentParser(description='Populate FoodON.')
parser.add_argument('--config_file', default=DEFAULT_CONFIG_FILE, help='Path to the .ini configuration file.')
return parser.parse_args() | def parse_argument():
'\n Parse input arguments.\n\n Returns:\n - parsed arguments\n '
parser = argparse.ArgumentParser(description='Populate FoodON.')
parser.add_argument('--config_file', default=DEFAULT_CONFIG_FILE, help='Path to the .ini configuration file.')
return parser.parse_args()<|docstring|>Parse input arguments.
Returns:
- parsed arguments<|endoftext|> |
eb7065fb6d3cc412d171d6a673844c9761cf0fc3f142e9c3fe3077f2a91ed6ea | def main():
'\n Main function.\n '
args = parse_argument()
configparser = ConfigParser(args.config_file)
set_logging(configparser.getstr('logfile'))
parse_foodon = ParseFoodOn(configparser.getstr('foodon_parse_config'))
classes_dict = parse_foodon.get_candidate_classes()
(classes_dict_skeleton, candidate_entities) = parse_foodon.get_seeded_skeleton(classes_dict)
scoring_manager = ScoringManager(classes_dict_skeleton, candidate_entities, configparser.getstr('scoring_config'))
scoring_manager.run_iteration() | Main function. | populate_foodon.py | main | g-simmons/LOVE | 7 | python | def main():
'\n \n '
args = parse_argument()
configparser = ConfigParser(args.config_file)
set_logging(configparser.getstr('logfile'))
parse_foodon = ParseFoodOn(configparser.getstr('foodon_parse_config'))
classes_dict = parse_foodon.get_candidate_classes()
(classes_dict_skeleton, candidate_entities) = parse_foodon.get_seeded_skeleton(classes_dict)
scoring_manager = ScoringManager(classes_dict_skeleton, candidate_entities, configparser.getstr('scoring_config'))
scoring_manager.run_iteration() | def main():
'\n \n '
args = parse_argument()
configparser = ConfigParser(args.config_file)
set_logging(configparser.getstr('logfile'))
parse_foodon = ParseFoodOn(configparser.getstr('foodon_parse_config'))
classes_dict = parse_foodon.get_candidate_classes()
(classes_dict_skeleton, candidate_entities) = parse_foodon.get_seeded_skeleton(classes_dict)
scoring_manager = ScoringManager(classes_dict_skeleton, candidate_entities, configparser.getstr('scoring_config'))
scoring_manager.run_iteration()<|docstring|>Main function.<|endoftext|> |
1a7f60b75b51a489cbc72149a63dc907bd5b65eaab4160e2d1c09b645028cc7a | def _getDigest(self, line, mode):
'\n specify what part in the line to be processed and highlighted\n Args:\n mode: 0, return the full path\n 1, return the name only\n 2, return the directory name\n '
return line[:line.find('\t')] | specify what part in the line to be processed and highlighted
Args:
mode: 0, return the full path
1, return the name only
2, return the directory name | plugged/LeaderF/autoload/leaderf/python/leaderf/tagExpl.py | _getDigest | CHANTXU64/.vim | 1,914 | python | def _getDigest(self, line, mode):
'\n specify what part in the line to be processed and highlighted\n Args:\n mode: 0, return the full path\n 1, return the name only\n 2, return the directory name\n '
return line[:line.find('\t')] | def _getDigest(self, line, mode):
'\n specify what part in the line to be processed and highlighted\n Args:\n mode: 0, return the full path\n 1, return the name only\n 2, return the directory name\n '
return line[:line.find('\t')]<|docstring|>specify what part in the line to be processed and highlighted
Args:
mode: 0, return the full path
1, return the name only
2, return the directory name<|endoftext|> |
5fa069979b6998924382ceeb41fe85a549680010e7b2d35e0c0ad9edb5c228f4 | def _getDigestStartPos(self, line, mode):
'\n return the start position of the digest returned by _getDigest()\n Args:\n mode: 0, return the start postion of full path\n 1, return the start postion of name only\n 2, return the start postion of directory name\n '
return 0 | return the start position of the digest returned by _getDigest()
Args:
mode: 0, return the start postion of full path
1, return the start postion of name only
2, return the start postion of directory name | plugged/LeaderF/autoload/leaderf/python/leaderf/tagExpl.py | _getDigestStartPos | CHANTXU64/.vim | 1,914 | python | def _getDigestStartPos(self, line, mode):
'\n return the start position of the digest returned by _getDigest()\n Args:\n mode: 0, return the start postion of full path\n 1, return the start postion of name only\n 2, return the start postion of directory name\n '
return 0 | def _getDigestStartPos(self, line, mode):
'\n return the start position of the digest returned by _getDigest()\n Args:\n mode: 0, return the start postion of full path\n 1, return the start postion of name only\n 2, return the start postion of directory name\n '
return 0<|docstring|>return the start position of the digest returned by _getDigest()
Args:
mode: 0, return the start postion of full path
1, return the start postion of name only
2, return the start postion of directory name<|endoftext|> |
973bbdf5232434a6e7a570bfcef3e478d55e5b336d74c4c03f288f4c6c2092af | def parse(self, response):
'\n `parse` should always `yield` Meeting items.\n\n Change the `_parse_title`, `_parse_start`, etc methods to fit your scraping\n needs.\n '
self.links = self._parse_all_links(response)
for date in self.links.keys():
if ('Agenda' in self.links[date].keys()):
(yield scrapy.Request(self.links[date]['Agenda'], callback=self._meeting, cb_kwargs=dict(date=date)))
elif ('Minutes' in self.links[date].keys()):
(yield scrapy.Request(self.links[date]['Minutes'], callback=self._meeting, cb_kwargs=dict(date=date))) | `parse` should always `yield` Meeting items.
Change the `_parse_title`, `_parse_start`, etc methods to fit your scraping
needs. | city_scrapers/spiders/il_corrections.py | parse | yijun-li-20/city-scrapers | 255 | python | def parse(self, response):
'\n `parse` should always `yield` Meeting items.\n\n Change the `_parse_title`, `_parse_start`, etc methods to fit your scraping\n needs.\n '
self.links = self._parse_all_links(response)
for date in self.links.keys():
if ('Agenda' in self.links[date].keys()):
(yield scrapy.Request(self.links[date]['Agenda'], callback=self._meeting, cb_kwargs=dict(date=date)))
elif ('Minutes' in self.links[date].keys()):
(yield scrapy.Request(self.links[date]['Minutes'], callback=self._meeting, cb_kwargs=dict(date=date))) | def parse(self, response):
'\n `parse` should always `yield` Meeting items.\n\n Change the `_parse_title`, `_parse_start`, etc methods to fit your scraping\n needs.\n '
self.links = self._parse_all_links(response)
for date in self.links.keys():
if ('Agenda' in self.links[date].keys()):
(yield scrapy.Request(self.links[date]['Agenda'], callback=self._meeting, cb_kwargs=dict(date=date)))
elif ('Minutes' in self.links[date].keys()):
(yield scrapy.Request(self.links[date]['Minutes'], callback=self._meeting, cb_kwargs=dict(date=date)))<|docstring|>`parse` should always `yield` Meeting items.
Change the `_parse_title`, `_parse_start`, etc methods to fit your scraping
needs.<|endoftext|> |
4befdf727207fdc2f86e05839947f87cf25441d872f40b4bd038de50cb46c501 | def _parse_pdf(self, response):
'Parse dates and details from schedule PDF'
lp = LAParams(line_margin=0.1)
out_str = StringIO()
extract_text_to_fp(BytesIO(response.body), out_str, laparams=lp)
pdf_text = out_str.getvalue()
return pdf_text.lower() | Parse dates and details from schedule PDF | city_scrapers/spiders/il_corrections.py | _parse_pdf | yijun-li-20/city-scrapers | 255 | python | def _parse_pdf(self, response):
lp = LAParams(line_margin=0.1)
out_str = StringIO()
extract_text_to_fp(BytesIO(response.body), out_str, laparams=lp)
pdf_text = out_str.getvalue()
return pdf_text.lower() | def _parse_pdf(self, response):
lp = LAParams(line_margin=0.1)
out_str = StringIO()
extract_text_to_fp(BytesIO(response.body), out_str, laparams=lp)
pdf_text = out_str.getvalue()
return pdf_text.lower()<|docstring|>Parse dates and details from schedule PDF<|endoftext|> |
916ba2c26b8f5eeb34edd5edc0a5405665855512fd6eaf6190ffaa4e2aed5ee2 | def _parse_title(self, pdf_text):
'Parse or generate meeting title.'
title = 'Adult Advisory Board Meeting'
if ('subcommittee' in pdf_text):
title = "Adult Advisory Board / Women's Subcommittee Meeting"
return title | Parse or generate meeting title. | city_scrapers/spiders/il_corrections.py | _parse_title | yijun-li-20/city-scrapers | 255 | python | def _parse_title(self, pdf_text):
title = 'Adult Advisory Board Meeting'
if ('subcommittee' in pdf_text):
title = "Adult Advisory Board / Women's Subcommittee Meeting"
return title | def _parse_title(self, pdf_text):
title = 'Adult Advisory Board Meeting'
if ('subcommittee' in pdf_text):
title = "Adult Advisory Board / Women's Subcommittee Meeting"
return title<|docstring|>Parse or generate meeting title.<|endoftext|> |
614f2240ba232c0d602b799ba326c01d1efc299dfb20f2af98444e08f0d8117a | def _parse_times(self, date, pdf_text, start=True):
'Parse start datetime as a naive datetime object.'
times = re.findall('(\\d{1,2}:\\d{2} ?(am|a\\.m\\.|pm|p\\.m\\.))', pdf_text)
if start:
try:
start_time = times[0][0].replace('.', '')
except IndexError:
start_time = '12:00am'
return self._try_time_format(date, start_time)
else:
try:
end_time = times[1][0].replace('.', '')
return self._try_time_format(date, end_time)
except IndexError:
return None | Parse start datetime as a naive datetime object. | city_scrapers/spiders/il_corrections.py | _parse_times | yijun-li-20/city-scrapers | 255 | python | def _parse_times(self, date, pdf_text, start=True):
times = re.findall('(\\d{1,2}:\\d{2} ?(am|a\\.m\\.|pm|p\\.m\\.))', pdf_text)
if start:
try:
start_time = times[0][0].replace('.', )
except IndexError:
start_time = '12:00am'
return self._try_time_format(date, start_time)
else:
try:
end_time = times[1][0].replace('.', )
return self._try_time_format(date, end_time)
except IndexError:
return None | def _parse_times(self, date, pdf_text, start=True):
times = re.findall('(\\d{1,2}:\\d{2} ?(am|a\\.m\\.|pm|p\\.m\\.))', pdf_text)
if start:
try:
start_time = times[0][0].replace('.', )
except IndexError:
start_time = '12:00am'
return self._try_time_format(date, start_time)
else:
try:
end_time = times[1][0].replace('.', )
return self._try_time_format(date, end_time)
except IndexError:
return None<|docstring|>Parse start datetime as a naive datetime object.<|endoftext|> |
af71ff15cc810d3b02f5faea23c98691dc5279814145d407429d8d0345dfc61e | def _try_time_format(self, date, time):
'Try time formatting with and without spacing'
try:
time_object = datetime.strptime(f'{date} {time}', '%B %d, %Y %I:%M%p')
except ValueError:
time_object = datetime.strptime(f'{date} {time}', '%B %d, %Y %I:%M %p')
return time_object | Try time formatting with and without spacing | city_scrapers/spiders/il_corrections.py | _try_time_format | yijun-li-20/city-scrapers | 255 | python | def _try_time_format(self, date, time):
try:
time_object = datetime.strptime(f'{date} {time}', '%B %d, %Y %I:%M%p')
except ValueError:
time_object = datetime.strptime(f'{date} {time}', '%B %d, %Y %I:%M %p')
return time_object | def _try_time_format(self, date, time):
try:
time_object = datetime.strptime(f'{date} {time}', '%B %d, %Y %I:%M%p')
except ValueError:
time_object = datetime.strptime(f'{date} {time}', '%B %d, %Y %I:%M %p')
return time_object<|docstring|>Try time formatting with and without spacing<|endoftext|> |
f392bc81d14d36090a73d57c9482dac3c4aa8262161c5868b5a233e090b5a682 | def _parse_location(self, pdf_text):
'Parse or generate location.'
location_lookup = {'logan correctional center': {'address': '1096 1350th St, Lincoln, IL 62656', 'name': 'Logan Correctional Center'}, 'vandalia correctional center': {'address': 'US-51, Vandalia, IL 62471', 'name': 'Vandalia Correctional Center'}, 'thompson center': {'address': '100 W. Randolph, Suite 4-200, Chicago, IL 60601', 'name': 'James R. Thompson Center'}, 'joliet treatment center': {'address': '2848 McDonough St, Joliet, IL 60431', 'name': 'Joliet Treatment Center'}, 'no known location': {'name': 'TBD', 'address': ''}}
for location in location_lookup.keys():
if (location in pdf_text):
return location_lookup[location]
return location_lookup['no known location'] | Parse or generate location. | city_scrapers/spiders/il_corrections.py | _parse_location | yijun-li-20/city-scrapers | 255 | python | def _parse_location(self, pdf_text):
location_lookup = {'logan correctional center': {'address': '1096 1350th St, Lincoln, IL 62656', 'name': 'Logan Correctional Center'}, 'vandalia correctional center': {'address': 'US-51, Vandalia, IL 62471', 'name': 'Vandalia Correctional Center'}, 'thompson center': {'address': '100 W. Randolph, Suite 4-200, Chicago, IL 60601', 'name': 'James R. Thompson Center'}, 'joliet treatment center': {'address': '2848 McDonough St, Joliet, IL 60431', 'name': 'Joliet Treatment Center'}, 'no known location': {'name': 'TBD', 'address': }}
for location in location_lookup.keys():
if (location in pdf_text):
return location_lookup[location]
return location_lookup['no known location'] | def _parse_location(self, pdf_text):
location_lookup = {'logan correctional center': {'address': '1096 1350th St, Lincoln, IL 62656', 'name': 'Logan Correctional Center'}, 'vandalia correctional center': {'address': 'US-51, Vandalia, IL 62471', 'name': 'Vandalia Correctional Center'}, 'thompson center': {'address': '100 W. Randolph, Suite 4-200, Chicago, IL 60601', 'name': 'James R. Thompson Center'}, 'joliet treatment center': {'address': '2848 McDonough St, Joliet, IL 60431', 'name': 'Joliet Treatment Center'}, 'no known location': {'name': 'TBD', 'address': }}
for location in location_lookup.keys():
if (location in pdf_text):
return location_lookup[location]
return location_lookup['no known location']<|docstring|>Parse or generate location.<|endoftext|> |
020a4f21b1624ed6dcb82bf3cb38d8df2497c310a8797724d019de8b31be4e3f | def _parse_all_links(self, response):
' Gather dates, links '
link_dict = defaultdict(dict)
for link in response.css('a'):
date = link.re_first('((Jan(uary)?|Feb(ruary)?|Mar(ch)?|Apr(il)?|\n May|Jun(e)?|Jul(y)?|Aug(ust)?|Sep(tember)?|Oct(ober)?|Nov(ember)?|\n Dec(ember)?)\\s+\\d{1,2},\\s+\\d{4})|((1[0-2]|0?[1-9])/(3[01]|\n [12][0-9]|0?[1-9])/(?:[0-9]{2})?[0-9]{2})')
if (date is not None):
for item in ['Notice', 'Agenda', 'Minutes']:
if (item in link.attrib['href']):
link_dict[date][item] = response.urljoin(link.attrib['href'])
return link_dict | Gather dates, links | city_scrapers/spiders/il_corrections.py | _parse_all_links | yijun-li-20/city-scrapers | 255 | python | def _parse_all_links(self, response):
' '
link_dict = defaultdict(dict)
for link in response.css('a'):
date = link.re_first('((Jan(uary)?|Feb(ruary)?|Mar(ch)?|Apr(il)?|\n May|Jun(e)?|Jul(y)?|Aug(ust)?|Sep(tember)?|Oct(ober)?|Nov(ember)?|\n Dec(ember)?)\\s+\\d{1,2},\\s+\\d{4})|((1[0-2]|0?[1-9])/(3[01]|\n [12][0-9]|0?[1-9])/(?:[0-9]{2})?[0-9]{2})')
if (date is not None):
for item in ['Notice', 'Agenda', 'Minutes']:
if (item in link.attrib['href']):
link_dict[date][item] = response.urljoin(link.attrib['href'])
return link_dict | def _parse_all_links(self, response):
' '
link_dict = defaultdict(dict)
for link in response.css('a'):
date = link.re_first('((Jan(uary)?|Feb(ruary)?|Mar(ch)?|Apr(il)?|\n May|Jun(e)?|Jul(y)?|Aug(ust)?|Sep(tember)?|Oct(ober)?|Nov(ember)?|\n Dec(ember)?)\\s+\\d{1,2},\\s+\\d{4})|((1[0-2]|0?[1-9])/(3[01]|\n [12][0-9]|0?[1-9])/(?:[0-9]{2})?[0-9]{2})')
if (date is not None):
for item in ['Notice', 'Agenda', 'Minutes']:
if (item in link.attrib['href']):
link_dict[date][item] = response.urljoin(link.attrib['href'])
return link_dict<|docstring|>Gather dates, links<|endoftext|> |
dc0e84aed0806e944ac0a0618d71d5cbefd4ed57ab59523e47198d5126a2ed4a | def _parse_links(self, date):
'Parse or generate links.'
link_list = []
for (key, value) in self.links[date].items():
link_list.append({'title': key, 'href': value})
return link_list | Parse or generate links. | city_scrapers/spiders/il_corrections.py | _parse_links | yijun-li-20/city-scrapers | 255 | python | def _parse_links(self, date):
link_list = []
for (key, value) in self.links[date].items():
link_list.append({'title': key, 'href': value})
return link_list | def _parse_links(self, date):
link_list = []
for (key, value) in self.links[date].items():
link_list.append({'title': key, 'href': value})
return link_list<|docstring|>Parse or generate links.<|endoftext|> |
a6a4a0d62bee26a65a388f7347e2df2239568a4cf18f44381c4685473e4d7f76 | def get_version(path):
'Get the version info from the mpld3 package without importing it'
import ast
with open(path) as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module) if (isinstance(node, ast.Assign) and (node.targets[0].id == '__version__')))
try:
return next(version)
except StopIteration:
raise ValueError('version could not be located') | Get the version info from the mpld3 package without importing it | setup.py | get_version | ManuelZierl/slurmpy | 79 | python | def get_version(path):
import ast
with open(path) as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module) if (isinstance(node, ast.Assign) and (node.targets[0].id == '__version__')))
try:
return next(version)
except StopIteration:
raise ValueError('version could not be located') | def get_version(path):
import ast
with open(path) as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module) if (isinstance(node, ast.Assign) and (node.targets[0].id == '__version__')))
try:
return next(version)
except StopIteration:
raise ValueError('version could not be located')<|docstring|>Get the version info from the mpld3 package without importing it<|endoftext|> |
659bd6c1796158d60048100256e647ee9f967af8fc099ddc56366ef91f2e62e5 | def serialize(self):
'Serialize the struct into protobuf string'
job_config = self.get_proto_job_config()
return job_config.SerializeToString() | Serialize the struct into protobuf string | python/ray/job_config.py | serialize | scrarlet/ray | 3 | python | def serialize(self):
job_config = self.get_proto_job_config()
return job_config.SerializeToString() | def serialize(self):
job_config = self.get_proto_job_config()
return job_config.SerializeToString()<|docstring|>Serialize the struct into protobuf string<|endoftext|> |
bedb693eaee805bcf993ce49086148b29759651f1fa31ce437d013b188a334b5 | def get_proto_job_config(self):
'Return the prototype structure of JobConfig'
if (self._cached_pb is None):
self._cached_pb = ray.gcs_utils.JobConfig()
if (self.ray_namespace is None):
self._cached_pb.ray_namespace = str(uuid.uuid4())
else:
self._cached_pb.ray_namespace = self.ray_namespace
for key in self.worker_env:
self._cached_pb.worker_env[key] = self.worker_env[key]
self._cached_pb.num_java_workers_per_process = self.num_java_workers_per_process
self._cached_pb.jvm_options.extend(self.jvm_options)
self._cached_pb.code_search_path.extend(self.code_search_path)
self._cached_pb.runtime_env.CopyFrom(self._get_proto_runtime())
self._cached_pb.serialized_runtime_env = self.get_serialized_runtime_env()
for (k, v) in self.metadata.items():
self._cached_pb.metadata[k] = v
return self._cached_pb | Return the prototype structure of JobConfig | python/ray/job_config.py | get_proto_job_config | scrarlet/ray | 3 | python | def get_proto_job_config(self):
if (self._cached_pb is None):
self._cached_pb = ray.gcs_utils.JobConfig()
if (self.ray_namespace is None):
self._cached_pb.ray_namespace = str(uuid.uuid4())
else:
self._cached_pb.ray_namespace = self.ray_namespace
for key in self.worker_env:
self._cached_pb.worker_env[key] = self.worker_env[key]
self._cached_pb.num_java_workers_per_process = self.num_java_workers_per_process
self._cached_pb.jvm_options.extend(self.jvm_options)
self._cached_pb.code_search_path.extend(self.code_search_path)
self._cached_pb.runtime_env.CopyFrom(self._get_proto_runtime())
self._cached_pb.serialized_runtime_env = self.get_serialized_runtime_env()
for (k, v) in self.metadata.items():
self._cached_pb.metadata[k] = v
return self._cached_pb | def get_proto_job_config(self):
if (self._cached_pb is None):
self._cached_pb = ray.gcs_utils.JobConfig()
if (self.ray_namespace is None):
self._cached_pb.ray_namespace = str(uuid.uuid4())
else:
self._cached_pb.ray_namespace = self.ray_namespace
for key in self.worker_env:
self._cached_pb.worker_env[key] = self.worker_env[key]
self._cached_pb.num_java_workers_per_process = self.num_java_workers_per_process
self._cached_pb.jvm_options.extend(self.jvm_options)
self._cached_pb.code_search_path.extend(self.code_search_path)
self._cached_pb.runtime_env.CopyFrom(self._get_proto_runtime())
self._cached_pb.serialized_runtime_env = self.get_serialized_runtime_env()
for (k, v) in self.metadata.items():
self._cached_pb.metadata[k] = v
return self._cached_pb<|docstring|>Return the prototype structure of JobConfig<|endoftext|> |
4cca0aca3eae408b5d37d8ffe3b1fa84f0cc4efe4c420a945794725102947627 | def get_runtime_env_uris(self):
'Get the uris of runtime environment'
if self.runtime_env.get('uris'):
return self.runtime_env.get('uris')
return [] | Get the uris of runtime environment | python/ray/job_config.py | get_runtime_env_uris | scrarlet/ray | 3 | python | def get_runtime_env_uris(self):
if self.runtime_env.get('uris'):
return self.runtime_env.get('uris')
return [] | def get_runtime_env_uris(self):
if self.runtime_env.get('uris'):
return self.runtime_env.get('uris')
return []<|docstring|>Get the uris of runtime environment<|endoftext|> |
61f4c2449d6dfbd39a294a811c52d615892cb91b09c2804b4f2c2d8fb07ca301 | def get_serialized_runtime_env(self) -> str:
'Return the JSON-serialized parsed runtime env dict'
return self._parsed_runtime_env.serialize() | Return the JSON-serialized parsed runtime env dict | python/ray/job_config.py | get_serialized_runtime_env | scrarlet/ray | 3 | python | def get_serialized_runtime_env(self) -> str:
return self._parsed_runtime_env.serialize() | def get_serialized_runtime_env(self) -> str:
return self._parsed_runtime_env.serialize()<|docstring|>Return the JSON-serialized parsed runtime env dict<|endoftext|> |
115de9370d9f40f3b0d2d2d33e7c48a2b44d19360c6fd21649b520908d203fc3 | def is_task_executor_process(task: Task, process: psutil.Process):
'\n check the process if task executor or not by command\n :param task:\n :param process:\n :return:\n '
try:
cmdline = process.cmdline()
except Exception as e:
schedule_logger(task.f_job_id).warning(e)
return False
else:
schedule_logger(task.f_job_id).info(cmdline)
if (task.f_worker_id and (task.f_worker_id in cmdline)):
return True
if (len(cmdline) != len(task.f_cmd)):
return False
for (i, v) in enumerate(task.f_cmd):
if (cmdline[i] != str(v)):
return False
return True | check the process if task executor or not by command
:param task:
:param process:
:return: | python/fate_flow/utils/process_utils.py | is_task_executor_process | PromiseChan/FATE-Flow | 22 | python | def is_task_executor_process(task: Task, process: psutil.Process):
'\n check the process if task executor or not by command\n :param task:\n :param process:\n :return:\n '
try:
cmdline = process.cmdline()
except Exception as e:
schedule_logger(task.f_job_id).warning(e)
return False
else:
schedule_logger(task.f_job_id).info(cmdline)
if (task.f_worker_id and (task.f_worker_id in cmdline)):
return True
if (len(cmdline) != len(task.f_cmd)):
return False
for (i, v) in enumerate(task.f_cmd):
if (cmdline[i] != str(v)):
return False
return True | def is_task_executor_process(task: Task, process: psutil.Process):
'\n check the process if task executor or not by command\n :param task:\n :param process:\n :return:\n '
try:
cmdline = process.cmdline()
except Exception as e:
schedule_logger(task.f_job_id).warning(e)
return False
else:
schedule_logger(task.f_job_id).info(cmdline)
if (task.f_worker_id and (task.f_worker_id in cmdline)):
return True
if (len(cmdline) != len(task.f_cmd)):
return False
for (i, v) in enumerate(task.f_cmd):
if (cmdline[i] != str(v)):
return False
return True<|docstring|>check the process if task executor or not by command
:param task:
:param process:
:return:<|endoftext|> |
3d46c4770c2425bd2a90edecde26ad6d752ea335608638896c8a4ba9b6680c4a | def help(self):
'help() gets invoked when we get the ``results`` message\n with no arguments'
parts = []
for choice in Choice.objects.all():
part = ('%s: %d' % (choice.name, choice.votes))
parts.append(part)
msg = '; '.join(parts)
self.respond(msg) | help() gets invoked when we get the ``results`` message
with no arguments | voting/handlers.py | help | dragGH102/rapidsms-example-with-rest-services | 0 | python | def help(self):
'help() gets invoked when we get the ``results`` message\n with no arguments'
parts = []
for choice in Choice.objects.all():
part = ('%s: %d' % (choice.name, choice.votes))
parts.append(part)
msg = '; '.join(parts)
self.respond(msg) | def help(self):
'help() gets invoked when we get the ``results`` message\n with no arguments'
parts = []
for choice in Choice.objects.all():
part = ('%s: %d' % (choice.name, choice.votes))
parts.append(part)
msg = '; '.join(parts)
self.respond(msg)<|docstring|>help() gets invoked when we get the ``results`` message
with no arguments<|endoftext|> |
5a5ab440899d4468dc0a5b8a4278f302aa67cce32018238d66b459c9313d7b60 | def handle(self, text):
"This gets called if any arguments are given along with\n ``RESULTS``, but we don't care; just call help() as if they\n passed no arguments"
self.help() | This gets called if any arguments are given along with
``RESULTS``, but we don't care; just call help() as if they
passed no arguments | voting/handlers.py | handle | dragGH102/rapidsms-example-with-rest-services | 0 | python | def handle(self, text):
"This gets called if any arguments are given along with\n ``RESULTS``, but we don't care; just call help() as if they\n passed no arguments"
self.help() | def handle(self, text):
"This gets called if any arguments are given along with\n ``RESULTS``, but we don't care; just call help() as if they\n passed no arguments"
self.help()<|docstring|>This gets called if any arguments are given along with
``RESULTS``, but we don't care; just call help() as if they
passed no arguments<|endoftext|> |
5c91f7761e57677856dd94061d3febacc3a18df72b4470977c323e966b89fb86 | def help(self):
'Respond with the valid commands. Example response:\n ``Valid commands: VOTE <Moe|Larry|Curly>``\n '
choices = '|'.join(Choice.objects.values_list('name', flat=True))
self.respond(('Valid commands: VOTE <%s>' % choices)) | Respond with the valid commands. Example response:
``Valid commands: VOTE <Moe|Larry|Curly>`` | voting/handlers.py | help | dragGH102/rapidsms-example-with-rest-services | 0 | python | def help(self):
'Respond with the valid commands. Example response:\n ``Valid commands: VOTE <Moe|Larry|Curly>``\n '
choices = '|'.join(Choice.objects.values_list('name', flat=True))
self.respond(('Valid commands: VOTE <%s>' % choices)) | def help(self):
'Respond with the valid commands. Example response:\n ``Valid commands: VOTE <Moe|Larry|Curly>``\n '
choices = '|'.join(Choice.objects.values_list('name', flat=True))
self.respond(('Valid commands: VOTE <%s>' % choices))<|docstring|>Respond with the valid commands. Example response:
``Valid commands: VOTE <Moe|Larry|Curly>``<|endoftext|> |
9d3df847bade95093d86451b6e621a109397cb909aef103a9df4eb2ea81b3715 | def bash(command, terminal=None, *args, **kwargs):
'Execute command in a terminal.'
if (terminal is None):
terminal = current().context.terminal
r = terminal(command, *args, **kwargs)
return r | Execute command in a terminal. | tests/steps.py | bash | roshanths/altinity-dashboard | 1 | python | def bash(command, terminal=None, *args, **kwargs):
if (terminal is None):
terminal = current().context.terminal
r = terminal(command, *args, **kwargs)
return r | def bash(command, terminal=None, *args, **kwargs):
if (terminal is None):
terminal = current().context.terminal
r = terminal(command, *args, **kwargs)
return r<|docstring|>Execute command in a terminal.<|endoftext|> |
b75d8276e5dcfef822f54b0a9b74d19efecd569a1788785126a6cc0e36694db4 | @TextStep(Given)
def open_terminal(self, command=['/bin/bash'], timeout=100):
'Open host terminal.'
with Shell(command=command) as terminal:
terminal.timeout = timeout
terminal('echo 1')
try:
(yield terminal)
finally:
with Cleanup('closing terminal'):
terminal.close() | Open host terminal. | tests/steps.py | open_terminal | roshanths/altinity-dashboard | 1 | python | @TextStep(Given)
def open_terminal(self, command=['/bin/bash'], timeout=100):
with Shell(command=command) as terminal:
terminal.timeout = timeout
terminal('echo 1')
try:
(yield terminal)
finally:
with Cleanup('closing terminal'):
terminal.close() | @TextStep(Given)
def open_terminal(self, command=['/bin/bash'], timeout=100):
with Shell(command=command) as terminal:
terminal.timeout = timeout
terminal('echo 1')
try:
(yield terminal)
finally:
with Cleanup('closing terminal'):
terminal.close()<|docstring|>Open host terminal.<|endoftext|> |
9018320b2278feb22e20bdff99adcb338f01b327054d20321df8d4f9d0700092 | @TestStep(Given)
def webdriver(self, browser='chrome', selenium_hub_url='http://127.0.0.1:4444/wd/hub', timeout=300, local=None, local_webdriver_path=None):
'Create webdriver instance.'
driver = None
start_time = time.time()
try_number = 0
try:
with Given('I create new webdriver instance'):
if local:
if (browser == 'chrome'):
default_download_directory = (str(os.path.dirname(os.path.abspath(__file__))) + '/download')
prefs = {'download.default_directory': default_download_directory}
chrome_options = selenium_webdriver.ChromeOptions()
chrome_options.add_argument('--incognito')
chrome_options.add_argument('disable-infobars')
chrome_options.add_argument('start-maximized')
chrome_options.add_experimental_option('prefs', prefs)
driver = selenium_webdriver.Chrome(options=chrome_options, executable_path=local_webdriver_path)
else:
fail('only support chrome')
else:
while True:
try:
driver = selenium_webdriver.Remote(command_executor=selenium_hub_url, desired_capabilities={'browserName': browser, 'javascriptEnabled': True})
break
except Exception:
now = time.time()
if ((now - start_time) >= timeout):
raise
time.sleep(1)
try_number += 1
with And('I set implicit wait time', description=f'{self.context.global_wait_time} sec'):
driver.implicit_wait = self.context.global_wait_time
driver.implicitly_wait(self.context.global_wait_time)
(yield driver)
finally:
with Finally('close webdriver'):
driver.close() | Create webdriver instance. | tests/steps.py | webdriver | roshanths/altinity-dashboard | 1 | python | @TestStep(Given)
def webdriver(self, browser='chrome', selenium_hub_url='http://127.0.0.1:4444/wd/hub', timeout=300, local=None, local_webdriver_path=None):
driver = None
start_time = time.time()
try_number = 0
try:
with Given('I create new webdriver instance'):
if local:
if (browser == 'chrome'):
default_download_directory = (str(os.path.dirname(os.path.abspath(__file__))) + '/download')
prefs = {'download.default_directory': default_download_directory}
chrome_options = selenium_webdriver.ChromeOptions()
chrome_options.add_argument('--incognito')
chrome_options.add_argument('disable-infobars')
chrome_options.add_argument('start-maximized')
chrome_options.add_experimental_option('prefs', prefs)
driver = selenium_webdriver.Chrome(options=chrome_options, executable_path=local_webdriver_path)
else:
fail('only support chrome')
else:
while True:
try:
driver = selenium_webdriver.Remote(command_executor=selenium_hub_url, desired_capabilities={'browserName': browser, 'javascriptEnabled': True})
break
except Exception:
now = time.time()
if ((now - start_time) >= timeout):
raise
time.sleep(1)
try_number += 1
with And('I set implicit wait time', description=f'{self.context.global_wait_time} sec'):
driver.implicit_wait = self.context.global_wait_time
driver.implicitly_wait(self.context.global_wait_time)
(yield driver)
finally:
with Finally('close webdriver'):
driver.close() | @TestStep(Given)
def webdriver(self, browser='chrome', selenium_hub_url='http://127.0.0.1:4444/wd/hub', timeout=300, local=None, local_webdriver_path=None):
driver = None
start_time = time.time()
try_number = 0
try:
with Given('I create new webdriver instance'):
if local:
if (browser == 'chrome'):
default_download_directory = (str(os.path.dirname(os.path.abspath(__file__))) + '/download')
prefs = {'download.default_directory': default_download_directory}
chrome_options = selenium_webdriver.ChromeOptions()
chrome_options.add_argument('--incognito')
chrome_options.add_argument('disable-infobars')
chrome_options.add_argument('start-maximized')
chrome_options.add_experimental_option('prefs', prefs)
driver = selenium_webdriver.Chrome(options=chrome_options, executable_path=local_webdriver_path)
else:
fail('only support chrome')
else:
while True:
try:
driver = selenium_webdriver.Remote(command_executor=selenium_hub_url, desired_capabilities={'browserName': browser, 'javascriptEnabled': True})
break
except Exception:
now = time.time()
if ((now - start_time) >= timeout):
raise
time.sleep(1)
try_number += 1
with And('I set implicit wait time', description=f'{self.context.global_wait_time} sec'):
driver.implicit_wait = self.context.global_wait_time
driver.implicitly_wait(self.context.global_wait_time)
(yield driver)
finally:
with Finally('close webdriver'):
driver.close()<|docstring|>Create webdriver instance.<|endoftext|> |
54e341d9aac53d9fe2c1fc3fc44cd3cbbcc91d9c10c6c4265e59f96a02e308d8 | @TestStep(Given)
def alert(self, message, sleep=0.25):
'Create alert popup in the browser window.'
driver: WebDriver = self.context.driver
driver.execute_script(f'alert("{message}");')
time.sleep(sleep)
driver.switch_to.alert.accept() | Create alert popup in the browser window. | tests/steps.py | alert | roshanths/altinity-dashboard | 1 | python | @TestStep(Given)
def alert(self, message, sleep=0.25):
driver: WebDriver = self.context.driver
driver.execute_script(f'alert("{message}");')
time.sleep(sleep)
driver.switch_to.alert.accept() | @TestStep(Given)
def alert(self, message, sleep=0.25):
driver: WebDriver = self.context.driver
driver.execute_script(f'alert("{message}");')
time.sleep(sleep)
driver.switch_to.alert.accept()<|docstring|>Create alert popup in the browser window.<|endoftext|> |
34be1be4fad13a24bcea188e0e938bb78e860ebfa2684db34592c8e0f7560277 | @TestStep(Given)
def wait_for_element_to_be_clickable(self, timeout=None, poll_frequency=None, select_type=None, element=None):
'An Expectation for checking an element is visible and enabled such that\n you can click it.\n select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)\n element - locator in string format(Example: "organizationId").\n '
driver = self.context.driver
if (timeout is None):
timeout = 30
if (poll_frequency is None):
poll_frequency = 1
wait = WebDriverWait(driver, timeout, poll_frequency)
wait.until(EC.element_to_be_clickable((select_type, element))) | An Expectation for checking an element is visible and enabled such that
you can click it.
select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)
element - locator in string format(Example: "organizationId"). | tests/steps.py | wait_for_element_to_be_clickable | roshanths/altinity-dashboard | 1 | python | @TestStep(Given)
def wait_for_element_to_be_clickable(self, timeout=None, poll_frequency=None, select_type=None, element=None):
'An Expectation for checking an element is visible and enabled such that\n you can click it.\n select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)\n element - locator in string format(Example: "organizationId").\n '
driver = self.context.driver
if (timeout is None):
timeout = 30
if (poll_frequency is None):
poll_frequency = 1
wait = WebDriverWait(driver, timeout, poll_frequency)
wait.until(EC.element_to_be_clickable((select_type, element))) | @TestStep(Given)
def wait_for_element_to_be_clickable(self, timeout=None, poll_frequency=None, select_type=None, element=None):
'An Expectation for checking an element is visible and enabled such that\n you can click it.\n select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)\n element - locator in string format(Example: "organizationId").\n '
driver = self.context.driver
if (timeout is None):
timeout = 30
if (poll_frequency is None):
poll_frequency = 1
wait = WebDriverWait(driver, timeout, poll_frequency)
wait.until(EC.element_to_be_clickable((select_type, element)))<|docstring|>An Expectation for checking an element is visible and enabled such that
you can click it.
select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)
element - locator in string format(Example: "organizationId").<|endoftext|> |
02b44b1391c253ef879f96424d3ec5ebf1a454b2355d7beed4df056bc2de4e9b | @TestStep(Given)
def wait_for_element_to_be_invisible(self, select_type=None, element=None):
'An Expectation for checking that an element is either invisible or not\n present on the DOM. Locator is used to find the element.\n '
driver = self.context.driver
wait = WebDriverWait(driver, 60)
wait.until(EC.invisibility_of_element_located((select_type, element))) | An Expectation for checking that an element is either invisible or not
present on the DOM. Locator is used to find the element. | tests/steps.py | wait_for_element_to_be_invisible | roshanths/altinity-dashboard | 1 | python | @TestStep(Given)
def wait_for_element_to_be_invisible(self, select_type=None, element=None):
'An Expectation for checking that an element is either invisible or not\n present on the DOM. Locator is used to find the element.\n '
driver = self.context.driver
wait = WebDriverWait(driver, 60)
wait.until(EC.invisibility_of_element_located((select_type, element))) | @TestStep(Given)
def wait_for_element_to_be_invisible(self, select_type=None, element=None):
'An Expectation for checking that an element is either invisible or not\n present on the DOM. Locator is used to find the element.\n '
driver = self.context.driver
wait = WebDriverWait(driver, 60)
wait.until(EC.invisibility_of_element_located((select_type, element)))<|docstring|>An Expectation for checking that an element is either invisible or not
present on the DOM. Locator is used to find the element.<|endoftext|> |
a10853ffe54fec87ab8d360ca194e4e0d0b91a72ca49fbb9caf7f3b6c5c5548c | @TestStep(Given)
def wait_for_element_to_be_visible(self, select_type=None, element=None, timeout=30):
'An expectation for checking that an element is present on the DOM of a\n page and visible. Visibility means that the element is not only displayed\n but also has a height and width that is greater than 0.\n select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)\n element - locator in string format(Example: "organizationId").\n '
driver = self.context.driver
wait = WebDriverWait(driver, timeout)
wait.until(EC.visibility_of_element_located((select_type, element))) | An expectation for checking that an element is present on the DOM of a
page and visible. Visibility means that the element is not only displayed
but also has a height and width that is greater than 0.
select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)
element - locator in string format(Example: "organizationId"). | tests/steps.py | wait_for_element_to_be_visible | roshanths/altinity-dashboard | 1 | python | @TestStep(Given)
def wait_for_element_to_be_visible(self, select_type=None, element=None, timeout=30):
'An expectation for checking that an element is present on the DOM of a\n page and visible. Visibility means that the element is not only displayed\n but also has a height and width that is greater than 0.\n select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)\n element - locator in string format(Example: "organizationId").\n '
driver = self.context.driver
wait = WebDriverWait(driver, timeout)
wait.until(EC.visibility_of_element_located((select_type, element))) | @TestStep(Given)
def wait_for_element_to_be_visible(self, select_type=None, element=None, timeout=30):
'An expectation for checking that an element is present on the DOM of a\n page and visible. Visibility means that the element is not only displayed\n but also has a height and width that is greater than 0.\n select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)\n element - locator in string format(Example: "organizationId").\n '
driver = self.context.driver
wait = WebDriverWait(driver, timeout)
wait.until(EC.visibility_of_element_located((select_type, element)))<|docstring|>An expectation for checking that an element is present on the DOM of a
page and visible. Visibility means that the element is not only displayed
but also has a height and width that is greater than 0.
select_type - option that follows after SelectBy. (Examples: CSS, ID, XPATH, NAME)
element - locator in string format(Example: "organizationId").<|endoftext|> |
6134519dba06679dd7ffa122c34316b666047db792b24d9d6c19a4642c06ec79 | @TestStep(When)
def run_adash_on_chrome(self):
'Run Altinity dashboard url on Chrome.'
driver: WebDriver = self.context.driver
open_altinity_dashboard = 'http://0.0.0.0:8080'
with Given('Adash is running in the VM'):
with When('start the chrome with Adash url and find `Details` element'):
for attempt in retries(count=5, timeout=20, delay=10):
with attempt:
driver.get(open_altinity_dashboard)
WebDriverWait(driver, 10).until(EC.visibility_of_element_located((SelectBy.XPATH, '/html/body/div[1]/div/main/section/div[1]/div[2]/article/div[1]'))) | Run Altinity dashboard url on Chrome. | tests/steps.py | run_adash_on_chrome | roshanths/altinity-dashboard | 1 | python | @TestStep(When)
def run_adash_on_chrome(self):
driver: WebDriver = self.context.driver
open_altinity_dashboard = 'http://0.0.0.0:8080'
with Given('Adash is running in the VM'):
with When('start the chrome with Adash url and find `Details` element'):
for attempt in retries(count=5, timeout=20, delay=10):
with attempt:
driver.get(open_altinity_dashboard)
WebDriverWait(driver, 10).until(EC.visibility_of_element_located((SelectBy.XPATH, '/html/body/div[1]/div/main/section/div[1]/div[2]/article/div[1]'))) | @TestStep(When)
def run_adash_on_chrome(self):
driver: WebDriver = self.context.driver
open_altinity_dashboard = 'http://0.0.0.0:8080'
with Given('Adash is running in the VM'):
with When('start the chrome with Adash url and find `Details` element'):
for attempt in retries(count=5, timeout=20, delay=10):
with attempt:
driver.get(open_altinity_dashboard)
WebDriverWait(driver, 10).until(EC.visibility_of_element_located((SelectBy.XPATH, '/html/body/div[1]/div/main/section/div[1]/div[2]/article/div[1]')))<|docstring|>Run Altinity dashboard url on Chrome.<|endoftext|> |
4a45476e74825cea21b8b7d56f318e4fad6660c5b1195849ae70e54d9ece821b | @TestStep(When)
def delete_cho_remove_ch(self, timeout=15):
'Delete ClickHouse Operator and Installation from Altinity dashboard.'
driver: WebDriver = self.context.driver
cho_tab = '/html/body/div[1]/div/div/div/nav/ul/li[2]/a'
ch_install = '/html/body/div[1]/div/div/div/nav/ul/li[3]/a'
chi_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div'
chi_toggle_delete = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div/ul/li[2]/button'
chi_delete_confm_btn = '/html/body/div[3]/div/div/div/footer/button[1]'
cho_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div'
cho_toggle_delete = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div/ul/li[2]/button'
cho_delete_confm_btn = '/html/body/div[4]/div/div/div/footer/button[1]'
with Given('Adash is visible in chrome'):
with When('I click on `ClickHouse Installations` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=ch_install)
ch_installs = driver.find_element(SelectBy.XPATH, ch_install)
ch_installs.click()
with And('I click toggle button to delete the ClickHouse Installation'):
for attempt in retries(count=5, timeout=10, delay=2):
with attempt:
wait_for_element_to_be_clickable(timeout=10, select_type=SelectBy.XPATH, element=chi_toggle_btn)
chi_toggle_btns = driver.find_element(SelectBy.XPATH, chi_toggle_btn)
chi_toggle_btns.click()
with And('I click on `Delete` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=chi_toggle_delete)
chi_toggle_deletes = driver.find_element(SelectBy.XPATH, chi_toggle_delete)
chi_toggle_deletes.click()
with And('I click on `Delete` confirmation button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=chi_delete_confm_btn)
chi_delete_confm_btns = driver.find_element(SelectBy.XPATH, chi_delete_confm_btn)
chi_delete_confm_btns.click()
with And('I wait until `ClickHouse Installation` terminates'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_invisible(select_type=SelectBy.XPATH, element=chi_toggle_btn)
with When('I click on `ClickHouse Operator` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_tab)
cho_tabs = driver.find_element(SelectBy.XPATH, cho_tab)
cho_tabs.click()
with And('I click toggle button to delete the ClickHouse Operator'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_clickable(timeout=10, select_type=SelectBy.XPATH, element=cho_toggle_btn)
cho_toggle_btns = driver.find_element(SelectBy.XPATH, cho_toggle_btn)
cho_toggle_btns.click()
with And('I click on `Delete` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_toggle_delete)
cho_toggle_deletes = driver.find_element(SelectBy.XPATH, cho_toggle_delete)
cho_toggle_deletes.click()
with And('I click on `Delete` confirmation button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_delete_confm_btn)
cho_delete_confm_btns = driver.find_element(SelectBy.XPATH, cho_delete_confm_btn)
cho_delete_confm_btns.click()
with And('I wait until `ClickHouse Operator` terminates'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_invisible(select_type=SelectBy.XPATH, element=cho_toggle_btn) | Delete ClickHouse Operator and Installation from Altinity dashboard. | tests/steps.py | delete_cho_remove_ch | roshanths/altinity-dashboard | 1 | python | @TestStep(When)
def delete_cho_remove_ch(self, timeout=15):
driver: WebDriver = self.context.driver
cho_tab = '/html/body/div[1]/div/div/div/nav/ul/li[2]/a'
ch_install = '/html/body/div[1]/div/div/div/nav/ul/li[3]/a'
chi_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div'
chi_toggle_delete = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div/ul/li[2]/button'
chi_delete_confm_btn = '/html/body/div[3]/div/div/div/footer/button[1]'
cho_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div'
cho_toggle_delete = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div/ul/li[2]/button'
cho_delete_confm_btn = '/html/body/div[4]/div/div/div/footer/button[1]'
with Given('Adash is visible in chrome'):
with When('I click on `ClickHouse Installations` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=ch_install)
ch_installs = driver.find_element(SelectBy.XPATH, ch_install)
ch_installs.click()
with And('I click toggle button to delete the ClickHouse Installation'):
for attempt in retries(count=5, timeout=10, delay=2):
with attempt:
wait_for_element_to_be_clickable(timeout=10, select_type=SelectBy.XPATH, element=chi_toggle_btn)
chi_toggle_btns = driver.find_element(SelectBy.XPATH, chi_toggle_btn)
chi_toggle_btns.click()
with And('I click on `Delete` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=chi_toggle_delete)
chi_toggle_deletes = driver.find_element(SelectBy.XPATH, chi_toggle_delete)
chi_toggle_deletes.click()
with And('I click on `Delete` confirmation button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=chi_delete_confm_btn)
chi_delete_confm_btns = driver.find_element(SelectBy.XPATH, chi_delete_confm_btn)
chi_delete_confm_btns.click()
with And('I wait until `ClickHouse Installation` terminates'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_invisible(select_type=SelectBy.XPATH, element=chi_toggle_btn)
with When('I click on `ClickHouse Operator` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_tab)
cho_tabs = driver.find_element(SelectBy.XPATH, cho_tab)
cho_tabs.click()
with And('I click toggle button to delete the ClickHouse Operator'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_clickable(timeout=10, select_type=SelectBy.XPATH, element=cho_toggle_btn)
cho_toggle_btns = driver.find_element(SelectBy.XPATH, cho_toggle_btn)
cho_toggle_btns.click()
with And('I click on `Delete` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_toggle_delete)
cho_toggle_deletes = driver.find_element(SelectBy.XPATH, cho_toggle_delete)
cho_toggle_deletes.click()
with And('I click on `Delete` confirmation button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_delete_confm_btn)
cho_delete_confm_btns = driver.find_element(SelectBy.XPATH, cho_delete_confm_btn)
cho_delete_confm_btns.click()
with And('I wait until `ClickHouse Operator` terminates'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_invisible(select_type=SelectBy.XPATH, element=cho_toggle_btn) | @TestStep(When)
def delete_cho_remove_ch(self, timeout=15):
driver: WebDriver = self.context.driver
cho_tab = '/html/body/div[1]/div/div/div/nav/ul/li[2]/a'
ch_install = '/html/body/div[1]/div/div/div/nav/ul/li[3]/a'
chi_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div'
chi_toggle_delete = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div/ul/li[2]/button'
chi_delete_confm_btn = '/html/body/div[3]/div/div/div/footer/button[1]'
cho_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div'
cho_toggle_delete = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div/ul/li[2]/button'
cho_delete_confm_btn = '/html/body/div[4]/div/div/div/footer/button[1]'
with Given('Adash is visible in chrome'):
with When('I click on `ClickHouse Installations` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=ch_install)
ch_installs = driver.find_element(SelectBy.XPATH, ch_install)
ch_installs.click()
with And('I click toggle button to delete the ClickHouse Installation'):
for attempt in retries(count=5, timeout=10, delay=2):
with attempt:
wait_for_element_to_be_clickable(timeout=10, select_type=SelectBy.XPATH, element=chi_toggle_btn)
chi_toggle_btns = driver.find_element(SelectBy.XPATH, chi_toggle_btn)
chi_toggle_btns.click()
with And('I click on `Delete` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=chi_toggle_delete)
chi_toggle_deletes = driver.find_element(SelectBy.XPATH, chi_toggle_delete)
chi_toggle_deletes.click()
with And('I click on `Delete` confirmation button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=chi_delete_confm_btn)
chi_delete_confm_btns = driver.find_element(SelectBy.XPATH, chi_delete_confm_btn)
chi_delete_confm_btns.click()
with And('I wait until `ClickHouse Installation` terminates'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_invisible(select_type=SelectBy.XPATH, element=chi_toggle_btn)
with When('I click on `ClickHouse Operator` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_tab)
cho_tabs = driver.find_element(SelectBy.XPATH, cho_tab)
cho_tabs.click()
with And('I click toggle button to delete the ClickHouse Operator'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_clickable(timeout=10, select_type=SelectBy.XPATH, element=cho_toggle_btn)
cho_toggle_btns = driver.find_element(SelectBy.XPATH, cho_toggle_btn)
cho_toggle_btns.click()
with And('I click on `Delete` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_toggle_delete)
cho_toggle_deletes = driver.find_element(SelectBy.XPATH, cho_toggle_delete)
cho_toggle_deletes.click()
with And('I click on `Delete` confirmation button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_delete_confm_btn)
cho_delete_confm_btns = driver.find_element(SelectBy.XPATH, cho_delete_confm_btn)
cho_delete_confm_btns.click()
with And('I wait until `ClickHouse Operator` terminates'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_invisible(select_type=SelectBy.XPATH, element=cho_toggle_btn)<|docstring|>Delete ClickHouse Operator and Installation from Altinity dashboard.<|endoftext|> |
d756e46185daf8aa20465ec410a282d1abf941df292147ec03c64c046fc8f18e | @TestStep(When)
def deploy_cho_install_ch(self, timeout=15):
'Deploy ClickHouse Operator on Altinity dashboard.'
driver: WebDriver = self.context.driver
cho_tab = '/html/body/div[1]/div/div/div/nav/ul/li[2]/a'
add_cho = '/html/body/div[1]/div/main/section/div/div[2]/button'
select_ns = "//*[@id='pf-context-selector-toggle-id-0']"
select_default_ns = '/html/body/div[6]/div/div/ul/li[1]/button'
click_deploy = '/html/body/div[5]/div/div/div/footer/button[1]'
ch_install = '/html/body/div[1]/div/div/div/nav/ul/li[3]/a'
select_template = "//*[@id='pf-context-selector-toggle-id-0']"
select_template_dropdown = '/html/body/div[7]/div/div/ul/li[12]/button'
select_ns_chi = "//*[@id='pf-context-selector-toggle-id-0']"
select_type = "//*[@id='pf-context-selector-search-button-id-0']"
select_ns_search = '/html/body/div[8]/div/div/div/div/input'
select_ns_search_icon = "//*[@id='pf-context-selector-search-button-id-0']"
select_ns_default_chi = '/html/body/div[8]/div/div/ul/li[1]/button'
Create_btn_chi = '/html/body/div[5]/div/div/div/footer/div/div[4]/button[1]'
chi_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div'
cho_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div'
with Given('Adash is visible in chrome'):
with When('I click on `ClickHouse Operator` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_tab)
cho_tabs = driver.find_element(SelectBy.XPATH, cho_tab)
cho_tabs.click()
with And('I click on `+` button to add ClickHouse Operator'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=add_cho)
add_chos = driver.find_element(SelectBy.XPATH, add_cho)
add_chos.click()
with And('I click on `Select a Namespace:` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns)
select_nss = driver.find_element(SelectBy.XPATH, select_ns)
select_nss.click()
with And('I click on `default` namespace'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_default_ns)
select_default_nss = driver.find_element(SelectBy.XPATH, select_default_ns)
select_default_nss.click()
with And('I click on `Deploy` button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=click_deploy)
click_deploys = driver.find_element(SelectBy.XPATH, click_deploy)
click_deploys.click()
time.sleep(2)
with And('I wait until `ClickHouse Operator` become visible'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_visible(select_type=SelectBy.XPATH, element=cho_toggle_btn)
with And('I click on `ClickHouse Installations` tab'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=ch_install)
ch_installs = driver.find_element(SelectBy.XPATH, ch_install)
ch_installs.click()
with And('I click on `+` button to add ClickHouse Installations'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=add_cho)
add_chos = driver.find_element(SelectBy.XPATH, add_cho)
add_chos.click()
time.sleep(2)
with And('I click on template example dropdown'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_template)
select_templates = driver.find_element(SelectBy.XPATH, select_template)
select_templates.click()
with And('I select a installation template'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_template_dropdown)
select_template_dropdowns = driver.find_element(SelectBy.XPATH, select_template_dropdown)
select_template_dropdowns.click()
with And('I click on `Select a Namespace To Deploy To:` dropdown'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_chi)
select_ns_chis = driver.find_element(SelectBy.XPATH, select_ns_chi)
select_ns_chis.click()
wait_for_element_to_be_clickable(select_type=SelectBy.XPATH, element=select_ns_search_icon)
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_search)
select_ns_searchs = driver.find_element(SelectBy.XPATH, select_ns_search)
select_ns_searchs.send_keys(Keys.TAB)
select_ns_search_icons = driver.find_element(SelectBy.XPATH, select_ns_search_icon)
select_ns_search_icons.send_keys(Keys.TAB)
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_default_chi)
select_ns_default_chis = driver.find_element(SelectBy.XPATH, select_ns_default_chi)
select_ns_default_chis.send_keys(Keys.ENTER)
with And('I click `Create` button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=Create_btn_chi)
Create_btn_chis = driver.find_element(SelectBy.XPATH, Create_btn_chi)
Create_btn_chis.click() | Deploy ClickHouse Operator on Altinity dashboard. | tests/steps.py | deploy_cho_install_ch | roshanths/altinity-dashboard | 1 | python | @TestStep(When)
def deploy_cho_install_ch(self, timeout=15):
driver: WebDriver = self.context.driver
cho_tab = '/html/body/div[1]/div/div/div/nav/ul/li[2]/a'
add_cho = '/html/body/div[1]/div/main/section/div/div[2]/button'
select_ns = "//*[@id='pf-context-selector-toggle-id-0']"
select_default_ns = '/html/body/div[6]/div/div/ul/li[1]/button'
click_deploy = '/html/body/div[5]/div/div/div/footer/button[1]'
ch_install = '/html/body/div[1]/div/div/div/nav/ul/li[3]/a'
select_template = "//*[@id='pf-context-selector-toggle-id-0']"
select_template_dropdown = '/html/body/div[7]/div/div/ul/li[12]/button'
select_ns_chi = "//*[@id='pf-context-selector-toggle-id-0']"
select_type = "//*[@id='pf-context-selector-search-button-id-0']"
select_ns_search = '/html/body/div[8]/div/div/div/div/input'
select_ns_search_icon = "//*[@id='pf-context-selector-search-button-id-0']"
select_ns_default_chi = '/html/body/div[8]/div/div/ul/li[1]/button'
Create_btn_chi = '/html/body/div[5]/div/div/div/footer/div/div[4]/button[1]'
chi_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div'
cho_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div'
with Given('Adash is visible in chrome'):
with When('I click on `ClickHouse Operator` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_tab)
cho_tabs = driver.find_element(SelectBy.XPATH, cho_tab)
cho_tabs.click()
with And('I click on `+` button to add ClickHouse Operator'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=add_cho)
add_chos = driver.find_element(SelectBy.XPATH, add_cho)
add_chos.click()
with And('I click on `Select a Namespace:` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns)
select_nss = driver.find_element(SelectBy.XPATH, select_ns)
select_nss.click()
with And('I click on `default` namespace'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_default_ns)
select_default_nss = driver.find_element(SelectBy.XPATH, select_default_ns)
select_default_nss.click()
with And('I click on `Deploy` button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=click_deploy)
click_deploys = driver.find_element(SelectBy.XPATH, click_deploy)
click_deploys.click()
time.sleep(2)
with And('I wait until `ClickHouse Operator` become visible'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_visible(select_type=SelectBy.XPATH, element=cho_toggle_btn)
with And('I click on `ClickHouse Installations` tab'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=ch_install)
ch_installs = driver.find_element(SelectBy.XPATH, ch_install)
ch_installs.click()
with And('I click on `+` button to add ClickHouse Installations'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=add_cho)
add_chos = driver.find_element(SelectBy.XPATH, add_cho)
add_chos.click()
time.sleep(2)
with And('I click on template example dropdown'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_template)
select_templates = driver.find_element(SelectBy.XPATH, select_template)
select_templates.click()
with And('I select a installation template'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_template_dropdown)
select_template_dropdowns = driver.find_element(SelectBy.XPATH, select_template_dropdown)
select_template_dropdowns.click()
with And('I click on `Select a Namespace To Deploy To:` dropdown'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_chi)
select_ns_chis = driver.find_element(SelectBy.XPATH, select_ns_chi)
select_ns_chis.click()
wait_for_element_to_be_clickable(select_type=SelectBy.XPATH, element=select_ns_search_icon)
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_search)
select_ns_searchs = driver.find_element(SelectBy.XPATH, select_ns_search)
select_ns_searchs.send_keys(Keys.TAB)
select_ns_search_icons = driver.find_element(SelectBy.XPATH, select_ns_search_icon)
select_ns_search_icons.send_keys(Keys.TAB)
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_default_chi)
select_ns_default_chis = driver.find_element(SelectBy.XPATH, select_ns_default_chi)
select_ns_default_chis.send_keys(Keys.ENTER)
with And('I click `Create` button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=Create_btn_chi)
Create_btn_chis = driver.find_element(SelectBy.XPATH, Create_btn_chi)
Create_btn_chis.click() | @TestStep(When)
def deploy_cho_install_ch(self, timeout=15):
driver: WebDriver = self.context.driver
cho_tab = '/html/body/div[1]/div/div/div/nav/ul/li[2]/a'
add_cho = '/html/body/div[1]/div/main/section/div/div[2]/button'
select_ns = "//*[@id='pf-context-selector-toggle-id-0']"
select_default_ns = '/html/body/div[6]/div/div/ul/li[1]/button'
click_deploy = '/html/body/div[5]/div/div/div/footer/button[1]'
ch_install = '/html/body/div[1]/div/div/div/nav/ul/li[3]/a'
select_template = "//*[@id='pf-context-selector-toggle-id-0']"
select_template_dropdown = '/html/body/div[7]/div/div/ul/li[12]/button'
select_ns_chi = "//*[@id='pf-context-selector-toggle-id-0']"
select_type = "//*[@id='pf-context-selector-search-button-id-0']"
select_ns_search = '/html/body/div[8]/div/div/div/div/input'
select_ns_search_icon = "//*[@id='pf-context-selector-search-button-id-0']"
select_ns_default_chi = '/html/body/div[8]/div/div/ul/li[1]/button'
Create_btn_chi = '/html/body/div[5]/div/div/div/footer/div/div[4]/button[1]'
chi_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[7]/div'
cho_toggle_btn = '/html/body/div[1]/div/main/section/table/tbody/tr[1]/td[6]/div'
with Given('Adash is visible in chrome'):
with When('I click on `ClickHouse Operator` tab in the Adash'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=cho_tab)
cho_tabs = driver.find_element(SelectBy.XPATH, cho_tab)
cho_tabs.click()
with And('I click on `+` button to add ClickHouse Operator'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=add_cho)
add_chos = driver.find_element(SelectBy.XPATH, add_cho)
add_chos.click()
with And('I click on `Select a Namespace:` drop down'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns)
select_nss = driver.find_element(SelectBy.XPATH, select_ns)
select_nss.click()
with And('I click on `default` namespace'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_default_ns)
select_default_nss = driver.find_element(SelectBy.XPATH, select_default_ns)
select_default_nss.click()
with And('I click on `Deploy` button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=click_deploy)
click_deploys = driver.find_element(SelectBy.XPATH, click_deploy)
click_deploys.click()
time.sleep(2)
with And('I wait until `ClickHouse Operator` become visible'):
for attempt in retries(count=5, timeout=timeout, delay=2):
with attempt:
wait_for_element_to_be_visible(select_type=SelectBy.XPATH, element=cho_toggle_btn)
with And('I click on `ClickHouse Installations` tab'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=ch_install)
ch_installs = driver.find_element(SelectBy.XPATH, ch_install)
ch_installs.click()
with And('I click on `+` button to add ClickHouse Installations'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=add_cho)
add_chos = driver.find_element(SelectBy.XPATH, add_cho)
add_chos.click()
time.sleep(2)
with And('I click on template example dropdown'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_template)
select_templates = driver.find_element(SelectBy.XPATH, select_template)
select_templates.click()
with And('I select a installation template'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_template_dropdown)
select_template_dropdowns = driver.find_element(SelectBy.XPATH, select_template_dropdown)
select_template_dropdowns.click()
with And('I click on `Select a Namespace To Deploy To:` dropdown'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_chi)
select_ns_chis = driver.find_element(SelectBy.XPATH, select_ns_chi)
select_ns_chis.click()
wait_for_element_to_be_clickable(select_type=SelectBy.XPATH, element=select_ns_search_icon)
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_search)
select_ns_searchs = driver.find_element(SelectBy.XPATH, select_ns_search)
select_ns_searchs.send_keys(Keys.TAB)
select_ns_search_icons = driver.find_element(SelectBy.XPATH, select_ns_search_icon)
select_ns_search_icons.send_keys(Keys.TAB)
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=select_ns_default_chi)
select_ns_default_chis = driver.find_element(SelectBy.XPATH, select_ns_default_chi)
select_ns_default_chis.send_keys(Keys.ENTER)
with And('I click `Create` button'):
wait_for_element_to_be_clickable(timeout=timeout, select_type=SelectBy.XPATH, element=Create_btn_chi)
Create_btn_chis = driver.find_element(SelectBy.XPATH, Create_btn_chi)
Create_btn_chis.click()<|docstring|>Deploy ClickHouse Operator on Altinity dashboard.<|endoftext|> |
709315b985dee51dbe31d40add98135909659ba823ebc916d1910fdeca88cb5f | @TestStep(Finally)
def halt_vagrant(self):
'Halt the running vagrant VM.'
with Finally('I halt the vm'):
os.system('vagrant halt') | Halt the running vagrant VM. | tests/steps.py | halt_vagrant | roshanths/altinity-dashboard | 1 | python | @TestStep(Finally)
def halt_vagrant(self):
with Finally('I halt the vm'):
os.system('vagrant halt') | @TestStep(Finally)
def halt_vagrant(self):
with Finally('I halt the vm'):
os.system('vagrant halt')<|docstring|>Halt the running vagrant VM.<|endoftext|> |
73bc547e014b5ddaf47e48bfe6983d85691d616691aded61773ad5ebe399efd1 | @commands.command(aliases=['p'])
async def ping(self, ctx):
"Get the bot's current websocket and API latency."
start_time = time.time()
message = (await ctx.send(embed=nextcord.Embed(title='Testing Ping...', color=nextcord.Color.random())))
end_time = time.time()
(await message.edit(embed=nextcord.Embed(title=f'''Latency: {round((self.bot.latency * 1000))}ms
API: {round(((end_time - start_time) * 1000))}ms''', color=nextcord.Color.random()))) | Get the bot's current websocket and API latency. | cogs/mod.py | ping | TechnoFrost27/MainesianUtilities | 0 | python | @commands.command(aliases=['p'])
async def ping(self, ctx):
start_time = time.time()
message = (await ctx.send(embed=nextcord.Embed(title='Testing Ping...', color=nextcord.Color.random())))
end_time = time.time()
(await message.edit(embed=nextcord.Embed(title=f'Latency: {round((self.bot.latency * 1000))}ms
API: {round(((end_time - start_time) * 1000))}ms', color=nextcord.Color.random()))) | @commands.command(aliases=['p'])
async def ping(self, ctx):
start_time = time.time()
message = (await ctx.send(embed=nextcord.Embed(title='Testing Ping...', color=nextcord.Color.random())))
end_time = time.time()
(await message.edit(embed=nextcord.Embed(title=f'Latency: {round((self.bot.latency * 1000))}ms
API: {round(((end_time - start_time) * 1000))}ms', color=nextcord.Color.random())))<|docstring|>Get the bot's current websocket and API latency.<|endoftext|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.